blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
37c0f4f4f38905db494572485aff0acda8d01ef6 | 21a15af3e5b38d1cb10a107c2b66b0f712b49753 | /final_project/combine_pred/clean_mgdb.py | 3c242cf452a390a095112304c7c42cb3ecf45c4a | [
"MIT"
]
| permissive | dbirman/cs375 | 00793f07730e9d606e6c83125d1d16ad337f1a1c | 7aeac1ed57eff74cbecb3e1091b01f00d34629a8 | refs/heads/master | 2021-05-08T06:28:28.505935 | 2017-12-15T02:19:27 | 2017-12-15T02:19:27 | 106,620,325 | 0 | 2 | MIT | 2017-12-13T02:47:25 | 2017-10-11T23:33:22 | Jupyter Notebook | UTF-8 | Python | false | false | 2,111 | py | from __future__ import division, print_function, absolute_import
import os, sys
import numpy as np
import cPickle
import json
import copy
import argparse
import pymongo as pm
import gridfs
def get_parser():
parser = argparse.ArgumentParser(description='The script to delete the models saved in mongodb')
parser.add_argument('--nport', default = 27009, type = int, action = 'store', help = 'Port number of mongodb')
parser.add_argument('--expId', default = "combinet_alexnet_ncp_new_2", type = str, action = 'store', help = 'Name of experiment id')
parser.add_argument('--dbname', default = "combinet-test", type = str, action = 'store', help = 'Database name')
parser.add_argument('--collname', default = "combinet", type = str, action = 'store', help = 'Collection name')
return parser
def main():
parser = get_parser()
args = parser.parse_args()
load_conn = pm.MongoClient(port=args.nport)
collfs = gridfs.GridFS(load_conn[args.dbname], args.collname)
coll = collfs._GridFS__files
query = {'exp_id': args.expId, 'saved_filters': True}
count = collfs.find(query).count()
count_gfs = coll.find(query).count()
print(count, count_gfs)
find_res = coll.find(query)
print(find_res[0].keys())
print(find_res[0]['chunkSize'])
print(find_res[0]['filename'])
print(find_res[0]['_id'])
'''
loading_from = coll
fsbucket = gridfs.GridFSBucket(loading_from._Collection__database, bucket_name=loading_from.name.split('.')[0])
filename = os.path.basename(find_res[0]['filename'])
cache_filename = os.path.join('/home/chengxuz/.tfutils/tmp', filename)
load_dest = open(cache_filename, "w+")
load_dest.close()
load_dest = open(cache_filename, 'rwb+')
fsbucket.download_to_stream(find_res[0]['_id'], load_dest)
load_dest.close()
'''
#collfs.delete(find_res[0]['_id'])
loading_from = coll
fsbucket = gridfs.GridFSBucket(loading_from._Collection__database, bucket_name=loading_from.name.split('.')[0])
#fsbucket.delete(find_res[0]['_id'])
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
c9dd894b248f8e6b4bf9d0ecaec51c0503a07b4e | 9cec93a18ea94504947820205d0faae4d67ecd8d | /H2TauTau/prod/h2TauTauMiniAOD_emu_data_cfg.py | 29fd19e53e01ae472034e8e29c7df85f5492613b | []
| no_license | DESY-CMS-SUS/cmgtools-lite | de88b1d5dc20a925ed5b7c7be69fa3ef677955c6 | db52d50047178563a0eb7f5858ae100aa408ec68 | refs/heads/8_0_25 | 2021-05-23T04:36:22.900460 | 2017-11-09T10:32:41 | 2017-11-09T10:32:41 | 60,184,794 | 3 | 9 | null | 2021-02-17T23:22:12 | 2016-06-01T14:37:18 | Python | UTF-8 | Python | false | false | 118 | py | from CMGTools.H2TauTau.h2TauTauMiniAOD_generic_cfg import *
process = createProcess(channel='mu-ele', runOnMC=False)
| [
"[email protected]"
]
| |
fdae1cba35a9ea316df4511227052aeff18950ea | 9b79dc0b4b2f13dea85a1d29177e5eb266b6e7f7 | /var/lib/python-support/python2.6/orca/scripts/apps/yelp.py | c9fc5a9290f9ecd62ae22fb6f6fd9b245cfa4fec | []
| no_license | haniokasai/netwalker-rootfs | 0bc87efc0ae478338b6326fd9118befcbcc5cd06 | d08f7bf370a82b6970387bb9f165d374a9d9092b | refs/heads/master | 2021-01-10T11:04:34.436513 | 2016-01-12T06:09:50 | 2016-01-12T06:09:50 | 36,504,146 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 62 | py | /usr/share/python-support/gnome-orca/orca/scripts/apps/yelp.py | [
"[email protected]"
]
| |
3c10902f7900b2106e7a058e821f3ff2c015af73 | d0b52237c314fbae746a3922205c7f3c22c99498 | /server/runserver.py | bc997c42cae9f2a2a678909f3e02796ebcc9a5f0 | []
| no_license | kallebefelipe/spotify-albums | 5a7e9db6729351477e89ff5bdb7d7a10fd310393 | 44b16225a950f9a1580ede8c317af688b6d73dec | refs/heads/master | 2022-12-10T16:45:43.685587 | 2019-10-30T00:54:49 | 2019-10-30T00:54:49 | 217,175,675 | 0 | 0 | null | 2022-12-10T06:37:08 | 2019-10-24T00:08:25 | JavaScript | UTF-8 | Python | false | false | 618 | py | from flask import request, Flask
import requests
import json
from flask_cors import CORS
app = Flask(__name__)
CORS(app)
@app.route('/token', methods=['GET'])
def login():
if request.method == 'GET':
url = "https://accounts.spotify.com/api/token"
payload = {
'grant_type': "client_credentials"
}
headers = {
'Authorization': "Basic MjBiMzk3ZGU5YmEyNDE0Yjk2NGJmNTVjZmNlYzllYzM6ZmY0MDYzZGM1MDAxNDFkZTlhNjBiNjI2ZjY1YmNiMDg=",
}
response = requests.request("POST", url, data=payload, headers=headers)
return json.loads(response.text)
| [
"[email protected]"
]
| |
35d34c6d612853fb568231e774c9d4634b8cd4de | 025f930f0d342d116604a185103d13826d7ac360 | /GenericApiViews2/api/views.py | a61030fbcd532dfad511ff531fd4e9a4cc35ca15 | []
| no_license | moinakmalkhan/Learn-Django-Rest-Framework | 196c30591ed43ef7722cb22dea600a5ddcc0b8cf | de2ce66779525647d582998450a47558b70376f9 | refs/heads/master | 2023-09-03T08:51:50.346047 | 2021-11-14T15:48:23 | 2021-11-14T15:48:23 | 426,378,883 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,392 | py | from .serializers import StudentSerializer
from .models import Student
from rest_framework.generics import ListAPIView, CreateAPIView, RetrieveAPIView, UpdateAPIView, DestroyAPIView, ListCreateAPIView, RetrieveDestroyAPIView, RetrieveUpdateAPIView, RetrieveUpdateDestroyAPIView
class StudentList(ListAPIView):
queryset = Student.objects.all()
serializer_class = StudentSerializer
class StudentCreate(CreateAPIView):
queryset = Student.objects.all()
serializer_class = StudentSerializer
class StudentRetrieve(RetrieveAPIView):
queryset = Student.objects.all()
serializer_class = StudentSerializer
class StudentUpdate(UpdateAPIView):
queryset = Student.objects.all()
serializer_class = StudentSerializer
class StudentDestroy(DestroyAPIView):
queryset = Student.objects.all()
serializer_class = StudentSerializer
class StudentListCreate(ListCreateAPIView):
queryset = Student.objects.all()
serializer_class = StudentSerializer
class StudentRetrieveUpdate(RetrieveUpdateAPIView):
queryset = Student.objects.all()
serializer_class = StudentSerializer
class StudentRetrieveDestroy(RetrieveDestroyAPIView):
queryset = Student.objects.all()
serializer_class = StudentSerializer
class StudentRetrieveUpdateDestroy(RetrieveUpdateDestroyAPIView):
queryset = Student.objects.all()
serializer_class = StudentSerializer
| [
"[email protected]"
]
| |
61979b816a8ccba14cde24e7256aaea343707468 | ff23e5c890216a1a63278ecb40cd7ac79ab7a4cd | /clients/kratos/python/test/test_update_login_flow_with_totp_method.py | 1a1c70aa5b19aa111a665097c39a43f3b7ef81ef | [
"Apache-2.0"
]
| permissive | ory/sdk | fcc212166a92de9d27b2dc8ff587dcd6919e53a0 | 7184e13464948d68964f9b605834e56e402ec78a | refs/heads/master | 2023-09-01T10:04:39.547228 | 2023-08-31T08:46:23 | 2023-08-31T08:46:23 | 230,928,630 | 130 | 85 | Apache-2.0 | 2023-08-14T11:09:31 | 2019-12-30T14:21:17 | C# | UTF-8 | Python | false | false | 1,061 | py | """
Ory Identities API
This is the API specification for Ory Identities with features such as registration, login, recovery, account verification, profile settings, password reset, identity management, session management, email and sms delivery, and more. # noqa: E501
The version of the OpenAPI document: v1.0.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import ory_kratos_client
from ory_kratos_client.model.update_login_flow_with_totp_method import UpdateLoginFlowWithTotpMethod
class TestUpdateLoginFlowWithTotpMethod(unittest.TestCase):
"""UpdateLoginFlowWithTotpMethod unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testUpdateLoginFlowWithTotpMethod(self):
"""Test UpdateLoginFlowWithTotpMethod"""
# FIXME: construct object with mandatory attributes with example values
# model = UpdateLoginFlowWithTotpMethod() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
50d95be7ef2ce5d4a134ccc436ebc29970f1a7f8 | afbae26b958b5ef20548402a65002dcc8e55b66a | /release/stubs.min/Autodesk/Revit/DB/__init___parts/AreaTag.py | 332d52a8e52d3635319402c9944936eaa97a724a | [
"MIT"
]
| permissive | gtalarico/ironpython-stubs | d875cb8932c7644f807dc6fde9dd513d159e4f5c | c7f6a6cb197e3949e40a4880a0b2a44e72d0a940 | refs/heads/master | 2023-07-12T01:43:47.295560 | 2022-05-23T18:12:06 | 2022-05-23T18:12:06 | 95,340,553 | 235 | 88 | NOASSERTION | 2023-07-05T06:36:28 | 2017-06-25T05:30:46 | Python | UTF-8 | Python | false | false | 1,410 | py | class AreaTag(SpatialElementTag,IDisposable):
""" Provides access to the area topology in Autodesk Revit. """
def Dispose(self):
""" Dispose(self: Element,A_0: bool) """
pass
def getBoundingBox(self,*args):
""" getBoundingBox(self: Element,view: View) -> BoundingBoxXYZ """
pass
def ReleaseUnmanagedResources(self,*args):
""" ReleaseUnmanagedResources(self: Element,disposing: bool) """
pass
def setElementType(self,*args):
""" setElementType(self: Element,type: ElementType,incompatibleExceptionMessage: str) """
pass
def __enter__(self,*args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self,*args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
Area=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The area that the tag is associated with.
Get: Area(self: AreaTag) -> Area
"""
AreaTagType=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The tag type.
Get: AreaTagType(self: AreaTag) -> AreaTagType
Set: AreaTagType(self: AreaTag)=value
"""
| [
"[email protected]"
]
| |
ec944233f08b6b31e0c6a20e89a857540fba1341 | 504efba4ab5ba1721ab3388144b16fa5f24833e7 | /05_Chroma_Scan_NoSC/02_04/simulation_parameters.py | 3e62fc436c63071f23e47fade4bde02984f16c0f | [
"MIT"
]
| permissive | HaroonRafique/PS_Transfer | b568fe41c98357877c3bc63b2ca89f8724439da0 | 59ed8a0978ba4699f34c9f7a2500e0026759a2b6 | refs/heads/master | 2023-05-25T21:13:36.586605 | 2020-07-10T07:41:40 | 2020-07-10T07:41:40 | 213,405,455 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,799 | py | import numpy as np
parameters = {}
dps = [-2.5E-3, -2E-3, -1.5E-3, -1E-3, -0.5E-3, 0, 0.5E-3, 1E-3, 1.5E-3, 2E-3, 2.5E-3]
parameters['dpp_rms'] = dps[3]
parameters['x_offset'] = 0.#50E-6 # 50 micron orbit offset to begin oscillation
parameters['n_macroparticles'] = int(5E4) # int(5E5)
# Include machine (PS), tunes, lattice start position (BWS65H) for bunch output file label
parameters['tunex'] = '6218'
parameters['tuney'] = '624'
parameters['machine'] = 'PS'
parameters['lattice_start'] = 'BSG52'
parameters['Optics'] = 'Op' #'ReM' #'Lattice', #,
parameters['bunch_label'] = parameters['machine'] + '_Lattice_Tune_' + parameters['tunex'] + '_' + parameters['tuney'] + '_' + parameters['lattice_start']
parameters['flat_file'] = '../../00_Lattice_Setup/Optimised_Lattice/PTC-PyORBIT_flat_file.flt'
parameters['tomo_file'] = 'PyORBIT_Tomo_file_BCMS_PreLIU.mat'
parameters['bunch_file'] = '../../01_Generate_Distn/Bunches/PyORBIT_Tomo_Bunch_Manual_Twiss_Nmp_' + str(parameters['n_macroparticles'])+'_PS_Lattice_Tune_6218_624_' + parameters['lattice_start']+'_'+parameters['Optics']+'.mat'
parameters['intensity'] = 65E+10
parameters['macrosize'] = parameters['intensity']/float(parameters['n_macroparticles'])
parameters['gamma'] = 2.49253731343
parameters['bunch_length'] = 140e-9
parameters['blength'] = 140e-9
parameters['epsn_x'] = 1E-6
parameters['epsn_y'] = 1E-6
parameters['LongitudinalJohoParameter'] = 1.2
parameters['LongitudinalCut'] = 2.4
parameters['TransverseCut'] = 5
parameters['rf_voltage'] = 0.0212942055190595723
# ~ parameters['rf_voltage'] = 0.0
parameters['circumference'] = 2*np.pi*100
parameters['phi_s'] = 0
parameters['macrosize'] = parameters['intensity']/float(parameters['n_macroparticles'])
# PS Injection 1.4 GeV
parameters['gamma'] = 2.49253731343
parameters['beta'] = np.sqrt(parameters['gamma']**2-1)/parameters['gamma']
c = 299792458
parameters['sig_z'] = (parameters['beta'] * c * parameters['blength'])/4.
parameters['turns_max'] = int(30)
parameters['turns_print'] = range(0, parameters['turns_max'])
parameters['turns_update'] = range(0, parameters['turns_max'])
switches = {
'CreateDistn': True,
'Space_Charge': False,
'GridSizeX': 64,
'GridSizeY': 64,
'GridSizeZ': 32
}
# PTC RF Table Parameters
harmonic_factors = [1] # this times the base harmonic defines the RF harmonics (for SPS = 4620, PS 10MHz 7, 8, or 9)
time = np.array([0,1,2])
ones = np.ones_like(time)
Ekin_GeV = 1.4*ones
RF_voltage_MV = np.array([0.0212942055190595723*ones]).T # in MV
# ~ RF_voltage_MV = np.array([0.0*ones]).T # in MV
RF_phase = np.array([np.pi*ones]).T
RFparameters = {
'harmonic_factors': harmonic_factors,
'time': time,
'Ekin_GeV': Ekin_GeV,
'voltage_MV': RF_voltage_MV,
'phase': RF_phase
}
| [
"[email protected]"
]
| |
46e9e80b3ef30dbb6efd906e9158852e265a5b91 | d125c002a6447c3f14022b786b07712a7f5b4974 | /tests/bugs/core_4811_test.py | 2bbaf2d77f1cba7d0e6a4e8ea3c6a6fc24a2c090 | [
"MIT"
]
| permissive | FirebirdSQL/firebird-qa | 89d5b0035071f9f69d1c869997afff60c005fca9 | cae18186f8c31511a7f68248b20f03be2f0b97c6 | refs/heads/master | 2023-08-03T02:14:36.302876 | 2023-07-31T23:02:56 | 2023-07-31T23:02:56 | 295,681,819 | 3 | 2 | MIT | 2023-06-16T10:05:55 | 2020-09-15T09:41:22 | Python | UTF-8 | Python | false | false | 7,408 | py | #coding:utf-8
"""
ID: issue-5109
ISSUE: 5109
TITLE: Make user names behave according to SQL identifiers rules
DESCRIPTION:
JIRA: CORE-4811
FBTEST: bugs.core_4811
"""
import pytest
from firebird.qa import *
substitutions = [('set echo.*', ''), ('Use CONNECT or CREATE DATABASE.*', ''),
('Your user name and password.*', ''), ('line: [0-9]+, col: [0-9]+', ''),
('exception [0-9]+', 'exception')]
db = db_factory()
tmp_user = user_factory('db', name='tmp$c4811', password='1')
tmp_role = role_factory('db', name='Boss')
test_script = """
set wng off;
set list on;
create or alter procedure sp_check_actual_role as begin end;
commit;
recreate exception ex_have_no_role 'You''ve specified role: >@1< -- but your actual role is NONE.';
set term ^;
create or alter procedure sp_check_actual_role(
a_probe_role varchar(31)
) returns(
checking varchar(80),
result varchar(31)
) as
begin
if ( upper(current_role) = 'NONE' )
then
exception ex_have_no_role using ( a_probe_role );
checking = 'role: >' || a_probe_role || '< - '
|| trim(
iif( a_probe_role containing '''', 'in apostrophes',
iif( a_probe_role containing '"', 'in double quotes', 'without delimiters' )
)
)
|| ', ' || iif( upper(a_probe_role) = a_probe_role, 'UPPER case', 'CaMeL case' )
;
result = current_role;
suspend;
end
^
set term ;^
commit;
set bail on;
set echo on;
grant Boss to Tmp$c4811;
grant usage on exception ex_have_no_role to Tmp$c4811;
grant execute on procedure sp_check_actual_role to Tmp$c4811;
set echo off;
set bail off;
-- show grants;
commit;
-- set echo on;
-- checking for USER name:
connect '$(DSN)' user 'Tmp$c4811' password '1';
-- PASSES since http://sourceforge.net/p/firebird/code/62016 (2015-07-16 14:26), this was build = 31981
select 'user: >''Tmp$c4811''< - in apostrophes, CaMeL case' checking, current_user as result from rdb$database;
commit;
connect '$(DSN)' user 'TMP$C4811' password '1'; -- should PASS, checked on builds 31948, 31981
select 'user: >''TMP$C4811''< - in apostrophes, UPPER case' checking, current_user as result from rdb$database;
commit;
connect '$(DSN)' user Tmp$c4811 password '1'; -- should PASS, checked on builds 31948, 31981
select 'user: >Tmp$c4811< - without delimiters, CaMeL case' checking, current_user as result from rdb$database;
commit;
connect '$(DSN)' user TMP$C4811 password '1'; -- should PASS, checked on builds 31948, 31981
select 'user: >TMP$C4811< - without delimiters, UPPER case' checking, current_user as result from rdb$database;
commit;
connect '$(DSN)' user "Tmp$c4811" password '1'; -- should *** FAIL ***
select 'user: >"Tmp$c4811"< - in double quotes, CaMeL case' checking, current_user as result from rdb$database;
commit;
connect '$(DSN)' user "TMP$C4811" password '1'; -- should PASS, checked on builds 31948, 31981
select 'user: >"TMP$C4811" - in double quotes, UPPER case' checking, current_user as result from rdb$database;
commit;
-- checking for ROLE (actual role in all following cases will be: [BOSS], checked on builds 31948, 31981)
-- Statement that created role (see above):
-- create role Boss;
-- Enclosing role in apostrophes and specifying it exactly like it was in its creation sttm:
connect '$(DSN)' user 'TMP$C4811' password '1' role 'Boss';
select * from sp_check_actual_role( '''Boss''' ); --------------- should return: BOSS
commit;
-- Enclosing role in apostrophes and specifying it in UPPERCASE (i.e. differ than in its CREATE ROLE statement):
connect '$(DSN)' user 'TMP$C4811' password '1' role 'BOSS';
select * from sp_check_actual_role( '''BOSS''' ); --------------- should return: BOSS
commit;
-- do NOT enclosing role in any delimiters and change CaSe of its characters (i.e. differ than in its CREATE ROLE statement):
connect '$(DSN)' user 'TMP$C4811' password '1' role BosS;
select * from sp_check_actual_role( 'BosS' ); --------------- should return: BOSS
commit;
-- do NOT enclosing role in any delimiters and specifying it in UPPERCASE (i.e. differ than in its CREATE ROLE statement):
connect '$(DSN)' user 'TMP$C4811' password '1' role BOSS;
select * from sp_check_actual_role( 'BOSS' ); --------------- should return: BOSS
commit;
-- Enclosing role in double quotes and change CaSe of its characters (i.e. differ than in its CREATE ROLE statement):
connect '$(DSN)' user 'TMP$C4811' password '1' role "BoSs";
select * from sp_check_actual_role( '"BoSs"' ); --------------- should raise EX_HAVE_NO_ROLE, actual role will be 'NONE'
commit;
-- Enclosing role in double quotes and specifying it in UPPERCASE (i.e. differ than in its CREATE ROLE statement):
connect '$(DSN)' user 'TMP$C4811' password '1' role "BOSS";
select * from sp_check_actual_role( '"BOSS"' ); --------------- should return: BOSS
commit;
"""
act = isql_act('db', test_script, substitutions=substitutions)
expected_stdout = """
grant Boss to Tmp$c4811;
grant usage on exception ex_have_no_role to Tmp$c4811;
grant execute on procedure sp_check_actual_role to Tmp$c4811;
CHECKING user: >'Tmp$c4811'< - in apostrophes, CaMeL case
RESULT TMP$C4811
CHECKING user: >'TMP$C4811'< - in apostrophes, UPPER case
RESULT TMP$C4811
CHECKING user: >Tmp$c4811< - without delimiters, CaMeL case
RESULT TMP$C4811
CHECKING user: >TMP$C4811< - without delimiters, UPPER case
RESULT TMP$C4811
CHECKING user: >"TMP$C4811" - in double quotes, UPPER case
RESULT TMP$C4811
CHECKING role: >'Boss'< - in apostrophes, CaMeL case
RESULT BOSS
CHECKING role: >'BOSS'< - in apostrophes, UPPER case
RESULT BOSS
CHECKING role: >BosS< - without delimiters, CaMeL case
RESULT BOSS
CHECKING role: >BOSS< - without delimiters, UPPER case
RESULT BOSS
CHECKING role: >"BOSS"< - in double quotes, UPPER case
RESULT BOSS
"""
expected_stderr = """
Statement failed, SQLSTATE = 28000
Statement failed, SQLSTATE = HY000
exception 3
-EX_HAVE_NO_ROLE
-You've specified role: >"BoSs"< -- but your actual role is NONE.
-At procedure 'SP_CHECK_ACTUAL_ROLE'
"""
@pytest.mark.version('>=3.0')
def test_1(act: Action, tmp_user: User, tmp_role: Role):
act.expected_stdout = expected_stdout
act.expected_stderr = expected_stderr
act.execute()
assert (act.clean_stderr == act.clean_expected_stderr and
act.clean_stdout == act.clean_expected_stdout)
| [
"[email protected]"
]
| |
31abca92b67478232e45a2f43afb9ea19a723fb7 | 711756b796d68035dc6a39060515200d1d37a274 | /output_cog/optimized_13917.py | 902b5e03feced9e01f95264b296d6efbe93b91b0 | []
| no_license | batxes/exocyst_scripts | 8b109c279c93dd68c1d55ed64ad3cca93e3c95ca | a6c487d5053b9b67db22c59865e4ef2417e53030 | refs/heads/master | 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,843 | py | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "Cog2_GFPN" not in marker_sets:
s=new_marker_set('Cog2_GFPN')
marker_sets["Cog2_GFPN"]=s
s= marker_sets["Cog2_GFPN"]
mark=s.place_marker((667.109, 507.825, 533.431), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_0" not in marker_sets:
s=new_marker_set('Cog2_0')
marker_sets["Cog2_0"]=s
s= marker_sets["Cog2_0"]
mark=s.place_marker((598.529, 515.802, 527.87), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_1" not in marker_sets:
s=new_marker_set('Cog2_1')
marker_sets["Cog2_1"]=s
s= marker_sets["Cog2_1"]
mark=s.place_marker((519.938, 535.6, 521.688), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_GFPC" not in marker_sets:
s=new_marker_set('Cog2_GFPC')
marker_sets["Cog2_GFPC"]=s
s= marker_sets["Cog2_GFPC"]
mark=s.place_marker((614.933, 634.902, 541.058), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_Anch" not in marker_sets:
s=new_marker_set('Cog2_Anch')
marker_sets["Cog2_Anch"]=s
s= marker_sets["Cog2_Anch"]
mark=s.place_marker((327.887, 536.019, 492.105), (0.89, 0.1, 0.1), 18.4716)
if "Cog3_GFPN" not in marker_sets:
s=new_marker_set('Cog3_GFPN')
marker_sets["Cog3_GFPN"]=s
s= marker_sets["Cog3_GFPN"]
mark=s.place_marker((621.004, 511.883, 536.518), (1, 1, 0), 18.4716)
if "Cog3_0" not in marker_sets:
s=new_marker_set('Cog3_0')
marker_sets["Cog3_0"]=s
s= marker_sets["Cog3_0"]
mark=s.place_marker((622.83, 511.634, 537.162), (1, 1, 0.2), 17.1475)
if "Cog3_1" not in marker_sets:
s=new_marker_set('Cog3_1')
marker_sets["Cog3_1"]=s
s= marker_sets["Cog3_1"]
mark=s.place_marker((607.228, 506.212, 559.908), (1, 1, 0.2), 17.1475)
if "Cog3_2" not in marker_sets:
s=new_marker_set('Cog3_2')
marker_sets["Cog3_2"]=s
s= marker_sets["Cog3_2"]
mark=s.place_marker((583.036, 494.808, 567.711), (1, 1, 0.2), 17.1475)
if "Cog3_3" not in marker_sets:
s=new_marker_set('Cog3_3')
marker_sets["Cog3_3"]=s
s= marker_sets["Cog3_3"]
mark=s.place_marker((587.677, 498.739, 595.052), (1, 1, 0.2), 17.1475)
if "Cog3_4" not in marker_sets:
s=new_marker_set('Cog3_4')
marker_sets["Cog3_4"]=s
s= marker_sets["Cog3_4"]
mark=s.place_marker((598.621, 504.046, 620.399), (1, 1, 0.2), 17.1475)
if "Cog3_5" not in marker_sets:
s=new_marker_set('Cog3_5')
marker_sets["Cog3_5"]=s
s= marker_sets["Cog3_5"]
mark=s.place_marker((624.997, 504.981, 630.195), (1, 1, 0.2), 17.1475)
if "Cog3_GFPC" not in marker_sets:
s=new_marker_set('Cog3_GFPC')
marker_sets["Cog3_GFPC"]=s
s= marker_sets["Cog3_GFPC"]
mark=s.place_marker((643.414, 499.458, 523.997), (1, 1, 0.4), 18.4716)
if "Cog3_Anch" not in marker_sets:
s=new_marker_set('Cog3_Anch')
marker_sets["Cog3_Anch"]=s
s= marker_sets["Cog3_Anch"]
mark=s.place_marker((604.561, 503.354, 733.586), (1, 1, 0.4), 18.4716)
if "Cog4_GFPN" not in marker_sets:
s=new_marker_set('Cog4_GFPN')
marker_sets["Cog4_GFPN"]=s
s= marker_sets["Cog4_GFPN"]
mark=s.place_marker((422.202, 495.792, 646.752), (0, 0, 0.8), 18.4716)
if "Cog4_0" not in marker_sets:
s=new_marker_set('Cog4_0')
marker_sets["Cog4_0"]=s
s= marker_sets["Cog4_0"]
mark=s.place_marker((422.202, 495.792, 646.752), (0, 0, 0.8), 17.1475)
if "Cog4_1" not in marker_sets:
s=new_marker_set('Cog4_1')
marker_sets["Cog4_1"]=s
s= marker_sets["Cog4_1"]
mark=s.place_marker((427.93, 498.577, 617.452), (0, 0, 0.8), 17.1475)
if "Cog4_2" not in marker_sets:
s=new_marker_set('Cog4_2')
marker_sets["Cog4_2"]=s
s= marker_sets["Cog4_2"]
mark=s.place_marker((441.395, 501.717, 591.245), (0, 0, 0.8), 17.1475)
if "Cog4_3" not in marker_sets:
s=new_marker_set('Cog4_3')
marker_sets["Cog4_3"]=s
s= marker_sets["Cog4_3"]
mark=s.place_marker((464.569, 503.472, 572.92), (0, 0, 0.8), 17.1475)
if "Cog4_4" not in marker_sets:
s=new_marker_set('Cog4_4')
marker_sets["Cog4_4"]=s
s= marker_sets["Cog4_4"]
mark=s.place_marker((491.928, 505.445, 562.431), (0, 0, 0.8), 17.1475)
if "Cog4_5" not in marker_sets:
s=new_marker_set('Cog4_5')
marker_sets["Cog4_5"]=s
s= marker_sets["Cog4_5"]
mark=s.place_marker((521.126, 505.101, 555.454), (0, 0, 0.8), 17.1475)
if "Cog4_6" not in marker_sets:
s=new_marker_set('Cog4_6')
marker_sets["Cog4_6"]=s
s= marker_sets["Cog4_6"]
mark=s.place_marker((549.813, 500.049, 549.968), (0, 0, 0.8), 17.1475)
if "Cog4_GFPC" not in marker_sets:
s=new_marker_set('Cog4_GFPC')
marker_sets["Cog4_GFPC"]=s
s= marker_sets["Cog4_GFPC"]
mark=s.place_marker((462.997, 554.071, 785.911), (0, 0, 0.8), 18.4716)
if "Cog4_Anch" not in marker_sets:
s=new_marker_set('Cog4_Anch')
marker_sets["Cog4_Anch"]=s
s= marker_sets["Cog4_Anch"]
mark=s.place_marker((664.454, 436.262, 323.669), (0, 0, 0.8), 18.4716)
if "Cog5_GFPN" not in marker_sets:
s=new_marker_set('Cog5_GFPN')
marker_sets["Cog5_GFPN"]=s
s= marker_sets["Cog5_GFPN"]
mark=s.place_marker((530.181, 489.408, 524.905), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_0" not in marker_sets:
s=new_marker_set('Cog5_0')
marker_sets["Cog5_0"]=s
s= marker_sets["Cog5_0"]
mark=s.place_marker((530.181, 489.408, 524.905), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_1" not in marker_sets:
s=new_marker_set('Cog5_1')
marker_sets["Cog5_1"]=s
s= marker_sets["Cog5_1"]
mark=s.place_marker((537.209, 510.854, 506.841), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_2" not in marker_sets:
s=new_marker_set('Cog5_2')
marker_sets["Cog5_2"]=s
s= marker_sets["Cog5_2"]
mark=s.place_marker((541.612, 537.771, 497.218), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_3" not in marker_sets:
s=new_marker_set('Cog5_3')
marker_sets["Cog5_3"]=s
s= marker_sets["Cog5_3"]
mark=s.place_marker((537.871, 562.934, 510.914), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_GFPC" not in marker_sets:
s=new_marker_set('Cog5_GFPC')
marker_sets["Cog5_GFPC"]=s
s= marker_sets["Cog5_GFPC"]
mark=s.place_marker((662.8, 563.183, 515.403), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_Anch" not in marker_sets:
s=new_marker_set('Cog5_Anch')
marker_sets["Cog5_Anch"]=s
s= marker_sets["Cog5_Anch"]
mark=s.place_marker((413.24, 569.974, 510.008), (0.3, 0.3, 0.3), 18.4716)
if "Cog6_GFPN" not in marker_sets:
s=new_marker_set('Cog6_GFPN')
marker_sets["Cog6_GFPN"]=s
s= marker_sets["Cog6_GFPN"]
mark=s.place_marker((615.294, 539.027, 520.413), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_0" not in marker_sets:
s=new_marker_set('Cog6_0')
marker_sets["Cog6_0"]=s
s= marker_sets["Cog6_0"]
mark=s.place_marker((615.294, 539.027, 520.413), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_1" not in marker_sets:
s=new_marker_set('Cog6_1')
marker_sets["Cog6_1"]=s
s= marker_sets["Cog6_1"]
mark=s.place_marker((625.883, 563.906, 513.248), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_2" not in marker_sets:
s=new_marker_set('Cog6_2')
marker_sets["Cog6_2"]=s
s= marker_sets["Cog6_2"]
mark=s.place_marker((630.129, 561.999, 541.16), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_3" not in marker_sets:
s=new_marker_set('Cog6_3')
marker_sets["Cog6_3"]=s
s= marker_sets["Cog6_3"]
mark=s.place_marker((629.177, 541.625, 560.981), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_4" not in marker_sets:
s=new_marker_set('Cog6_4')
marker_sets["Cog6_4"]=s
s= marker_sets["Cog6_4"]
mark=s.place_marker((629.957, 521.301, 580.818), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_5" not in marker_sets:
s=new_marker_set('Cog6_5')
marker_sets["Cog6_5"]=s
s= marker_sets["Cog6_5"]
mark=s.place_marker((625.074, 497.906, 596.724), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_6" not in marker_sets:
s=new_marker_set('Cog6_6')
marker_sets["Cog6_6"]=s
s= marker_sets["Cog6_6"]
mark=s.place_marker((620.715, 476.148, 615.401), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_GFPC" not in marker_sets:
s=new_marker_set('Cog6_GFPC')
marker_sets["Cog6_GFPC"]=s
s= marker_sets["Cog6_GFPC"]
mark=s.place_marker((592.628, 443.302, 542.615), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_Anch" not in marker_sets:
s=new_marker_set('Cog6_Anch')
marker_sets["Cog6_Anch"]=s
s= marker_sets["Cog6_Anch"]
mark=s.place_marker((647.726, 509.767, 692.654), (0.21, 0.49, 0.72), 18.4716)
if "Cog7_GFPN" not in marker_sets:
s=new_marker_set('Cog7_GFPN')
marker_sets["Cog7_GFPN"]=s
s= marker_sets["Cog7_GFPN"]
mark=s.place_marker((581.573, 459.826, 500.938), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_0" not in marker_sets:
s=new_marker_set('Cog7_0')
marker_sets["Cog7_0"]=s
s= marker_sets["Cog7_0"]
mark=s.place_marker((579.447, 486.869, 502.351), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_1" not in marker_sets:
s=new_marker_set('Cog7_1')
marker_sets["Cog7_1"]=s
s= marker_sets["Cog7_1"]
mark=s.place_marker((572.59, 544.056, 507.924), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_2" not in marker_sets:
s=new_marker_set('Cog7_2')
marker_sets["Cog7_2"]=s
s= marker_sets["Cog7_2"]
mark=s.place_marker((562.773, 600.32, 512.541), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_GFPC" not in marker_sets:
s=new_marker_set('Cog7_GFPC')
marker_sets["Cog7_GFPC"]=s
s= marker_sets["Cog7_GFPC"]
mark=s.place_marker((633.517, 602.74, 473.999), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_Anch" not in marker_sets:
s=new_marker_set('Cog7_Anch')
marker_sets["Cog7_Anch"]=s
s= marker_sets["Cog7_Anch"]
mark=s.place_marker((497.445, 673.65, 546.961), (0.7, 0.7, 0.7), 18.4716)
if "Cog8_0" not in marker_sets:
s=new_marker_set('Cog8_0')
marker_sets["Cog8_0"]=s
s= marker_sets["Cog8_0"]
mark=s.place_marker((654.722, 548.367, 481.671), (1, 0.5, 0), 17.1475)
if "Cog8_1" not in marker_sets:
s=new_marker_set('Cog8_1')
marker_sets["Cog8_1"]=s
s= marker_sets["Cog8_1"]
mark=s.place_marker((626.555, 551.349, 480.756), (1, 0.5, 0), 17.1475)
if "Cog8_2" not in marker_sets:
s=new_marker_set('Cog8_2')
marker_sets["Cog8_2"]=s
s= marker_sets["Cog8_2"]
mark=s.place_marker((599.365, 549.012, 473.982), (1, 0.5, 0), 17.1475)
if "Cog8_3" not in marker_sets:
s=new_marker_set('Cog8_3')
marker_sets["Cog8_3"]=s
s= marker_sets["Cog8_3"]
mark=s.place_marker((572.786, 542.742, 466.258), (1, 0.5, 0), 17.1475)
if "Cog8_4" not in marker_sets:
s=new_marker_set('Cog8_4')
marker_sets["Cog8_4"]=s
s= marker_sets["Cog8_4"]
mark=s.place_marker((545.21, 538.14, 462.587), (1, 0.5, 0), 17.1475)
if "Cog8_5" not in marker_sets:
s=new_marker_set('Cog8_5')
marker_sets["Cog8_5"]=s
s= marker_sets["Cog8_5"]
mark=s.place_marker((517.404, 532.912, 464.267), (1, 0.5, 0), 17.1475)
if "Cog8_GFPC" not in marker_sets:
s=new_marker_set('Cog8_GFPC')
marker_sets["Cog8_GFPC"]=s
s= marker_sets["Cog8_GFPC"]
mark=s.place_marker((587.607, 509.799, 498.039), (1, 0.6, 0.1), 18.4716)
if "Cog8_Anch" not in marker_sets:
s=new_marker_set('Cog8_Anch')
marker_sets["Cog8_Anch"]=s
s= marker_sets["Cog8_Anch"]
mark=s.place_marker((446.497, 555.67, 431.378), (1, 0.6, 0.1), 18.4716)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
| [
"[email protected]"
]
| |
efeb6964d4ee7986f15a1bfc4dc59835f24a8359 | af9268e1ead8cdb491868c14a2240d9e44fb3b56 | /last-minute-env/lib/python2.7/site-packages/django/core/management/sql.py | 5cc7aaf4f51755cba6b812d95cb2f39c8ea9b876 | []
| no_license | frosqh/Cousinade2017 | d5154c24c93ca8089eeba26b53c594e92cb6bd82 | c34d5707af02402bf2bb7405eddc91297da399ff | refs/heads/master | 2021-01-20T07:57:34.586476 | 2017-10-22T18:42:45 | 2017-10-22T18:42:45 | 90,074,802 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,026 | py | from __future__ import unicode_literals
from django.apps import apps
from django.db import models
def sql_flush(style, connection, only_django=False, reset_sequences=True, allow_cascade=False):
    """
    Return the list of SQL statements used to flush the database.

    If only_django is True, only table names that have associated Django
    models and are in INSTALLED_APPS are included; otherwise every table in
    the database is flushed. Sequences are reset only when reset_sequences
    is True.
    """
    introspection = connection.introspection
    if only_django:
        tables = introspection.django_table_names(only_existing=True, include_views=False)
    else:
        tables = introspection.table_names(include_views=False)
    seqs = introspection.sequence_list() if reset_sequences else ()
    return connection.ops.sql_flush(style, tables, seqs, allow_cascade)
def emit_pre_migrate_signal(verbosity, interactive, db, **kwargs):
    """Send the pre_migrate signal for every installed app that has models."""
    # Apps without a models module have nothing to migrate, so skip them.
    with_models = (ac for ac in apps.get_app_configs() if ac.models_module is not None)
    for app_config in with_models:
        if verbosity >= 2:
            print("Running pre-migrate handlers for application %s" % app_config.label)
        models.signals.pre_migrate.send(
            sender=app_config,
            app_config=app_config,
            verbosity=verbosity,
            interactive=interactive,
            using=db,
            **kwargs
        )
def emit_post_migrate_signal(verbosity, interactive, db, **kwargs):
    """Send the post_migrate signal for every installed app that has models."""
    # Apps without a models module have nothing to migrate, so skip them.
    with_models = (ac for ac in apps.get_app_configs() if ac.models_module is not None)
    for app_config in with_models:
        if verbosity >= 2:
            print("Running post-migrate handlers for application %s" % app_config.label)
        models.signals.post_migrate.send(
            sender=app_config,
            app_config=app_config,
            verbosity=verbosity,
            interactive=interactive,
            using=db,
            **kwargs
        )
| [
"[email protected]"
]
| |
35239d3462f35f0aad5fdfef0333f99b1a39b0e5 | 473d3edf1cc1fda57f7da875c16dc93a4ebbdb23 | /blog/migrations/0001_initial.py | 56ca95d90d3ca7a5f27d1d98cd9b39041a7ceb8e | []
| no_license | nelliejellie/blog | df9a61bc40f8589252591528ed238b8010a17e53 | b629c8d3e2dd20b64c960f6f1f6f8bc7c62f95a4 | refs/heads/master | 2022-06-24T01:35:21.104705 | 2020-05-08T20:07:13 | 2020-05-08T20:07:13 | 261,609,317 | 0 | 0 | null | 2020-05-08T20:07:14 | 2020-05-05T23:57:54 | JavaScript | UTF-8 | Python | false | false | 1,365 | py | # Generated by Django 3.0.2 on 2020-04-29 09:21
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=250)),
('slug', models.SlugField(max_length=100, unique_for_date='publish')),
('body', models.TextField()),
('publish', models.DateTimeField(default=django.utils.timezone.now)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('status', models.CharField(choices=[('draft', 'Draft'), ('published', 'Published')], default='draft', max_length=10)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='blog_post', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('-publish',),
},
),
]
| [
"[email protected]"
]
| |
06e39ae81eeb08e88f62ee72fdeb30b8088d12d1 | 4d8f7abf3ec6ff049815663f1c3c0f85926caab9 | /SF2D/Compute_renorm_HT_4tops.py | cbe5a6bbc3a44497c365ea653ac9440806ccf4c5 | []
| no_license | daniel-sunyou-li/ChargedHiggs | 07e160d92ae628ed950a1e13e9bbe41aabfa69d1 | 282139b9cee9788a2fc6c536c86cc4731e4f7de7 | refs/heads/master | 2023-08-06T06:27:44.969889 | 2021-08-06T14:42:00 | 2021-08-06T14:42:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,799 | py | import ROOT
import sys
import numpy
import argparse
import array
from ROOT import TFile, TTree
parser = argparse.ArgumentParser(description="compute the renormalization factors for charged Higgs analysis")
parser.add_argument("-f", "--file", default="", help="The path to the analysis tree")
parser.add_argument("-l", "--label", default="", help="The name of the output file")
args = parser.parse_args()

filename = args.file
tfile = TFile.Open(filename)

# Output file holding the (njets, HT) event-count and b-tag-weight histograms.
fout = ROOT.TFile("Fourtops_Weights_" + args.label + "_extended_HT_cuts.root", "RECREATE")

# Binning: 6 jet-multiplicity bins in [4, 10), 40 HT bins in [150, 4000) GeV.
h2D_origin = ROOT.TH2F("h2D_origin", "h2D_origin", 6, 4, 10, 40, 150, 4000)
h2D_origin.Sumw2()
h2D_weight_dcsv = ROOT.TH2F("h2D_weight_dcsv", "h2D_weight_dcsv", 6, 4, 10, 40, 150, 4000)
h2D_weight_dcsv.Sumw2()
h2D_weight_djet = ROOT.TH2F("h2D_weight_djet", "h2D_weight_djet", 6, 4, 10, 40, 150, 4000)
h2D_weight_djet.Sumw2()

ttree = tfile.Get("ljmet")
# Enable only the branches the event loop reads (large speed-up on big trees).
ttree.SetBranchStatus("*", 0)
for _branch in (
    "NJets_JetSubCalc*",
    "theJetPt_JetSubCalc_PtOrdered*",
    "AK4HT*",
    "btagCSVWeight*",
    "btagDeepJetWeight*",
    "leptonPt_MultiLepCalc*",
    "isElectron*",
    "isMuon*",
    "corr_met_MultiLepCalc*",
    "MCPastTrigger*",
):
    ttree.SetBranchStatus(_branch, 1)

nevents = ttree.GetEntries()
for iev in range(nevents):
    if iev % 1000 == 1:
        print(iev)
    ttree.GetEntry(iev)
    njet = ttree.NJets_JetSubCalc
    # Event selection: lepton pT (channel-dependent), MET and MC trigger.
    if not ((ttree.leptonPt_MultiLepCalc > 35 and ttree.isElectron) or (ttree.leptonPt_MultiLepCalc > 30 and ttree.isMuon)):
        continue
    if not (ttree.corr_met_MultiLepCalc > 30):
        continue
    if not (ttree.MCPastTrigger):
        continue
    HT = ttree.AK4HT
    if njet > 9:
        njet = 9  # merge everything with >=9 jets into the last multiplicity bin
    h2D_origin.Fill(njet, HT)
    h2D_weight_dcsv.Fill(njet, HT, ttree.btagCSVWeight)
    h2D_weight_djet.Fill(njet, HT, ttree.btagDeepJetWeight)

# Scale factors = unweighted / b-tag-weighted yields, per (njet, HT) bin.
h2D_scale_dcsv = h2D_origin.Clone()
h2D_scale_dcsv.SetTitle("h2D_scale_dcsv")
h2D_scale_dcsv.Divide(h2D_weight_dcsv)

h2D_scale_djet = h2D_origin.Clone()
h2D_scale_djet.SetTitle("h2D_scale_djet")  # fix: was mislabeled "h2D_scale_dcsv" (copy-paste)
h2D_scale_djet.Divide(h2D_weight_djet)

fout.WriteTObject(h2D_origin, "h2D_origin")
fout.WriteTObject(h2D_weight_dcsv, "h2D_weight_dcsv")
fout.WriteTObject(h2D_scale_dcsv, "h2D_scale_dcsv")
fout.WriteTObject(h2D_weight_djet, "h2D_weight_djet")
fout.WriteTObject(h2D_scale_djet, "h2D_scale_djet")
fout.Close()
| [
"[email protected]"
]
| |
c78eff018f1221cf0e9bbbe56bc0e7d2cb566ff7 | fca3644a3ab3c83bba33fb7a9a3bd94538a4dd5c | /drive/web/front/forms.py | a046ffe6bf4ba9cdacc31105def77467977003e6 | []
| no_license | enixdark/pyra-structures | 1a8327cf7de5c7b6ab552900e43d83001011cf15 | fb8df7bdbc7a256381d42c501bf55c54ebf7dae6 | refs/heads/master | 2023-01-22T01:39:38.648567 | 2020-10-02T07:25:52 | 2020-10-02T07:25:52 | 98,119,242 | 0 | 0 | null | 2023-01-12T13:10:39 | 2017-07-23T19:05:22 | CSS | UTF-8 | Python | false | false | 5,104 | py | from marshmallow import (
fields,
pre_load,
Schema,
validate,
validates,
validates_schema,
ValidationError,
)
import re
from drive.utils.email_address import EmailAddress
from drive.utils.i18n import _
from datetime import date
def IsText(field):
    """Build a validator rejecting C0/C1 control characters (incl. DEL) in *field*."""
    forbidden = re.compile(r'[\x00-\x1f\x7f-\x9f]')

    def text_validator(value):
        if forbidden.search(value):
            raise ValidationError(
                _(
                    'Invalid character(s) in ${field}.',
                    mapping={'field': field},
                )
            )
    return text_validator
def IsMultiLineText(field):
    """Build a validator like IsText but permitting newline/CR/tab-adjacent whitespace."""
    forbidden = re.compile(r'[\x00-\x09\x0b-\x0c\x0e-\x1f\x7f-\x9f]')

    def text_validator(value):
        if forbidden.search(value):
            raise ValidationError(
                _(
                    'Invalid character(s) in ${field}.',
                    mapping={'field': field},
                )
            )
    return text_validator
def IsIdentifier(field):
    """Build a validator allowing only ASCII letters, digits, underscore and hyphen."""
    forbidden = re.compile(r'[^A-Za-z0-9_-]')

    def text_validator(value):
        if forbidden.search(value):
            raise ValidationError(
                _(
                    'Invalid character(s) in ${field}.',
                    mapping={'field': field},
                )
            )
    return text_validator
def my_required(value, field_name=''):
    """Validator raising ValidationError when *value* is empty.

    Bug fix: the original raised when ``len(value) > 0``, i.e. it rejected
    every non-empty value while letting empty ones pass — the inverse of
    the "is required" message it reports.
    """
    if len(value) == 0:
        raise ValidationError(
            _(
                '${field} is required.',
                mapping={'field': field_name}
            )
        )
class Form(Schema):
    """Thin wrapper around marshmallow.Schema that carries its own payload
    and accumulates validation errors.

    Usage: instantiate with the raw input mapping, call ``validate()``,
    then inspect ``has_error`` / ``errors`` / ``error_message()``.
    """

    def __init__(self, data, *args, **kwargs):
        super(Form, self).__init__(*args, **kwargs)
        self.data = data          # raw input mapping to validate
        self.has_error = False    # set once any error is recorded
        self.errors = {}          # field name -> list of error messages

    def validate(self):
        """Run schema validation on ``self.data``; return True when clean."""
        errors = super().validate(data=self.data)
        if errors:
            self.has_error = True
            self.errors = errors
            return False
        return True

    def value(self, name):
        """Return the submitted value for ``name`` (None when absent or None).

        Fix: uses ``.get`` so a field missing from the payload no longer
        raises KeyError.
        """
        return self.data.get(name)

    def add_error(self, name='', message=""):
        """Record an extra error message for field ``name`` and return errors.

        Fix: ``setdefault`` avoids the KeyError the original raised when
        other fields already had errors but ``name`` did not.
        """
        self.has_error = True
        self.errors.setdefault(name, []).append(message)
        return self.errors

    def error_message(self):
        """Return the first recorded error message, or '' when none exist."""
        for messages in self.errors.values():
            if messages:
                return messages[0]
        return ''
class UserForm(Form):
    # Profile fields for an existing user: names plus email, all required,
    # each capped at 255 characters.
    first_name = fields.String(
        required=True,
        validate=[validate.Length(min=1, max=255)],
        error_messages={'required': 'user first name is required'},
    )
    last_name = fields.String(
        required=True,
        validate=[validate.Length(min=1, max=255)],
        error_messages={'required': 'user last name is required'},
    )
    email = fields.Email(
        required=True,
        validate=[validate.Length(min=1, max=255)],
        error_messages={'required': 'Email required'}
    )
class SignupForm(Form):
    # Registration payload: names, email, and a password typed twice.
    # NOTE(review): password == retype_password is not validated here —
    # presumably checked by the caller; confirm.
    first_name = fields.String(
        required=True,
        validate=[validate.Length(min=1, max=255)],
        error_messages={'required': 'First name required'}
    )
    last_name = fields.String(
        required=True,
        validate=[validate.Length(min=1, max=255)],
        error_messages={'required': 'Last name required'}
    )
    email = fields.Email(
        required=True,
        validate=[validate.Length(min=1, max=255)],
        error_messages={'required': 'Email required'}
    )
    # Passwords must be at least 8 characters.
    password = fields.String(
        required=True,
        validate=validate.Length(min=8),
        error_messages={'required': 'Password required'}
    )
    retype_password = fields.String(
        required=True,
        validate=validate.Length(min=8),
        error_messages={'required': 'Retype Password required'}
    )
class LoginForm(Form):
    # Login payload: email plus a password of at least 8 characters.
    email = fields.Email(
        required=True,
        validate=[validate.Length(min=1, max=255)],
        error_messages={'required': 'Email required'}
    )
    password = fields.String(
        required=True,
        validate=[validate.Length(min=8)],
        error_messages={'required': 'Password required'}
    )
class ChangePasswordForm(Form):
    # Password-change payload: current password plus the new one typed twice.
    # NOTE(review): new_password == confirm_password is not validated here —
    # presumably checked by the caller; confirm.
    old_password = fields.String(
        required=True,
        validate=[validate.Length(min=8)],
        error_messages={'required': 'Password required'}
    )
    new_password = fields.String(
        required=True,
        validate=[validate.Length(min=8)],
        error_messages={'required': 'Password required'}
    )
    confirm_password = fields.String(
        required=True,
        validate=[validate.Length(min=8)],
        error_messages={'required': 'Password required'}
    )
class ForgotPasswordForm(Form):
    # Password-reset request: only the account email is needed.
    email = fields.Email(
        required=True,
        validate=[validate.Length(min=1, max=255)],
        error_messages={'required': 'Email required'},
        default="",
    )
"[email protected]"
]
| |
17d1add048a7e5db1e09574e9d1fe27e3d3112e2 | 548fbb3bf6648e76e711ee398148cae9ee10a0d2 | /running_sum_array.py | 14687c688612f9dc8f01a1f9715982cc4b68a444 | []
| no_license | KonstantinSKY/LeetCode | 34cce8eda7182aa6a1616b3471b0cfe9310fe1d4 | 1570122134b962412b0530c3850eb37f1c8c585e | refs/heads/master | 2023-04-16T17:03:23.753146 | 2023-04-03T18:16:21 | 2023-04-03T18:16:21 | 310,714,169 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 513 | py | """ """
import time
from typing import List
class Solution:
    def runningSum(self, nums: List[int]) -> List[int]:
        """Replace nums with its running (prefix) sums, in place, and return it."""
        total = 0
        for idx, value in enumerate(nums):
            total += value
            nums[idx] = total
        return nums


if __name__ == "__main__":
    start_time = time.time()
    for case in ([1, 2, 3, 4], [1, 1, 1, 1, 1], [3, 1, 2, 10, 1], [3]):
        print(Solution().runningSum(case))
    print("--- %s seconds ---" % (time.time() - start_time))
| [
"[email protected]"
]
| |
544d0d079ce7d07e8a91f5a310818a244f1b8764 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5744014401732608_1/Python/izubr/gcj.py | fd75bd5e4d6cbfd5653b75455e59fbedb1601060 | []
| no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,770 | py |
def form_result(possibility, matrix, case):
    # Print the judge-formatted verdict for one (0-based) test case; for
    # POSSIBLE cases also print the adjacency matrix without its trailing
    # newline. (Python 2 script; print(x) with one argument is equivalent.)
    print("Case #%s: %s" % (case + 1, possibility))
    if possibility == "POSSIBLE":
        print(matrix.rstrip())
t = int(raw_input(""))
for case in range(t):
b, m = raw_input("").split(" ")
b, m = int(b), int(m)
matrix = [[1] * b for i in range(b)]
for i in range(b):
for j in range(b):
if i >= j:
matrix[i][j] = 0
in_point = [0] * b
from_point = [0] * b
in_point[0] = 1
cur_sum = 1
for i in range(1, b):
in_point[i] = 2 ** (i - 1)
for i in range(b):
from_point[i] = in_point[b - 1 - i]
paths = [[0] * b for i in range(b)]
for i in range(b):
for j in range(b):
if i < j:
paths[i][j] = in_point[i] * from_point[j]
if m > in_point[b-1]:
# print in_point[b-1]
form_result("IMPOSSIBLE", [], case)
continue
bin_digits = bin(m)[2:] + '0'
bin_digits = '0' * (b - len(bin_digits)) + bin_digits
# print bin_digits
all_zeros = True
for j in range(1, b):
if bin_digits[j] == '0':
matrix[0][j] = 0
else:
all_zeros = False
if all_zeros:
for j in range(1, b):
matrix[0][j] = 1
res_matrix = ""
for i in range(b):
res = ""
for j in range(b):
res += str(matrix[i][j])
res_matrix += res + "\n"
form_result("POSSIBLE", res_matrix, case)
# print in_point
# print from_point
# for i in range(b):
# res = ""
# for j in range(b):
# res += str(paths[i][j]) + " "
# print res
# form_result(best_code, best_jam, case)
| [
"[email protected]"
]
| |
0664ffff7d89af5ac8e3b22e1e91030ca4ea3943 | 547695aff7b19ec2fe3f6b6ab7447d7b65c89322 | /FlaskWebProject1/test/myapi/commons/apispec.py | c346f60a6453fb5111bffe93ad7447311ce92cce | []
| no_license | yuchanmo/FlaskWebProject1 | d914454fd71f226e83cf909268ae297e2edbf6db | ae862f950146ceb6638e2b1a25beb2ad8c1207d9 | refs/heads/master | 2023-01-24T01:03:23.167143 | 2019-08-21T15:06:51 | 2019-08-21T15:06:51 | 203,608,914 | 0 | 0 | null | 2022-12-27T15:34:13 | 2019-08-21T15:06:56 | Python | UTF-8 | Python | false | false | 2,391 | py | from flask import jsonify, render_template, Blueprint
from apispec import APISpec
from apispec.exceptions import APISpecError
from apispec.ext.marshmallow import MarshmallowPlugin
from apispec_webframeworks.flask import FlaskPlugin
class FlaskRestfulPlugin(FlaskPlugin):
    """apispec Flask plugin override that resolves flask-restful resources.

    flask-restful registers view *classes* behind wrapper functions; the
    stock FlaskPlugin only matches plain view functions, so unwrap
    ``view_class`` when present before comparing.
    """

    @staticmethod
    def _rule_for_view(view, app=None):
        endpoint = None
        for candidate, view_func in app.view_functions.items():
            target = getattr(view_func, "view_class", view_func)
            if target == view:
                endpoint = candidate
        if not endpoint:
            raise APISpecError('Could not find endpoint for view {0}'.format(view))

        # WARNING: Assume 1 rule per view function for now
        return app.url_map._rules_by_endpoint[endpoint][0]
class APISpecExt:
    """Minimal Flask extension wiring an apispec ``APISpec`` plus the
    swagger.json and swagger-ui routes onto an application.
    """

    def __init__(self, app=None):
        self.spec = None
        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        # Fill in any config keys the application did not set itself.
        defaults = {
            "APISPEC_TITLE": "restful_api",
            "APISPEC_VERSION": "1.0.0",
            "OPENAPI_VERSION": "3.0.2",
            "SWAGGER_JSON_URL": "/swagger.json",
            "SWAGGER_UI_URL": "/swagger-ui",
            "SWAGGER_URL_PREFIX": None,
        }
        for key, value in defaults.items():
            app.config.setdefault(key, value)

        self.spec = APISpec(
            title=app.config["APISPEC_TITLE"],
            version=app.config["APISPEC_VERSION"],
            openapi_version=app.config["OPENAPI_VERSION"],
            plugins=[MarshmallowPlugin(), FlaskRestfulPlugin()],
        )

        blueprint = Blueprint(
            "swagger",
            __name__,
            template_folder="./templates",
            url_prefix=app.config["SWAGGER_URL_PREFIX"],
        )
        blueprint.add_url_rule(app.config["SWAGGER_JSON_URL"], "swagger_json", self.swagger_json)
        blueprint.add_url_rule(app.config["SWAGGER_UI_URL"], "swagger_ui", self.swagger_ui)
        app.register_blueprint(blueprint)

    def swagger_json(self):
        # Serve the generated OpenAPI document.
        return jsonify(self.spec.to_dict())

    def swagger_ui(self):
        # Serve the bundled swagger-ui template.
        return render_template("swagger.j2")
| [
"[email protected]"
]
| |
94f8efa5ea21ca487bbe42513b4bed2ca3819a07 | 8cf2b4717e73e55c10563d51f58ea852263624bb | /django_01/settings.py | e812f3cb38b36241e458dbcd7d006300a4d7fe01 | []
| no_license | buleleaf/Django-New-Web | 9e795a5065faa6826f00e52b1c2e82239e77d2e8 | f6b79a2b788eba0280358026d0dca8f5b838f8ad | refs/heads/master | 2020-11-29T18:33:38.000849 | 2019-12-25T08:31:33 | 2019-12-25T08:31:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,712 | py | """
Django settings for django_01 project.
Generated by 'django-admin startproject' using Django 2.0.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os

from . import config

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '(!az*nhv-51*ki2f*d1(#m+e(@03-cqy77gtmy=a++2=4@7&fl'

# SECURITY WARNING: don't run with debug turned on in production!
# DEBUG = False  # debug switch: True enables debug mode, False disables it
DEBUG = True  # debug switch: True enables debug mode, False disables it

# Either use '*' here or list this server's hostnames/addresses explicitly.
ALLOWED_HOSTS = ['www.achjiang.cn', '47.93.8.105', '127.0.0.1', '122.51.243.116']


# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'apps.course',
    'apps.cms',
    'apps.news',
    'apps.payinfo',
    'apps.xfzauth',
    'rest_framework',
]

# Middleware applies to every app; convenient for batch request/response handling.
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',  # CSRF protection; pair with {% csrf_token %}
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

# Root URLconf module.
ROOT_URLCONF = 'django_01.urls'

# Template configuration.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # Look up templates in the project-level 'templates' directory.
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                # auth exposes `user`, whose .is_authenticated tells whether the user is logged in
                'django.contrib.messages.context_processors.messages',
                'django.template.context_processors.media',  # exposes MEDIA_URL in templates
            ],
            # With this builtin registered, templates no longer need
            # '{% load static %}' at the top of every file.
            'builtins': ['django.templatetags.static']  # built-in template tags
        },
    },
]

WSGI_APPLICATION = 'django_01.wsgi.application'


# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases

# Database connection settings (credentials come from the local config module).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': config.MYSQL_NAME,
        'USER': config.MYSQL_USER,
        'PASSWORD': config.MYSQL_PASSWORD,
        'HOST': config.MYSQL_HOST,
        # NOTE(review): Django expects the key 'PORT'; 'POST' here is silently
        # ignored, so the default MySQL port is used — confirm and fix upstream.
        'POST': config.MYSQL_POST,
    }
}


# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# AUTH_USER_MODEL is a built-in Django setting: when set, Django uses the
# referenced model as the User model. The value is a string of the form
# 'appname.ModelName'. If AUTH_USER_MODEL is set, the project's
# makemigrations/migrate commands must be run AFTER this setting is in place.
AUTH_USER_MODEL = 'xfzauth.User'

REST_FRAMEWORK = {
    # Use Django's standard `django.contrib.auth` permissions,
    # or allow read-only access for unauthenticated users.
    'DEFAULT_PERMISSION_CLASSES': [
        # 'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly',
        # 'rest_framework.permissions.DjangoModelPermissions',
    ]
}

# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/

LANGUAGE_CODE = 'zh-Hans'  # language: 'zh-Hans' (Simplified Chinese) / 'en-us'

TIME_ZONE = 'Asia/Shanghai'  # time zone

USE_I18N = True

USE_L10N = True

# When True, timestamps are stored in UTC, i.e. the database differs from
# local (UTC+8) system time by 8 hours.
USE_TZ = True
# USE_TZ = False


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/

# Static file configuration.
STATIC_URL = '/static/'
# STATICFILES_DIRS = [
#     os.path.join(BASE_DIR, 'static')
# ]
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'static'),
)
# For deployment with Debug off: target directory for `collectstatic`.
# Without an explicit path it would default to the project root.
STATIC_ROOT = os.path.join(BASE_DIR, 'static_dist')

# Uploaded-media configuration.
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')

# UEditor rich-text editor configuration.
# Upload to this server:
UEDITOR_UPLOAD_TO_SERVER = True
UEDITOR_UPLOAD_PATH = MEDIA_ROOT
UEDITOR_CONFIG_PATH = os.path.join(
    BASE_DIR, 'static', 'ueditor', 'config.json')

# Upload to Qiniu cloud storage:
UEDITOR_QINIU_ACCESS_KEY = config.QINIU_ACCESS_KEY
UEDITOR_QINIU_SECRET_KEY = config.QINIU_SECRET_KEY
UEDITOR_QINIU_BUCKET_NAME = config.QINIU_BUCKET_NAME
UEDITOR_QINIU_DOMAIN = config.QINIU_DOMAIN
UEDITOR_UPLOAD_TO_QINIU = True

# Number of news items shown per page on the front page.
ONE_PAGE_NEWS_COUNT = 2

# Baidu Cloud configuration.
# Console -> user center -> user ID:
BAIDU_CLOUD_USER_ID = config.BAIDU_CLOUD_USER_ID
# VOD -> global settings -> publish settings -> security -> UserKey:
BAIDU_CLOUD_USER_KEY = config.BAIDU_CLOUD_USER_KEY

# Third-party payment provider credentials.
PAY_TOKEN = config.PAY_TOKEN
PAY_UID = config.PAY_UID

# Aliyun SMS configuration.
ACCESS_KEY_ID = config.ALI_ACCESS_KEY_ID
ACCESS_KEY_SECRET = config.ALI_ACCESS_KEY_SECRET
SIGN_NAME = config.ALI_SIGN_NAME
TEMPLATE_CODE = config.ALI_TEMPLATE_CODE

# Full-text search (haystack) configuration.
HAYSTACK_CONNECTIONS = {
    'default': {
        # Use the whoosh engine.
        'ENGINE': 'haystack.backends.whoosh_backend.WhooshEngine',
        # 'ENGINE': 'haystack.backends.whoosh_cn_backend.WhooshEngine',
        # Index file path.
        'PATH': os.path.join(BASE_DIR, 'whoosh_index'),
    }
}
# Rebuild the index automatically whenever data is added, changed or deleted.
HAYSTACK_SIGNAL_PROCESSOR = 'haystack.signals.RealtimeSignalProcessor'

# Logging configuration (dictConfig schema).
LOGGING = {
    'version': 1,  # schema version
    'disable_existing_loggers': False,
    # Two output formats: 'verbose' and 'simple'.
    'formatters': {
        'verbose': {
            # e.g. 2018-11-08 16:34:17,512 - views.py\edit[line:329] - WARNING: ...
            'format': '%(asctime)s - %(filename)s\%(funcName)s[line:%(lineno)d] - %(levelname)s: %(message)s',
        },
        'simple': {
            'format': '%(levelname)s %(message)s'
        },
    },
    # Two filters gating handlers on the DEBUG flag.
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse',
        },
        'require_debug_true': {
            '()': 'django.utils.log.RequireDebugTrue',
        },
    },
    # Three handlers: rotating file, console, admin email.
    'handlers': {
        'log_file': {
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': 'django_01.log',
            'maxBytes': 16777216,  # 16 MB
            'formatter': 'verbose'
        },
        'console': {
            'level': 'INFO',
            'filters': ['require_debug_true'],
            'class': 'logging.StreamHandler',
            'formatter': 'simple'
        },
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler',
            'filters': ['require_debug_false']
        }
    },
    # Loggers:
    'loggers': {
        'django': {
            'handlers': ['console', 'log_file'],
            'propagate': True,
        },
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': False,
        },
        'myproject.custom': {
            'handlers': ['console', 'mail_admins'],
            'level': 'INFO',
            'filters': ['require_debug_false']
        }
    }
}
| [
"[email protected]"
]
| |
52fedae218df70fb543cf1dd2cd22af4b39b6488 | 3f53e38076713ab49fd03a54c7c9d3e21de5eb14 | /Pyrado/pyrado/tasks/predefined.py | 50d88df14b227c763e6a5a33ef902b36a254c00d | [
"BSD-2-Clause",
"BSD-3-Clause"
]
| permissive | arlene-kuehn/SimuRLacra | 4510473789d1c8927c8d5969a9606238523d5dd7 | 15901f70f0538bce19acdda2a0018984f67cc0fe | refs/heads/master | 2023-01-28T13:10:05.607575 | 2020-12-04T14:47:01 | 2020-12-04T14:47:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,810 | py | # Copyright (c) 2020, Fabio Muratore, Honda Research Institute Europe GmbH, and
# Technical University of Darmstadt.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of Fabio Muratore, Honda Research Institute Europe GmbH,
# or Technical University of Darmstadt, nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL FABIO MURATORE, HONDA RESEARCH INSTITUTE EUROPE GMBH,
# OR TECHNICAL UNIVERSITY OF DARMSTADT BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import functools
import numpy as np
import pyrado
from pyrado.spaces.empty import EmptySpace
from pyrado.tasks.desired_state import DesStateTask
from pyrado.tasks.final_reward import FinalRewTask, FinalRewMode
from pyrado.tasks.masked import MaskedTask
from pyrado.tasks.reward_functions import RewFcn, ZeroPerStepRewFcn, AbsErrRewFcn
from pyrado.tasks.utils import proximity_succeeded, never_succeeded
from pyrado.utils.data_types import EnvSpec
def create_goal_dist_task(env_spec: EnvSpec,
                          ds_index: int,
                          rew_fcn: RewFcn,
                          succ_thold: float = 0.01) -> MaskedTask:
    """Create a task rewarding zero goal distance for one dynamical system.

    :param env_spec: environment specification
    :param ds_index: index of the dynamical system; must match the
                     observations' names in RcsPySim
    :param rew_fcn: reward function
    :param succ_thold: proximity threshold below which the task is solved
    :return: masked task acting only on the selected goal-distance state
    """
    # The labels must match the observations' names in RcsPySim.
    idcs = [f'GD_DS{ds_index}']

    # Build the masked environment specification.
    if env_spec.state_space is not EmptySpace:
        state_space = env_spec.state_space.subspace(env_spec.state_space.create_mask(idcs))
    else:
        state_space = EmptySpace
    spec = EnvSpec(env_spec.obs_space, env_spec.act_space, state_space)

    # Desired state is the origin, i.e. zero goal distance; success on proximity.
    dst = DesStateTask(spec, np.zeros(2), rew_fcn, functools.partial(proximity_succeeded, thold_dist=succ_thold))

    return MaskedTask(env_spec, dst, idcs)
def create_goal_dist_distvel_task(env_spec: EnvSpec,
                                  ds_index: int,
                                  rew_fcn: RewFcn,
                                  succ_thold: float = 0.01) -> MaskedTask:
    """Create a task rewarding zero goal distance and distance velocity
    for one dynamical system.

    :param env_spec: environment specification
    :param ds_index: index of the dynamical system; must match the
                     observations' names in RcsPySim
    :param rew_fcn: reward function
    :param succ_thold: proximity threshold below which the task is solved
    :return: masked task acting on the selected distance and its velocity
    """
    # The labels must match the observations' names in RcsPySim.
    idcs = [f'GD_DS{ds_index}', f'GD_DS{ds_index}d']

    # Build the masked environment specification.
    if env_spec.state_space is not EmptySpace:
        state_space = env_spec.state_space.subspace(env_spec.state_space.create_mask(idcs))
    else:
        state_space = EmptySpace
    spec = EnvSpec(env_spec.obs_space, env_spec.act_space, state_space)

    # Desired state is [0, 0], i.e. zero distance and velocity; success on proximity.
    dst = DesStateTask(spec, np.zeros(2), rew_fcn, functools.partial(proximity_succeeded, thold_dist=succ_thold))

    return MaskedTask(env_spec, dst, idcs)
def create_check_all_boundaries_task(env_spec: EnvSpec, penalty: float) -> FinalRewTask:
    """Create a task applying a final penalty when any state limit is violated.

    Checks the complete state space, not just a masked subspace (as could
    happen when using a MaskedTask).

    :param env_spec: environment specification
    :param penalty: magnitude of the (always negative) final reward
    :return: final-reward task wrapping a never-succeeding desired-state task
    """
    inner = DesStateTask(env_spec, np.zeros(env_spec.state_space.shape), ZeroPerStepRewFcn(), never_succeeded)
    return FinalRewTask(inner, FinalRewMode(always_negative=True), factor=penalty)
def create_task_space_discrepancy_task(env_spec: EnvSpec, rew_fcn: RewFcn) -> MaskedTask:
    """
    Create a task which punishes the discrepancy between the actual and the commanded state of the observed body.
    The observed body is specified in the associated experiment configuration file in RcsPySim.
    This task only looks at the X and Z coordinates.

    :param env_spec: environment specification
    :param rew_fcn: reward function
    :return: masked task
    """
    # The labels must match the observations' names in RcsPySim.
    idcs = [label for label in env_spec.state_space.labels if 'DiscrepTS' in label]
    if not idcs:
        raise pyrado.ValueErr(msg="No state space labels found that contain 'DiscrepTS'")

    spec = EnvSpec(
        env_spec.obs_space,
        env_spec.act_space,
        env_spec.state_space.subspace(env_spec.state_space.create_mask(idcs)),
    )

    # Zero discrepancy is desired; the task never terminates on success.
    dst = DesStateTask(spec, np.zeros(spec.state_space.shape), rew_fcn, never_succeeded)

    return MaskedTask(env_spec, dst, idcs)
def create_collision_task(env_spec: EnvSpec, factor: float) -> MaskedTask:
    """
    Create a task which punishes collision costs given a collision model with pairs of bodies.
    This task only looks at the instantaneous collision cost.

    :param env_spec: environment specification
    :param factor: cost / reward function scaling factor; must be >= 0
    :return: masked task
    """
    if not factor >= 0:
        raise pyrado.ValueErr(given=factor, ge_constraint='0')

    # The label must match the observation's name in RcsPySim.
    idcs = ['CollCost']

    spec = EnvSpec(
        env_spec.obs_space,
        env_spec.act_space,
        env_spec.state_space.subspace(env_spec.state_space.create_mask(idcs)),
    )

    rew_fcn = AbsErrRewFcn(q=np.array([factor]), r=np.zeros(spec.act_space.shape))

    # Zero collision cost is desired; the task never terminates on success.
    dst = DesStateTask(spec, np.zeros(spec.state_space.shape), rew_fcn, never_succeeded)

    return MaskedTask(env_spec, dst, idcs)
| [
"[email protected]"
]
| |
4c85ede7212f8220b8d5663534c28dcba7c46309 | 639b371a36aa7bc346375fb0c63fe4357a7fa928 | /isnobal/lib.py | d320666b29d383f6ff407741928bf5e110c31b16 | []
| no_license | rogerlew/isnobal | baf9dccc19a8c898d6c5eb5c6554e61c0c42b541 | 887e02cb6361e8f35f3cbcb2aaeba35b62a65d67 | refs/heads/master | 2016-09-05T21:47:37.365412 | 2015-01-09T18:03:02 | 2015-01-09T18:03:02 | 28,944,436 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,608 | py |
import os
import shutil
import time
def clean(path, isdir=None):
    """
    cleans the path

    if path is a file it is removed, if path is a
    directory the directory tree is removed and the
    root directory is recreated

    Parameters
    ----------
    path : string
        path to be cleaned

    isdir : bool
        if path does not currently exist and isdir
        is true a new directory is created
    """
    if not os.path.exists(path):
        if isdir:
            os.mkdir(path)
        return

    if os.path.isdir(path):
        shutil.rmtree(path)
        # brief pause so the OS finishes releasing the tree before mkdir
        time.sleep(1)
        os.mkdir(path)
    else:
        os.remove(path)
def identifyStep(fname):
    """
    Identify the simulation step from a filename.

    Parameters
    ----------
    fname : string
        only the basename is used for determining the step

    Returns
    -------
    step : int
        integer representing the simulation step (0 for dem/mask files
        and whenever no step can be parsed)
    """
    basename = os.path.basename(fname)
    if 'dem' in basename:
        return 0
    elif 'mask' in basename:
        return 0
    elif 'init' in basename:
        try:
            # Step is encoded as the digits anywhere in the name, e.g. init_0012.
            return int(''.join([a for a in basename if a in '0123456789']))
        except ValueError:
            # 'init' file without any digits.
            return 0

    try:
        # Usual layout: <prefix>_<step><suffix>, e.g. snow_0045.ipw.
        return int(''.join([a for a in basename.split('_')[1]
                            if a in '0123456789']))
    except (IndexError, ValueError):
        try:
            # Fallback layout: <prefix>.<step>, e.g. em.1234.
            return int(basename.split('.')[1])
        except (IndexError, ValueError):
            # Bug fix: `warnings` was never imported, so this path raised
            # NameError instead of warning. Import locally and warn.
            import warnings
            warnings.warn('Could not identify step for "%s", '
                          'returning 0' % basename)
            return 0
| [
"[email protected]"
]
| |
f430f08749ef992d69507cf068f4f2d1e99c4b86 | dc7c62f22e5b7da4691d2bdf9a1da2f3ba9edd75 | /sketch_181106c/stack.py | e0a7adc48f20d65c672224ee5626dd4d30872a45 | []
| no_license | xiaohaiguicc/CS5001 | 563c17637f06f0074ccb743db4f0bdd2a326f978 | 51698ba8bfc2201639e6f4d358e0fc531780d2fc | refs/heads/master | 2020-04-06T17:32:55.046301 | 2018-12-20T23:53:05 | 2018-12-20T23:53:05 | 157,664,298 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py | class Stack:
def _init_(self):
self.content = []
def push(self, item):
self.content.append(item)
def pop(self):
if len(self.content) > 0:
return self.content.pop()
| [
"[email protected]"
]
| |
86988fc60904cf6ad10ea91b11b7a4d5313cdc80 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_121/ch118_2020_09_30_02_00_29_265968.py | 81e75271029f2a468b29c741aee70ec9ceea4b9c | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 202 | py | import math
def reflexao_total_interna(n1, n2, θ2):
    """Return True if light hitting the interface at angle θ2 (radians) in
    the medium with refractive index n2 undergoes total internal reflection
    when passing into the medium with index n1.

    Bug fix: the original called the undefined name `sin` (NameError), and
    `math.asin` raised ValueError exactly when total internal reflection
    occurs (required sin(θ1) > 1). By Snell's law n1*sin(θ1) = n2*sin(θ2),
    so we test the required sin(θ1) directly instead of computing θ1.
    """
    sin_theta1 = math.sin(θ2) * n2 / n1
    return sin_theta1 > 1
"[email protected]"
]
| |
bf540d38a0dea62068f14ad5ec55fae7d96a6b0e | d6d20681f41102df3feb2b438ef80569bd73730f | /.history/Uge5-pandas/handinExercises_20200311123505.py | f29264f126048debd63967bdff8beb92c1762646 | []
| no_license | MukHansen/pythonAfleveringer | d0ad2629da5ba2b6011c9e92212949e385443789 | 4107c3c378f757733961812dd124efc99623ff2e | refs/heads/master | 2020-12-22T13:27:19.135138 | 2020-05-22T11:35:52 | 2020-05-22T11:35:52 | 236,796,591 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,354 | py | import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
# Statistics Denmark (statbank.dk) FOLK1A population tables fetched as CSV.
# dataA: married (G) / divorced (F) counts for 2008Q1 and 2020Q1.
dataA = pd.read_csv('https://api.statbank.dk/v1/data/FOLK1A/CSV?delimiter=Semicolon&Tid=2008K1%2C2020K1&CIVILSTAND=G%2CF', delimiter=';')
# dataB1: total population (TOT) per municipality.
dataB1 = pd.read_csv('https://api.statbank.dk/v1/data/FOLK1A/CSV?delimiter=Semicolon&OMR%C3%85DE=*&CIVILSTAND=TOT', delimiter=';')
# dataB = pd.read_csv('https://api.statbank.dk/v1/data/FOLK1A/CSV?delimiter=Semicolon&CIVILSTAND(Head)=U%2CTOT&OMR%C3%85DE=*&Tid=2020K1', delimiter=';')
# dataB2: never-married population (U) per municipality.
dataB2 = pd.read_csv('https://api.statbank.dk/v1/data/FOLK1A/CSV?delimiter=Semicolon&OMR%C3%85DE=*&CIVILSTAND=U', delimiter=';')
# dataC1: area 101 (Copenhagen), all civil statuses, all quarters.
dataC1 = pd.read_csv('https://api.statbank.dk/v1/data/FOLK1A/CSV?delimiter=Semicolon&OMR%C3%85DE=101&CIVILSTAND=*&Tid=*', delimiter=';')
test = np.genfromtxt('https://api.statbank.dk/v1/data/FOLK1A/CSV?delimiter=Semicolon&OMR%C3%85DE=101&CIVILSTAND=*&Tid=*', delimiter=';', dtype=np.uint, skip_header=1)
# Divorced as a percentage of married, 2008Q1 vs 2020Q1.
# NOTE(review): assumes fixed row order (married '08, divorced '08, married '20,
# divorced '20) in the API response — verify against the actual CSV.
pct08 = dataA['INDHOLD'][1] / dataA['INDHOLD'][0] * 100
pct20 = dataA['INDHOLD'][3] / dataA['INDHOLD'][2] * 100
pctchange = pct20 - pct08
print('5.A')
print('------------------------------------------')
print("Divorced procent in '08",pct08)
print("Divorced procent in '20",pct20)
print('Change in procent',pctchange)
print('------------------------------------------')
# 1:6 to skip "hele landet" at index 0
largestCities = dataB1.sort_values('INDHOLD', ascending=False)[1:6]
# ls = largestCities.head(2)
def highestPctOfNeverMarried():
    """Compute the percentage of never-married residents for the five
    largest municipalities and print the highest (pct, area) pair.

    Relies on the module-level DataFrames dataB2 (never-married counts)
    and dataB1 (total population); `lst` holds their row indices for the
    five largest municipalities.
    """
    pctList = {}
    lst = [1,2,32,50,73]
    for number in lst:
        area = dataB2['OMRÅDE'][number]
        val = dataB2['INDHOLD'][number] / dataB1['INDHOLD'][number] * 100
        pctList.update({area: val})
    # Bug fix: the original printed a literal '/n'; use the '\n' escape.
    print('Highest --------------------------- \n', max(zip(pctList.values(), pctList.keys())))
def printMeYo():
    """Print the summed 'INDHOLD' (person count) over the rows of the
    module-level DataFrame dataC1 where CIVILSTAND == 'I alt' (total).
    """
    # Bug fix: dataC1 is a pandas DataFrame, so NumPy-style positional
    # slicing `dataC1[:, 1]` raises. Build the mask from the column by
    # name and sum the matching 'INDHOLD' values via .loc instead.
    deezMask = dataC1['CIVILSTAND'] == 'I alt'
    print(dataC1.loc[deezMask, 'INDHOLD'].sum())
# def changesInPctPlot():
# yearsToDisp = []
# eastpopulation = []
# westpopulation = []
# for key, value in years.items():
# yearsToDisp.append(key)
# for key, value in east.items():
# eastpopulation.append(value)
# for key, value in west.items():
# westpopulation.append(value)
# plt.plot(yearsToDisp, eastpopulation, linewidth=2)
# plt.plot(yearsToDisp, westpopulation, linewidth=2)
# plt.title("Number of poeple in %", fontsize=18)
# plt.xlabel("Year", fontsize=10)
# plt.xticks(yearsToDisp, rotation=65)
# plt.tick_params(axis='both', labelsize=10)
# plt.show()
# Pct of never-married residents in Region Hovedstaden (row 1 of dataB1/dataB2).
regionH = dataB2['INDHOLD'][1] / dataB1['INDHOLD'][1] * 100
# regionM = dataB2['INDHOLD'][73] / dataB1['INDHOLD'][73] * 100
# regionSD = dataB2['INDHOLD'][50] / dataB1['INDHOLD'][50] * 100
# regionS = dataB2['INDHOLD'][32] / dataB1['INDHOLD'][32] * 100
# kbh = dataB2['INDHOLD'][2] / dataB1['INDHOLD'][2] * 100
print('5.B')
# print(largestCities)
# print('------------------------------')
# print('Region H procent',regionH)
highestPctOfNeverMarried()
# print(dataB2.loc[0])
# print(yo)
# print(dataB)
print('5.C')
# print(dataC1)
# printMeYo()
# print(test)
# plt.bar(ages, no_citicens, width=0.5, linewidth=0, align='center') # first plot: danes
# plt.ticklabel_format(useOffset=False)
# # plt.axis([0, max(ages) + 10, 0, 17000])
# title = 'Distribution of CPH Citizens in {}'.format(2015)
# plt.title(title, fontsize=12)
# plt.xlabel("Year", fontsize=10)
# plt.ylabel("Number of poeple in %", fontsize=10)
# plt.tick_params(axis='both', which='major', labelsize=10)
# p1 = plt.bar(ages, no_citicens, width=0.5, linewidth=0, align='center', color='red')
# p2 = plt.bar(ages_f, no_citicens_f, width=0.5, linewidth=0, align='center', color='yellow')
# plt.legend([p1,p2],['danish','foreigners'],loc=1)
# plt.show()
| [
"[email protected]"
]
| |
39ef6807088c40eb988f2dd2e8540cb782c446cf | 62dc63713e8c8ce8622c70117595bae857618107 | /BlackFlyCameraClass.py | 152951b5f96ad3d211963330218e880700e8a66b | []
| no_license | LiamDroog/BaldrControlSuite | ad7544d5e92b5e27537e7f20c7cf3ddc78b36769 | 2ca76c4c97c334b6bd5924b00cbcb8e6f687f495 | refs/heads/master | 2023-07-04T07:57:46.115829 | 2021-08-03T17:36:49 | 2021-08-03T17:36:49 | 371,804,134 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,708 | py | import os
import matplotlib.pyplot as plt
import numpy as np
import time
from SimpleFLIR import Camera, GetBFSCameras
class CameraNotConnected(Exception):
    """Raised when the BlackFly camera with the requested serial number cannot be opened."""
    pass
class ListCameras:
    """Enumerates attached BlackFly-S cameras via SimpleFLIR.GetBFSCameras."""
    def __init__(self):
        # Snapshot of detected cameras, taken once at construction.
        self.camlist = GetBFSCameras().getCameras()
    def get(self):
        """Return the camera list captured at construction time."""
        return self.camlist
class RunBlackFlyCamera:
    # Todo: loop upon instantiation to get commands from queue (as input when creating)
    # Handle the inputs for shutdown
    # Handle how to implement hardware trigger
    # Move this to new file bc it'll get specialized quick
    """
    Per-camera instance of this is required. Deals with a hardware trigger (currently software as no hardware triggers
    have been configured) and writes data to specified file directory so that the file daemon can transfer it to the
    queue for writing to shot hdf5 file
    """
    def __init__(self, camserial, filenum):
        """
        Initializes camera from input serial number and starting file number.

        :param camserial: Camera's serial number
        :param filenum: number to start numbering files at
        :raises CameraNotConnected: if the camera cannot be opened
        """
        try:
            self.cam = Camera(camserial)
        except:
            raise CameraNotConnected('Camera ' + camserial + ' not connected.')
        self.camserial = camserial
        self.camName = self.cam.getDeviceName()
        self.filenum = filenum
        # Zero-pad the file number to 4 digits, e.g. <serial>_shotdata_0001.npy.
        self.datafilename = self.camserial + '_shotdata_' + '0' * (4 - len(str(self.filenum))) + str(filenum) + '.npy'
        # self.metadatafilename = self.camserial + '_shotmetadata_' + '0' * (4 - len(str(self.filenum))) + str(
        #     filenum) + '.npy'
        # set file directory (one temp-data folder per camera serial)
        self.filepath = 'TempDataFiles/' + self.camserial + '/'
        if not os.path.exists(self.filepath):
            os.makedirs(self.filepath)
        # initialize camera
        self.cam.init()
        # todo: run trigger watch loop here?
    def adjust(self, target, value):
        """Set a camera attribute *target* to *value* (delegates to SimpleFLIR)."""
        self.cam.setattr(target, value)
    def get(self, attr):
        """Return the camera's device parameter(s) for *attr*."""
        return self.cam.getDeviceParams(attr)
    def wait_for_trigger(self):
        # Placeholder: hardware-trigger wait loop not implemented yet.
        pass
    def handleTrigger(self):
        """Acquire one frame on trigger and save it with the next file number."""
        self.filenum += 1
        self.__saveData(self.cam.get_array())
    def get_image_array(self):
        """Return the current frame as an array (delegates to SimpleFLIR)."""
        return self.cam.get_array()
    def __getShotMetadata(self):
        # All device parameters, saved alongside the image as shot metadata.
        return self.cam.getDeviceParams()
    def __saveData(self, data):
        """Write *data* plus metadata to a .npy file under self.filepath."""
        # Rebuild the filename so it reflects the current (incremented) shot number.
        self.datafilename = self.camserial + '_shotdata_' + '0' * (4 - len(str(self.filenum))) + str(
            self.filenum) + '.npy'
        returndict = {}
        returndict['diagnosticname'] = self.camName + ' Serial: ' + self.camserial
        returndict['targetfile'] = self.filenum
        returndict['data'] = data
        returndict['metadata'] = self.__getShotMetadata()
        np.save(self.filepath + self.datafilename, returndict)
        print('Saved image ' + self.datafilename)
    def start(self):
        """Start camera acquisition."""
        self.cam.start()
    def stop(self):
        """Stop camera acquisition."""
        self.cam.stop()
    def close(self):
        """Release the camera handle."""
        self.cam.close()
    def printInfo(self):
        """Print/document the camera's device information."""
        self.cam.document()
    def liveView(self):
        """Show a live matplotlib view of the camera until the window is closed."""
        self.isLiveOut = True
        self.cam.configliveout()
        self.cam.start()
        fig = plt.figure(1)
        # Closing the figure flips isLiveOut and ends the loop below.
        fig.canvas.mpl_connect('close_event', self.__closeLiveView)
        while self.isLiveOut:
            image = self.cam.get_array()
            plt.imshow(image, cmap='bone')
            plt.colorbar()
            plt.pause(0.001)
            plt.clf()
    def __closeLiveView(self, event):
        # matplotlib close_event callback: stop the live-view loop.
        self.isLiveOut = False
if __name__ == '__main__':
    # Smoke test: open camera 19129388 starting at file number 1, then release it.
    camera = RunBlackFlyCamera('19129388', 1)
    #camera.start()
    #camera.stop()
    camera.close()
| [
"[email protected]"
]
| |
1802e16b7d00226435e6164b100705ff00463f91 | b3b68efa404a7034f0d5a1c10b281ef721f8321a | /Scripts/simulation/interactions/utils/destruction_liability.py | 3cfd2f7aae1a2fe038831b0aeb4ff3d4fc657c5b | [
"Apache-2.0"
]
| permissive | velocist/TS4CheatsInfo | 62195f3333076c148b2a59f926c9fb5202f1c6fb | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | refs/heads/main | 2023-03-08T01:57:39.879485 | 2021-02-13T21:27:38 | 2021-02-13T21:27:38 | 337,543,310 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,071 | py | # uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\interactions\utils\destruction_liability.py
# Compiled at: 2020-06-20 03:55:42
# Size of source mod 2**32: 1064 bytes
from interactions.liability import SharedLiability
# Key under which this liability is registered on an interaction.
DELETE_OBJECT_LIABILITY = 'DeleteObjectLiability'

class DeleteObjectLiability(SharedLiability):
    """Shared liability that destroys a list of objects when it is released."""
    def __init__(self, obj_list, source_liability=None):
        super().__init__(source_liability=source_liability)
        # Objects scheduled for destruction on release.
        self._delete_objects = obj_list
    def shared_release(self):
        """Destroy all tracked objects and clear the list."""
        for obj in self._delete_objects:
            obj.schedule_destroy_asap()
        self._delete_objects.clear()
    def merge(self, interaction, key, new_liability):
        """Fold this liability's objects into *new_liability* and return it."""
        new_liability._delete_objects.extend(self._delete_objects)
        return new_liability
    def create_new_liability(self, interaction):
        """Create a successor liability sharing the same object list."""
        return self.__class__((self._delete_objects), source_liability=self)
"[email protected]"
]
| |
cada85a3a534bd5162de90bc54c1f74691921397 | 215fd5c4f9893d9f38e4e48199ea16d7d6ef9430 | /2.Binary_Search/2.10_62_Search_In_Rotated_Sorted_Array.py | f96b0cbc87e0ec031279dc7c1b691c17e7d12dcc | []
| no_license | fztest/Classified | fd01622c097ca21b2e20285b06997ff0e9792dd1 | b046d94657c0d04f3803ca15437dfe9a6f6f3252 | refs/heads/master | 2020-03-25T06:34:07.885108 | 2017-05-04T17:22:36 | 2017-05-04T17:22:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,606 | py | """
Description
_____________
Suppose a sorted array is rotated at some pivot unknown to you beforehand.
(i.e., 0 1 2 4 5 6 7 might become 4 5 6 7 0 1 2).
You are given a target value to search. If found in the array return its index, otherwise return -1.
You may assume no duplicate exists in the array.
Example
____________
For [4, 5, 1, 2, 3] and target=1, return 2.
For [4, 5, 1, 2, 3] and target=0, return -1.
Approach
____________
Binary Search
++++++++++++++
Constraint: A[mid] == target
Goal: find target
Strategy: discard half of the search range on each iteration
There are five situations to handle (some actions may be
duplicative, but it is nice to distinguish between them)
1. if find the target return it
if A[mid] == target:
return mid
2. mid land at index 3 of [9,1,2,3,4,5,6]
elif A[mid] < A[end]:
now there are two sections, at right, it's a normal sorted array, at left
it's a rotated sorted array again. We can both essentially recursively handle
them and ditch the other half after knowing where target lands
2(a). target is in the right section (sorted array)
if target >= A[mid] and target <= A[end]:
start = mid
2(b). target is in the left section (rotated array)
else:
end = mid
3. mid land at index 3 of [3,4,5,6,7,1,2]
else:
Now similarly, there are two sections
3(b). left sections of sorted array
if target >= A[start] and target <= A[mid]:
end = mid
3(c). right section of rotated arrays
else:
start = mid
Complexity
______________
Time - O(Lg(N))
Space - O(1)
"""
class Solution:
    """
    @param A : a list of integers
    @param target : an integer to be searched
    @return : an integer
    """
    def search(self, A, target):
        """Return the index of `target` in the rotated sorted array `A`
        (no duplicates assumed), or -1 if absent. O(log n) time, O(1) space."""
        if A is None or len(A) == 0 or target is None:
            return -1
        start, end = 0, len(A) - 1
        while start + 1 < end:
            # Bug fix: `/` is float division in Python 3, so A[mid] raised
            # TypeError. Use floor division for an integer midpoint.
            mid = start + (end - start) // 2
            if A[mid] == target:
                return mid
            elif A[mid] < A[end]:
                # Right half [mid, end] is sorted.
                if A[mid] <= target <= A[end]:
                    start = mid
                else:
                    end = mid
            else:
                # Left half [start, mid] is sorted.
                if A[start] <= target <= A[mid]:
                    end = mid
                else:
                    start = mid
        if A[start] == target:
            return start
        if A[end] == target:
            return end
        return -1
| [
"[email protected]"
]
| |
56a0f90fa491c88e80bdabddde8c0dfdbfd2f47c | 5ea53027a9353f70e6a54f1211521bacbd5a46e2 | /src/goodtoknow/views.py | e1565eb86038e94fe2ef8761b992c07db748e1b8 | []
| no_license | Covee/Pillme_django | 380d4f696e503ed4f9278c44770d1840f66ec10b | 7d00581af09ae2ebe6610600083b11ab7ed29540 | refs/heads/master | 2022-12-16T05:50:49.685119 | 2018-07-25T11:22:58 | 2018-07-25T11:22:58 | 135,764,707 | 0 | 0 | null | 2022-11-22T02:30:45 | 2018-06-01T21:24:26 | Python | UTF-8 | Python | false | false | 1,242 | py | from django.shortcuts import render, get_object_or_404
from hitcount.views import HitCountDetailView
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_POST
from django.http import HttpResponse
from django.views.generic import DetailView, ListView
from .models import gPost
import json
class gPostListView(ListView):
    """Lists all gPost objects with the goodtoknow/post_list.html template."""
    model = gPost
    template_name = 'goodtoknow/post_list.html'
class gPostCountHitDetailView(HitCountDetailView):
    """Detail view for gPost that also records a hit count per visit."""
    model = gPost  # your model goes here
    count_hit = True  # set to True if you want it to try and count the hit
# class gPostDetailView(DetailView):
# model = gPost
# template_name = 'goodtoknow/gpost_detail.html'
@login_required
@require_POST  # accept POST requests only
def gpost_like(request):
    """Toggle the current user's like on the gPost given by POST['pk'].

    Returns an HttpResponse whose body is JSON with the updated like count,
    a (Korean) status message, and the requesting username.
    """
    pk = request.POST.get('pk', None)
    gpost = get_object_or_404(gPost, pk=pk)
    gpost_like, gpost_like_created = gpost.likegpost_set.get_or_create(user=request.user)
    if not gpost_like_created:
        # A like already existed, so this request removes it ("unlike").
        gpost_like.delete()
        message = "좋아요 취소"
    else:
        message = "좋아요"
    context = {
        'like_count': gpost.like_count,
        'message': message,
        'username': request.user.username
    }
    return HttpResponse(json.dumps(context))
| [
"[email protected]"
]
| |
299de9361c771a3ef2f202cfcdc387d919e1fb73 | 3e24611b7315b5ad588b2128570f1341b9c968e8 | /pacbiolib/thirdparty/pythonpkgs/networkx/networkx_1.9.1/lib/python2.7/site-packages/networkx/algorithms/tests/test_euler.py | c9936ea9a316115e662048c757e53837be943115 | [
"BSD-2-Clause"
]
| permissive | bioCKO/lpp_Script | dc327be88c7d12243e25557f7da68d963917aa90 | 0cb2eedb48d4afa25abc2ed7231eb1fdd9baecc2 | refs/heads/master | 2022-02-27T12:35:05.979231 | 2019-08-27T05:56:33 | 2019-08-27T05:56:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,961 | py | #! python
# run with nose: nosetests -v test_euler.py
from nose.tools import *
import networkx as nx
from networkx import is_eulerian,eulerian_circuit
class TestEuler:
    """Nose tests for networkx.is_eulerian and networkx.eulerian_circuit."""
    def test_is_eulerian(self):
        # A connected graph is Eulerian iff every vertex has even degree:
        # K_n for odd n and hypercubes of even dimension qualify.
        assert_true(is_eulerian(nx.complete_graph(5)))
        assert_true(is_eulerian(nx.complete_graph(7)))
        assert_true(is_eulerian(nx.hypercube_graph(4)))
        assert_true(is_eulerian(nx.hypercube_graph(6)))
        assert_false(is_eulerian(nx.complete_graph(4)))
        assert_false(is_eulerian(nx.complete_graph(6)))
        assert_false(is_eulerian(nx.hypercube_graph(3)))
        assert_false(is_eulerian(nx.hypercube_graph(5)))
        assert_false(is_eulerian(nx.petersen_graph()))
        assert_false(is_eulerian(nx.path_graph(4)))
    def test_is_eulerian2(self):
        # not connected
        G = nx.Graph()
        G.add_nodes_from([1,2,3])
        assert_false(is_eulerian(G))
        # not strongly connected
        G = nx.DiGraph()
        G.add_nodes_from([1,2,3])
        assert_false(is_eulerian(G))
        # in-degree != out-degree at nodes 2 and 3 -> not Eulerian
        G = nx.MultiDiGraph()
        G.add_edge(1,2)
        G.add_edge(2,3)
        G.add_edge(2,3)
        G.add_edge(3,1)
        assert_false(is_eulerian(G))
    def test_eulerian_circuit_cycle(self):
        # The circuit must start at `source` and traverse every edge once.
        G=nx.cycle_graph(4)
        edges=list(eulerian_circuit(G,source=0))
        nodes=[u for u,v in edges]
        assert_equal(nodes,[0,3,2,1])
        assert_equal(edges,[(0,3),(3,2),(2,1),(1,0)])
        edges=list(eulerian_circuit(G,source=1))
        nodes=[u for u,v in edges]
        assert_equal(nodes,[1,2,3,0])
        assert_equal(edges,[(1,2),(2,3),(3,0),(0,1)])
        G=nx.complete_graph(3)
        edges=list(eulerian_circuit(G,source=0))
        nodes=[u for u,v in edges]
        assert_equal(nodes,[0,2,1])
        assert_equal(edges,[(0,2),(2,1),(1,0)])
        edges=list(eulerian_circuit(G,source=1))
        nodes=[u for u,v in edges]
        assert_equal(nodes,[1,2,0])
        assert_equal(edges,[(1,2),(2,0),(0,1)])
    def test_eulerian_circuit_digraph(self):
        # Directed cycles must be traversed in edge direction.
        G=nx.DiGraph()
        G.add_cycle([0,1,2,3])
        edges=list(eulerian_circuit(G,source=0))
        nodes=[u for u,v in edges]
        assert_equal(nodes,[0,1,2,3])
        assert_equal(edges,[(0,1),(1,2),(2,3),(3,0)])
        edges=list(eulerian_circuit(G,source=1))
        nodes=[u for u,v in edges]
        assert_equal(nodes,[1,2,3,0])
        assert_equal(edges,[(1,2),(2,3),(3,0),(0,1)])
    def test_eulerian_circuit_multigraph(self):
        # Parallel edges (two extra 1-2 edges) must each be used once.
        G=nx.MultiGraph()
        G.add_cycle([0,1,2,3])
        G.add_edge(1,2)
        G.add_edge(1,2)
        edges=list(eulerian_circuit(G,source=0))
        nodes=[u for u,v in edges]
        assert_equal(nodes,[0,3,2,1,2,1])
        assert_equal(edges,[(0,3),(3,2),(2,1),(1,2),(2,1),(1,0)])
    @raises(nx.NetworkXError)
    def test_not_eulerian(self):
        # Requesting a circuit on a non-Eulerian graph must raise NetworkXError.
        f=list(eulerian_circuit(nx.complete_graph(4)))
| [
"[email protected]"
]
| |
a293bd444aa2724b6df8d537890f9990a47b15c1 | 139af68b78734a6bc53bd942ffa05476baf3d71d | /Python Fundamentals 2020 - 2021/MID - Exams/01. Counter Strike.py | 2d8040229eed4fc397fcd553deec104baade8d55 | []
| no_license | MiroVatov/Python-SoftUni | 7fe3fc0a3928848c5317fb120f789c773bfc117e | 0d0d6f116281b4de8c413d254386e27d992d047b | refs/heads/main | 2023-08-24T09:44:31.261137 | 2021-10-18T14:04:03 | 2021-10-18T14:04:03 | 317,510,574 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 548 | py | battles_won = 0
initial_energy = int(input())
while True:
    distance = input()
    # Sentinel line ends the game with a summary of the results.
    if distance == "End of battle":
        print(f"Won battles: {battles_won}. Energy left: {initial_energy}")
        break
    energy = int(distance)
    # Not enough energy for this battle: the game is lost.
    if initial_energy < energy:
        print(f"Not enough energy! Game ends with {battles_won} won battles and {initial_energy} energy")
        break
    else:
        battles_won += 1
        initial_energy -= energy
        # Every third won battle grants a bonus equal to the win count.
        if battles_won % 3 == 0:
            initial_energy += battles_won
| [
"[email protected]"
]
| |
a7a9b0d01df39a7334dbb96bcabf78d7796a7cde | 3e1f833a1362fde69ea5deb5636b6600c3734a54 | /beutifullmatrix.py | d598c31e05a02807cf2b5e7c441112a5ce392476 | []
| no_license | pawan-1997/portfolio | 75549baf691189ec4230e6b8e3b5d6a2061f170f | 955a4daa312b11d45b91347cfc19d82142ee5906 | refs/heads/main | 2022-12-28T20:23:52.233594 | 2020-10-03T16:21:50 | 2020-10-03T16:21:50 | 300,917,042 | 0 | 1 | null | 2020-10-03T16:21:52 | 2020-10-03T15:48:06 | JavaScript | UTF-8 | Python | false | false | 1,136 | py | matrix = []
# Read the five rows of the board as raw input lines.
for i in range(5):
    m = input()
    matrix.append(m)
index_i = 1
pos_i = 0
pos_y = 0
# Locate the single '1': pos_i is its row (1-based), pos_y is its character
# column within the input line (0-based; columns are space-separated).
for i in matrix:
    if("1" in i):
        pos_i = index_i
        # print(pos_i)
        # print(i.find("1"))
        pos_y = i.find("1")
        # print(pos_y)
    index_i = index_i + 1
# print(pos_i)
moves = 0
def calc_i():
    """Vertical moves needed to bring row ``pos_i`` (1-based module global)
    to the centre row 3 of the 5-row board."""
    row_distance = {1: 2, 5: 2, 2: 1, 4: 1, 3: 0}
    # Any other row falls back to 0, matching the original else branch.
    return row_distance.get(pos_i, 0)
def calc_y():
    """Horizontal moves needed to bring column ``pos_y`` (0-based module
    global; the '1' sits at even columns 0/2/4/6/8) to the centre column 4."""
    col_distance = {0: 2, 8: 2, 2: 1, 6: 1, 4: 0}
    # Odd/unknown columns yield None, matching the original's bare `pass`.
    return col_distance.get(pos_y)
moves_i = calc_i()
moves_j = calc_y()
# print(moves_i)
# print(moves_j)
# Total moves = vertical + horizontal distance to the board centre.
moves = moves_i + moves_j
print(moves)
# print(matrix)
| [
"[email protected]"
]
| |
eabf5a12f02c113b17e02b3868d4257e2b22e4d9 | 9e271a3bc1bf388d82bc5a01d275d910c00f315c | /user/migrations/0016_history.py | db099076f07e14f65b384b61c7664c39e3115ac3 | [
"MIT"
]
| permissive | kthaisociety/website | 36f11b704f9c38414e0999b55db4513444b53f9e | 4c4efb8a93218ae128d203b15c4340f90fe9f6a6 | refs/heads/master | 2023-08-09T19:44:16.968356 | 2023-05-20T20:33:05 | 2023-05-20T20:33:05 | 218,593,606 | 2 | 3 | MIT | 2023-05-20T20:33:06 | 2019-10-30T18:17:10 | Python | UTF-8 | Python | false | false | 998 | py | # Generated by Django 2.2.18 on 2021-03-28 21:49
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
    """Create the ``History`` model (UUID primary key, title/body/time,
    auto-managed created/updated timestamps)."""
    dependencies = [("user", "0015_user_slack_picture_hash")]
    operations = [
        migrations.CreateModel(
            name="History",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("title", models.CharField(max_length=255)),
                ("body", models.TextField(max_length=5000)),
                ("time", models.DateField()),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
            ],
            # Avoids Django's default "historys" plural in the admin.
            options={"verbose_name_plural": "histories"},
        )
    ]
| [
"[email protected]"
]
| |
b683462d7673efeb1ba027e98d544e1841f91e03 | 5b70fbd53b534306c146ffb98a0f99d2343a948f | /src/Python/Problem74.py | 341431d1b4675c9eded21d48205b99cf477cd670 | []
| no_license | aniruddhamurali/Project-Euler | 1f4ff3aa1e9c4efbc2a85026821e19a28b5edf90 | 408b3098fbc98ff3954679602c0468ddb56ea0ac | refs/heads/master | 2020-03-20T23:07:22.178103 | 2018-07-27T01:40:46 | 2018-07-27T01:40:46 | 137,830,476 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 608 | py | import math
import time
def digitFactorialSum(n):
    """Return the sum of the factorials of the decimal digits of *n*."""
    return sum(math.factorial(int(digit)) for digit in str(n))
def main():
    """Count starting numbers below one million whose digit-factorial chain
    contains exactly sixty non-repeating terms (Project Euler problem 74).
    Prints the elapsed time and returns the count."""
    start = time.time()
    count = 0
    for i in range(0,1000000):
        chain = set()
        n = i
        # Follow the chain until a term repeats (or the chain returns to i).
        while i not in chain:
            nSum = digitFactorialSum(n)
            if nSum in chain:
                break
            chain.add(nSum)
            n = nSum
        # The starting number itself counts toward the chain length.
        chain.add(i)
        if len(chain) == 60:
            count += 1
            #print(chain)
    print(time.time()-start)
    return count
# Answer: 402
| [
"[email protected]"
]
| |
4e85d7818cb34cbf49231ec6e85e7ddcccdb4759 | cacb92c6dba32dfb7f2a4a2a02269f40ab0413dd | /mmdet/models/dense_heads/fsaf_head.py | ee4c3cfe15778b71e4e7e6e5321daa9b74c84ead | [
"Apache-2.0"
]
| permissive | dereyly/mmdet_sota | 697eab302faf28d5bce4092ecf6c4fd9ffd48b91 | fc14933ca0ec2eebb8e7b3ec0ed67cae0da3f236 | refs/heads/master | 2022-11-26T14:52:13.665272 | 2020-08-04T00:26:46 | 2020-08-04T00:26:46 | 272,046,903 | 15 | 5 | Apache-2.0 | 2020-07-16T06:22:39 | 2020-06-13T16:37:26 | Python | UTF-8 | Python | false | false | 16,080 | py | import numpy as np
import torch
from mmcv.cnn import normal_init
from mmdet.core import (anchor_inside_flags, force_fp32, images_to_levels,
multi_apply, unmap)
from ..builder import HEADS
from ..losses.utils import weight_reduce_loss
from .retina_head import RetinaHead
@HEADS.register_module()
class FSAFHead(RetinaHead):
"""Anchor-free head used in `FSAF <https://arxiv.org/abs/1903.00621>`_.
The head contains two subnetworks. The first classifies anchor boxes and
the second regresses deltas for the anchors (num_anchors is 1 for anchor-
free methods)
Example:
>>> import torch
>>> self = FSAFHead(11, 7)
>>> x = torch.rand(1, 7, 32, 32)
>>> cls_score, bbox_pred = self.forward_single(x)
>>> # Each anchor predicts a score for each class except background
>>> cls_per_anchor = cls_score.shape[1] / self.num_anchors
>>> box_per_anchor = bbox_pred.shape[1] / self.num_anchors
>>> assert cls_per_anchor == self.num_classes
>>> assert box_per_anchor == 4
"""
    def forward_single(self, x):
        """Forward one FPN level; returns (cls_score, bbox_pred)."""
        cls_score, bbox_pred = super().forward_single(x)
        # relu: TBLR encoder only accepts positive bbox_pred
        return cls_score, self.relu(bbox_pred)
    def init_weights(self):
        """Initialize weights via RetinaHead, then re-init the regression conv."""
        super(FSAFHead, self).init_weights()
        # The positive bias in self.retina_reg conv is to prevent predicted \
        # bbox with 0 area
        normal_init(self.retina_reg, std=0.01, bias=0.25)
    def _get_targets_single(self,
                            flat_anchors,
                            valid_flags,
                            gt_bboxes,
                            gt_bboxes_ignore,
                            gt_labels,
                            img_meta,
                            label_channels=1,
                            unmap_outputs=True):
        """Compute regression and classification targets for anchors in
        a single image.

        Most of the codes are the same with the base class
        :obj: `AnchorHead`, except that it also collects and returns
        the matched gt index in the image (from 0 to num_gt-1). If the
        anchor bbox is not matched to any gt, the corresponding value in
        pos_gt_inds is -1.

        Returns:
            tuple: (labels, label_weights, bbox_targets, bbox_weights,
                pos_inds, neg_inds, sampling_result, pos_gt_inds), or
                ``(None,) * 7`` when no anchor lies inside the image.
        """
        inside_flags = anchor_inside_flags(flat_anchors, valid_flags,
                                           img_meta['img_shape'][:2],
                                           self.train_cfg.allowed_border)
        if not inside_flags.any():
            return (None, ) * 7
        # Assign gt and sample anchors
        anchors = flat_anchors[inside_flags.type(torch.bool), :]
        assign_result = self.assigner.assign(
            anchors, gt_bboxes, gt_bboxes_ignore,
            None if self.sampling else gt_labels)

        sampling_result = self.sampler.sample(assign_result, anchors,
                                              gt_bboxes)

        num_valid_anchors = anchors.shape[0]
        bbox_targets = torch.zeros_like(anchors)
        bbox_weights = torch.zeros_like(anchors)
        # Default label is background; weights start at zero (ignored).
        labels = anchors.new_full((num_valid_anchors, ),
                                  self.background_label,
                                  dtype=torch.long)
        label_weights = anchors.new_zeros((num_valid_anchors, label_channels),
                                          dtype=torch.float)
        pos_gt_inds = anchors.new_full((num_valid_anchors, ),
                                       -1,
                                       dtype=torch.long)
        pos_inds = sampling_result.pos_inds
        neg_inds = sampling_result.neg_inds
        if len(pos_inds) > 0:
            if not self.reg_decoded_bbox:
                pos_bbox_targets = self.bbox_coder.encode(
                    sampling_result.pos_bboxes, sampling_result.pos_gt_bboxes)
            else:
                pos_bbox_targets = sampling_result.pos_gt_bboxes
            bbox_targets[pos_inds, :] = pos_bbox_targets
            bbox_weights[pos_inds, :] = 1.0
            # The assigned gt_index for each anchor. (0-based)
            pos_gt_inds[pos_inds] = sampling_result.pos_assigned_gt_inds
            if gt_labels is None:
                # only rpn gives gt_labels as None, this time FG is 1
                labels[pos_inds] = 1
            else:
                labels[pos_inds] = gt_labels[
                    sampling_result.pos_assigned_gt_inds]
            if self.train_cfg.pos_weight <= 0:
                label_weights[pos_inds] = 1.0
            else:
                label_weights[pos_inds] = self.train_cfg.pos_weight
        if len(neg_inds) > 0:
            label_weights[neg_inds] = 1.0

        # shadowed_labels is a tensor composed of tuples
        # (anchor_inds, class_label) that indicate those anchors lying in the
        # outer region of a gt or overlapped by another gt with a smaller
        # area.
        #
        # Therefore, only the shadowed labels are ignored for loss calculation.
        # the key `shadowed_labels` is defined in :obj:`CenterRegionAssigner`
        shadowed_labels = assign_result.get_extra_property('shadowed_labels')
        if shadowed_labels is not None and shadowed_labels.numel():
            if len(shadowed_labels.shape) == 2:
                idx_, label_ = shadowed_labels[:, 0], shadowed_labels[:, 1]
                assert (labels[idx_] != label_).all(), \
                    'One label cannot be both positive and ignored'
                # If background_label is 0. Then all labels increase by 1
                label_ += int(self.background_label == 0)
                label_weights[idx_, label_] = 0
            else:
                label_weights[shadowed_labels] = 0

        # map up to original set of anchors
        if unmap_outputs:
            num_total_anchors = flat_anchors.size(0)
            labels = unmap(labels, num_total_anchors, inside_flags)
            label_weights = unmap(label_weights, num_total_anchors,
                                  inside_flags)
            bbox_targets = unmap(bbox_targets, num_total_anchors, inside_flags)
            bbox_weights = unmap(bbox_weights, num_total_anchors, inside_flags)
            pos_gt_inds = unmap(
                pos_gt_inds, num_total_anchors, inside_flags, fill=-1)

        return (labels, label_weights, bbox_targets, bbox_weights, pos_inds,
                neg_inds, sampling_result, pos_gt_inds)
    @force_fp32(apply_to=('cls_scores', 'bbox_preds'))
    def loss(self,
             cls_scores,
             bbox_preds,
             gt_bboxes,
             gt_labels,
             img_metas,
             gt_bboxes_ignore=None):
        """Compute FSAF losses, back-propagating each gt only through its
        lowest-loss ("best") FPN level.

        Returns:
            dict: per-level `loss_cls` / `loss_bbox` lists plus scalar
                `num_pos` (positives per image) and `accuracy`.
        """
        for i in range(len(bbox_preds)):  # loop over fpn level
            # avoid 0 area of the predicted bbox
            bbox_preds[i] = bbox_preds[i].clamp(min=1e-4)
        # TODO: It may directly use the base-class loss function.
        featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores]
        assert len(featmap_sizes) == self.anchor_generator.num_levels
        batch_size = len(gt_bboxes)
        device = cls_scores[0].device
        anchor_list, valid_flag_list = self.get_anchors(
            featmap_sizes, img_metas, device=device)
        label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1
        cls_reg_targets = self.get_targets(
            anchor_list,
            valid_flag_list,
            gt_bboxes,
            img_metas,
            gt_bboxes_ignore_list=gt_bboxes_ignore,
            gt_labels_list=gt_labels,
            label_channels=label_channels)
        if cls_reg_targets is None:
            return None
        (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list,
         num_total_pos, num_total_neg,
         pos_assigned_gt_inds_list) = cls_reg_targets

        num_gts = np.array(list(map(len, gt_labels)))
        num_total_samples = (
            num_total_pos + num_total_neg if self.sampling else num_total_pos)
        # anchor number of multi levels
        num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]]
        # concat all level anchors and flags to a single tensor
        concat_anchor_list = []
        for i in range(len(anchor_list)):
            concat_anchor_list.append(torch.cat(anchor_list[i]))
        all_anchor_list = images_to_levels(concat_anchor_list,
                                           num_level_anchors)
        losses_cls, losses_bbox = multi_apply(
            self.loss_single,
            cls_scores,
            bbox_preds,
            all_anchor_list,
            labels_list,
            label_weights_list,
            bbox_targets_list,
            bbox_weights_list,
            num_total_samples=num_total_samples)

        # `pos_assigned_gt_inds_list` (length: fpn_levels) stores the assigned
        # gt index of each anchor bbox in each fpn level.
        cum_num_gts = list(np.cumsum(num_gts))  # length of batch_size
        for i, assign in enumerate(pos_assigned_gt_inds_list):
            # loop over fpn levels
            for j in range(1, batch_size):
                # loop over batch size
                # Convert gt indices in each img to those in the batch
                assign[j][assign[j] >= 0] += int(cum_num_gts[j - 1])
            pos_assigned_gt_inds_list[i] = assign.flatten()
            labels_list[i] = labels_list[i].flatten()
        num_gts = sum(map(len, gt_labels))  # total number of gt in the batch
        # The unique label index of each gt in the batch
        label_sequence = torch.arange(num_gts, device=device)
        # Collect the average loss of each gt in each level
        with torch.no_grad():
            loss_levels, = multi_apply(
                self.collect_loss_level_single,
                losses_cls,
                losses_bbox,
                pos_assigned_gt_inds_list,
                labels_seq=label_sequence)
            # Shape: (fpn_levels, num_gts). Loss of each gt at each fpn level
            loss_levels = torch.stack(loss_levels, dim=0)
            # Locate the best fpn level for loss back-propagation
            if loss_levels.numel() == 0:  # zero gt
                argmin = loss_levels.new_empty((num_gts, ), dtype=torch.long)
            else:
                _, argmin = loss_levels.min(dim=0)

        # Reweight the loss of each (anchor, label) pair, so that only those
        # at the best gt level are back-propagated.
        losses_cls, losses_bbox, pos_inds = multi_apply(
            self.reweight_loss_single,
            losses_cls,
            losses_bbox,
            pos_assigned_gt_inds_list,
            labels_list,
            list(range(len(losses_cls))),
            min_levels=argmin)
        num_pos = torch.cat(pos_inds, 0).sum().float()
        acc = self.calculate_accuracy(cls_scores, labels_list, pos_inds)
        if num_pos == 0:  # No gt
            avg_factor = num_pos + float(num_total_neg)
        else:
            avg_factor = num_pos
        # Normalize the summed losses by the number of positives.
        for i in range(len(losses_cls)):
            losses_cls[i] /= avg_factor
            losses_bbox[i] /= avg_factor
        return dict(
            loss_cls=losses_cls,
            loss_bbox=losses_bbox,
            num_pos=num_pos / batch_size,
            accuracy=acc)
def calculate_accuracy(self, cls_scores, labels_list, pos_inds):
with torch.no_grad():
num_pos = torch.cat(pos_inds, 0).sum().float().clamp(min=1e-3)
num_class = cls_scores[0].size(1)
scores = [
cls.permute(0, 2, 3, 1).reshape(-1, num_class)[pos]
for cls, pos in zip(cls_scores, pos_inds)
]
labels = [
label.reshape(-1)[pos]
for label, pos in zip(labels_list, pos_inds)
]
def argmax(x):
return x.argmax(1) if x.numel() > 0 else -100
num_correct = sum([(argmax(score) == label).sum()
for score, label in zip(scores, labels)])
return num_correct.float() / num_pos
def collect_loss_level_single(self, cls_loss, reg_loss, assigned_gt_inds,
labels_seq):
"""Get the average loss in each FPN level w.r.t. each gt label
Args:
cls_loss (Tensor): Classification loss of each feature map pixel,
shape (num_anchor, num_class)
reg_loss (Tensor): Regression loss of each feature map pixel,
shape (num_anchor, 4)
assigned_gt_inds (Tensor): It indicates which gt the prior is
assigned to (0-based, -1: no assignment). shape (num_anchor),
labels_seq: The rank of labels. shape (num_gt)
Returns:
shape: (num_gt), average loss of each gt in this level
"""
if len(reg_loss.shape) == 2: # iou loss has shape (num_prior, 4)
reg_loss = reg_loss.sum(dim=-1) # sum loss in tblr dims
if len(cls_loss.shape) == 2:
cls_loss = cls_loss.sum(dim=-1) # sum loss in class dims
loss = cls_loss + reg_loss
assert loss.size(0) == assigned_gt_inds.size(0)
# Default loss value is 1e6 for a layer where no anchor is positive
# to ensure it will not be chosen to back-propagate gradient
losses_ = loss.new_full(labels_seq.shape, 1e6)
for i, l in enumerate(labels_seq):
match = assigned_gt_inds == l
if match.any():
losses_[i] = loss[match].mean()
return losses_,
def reweight_loss_single(self, cls_loss, reg_loss, assigned_gt_inds,
labels, level, min_levels):
"""Reweight loss values at each level.
Reassign loss values at each level by masking those where the
pre-calculated loss is too large. Then return the reduced losses.
Args:
cls_loss (Tensor): Element-wise classification loss.
Shape: (num_anchors, num_classes)
reg_loss (Tensor): Element-wise regression loss.
Shape: (num_anchors, 4)
assigned_gt_inds (Tensor): The gt indices that each anchor bbox
is assigned to. -1 denotes a negative anchor, otherwise it is the
gt index (0-based). Shape: (num_anchors, ),
labels (Tensor): Label assigned to anchors. Shape: (num_anchors, ).
level (int): The current level index in the pyramid
(0-4 for RetinaNet)
min_levels (Tensor): The best-matching level for each gt.
Shape: (num_gts, ),
Returns:
tuple:
- cls_loss: Reduced corrected classification loss. Scalar.
- reg_loss: Reduced corrected regression loss. Scalar.
- pos_flags (Tensor): Corrected bool tensor indicating the
final postive anchors. Shape: (num_anchors, ).
"""
loc_weight = torch.ones_like(reg_loss)
cls_weight = torch.ones_like(cls_loss)
pos_flags = assigned_gt_inds >= 0 # positive pixel flag
pos_indices = torch.nonzero(pos_flags, as_tuple=False).flatten()
if pos_flags.any(): # pos pixels exist
pos_assigned_gt_inds = assigned_gt_inds[pos_flags]
zeroing_indices = (min_levels[pos_assigned_gt_inds] != level)
neg_indices = pos_indices[zeroing_indices]
if neg_indices.numel():
pos_flags[neg_indices] = 0
loc_weight[neg_indices] = 0
# Only the weight corresponding to the label is
# zeroed out if not selected
zeroing_labels = labels[neg_indices]
assert (zeroing_labels >= 0).all()
cls_weight[neg_indices, zeroing_labels] = 0
# Weighted loss for both cls and reg loss
cls_loss = weight_reduce_loss(cls_loss, cls_weight, reduction='sum')
reg_loss = weight_reduce_loss(reg_loss, loc_weight, reduction='sum')
return cls_loss, reg_loss, pos_flags
| [
"[email protected]"
]
| |
9776eb8757fcf05c3b20a7a1ed8a475210100c6d | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/sieve-big-4571.py | 7986ba899ca704b6b17a66b634ad4ecdbec2d94f | []
| no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,755 | py | # A resizable list of integers
class Vector(object):
    items: [int] = None  # backing storage; may be larger than size
    size: int = 0        # logical number of stored elements
    def __init__(self:"Vector"):
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector", idx: int) -> object:
        # Reject out-of-range indices on both ends: the original only
        # checked idx < 0, so idx >= size decremented size anyway and
        # silently dropped the last element.
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector", idx: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector") -> int:
        return self.size
# A resizable list of integers
class Vector2(object):
    # items/size drive the vector; the *2 fields are generated duplicates
    # kept for interface compatibility but never read.
    items: [int] = None
    items2: [int] = None
    size: int = 0
    size2: int = 0
    def __init__(self:"Vector2"):
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector2") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector2") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector2", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (item2 is ignored)
    def append2(self:"Vector2", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector2", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (new_items2 is ignored)
    def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector2", idx: int) -> object:
        # Bound check fixed: idx >= size previously shrank the vector.
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (idx2 is ignored)
    def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector2", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (idx2 is ignored)
    def get2(self:"Vector2", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector2") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector2") -> int:
        return self.size
# A resizable list of integers
class Vector3(object):
    # items/size drive the vector; the *2/*3 fields are generated
    # duplicates kept for interface compatibility but never read.
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    def __init__(self:"Vector3"):
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector3") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector3") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity3(self:"Vector3") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector3", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra args ignored)
    def append2(self:"Vector3", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra args ignored)
    def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector3", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra args ignored)
    def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra args ignored)
    def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector3", idx: int) -> object:
        # Bound check fixed: idx >= size previously shrank the vector.
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra args ignored)
    def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra args ignored)
    def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector3", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra args ignored)
    def get2(self:"Vector3", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra args ignored)
    def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector3") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector3") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length3(self:"Vector3") -> int:
        return self.size
# A resizable list of integers
class Vector4(object):
    # items/size drive the vector; the *2..*4 fields are generated
    # duplicates kept for interface compatibility but never read.
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    items4: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    size4: int = 0
    def __init__(self:"Vector4"):
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector4") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector4") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity3(self:"Vector4") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity4(self:"Vector4") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity4(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector4", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra args ignored)
    def append2(self:"Vector4", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra args ignored)
    def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra args ignored)
    def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector4", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra args ignored)
    def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra args ignored)
    def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra args ignored)
    def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector4", idx: int) -> object:
        # Bound check fixed: idx >= size previously shrank the vector.
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra args ignored)
    def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra args ignored)
    def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra args ignored)
    def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector4", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra args ignored)
    def get2(self:"Vector4", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra args ignored)
    def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra args ignored)
    def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector4") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector4") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length3(self:"Vector4") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length4(self:"Vector4") -> int:
        return self.size
# A resizable list of integers
class Vector5(object):
    # items/size drive the vector; the *2..*5 fields are generated
    # duplicates kept for interface compatibility but never read.
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    items4: [int] = None
    items5: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    size4: int = 0
    size5: int = 0
    def __init__(self:"Vector5"):
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector5") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector5") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity3(self:"Vector5") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity4(self:"Vector5") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity5(self:"Vector5") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector5") -> int:
        # Fixed: the original read "$Var.items", a templating placeholder
        # left in the generated source and a syntax error.
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity4(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity5(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector5", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra args ignored)
    def append2(self:"Vector5", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra args ignored)
    def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra args ignored)
    def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra args ignored)
    def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector5", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra args ignored)
    def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra args ignored)
    def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra args ignored)
    def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra args ignored)
    def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector5", idx: int) -> object:
        # Bound check fixed: idx >= size previously shrank the vector.
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra args ignored)
    def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra args ignored)
    def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra args ignored)
    def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra args ignored)
    def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
        if idx < 0 or idx >= self.size:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector5", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra args ignored)
    def get2(self:"Vector5", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra args ignored)
    def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra args ignored)
    def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra args ignored)
    def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector5") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector5") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length3(self:"Vector5") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length4(self:"Vector5") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length5(self:"Vector5") -> int:
        return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
    doubling_limit:int = 1000
    # Grows by doubling until the limit, then one slot at a time.
    def increase_capacity(self:"DoublingVector") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            # Doubling limit reached: fall back to unit growth.
            self.items = self.items + [0]
        else:
            # Still under the limit: double the backing storage.
            self.items = self.items + self.items
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    # Grows by doubling until the limit, then one slot at a time.
    def increase_capacity(self:"DoublingVector2") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            # Doubling limit reached: fall back to unit growth.
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    # Identical generated twin of increase_capacity.
    def increase_capacity2(self:"DoublingVector2") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    # Grows by doubling until the limit, then one slot at a time.
    def increase_capacity(self:"DoublingVector3") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            # Doubling limit reached: fall back to unit growth.
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    # Identical generated twin of increase_capacity.
    def increase_capacity2(self:"DoublingVector3") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    # Identical generated twin of increase_capacity.
    def increase_capacity3(self:"DoublingVector3") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    doubling_limit4:int = 1000
    # Grows by doubling until the limit, then one slot at a time.
    def increase_capacity(self:"DoublingVector4") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            # Doubling limit reached: fall back to unit growth.
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    # Identical generated twin of increase_capacity.
    def increase_capacity2(self:"DoublingVector4") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    # Identical generated twin of increase_capacity.
    def increase_capacity3(self:"DoublingVector4") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    # Identical generated twin of increase_capacity.
    def increase_capacity4(self:"DoublingVector4") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    doubling_limit4:int = 1000
    doubling_limit5:int = 1000
    # Grows by doubling until the limit, then one slot at a time.
    def increase_capacity(self:"DoublingVector5") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            # Doubling limit reached: fall back to unit growth.
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    # Identical generated twin of increase_capacity.
    def increase_capacity2(self:"DoublingVector5") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    # Identical generated twin of increase_capacity.
    def increase_capacity3(self:"DoublingVector5") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    # Identical generated twin of increase_capacity.
    def increase_capacity4(self:"DoublingVector5") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
    # Identical generated twin of increase_capacity.
    def increase_capacity5(self:"DoublingVector5") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
    # Collects the integers i, i+1, ..., j-1 into a fresh DoublingVector.
    result:Vector = None
    result = DoublingVector()
    while i < j:
        result.append(i)
        i = i + 1
    return result
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
    # Builds the vector [i, j); i2/j2 are accepted for signature
    # compatibility but never read. Dead local v2 removed.
    v:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
    # Builds the vector [i, j); the extra parameters are accepted for
    # signature compatibility but never read. Dead locals v2/v3 removed.
    v:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
    # Builds the vector [i, j); the extra parameters are accepted for
    # signature compatibility but never read. Dead locals v2..v4 removed.
    v:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
    # Builds the vector [i, j); the extra parameters are accepted for
    # signature compatibility but never read. Dead locals v2..v5 removed.
    v:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
    # In-place sieve: drops every value divisible by an earlier surviving
    # value; for input 2..n this leaves exactly the primes.
    pos:int = 0
    scan:int = 0
    divisor:int = 0
    while pos < v.length():
        divisor = v.get(pos)
        scan = pos + 1
        while scan < v.length():
            if v.get(scan) % divisor == 0:
                v.remove_at(scan)
            else:
                scan = scan + 1
        pos = pos + 1
def sieve2(v:Vector, v2:Vector) -> object:
    # Sieves v in place; v2 is accepted for signature compatibility but
    # never touched. Dead duplicate locals (i2, j2, k2) removed.
    i:int = 0
    j:int = 0
    k:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
    # Sieves v in place; v2/v3 are accepted for signature compatibility
    # but never touched. Dead duplicate locals removed.
    i:int = 0
    j:int = 0
    k:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
    # Sieves v in place; v2..v4 are accepted for signature compatibility
    # but never touched. Dead duplicate locals removed.
    i:int = 0
    j:int = 0
    k:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
    # Sieves v in place; v2..v5 are accepted for signature compatibility
    # but never touched. Dead duplicate locals removed.
    i:int = 0
    j:int = 0
    k:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# Input parameter
# Only `n` is read below; n2..n5 are generated duplicates that are never used.
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
# `v` is the working vector; v2..v5 are built identically but never examined.
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch
# Fill each vector with the integers [2, n), then sieve v down to the primes.
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print
# Emit the surviving (prime) values, one per line.
while i < v.length():
    print(v.get(i))
    i = i + 1
| [
"[email protected]"
]
| |
bc4f9b46cbfb936585c30642734af6d52f04b823 | 7cf00f09f9a46175a08993196da0db7b3a48a992 | /arrays/is_monotonic.py | 90fc581517fcf3a0346b47cf67b74f76d3070754 | []
| no_license | newsteinking/algorithms3 | 4fdde66f2f40ce53346752173493265391307ccd | c090674898d97fc2564ac688dc2347a5d0c33dfb | refs/heads/master | 2022-04-21T10:02:47.091503 | 2020-04-21T06:32:07 | 2020-04-21T06:32:07 | 257,497,952 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,183 | py | '''
An array is monotonic if it is either monotone increasing or monotone decreasing.
An array A is monotone increasing if for all i <= j, A[i] <= A[j]. An array A is monotone
decreasing if for all i <= j, A[i] >= A[j].
Return true if and only if the given array A is monotonic.
Example 1:
Input: [1,2,2,3]
Output: true
Example 2:
Input: [6,5,4,4]
Output: true
Example 3:
Input: [1,3,2]
Output: false
Example 4:
Input: [1,2,4,5]
Output: true
Example 5:
Input: [1,1,1]
Output: true
'''
# def monotonic_array(A):
# if len(A) == 1: return True
# down, up = False, False
#
# for i in range(len(A) - 1):
# if A[i] > A[i + 1]:
# if up: return False
# down = True
# elif A[i] < A[i + 1]:
# if down: return False
# up = True
#
# return False if up and down else True
def is_monotonic(A):
    """Return True when A is entirely non-decreasing or non-increasing."""
    pairs = list(zip(A, A[1:]))
    non_decreasing = all(a <= b for a, b in pairs)
    non_increasing = all(a >= b for a, b in pairs)
    return non_decreasing or non_increasing
| [
"[email protected]"
]
| |
c8d40e1b41a5aa6ba1ecbbad3fd6ded3ce1e72cb | bd08d0532f20b7285b437c9bf620de1bbcd5b9ea | /aalh_iit_natreghis_001/debug-convert-dates.py | 01dd4897c756affd40535d0c2248df0dd5aaf371 | [
"Unlicense"
]
| permissive | johndewees/iitmigration | a9e8a31ba6ceb541ce12c22fd612596cc243dbca | 4dadfbecda719d6e7d60af076a231aedec3c862f | refs/heads/main | 2023-03-14T17:06:58.777683 | 2021-03-27T20:44:58 | 2021-03-27T20:44:58 | 320,086,321 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,850 | py | from openpyxl import load_workbook
import re

# Worksheet repair script: column 31 of rows 7..194 holds a "date digitized"
# value that should be ISO formatted (YYYY-MM-DD).
filename = 'aalh_iit_natreghis_001.xlsx'
wb = load_workbook(filename)
ws = wb['Metadata Template']
minimumcol = 31
maximumcol = 31
minimumrow = 7
maximumrow = 194
iterationrow = 7
targetcol = 31
for row in ws.iter_rows(min_row=minimumrow, min_col=minimumcol, max_row=maximumrow, max_col=maximumcol):
    # First pass: rewrite slash-separated dates to ISO YYYY-MM-DD in place.
    for cell in row:
        testvar = ws.cell(row=iterationrow, column=targetcol).value
        #print(testvar)
        if testvar == None:
            continue
            #print('No Date Digitized')
        elif testvar.find('/') != -1:
            # Already YYYY/MM/DD: only the separators need replacing.
            testvar2 = re.search('\d\d\d\d\/\d\d\/\d\d', testvar)
            if testvar2:
                testvar3 = testvar2[0]
                testvar3 = testvar3.replace('/','-')
                ws.cell(row=iterationrow, column=targetcol).value = testvar3
                print(iterationrow,'|',testvar,'|',ws.cell(row=iterationrow, column=targetcol).value)
            else:
                # Otherwise assumed M/D/YYYY (US style): zero-pad month/day.
                testvarlist = testvar.split('/')
                testvaryear = testvarlist[2]
                testvaryear = testvaryear.strip()
                testvarmonth = testvarlist[0]
                testvarmonth = testvarmonth.strip()
                testvarmonth = int(testvarmonth)
                if testvarmonth < 10:
                    testvarmonth = str(testvarmonth)
                    testvarmonth = '0' + testvarmonth
                else:
                    testvarmonth = str(testvarmonth)
                testvarday = testvarlist[1]
                testvarday = testvarday.strip()
                testvarday = int(testvarday)
                if testvarday < 10:
                    testvarday = str(testvarday)
                    testvarday = '0' + testvarday
                else:
                    testvarday = str(testvarday)
                isodate = testvaryear + '-' + testvarmonth + '-' + testvarday
                ws.cell(row=iterationrow, column=targetcol).value = isodate
                print(iterationrow,'|',testvar,'|',ws.cell(row=iterationrow, column=targetcol).value)
        else:
            continue
            #print('Date is already formatted correctly')
    # Second pass: flag any rewritten value that is not exactly 10 characters
    # (a valid ISO date) for manual review.
    for cell in row:
        testvar2 = ws.cell(row=iterationrow, column=targetcol).value
        if testvar2 == None:
            continue
            #print('Still No Date Digitized')
        elif testvar2.find('-') != -1:
            length = len(testvar2)
            if length > 10:
                print('***CHECK THIS LINE FOR INCORRECT FORMATTING***')
            elif length < 10:
                print('***CHECK THIS LINE FOR INCORRECT FORMATTING***')
            #else:
                #print('Date is correctly formatted')
    iterationrow = iterationrow + 1
wb.save('aalh_iit_natreghis_001.xlsx') | [
"[email protected]"
]
| |
d8dcc43724528dd231ed378820c811ee17da5ad7 | 67325192c1e528a39d457f11e61b480d68826708 | /__main__.py | 82f897e592bf6b74763104fad108b9230f332b9a | [
"MIT"
]
| permissive | vashistaarav1611/mcpython-a-minecraft-clone-in-python | 5851b377b54fd2b28c106112c7b18f397b71ab50 | c16cd66f319efdeec4130e1a43f5a857caf1ea13 | refs/heads/master | 2023-02-01T22:48:51.787106 | 2020-12-21T15:02:25 | 2020-12-21T15:02:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 221 | py | print("MCPYTHON ModLoader")
import sys, os, globals as G  # NOTE(review): 'globals' is a project module shadowing the builtin

# First CLI argument is the game directory; defaults to the current directory.
dir = sys.argv[1] if len(sys.argv) > 1 else "./"  # NOTE(review): shadows builtin dir()
G.local = dir
# Append this file's own directory so ModLoader can find bundled resources.
sys.argv.append(os.path.dirname(os.path.realpath(__file__)))
import ModLoader
ModLoader.load(dir)
| [
"[email protected]"
]
| |
60b231062ee9be5296c037658e8fe6e337909004 | 43bac293a3ee710140f3869937ef4b37345bac2a | /ex37.py | f6dfdccbd4c692a1335b88844333df307e5eb468 | []
| no_license | DikranHachikyan/python-PLDA-20191011 | 4e176f7eaa65627a4670acd75f470016bfed4f8e | 80cc4740039fcc473cdf436499b0c602a9ab48e0 | refs/heads/master | 2020-08-11T15:54:02.413645 | 2019-11-05T09:39:17 | 2019-11-05T09:39:17 | 214,591,265 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 451 | py | #!/home/wizard/anaconda3/bin/python
# import time
from time import time,sleep
def foo(sleep_time=0.3):
    """Block the calling thread for *sleep_time* seconds (default 0.3)."""
    sleep(sleep_time)
def measure(func):
    """Decorator: time one call of *func* and print its name, elapsed seconds
    (4 decimal places) and docstring.

    Fixes over the previous version:
    - the wrapped function's return value is propagated instead of discarded;
    - functools.wraps preserves the wrapped function's metadata.
    """
    from functools import wraps

    @wraps(func)
    def wrapper(*args, **kwargs):
        t = time()
        result = func(*args, **kwargs)  # keep the result so callers get it back
        print(f'{func.__name__} : {time() - t:.4f}')
        print(f'{func.__doc__}')
        return result
    return wrapper
# Manually decorate foo (equivalent to @measure) and time one 0.5 s call.
f = measure(foo)
f(0.5)
# Show the metadata exposed on the returned wrapper.
print(f'{f.__name__}:{f.__doc__}')
| [
"[email protected]"
]
| |
c39386cd8e78c8663735bc64da95f42972ef91f9 | 1033906372e48d2f53b907848b86dec2eab635f4 | /old/midify.py | dc63760c0b5b28e1d2f23765396f8c3eee5b2a8c | []
| no_license | csv/ddpy | b0a29fbc094f728b8cbfafa4d5a301c7a35006c8 | 2c1fccdff907f0504b5f514cfd67199a2e77514e | refs/heads/master | 2016-09-05T17:44:25.506356 | 2013-11-08T18:57:25 | 2013-11-08T18:57:25 | 12,258,676 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,998 | py | from midiutil.MidiFileGenerator import MidiFileGenerator, MidiTrack
from midiutil.Scales import *
from math import ceil
from random import sample
import json
from defaults import note_lookup, root_lookup
def note_to_midi(n):
    # Convert a note name (e.g. "C4") to its MIDI number via note_lookup;
    # ints are assumed to already be MIDI numbers and pass through unchanged.
    # NOTE(review): `basestring` makes this Python-2-only code; any other
    # input type falls through and returns None.
    if isinstance(n, basestring):
        return note_lookup[n]
    elif isinstance(n, int):
        return n
def root_to_midi(n):
    # Convert a scale-root name (e.g. "C") to its MIDI pitch class via
    # root_lookup; ints pass through unchanged.
    # NOTE(review): `basestring` makes this Python-2-only code; any other
    # input type falls through and returns None.
    if isinstance(n, basestring):
        return root_lookup[n]
    elif isinstance(n, int):
        return n
def bpm_time(bpm=120, count=0.25):
    """Return the duration in seconds of *count* bars at *bpm* (4 beats/bar)."""
    seconds_per_beat = 60.0 / float(bpm)
    seconds_per_bar = seconds_per_beat * 4.0
    return seconds_per_bar * float(count)
def scale_vec(vec, low, high):
    # Linearly rescale vec onto integer indexes in [low, high].
    # extract min and max info
    min_vec = min(vec)
    max_vec = max(vec)
    # scale
    # NOTE(review): ceil() is applied to (v - min_vec) *before* the scaling
    # multiply, so fractional inputs round up early; also raises
    # ZeroDivisionError when all elements are equal (max_vec == min_vec).
    # Confirm both behaviours are intended before changing.
    return [(int(ceil(v - min_vec)) * (high-low) / (max_vec - min_vec)) for v in vec]
def midify(
    vec,
    out_file,
    key = "C",
    scale=MAJOR,
    bpm=120,
    count=0.25,
    channel=1,
    min_note="C-1",
    max_note="G9"
    ):
    """Quantise a numeric vector onto a musical scale and write it as MIDI.

    Each value of *vec* is mapped linearly onto the notes of *scale* rooted
    at *key*, restricted to [min_note, max_note]; every note lasts *count*
    bars at *bpm* and is written to *out_file* on *channel*.
    """
    # transform keys and min/max notes
    key = root_to_midi(key)
    min_note = note_to_midi(min_note)
    max_note = note_to_midi(max_note)
    # select notes
    notes = build_scale(key, scale, min_note, max_note)
    # scale notes
    note_indexes = scale_vec(vec, low=0, high=(len(notes)-1))
    # determine note length
    beat = bpm_time(bpm, count)
    # generate midi file
    m = MidiFileGenerator()
    track = MidiTrack(channel=channel, tempo=bpm)
    t = 0
    for i in note_indexes:
        n = notes[i]
        track.add_note(time=t, duration=beat, note=n, velocity=100)
        t += beat
    m.tracks.append(track)
    m.writeToFile(out_file)
if __name__ == '__main__':
    # Demo: render random 32-step sequences to MIDI files with different
    # scales, keys and note ranges.
    vec = sample(range(1,10000), 32)
    midify(vec, bpm=130, count= 0.125, out_file="random.mid", scale=CHROMATIC, min_note="C2", max_note="D#3")
    vec = sample(range(1,10000), 32)
    midify(vec, bpm=130, count= 0.125, out_file="bass.mid", key = "E", scale=MAJOR, min_note="E2", max_note="G#4")
    vec = sample(range(1,10000), 32)
midify(vec, bpm=130, count= 0.125, out_file="arp.mid", key = "E", scale=MAJOR, min_note="B5", max_note="G#7") | [
"[email protected]"
]
| |
5a9f06ce2274a5bf1cf80deef6615eda13e914a2 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03359/s464721803.py | dae496665ea39358e0189f534bf32abdff60ee66 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 79 | py | a, b = map(int, input().split())
# Print a when it is strictly smaller than b; otherwise print the larger of
# a-1 and b.  (a and b are read from stdin on the preceding line.)
if a < b:
    print(a)
else:
    print(max(a-1,b))
"[email protected]"
]
| |
803e427fe7e8e3ed41280faf81f3782e5db35337 | f0a624fc75db12a105e096391d354d5847a7afa5 | /day08/demo03.py | 4998c37c3149e7644e42bac91846e763e07c3c91 | []
| no_license | Linkin-1995/test_code1 | e50399e929bdf23ac7b82f54dd9ff63a64223d6a | 7d0b41516751538a967aa5d42161195ac49fc842 | refs/heads/master | 2022-12-06T18:47:06.858474 | 2020-08-21T10:40:31 | 2020-08-21T10:40:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 592 | py | """
函数 - 功能
参数: 用法 向 做法 传递的信息
制作
def 函数名称(变量1,变量2):
函数体
使用
函数名(数据1,数据2)
练习:exercise01~03
"""
# 做法
# 形式参数:表面的数据(真实数据的代表)
def attack(count):
    """Print one attack combo (swing punch, kick, hook) *count* times.

    count is the actual argument supplied by the caller; non-positive counts
    print nothing.
    """
    remaining = count
    while remaining > 0:
        print("摆拳")
        print("临门一脚")
        print("勾拳")
        remaining -= 1
# Actual arguments: concrete data that really exists.
# Usage: 10 repetitions.
attack(10) # While debugging, press F7 to step into the function body.
attack(3)
| [
"[email protected]"
]
| |
49ac1ff9278d3e2219128273a10b912b104b6472 | 8afb5afd38548c631f6f9536846039ef6cb297b9 | /MY_REPOS/Lambda-Resource-Static-Assets/2-resources/_External-learning-resources/02-pyth/algorithms-master/algorithms/backtrack/letter_combination.py | 5bece7303612a8a562505e1618ed24a503bc5002 | [
"MIT"
]
| permissive | bgoonz/UsefulResourceRepo2.0 | d87588ffd668bb498f7787b896cc7b20d83ce0ad | 2cb4b45dd14a230aa0e800042e893f8dfb23beda | refs/heads/master | 2023-03-17T01:22:05.254751 | 2022-08-11T03:18:22 | 2022-08-11T03:18:22 | 382,628,698 | 10 | 12 | MIT | 2022-10-10T14:13:54 | 2021-07-03T13:58:52 | null | UTF-8 | Python | false | false | 791 | py | """
Given a digit string, return all possible letter
combinations that the number could represent.
A mapping of digit to letters (just like on the telephone buttons) is given below:
2: "abc"
3: "def"
4: "ghi"
5: "jkl"
6: "mno"
7: "pqrs"
8: "tuv"
9: "wxyz"
Input:Digit string "23"
Output: ["ad", "ae", "af", "bd", "be", "bf", "cd", "ce", "cf"].
"""
def letter_combinations(digits):
    """Return every letter string the phone-keypad digit string could spell.

    Output order matches a digit-by-digit breadth-first expansion, e.g.
    "23" -> ["ad", "ae", "af", "bd", "be", "bf", "cd", "ce", "cf"].
    """
    if digits == "":
        return []
    keypad = {
        "2": "abc",
        "3": "def",
        "4": "ghi",
        "5": "jkl",
        "6": "mno",
        "7": "pqrs",
        "8": "tuv",
        "9": "wxyz",
    }
    combos = [""]
    for digit in digits:
        combos = [prefix + letter for prefix in combos for letter in keypad[digit]]
    return combos
| [
"[email protected]"
]
| |
a70fa1ef04a8545a6a13b33878d492afa39584b7 | 5b3bd326998606188b45a7870852643eda024a97 | /utils/dataset_util_test.py | 7b293e9d4759ba5e20c82eb256f1963d613d98af | []
| no_license | KuznetsovIllya/clearml_od_toy | 31556d0726d15a054c1c18317c361d97801381a4 | 92f15f04a023d4e0e165a250fddc3129144913d0 | refs/heads/main | 2023-04-11T05:55:56.248478 | 2021-04-14T15:59:40 | 2021-04-14T15:59:40 | 357,827,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:5b987aa43023f49104601b5e44e6096dd219539d07712d377a93b0716555ebfc
size 1416
| [
"[email protected]"
]
| |
ad84bdb8f2de5e3741f4d7ffefeadb7518f4c055 | 48894ae68f0234e263d325470178d67ab313c73e | /sa/profiles/Dell/Powerconnect55xx/get_config.py | cb916fc0aa0de2175f60138f19dba501f00cb408 | [
"BSD-3-Clause"
]
| permissive | DreamerDDL/noc | 7f949f55bb2c02c15ac2cc46bc62d957aee43a86 | 2ab0ab7718bb7116da2c3953efd466757e11d9ce | refs/heads/master | 2021-05-10T18:22:53.678588 | 2015-06-29T12:28:20 | 2015-06-29T12:28:20 | 118,628,133 | 0 | 0 | null | 2018-01-23T15:19:51 | 2018-01-23T15:19:51 | null | UTF-8 | Python | false | false | 671 | py | # -*- coding: utf-8 -*-
##----------------------------------------------------------------------
## Dell.Powerconnect55xx.get_config
##----------------------------------------------------------------------
## Copyright (C) 2007-2013 The NOC Project
## See LICENSE for details
##----------------------------------------------------------------------
## NOC modules
from noc.sa.script import Script as NOCScript
from noc.sa.interfaces import IGetConfig
class Script(NOCScript):
    """NOC SA script: fetch the running configuration from a Dell
    PowerConnect 55xx switch."""
    name = "Dell.Powerconnect55xx.get_config"
    implements = [IGetConfig]
    def execute(self):
        # Pull the config over the CLI session and strip prompt/echo noise
        # before returning it.
        config = self.cli("show running-config")
        return self.cleaned_config(config)
| [
"[email protected]"
]
| |
e0060a932e35c03b8ddfa5590407f24a3b89b43b | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/compareVersions_20200909132443.py | e3af193a67a236c06299befe3c97f2948195dae3 | []
| no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 801 | py | def compare(version1,version2):
def compare(version1, version2):
    """Compare two dotted version strings.

    Returns 1 if version1 > version2, -1 if version1 < version2, 0 if equal.
    Shorter versions are padded with zeros, so "1" == "1.0" and "1" < "1.1".

    Fixes the previous implementation, which indexed both lists up to the
    longer length and raised IndexError for inputs of different lengths
    (e.g. compare("1", "1.1")); the unreachable second loop is removed.
    """
    v1 = [int(part) for part in version1.split(".")]
    v2 = [int(part) for part in version2.split(".")]
    width = max(len(v1), len(v2))
    v1 += [0] * (width - len(v1))
    v2 += [0] * (width - len(v2))
    for a, b in zip(v1, v2):
        if a > b:
            return 1
        if a < b:
            return -1
    return 0
# Manual smoke test with versions of different lengths.
print(compare("1","1.1"))
| [
"[email protected]"
]
| |
a2362b33a515b42f0d88781d9ff0ffc257add61e | f63c4eb29ce57319441f5469d1d049b63bc220de | /swu_cycle_variance/run731.py | a339111e3d6be576fc0ce0a490a578c86293539f | []
| no_license | a-co/diversion_models | 0237642153668b16035699e9e734ff0538568582 | 69eed2687b1cd2b48f5717d15919eccd24a0eabc | refs/heads/main | 2023-05-02T19:04:26.333677 | 2020-06-18T20:50:18 | 2020-06-18T20:50:18 | 216,904,337 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,361 | py | SIMULATION = {'simulation': {'agent': [{'name': 'deployer_military', 'prototype': 'military_deployer'}, {'name': 'deployer_civilian', 'prototype': 'civilian_deployer'}, {'name': 'deployer_shared', 'prototype': 'shared_deployer'}], 'archetypes': {'spec': [{'lib': 'cycamore', 'name': 'DeployInst'}, {'lib': 'cycamore', 'name': 'Source'}, {'lib': 'cycamore', 'name': 'Sink'}, {'lib': 'cycamore', 'name': 'Storage'}, {'lib': 'cycamore', 'name': 'Reactor'}, {'lib': 'cycamore', 'name': 'Separations'}, {'lib': 'cycamore', 'name': 'Enrichment'}]}, 'control': {'duration': '144', 'explicit_inventory': 'true', 'startmonth': '1', 'startyear': '2020'}, 'prototype': [{'config': {'Source': {'inventory_size': '1e30', 'outcommod': 'u_ore', 'outrecipe': 'r_u_ore', 'throughput': '1e10'}}, 'name': 'mine'}, {'config': {'Separations': {'feed_commod_prefs': {'val': ['1.0', '10.0', '100.0']}, 'feed_commods': {'val': ['u_ore', 'u_ore1', 'u_ore2']}, 'feedbuf_size': '2e8', 'leftover_commod': 'waste', 'streams': {'item': {'commod': 'u_nat', 'info': {'buf_size': '150000', 'efficiencies': {'item': [{'comp': 'U', 'eff': '.99'}, {'comp': 'O', 'eff': '.99'}]}}}}, 'throughput': '2e8'}}, 'name': 'milling'}, {'config': {'Separations': {'feed_commod_prefs': {'val': '1.0'}, 'feed_commods': {'val': 'u_nat'}, 'feedbuf_size': '200000', 'leftover_commod': 'waste', 'streams': {'item': {'commod': 'uf6', 'info': {'buf_size': '200000', 'efficiencies': {'item': {'comp': 'U', 'eff': '.99'}}}}}, 'throughput': '200000'}}, 'name': 'conversion'}, {'config': {'Enrichment': {'feed_commod_prefs': {'val': '1'}, 'feed_commods': {'val': 'uf6'}, 'feed_recipe': 'r_uox', 'max_feed_inventory': '20000', 'product_commod': 
'mil_fiss', 'swu_capacity': '9876.671050324314', 'tails_assay': '0.003', 'tails_commod': 'mil_u_dep'}}, 'name': 'mil_enrichment'}, {'config': {'Storage': {'in_commods': {'val': 'mil_u_dep'}, 'out_commods': {'val': 'mil_u_dep_str'}, 'residence_time': '0'}}, 'name': 'mil_str_u_dep'}, {'config': {'Storage': {'in_commod_prefs': {'val': '1'}, 'in_commods': {'val': 'uf6'}, 'in_recipe': 'r_mil_uox', 'max_inv_size': '30000', 'out_commods': {'val': 'mil_uox'}, 'residence_time': '0'}}, 'name': 'mil_uox_fabrication'}, {'config': {'Reactor': {'assem_size': '14000', 'cycle_time': '23', 'fuel_incommods': {'val': 'mil_uox'}, 'fuel_inrecipes': {'val': 'r_mil_uox'}, 'fuel_outcommods': {'val': 'mil_uox_spent'}, 'fuel_outrecipes': {'val': 'r_mil_uox_spent'}, 'fuel_prefs': {'val': '1'}, 'n_assem_batch': '1', 'n_assem_core': '1', 'power_cap': '0.15', 'refuel_time': '0'}}, 'lifetime': '960', 'name': 'mil_lwr'}, {'config': {'Storage': {'in_commods': {'val': 'mil_mox_spent'}, 'out_commods': {'val': 'mil_mox_spent_str'}, 'residence_time': '60'}}, 'name': 'mil_str_mox_spent'}, {'config': {'Separations': {'feed_commod_prefs': {'val': '1.0'}, 'feed_commods': {'val': 'mil_uox_spent'}, 'feedbuf_size': '30000000000', 'leftover_commod': 'waste', 'streams': {'item': {'commod': 'mil_fiss', 'info': {'buf_size': '3000000000', 'efficiencies': {'item': {'comp': 'Pu', 'eff': '.95'}}}}}, 'throughput': '1e100'}}, 'name': 'reprocessing'}, {'config': {'Storage': {'in_commod_prefs': {'val': '10'}, 'in_commods': {'val': 'mil_fiss'}, 'in_recipe': 'r_mil_heu', 'max_inv_size': '1e100', 'out_commods': {'val': 'mil_heu'}, 'residence_time': '0'}}, 'name': 'mil_str_fiss'}, {'config': {'Enrichment': {'feed_commod_prefs': {'val': ['1', '20']}, 'feed_commods': {'val': ['uf6', 'mil_uf6']}, 'feed_recipe': 'r_natl_u', 'max_feed_inventory': '100000', 'product_commod': 'civ_leu', 'swu_capacity': '35000', 'tails_assay': '0.003', 'tails_commod': 'u_dep'}}, 'name': 'civ_enrichment'}, {'config': {'Storage': {'in_commods': 
{'val': 'u_dep'}, 'out_commods': {'val': 'u_dep_str'}, 'residence_time': '0'}}, 'name': 'civ_str_u_dep'}, {'config': {'Storage': {'in_commod_prefs': {'val': '1000'}, 'in_commods': {'val': 'civ_leu'}, 'in_recipe': 'r_uox', 'max_inv_size': '30000', 'out_commods': {'val': 'uox'}, 'residence_time': '1'}}, 'name': 'civ_fabrication'}, {'config': {'Reactor': {'assem_size': '29565', 'cycle_time': '18', 'fuel_incommods': {'val': 'uox'}, 'fuel_inrecipes': {'val': 'r_uox'}, 'fuel_outcommods': {'val': 'uox_spent'}, 'fuel_outrecipes': {'val': 'r_uox_spent'}, 'n_assem_batch': '1', 'n_assem_core': '3', 'power_cap': '900', 'refuel_time': '0'}}, 'lifetime': '960', 'name': 'civ_lwr'}, {'config': {'Storage': {'in_commods': {'val': 'uox_spent'}, 'out_commods': {'val': 'uox_spent_str'}, 'residence_time': '60'}}, 'name': 'civ_str_uox_spent'}, {'config': {'DeployInst': {'build_times': {'val': ['37', '37', '61', '73']}, 'n_build': {'val': ['1', '1', '1', '1']}, 'prototypes': {'val': ['mil_enrichment', 'mil_str_u_dep', 'mil_uox_fabrication', 'mil_str_fiss']}}}, 'name': 'military_deployer'}, {'config': {'DeployInst': {'build_times': {'val': ['121', '121', '121', '145', '157', '169']}, 'n_build': {'val': ['1', '1', '1', '1', '1', '1']}, 'prototypes': {'val': ['civ_enrichment', 'civ_str_u_dep', 'civ_fabrication', 'civ_lwr', 'civ_str_uox_spent', 'civ_lwr']}}}, 'name': 'civilian_deployer'}, {'config': {'DeployInst': {'build_times': {'val': ['1', '1', '1']}, 'n_build': {'val': ['1', '1', '1']}, 'prototypes': {'val': ['mine', 'milling', 'conversion']}}}, 'name': 'shared_deployer'}], 'recipe': [{'basis': 'mass', 'name': 'r_u_ore', 'nuclide': [{'comp': '0.0071', 'id': '922350000'}, {'comp': '0.9929', 'id': '922380000'}, {'comp': '999', 'id': '120240000'}]}, {'basis': 'mass', 'name': 'r_natl_u', 'nuclide': [{'comp': '0.0071', 'id': '922350000'}, {'comp': '0.9929', 'id': '922380000'}]}, {'basis': 'mass', 'name': 'r_uox', 'nuclide': [{'comp': '0.05', 'id': '922350000'}, {'comp': '0.95', 'id': 
'922380000'}]}, {'basis': 'mass', 'name': 'r_uox_spent', 'nuclide': [{'comp': '0.01', 'id': '922350000'}, {'comp': '0.94', 'id': '922380000'}, {'comp': '0.01', 'id': '942390000'}, {'comp': '0.001', 'id': '952410000'}, {'comp': '0.03', 'id': '551350000'}]}, {'basis': 'mass', 'name': 'r_mil_uox', 'nuclide': [{'comp': '0.0071', 'id': '922350000'}, {'comp': '0.9929', 'id': '922380000'}]}, {'basis': 'mass', 'name': 'r_mil_uox_spent', 'nuclide': [{'comp': '0.0071', 'id': '922350000'}, {'comp': '0.9919', 'id': '922380000'}, {'comp': '0.001', 'id': '942390000'}]}, {'basis': 'mass', 'name': 'r_mil_heu', 'nuclide': [{'comp': '0.90', 'id': '922350000'}, {'comp': '0.10', 'id': '922380000'}]}]}} | [
"[email protected]"
]
| |
ddfca99d193430dadb415a4fc865f00805199317 | 464850ba426263b17084fc71363ca14b8278b15e | /08.py | 44f4c2b9d406b330216766b8bf809b7a7f07f5da | []
| no_license | eng-arvind/python | 8442c30ec10f979f913b354458b4f910539d8728 | 249f5f35f245a3f1742b10310de37ca6c6023af2 | refs/heads/master | 2020-12-23T06:40:16.911269 | 2020-02-02T18:42:01 | 2020-02-02T18:42:01 | 237,069,973 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 150 | py | def pat(n):
def pat(n):
    """Print a triangle of n rows; row i (1-based) holds the next i
    consecutive integers, each followed by a space."""
    value = 1
    for width in range(1, n + 1):
        for _ in range(width):
            print(value, end=" ")
            value += 1
        print()


pat(5)
| [
"[email protected]"
]
| |
54f0a4e994fa3dc317186e28957003463120f74a | fc2b8c646223acd17819d631ce57146cd1725456 | /collect_dataset/main_collect.py | 9482d6222d69a652ec7a04fa6d5909b487bf616f | []
| no_license | peachman05/RGB_action_recognition | bd4ac4a60097ac70795b0a9cbdf6d332cd85d764 | 21b47f1d1c0f1712d0dc22bb57c52db3c31b47ed | refs/heads/master | 2020-08-31T13:35:38.699161 | 2020-04-12T09:11:45 | 2020-04-12T09:11:45 | 218,701,781 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 425 | py | from collect_dataset import run_collect
# Define
run_time = 60 # second
action_select = 3 # 0=dribble, 1=shoot, 2=pass, 3=stand
path_dataset = 'F:\\Master Project\\Dataset\\BasketBall-RGB\\' # folder path
show_FPS = False
action_list = ['dribble','shoot','pass','stand']
action = action_list[action_select]
path_save = path_dataset +'\\'+action+'\\'+action
run_collect(path_save, run_time, show_FPS)
print("finish main")
| [
"[email protected]"
]
| |
ab25a0d7423a94f6e815e35c92274f70bf90ad71 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2887/60717/245850.py | ee87114ecb204b742c07e205a3999ba39009bdb5 | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 398 | py | n=int(input())
# Tally scores for two entities over n input lines (n is read just above).
fLive=0
fDead=0
sLive=0
sDead=0
for i in range(0,n):
    list2=input().split()
    for j in range(0,3):
        list2[j]=int(list2[j])
    # Field 0 selects the entity (1 = first, anything else = second);
    # fields 1 and 2 are its LIVE and DEAD scores.
    if list2[0]==1:
        fLive+=list2[1]
        fDead+=list2[2]
    else:
        sLive+=list2[1]
        sDead+=list2[2]
# Ties count as LIVE for both verdicts below.
if fLive>=fDead:
    print('LIVE')
else:
    print('DEAD')
if sLive>=sDead:
    print('LIVE')
else:
    print('DEAD')
| [
"[email protected]"
]
| |
f7c6f5795fda746e447912d435cc4d8d0ee17c71 | fa8dc42cfcf99de58f76807accc5c3566ddae6e4 | /tests/test_verify.py | 408a92f3380584be91aae4234fe172385484b523 | [
"MIT"
]
| permissive | thusoy/porridge | 1124cc99cd77f672e6fec5e3d87396c72938a944 | f332b67f29bcbc19b7bb7da2f68ad3af35a9cd4d | refs/heads/master | 2021-01-23T03:53:04.352136 | 2018-05-17T16:28:41 | 2018-05-17T16:28:41 | 86,129,217 | 19 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,526 | py | # coding: utf-8
from __future__ import unicode_literals
import pytest
from hypothesis import given, assume
from hypothesis.strategies import integers, text
from porridge import Porridge, MissingKeyError, EncodedPasswordError
from porridge.utils import ensure_bytes
@pytest.mark.parametrize('test_password', (
    "pässword".encode("latin-1"),
    "pässword",
))
def test_verify(test_password):
    """
    Verification works with unicode and bytes.
    """
    # 'encoding' must match the bytes parametrization above so both
    # variants decode to the same password.
    porridge = Porridge('keyid1:key1', encoding='latin1')
    encoded = (  # handrolled test vector lifted from argon2_cffi
        "$argon2i$m=8,t=1,p=1$"
        "bL/lLsegFKTuR+5vVyA8tA$VKz5CHavCtFOL1N5TIXWSA"
    )
    assert porridge.verify(test_password, encoded)
@given(text())
def test_verify_self(porridge, given_password):
    # Round-trip property: any password boiled by an instance verifies
    # against that same instance.  `porridge` presumably comes from a pytest
    # fixture defined elsewhere in the suite.
    assert porridge.verify(given_password, porridge.boil(given_password))
@given(
    time_cost=integers(1, 5),
    memory_cost=integers(0, 513),
    parallelism=integers(1, 5),
)
def test_verify_custom_parameters(password, time_cost, memory_cost, parallelism):
    """Boil/verify round-trips for a range of argon2 cost parameters."""
    # argon2 requires memory_cost >= 8 * parallelism; discard other draws.
    assume(parallelism * 8 <= memory_cost)
    porridge = Porridge('key:secret', time_cost=time_cost, memory_cost=memory_cost,
        parallelism=parallelism)
    assert porridge.verify(password, porridge.boil(password))
def test_verify_self_default_parameters(password):
    """Boil/verify round-trips with the library's default cost parameters."""
    porridge = Porridge('key:secret')
    assert porridge.verify(password, porridge.boil(password))
def test_invalid_password(porridge):
    """A wrong password does not verify against another password's hash."""
    assert porridge.verify('pass1', porridge.boil('pass2')) == False
def test_attacker_cant_verify_without_secret(password):
    """Hashes are keyed: verification without the original secret fails."""
    our_porridge = Porridge('id1:key1')
    attacker_porridge = Porridge('otherid:otherkey')
    encoded_password = our_porridge.boil(password)
    # The attacker does not hold key id 'id1', so the key lookup must fail.
    with pytest.raises(MissingKeyError):
        attacker_porridge.verify(password, encoded_password)
def test_verify_invalid_password_type(porridge):
    """Non-string passwords are rejected with a descriptive TypeError."""
    with pytest.raises(TypeError) as exception:
        porridge.verify(1, '')
    assert exception.value.args[0].startswith("'password' must be a str")
@pytest.mark.parametrize('encoded', (
    # these are all encoded versions of 'password'
    '$argon2i$v=19$m=512,t=2,p=2$Vr7zN80DmEZdRQcMGeV2lA$/fcYY5wcLE9YR4ttKuwshw',
    '$argon2i$v=16$m=8,t=1,p=1$bXlzYWx0eXNhbHQ$nz8csvIXGASHCkUia+K4Zg',
    '$argon2i$m=8,t=1,p=1$bXlzYWx0eXNhbHQ$nz8csvIXGASHCkUia+K4Zg',
))
def test_verify_legacy_passwords_without_secret(encoded):
    """Old hashes created without a keyid still verify successfully."""
    # Set high enough parameters to avoid triggering the safety check
    porridge = Porridge('key1:secret1', memory_cost=256, time_cost=1, parallelism=2)
    assert porridge.verify('password', encoded)
@pytest.mark.parametrize('encoded', (
    # malformed, and over-long encoded strings respectively
    'definitely not a valid',
    '$argon2i$m=8,t=1,p=1$bXlzYWx0eXNhbHQ$nz8csvIXGASHCkUia+K4Zg' + 'a' * 207,
))
def test_verify_invalid_encode(porridge, encoded):
    """Malformed or oversized encoded strings raise EncodedPasswordError."""
    with pytest.raises(EncodedPasswordError):
        porridge.verify('password', encoded)
@pytest.mark.parametrize('parameter', ('time_cost', 'memory_cost', 'parallelism'))
def test_verify_bails_on_values_higher_than_configured(porridge, parameter):
    """Hashes demanding far more work than configured are refused (DoS guard)."""
    parameters = {
        'time_cost': porridge.time_cost,
        'memory_cost': porridge.memory_cost,
        'parallelism': porridge.parallelism,
    }
    # Push a single parameter just past the allowed threshold multiple.
    parameters[parameter] *= porridge.parameter_threshold + 1
    encoded = get_encoded_password_with_parameters(parameters)
    with pytest.raises(EncodedPasswordError):
        porridge.verify('password', encoded)
@pytest.mark.parametrize('parameter', ('time_cost', 'memory_cost', 'parallelism'))
@given(threshold=integers(1, 8))
def test_verify_doesnt_bail_on_values_equal_to_threshold(parameter, threshold):
    """Parameters exactly at the threshold are processed, not rejected."""
    # Create an instance where memory_cost is at least the highest parallelism*8
    porridge = Porridge('key1:secret1', memory_cost=64, time_cost=1, parallelism=1,
        parameter_threshold=threshold)
    parameters = {
        'time_cost': porridge.time_cost,
        'memory_cost': porridge.memory_cost,
        'parallelism': porridge.parallelism,
    }
    parameters[parameter] *= porridge.parameter_threshold
    encoded = get_encoded_password_with_parameters(parameters)
    # Since the parameters are wrong the password should not be valid
    assert porridge.verify('password', encoded) == False
def get_encoded_password_with_parameters(parameters):
    """Build a fake argon2i encoded string carrying the given cost parameters.

    *parameters* must supply 'time_cost', 'memory_cost' and 'parallelism';
    the keyid/salt/hash tail is a fixed dummy value.
    """
    head = '$argon2i$v=19$m={memory_cost},t={time_cost},p={parallelism}'
    tail = ',keyid=key1$AhkxHIhp4o4KOuYBCbduUg$vXvsYVvrrzRdOMpVLXgs4w'
    return head.format(**parameters) + tail
| [
"[email protected]"
]
| |
59763f5bcd4fd02e277f6764e628fd7a08f72889 | a0e63dcefb114d024b2c56ae00a3525caebb8f31 | /shutit_threads.py | 2a0b1cf8198e5f85307393d8ec3630162227bfa6 | [
"MIT"
]
| permissive | andrzejsydor/shutit | 2eec3d66c3a3f973ee93e64b87c313fda9f5ea3b | 5f3fbb7236b7c18806c4156910de4425591c197a | refs/heads/master | 2020-03-22T13:54:22.854701 | 2018-07-07T13:32:37 | 2018-07-07T13:32:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,330 | py | import curtsies
from curtsies.events import PasteEvent
from curtsies.input import Input
import itertools
import time
import threading
import traceback
import sys
import os
# There are two threads running in ShutIt. The 'main' one, which drives the
# automation, and the 'watcher' one, which manages either the different view
# panes, or outputs a stack trace of the main thread if 'nothing happens' on it.
PY3 = sys.version_info[0] >= 3
# TODO: reject tmux sessions - it does not seem to play nice
# TODO: keep a time counter after the line
# TODO: show context of line (ie lines around)
# TODO: put the lines into an array of objects and mark the lines as inverted/not
def gather_module_paths():
    """Collect the set of absolute module search paths from all ShutIt objects.

    '.' entries are dropped, the original working directory (owd) is added,
    and any remaining relative path is resolved against owd.
    """
    import shutit_global
    shutit_global_object = shutit_global.shutit_global_object
    owd = shutit_global_object.owd
    shutit_module_paths = set()
    for shutit_object in shutit_global.shutit_global_object.shutit_objects:
        shutit_module_paths = shutit_module_paths.union(set(shutit_object.host['shutit_module_path']))
    if '.' in shutit_module_paths:
        shutit_module_paths.remove('.')
    shutit_module_paths.add(owd)
    # Rebuild the set instead of mutating it while iterating: the original
    # remove()/add() inside the loop raises RuntimeError on Python 3
    # ("Set changed size during iteration").
    shutit_module_paths = set(
        path if path.startswith('/') else owd + '/' + path
        for path in shutit_module_paths
    )
    return shutit_module_paths
def managing_thread_main():
import shutit_global
from shutit_global import SessionPaneLine
shutit_global.shutit_global_object.global_thread_lock.acquire()
shutit_module_paths = gather_module_paths()
shutit_global.shutit_global_object.global_thread_lock.release()
shutit_global.shutit_global_object.stacktrace_lines_arr = [SessionPaneLine('',time.time(),'log'),]
last_code = []
draw_type = 'default'
zoom_state = None
while True:
# We have acquired the lock, so read in input
with Input() as input_generator:
input_char = input_generator.send(0.001)
if input_char == 'r':
# Rotate sessions at the bottom
shutit_global.shutit_global_object.lower_pane_rotate_count += 1
elif input_char == '1':
if zoom_state == 1:
draw_type = 'default'
zoom_state = None
else:
draw_type = 'zoomed1'
zoom_state = 1
elif input_char == '2':
if zoom_state == 2:
draw_type = 'default'
zoom_state = None
else:
draw_type = 'zoomed2'
zoom_state = 2
elif input_char == '3':
if zoom_state == 3:
draw_type = 'default'
zoom_state = None
else:
draw_type = 'zoomed3'
zoom_state = 3
elif input_char == '4':
if zoom_state == 4:
draw_type = 'default'
zoom_state = None
else:
draw_type = 'zoomed4'
zoom_state = 4
elif input_char == 'q':
draw_type = 'clearscreen'
shutit_global.shutit_global_object.pane_manager.draw_screen(draw_type=draw_type)
os.system('reset')
os._exit(1)
# Acquire lock to write screen. Prevents nasty race conditions.
# Different depending PY2/3
if PY3:
if not shutit_global.shutit_global_object.global_thread_lock.acquire(blocking=False):
time.sleep(0.01)
continue
else:
if not shutit_global.shutit_global_object.global_thread_lock.acquire(False):
time.sleep(0.01)
continue
code = []
for thread_id, stack in sys._current_frames().items():
# ignore own thread:
if thread_id == threading.current_thread().ident:
continue
for filename, lineno, name, line in traceback.extract_stack(stack):
# if the file is in the same folder or subfolder as a folder in: self.host['shutit_module_path']
# then show that context
for shutit_module_path in shutit_module_paths:
if filename.find(shutit_module_path) == 0:
if len(shutit_global.shutit_global_object.stacktrace_lines_arr) == 0 or shutit_global.shutit_global_object.stacktrace_lines_arr[-1] != line:
linearrow = '===> ' + str(line)
code.append('_' * 80)
code.append('=> %s:%d:%s' % (filename, lineno, name))
code.append('%s' % (linearrow,))
from_lineno = lineno - 5
if from_lineno < 0:
from_lineno = 0
to_lineno = 10
else:
to_lineno = lineno + 5
lineno_count = from_lineno
with open(filename, "r") as f:
for line in itertools.islice(f, from_lineno, to_lineno):
line = line.replace('\t',' ')
lineno_count += 1
if lineno_count == lineno:
code.append('***' + str(lineno_count) + '> ' + line.rstrip())
else:
code.append('===' + str(lineno_count) + '> ' + line.rstrip())
code.append('_' * 80)
if code != last_code:
for line in code:
shutit_global.shutit_global_object.stacktrace_lines_arr.append(SessionPaneLine(line,time.time(),'log'))
last_code = code
shutit_global.shutit_global_object.pane_manager.draw_screen(draw_type=draw_type)
shutit_global.shutit_global_object.global_thread_lock.release()
def track_main_thread():
    """Start the pane-managing watcher (managing_thread_main) as a daemon
    thread so it never blocks interpreter shutdown."""
    t = threading.Thread(target=managing_thread_main)
    t.daemon = True
    t.start()
def managing_thread_main_simple():
    """Simpler thread to track whether main thread has been quiet for long enough
    that a thread dump should be printed.
    """
    import shutit_global
    last_msg = ''
    while True:
        printed_anything = False
        # Only dump when idle tracing is enabled and nothing was logged for 10s.
        if shutit_global.shutit_global_object.log_trace_when_idle and time.time() - shutit_global.shutit_global_object.last_log_time > 10:
            this_msg = ''
            for thread_id, stack in sys._current_frames().items():
                # ignore own thread:
                if thread_id == threading.current_thread().ident:
                    continue
                printed_thread_started = False
                for filename, lineno, name, line in traceback.extract_stack(stack):
                    # Emit the banner lazily so quiet iterations stay silent.
                    if not printed_anything:
                        printed_anything = True
                        this_msg += '='*80 + '\n'
                        this_msg += 'STACK TRACES PRINTED ON IDLE: THREAD_ID: ' + str(thread_id) + ' at ' + time.strftime('%c') + '\n'
                        this_msg += '='*80 + '\n'
                    if not printed_thread_started:
                        printed_thread_started = True
                    # NOTE(review): indentation was lost in this copy; the two
                    # appends below are reconstructed as per-frame output --
                    # confirm against upstream shutit_threads.py.
                    this_msg += '%s:%d:%s' % (filename, lineno, name) + '\n'
                    if line:
                        this_msg += ' %s' % (line,) + '\n'
            if printed_anything:
                this_msg += '='*80 + '\n'
                this_msg += 'STACK TRACES DONE\n'
                this_msg += '='*80 + '\n'
            # Avoid re-printing an identical dump every polling interval.
            if this_msg != last_msg:
                print(this_msg)
                last_msg = this_msg
        time.sleep(5)
def track_main_thread_simple():
    """Start the idle stack-dump watcher (managing_thread_main_simple) as a
    daemon thread so it never blocks interpreter shutdown."""
    t = threading.Thread(target=managing_thread_main_simple)
    t.daemon = True
    t.start()
| [
"[email protected]"
]
| |
1820e9bea144a350bba9d060cb30654b92aefb91 | 1f1048624ee8d71101ae1127c9aa9c9dc81b857f | /tests/test_bug_fixes.py | bb5c7e5d42f69d5ddf7e161a607b0a2909c18b24 | [
"BSD-3-Clause"
]
| permissive | netpastor/pyexcel | 434983a942de4d70549bcec578854d3da241c576 | 0c126b9e4c650c6735665c79e616546149f2b717 | refs/heads/master | 2021-01-19T10:40:19.107503 | 2017-04-10T07:04:50 | 2017-04-10T15:13:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,968 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from textwrap import dedent
import pyexcel as pe
from datetime import datetime
from _compact import StringIO, OrderedDict
from nose.tools import eq_
def test_bug_01():
    """
    if first row of csv is shorter than the rest of the rows,
    the csv will be truncated by first row. This is a bug

    "a,d,e,f" <- this will be 1
    '1',2,3,4 <- 4
    '2',3,4,5
    'b' <- give '' for missing cells
    """
    r = pe.Reader(os.path.join("tests", "fixtures", "bug_01.csv"))
    # Row width must follow the widest row, not the (shorter) first row.
    assert len(r.row[0]) == 4
    # test "" is append for empty cells
    assert r[0, 1] == ""
    assert r[3, 1] == ""
def test_issue_03():
    """A custom sheet name must survive save_as() for both csv and xls."""
    file_prefix = "issue_03_test"
    csv_file = "%s.csv" % file_prefix
    xls_file = "%s.xls" % file_prefix
    my_sheet_name = "mysheetname"
    data = [[1, 1]]
    sheet = pe.Sheet(data, name=my_sheet_name)
    sheet.save_as(csv_file)
    assert(os.path.exists(csv_file))
    sheet.save_as(xls_file)
    book = pe.load_book(xls_file)
    # The sheet name must be preserved in the multi-sheet xls format.
    assert book.sheet_names()[0] == my_sheet_name
    # Clean up the files written into the working directory.
    os.unlink(csv_file)
    os.unlink(xls_file)
def test_issue_06():
    """Saving a Book to an in-memory csv stream must produce readable output."""
    import logging
    logger = logging.getLogger("test")
    logger.setLevel(logging.DEBUG)
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    logger.addHandler(ch)
    output = StringIO()
    book = pe.Book({'hoja1': [['datos', 'de', 'prueba'], [1, 2, 3]], })
    book.save_to_memory('csv', output)
    # Merely logging the content verifies the stream is filled and decodable.
    logger.debug(output.getvalue())
def test_issue_09():
    """Auto-generated sheet names must increment and not collide on append."""
    pe.book.LOCAL_UUID = 0  # reset the module-level counter for a deterministic name
    merged = pe.Book()
    sheet1 = pe.Sheet(sheet=[[1, 2]])
    sheet2 = pe.Sheet(sheet=[[1, 2]])
    merged += sheet1
    merged += sheet2
    # Second unnamed sheet gets the "_1" suffix instead of clobbering the first.
    eq_(merged[1].name, "pyexcel sheet_1")
def test_issue_10():
    """get_dict() must return an OrderedDict preserving the column order."""
    thedict = OrderedDict()
    thedict.update({"Column 1": [1, 2, 3]})
    thedict.update({"Column 2": [1, 2, 3]})
    thedict.update({"Column 3": [1, 2, 3]})
    pe.save_as(adict=thedict, dest_file_name="issue10.xls")
    newdict = pe.get_dict(file_name="issue10.xls")
    assert isinstance(newdict, OrderedDict) is True
    # Round trip must preserve both values and insertion order.
    assert thedict == newdict
    os.unlink("issue10.xls")
def test_issue_29():
    """Date/number-like strings must be kept verbatim, not auto-converted."""
    a = [
        # error case
        ['2016-03-31 10:59', '0123', 'XS360_EU', '04566651561653122'],
        # python types
        [datetime(2016, 4, 15, 17, 52, 11), 123, False, 456193284757]
    ]
    s = pe.get_sheet(array=a)
    # First row: strings stay untouched (leading zeros, pseudo-dates kept);
    # second row: real python types get their default text rendering.
    content = dedent("""
    pyexcel_sheet1:
    +------------------+------+----------+-------------------+
    | 2016-03-31 10:59 | 0123 | XS360_EU | 04566651561653122 |
    +------------------+------+----------+-------------------+
    | 15/04/16         | 123  | false    | 456193284757      |
    +------------------+------+----------+-------------------+""")
    eq_(str(s), content.strip('\n'))
def test_issue_29_nominablesheet():
    """Same as test_issue_29, but with the first row promoted to column names."""
    a = [
        ['date', 'number', 'misc', 'long number'],
        # error case
        ['2016-03-31 10:59', '0123', 'XS360_EU', '04566651561653122'],
        # python types
        [datetime(2016, 4, 15, 17, 52, 11), 123, False, 456193284757]
    ]
    s = pe.get_sheet(array=a)
    s.name_columns_by_row(0)
    # The "=" separator marks the header row in the text rendering.
    content = dedent("""
    pyexcel_sheet1:
    +------------------+--------+----------+-------------------+
    | date             | number | misc     | long number       |
    +==================+========+==========+===================+
    | 2016-03-31 10:59 | 0123   | XS360_EU | 04566651561653122 |
    +------------------+--------+----------+-------------------+
    | 15/04/16         | 123    | false    | 456193284757      |
    +------------------+--------+----------+-------------------+""")
    eq_(str(s), content.strip('\n'))
def test_issue_51_orderred_dict_in_records():
    """RecordsReader must keep the key order of OrderedDict records."""
    from pyexcel.plugins.sources.pydata import RecordsReader
    records = []
    orderred_dict = OrderedDict()
    orderred_dict.update({"Zebra": 10})
    orderred_dict.update({"Hippo": 9})
    orderred_dict.update({"Monkey": 8})
    records.append(orderred_dict)
    orderred_dict2 = OrderedDict()
    orderred_dict2.update({"Zebra": 1})
    orderred_dict2.update({"Hippo": 2})
    orderred_dict2.update({"Monkey": 3})
    records.append(orderred_dict2)
    records_reader = RecordsReader(records)
    array = list(records_reader.to_array())
    # Header row follows insertion order of the OrderedDict keys.
    expected = [['Zebra', 'Hippo', 'Monkey'], [10, 9, 8], [1, 2, 3]]
    eq_(array, expected)
def test_issue_51_normal_dict_in_records():
    """Plain-dict records get a deterministic (sorted) column order."""
    from pyexcel.plugins.sources.pydata import RecordsReader
    records = []
    orderred_dict = {}
    orderred_dict.update({"Zebra": 10})
    orderred_dict.update({"Hippo": 9})
    orderred_dict.update({"Monkey": 8})
    records.append(orderred_dict)
    orderred_dict2 = {}
    orderred_dict2.update({"Zebra": 1})
    orderred_dict2.update({"Hippo": 2})
    orderred_dict2.update({"Monkey": 3})
    records.append(orderred_dict2)
    records_reader = RecordsReader(records)
    array = list(records_reader.to_array())
    # Unlike the OrderedDict case, keys come out alphabetically sorted.
    expected = [['Hippo', 'Monkey', 'Zebra'], [9, 8, 10], [2, 3, 1]]
    eq_(array, expected)
def test_issue_55_unicode_in_headers():
    """Non-ASCII column headers must survive name_columns_by_row()."""
    headers = [u'Äkkilähdöt', u'Matkakirjoituksia', u'Matkatoimistot']
    content = [headers, [1, 2, 3]]
    sheet = pe.Sheet(content)
    sheet.name_columns_by_row(0)
    eq_(sheet.colnames, headers)
def test_issue_60_chinese_text_in_python_2_stdout():
    """repr() of a sheet with CJK text must not raise under Python 2 stdout."""
    import sys
    data = [['这', '是', '中', '文'], ['这', '是', '中', '文']]
    sheet = pe.Sheet(data)
    # Writing the repr is the assertion: a UnicodeEncodeError would fail the test.
    sys.stdout.write(repr(sheet))
def test_issue_60_chinese_text_in_python_2_stdout_on_book():
    """Same as the sheet variant, but for a whole Book's repr()."""
    import sys
    adict = {"Sheet 1": [['这', '是', '中', '文'], ['这', '是', '中', '文']]}
    book = pe.Book()
    book.bookdict = adict
    # Writing the repr is the assertion: a UnicodeEncodeError would fail the test.
    sys.stdout.write(repr(book))
def test_issue_63_empty_array_crash_texttable_renderer():
    """Rendering an empty sheet must not crash the texttable renderer."""
    sheet = pe.Sheet([])
    print(sheet)
def test_xls_issue_11():
    """Upper-case file type strings ('XLS', '.JSON') must be accepted."""
    data = [[1, 2]]
    sheet = pe.Sheet(data)
    sheet2 = pe.get_sheet(file_content=sheet.xls, file_type='XLS')
    eq_(sheet.array, sheet2.array)
    test_file = 'xls_issue_11.JSON'
    sheet2.save_as(test_file)
    os.unlink(test_file)
def test_issue_68():
    """save_to_memory('csv') must return a readable stream for Sheet and Book."""
    data = [[1]]
    sheet = pe.Sheet(data)
    stream = sheet.save_to_memory('csv')
    eq_(stream.read(), '1\r\n')
    data = {"sheet": [[1]]}
    book = pe.Book(data)
    stream = book.save_to_memory('csv')
    eq_(stream.read(), '1\r\n')
def test_issue_74():
    """Decimal values must render via the texttable without crashing."""
    from decimal import Decimal
    data = [[Decimal("1.1")]]
    sheet = pe.Sheet(data)
    table = sheet.texttable
    expected = 'pyexcel sheet:\n+-----+\n| 1.1 |\n+-----+'
    eq_(table, expected)
def test_issue_76():
    """A custom delimiter keyword must be honoured when reading csv streams."""
    from pyexcel._compact import StringIO
    tsv_stream = StringIO()
    tsv_stream.write('1\t2\t3\t4\n')
    tsv_stream.write('1\t2\t3\t4\n')
    tsv_stream.seek(0)
    # file_type stays 'csv'; only the delimiter differs (tab-separated).
    sheet = pe.get_sheet(file_stream=tsv_stream, file_type='csv',
                         delimiter='\t')
    data = [
        [1, 2, 3, 4],
        [1, 2, 3, 4]
    ]
    eq_(sheet.array, data)
| [
"[email protected]"
]
| |
69869813ffb36d5a2c382d673696ec7c7fd2fbe9 | 084e35c598426b1137f9cd502e1b5e7f09cdf034 | /leetcode_weekly_competition/226周赛/1.py | 09da9b65e006ed6ec0c0456861c0ba4c9e9e6cf0 | []
| no_license | sakurasakura1996/Leetcode | 3a941dadd198ee2f54b69057ae3bbed99941974c | 78f239959af98dd3bd987fb17a3544010e54ae34 | refs/heads/master | 2021-09-11T05:07:44.987616 | 2021-09-07T05:39:34 | 2021-09-07T05:39:34 | 240,848,992 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | class Solution:
def countBalls(self, lowLimit: int, highLimit: int) -> int:
ans = [0] * 46
for i in range(lowLimit, highLimit+1):
tmp = 0
cur = i
while cur:
tmp += int(cur%10)
cur = int(cur/10)
ans[tmp] += 1
return max(ans)
if __name__ == '__main__':
    # Ad-hoc manual check against a contest-sized input range.
    solu = Solution()
    lowLimit = 52603
    highLimit = 87295
    ans = solu.countBalls(lowLimit, highLimit)
    print(ans)
"[email protected]"
]
| |
491e8253f9d4c90d5822a1fc62bb284b383d4408 | cc595296b60913bfd6e718c60aaa68d9a5008781 | /profiler/weather_plot.py | 74168fba17d23322089332a6a9ef6e24b4c0039b | []
| no_license | JBlaschke/divelite | f04e24afe1b160702a95878586210d9739141222 | e9a54c67ab8c201003783e50da3a9a46acf24507 | refs/heads/master | 2021-05-21T05:40:25.118724 | 2020-10-11T01:02:56 | 2020-10-11T01:02:56 | 252,570,721 | 0 | 0 | null | 2020-04-02T21:41:56 | 2020-04-02T21:41:55 | null | UTF-8 | Python | false | false | 939 | py | import sys
import numpy as np
import matplotlib.pyplot as plt
# Usage: weather_plot.py <log_file> <world_size>
# The log file contains "<rank> <timestamp>" pairs, one per line.
log_fname = sys.argv[1]
size = int(sys.argv[2])
data = {}
# Group timestamps per MPI rank.
with open(log_fname) as f:
    for line in f:
        cols = line.split()
        if len(cols) == 2:
            rank = int(cols[0])
            ts = float(cols[1])
            if rank not in data:
                data[rank] = [ts]
            else:
                data[rank].append(ts)
deltas = []
# Scatter timestamps per rank (rank 0 skipped — presumably the master rank;
# TODO confirm) and collect consecutive inter-event gaps.
for i in range(1, size):
    if i in data:
        plt.scatter([i]*len(data[i]), data[i], s=2, marker='o')
        # calculate delta
        ts_list = data[i]
        for j in range(len(ts_list)-1):
            deltas.append(ts_list[j+1] - ts_list[j])
plt.show()
# plot histogram of deltas
deltas = np.asarray(deltas)
plt.hist(deltas, bins=100)
plt.title("No. points %d More than 0.5s %d Min %f Max %f Mean %f"%(deltas.size, deltas[deltas>0.5].size, np.min(deltas), np.max(deltas), np.average(deltas)))
plt.show()
| [
"[email protected]"
]
| |
ba0e2b9495f7c37519b04935cc2e4a99f79786e5 | 4fc1c45a7e570cc1204d4b5f21150f0771d34ea5 | /quan_table/read_data/read_data_test.py | 7f8abcee5c4e3a5e9366bbd3813bec0c96321901 | []
| no_license | CN1Ember/feathernet_mine | 77d29576e4ecb4f85626b94e6ff5884216af3098 | ac0351f59a1ed30abecd1088a46c7af01afa29d5 | refs/heads/main | 2023-05-28T17:19:06.624448 | 2021-06-17T04:39:09 | 2021-06-17T04:39:09 | 374,603,757 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,150 | py | from PIL import Image
import numpy as np
import os
from torch.utils.data import Dataset
import math
import cv2
import torchvision
import torch
import random
class CASIA(Dataset):
    """Validation/test image reader driven by per-dataset file lists.

    NOTE(review): despite the phase_train/phase_test flags, __init__ only
    loads the validation lists; __len__ references self.depth_dir_train /
    self.depth_dir_test which are never assigned here, so constructing with
    phase_train=True or phase_test=True would raise AttributeError in
    __len__ — confirm this module is only used as the test-time reader.
    """

    def __init__(self, data_flag=None, transform=None, phase_train=True, data_dir=None, phase_test=False, add_mask=True):
        self.phase_train = phase_train
        self.phase_test = phase_test
        self.transform = transform
        self.add_mask = add_mask
        self.mask_np = None
        # for val
        val_file = os.getcwd() + '/data/test_file_list/%s_val.txt' % data_flag
        label_val_file = os.getcwd() + '/data/test_file_list/%s_val_label.txt' % data_flag
        # presumably a 112x112 binary mask aligned with the NIR crops — TODO confirm
        self.mask_np = np.fromfile('./data/mask_file/mask_for_nir.bin', np.uint8).reshape((112, 112))
        try:
            with open(val_file, 'r') as f:
                self.depth_dir_val = f.read().splitlines()
            with open(label_val_file, 'r') as f:
                self.label_dir_val = f.read().splitlines()
        except:
            # NOTE(review): bare except + exit() also swallows KeyboardInterrupt;
            # kept as-is here, consider narrowing to OSError.
            print('can not open files, may be filelist is not exist')
            exit()

    def __len__(self):
        # Number of samples in the selected phase (see class NOTE: only the
        # validation branch is actually usable with this constructor).
        if self.phase_train:
            return len(self.depth_dir_train)
        else:
            if self.phase_test:
                return len(self.depth_dir_test)
            else:
                return len(self.depth_dir_val)

    def __getitem__(self, idx):
        # Always reads from the validation lists, regardless of phase flags.
        depth_dir = self.depth_dir_val
        label_dir = self.label_dir_val
        label = int(label_dir[idx])
        label = np.array(label)
        depth = Image.open(depth_dir[idx])
        # depth = depth.convert('RGB')
        depth = depth.convert('L')  # single-channel (grayscale) input
        # Horizontal flip augmentation, disabled for evaluation:
        # if random.randint(0,9) < 5:
        #     depth = depth.transpose(Image.FLIP_LEFT_RIGHT)
        '''transform'''
        if self.transform:
            depth = self.transform(depth)
        if self.phase_train:
            return depth, label
        else:
            # Evaluation mode additionally returns the image path for bookkeeping.
            return depth, label, depth_dir[idx]
| [
"[email protected]"
]
| |
9415639e3ab0499619b3e5d066463c3db20866ca | 2a07f85d91050192f5eaa8d5c72fc2e0fbdfb015 | /bmi/pages.py | 32dfdd5024793394903a3fd024d64f2e7af4104a | []
| no_license | chapkovski/hse-otree-workshop | b95c9e18fc49908a820f15666dc56ffce9e39c49 | 6a83b6b3543c3079408f50c7c3e5a22179862446 | refs/heads/master | 2020-04-20T08:37:27.161917 | 2019-02-01T19:22:22 | 2019-02-01T19:22:22 | 168,744,729 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 675 | py | from otree.api import Currency as c, currency_range
from ._builtin import Page, WaitPage
from .models import Constants
class Input(Page):
    """Collect the participant's height and weight."""
    form_model = 'player'
    form_fields = ['height', 'weight']
class ResultsWaitPage(WaitPage):
    """Synchronisation point before results, shown only in the interpersonal treatment."""

    def is_displayed(self):
        # if this is interpersonal treatment (in other words 'individual' is False in settings then
        # they should wait for the partner. It is not necessary if we are in individual treatment
        return not self.session.config.get('individual')

    def after_all_players_arrive(self):
        # No group-level computation needed once everyone has arrived.
        pass
class Results(Page):
    """Display the results (rendered entirely by the template)."""
    pass
# Order in which pages are shown to each participant.
page_sequence = [
    Input,
    ResultsWaitPage,
    Results
]
| [
"[email protected]"
]
| |
3927f0b16b37f189e7300890368a1e2fe92314dd | 3435505aa0b760eddf175a9a8f62119952020380 | /wienerschnitzelgemeinschaft/src/Christof/models/ResNet34/3/base_100/train_f3.py | 4146a3970f61333a42bd7a296c1353a7f224af2d | [
"MIT"
]
| permissive | CellProfiling/HPA-competition-solutions | 2cc365b2be086eec382136856ebcf790d4eaabb9 | 547d53aaca148fdb5f4585526ad7364dfa47967d | refs/heads/master | 2022-12-16T16:58:15.578281 | 2020-07-09T23:04:55 | 2020-07-09T23:04:55 | 223,605,040 | 61 | 25 | MIT | 2022-11-22T04:37:30 | 2019-11-23T14:49:57 | Jupyter Notebook | UTF-8 | Python | false | false | 8,888 | py | import os, sys
#os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]="2"
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import skimage.io
from skimage.transform import resize
from imgaug import augmenters as iaa
from tqdm import tqdm
import PIL
from PIL import Image
import cv2
from sklearn.utils import class_weight, shuffle
from ml_stratifiers import MultilabelStratifiedKFold
import albumentations as A
import warnings
warnings.filterwarnings("ignore")
from classification_models.resnet.models import ResNet34
from classification_models.resnet import preprocess_input
#from keras.applications.densenet import DenseNet121, preprocess_input
MODEL_PATH = 'Christof/models/ResNet34/3/base_100/'
SIZE = 256
fold_id = 3
# Load dataset info
path_to_train = 'Christof/assets/train_rgb_256/'
data = pd.read_csv('Christof/assets/train.csv')
clusters = pd.read_csv('Russ/cluster4_folds.csv')
folds = dict(zip(clusters.Id,clusters.cluster4))
data['fold'] = data['Id'].apply(lambda x: folds[x])
def get_fold_ids(fold_id, data_set_info, shuff=True):
    """Split dataset indices into (train, validation) index arrays by fold id.

    @param fold_id: the fold whose items form the validation set
    @param data_set_info: sequence of dicts, each with a 'fold' key
    @param shuff: if True, return the ids in random order
    @return: tuple (train_ids, val_ids) of numpy int arrays
    """
    fold_info = np.array([item['fold'] for item in data_set_info])
    val_ids = np.where(fold_info == fold_id)[0]
    train_ids = np.where(fold_info != fold_id)[0]
    if shuff:
        # BUG FIX: sklearn.utils.shuffle returns shuffled *copies* and does
        # not operate in place; the original discarded the return values, so
        # the ids were silently left in sorted order.
        val_ids = shuffle(val_ids)
        train_ids = shuffle(train_ids)
    return train_ids, val_ids
normal_aug = A.Compose([A.OneOf([A.Rotate((-180,180)),
A.Rotate((-180,180),border_mode=cv2.BORDER_CONSTANT)]),
A.Flip(p=0.75)
])
train_dataset_info = []
for name, labels in zip(data['Id'], data['Target'].str.split(' ')):
train_dataset_info.append({
'path': os.path.join(path_to_train, name),
'labels': np.array([int(label) for label in labels]),
'fold':folds[name]})
train_dataset_info = np.array(train_dataset_info)
counts = np.zeros(28)
for item in train_dataset_info:
for l in item['labels']:
counts[l] = counts[l] + 1
counts = counts / len(train_dataset_info)
rare_classes = np.where(counts < 0.005)
class data_generator:
    """Static helpers yielding endless (images, labels) batches for Keras fit_generator."""

    @staticmethod
    def create_train(dataset_info, batch_size, shape, augument=True, oversample_factor=0):
        # Infinite generator: yields (batch, H, W, 3) float32 images and
        # (batch, 28) multi-hot label arrays; reshuffles each epoch.
        assert shape[2] == 3
        if oversample_factor > 0:
            # Append `oversample_factor` extra copies of every sample that
            # contains at least one rare class (module-level `rare_classes`).
            rare_dataset_info = np.array([item for item in dataset_info if np.isin(item['labels'], rare_classes).any()])
            for i in range(oversample_factor):
                dataset_info = np.append(dataset_info, rare_dataset_info)
        while True:
            dataset_info = shuffle(dataset_info)
            for start in range(0, len(dataset_info), batch_size):
                end = min(start + batch_size, len(dataset_info))
                batch_images = []
                X_train_batch = dataset_info[start:end]
                batch_labels = np.zeros((len(X_train_batch), 28))
                for i in range(len(X_train_batch)):
                    image = data_generator.load_image(X_train_batch[i]['path'])
                    #rare = np.isin(X_train_batch[i]['labels'], rare_classes).any()
                    if augument:
                        # Uses the module-level albumentations pipeline `normal_aug`.
                        image = data_generator.augment(normal_aug, image)
                    batch_images.append(image)
                    batch_labels[i][X_train_batch[i]['labels']] = 1
                yield np.array(batch_images, np.float32), batch_labels

    @staticmethod
    def load_image(path):
        # Reads "<path>.png" unchanged and applies the ResNet preprocessing.
        image = cv2.imread(path + '.png', cv2.IMREAD_UNCHANGED)
        image = preprocess_input(image)
        return image

    @staticmethod
    def augment(aug, image):
        # Applies an albumentations pipeline to a single image.
        image_aug = aug(image=image)['image']
        return image_aug
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential, load_model
from keras.layers import Activation, Dropout, Flatten, Dense, GlobalMaxPooling2D, BatchNormalization, Input, Conv2D
from keras.callbacks import ModelCheckpoint
from keras import metrics
from keras.optimizers import Adam
from keras import backend as K
import keras
from keras.models import Model
def create_model(input_shape, n_out):
    """Build a ResNet34 backbone with a small sigmoid classification head.

    @param input_shape: input image shape, e.g. (SIZE, SIZE, 3)
    @param n_out: number of independent sigmoid outputs (28 protein classes)
    @return: an uncompiled keras Model
    """
    # FIX: honour the input_shape argument; the original hard-coded
    # (SIZE, SIZE, 3) from the module global and silently ignored the
    # parameter. Existing callers pass (SIZE, SIZE, 3), so behaviour
    # is unchanged for them.
    input_tensor = Input(shape=input_shape)
    base_model = ResNet34(include_top=False,
                          weights='imagenet',
                          input_shape=input_shape,
                          input_tensor=input_tensor)
    x = base_model.output
    # 1x1 conv to compress the feature maps before the dense head.
    x = Conv2D(32, kernel_size=(1, 1), activation='relu')(x)
    x = Flatten()(x)
    x = Dropout(0.5)(x)
    x = Dense(1024, activation='relu')(x)
    x = Dropout(0.5)(x)
    # Independent sigmoids: multi-label classification, not softmax.
    output = Dense(n_out, activation='sigmoid')(x)
    model = Model(input_tensor, output)
    return model
# create callbacks list
from keras.callbacks import ModelCheckpoint, LearningRateScheduler, EarlyStopping, ReduceLROnPlateau, TensorBoard
from keras_callbacks import F1Metric
from keras_metrics import f1, f1_02
from keras_losses import f1_loss
epochs = 100
batch_size = 32
# split data into train, valid
mskf = MultilabelStratifiedKFold(n_splits=5,shuffle=True,random_state=18)
#y = np.zeros((len(train_dataset_info), 28))
#for i in range(len(train_dataset_info)):
# y[i][train_dataset_info[i]['labels']] = 1
#mskf.get_n_splits(train_dataset_info, y)
#kf = mskf.split(train_dataset_info, y)
train_indexes, valid_indexes = get_fold_ids(fold_id, train_dataset_info)
#train_indexes, valid_indexes = next(kf)
train_generator = data_generator.create_train(train_dataset_info[train_indexes],
batch_size, (SIZE, SIZE, 3), augument=True, oversample_factor=3)
validation_generator = data_generator.create_train(train_dataset_info[valid_indexes],
1, (SIZE, SIZE, 3), augument=False,oversample_factor=0)
checkpoint = ModelCheckpoint(MODEL_PATH + 'model_f{}.h5'.format(fold_id), monitor='val_f1_all', verbose=1,
save_best_only=True, mode='max', save_weights_only=True)
tensorboard = TensorBoard(MODEL_PATH + 'logs{}_'.format(fold_id) + '/')
f1_metric = F1Metric(validation_generator,len(valid_indexes)//1,1,28)
callbacks_list = [f1_metric, checkpoint, tensorboard]
# warm up model
model = create_model(
input_shape=(SIZE, SIZE, 3),
n_out=28)
model.compile(loss='binary_crossentropy',
optimizer=Adam(lr=1e-4),
metrics=['acc',f1,f1_02])
model.fit_generator(
train_generator,
steps_per_epoch=np.ceil(float(len(train_indexes)) / float(batch_size)),
validation_data=validation_generator,
validation_steps=np.ceil(float(len(valid_indexes)) / float(batch_size)),
epochs=epochs,
verbose=1,
callbacks=callbacks_list)
model.save_weights(MODEL_PATH + 'model_f{}_e{}.h5'.format(fold_id,epochs))
model.load_weights(MODEL_PATH + 'model_f{}.h5'.format(fold_id))
preds = np.zeros(shape=(len(valid_indexes),28))
preds_05 = np.zeros(shape=(len(valid_indexes),28))
y_true= np.zeros(shape=(len(valid_indexes),28))
for i, info in tqdm(enumerate(train_dataset_info[valid_indexes])):
image = data_generator.load_image(info['path'])
score_predict = model.predict(image[np.newaxis])[0]
thresh = max(score_predict[np.argsort(score_predict, axis=-1)[-5]], 0.2)
preds[i][score_predict >= thresh] = 1
preds_05[i][score_predict >= 0.5] = 1
y_true[i][info['labels']]=1
from sklearn.metrics import f1_score
individual_f1_scores = np.zeros(28)
for i in range(28):
individual_f1_scores[i] = f1_score(y_true[:,i],preds[:,i])
individual_f1_scores = pd.DataFrame(individual_f1_scores,columns=['f1'])
individual_f1_scores.to_csv(MODEL_PATH + f'summary_f1_score_f{fold_id}.csv',index=False)
f1_res = f1_score(y_true, preds, average='macro')
f1_res_05 = f1_score(y_true, preds_05, average='macro')
print(f1_res)
print(f1_res_05)
SUBMISSION = False
if SUBMISSION:
submit = pd.read_csv('Christof/assets/sample_submission.csv')
predicted = []
draw_predict = []
for name in tqdm(submit['Id']):
path = os.path.join('Christof/assets/test_rgb_256/', name)
image = data_generator.load_image(path)
score_predict = model.predict(image[np.newaxis])[0]
draw_predict.append(score_predict)
thresh = max(score_predict[np.argsort(score_predict, axis=-1)[-5]],0.2)
label_predict = np.arange(28)[score_predict >= thresh]
str_predict_label = ' '.join(str(l) for l in label_predict)
predicted.append(str_predict_label)
submit['Predicted'] = predicted
#np.save('draw_predict_InceptionV3.npy', score_predict)
submit.to_csv(MODEL_PATH + 'submission_f{}_{:.4}.csv'.format(fold_id,f1_res), index=False)
| [
"[email protected]"
]
| |
6cc1e2beb70e8f1074f9000fb8aed4f4aedc7fff | 4a1dab08ded9dd4e8130b1ae20def614b91ae27b | /modules/s3/s3query.py | 42799260aeff939e3963e35b1f94419570c24bbf | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | frededk/eden | b8c780ae9b5e3dabc8e4a9fa92f9526db9dbe3b2 | 993a05a3ec95441ef1b0ad4f607650e645268808 | refs/heads/master | 2021-01-17T12:23:54.574928 | 2014-06-06T16:55:17 | 2014-06-06T16:55:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 70,138 | py | # -*- coding: utf-8 -*-
""" S3 Query Construction
@copyright: 2009-2014 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import datetime
import re
import sys
from gluon import current
from gluon.dal import Row, Field, Query
from gluon.storage import Storage
from s3fields import S3RepresentLazy
from s3utils import s3_get_foreign_key, s3_unicode, S3TypeConverter
ogetattr = object.__getattribute__
TEXTTYPES = ("string", "text")
# =============================================================================
class S3FieldSelector(object):
    """ Helper class to construct a resource query """

    LOWER = "lower"
    UPPER = "upper"

    OPERATORS = [LOWER, UPPER]

    def __init__(self, name, type=None):
        """
            Constructor

            @param name: the field selector string (e.g. "~.name")
            @param type: optional type hint for the addressed field
        """

        if not isinstance(name, basestring) or not name:
            raise SyntaxError("name required")
        self.name = str(name)
        self.type = type

        # Pending value transformation (LOWER|UPPER), applied by expr()
        self.op = None

    # -------------------------------------------------------------------------
    # Comparison/containment operators, each constructing an S3ResourceQuery
    #
    def __lt__(self, value):
        return S3ResourceQuery(S3ResourceQuery.LT, self, value)

    # -------------------------------------------------------------------------
    def __le__(self, value):
        return S3ResourceQuery(S3ResourceQuery.LE, self, value)

    # -------------------------------------------------------------------------
    def __eq__(self, value):
        return S3ResourceQuery(S3ResourceQuery.EQ, self, value)

    # -------------------------------------------------------------------------
    def __ne__(self, value):
        return S3ResourceQuery(S3ResourceQuery.NE, self, value)

    # -------------------------------------------------------------------------
    def __ge__(self, value):
        return S3ResourceQuery(S3ResourceQuery.GE, self, value)

    # -------------------------------------------------------------------------
    def __gt__(self, value):
        return S3ResourceQuery(S3ResourceQuery.GT, self, value)

    # -------------------------------------------------------------------------
    def like(self, value):
        return S3ResourceQuery(S3ResourceQuery.LIKE, self, value)

    # -------------------------------------------------------------------------
    def belongs(self, value):
        return S3ResourceQuery(S3ResourceQuery.BELONGS, self, value)

    # -------------------------------------------------------------------------
    def contains(self, value):
        return S3ResourceQuery(S3ResourceQuery.CONTAINS, self, value)

    # -------------------------------------------------------------------------
    def anyof(self, value):
        return S3ResourceQuery(S3ResourceQuery.ANYOF, self, value)

    # -------------------------------------------------------------------------
    def typeof(self, value):
        return S3ResourceQuery(S3ResourceQuery.TYPEOF, self, value)

    # -------------------------------------------------------------------------
    def lower(self):
        """ Request lower-case transformation of the field value """
        self.op = self.LOWER
        return self

    # -------------------------------------------------------------------------
    def upper(self):
        """ Request upper-case transformation of the field value """
        self.op = self.UPPER
        return self

    # -------------------------------------------------------------------------
    def expr(self, val):
        """
            Apply the pending lower/upper transformation to a value (or to
            a Field for server-side queries); only applied to values which
            support it (text-type Fields, objects with lower/upper methods)

            @param val: the value or Field
        """

        if self.op and val is not None:
            if self.op == self.LOWER and \
               hasattr(val, "lower") and callable(val.lower) and \
               (not isinstance(val, Field) or val.type in TEXTTYPES):
                return val.lower()
            elif self.op == self.UPPER and \
                 hasattr(val, "upper") and callable(val.upper) and \
                 (not isinstance(val, Field) or val.type in TEXTTYPES):
                return val.upper()
        return val

    # -------------------------------------------------------------------------
    def represent(self, resource):
        """
            Represent this field selector as a string (for debug output)

            @param resource: the resource to resolve the selector against
        """

        try:
            rfield = S3ResourceField(resource, self.name)
        # FIX: was a bare except, which would also trap SystemExit and
        # KeyboardInterrupt
        except Exception:
            colname = None
        else:
            colname = rfield.colname
        if colname:
            if self.op is not None:
                return "%s.%s()" % (colname, self.op)
            else:
                return colname
        else:
            return "(%s?)" % self.name

    # -------------------------------------------------------------------------
    @classmethod
    def extract(cls, resource, row, field):
        """
            Extract a value from a Row

            @param resource: the resource
            @param row: the Row
            @param field: the field

            @return: field if field is not a Field/S3FieldSelector instance,
                      the value from the row otherwise
        """

        error = lambda fn: KeyError("Field not found: %s" % fn)

        t = type(field)

        if isinstance(field, Field):
            colname = str(field)
            tname, fname = colname.split(".", 1)

        elif t is S3FieldSelector:
            rfield = S3ResourceField(resource, field.name)
            colname = rfield.colname
            if not colname:
                # unresolvable selector
                raise error(field.name)
            fname = rfield.fname
            tname = rfield.tname

        elif t is S3ResourceField:
            colname = field.colname
            if not colname:
                # unresolved selector
                return None
            fname = field.fname
            tname = field.tname

        else:
            return field

        if type(row) is Row:
            try:
                if tname in row.__dict__:
                    value = ogetattr(ogetattr(row, tname), fname)
                else:
                    value = ogetattr(row, fname)
            # FIX: narrowed from bare except (attribute access can raise
            # various errors, but SystemExit/KeyboardInterrupt must pass)
            except Exception:
                try:
                    value = row[colname]
                except (KeyError, AttributeError):
                    raise error(colname)
        elif fname in row:
            value = row[fname]
        elif colname in row:
            value = row[colname]
        elif tname is not None and \
             tname in row and fname in row[tname]:
            value = row[tname][fname]
        else:
            raise error(colname)

        if callable(value):
            # Lazy virtual field
            try:
                value = value()
            # FIX: narrowed from bare except; virtual field failures are
            # logged and treated as None
            except Exception:
                current.log.error(sys.exc_info()[1])
                value = None

        if hasattr(field, "expr"):
            return field.expr(value)
        return value

    # -------------------------------------------------------------------------
    def resolve(self, resource):
        """
            Resolve this field against a resource

            @param resource: the resource
        """
        return S3ResourceField(resource, self.name)
# =============================================================================
# Short name for the S3FieldSelector class
#
FS = S3FieldSelector
# =============================================================================
class S3FieldPath(object):
""" Helper class to parse field selectors """
# -------------------------------------------------------------------------
    @classmethod
    def resolve(cls, resource, selector, tail=None):
        """
            Resolve a selector (=field path) against a resource

            @param resource: the S3Resource to resolve against
            @param selector: the field selector string
            @param tail: tokens to append to the selector

            The general syntax for a selector is:

            selector = {[alias].}{[key]$}[field|selector]

            (Parts in {} are optional, | indicates alternatives)

            * Alias can be:

            ~           refers to the resource addressed by the
                        preceding parts of the selector (=last
                        resource)
            component   alias of a component of the last resource
            linktable   alias of a link table of the last resource
            table       name of a table that has a foreign key for
                        the last resource (auto-detect the key)
            key:table   same as above, but specifying the foreign key

            * Key can be:

            key         the name of a foreign key in the last resource
            context     a context expression

            * Field can be:

            fieldname   the name of a field or virtual field of the
                        last resource
            context     a context expression

            A "context expression" is a name enclosed in parentheses:

            (context)

            During parsing, context expressions get replaced by the
            string which has been configured for this name for the
            last resource with:

            s3db.configure(tablename, context = dict(name = "string"))

            With context expressions, the same selector can be used
            for different resources, each time resolving into the
            specific field path. However, the field addressed must
            be of the same type in all resources to form valid
            queries.

            If a context name can not be resolved, resolve() will
            still succeed - but the S3FieldPath returned will have
            colname=None and ftype="context" (=unresolvable context).
        """

        if not selector:
            raise SyntaxError("Invalid selector: %s" % selector)
        # Split on "." and "$", keeping the separators as tokens for the parser
        tokens = re.split("(\.|\$)", selector)
        if tail:
            tokens.extend(tail)
        parser = cls(resource, None, tokens)
        parser.original = selector
        return parser
# -------------------------------------------------------------------------
    def __init__(self, resource, table, tokens):
        """
            Constructor - not to be called directly, use resolve() instead

            @param resource: the S3Resource
            @param table: the table
            @param tokens: the tokens as list (consumed left-to-right; the
                           tail of the list is parsed by recursive
                           S3FieldPath instances)
        """

        s3db = current.s3db

        if table is None:
            table = resource.table

        # Initialize
        self.original = None
        self.tname = table._tablename
        self.fname = None
        self.field = None
        self.ftype = None
        self.virtual = False
        self.colname = None

        self.joins = {}

        self.distinct = False
        self.multiple = True

        head = tokens.pop(0)
        tail = None

        if head and head[0] == "(" and head[-1] == ")":
            # Context expression: substitute the configured selector string
            # and continue parsing from there
            head = head.strip("()")
            self.fname = head
            self.ftype = "context"

            if not resource:
                resource = s3db.resource(table, components=[])
            context = resource.get_config("context")
            if context and head in context:
                tail = self.resolve(resource, context[head], tail=tokens)
            else:
                # unresolvable (leaves colname=None, ftype="context")
                pass

        elif tokens:
            # Resolve the tail
            op = tokens.pop(0)
            if tokens:
                if op == ".":
                    # head is a component or linktable alias, and tokens is
                    # a field expression in the component/linked table
                    if not resource:
                        resource = s3db.resource(table, components=[])
                    ktable, join, m, d = self._resolve_alias(resource, head)
                    self.multiple = m
                    self.distinct = d
                else:
                    # head is a foreign key in the current table and tokens is
                    # a field expression in the referenced table
                    ktable, join = self._resolve_key(table, head)
                    self.distinct = True
                if join is not None:
                    self.joins[ktable._tablename] = join
                # Recurse into the remaining tokens against the linked table
                tail = S3FieldPath(None, ktable, tokens)
            else:
                raise SyntaxError("trailing operator")

        if tail is None:
            # End of the expression
            if self.ftype != "context":
                # Expression is resolved, head is a field name:
                self.field = self._resolve_field(table, head)
                if not self.field:
                    self.virtual = True
                    self.ftype = "virtual"
                else:
                    self.virtual = False
                    self.ftype = str(self.field.type)
                self.fname = head
                self.colname = "%s.%s" % (self.tname, self.fname)
        else:
            # Read field data from tail
            self.tname = tail.tname
            self.fname = tail.fname
            self.field = tail.field
            self.ftype = tail.ftype
            self.virtual = tail.virtual
            self.colname = tail.colname

            # Propagate ambiguity flags and joins from the tail
            self.distinct |= tail.distinct
            self.multiple |= tail.multiple

            self.joins.update(tail.joins)
# -------------------------------------------------------------------------
@staticmethod
def _resolve_field(table, fieldname):
"""
Resolve a field name against the table, recognizes "id" as
table._id.name, and "uid" as current.xml.UID.
@param table: the Table
@param fieldname: the field name
@return: the Field
"""
if fieldname == "uid":
fieldname = current.xml.UID
if fieldname == "id":
field = table._id
elif fieldname in table.fields:
field = ogetattr(table, fieldname)
else:
field = None
return field
# -------------------------------------------------------------------------
    @staticmethod
    def _resolve_key(table, fieldname):
        """
            Resolve a foreign key into the referenced table and the
            join between the current table and the referenced table

            @param table: the current Table
            @param fieldname: the fieldname of the foreign key

            @return: tuple of (referenced table, join), where join is
                     a list of join expressions suitable for Set.select()

            @raise: AttributeError if either the field or the
                    referenced table can not be found
            @raise: SyntaxError if the field is not a foreign key
        """
        if fieldname in table.fields:
            f = table[fieldname]
        else:
            raise AttributeError("key not found: %s" % fieldname)
        ktablename, pkey, multiple = s3_get_foreign_key(f, m2m=False)
        if not ktablename:
            raise SyntaxError("%s is not a foreign key" % f)
        # Load the referenced table (DB structure only), raising
        # AttributeError if it is not defined
        ktable = current.s3db.table(ktablename,
                                    AttributeError("undefined table %s" % ktablename),
                                    db_only=True)
        # Join on the referenced key if specified, otherwise on the
        # referenced table's primary key
        pkey = ktable[pkey] if pkey else ktable._id
        join = [ktable.on(f == pkey)]
        return ktable, join
# -------------------------------------------------------------------------
    @staticmethod
    def _resolve_alias(resource, alias):
        """
            Resolve a table alias into the linked table (component, linktable
            or free join), and the joins between the current resource and
            the linked table

            @param resource: the current S3Resource
            @param alias: the alias

            @return: tuple of (linked table, joins, multiple, distinct),
                     the two latter being flags to indicate possible
                     ambiguous query results (needed by the query builder)

            @raise: AttributeError if one of the key fields or tables
                    can not be found
            @raise: SyntaxError if the alias can not be resolved (e.g.
                    because one of the keys isn't a foreign key, points
                    to the wrong table or is ambiguous)
        """
        # Alias for this resource?
        if alias in ("~", resource.alias):
            # Master table itself => no join, not multiple, not distinct
            return resource.table, None, False, False
        multiple = True
        s3db = current.s3db
        tablename = resource.tablename
        # Try to attach the component
        if alias not in resource.components and \
           alias not in resource.links:
            _alias = alias
            hook = s3db.get_component(tablename, alias)
            if not hook:
                # The alias could be a link table alias rather than
                # a component alias => try to resolve it
                _alias = s3db.get_alias(tablename, alias)
                if _alias:
                    hook = s3db.get_component(tablename, _alias)
            if hook:
                resource._attach(_alias, hook)
        components = resource.components
        links = resource.links
        if alias in components:
            # Is a component
            component = components[alias]
            ktable = component.table
            join = component._join()
            multiple = component.multiple
        elif alias in links:
            # Is a linktable
            link = links[alias]
            ktable = link.table
            join = link._join()
        elif "_" in alias:
            # Is a free join
            DELETED = current.xml.DELETED
            table = resource.table
            tablename = resource.tablename
            pkey = fkey = None
            # Find the table (alias format: "fkey:tablename" or "tablename")
            fkey, kname = (alias.split(":") + [None])[:2]
            if not kname:
                fkey, kname = kname, fkey
            ktable = s3db.table(kname,
                                AttributeError("table not found: %s" % kname),
                                db_only=True)
            if fkey is None:
                # Autodetect left key: find the (single) foreign key in
                # the joined table which references the master table
                for fname in ktable.fields:
                    tn, key, m = s3_get_foreign_key(ktable[fname], m2m=False)
                    if not tn:
                        continue
                    if tn == tablename:
                        if fkey is not None:
                            raise SyntaxError("ambiguous foreign key in %s" %
                                              alias)
                        else:
                            fkey = fname
                            if key:
                                pkey = key
                if fkey is None:
                    raise SyntaxError("no foreign key for %s in %s" %
                                      (tablename, kname))
            else:
                # Check left key: it must exist and reference the master
                if fkey not in ktable.fields:
                    raise AttributeError("no field %s in %s" % (fkey, kname))
                tn, pkey, m = s3_get_foreign_key(ktable[fkey], m2m=False)
                if tn and tn != tablename:
                    raise SyntaxError("%s.%s is not a foreign key for %s" %
                                      (kname, fkey, tablename))
                elif not tn:
                    raise SyntaxError("%s.%s is not a foreign key" %
                                      (kname, fkey))
            # Default primary key
            if pkey is None:
                pkey = table._id.name
            # Build join (excluding deleted records where applicable)
            query = (table[pkey] == ktable[fkey])
            if DELETED in ktable.fields:
                query &= ktable[DELETED] != True
            join = [ktable.on(query)]
        else:
            raise SyntaxError("Invalid tablename: %s" % alias)
        # Free joins are always potentially distinct
        return ktable, join, multiple, True
# =============================================================================
class S3ResourceField(object):
    """ Helper class to resolve a field selector against a resource """

    # -------------------------------------------------------------------------
    def __init__(self, resource, selector, label=None):
        """
            Constructor

            @param resource: the resource
            @param selector: the field selector (string)
            @param label: the field label (falls back to the Field's label,
                          or to a title-cased version of the field name)
        """

        self.resource = resource
        self.selector = selector

        lf = S3FieldPath.resolve(resource, selector)

        self.tname = lf.tname
        self.fname = lf.fname
        self.colname = lf.colname

        self._joins = lf.joins

        self.distinct = lf.distinct
        self.multiple = lf.multiple

        self._join = None

        self.field = lf.field

        self.virtual = False
        self.represent = s3_unicode
        self.requires = None

        if self.field is not None:
            field = self.field
            self.ftype = str(field.type)
            if resource.linked is not None and self.ftype == "id":
                # Always represent the link-table's ID as the
                # linked record's ID => needed for data tables
                self.represent = lambda i, resource=resource: \
                                        resource.component_id(None, i)
            else:
                self.represent = field.represent
            self.requires = field.requires
        elif self.colname:
            # Resolvable selector without a real Field => virtual field
            self.virtual = True
            self.ftype = "virtual"
        else:
            # Context expression
            self.ftype = "context"

        # Fall back to the field label
        if label is None:
            fname = self.fname
            # Fixed: the original list contained "L3" twice (typo),
            # omitting nothing but obscuring the intended L1-L5 range
            if fname in ["L1", "L2", "L3", "L4", "L5"]:
                # Use the GIS location hierarchy labels
                try:
                    label = current.gis.get_location_hierarchy(fname)
                except:
                    label = None
            elif fname == "L0":
                label = current.messages.COUNTRY
            if label is None:
                f = self.field
                if f:
                    label = f.label
                elif fname:
                    # Title-case the field name as last resort
                    label = " ".join([s.strip().capitalize()
                                      for s in fname.split("_") if s])
                else:
                    label = None

        self.label = label
        self.show = True

    # -------------------------------------------------------------------------
    def __repr__(self):
        """ String representation of this instance """

        return "<S3ResourceField " \
               "selector='%s' " \
               "label='%s' " \
               "table='%s' " \
               "field='%s' " \
               "type='%s'>" % \
               (self.selector, self.label, self.tname, self.fname, self.ftype)

    # -------------------------------------------------------------------------
    @property
    def join(self):
        """
            Implicit join (Query) for this field, for backwards-compatibility
        """

        if self._join is not None:
            return self._join

        join = self._join = {}
        for tablename, joins in self._joins.items():
            query = None
            # Collapse the join expressions into a single Query per table
            for expression in joins:
                if query is None:
                    query = expression.second
                else:
                    query &= expression.second
            if query:
                join[tablename] = query
        return join

    # -------------------------------------------------------------------------
    @property
    def left(self):
        """
            The left joins for this field, for backwards-compability
        """

        return self._joins

    # -------------------------------------------------------------------------
    def extract(self, row, represent=False, lazy=False):
        """
            Extract the value for this field from a row

            @param row: the Row
            @param represent: render a text representation for the value
            @param lazy: return a lazy representation handle if available
        """

        tname = self.tname
        fname = self.fname
        colname = self.colname
        error = "Field not found in Row: %s" % colname

        if type(row) is Row:
            try:
                if tname in row.__dict__:
                    # Nested row (joined select)
                    value = ogetattr(ogetattr(row, tname), fname)
                else:
                    value = ogetattr(row, fname)
            except:
                # Fall back to the column name
                try:
                    value = row[colname]
                except (KeyError, AttributeError):
                    raise KeyError(error)
        elif fname in row:
            value = row[fname]
        elif colname in row:
            value = row[colname]
        elif tname is not None and \
             tname in row and fname in row[tname]:
            value = row[tname][fname]
        else:
            raise KeyError(error)

        if callable(value):
            # Lazy virtual field
            try:
                value = value()
            except:
                current.log.error(sys.exc_info()[1])
                value = None

        if represent:
            renderer = self.represent
            if callable(renderer):
                if lazy and hasattr(renderer, "bulk"):
                    return S3RepresentLazy(value, renderer)
                else:
                    return renderer(value)
            else:
                return s3_unicode(value)
        else:
            return value
# =============================================================================
class S3Joins(object):
    """ A collection of joins """

    def __init__(self, tablename, joins=None):
        """
            Constructor

            @param tablename: the name of the master table
            @param joins: list of joins
        """
        self.tablename = tablename
        # Map {tablename: [joins]} for all tables in the collection
        self.joins = {}
        # Names of tables joined directly (i.e. requested by the caller,
        # as opposed to tables added only as join dependencies)
        self.tables = set()
        self.add(joins)

    # -------------------------------------------------------------------------
    def __iter__(self):
        """
            Iterate over the names of all joined tables in the collection
        """
        return self.joins.__iter__()

    # -------------------------------------------------------------------------
    def __getitem__(self, tablename):
        """
            Get the list of joins for a table

            @param tablename: the tablename
        """
        return self.joins.__getitem__(tablename)

    # -------------------------------------------------------------------------
    def __setitem__(self, tablename, joins):
        """
            Update the joins for a table

            @param tablename: the tablename
            @param joins: the list of joins for this table
        """
        master = self.tablename
        joins_dict = self.joins
        # DAL adapter helper: extract the table names used in a Query
        tables = current.db._adapter.tables
        joins_dict[tablename] = joins
        if len(joins) > 1:
            # Multi-step join: register each intermediate join under its
            # own table name as well, so that dependencies can be resolved
            for join in joins:
                try:
                    tname = join.first._tablename
                except AttributeError:
                    tname = str(join.first)
                if tname not in joins_dict and \
                   master in tables(join.second):
                    joins_dict[tname] = [join]
        self.tables.add(tablename)
        return

    # -------------------------------------------------------------------------
    def keys(self):
        """
            Get a list of names of all joined tables
        """
        return self.joins.keys()

    # -------------------------------------------------------------------------
    def items(self):
        """
            Get a list of tuples (tablename, [joins]) for all joined tables
        """
        return self.joins.items()

    # -------------------------------------------------------------------------
    def values(self):
        """
            Get a list of joins for all joined tables

            @return: a nested list like [[join, join, ...], ...]
        """
        return self.joins.values()

    # -------------------------------------------------------------------------
    def add(self, joins):
        """
            Add joins to this collection

            @param joins: a join or a list/tuple of joins

            @return: the list of names of all tables for which joins have
                     been added to the collection
        """
        tablenames = set()
        if joins:
            if not isinstance(joins, (list, tuple)):
                joins = [joins]
            for join in joins:
                tablename = join.first._tablename
                # Goes through __setitem__ to register dependencies
                self[tablename] = [join]
                tablenames.add(tablename)
        return list(tablenames)

    # -------------------------------------------------------------------------
    def extend(self, other):
        """
            Extend this collection with the joins from another collection

            @param other: the other collection (S3Joins), or a dict like
                          {tablename: [join, join]}

            @return: the list of names of all tables for which joins have
                     been added to the collection
        """
        if type(other) is S3Joins:
            add = self.tables.add
        else:
            add = None
        # For an S3Joins, update self.joins directly and track the tables;
        # for a plain dict, go through __setitem__ (which tracks tables)
        joins = self.joins if type(other) is S3Joins else self
        for tablename in other:
            if tablename not in self.joins:
                joins[tablename] = other[tablename]
                if add:
                    add(tablename)
        return other.keys()

    # -------------------------------------------------------------------------
    def __repr__(self):
        """
            String representation of this collection
        """
        return "<S3Joins %s>" % str([str(j) for j in self.as_list()])

    # -------------------------------------------------------------------------
    def as_list(self, tablenames=None, exclude=None, aqueries=None):
        """
            Return joins from this collection as list

            @param tablenames: the names of the tables for which joins
                               shall be returned, defaults to all tables
                               in the collection. Dependencies will be
                               included automatically (if available)
            @param exclude: tables to exclude from tablenames, can be
                            another S3Joins collection, or a list/tuple/set
                            of tablenames, useful e.g. to prevent duplication
                            of left joins as inner joins:
                                join = inner_joins.as_list(exclude=left_joins)
                                left = left_joins.as_list()
            @param aqueries: dict of accessible-queries {tablename: query}
                             to include in the joins; if there is no entry
                             for a particular table, then it will be looked
                             up from current.auth and added to the dict.
                             To prevent differential authorization of a
                             particular joined table, set {<tablename>: None}
                             in the dict

            @return: a list of joins, ordered by their interdependency, which
                     can be used as join/left parameter of Set.select()
        """
        accessible_query = current.auth.s3_accessible_query

        if tablenames is None:
            tablenames = self.tables
        else:
            tablenames = set(tablenames)

        if isinstance(exclude, S3Joins):
            tablenames -= set(exclude.keys())
        elif exclude:
            tablenames -= set(exclude)

        joins = self.joins

        # Resolve dependencies: any table referenced by a requested join
        # must itself be joined as well
        required_tables = set()
        get_tables = current.db._adapter.tables
        for tablename in tablenames:
            if tablename not in joins or tablename == self.tablename:
                continue
            required_tables.add(tablename)
            for join in joins[tablename]:
                dependencies = get_tables(join.second)
                if dependencies:
                    required_tables |= set(dependencies)

        # Collect joins (one join expression per table)
        joins_dict = {}
        for tablename in required_tables:
            if tablename not in joins or tablename == self.tablename:
                continue
            for join in joins[tablename]:
                j = join
                table = j.first
                tname = table._tablename
                if aqueries is not None and tname in tablenames:
                    # Combine the join condition with the accessible-query
                    # for the joined table (differential authorization)
                    if tname not in aqueries:
                        aquery = accessible_query("read", table)
                        aqueries[tname] = aquery
                    else:
                        aquery = aqueries[tname]
                    if aquery is not None:
                        j = join.first.on(join.second & aquery)
                joins_dict[tname] = j

        # Sort joins (if possible)
        try:
            return self.sort(joins_dict.values())
        except RuntimeError:
            # Circular dependency => return unsorted
            return joins_dict.values()

    # -------------------------------------------------------------------------
    @classmethod
    def sort(cls, joins):
        """
            Sort a list of left-joins by their interdependency

            @param joins: the list of joins

            @raise: RuntimeError if the joins have a circular dependency
        """
        if len(joins) <= 1:
            return joins
        r = list(joins)

        tables = current.db._adapter.tables

        append = r.append
        head = None
        # Find a join which no other join depends on (=> can go first),
        # then recurse for the remainder
        for i in xrange(len(joins)):
            join = r.pop(0)
            head = join
            tablenames = tables(join.second)
            for j in r:
                try:
                    tn = j.first._tablename
                except AttributeError:
                    tn = str(j.first)
                if tn in tablenames:
                    # join depends on j => j must come first
                    head = None
                    break
            if head is not None:
                # Found an independent join
                break
            else:
                # Requeue and try the next candidate
                append(join)
        if head is not None:
            return [head] + cls.sort(r)
        else:
            raise RuntimeError("circular join dependency")
# =============================================================================
class S3ResourceQuery(object):
    """
        Helper class representing a resource query
        - unlike DAL Query objects, these can be converted to/from URL filters
    """

    # Supported operators
    NOT = "not"
    AND = "and"
    OR = "or"
    LT = "lt"
    LE = "le"
    EQ = "eq"
    NE = "ne"
    GE = "ge"
    GT = "gt"
    LIKE = "like"
    BELONGS = "belongs"
    CONTAINS = "contains"
    ANYOF = "anyof"
    TYPEOF = "typeof"

    OPERATORS = [NOT, AND, OR,
                 LT, LE, EQ, NE, GE, GT,
                 LIKE, BELONGS, CONTAINS, ANYOF, TYPEOF]

    # -------------------------------------------------------------------------
    def __init__(self, op, left=None, right=None):
        """ Constructor """

        if op not in self.OPERATORS:
            raise SyntaxError("Invalid operator: %s" % op)

        self.op = op

        self.left = left
        self.right = right

    # -------------------------------------------------------------------------
    def __and__(self, other):
        """ AND """

        return S3ResourceQuery(self.AND, self, other)

    # -------------------------------------------------------------------------
    def __or__(self, other):
        """ OR """

        return S3ResourceQuery(self.OR, self, other)

    # -------------------------------------------------------------------------
    def __invert__(self):
        """ NOT """

        if self.op == self.NOT:
            # NOT(NOT(x)) => x
            return self.left
        else:
            return S3ResourceQuery(self.NOT, self)

    # -------------------------------------------------------------------------
    def _joins(self, resource, left=False):
        """
            Get the joins required to resolve this query against the resource

            @param resource: the S3Resource
            @param left: get left joins rather than inner joins

            @return: tuple (joins, distinct)
        """

        op = self.op
        l = self.left
        r = self.right

        if op in (self.AND, self.OR):
            # Combine the joins of both sub-queries
            ljoins, ld = l._joins(resource, left=left)
            rjoins, rd = r._joins(resource, left=left)

            ljoins = dict(ljoins)
            ljoins.update(rjoins)

            return (ljoins, ld or rd)

        elif op == self.NOT:
            return l._joins(resource, left=left)

        joins, distinct = {}, False

        if isinstance(l, S3FieldSelector):
            try:
                rfield = l.resolve(resource)
            except (SyntaxError, AttributeError):
                pass
            else:
                distinct = rfield.distinct
                # Distinct fields require left joins, others inner joins
                if distinct and left or not distinct and not left:
                    joins = rfield._joins

        return (joins, distinct)

    # -------------------------------------------------------------------------
    def fields(self):
        """ Get all field selectors involved with this query """

        op = self.op
        l = self.left
        r = self.right

        if op in (self.AND, self.OR):
            lf = l.fields()
            rf = r.fields()
            return lf + rf
        elif op == self.NOT:
            return l.fields()
        elif isinstance(l, S3FieldSelector):
            return [l.name]
        else:
            return []

    # -------------------------------------------------------------------------
    def split(self, resource):
        """
            Split this query into a real query and a virtual one (AND)

            @param resource: the S3Resource
            @return: tuple (DAL-translatable sub-query, virtual filter),
                     both S3ResourceQuery instances
        """

        op = self.op
        l = self.left
        r = self.right

        if op == self.AND:
            lq, lf = l.split(resource)
            rq, rf = r.split(resource)
            q = lq
            if rq is not None:
                if q is not None:
                    q &= rq
                else:
                    q = rq
            f = lf
            if rf is not None:
                if f is not None:
                    f &= rf
                else:
                    f = rf
            return q, f
        elif op == self.OR:
            lq, lf = l.split(resource)
            rq, rf = r.split(resource)
            if lf is not None or rf is not None:
                # Any virtual part makes the whole OR virtual
                return None, self
            else:
                q = lq
                if rq is not None:
                    if q is not None:
                        q |= rq
                    else:
                        q = rq
                return q, None
        elif op == self.NOT:
            if l.op == self.OR:
                # De Morgan: NOT(a OR b) => NOT(a) AND NOT(b)
                i = (~(l.left)) & (~(l.right))
                return i.split(resource)
            else:
                q, f = l.split(resource)
                if q is not None and f is not None:
                    # Cannot negate a partially virtual query piecewise
                    return None, self
                elif q is not None:
                    return ~q, None
                elif f is not None:
                    return None, ~f

        l = self.left
        try:
            if isinstance(l, S3FieldSelector):
                lfield = l.resolve(resource)
            else:
                lfield = S3ResourceField(resource, l)
        except:
            lfield = None
        if not lfield or lfield.field is None:
            # Unresolvable or virtual => must be probed in Python
            return None, self
        else:
            return self, None

    # -------------------------------------------------------------------------
    def transform(self, resource):
        """
            Placeholder for transformation method

            @param resource: the S3Resource
        """

        # @todo: implement
        return self

    # -------------------------------------------------------------------------
    def query(self, resource):
        """
            Convert this S3ResourceQuery into a DAL query, ignoring virtual
            fields (the necessary joins for this query can be constructed
            with the joins() method)

            @param resource: the resource to resolve the query against
        """

        op = self.op
        l = self.left
        r = self.right

        # Resolve query components
        if op == self.AND:
            l = l.query(resource)
            r = r.query(resource)
            if l is None or r is None:
                return None
            elif l is False or r is False:
                return l if r is False else r if l is False else False
            else:
                return l & r
        elif op == self.OR:
            l = l.query(resource)
            r = r.query(resource)
            if l is None or r is None:
                return None
            elif l is False or r is False:
                return l if r is False else r if l is False else False
            else:
                return l | r
        elif op == self.NOT:
            l = l.query(resource)
            if l is None:
                return None
            elif l is False:
                return False
            else:
                return ~l

        # Resolve the fields
        if isinstance(l, S3FieldSelector):
            try:
                rfield = S3ResourceField(resource, l.name)
            except:
                return None
            if rfield.virtual:
                return None
            elif not rfield.field:
                return False
            lfield = l.expr(rfield.field)
        elif isinstance(l, Field):
            lfield = l
        else:
            return None # not a field at all
        if isinstance(r, S3FieldSelector):
            try:
                rfield = S3ResourceField(resource, r.name)
            except:
                return None
            # Fixed: check virtual/field on the S3ResourceField *before*
            # dereferencing - the original assigned rfield.field first,
            # which made these attribute lookups fail on the DAL Field
            if rfield.virtual:
                return None
            elif not rfield.field:
                return False
            rfield = r.expr(rfield.field)
        else:
            rfield = r

        # Resolve the operator
        invert = False
        query_bare = self._query_bare
        ftype = str(lfield.type)
        if isinstance(rfield, (list, tuple)) and ftype[:4] != "list":
            if op == self.EQ:
                op = self.BELONGS
            elif op == self.NE:
                op = self.BELONGS
                invert = True
            elif op not in (self.BELONGS, self.TYPEOF):
                # Fan out the operator over all values (OR)
                query = None
                for v in rfield:
                    q = query_bare(op, lfield, v)
                    if q is not None:
                        if query is None:
                            query = q
                        else:
                            query |= q
                return query

        # Convert date(time) strings
        if ftype == "datetime" and \
           isinstance(rfield, basestring):
            rfield = S3TypeConverter.convert(datetime.datetime, rfield)
        elif ftype == "date" and \
             isinstance(rfield, basestring):
            rfield = S3TypeConverter.convert(datetime.date, rfield)

        query = query_bare(op, lfield, rfield)
        if invert:
            query = ~(query)
        return query

    # -------------------------------------------------------------------------
    def _query_bare(self, op, l, r):
        """
            Translate a filter expression into a DAL query

            @param op: the operator
            @param l: the left operand
            @param r: the right operand
        """

        if op == self.CONTAINS:
            q = l.contains(r, all=True)
        elif op == self.ANYOF:
            # NB str/int doesn't matter here
            q = l.contains(r, all=False)
        elif op == self.BELONGS:
            q = self._query_belongs(l, r)
        elif op == self.TYPEOF:
            q = self._query_typeof(l, r)
        elif op == self.LIKE:
            q = l.like(s3_unicode(r))
        elif op == self.LT:
            q = l < r
        elif op == self.LE:
            q = l <= r
        elif op == self.EQ:
            q = l == r
        elif op == self.NE:
            q = l != r
        elif op == self.GE:
            q = l >= r
        elif op == self.GT:
            q = l > r
        else:
            q = None
        return q

    # -------------------------------------------------------------------------
    def _query_typeof(self, l, r):
        """
            Translate TYPEOF into DAL expression

            @param l: the left operator
            @param r: the right operator
        """

        hierarchy, field, nodeset, none = self._resolve_hierarchy(l, r)
        if not hierarchy:
            # Not a hierarchical query => use simple belongs
            return self._query_belongs(l, r)
        if not field:
            # Field does not exist (=>skip subquery)
            return None

        # Construct the subquery
        list_type = str(field.type)[:5] == "list:"
        if nodeset:
            if list_type:
                q = (field.contains(list(nodeset)))
            elif len(nodeset) > 1:
                q = (field.belongs(nodeset))
            else:
                q = (field == tuple(nodeset)[0])
        else:
            q = None

        if none:
            # None needs special handling with older DAL versions
            if not list_type:
                if q is None:
                    q = (field == None)
                else:
                    q |= (field == None)
        if q is None:
            # Values not resolvable (=subquery always fails)
            q = field.belongs(set())

        return q

    # -------------------------------------------------------------------------
    @classmethod
    def _resolve_hierarchy(cls, l, r):
        """
            Resolve the hierarchical lookup in a typeof-query

            @param l: the left operator
            @param r: the right operator

            @return: tuple (hierarchical, field, nodeset, none), where
                     hierarchical is a flag whether this is a hierarchical
                     lookup at all
        """

        from s3hierarchy import S3Hierarchy

        tablename = l.tablename
        # Connect to the hierarchy
        hierarchy = S3Hierarchy(tablename)
        if hierarchy.config is None:
            # Reference to a hierarchical table?
            ktablename, key = s3_get_foreign_key(l)[:2]
            if ktablename:
                hierarchy = S3Hierarchy(ktablename)
        else:
            key = None

        list_type = str(l.type)[:5] == "list:"
        if hierarchy.config is None and not list_type:
            # No hierarchy configured and no list:reference
            return False, None, None, None

        field, keys = l, r

        if not key:
            s3db = current.s3db

            table = s3db[tablename]
            if l.name != table._id.name:
                # Lookup-field rather than primary key => resolve it

                # Build a filter expression for the lookup table
                fs = S3FieldSelector(l.name)
                if list_type:
                    expr = fs.contains(r)
                else:
                    expr = cls._query_belongs(l, r, field = fs)

                # Resolve filter expression into subquery
                resource = s3db.resource(tablename)
                if expr is not None:
                    subquery = expr.query(resource)
                else:
                    subquery = None
                if not subquery:
                    # Field doesn't exist
                    return True, None, None, None

                # Execute query and retrieve the lookup table IDs
                DELETED = current.xml.DELETED
                if DELETED in table.fields:
                    subquery &= table[DELETED] != True
                rows = current.db(subquery).select(table._id)

                # Override field/keys
                field = table[hierarchy.pkey.name]
                keys = set([row[table._id.name] for row in rows])

        nodeset, none = None, False
        if keys:
            # Lookup all descendant types from the hierarchy
            none = False
            if not isinstance(keys, (list, tuple, set)):
                keys = set([keys])
            nodes = set()
            for node in keys:
                if node is None:
                    none = True
                else:
                    try:
                        node_id = long(node)
                    except ValueError:
                        continue
                    nodes.add(node_id)
            if hierarchy.config is not None:
                nodeset = hierarchy.findall(nodes, inclusive=True)
            else:
                nodeset = nodes
        elif keys is None:
            none = True

        return True, field, nodeset, none

    # -------------------------------------------------------------------------
    @staticmethod
    def _query_belongs(l, r, field=None):
        """
            Resolve BELONGS into a DAL expression (or S3ResourceQuery if
            field is an S3FieldSelector)

            @param l: the left operator
            @param r: the right operator
            @param field: alternative left operator
        """

        if field is None:
            field = l

        expr = None
        none = False

        if not isinstance(r, (list, tuple, set)):
            items = [r]
        else:
            items = r
        if None in items:
            none = True
            items = [item for item in items if item is not None]

        wildcard = False

        if str(l.type) in ("string", "text"):
            for item in items:
                if isinstance(item, basestring):
                    if "*" in item and "%" not in item:
                        s = item.replace("*", "%")
                    else:
                        s = item
                else:
                    try:
                        s = str(item)
                    except:
                        continue

                if "%" in s:
                    # Use LIKE for wildcard matches
                    wildcard = True
                    _expr = (field.like(s))
                else:
                    _expr = (field == s)

                if expr is None:
                    expr = _expr
                else:
                    expr |= _expr

        if not wildcard:
            if len(items) == 1:
                # Don't use belongs() for single value
                expr = (field == items[0])
            elif items:
                expr = (field.belongs(items))

        if none:
            # None needs special handling with older DAL versions
            if expr is None:
                expr = (field == None)
            else:
                expr |= (field == None)
        elif expr is None:
            # Empty set => subquery always fails
            expr = field.belongs(set())

        return expr

    # -------------------------------------------------------------------------
    def __call__(self, resource, row, virtual=True):
        """
            Probe whether the row matches the query

            @param resource: the resource to resolve the query against
            @param row: the DB row
            @param virtual: execute only virtual queries
        """

        if self.op == self.AND:
            l = self.left(resource, row, virtual=False)
            r = self.right(resource, row, virtual=False)
            if l is None:
                return r
            if r is None:
                return l
            return l and r
        elif self.op == self.OR:
            l = self.left(resource, row, virtual=False)
            r = self.right(resource, row, virtual=False)
            if l is None:
                return r
            if r is None:
                return l
            return l or r
        elif self.op == self.NOT:
            l = self.left(resource, row)
            if l is None:
                return None
            else:
                return not l

        real = False
        left = self.left
        if isinstance(left, S3FieldSelector):
            try:
                lfield = left.resolve(resource)
            except (AttributeError, KeyError, SyntaxError):
                return None
            if lfield.field is not None:
                real = True
            elif not lfield.virtual:
                # Unresolvable expression => skip
                return None
        else:
            lfield = left
            if isinstance(left, Field):
                real = True
        right = self.right
        if isinstance(right, S3FieldSelector):
            try:
                rfield = right.resolve(resource)
            except (AttributeError, KeyError, SyntaxError):
                return None
            if rfield.virtual:
                real = False
            elif rfield.field is None:
                # Unresolvable expression => skip
                return None
        else:
            rfield = right
        if virtual and real:
            # Real queries are handled by the DB, not probed here
            return None

        extract = lambda f: S3FieldSelector.extract(resource, row, f)
        try:
            l = extract(lfield)
            r = extract(rfield)
        except (KeyError, SyntaxError):
            current.log.error(sys.exc_info()[1])
            return None

        if isinstance(left, S3FieldSelector):
            l = left.expr(l)
        if isinstance(right, S3FieldSelector):
            r = right.expr(r)

        op = self.op
        invert = False
        probe = self._probe
        if isinstance(rfield, (list, tuple)) and \
           not isinstance(lfield, (list, tuple)):
            if op == self.EQ:
                op = self.BELONGS
            elif op == self.NE:
                op = self.BELONGS
                invert = True
            elif op != self.BELONGS:
                # Fan out the operator over all values (OR)
                for v in r:
                    try:
                        r = probe(op, l, v)
                    except (TypeError, ValueError):
                        r = False
                    if r:
                        return True
                return False
        try:
            r = probe(op, l, r)
        except (TypeError, ValueError):
            return False
        if invert and r is not None:
            return not r
        else:
            return r

    # -------------------------------------------------------------------------
    def _probe(self, op, l, r):
        """
            Probe whether the value pair matches the query

            @param l: the left value
            @param r: the right value
        """

        result = False

        convert = S3TypeConverter.convert

        # Fallbacks for TYPEOF
        if op == self.TYPEOF:
            if isinstance(l, (list, tuple, set)):
                op = self.ANYOF
            elif isinstance(r, (list, tuple, set)):
                op = self.BELONGS
            else:
                op = self.EQ

        if op == self.CONTAINS:
            r = convert(l, r)
            result = self._probe_contains(l, r)
        elif op == self.ANYOF:
            if not isinstance(r, (list, tuple, set)):
                r = [r]
            for v in r:
                if isinstance(l, (list, tuple, set, basestring)):
                    if self._probe_contains(l, v):
                        return True
                elif l == v:
                    return True
            return False
        elif op == self.BELONGS:
            if not isinstance(r, (list, tuple, set)):
                r = [r]
            r = convert(l, r)
            result = self._probe_contains(r, l)
        elif op == self.LIKE:
            # Convert SQL wildcard (%) into regex wildcard (.*)
            pattern = re.escape(str(r)).replace("\\%", ".*").replace(".*.*", "\\%")
            return re.match(pattern, str(l)) is not None
        else:
            r = convert(l, r)
            if op == self.LT:
                result = l < r
            elif op == self.LE:
                result = l <= r
            elif op == self.EQ:
                result = l == r
            elif op == self.NE:
                result = l != r
            elif op == self.GE:
                result = l >= r
            elif op == self.GT:
                result = l > r
        return result

    # -------------------------------------------------------------------------
    @staticmethod
    def _probe_contains(a, b):
        """
            Probe whether a contains b
        """

        if a is None:
            return False
        try:
            if isinstance(a, basestring):
                return str(b) in a
            elif isinstance(a, (list, tuple, set)):
                if isinstance(b, (list, tuple, set)):
                    convert = S3TypeConverter.convert
                    found = True
                    for _b in b:
                        if _b not in a:
                            found = False
                            # Retry with type conversion
                            for _a in a:
                                try:
                                    if convert(_a, _b) == _a:
                                        found = True
                                        break
                                except (TypeError, ValueError):
                                    continue
                        if not found:
                            break
                    return found
                else:
                    return b in a
            else:
                return str(b) in str(a)
        except:
            return False

    # -------------------------------------------------------------------------
    def represent(self, resource):
        """
            Represent this query as a human-readable string.

            @param resource: the resource to resolve the query against
        """

        op = self.op
        l = self.left
        r = self.right
        if op == self.AND:
            l = l.represent(resource)
            r = r.represent(resource)
            return "(%s and %s)" % (l, r)
        elif op == self.OR:
            l = l.represent(resource)
            r = r.represent(resource)
            return "(%s or %s)" % (l, r)
        elif op == self.NOT:
            l = l.represent(resource)
            return "(not %s)" % l
        else:
            if isinstance(l, S3FieldSelector):
                l = l.represent(resource)
            elif isinstance(l, basestring):
                l = '"%s"' % l
            if isinstance(r, S3FieldSelector):
                r = r.represent(resource)
            elif isinstance(r, basestring):
                r = '"%s"' % r
            if op == self.CONTAINS:
                return "(%s in %s)" % (r, l)
            elif op == self.BELONGS:
                return "(%s in %s)" % (l, r)
            elif op == self.ANYOF:
                return "(%s contains any of %s)" % (l, r)
            elif op == self.TYPEOF:
                return "(%s is a type of %s)" % (l, r)
            elif op == self.LIKE:
                return "(%s like %s)" % (l, r)
            elif op == self.LT:
                return "(%s < %s)" % (l, r)
            elif op == self.LE:
                return "(%s <= %s)" % (l, r)
            elif op == self.EQ:
                return "(%s == %s)" % (l, r)
            elif op == self.NE:
                return "(%s != %s)" % (l, r)
            elif op == self.GE:
                return "(%s >= %s)" % (l, r)
            elif op == self.GT:
                return "(%s > %s)" % (l, r)
            else:
                return "(%s ?%s? %s)" % (l, op, r)

    # -------------------------------------------------------------------------
    def serialize_url(self, resource=None):
        """
            Serialize this query as URL query

            @return: a Storage of URL variables
        """

        op = self.op
        l = self.left
        r = self.right

        url_query = Storage()
        def _serialize(n, o, v, invert):
            try:
                # Quote values containing commas (list separator in URLs)
                quote = lambda s: s if "," not in s else '"%s"' % s
                if isinstance(v, list):
                    v = ",".join([quote(S3TypeConverter.convert(str, val))
                                  for val in v])
                else:
                    v = quote(S3TypeConverter.convert(str, v))
            except:
                return
            if "." not in n:
                if resource is not None:
                    n = "~.%s" % n
                else:
                    return url_query
            if o == self.LIKE:
                v = v.replace("%", "*")
            if o == self.EQ:
                operator = ""
            else:
                operator = "__%s" % o
            if invert:
                operator = "%s!" % operator
            key = "%s%s" % (n, operator)
            if key in url_query:
                url_query[key] = "%s,%s" % (url_query[key], v)
            else:
                url_query[key] = v
            return url_query
        if op == self.AND:
            lu = l.serialize_url(resource=resource)
            url_query.update(lu)
            ru = r.serialize_url(resource=resource)
            url_query.update(ru)
        elif op == self.OR:
            sub = self._or()
            if sub is None:
                # This OR-subtree is not serializable
                return url_query
            n, o, v, invert = sub
            _serialize(n, o, v, invert)
        elif op == self.NOT:
            lu = l.serialize_url(resource=resource)
            for k in lu:
                url_query["%s!" % k] = lu[k]
        elif isinstance(l, S3FieldSelector):
            _serialize(l.name, op, r, False)
        return url_query

    # -------------------------------------------------------------------------
    def _or(self):
        """
            Helper method to URL-serialize an OR-subtree in a query in
            alternative field selector syntax if they all use the same
            operator and value (this is needed to URL-serialize an
            S3SearchSimpleWidget query).
        """

        op = self.op
        l = self.left
        r = self.right

        if op == self.AND:
            return None
        elif op == self.NOT:
            lname, lop, lval, linv = l._or()
            return (lname, lop, lval, not linv)
        elif op == self.OR:
            lvars = l._or()
            rvars = r._or()
            if lvars is None or rvars is None:
                return None
            lname, lop, lval, linv = lvars
            rname, rop, rval, rinv = rvars
            if lop != rop or linv != rinv:
                return None
            if lname == rname:
                return (lname, lop, [lval, rval], linv)
            elif lval == rval:
                return ("%s|%s" % (lname, rname), lop, lval, linv)
            else:
                return None
        else:
            return (l.name, op, r, False)
# =============================================================================
class S3URLQuery(object):
    """ URL Query Parser

        Translates filter expressions in request GET variables (e.g.
        ?person.name__like=Ada*&~.id=1,2) into S3ResourceQuery objects,
        grouped by the component alias they apply to.

        NOTE(review): Storage, S3ResourceQuery, S3FieldSelector and
        current are supplied by the enclosing web2py/S3 framework; the
        code uses Python 2 idioms throughout (iteritems, basestring,
        builtin reduce, str.encode returning bytes).
    """

    # -------------------------------------------------------------------------
    @classmethod
    def parse(cls, resource, vars):
        """
            Construct a Storage of S3ResourceQuery from a Storage of get_vars

            @param resource: the S3Resource
            @param vars: the get_vars
            @return: Storage of S3ResourceQuery like {alias: query}, where
                     alias is the alias of the component the query concerns
        """

        query = Storage()

        # Nothing to resolve without a resource or without any vars
        if resource is None:
            return query
        if not vars:
            return query

        subquery = cls._subquery
        # AND-combine two queries, tolerating None operands
        allof = lambda l, r: l if r is None else r if l is None else r & l

        for key, value in vars.iteritems():
            # Only keys like "alias.field..." or "(...)..." are filter
            # expressions; skip all other GET variables
            if not("." in key or key[0] == "(" and ")" in key):
                continue
            selectors, op, invert = cls.parse_expression(key)
            if type(value) is list:
                # Multiple queries with the same selector (AND)
                q = reduce(allof,
                           [subquery(selectors, op, invert, v) for v in value],
                           None)
            else:
                q = subquery(selectors, op, invert, value)
            if q is None:
                continue
            # Append to query
            if len(selectors) > 1:
                # Alternative selectors ("a|b"): attach to their shared
                # component alias if they all have the same one,
                # otherwise to the master resource
                aliases = [s.split(".", 1)[0] for s in selectors]
                if len(set(aliases)) == 1:
                    alias = aliases[0]
                else:
                    alias = resource.alias
                #alias = resource.alias
            else:
                alias = selectors[0].split(".", 1)[0]
            if alias == "~":
                # "~" is shorthand for the master resource itself
                alias = resource.alias
            if alias not in query:
                query[alias] = [q]
            else:
                query[alias].append(q)

        return query

    # -------------------------------------------------------------------------
    @staticmethod
    def parse_url(url):
        """
            Parse a URL query into get_vars

            @param query: the URL query string
            @return: the get_vars (Storage)
        """

        if not url:
            return Storage()
        elif "?" in url:
            # Full URL: keep only the query string part
            query = url.split("?", 1)[1]
        elif "=" in url:
            # Bare query string
            query = url
        else:
            # No query component at all
            return Storage()

        import cgi
        # keep_blank_values=1 preserves keys with empty values
        dget = cgi.parse_qsl(query, keep_blank_values=1)

        get_vars = Storage()
        for (key, value) in dget:
            if key in get_vars:
                # Repeated key: collect all values into a list
                if type(get_vars[key]) is list:
                    get_vars[key].append(value)
                else:
                    get_vars[key] = [get_vars[key], value]
            else:
                get_vars[key] = value
        return get_vars

    # -------------------------------------------------------------------------
    @staticmethod
    def parse_expression(key):
        """
            Parse a URL expression

            @param key: the key for the URL variable
            @return: tuple (selectors, operator, invert)
        """

        # Trailing "!" inverts the query
        if key[-1] == "!":
            invert = True
        else:
            invert = False
        fs = key.rstrip("!")
        op = None
        # Operator suffix, e.g. "field__like" => op "like"
        if "__" in fs:
            fs, op = fs.split("__", 1)
            op = op.strip("_")
        if not op:
            op = "eq"
        # "|" separates alternative field selectors
        if "|" in fs:
            selectors = [s for s in fs.split("|") if s]
        else:
            selectors = [fs]
        return selectors, op, invert

    # -------------------------------------------------------------------------
    @staticmethod
    def parse_value(value):
        """
            Parse a URL query value

            @param value: the value
            @return: the parsed value

            Splits comma-separated lists while respecting double-quoted
            items (quotes may be escaped as \\"); the literal strings
            "NONE"/"None" are converted to None. A single item is
            returned as a scalar, multiple items as a list.
        """

        # Strip surrounding quotes and unescape embedded \" characters
        uquote = lambda w: w.replace('\\"', '\\"\\') \
                            .strip('"') \
                            .replace('\\"\\', '"')

        NONE = ("NONE", "None")
        if type(value) is not list:
            value = [value]
        vlist = []
        for item in value:
            w = ""
            quote = False
            ignore_quote = False
            for c in item:
                if c == '"' and not ignore_quote:
                    w += c
                    # Toggle "inside quotes" state
                    quote = not quote
                elif c == "," and not quote:
                    # Unquoted comma terminates the current item
                    if w in NONE:
                        w = None
                    else:
                        # NOTE(review): .encode("utf-8") yields a byte
                        # string here (Python 2 semantics)
                        w = uquote(w).encode("utf-8")
                    vlist.append(w)
                    w = ""
                else:
                    w += c
                # A backslash escapes the immediately following quote
                if c == "\\":
                    ignore_quote = True
                else:
                    ignore_quote = False
            # Flush the last (or only) item
            if w in NONE:
                w = None
            else:
                w = uquote(w).encode("utf-8")
            vlist.append(w)
        if len(vlist) == 1:
            return vlist[0]
        return vlist

    # -------------------------------------------------------------------------
    @classmethod
    def _subquery(cls, selectors, op, invert, value):
        """
            Construct a sub-query from URL selectors, operator and value

            @param selectors: the selector(s)
            @param op: the operator
            @param invert: invert the query
            @param value: the value
        """

        v = cls.parse_value(value)

        q = None
        for fs in selectors:

            if op == S3ResourceQuery.LIKE:
                # Auto-lowercase and replace wildcard
                f = S3FieldSelector(fs).lower()
                if isinstance(v, basestring):
                    v = v.replace("*", "%").lower()
                elif isinstance(v, list):
                    v = [x.replace("*", "%").lower() for x in v if x is not None]
            else:
                f = S3FieldSelector(fs)

            rquery = None
            try:
                rquery = S3ResourceQuery(op, f, v)
            except SyntaxError:
                # Unknown operator: drop the whole sub-query
                current.log.error("Invalid URL query operator: %s (sub-query ignored)" % op)
                q = None
                break

            # Invert operation
            if invert:
                rquery = ~rquery

            # Add to subquery
            # Alternative selectors combine with OR; inverted queries
            # combine with AND (De Morgan: NOT(a OR b) = NOT a AND NOT b)
            if q is None:
                q = rquery
            elif invert:
                q &= rquery
            else:
                q |= rquery

        return q
# END =========================================================================
| [
"[email protected]"
]
| |
acec31ec61ba2685785c55fce5948b4cca5d910f | 7e326ba379d8e46fbf597938c1efcb99afb0d7e8 | /server/models.py | 6aaa295aa567f34255d38ca1326ffb51cf140983 | []
| no_license | obulpathi/miner | 393f532e3901bbb3885155d6c8ff3ea363634e50 | 53e8221d6545cb9da2f166bfa771eceb11f730f6 | refs/heads/master | 2020-04-16T01:14:37.607057 | 2014-06-14T01:02:37 | 2014-06-14T01:02:37 | 12,418,001 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,133 | py | # Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bitified Image Entry Model and helper functions."""
import cgi
from google.appengine.ext import db
# Datetime string format
DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'
class Miner(db.Model):
    """Datastore entity recording one hash-rate report from a miner.

    Attributes:
        user: owner of the report (required).
        timestamp: set automatically when the entity is first stored.
        miner: identifier of the reporting miner (required).
        speed: reported hash rate, stored as a string (required).
    """

    user = db.StringProperty(required=True)
    timestamp = db.DateTimeProperty(auto_now_add=True)
    miner = db.StringProperty(required=True)
    speed = db.StringProperty(required=True)

    @property
    def timestamp_strsafe(self):
        """Timestamp rendered with DATETIME_FORMAT, or None if unset."""
        if not self.timestamp:
            return None
        return self.timestamp.strftime(DATETIME_FORMAT)
| [
"[email protected]"
]
| |
4aa9a980e8e71317eb2171d8d36ed74801927c53 | ef7f73ffe943eb6b2a4456d374de3fd49bc5d3bb | /src/kvt/augmentation/__init__.py | 88afc26d0a9e31864017394769d435a987ab2b6a | [
"BSD-2-Clause"
]
| permissive | Ynakatsuka/birdclef-2021 | 11acf0a7c2a6463e574df7a058fbf8cc6ab782b6 | d7cf7b39e3164a75547ee50cc9a29bd5ed4c29bd | refs/heads/main | 2023-05-12T01:00:24.612945 | 2021-06-02T01:37:16 | 2021-06-02T01:37:16 | 358,929,160 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 635 | py | from .audio import (
CosineVolume,
LowFrequencyMask,
OneOf,
PinkNoise,
RandomVolume,
SpecifiedNoise,
SpeedTuning,
StretchAudio,
TimeShift,
or_mixup,
)
from .augmix import RandomAugMix
from .autoaugment import ImageNetPolicy
from .block_fade import BlockFade
from .grid_mask import GridMask
from .histogram import HistogramNormalize
from .line import Line
from .mix import cutmix, mixup
from .morphological import RandomMorph
from .needless import NeedleAugmentation
from .random_erasing import RandomErasing
from .spec_augmentation import SpecAugmentationPlusPlus
from .sprinkle import Sprinkle
| [
"[email protected]"
]
| |
06bd4e1105e6c53a55320e70f7f22a9c55b68a77 | a9305f461b2c03e4a55fec9f1ecc75f78265eb8e | /sub_text/sub_text1.py | 3db803f6ac183f182aeeb51b6db8b843e9474c94 | []
| no_license | JoaoBueno/estudos-python | 653afb174f2d141fcc82511c51cbfd2bca1b55cb | 606e188e88ee3a2b2e1daee60c71948c678228e1 | refs/heads/master | 2022-01-24T20:17:52.702768 | 2022-01-19T20:39:20 | 2022-01-19T20:39:20 | 150,925,137 | 2 | 2 | null | 2022-01-19T20:40:46 | 2018-09-30T03:09:08 | Python | UTF-8 | Python | false | false | 109 | py | from pyutil import filereplace
filereplace("somefile.txt","/educacional.1linha.com.br","/www.1linha.com.br") | [
"[email protected]"
]
| |
fb945c11cc4e07aba01f67c2de8cdc6fd748421c | 186f694b65b43cd56e746ce8538e4f1edad6129e | /1on1/Two_Pointer/lint-386.py | 7a6611cdb2072bd584bf7e854b88dde5e704967e | []
| no_license | zionhjs/algorithm_repo | 287486e0173e68cfa9e535490004c952192a54db | 26b4a770d5335abd738ae26c68d91f6af7b13749 | refs/heads/master | 2022-12-17T15:59:17.932490 | 2020-09-23T04:12:38 | 2020-09-23T04:12:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 772 | py | class Solution:
"""
@param s: A string
@param k: An integer
@return: An integer
"""
def lengthOfLongestSubstringKDistinct(self, s, k):
if not s or k == 0:
return 0
alpha_counts = [0 for _ in range(256)]
count = 0
max_len = 0
i, j = 0, 0
while j < len(s):
char = ord(s[j])
alpha_counts[char] += 1
if alpha_counts[char] == 1:
count += 1
while i < j and count > k:
char = ord(s[i])
alpha_counts[char] -= 1
if alpha_counts[char] == 0:
count -= 1
i += 1
max_len = max(max_len, j-i+1)
j += 1
return max_len
| [
"[email protected]"
]
| |
d3f66cfc2fc33594b799867d55558df1743f6729 | 892a46487e056458270a774cda5b1b6752f25a84 | /1/run.py | e9e81aaa3f3325d052c1eee58ca189292dc8fdf7 | []
| no_license | taojy123/MyPhones | 249a980ccff475cf844ae858dae514ca967cc39e | 93291080dd7dc6c22054aa3d5a24a15ca1532912 | refs/heads/master | 2020-12-24T14:00:46.372187 | 2015-09-28T05:07:24 | 2015-09-28T05:07:24 | 22,827,560 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,439 | py |
#!/usr/bin/env python
import os
import sys
import webbrowser
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myphones.settings")
#these pertain to your application
import myphones.wsgi
import myphones.urls
import myphones.settings
import myphones.models
import myphones.views
import django.contrib.auth
import django.contrib.contenttypes
import django.contrib.sessions
import django.contrib.sites
import django.contrib.admin
import django.db.models.sql.compiler
from django.contrib.auth.backends import *
#from django.conf.urls.defaults import *
#these are django imports
import django.template.loaders.filesystem
import django.template.loaders.app_directories
import django.middleware.common
import django.contrib.sessions.middleware
import django.contrib.auth.middleware
import django.middleware.doc
import django.contrib.messages
import django.contrib.staticfiles
import django.contrib.messages.middleware
import django.contrib.sessions.backends.db
#import django.contrib.messages.storage.user_messages
import django.contrib.messages.storage.fallback
import django.db.backends.sqlite3.base
import django.db.backends.sqlite3.introspection
import django.db.backends.sqlite3.creation
import django.db.backends.sqlite3.client
import django.contrib.auth.context_processors
from django.core.context_processors import *
import django.contrib.messages.context_processors
import django.contrib.auth.models
import django.contrib.contenttypes.models
import django.contrib.sessions.models
import django.contrib.sites.models
import django.contrib.messages.models
import django.contrib.staticfiles.models
import django.contrib.admin.models
import django.template.defaulttags
import django.template.defaultfilters
import django.template.loader_tags
#dont need to import these pkgs
#need to know how to exclude them
import email.mime.audio
import email.mime.base
import email.mime.image
import email.mime.message
import email.mime.multipart
import email.mime.nonmultipart
import email.mime.text
import email.charset
import email.encoders
import email.errors
import email.feedparser
import email.generator
import email.header
import email.iterators
import email.message
import email.parser
import email.utils
import email.base64mime
import email.quoprimime
import django.core.cache.backends.locmem
import django.templatetags.i18n
import django.templatetags.future
import django.views.i18n
import django.core.context_processors
import django.template.defaulttags
import django.template.defaultfilters
import django.template.loader_tags
#from django.conf.urls.defaults import *
import django.contrib.admin.views.main
import django.core.context_processors
import django.contrib.auth.views
import django.contrib.auth.backends
import django.views.static
import django.contrib.admin.templatetags.log
#import django.contrib.admin.templatetags.adminmedia
import django.conf.urls.shortcut
import django.views.defaults
from django.core.handlers.wsgi import WSGIHandler
#from django.core.servers.basehttp import AdminMediaHandler
from django.conf import settings
from django.utils import translation
import django.contrib.staticfiles.urls
if __name__ == "__main__":
if len(sys.argv)==1:
sys.argv.append("runserver")
sys.argv.append("0.0.0.0:8000")
else:
webbrowser.open_new_tab('http://127.0.0.1:8000')
print sys.argv
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| [
"[email protected]"
]
| |
b4bd715fdfbb95ac52bb29757e9e40e5b5bcfe71 | d57996fd2c6f92fdb457e8a35f6eb03a54dcf147 | /code_window/ScreenGeometry.py | d04fcc0d6da4137a249e55da885c47d80a1c34a9 | []
| no_license | ActonMartin/UI | 43aab17874d0e24bc35989b847bbe6d54c825e31 | 025d181dd6557822c28cd49af84ab9ffd3a0f274 | refs/heads/master | 2020-12-10T22:30:22.910870 | 2020-01-14T01:30:04 | 2020-01-14T01:30:04 | 233,730,152 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,215 | py | import sys
from PyQt5.QtWidgets import QApplication,QMainWindow,QWidget,QPushButton
def onclick_button():
    """Button click handler: print the window's position/size as reported
    by the three Qt coordinate APIs so their differences can be compared.
    """
    # 1) Direct accessors: x()/y() give the top-left corner including
    #    the window frame (per Qt's window-geometry docs), width()/height()
    #    give the client-area size.
    print('1')
    print("widget.x()= %d" % widget.x())
    print("widget.y()=%d" % widget.y())
    print('widget.width()=%d' % widget.width())
    print('widget.height()=%d' % widget.height())
    # 2) geometry(): the client area's rectangle, excluding the frame.
    print('2')
    print("widget.geometry().x()=%d" % widget.geometry().x())
    print("widget.geometry().y()=%d" % widget.geometry().y())
    print("widget.geometry().width()=%d" % widget.geometry().width())
    print("widget.geometry().height()=%d" % widget.geometry().height())
    # 3) frameGeometry(): the full rectangle including the window frame.
    print('3')
    print("widget.frameGeometry().x()=%d" % widget.frameGeometry().x())
    print("widget.frameGeometry().y()=%d" % widget.frameGeometry().y())
    print("widget.frameGeometry().width()=%d" % widget.frameGeometry().width())
    print("widget.frameGeometry().height()=%d" % widget.frameGeometry().height())
# Demo: build a single window with one button; clicking the button dumps
# the window's coordinates via the onclick_button handler defined above.
app = QApplication(sys.argv)
widget = QWidget()
bth = QPushButton(widget)
bth.setText('按钮')  # button caption ("Button")
bth.clicked.connect(onclick_button)
bth.move(24,100)
widget.resize(300,200)  # size of the client (work) area
widget.move(250,200)
widget.setWindowTitle('屏幕坐标系')  # window title ("screen coordinate system")
widget.show()
sys.exit(app.exec_())
| [
"[email protected]"
]
| |
cf95ac327ce7cd47aa6c9d323ed27a1f6cb0762e | 0c36a11f1a8659b7691f3f612030b63e40e65869 | /chan/html_净值曲线_opt.py | 6bea437aae3389736500f304f7902ecba6388317 | []
| no_license | webclinic017/joinquantNew | 93af36b540b20cc1cb27f97b2ede11f88ec1f9e2 | 0cbadfeba4d0629471773304f6541981957965e9 | refs/heads/master | 2023-01-12T00:42:19.419870 | 2020-11-16T09:19:53 | 2020-11-16T09:19:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,332 | py | # -*- coding: utf-8 -*-
# @Time : 2020/6/30 10:15
# @Author : zhangfang
import pandas as pd
# import html5lib
from trading_future.future_singleton import Future
from backtest_func import yearsharpRatio, maxRetrace, annROR, annROR_signal
import matplotlib.pyplot as plt
import numpy as np
def PowerSetsRecursive(items):
    """Return the power set of *items* as a list of lists.

    Starts from the empty subset and, for each element, appends copies
    of all existing subsets extended with that element, so the result
    order matches the original implementation.
    """
    subsets = [[]]
    for element in items:
        subsets += [existing + [element] for existing in subsets]
    return subsets
if __name__ == "__main__":
level_lst = [i for i in range(1, 6)]
level_lst = [5]
date_lst = [('2015-01-01', '2020-01-01'), ('2015-01-01', '2020-07-01')]
# date_lst = [('2015-01-01', '2020-01-01')]
method = 'sum' # 单利:sum;复利:muti
fee = np.float(0.00015)
fold_ini_path = 'c://g//缠论//回测报告//'
# fold_ini_path = 'G://缠论//回测报告//'
porfolio = Future()
mode = '蓝线笔_蓝线反转确认_蓝线反转平仓_200627'
code_lst_5 = ['ap', 'j', 'rb', 'i', 'fu', 'sm', 'if', 'v', 'zn', 'pp', 'ni', 'pb'] # 所有5分钟夏普>0
code_lst_15 = ['v', 'sm', 'sf', 'ap', 'ni', 'j', 'i', 'if', 'hc', 'cu', 'al', 'pp', 'zc', 'rb', 'c', 'zn',
'ag', 'pb', 'sc', 'sr', 'fu'] # 所有15分钟夏普>0
code_lst_30 = ['zc', 'v', 'ap', 'sm', 'if', 'al', 'rb', 'j', 'sc', 'fu', 'i', 'ta', 'sf', 'hc', 'pp'] # 所有30分钟夏普>0
code_lst_60 = ['ap', 'hc', 'j', 'rb', 'sc', 'al', 'ni', 'sf', 'fu', 'ta', 'zc', 'v',
'bu', 'i', 'sm', 'm', 'ma', 'tf', 'zn'] # 所有60分钟夏普>0
code_lst_240 = ['al', 'cu', 'v', 'i', 'ma', 'j', 'zn', 'jm', 'fu', 'bu', 'rb',
'sm', 'ta', 'p', 'zc', 'hc', 'c', 'pp', 'if', 'ru', 'm', 'pb'] # 所有4小时夏普>0
code_lst_1440 = ['v', 'ma', 'fu', 'cu', 'j', 'au', 'cf', 'c', 'ta', 'pp', 'sf', 'ag', 'jm', 'sr', 'oi', 'tf', 'if', 'hc',
'bu', 'zn', 'sm'] # 所有日级别夏普>0
code_dict = {}
code_dict['5'] = code_lst_5
code_dict['15'] = code_lst_15
code_dict['30'] = code_lst_30
code_dict['60'] = code_lst_60
code_dict['240'] = code_lst_240
code_dict['1440'] = code_lst_1440
# code_lst = ['ma', 'ta', 'c', 'bu', 'sf', 'v', 'sm', 'hc', 'rb', 'pp', 'p', 'zc', 'ag', 'al', 'i',
# 'pb', 'ap', 'zn'] # 保证金<10000的品种18个
# code_lst = ['ma', 'ta', 'c', 'bu', 'sf', 'v', 'sm', 'hc', 'rb', 'pp', 'p'] # 保证金<5000的品种11个
ret = {}
ret['symbol'] = []
ret['tm'] = []
ret['start_time'] = []
ret['end_time'] = []
ret['复盘模型'] = []
ret['K线数量'] = []
ret['盈利比'] = []
ret['trading_times'] = []
ret['盈利次数'] = []
ret['平均盈利'] = []
ret['平均亏损'] = []
ret['点差'] = []
ret['sharp'] = []
ret['ann_return'] = []
ret['max_drawdown'] = []
ret['level'] = []
porfolio_lst = []
for level in level_lst:
for (s_date, e_date) in date_lst:
for period in [5, 15, 30, 60, 240, 1440]:
mode_period = mode + '_' + str(period) + '分钟'
fold_path = fold_ini_path + mode_period + '//'
chg_df_all = pd.DataFrame(columns=['date_time'])
code_lst = code_dict[str(period)]
for code in code_lst:
print(code)
html = pd.read_html(fold_path + code + '.htm', encoding='gbk')
state = html[0]
print(state)
html = pd.read_html(fold_path + code + '.htm', encoding='gbk', header=0, index_col=0)
trade = html[1]
profit_df_all = trade[['时间', '获利']].rename(columns={'时间': 'date_time', '获利': 'profit'}).fillna(value=0)
profit_df_all['date_time'] = profit_df_all['date_time'].apply(lambda x: x[:4] + '-' + x[5:7] + '-' + x[8:10])
profit_df_all = profit_df_all.groupby(['date_time'])
profit_df = profit_df_all.sum()
profit_df['count'] = profit_df_all.count()
# trade_times_everyday = count_df.profit.mean()
profit_df['date_time'] = profit_df.index
profit_df = profit_df.assign(date_time=lambda df: df.date_time.apply(lambda x: str(x)[:10]))
profit_df = profit_df.reset_index(drop=True)
hq = pd.read_csv('e:/data/future_index/' + code.upper() + '_' + 'daily' + '_index.csv')[
['date_time', 'close']].assign(date_time=lambda df: df.date_time.apply(lambda x: str(x)[:10]))
hq = hq[(hq['date_time'] > s_date) & (hq['date_time'] < e_date)]
contract_lst = [code.upper()]
VolumeMultiple = porfolio.get_VolumeMultiple(contract_lst)[code.upper()]['VolumeMultiple']
print(VolumeMultiple)
aum_ini = hq.close.tolist()[0] * VolumeMultiple * 2 * level
profit_df = hq.merge(profit_df, on=['date_time'], how='left').sort_values(['date_time'])
# profit_df = profit_df.fillna(0)
profit_df['chg'] = (profit_df['profit'] - profit_df['close'].shift(1) * profit_df['count'] * VolumeMultiple * fee) * level / profit_df['close'].shift(1) / (VolumeMultiple * 2)
profit_df = profit_df.fillna(0)
if method == 'sum':
profit_df['net'] = 1 + profit_df['chg'].cumsum()
else:
profit_df['net'] = (1 + profit_df['chg']).cumprod()
print(profit_df)
net_lst = profit_df.net.tolist()
chg_df_ = profit_df.reset_index(drop=False)[['date_time', 'chg']].rename(columns={'chg': 'chg_' + code})
chg_df_all = chg_df_all.merge(chg_df_, on=['date_time'], how='outer')
chg_df_all = chg_df_all.fillna(value=0)
chg_df = chg_df_all.sort_values(['date_time']).set_index(['date_time'])
chg_name = ['chg_' + m for m in code_lst]
chg_df['chg'] = chg_df[chg_name].sum(axis=1) / len(code_lst)
if method == 'sum':
chg_df['net'] = 1 + chg_df['chg'].cumsum()
else:
chg_df['net'] = (1 + chg_df['chg']).cumprod()
chg_df = chg_df.reset_index(drop=False)
chg_df['date_time'] = pd.to_datetime(chg_df['date_time'])
chg_df = chg_df.set_index(['date_time'])
chg_df.ix[:, ['net']].plot()
sharpe_ratio = yearsharpRatio(chg_df['net'].tolist(), 1)
if method == 'sum':
ann_return = annROR_signal(chg_df['net'].tolist(), 1)
else:
ann_return = annROR(chg_df['net'].tolist(), 1)
max_drawdown = maxRetrace(chg_df['net'].tolist(), 1)
porfolio_row = []
porfolio_row.append(int(level))
porfolio_row.append(len(code_lst))
porfolio_row.append(str(period))
porfolio_row.append(fee)
porfolio_row.append(sharpe_ratio)
porfolio_row.append(ann_return)
porfolio_row.append(max_drawdown)
porfolio_row.append(s_date)
porfolio_row.append(e_date)
porfolio_lst.append(porfolio_row)
title_str = '品种%s个 周期%sm sharp %.2f annRet %.2f 回撤 %.2f 杠杆%s' % (str(len(code_lst)), str(period),
sharpe_ratio, 100 * ann_return, 100 * max_drawdown, int(level))
plt.rcParams['font.sans-serif'] = ['SimHei']
plt.title(title_str)
plt.savefig(fold_ini_path + 'fig/' + str(len(code_lst)) + '_' + str(period) + 'm' + '_fee_opt' + '.png')
plt.show()
porfolio_state = pd.DataFrame(porfolio_lst, columns=['杠杆率', '品种数', 'period', 'fee', 'sharpe_ratio', 'ann_return',
'max_drawdown', 's_date', 'e_date'])
porfolio_state.to_excel(fold_ini_path + 'state_blue_line//state_porfolio_signal_period_' + method + '_opt.xlsx',
encoding='gbk')
| [
"[email protected]"
]
| |
a5b99c9146c91010ea8b0e69f78a0527a1dca3d0 | 81539aba88c22cf75bd2e14f5e0e92f2bf54e962 | /DarkMatterMap2017/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV_madgraph_mcatnlo_pythia8/TTbarDMJets_Dilepton_scalar_LO_Mchi-1_Mphi-500_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV_madgraph_mcatnlo_pythia8_40000_0_cff.py | d3a71ea14c4eac16bb430934b4ba72d3d74fa72c | []
| no_license | nistefan/RandomizedParametersSeparator | ad35b48b95e9745814c0bf9d8d8b6eb8aa479177 | 66a0e291b59113c6b5301768f1c10e36cf23d3c3 | refs/heads/master | 2021-01-03T00:41:17.415005 | 2020-02-19T13:30:54 | 2020-02-19T13:30:54 | 239,838,928 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,491 | py | import FWCore.ParameterSet.Config as cms
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
readFiles = cms.untracked.vstring()
source = cms.Source ("PoolSource",fileNames = readFiles, lumisToProcess = cms.untracked.VLuminosityBlockRange(*('1:11633', '1:11932', '1:11947', '1:11296', '1:13328', '1:13336', '1:13344', '1:13593', '1:13624', '1:13256', '1:13354', '1:13784', '1:13785', '1:13799', '1:13708', '1:13334', '1:13907', '1:13857', '1:13415', '1:25538', '1:25655', '1:25879', '1:25906', '1:29277', '1:29289', '1:29415', '1:29451', '1:29474', '1:29506', '1:30092', '1:30094', '1:31105', '1:30142', '1:11387', '1:11523', '1:13151', '1:13890', '1:22059', '1:22299', '1:29009', '1:29525', '1:29538', '1:29577', '1:29590', '1:29601', '1:29653', '1:30305', '1:30423', '1:30389', '1:30631', '1:31760', '1:31763', '1:31216', '1:31376', '1:31967', '1:31978', '1:31501', '1:31516', '1:31642', '1:31645', '1:89925', '1:90017', '1:90039', '1:90069', '1:101131', '1:101281', '1:90052', '1:101306', '1:101313', '1:13847', '1:13879', '1:22079', '1:22963', '1:22965', '1:90063', '1:101335', '1:101365', '1:101369', '1:101383', '1:101405', '1:101407', '1:101374', '1:101694', '1:13145', '1:31139', '1:89605', '1:89885', '1:89859', '1:89899', '1:89575', '1:89937', '1:90615', '1:29212', '1:29215', '1:29290', '1:29298', '1:29008', '1:29067', '1:29114', '1:29141', '1:29145', '1:29294', '1:89895', '1:90145', '1:90150', '1:101410', '1:90457', '1:90461', '1:90471', '1:90474', '1:90479', '1:90494', '1:90505', '1:90530', '1:90506', '1:90509', '1:90517', '1:90582', '1:90594', '1:90595', '1:29376', '1:29389', '1:29329', '1:30024', '1:30030', '1:30042', '1:31584', '1:31666', '1:31758', '1:31704', '1:11431', '1:13003', '1:13244', '1:13660', '1:13922', '1:13945', '1:11512', '1:89343', '1:101454', '1:90368', '1:31928', '1:89004', '1:89099', '1:89101', '1:89103', '1:89120', '1:89232', '1:89244', '1:30975', '1:89347', '1:89686', '1:89348', '1:89363', '1:89323', '1:89781', '1:89683', '1:101431', '1:101470', '1:89002', '1:101586', '1:101200', '1:90398', '1:101126', '1:101671', '1:11336', '1:11367', '1:11754', '1:11759', '1:11987', 
'1:13100', '1:13749', '1:25830', '1:30544', '1:30570', '1:31806', '1:31833', '1:31842', '1:90414', '1:101456', '1:101542', '1:90384', '1:22295', '1:22552', '1:25154', '1:25284', '1:29179', '1:29336', '1:25915', '1:31024', '1:31812', '1:90381', '1:101437', '1:101462', '1:101590', '1:90388', '1:90426', '1:90437', '1:101559', '1:101792', '1:89787', '1:89152', '1:90321', '1:101548', '1:11543', '1:11586', '1:11604', '1:11626', '1:11638', '1:11797', '1:11831', '1:11698', '1:11702', '1:11766', '1:11908', '1:22545', '1:22640', '1:22672', '1:22727', '1:22744', '1:22383', '1:22496', '1:11503', '1:11509', '1:11670', '1:11351', '1:13835', '1:13047', '1:13073', '1:13139', '1:13211', '1:13121', '1:13515', '1:13521', '1:13314', '1:13345', '1:13346', '1:13702', '1:13705', '1:13706', '1:13846', '1:29182', '1:31306', '1:31342', '1:31250', '1:31445', '1:89953', '1:101091', '1:101193', '1:101053', '1:101187', '1:90811', '1:90862', '1:90865', '1:90880', '1:90902', '1:90905', '1:90939', '1:90985', '1:22095', '1:22112', '1:22157', '1:22100', '1:22162', '1:22447', '1:25428', '1:25432', '1:25567', '1:25574', '1:29104', '1:29191', '1:29193', '1:29070', '1:29206', '1:29578', ))
)
readFiles.extend( ['/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/BA08AC92-32EF-E911-82D8-441EA1616DEA.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/1A47AC00-7B03-EA11-9EE6-0CC47AF973C2.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/DA862DF2-9F11-EA11-A5FB-C4346BC75558.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/DC992538-9910-EA11-BF7F-0CC47A5FC281.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/163497F8-2D11-EA11-A132-6C2B599A050D.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/0A3BC253-8411-EA11-A6F0-D8D385AF891A.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/562C1852-3411-EA11-A53F-8CDCD4A9A484.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/E6105D38-A311-EA11-9524-0CC47A7E6A5C.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/D86A4CB2-9014-EA11-8CA9-F01FAFE5CF52.root', 
'/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/C0549F53-A714-EA11-8560-782BCB46E733.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/70D29667-41FB-E911-818C-38EAA78D8ACC.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/B8C911EE-29F0-E911-8638-0CC47AF971DE.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/0E7BC3B3-0AFA-E911-8213-98039B3B01B2.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/DE54B8C9-DA10-EA11-89A8-A4BF01125538.root', '/store/mc/RunIIFall17MiniAODv2/TTbarDMJets_Dilepton_scalar_LO_TuneCP5_13TeV-madgraph-mcatnlo-pythia8/MINIAODSIM/PU2017_12Apr2018_rp_94X_mc2017_realistic_v14-v1/40000/2ABD4E37-8F03-EA11-815A-1866DA86CCDF.root']); | [
"[email protected]"
]
| |
9b108dc93dae3c185f5fd4ad4d94489c27095057 | 971e0efcc68b8f7cfb1040c38008426f7bcf9d2e | /tests/model_control/detailed/transf_Anscombe/model_control_one_enabled_Anscombe_MovingAverage_Seasonal_DayOfWeek_MLP.py | b2da5c3d66196e8f7a7296e7bbcc4ee517c2f8a1 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | antoinecarme/pyaf | a105d172c2e7544f8d580d75f28b751351dd83b6 | b12db77cb3fa9292e774b2b33db8ce732647c35e | refs/heads/master | 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 | BSD-3-Clause | 2023-03-08T21:45:40 | 2016-10-13T09:30:30 | Python | UTF-8 | Python | false | false | 164 | py | import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Anscombe'] , ['MovingAverage'] , ['Seasonal_DayOfWeek'] , ['MLP'] ); | [
"[email protected]"
]
| |
0029fb3267463c9804af961f0c25a555fe4d1786 | fc29ccdcf9983a54ae2bbcba3c994a77282ae52e | /Leetcode_By_Topic/dp_2seq-583.py | 044a5edc5199781c6b697036ab50df590e614417 | []
| no_license | linnndachen/coding-practice | d0267b197d9789ab4bcfc9eec5fb09b14c24f882 | 5e77c3d7a0632882d16dd064f0aad2667237ef37 | refs/heads/master | 2023-09-03T19:26:25.545006 | 2021-10-16T16:29:50 | 2021-10-16T16:29:50 | 299,794,608 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,081 | py |
class Solution:
"""
def minDistance(self, word1: str, word2: str) -> int:
m, n = len(word1), len(word2)
dp = [[0] * (n+1) for _ in range(m+1)]
# edges
for i in range(1, m+1):
dp[i][0] = i
for j in range(1, n+1):
dp[0][j] = j
for i in range(1, m+1):
for j in range(1, n+1):
if word1[i-1] == word2[j-1]:
dp[i][j] = dp[i-1][j-1]
else:
dp[i][j] = min(dp[i-1][j]+1, dp[i][j-1]+1)
return dp[-1][-1]
"""
def minDistance(self, w1, w2):
m, n = len(w1), len(w2)
dp = [[0] * (n + 1) for i in range(m + 1)]
for i in range(m):
for j in range(n):
# print(0+(w1[i] == w2[j]), (w1[i] == w2[j]))
dp[i + 1][j + 1] = max(dp[i][j + 1], dp[i + 1][j], \
dp[i][j] + (w1[i] == w2[j]))
# total len - longest common len
return m + n - 2 * dp[m][n] | [
"[email protected]"
]
| |
dcddd89959f1064a7904ec4071f2b74a51df8bab | ad212b92beac17c4d061848c1dcd443d02a168c8 | /python/0454_4sum_II/counters.py | 397e23695a0aeceb0cf68e0a3664427ecddf74b0 | []
| no_license | 21eleven/leetcode-solutions | 5ec97e4391c8ebaa77f4404a1155f3ef464953b3 | 35c91e6f5f5ed348186b8641e6fc49c825322d32 | refs/heads/master | 2023-03-03T10:22:41.726612 | 2021-02-13T21:02:13 | 2021-02-13T21:02:13 | 260,374,085 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,037 | py | """
454. 4Sum II
Medium
Given four lists A, B, C, D of integer values, compute how many tuples (i, j, k, l) there are such that A[i] + B[j] + C[k] + D[l] is zero.
To make problem a bit easier, all A, B, C, D have same length of N where 0 ≤ N ≤ 500. All integers are in the range of -228 to 228 - 1 and the result is guaranteed to be at most 231 - 1.
Example:
Input:
A = [ 1, 2]
B = [-2,-1]
C = [-1, 2]
D = [ 0, 2]
Output:
2
Explanation:
The two tuples are:
1. (0, 0, 0, 1) -> A[0] + B[0] + C[0] + D[1] = 1 + (-2) + (-1) + 2 = 0
2. (1, 1, 0, 0) -> A[1] + B[1] + C[0] + D[0] = 2 + (-1) + (-1) + 0 = 0
"""
class Solution:
    def fourSumCount(self, A: List[int], B: List[int], C: List[int], D: List[int]) -> int:
        """Count index tuples (i, j, k, l) with A[i]+B[j]+C[k]+D[l] == 0.

        Meet-in-the-middle: tally all pairwise sums of A+B and of C+D,
        then match complementary sums - O(N^2) time instead of the
        brute-force O(N^4).
        """
        ab_sums = Counter(a + b for a in A for b in B)
        cd_sums = Counter(c + d for c in C for d in D)
        # A missing key in a Counter yields 0, so non-matching sums
        # contribute nothing to the total.
        return sum(freq * cd_sums[-total] for total, freq in ab_sums.items())
| [
"[email protected]"
]
| |
68406e79dbfbab56c0154d5e17417617f7e23b02 | 2c3367295e679b12c8ceb78745f2db7ebe338e18 | /opentech/apply/funds/migrations/0018_add_addressfield.py | f04ee0ee4c552d84013ebbc64d5d3c6d1ed0efa1 | [
"BSD-2-Clause"
]
| permissive | stdevteam/opentech.fund | 9f0d4b6274d1524207837bf54d21dfa8284cad3c | 6888dc5aa1a8c60f17629dff03877412275e08a5 | refs/heads/master | 2022-05-11T16:53:19.075549 | 2022-04-05T12:04:47 | 2022-04-05T12:04:47 | 161,327,565 | 0 | 0 | NOASSERTION | 2018-12-11T12:02:07 | 2018-12-11T12:02:07 | null | UTF-8 | Python | false | false | 7,292 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-02-05 11:53
from __future__ import unicode_literals
from django.db import migrations
import opentech.apply.categories.blocks
import wagtail.core.blocks
import wagtail.core.blocks.static_block
import wagtail.core.fields
class Migration(migrations.Migration):
dependencies = [
('funds', '0017_round_workflow'),
]
operations = [
migrations.AlterField(
model_name='applicationform',
name='form_fields',
field=wagtail.core.fields.StreamField((('text_markup', wagtail.core.blocks.RichTextBlock(group='Other', label='Paragraph')), ('char', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('required', wagtail.core.blocks.BooleanBlock(label='Required', required=False)), ('format', wagtail.core.blocks.ChoiceBlock(choices=[('email', 'Email'), ('url', 'URL')], label='Format', required=False)), ('default_value', wagtail.core.blocks.CharBlock(label='Default value', required=False))), group='Fields')), ('text', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('required', wagtail.core.blocks.BooleanBlock(label='Required', required=False)), ('default_value', wagtail.core.blocks.TextBlock(label='Default value', required=False))), group='Fields')), ('number', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('required', wagtail.core.blocks.BooleanBlock(label='Required', required=False)), ('default_value', wagtail.core.blocks.CharBlock(label='Default value', required=False))), group='Fields')), ('checkbox', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('default_value', wagtail.core.blocks.BooleanBlock(required=False))), group='Fields')), ('radios', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('required', wagtail.core.blocks.BooleanBlock(label='Required', required=False)), ('choices', 
wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Choice')))), group='Fields')), ('dropdown', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('required', wagtail.core.blocks.BooleanBlock(label='Required', required=False)), ('choices', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Choice')))), group='Fields')), ('checkboxes', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('required', wagtail.core.blocks.BooleanBlock(label='Required', required=False)), ('checkboxes', wagtail.core.blocks.ListBlock(wagtail.core.blocks.CharBlock(label='Checkbox')))), group='Fields')), ('date', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('required', wagtail.core.blocks.BooleanBlock(label='Required', required=False)), ('default_value', wagtail.core.blocks.DateBlock(required=False))), group='Fields')), ('time', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('required', wagtail.core.blocks.BooleanBlock(label='Required', required=False)), ('default_value', wagtail.core.blocks.TimeBlock(required=False))), group='Fields')), ('datetime', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('required', wagtail.core.blocks.BooleanBlock(label='Required', required=False)), ('default_value', wagtail.core.blocks.DateTimeBlock(required=False))), group='Fields')), ('image', wagtail.core.blocks.StructBlock((('field_label', 
wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('required', wagtail.core.blocks.BooleanBlock(label='Required', required=False))), group='Fields')), ('file', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('required', wagtail.core.blocks.BooleanBlock(label='Required', required=False))), group='Fields')), ('rich_text', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('required', wagtail.core.blocks.BooleanBlock(label='Required', required=False)), ('default_value', wagtail.core.blocks.TextBlock(label='Default value', required=False))), group='Fields')), ('category', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(help_text='Leave blank to use the default Category label', label='Label', required=False)), ('help_text', wagtail.core.blocks.TextBlock(label='Leave blank to use the default Category help text', required=False)), ('required', wagtail.core.blocks.BooleanBlock(label='Required', required=False)), ('category', opentech.apply.categories.blocks.ModelChooserBlock('categories.Category')), ('multi', wagtail.core.blocks.BooleanBlock(label='Multi select', required=False))), group='Custom')), ('title', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('info', wagtail.core.blocks.static_block.StaticBlock())), group='Required')), ('value', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('info', wagtail.core.blocks.static_block.StaticBlock())), 
group='Required')), ('email', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('info', wagtail.core.blocks.static_block.StaticBlock())), group='Required')), ('address', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('info', wagtail.core.blocks.static_block.StaticBlock())), group='Required')), ('full_name', wagtail.core.blocks.StructBlock((('field_label', wagtail.core.blocks.CharBlock(label='Label')), ('help_text', wagtail.core.blocks.TextBlock(label='Help text', required=False)), ('info', wagtail.core.blocks.static_block.StaticBlock())), group='Required')))),
),
]
| [
"[email protected]"
]
| |
3a973056015b59391f41788261dbc500cc430e55 | b4f9aa1c939bb9c97bcf7f925ed09bfe0ab2abf3 | /zentral/core/queues/backends/kombu.py | a80ec755b741496890833e5c84fb16cb642f9e7d | [
"Apache-2.0"
]
| permissive | gwhitehawk/zentral | 26ef8b2f8e5f270e34c67077acdd44c3b0ca6ac5 | 156134aed3d7ff8a7cb40ab6f2269a763c316459 | refs/heads/main | 2023-03-02T03:26:49.124909 | 2021-02-12T16:04:22 | 2021-02-12T16:04:22 | 338,364,014 | 0 | 0 | Apache-2.0 | 2021-02-12T15:56:39 | 2021-02-12T15:56:39 | null | UTF-8 | Python | false | false | 9,510 | py | from importlib import import_module
import logging
import time
from zentral.conf import settings
from kombu import Connection, Consumer, Exchange, Queue
from kombu.mixins import ConsumerMixin, ConsumerProducerMixin
from kombu.pools import producers
from zentral.utils.json import save_dead_letter
logger = logging.getLogger('zentral.core.queues.backends.kombu')
# AMQP wiring for the event pipeline:
#   raw_events (direct) -> preprocess -> events (fanout) -> enrich
#   -> enriched_events (fanout) -> process / per-store queues
raw_events_exchange = Exchange('raw_events', type='direct', durable=True)
events_exchange = Exchange('events', type="fanout", durable=True)
# Queue feeding the enrich workers with serialized events.
enrich_events_queue = Queue('enrich_events',
                            exchange=events_exchange,
                            durable=True)
enriched_events_exchange = Exchange('enriched_events', type="fanout", durable=True)
# Queue feeding the process workers with enriched events.
process_events_queue = Queue('process_events',
                             exchange=enriched_events_exchange,
                             durable=True)
class BaseWorker:
    """Common plumbing shared by the queue workers: an optional metrics
    exporter with per-worker counters, and name-prefixed logging helpers."""

    name = "UNDEFINED"
    # (counter name, label name) pairs registered with the metrics exporter.
    # A tuple (was a mutable list) matches the subclasses' declarations and
    # cannot be accidentally mutated and shared across the class hierarchy.
    counters = ()

    def setup_metrics_exporter(self, *args, **kwargs):
        """Pop ``metrics_exporter`` from kwargs, register this worker's
        counters on it and start it; a missing/None exporter disables metrics."""
        self.metrics_exporter = kwargs.pop("metrics_exporter", None)
        if self.metrics_exporter:
            for name, label in self.counters:
                self.metrics_exporter.add_counter(name, [label])
            self.metrics_exporter.start()

    def inc_counter(self, name, label):
        # No-op when metrics are disabled.
        if self.metrics_exporter:
            self.metrics_exporter.inc(name, label)

    def log(self, msg, level, *args):
        # Prefix every message with the worker name for easier triage.
        logger.log(level, "{} - {}".format(self.name, msg), *args)

    def log_debug(self, msg, *args):
        self.log(msg, logging.DEBUG, *args)

    def log_info(self, msg, *args):
        self.log(msg, logging.INFO, *args)

    def log_error(self, msg, *args):
        self.log(msg, logging.ERROR, *args)
class PreprocessWorker(ConsumerProducerMixin, BaseWorker):
    """Consume raw events, run the app preprocessor matching the message
    routing key and publish the resulting events on the events exchange."""
    name = "preprocess worker"
    counters = (
        ("preprocessed_events", "routing_key"),
        ("produced_events", "event_type"),
    )

    def __init__(self, connection):
        self.connection = connection
        # preprocessors, keyed by the routing key each one handles
        self.preprocessors = {
            preprocessor.routing_key: preprocessor
            for preprocessor in self._get_preprocessors()
        }

    def _get_preprocessors(self):
        # Yield the preprocessors declared by each configured zentral app;
        # apps without a ``preprocessors`` module are silently skipped.
        for app in settings['apps']:
            try:
                preprocessors_module = import_module("{}.preprocessors".format(app))
            except ImportError:
                pass
            else:
                yield from getattr(preprocessors_module, "get_preprocessors")()

    def run(self, *args, **kwargs):
        self.log_info("run")
        super().setup_metrics_exporter(*args, **kwargs)
        super().run(*args, **kwargs)

    def get_consumers(self, _, default_channel):
        # One durable queue per preprocessor, bound by its routing key.
        queues = [
            Queue(preprocessor.routing_key, exchange=raw_events_exchange,
                  routing_key=preprocessor.routing_key, durable=True)
            for routing_key, preprocessor in self.preprocessors.items()
        ]
        return [Consumer(default_channel,
                         queues=queues,
                         accept=['json'],
                         callbacks=[self.do_preprocess_raw_event])]

    def do_preprocess_raw_event(self, body, message):
        """Dispatch one raw event to its preprocessor and publish the
        serialized results.  The message is always acked — even for an
        unknown/missing routing key — so it cannot loop forever."""
        routing_key = message.delivery_info.get("routing_key")
        if not routing_key:
            logger.error("Message w/o routing key")
        else:
            preprocessor = self.preprocessors.get(routing_key)
            if not preprocessor:
                logger.error("No preprocessor for routing key %s", routing_key)
            else:
                for event in preprocessor.process_raw_event(body):
                    self.producer.publish(event.serialize(machine_metadata=False),
                                          serializer='json',
                                          exchange=events_exchange,
                                          declare=[events_exchange])
                    self.inc_counter("produced_events", event.event_type)
        message.ack()
        self.inc_counter("preprocessed_events", routing_key or "UNKNOWN")
class EnrichWorker(ConsumerProducerMixin, BaseWorker):
    """Consume serialized events, enrich them via the supplied callable and
    re-publish them on the enriched events exchange."""
    name = "enrich worker"
    counters = (
        ("enriched_events", "event_type"),
        ("produced_events", "event_type"),
    )

    def __init__(self, connection, enrich_event):
        self.connection = connection
        # Callable yielding the enriched event(s) for one serialized event.
        self.enrich_event = enrich_event

    def run(self, *args, **kwargs):
        self.log_info("run")
        super().setup_metrics_exporter(*args, **kwargs)
        super().run(*args, **kwargs)

    def get_consumers(self, _, default_channel):
        return [Consumer(default_channel,
                         queues=[enrich_events_queue],
                         accept=['json'],
                         callbacks=[self.do_enrich_event])]

    def do_enrich_event(self, body, message):
        """Enrich one event and publish the results; on failure the message
        is requeued after a one second pause."""
        self.log_debug("enrich event")
        last_event_type = None
        try:
            for event in self.enrich_event(body):
                self.producer.publish(event.serialize(machine_metadata=False),
                                      serializer='json',
                                      exchange=enriched_events_exchange,
                                      declare=[enriched_events_exchange])
                self.inc_counter("produced_events", event.event_type)
                last_event_type = event.event_type
        except Exception as exception:
            logger.exception("Requeuing message with 1s delay: %s", exception)
            time.sleep(1)
            message.requeue()
        else:
            message.ack()
            # BUG fix: the original read ``event.event_type`` here, which
            # raised NameError whenever enrich_event() yielded no events.
            if last_event_type is not None:
                self.inc_counter("enriched_events", last_event_type)
class ProcessWorker(ConsumerMixin, BaseWorker):
    """Consume enriched events and hand each one to the process callback."""
    name = "process worker"
    counters = (
        ("processed_events", "event_type"),
    )

    def __init__(self, connection, process_event):
        self.connection = connection
        self.process_event = process_event

    def run(self, *args, **kwargs):
        self.log_info("run")
        super().setup_metrics_exporter(*args, **kwargs)
        super().run(*args, **kwargs)

    def get_consumers(self, _, default_channel):
        consumer = Consumer(default_channel,
                            queues=[process_events_queue],
                            accept=['json'],
                            callbacks=[self.do_process_event])
        return [consumer]

    def do_process_event(self, body, message):
        """Run the callback on one enriched event, then ack and count it."""
        self.log_debug("process event")
        event_type = body['_zentral']['type']
        self.process_event(body)
        message.ack()
        self.inc_counter("processed_events", event_type)
class StoreWorker(ConsumerMixin, BaseWorker):
    """Consume enriched events and persist each one in a single event store."""
    counters = (
        ("stored_events", "event_type"),
    )

    def __init__(self, connection, event_store):
        self.connection = connection
        self.event_store = event_store
        self.name = "store worker {}".format(self.event_store.name)
        # One durable queue per store, so every store sees every event.
        self.input_queue = Queue(('store_events_{}'.format(self.event_store.name)).replace(" ", "_"),
                                 exchange=enriched_events_exchange,
                                 durable=True)

    def run(self, *args, **kwargs):
        self.log_info("run")
        super().setup_metrics_exporter(*args, **kwargs)
        super().run(*args, **kwargs)

    def get_consumers(self, _, default_channel):
        return [Consumer(default_channel,
                         queues=[self.input_queue],
                         accept=['json'],
                         callbacks=[self.do_store_event])]

    def do_store_event(self, body, message):
        """Store one event; on failure, dead-letter the body and reject the
        message instead of requeuing it (a broken event would loop forever)."""
        self.log_debug("store event")
        event_type = body['_zentral']['type']
        try:
            self.event_store.store(body)
        except Exception:
            # fixed log message (was "Could add event to store %s")
            logger.exception("Could not add event to store %s", self.event_store.name)
            save_dead_letter(body, "event store {} error".format(self.event_store.name))
            message.reject()
        else:
            message.ack()
            self.inc_counter("stored_events", event_type)
class EventQueues:
    """Facade over the kombu broker: builds the pipeline workers and
    publishes raw or serialized events."""

    def __init__(self, config_d):
        self.backend_url = config_d['backend_url']
        self.transport_options = config_d.get('transport_options')
        self.connection = self._get_connection()

    def _get_connection(self):
        return Connection(self.backend_url,
                          transport_options=self.transport_options)

    def get_preprocess_worker(self):
        return PreprocessWorker(self._get_connection())

    def get_enrich_worker(self, enrich_event):
        return EnrichWorker(self._get_connection(), enrich_event)

    def get_process_worker(self, process_event):
        return ProcessWorker(self._get_connection(), process_event)

    def get_store_worker(self, event_store):
        return StoreWorker(self._get_connection(), event_store)

    def post_raw_event(self, routing_key, raw_event):
        """Publish a raw event on the direct raw_events exchange."""
        with producers[self.connection].acquire(block=True) as producer:
            producer.publish(raw_event,
                             serializer='json',
                             exchange=raw_events_exchange,
                             routing_key=routing_key,
                             declare=[raw_events_exchange])

    def post_event(self, event):
        """Serialize an event (without machine metadata) and fan it out."""
        serialized = event.serialize(machine_metadata=False)
        with producers[self.connection].acquire(block=True) as producer:
            producer.publish(serialized,
                             serializer='json',
                             exchange=events_exchange,
                             declare=[events_exchange])
| [
"[email protected]"
]
| |
44e5ad078fcf91fcaff4eac2ee6a76bff6f9f15f | 960f2de01a49c822e6e4afd9595bf60d75f7dacc | /fbpagespam.py | 9277c724f17a9799dca6b7ee27df9efc0c0fd4bc | []
| no_license | 2020saurav/random-scripts | adbf5461e4f98d51a02cf2ee3992bdb82c77dc86 | b339c47765a9e18f86565ea9639f5ff6a7691f46 | refs/heads/master | 2021-01-23T14:05:29.907049 | 2014-11-11T20:33:22 | 2014-11-11T20:33:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 683 | py | import requests
import time
import urllib2
import json
# Replace line 6 with actual token and put spam comment in line 16
TOKEN = 'put-your-access-token-here'
def get_posts():
    # Fetch the page's posts from the Facebook Graph API and return the
    # "data" list of the decoded JSON response.
    # NOTE: Python 2 code (urllib2); the page id is hard-coded in the URL.
    url = 'https://graph.facebook.com/v2.2/891971000853462/posts?access_token='+ TOKEN
    page = urllib2.urlopen(url)
    page = page.read()
    page = json.loads(page)
    return page["data"]
def comment(postId):
    """Post the spam comment on the given post, then pause for a second
    to stay under the API rate limit."""
    endpoint = 'https://graph.facebook.com/v2.2/' + postId + '/comments'
    message = 'SPAM'
    requests.post(endpoint,
                  data={'access_token': TOKEN, 'message': message},
                  verify=False)
    time.sleep(1)
if __name__ == '__main__':
    # Comment on every post of the page (Python 2 print statement below).
    posts = get_posts()
    for post in posts:
        comment(post["id"])
    print "Done"
| [
"[email protected]"
]
| |
15d7a7961a130a87360769e49d0c7d7af8ad7677 | 0f94496000c2b73c3721ecaf1d2a788e9215078a | /ckanta/commands.py | 2840890cc590c2b77a73e72c617d75189fb30772 | []
| no_license | hkmshb/ckanta | 8521ebeb802eae18eb6bfe633a15ba4fec505dee | b4f1ea42d750838d06b46cc28b5ca082e4430e8e | refs/heads/master | 2020-03-21T03:10:15.648468 | 2019-01-08T09:16:39 | 2019-01-08T09:16:39 | 138,039,640 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,109 | py | import re
import csv
import click
import logging
from itertools import chain
from urllib.parse import unquote
from furl import furl
from slugify import slugify
from collections import OrderedDict, namedtuple
from .common import CKANTAError, CKANObject, MembershipRole, ApiClient
_log = logging.getLogger()
class CommandError(CKANTAError):
    """Raised when the execution of a CKANTA command fails."""
class CommandBase:
    """Base class for CKANTA commands: validates the cli action args and
    captures the execution context."""

    # Object names a concrete command accepts; subclasses must override.
    TARGET_OBJECTS = []

    def __init__(self, context, **action_args):
        self._validate_action_args(action_args)
        self.api_client = context.client
        self.action_args = action_args
        self.context = context

    def _validate_action_args(self, args):
        """Check the cli action args; subclasses may extend this with
        command-specific checks.  Normalizes 'dataset' to CKAN's internal
        'package' object name in place."""
        assert 'object' in args, 'Target object to be listed required'
        target = args.get('object', None)
        assert target in self.TARGET_OBJECTS, (
            'Invalid target object. Any of these expected: {}'.format(
                self.TARGET_OBJECTS
            ))
        if target == 'dataset':
            args['object'] = 'package'
class ListCommand(CommandBase):
    """Retrieve and list objects from a CKAN instance."""
    TARGET_OBJECTS = ('dataset', 'group', 'organization', 'user')

    def _build_group_payload(self):
        # defaults first; cli-provided args override them
        return dict({'sort': 'name asc', 'all_fields': False},
                    **self.action_args)

    def _build_organization_payload(self):
        return dict({'sort': 'name asc', 'all_fields': False},
                    **self.action_args)

    def _build_package_payload(self):
        return dict(self.action_args)

    def _build_user_payload(self):
        return dict({'all_fields': False}, **self.action_args)

    def execute(self, as_get=True):
        """Call the ``<object>_list`` action with the matching payload."""
        target = self.action_args.pop('object')
        action_name = '{}_list'.format(target)
        builder_name = '_build_{}_payload'.format(target)
        builder = getattr(self, builder_name, None)
        if builder is None:
            errmsg = 'Payload builder method not found: {}'
            raise CommandError(errmsg.format(builder_name))
        payload = builder()
        _log.debug('action: {}, unified payload: {}'.format(
            action_name, payload
        ))
        try:
            return self.api_client(action_name, payload, as_get=as_get)
        except Exception as ex:
            raise CommandError('API request failed.') from ex
class ShowCommand(CommandBase):
    """Fetch and show a single object from a CKAN instance."""
    TARGET_OBJECTS = ('dataset', 'group', 'organization', 'user')

    def _validate_action_args(self, args):
        # beyond the base checks, a target object id is mandatory
        super()._validate_action_args(args)
        assert args.get('id', None) is not None, 'Target object Id required'

    def _build_dataset_payload(self):
        return {}

    def execute(self, as_get=True):
        """Call the ``<object>_show`` action for the requested id."""
        payload = {'id': self.action_args.pop('id')}
        action_name = '{}_show'.format(self.action_args.pop('object'))
        try:
            return self.api_client(action_name, payload, as_get=as_get)
        except Exception as ex:
            raise CommandError('API request failed.') from ex
class MembershipCommand(CommandBase):
    """List the organization (and optionally group) memberships of a user."""
    # pseudo target so the base-class validation passes for this command
    COMMAND = '::list'
    TARGET_OBJECTS = (COMMAND,)
    # API actions queried: organizations always, groups only on request
    TARGET_ACTIONS = ('organization_list_for_user', 'group_list_authz')

    def __init__(self, context, userid, check_group=False):
        super().__init__(context, object=self.COMMAND)
        self.check_group = check_group
        self.userid = userid

    def execute(self, as_get):
        """Return a list with one API result per queried action."""
        payload = {'id': self.userid}
        action_names = self.TARGET_ACTIONS
        _log.debug('action_names: {}; payload: {}'.format(
            action_names, payload)
        )
        results = []
        # only query group memberships when explicitly requested
        targets = action_names[:1] if not self.check_group else action_names
        try:
            for action_name in targets:
                # title = action_name.split('_')[0]
                result = self.api_client(action_name, payload, as_get)
                results.append(result)
        except Exception as ex:
            raise CommandError('API request failed.') from ex
        return results
class MembershipGrantCommand(CommandBase):
    """Grant a user access to datasets, or membership of groups or
    organizations, on a CKAN instance."""
    TARGET_OBJECTS = ('user',)

    def __init__(self, context, userid, role, objects, object_type):
        super().__init__(context, object='user')
        self.role = MembershipRole.from_name(role)
        self.object_type = object_type
        self.objects = objects
        self.userid = userid

    def _get_access_request_payload(self, object_id, user_dict):
        """Payload for a dataset access request made on behalf of the user
        described by ``user_dict``."""
        return {
            'fullname': '{} (via CKANTA)'.format(user_dict['display_name']),
            'email': user_dict['email'],
            'contact_phone_number': '000-0000-0000'.replace('-', ''),
            'entity_id': object_id,
            'entity_type': 'dataset',
            'description': (
                'Access request initiated from CKAN task automation tool '
                'for official purpose.'),
            'org_name': user_dict['org_name'],
            'org_category': user_dict['org_category'],
            'country_state': user_dict['country_state']
        }

    def _create_membership(self, object_id):
        """Add the user as a member of one group/organization with the
        requested role; dropping membership (role=none) is unsupported."""
        if self.role == MembershipRole.NONE:
            click.echo('Skipping operation as dropping membership (role=none) '
                       'is not supported yet')
            return
        role_name = self.role.name.lower()
        target_object = self.object_type.name.lower()
        action_name = '{}_member_create'.format(target_object)
        payload = {
            'id': object_id, 'username': self.userid,
            'role': role_name
        }
        self.api_client(action_name, data=payload, as_get=False)

    def _grant_dataset_access(self):
        """Request and approve dataset access for the user, one dataset at
        a time.  Requests are made while impersonating the user (via their
        apikey) and then approved as the user running the script."""
        passed, action_result = (0, [])
        total = len(self.objects)

        # 1: retrieve the profile (incl. apikey) of the user needing access
        _log.info('Retrieving details for user requiring access...')
        try:
            result = self.api_client('user_show', {'id': self.userid}, False)
            # BUG fix: keep the user profile in its own variable.  The
            # original reused ``result`` for every later API response, so
            # from the second dataset onward the payload and impersonation
            # client were built from the previous response, not the user.
            user_dict = result['result']
            _log.info('Requesting user details retrieved')
        except Exception:
            _log.info('Failed retrieving details for user needing access')
            return self._build_result_summary(action_result, total, passed)

        # 2: make one access request per dataset using the user's details
        fullname = user_dict['display_name']
        _log.info("Making access request as '{}'".format(fullname))
        for objectid in self.objects:
            try:
                # make the request as the user who needs access
                payload = self._get_access_request_payload(objectid, user_dict)
                client = ApiClient(self.api_client.urlbase, user_dict['apikey'])
                response = client('eoc_request_create', payload, False)
                request_id = response['result']['id']
                _log.info('Access request made for {}. Got: {}'.format(
                    objectid, request_id))

                # approve the request as the user running the script
                patch_payload = {'id': request_id, 'status': 'approved'}
                self.api_client('eoc_request_patch', patch_payload, False)
                _log.info('Access request granted\n')
                passed += 1
            except Exception as ex:
                action_result.append('. {}: err: {}'.format(objectid, ex))
        return self._build_result_summary(action_result, total, passed)

    def _build_result_summary(self, action_result, total, passed):
        return {
            'result': action_result,
            'summary': {
                'total': total, 'passed': passed, 'failed': total - passed
            }
        }

    def execute(self, as_get):
        if self.object_type == CKANObject.DATASET:
            result = self._grant_dataset_access()
        elif self.object_type in (CKANObject.GROUP, CKANObject.ORGANIZATION):
            passed, action_result = (0, [])
            for obj in self.objects:
                try:
                    self._create_membership(obj)
                    action_result.append('+ {}'.format(obj))
                    passed += 1
                except Exception as ex:
                    action_result.append('. {}: err: {}'.format(obj, ex))
            total = len(self.objects)
            result = self._build_result_summary(action_result, total, passed)
        return result
class UploadCommand(CommandBase):
    '''Creates an object on a CKAN instance.

    Rows are read from a CSV file (``infile``) and turned into one
    ``<object>_create`` API call each via per-object payload builder and
    factory methods looked up by name.
    '''
    NATIONAL_KEY = 'national:'
    TARGET_OBJECTS = ('group', 'organization')

    def _validate_action_args(self, args):
        '''Validates that action args provided on the cli are valid.
        Expects a file argument to be provided in addition to the object
        argument.
        '''
        # checks that object argument is provide
        super()._validate_action_args(args)
        # check that file argument is provided
        file_arg = args.get('infile', None)
        assert file_arg is not None, "'infile' argument expected"

    def _get_group_payload_factory(self, payload_method, file_obj):
        # yield one group-create payload per CSV row
        reader = csv.DictReader(file_obj, delimiter=',')
        for row in reader:
            yield payload_method(row)

    def _build_group_payload(self, row_dict):
        # missing name defaults to a slug of the title
        row_dict.setdefault('state', 'active')
        row_dict.setdefault('name', slugify(row_dict.get('title')))
        return row_dict

    def _get_organization_payload_factory(self, payload_method, file_obj):
        # columns named 'extras:<field>' become CKAN extras entries
        reader = csv.DictReader(file_obj, delimiter=',')
        extras = list(filter(lambda k: k.startswith('extras:'), reader.fieldnames))
        for row in reader:
            yield payload_method(row, extras)

    def _build_organization_payload(self, row_dict, extras=None):
        row_dict.setdefault('state', 'active')
        row_dict.setdefault('name', slugify(row_dict.get('title')))
        # handle extras: move 'extras:<field>' columns with non-empty
        # values into the CKAN key/value extras list
        extras_list = []
        for entry in (extras or []):
            _, field = [e for e in entry.split(':') if e][:2]
            value = row_dict.pop(entry)
            if value:
                extras_list.append({
                    'key': field,
                    'value': value
                })
        if extras_list:
            row_dict['extras'] = extras_list
        return row_dict

    def execute(self, as_get=True):
        """Create one object per CSV row; returns per-row results plus a
        total/passed/failed summary.

        NOTE(review): the ``as_get`` parameter is ignored — creation
        always POSTs (``as_get=False``) below.
        """
        file_obj = self.action_args.pop('infile')
        target_object = self.action_args.pop('object')
        action_name = '{}_create'.format(target_object)
        method_name = '_build_{}_payload'.format(target_object)
        if not hasattr(self, method_name):
            errmsg = 'Payload builder method not found: {}'
            raise CommandError(errmsg.format(method_name))
        payload_method = getattr(self, method_name)
        method_name = '_get_{}_payload_factory'.format(target_object)
        if not hasattr(self, method_name):
            errmsg = 'Payload factory method not found: {}'
            raise CommandError(errmsg.format(method_name))
        factory_method = getattr(self, method_name)
        _log.debug('action: {}, payload-method: {}, payload-factory: {}'.format(
            action_name, payload_method.__name__, factory_method.__name__)
        )
        factory = factory_method(payload_method, file_obj)
        passed, action_result = (0, [])
        for payload in factory:
            _log.debug('{} payload: {}'.format(target_object, payload))
            try:
                self.api_client(action_name, payload, as_get=False)
                action_result.append('+ {}'.format(payload.get('name', '?')))
                passed += 1
            except Exception as ex:
                _log.error('API request failed. {}'.format(ex))
                action_result.append('x {}'.format(payload.get('name', '?')))
        total_items = len(action_result)
        return {
            'result': action_result,
            'summary': {
                'total': total_items, 'passed': passed,
                'failed': total_items - passed
            }
        }
class UploadDatasetCommand(CommandBase):
    '''Create datasets on a CKAN instance.

    Each CSV row is expanded into one dataset per owner organization; the
    attached resource URL is either used as-is (http) or assembled from a
    GeoServer base URL plus config/cli query parameters.
    '''
    # placeholder in CQL filters replaced with the state code of the org
    VARTAG_STATE = '${state_code}'
    # matches an explicit statecode='XX' / state_code='XX' in a CQL filter
    REPTTN_STATE = re.compile("state(?:code|_code)='(.+)'")
    NATIONAL_KEY = 'national:'
    TARGET_OBJECTS = ('dataset',)
    TARGET_FORMATS = {
        'CSV': 'csv',
        'JSON': 'application/json',
        'GeoJSON': 'application/json',
    }

    def __init__(self, context, infile, owner_orgs, urlbase, authkey, format):
        super().__init__(context, object=self.TARGET_OBJECTS[0])
        self.infile = infile
        self.urlbase = urlbase
        self.authkey = authkey
        self.format = format
        # owner_orgs may come in as a comma-separated string from the cli
        if not isinstance(owner_orgs, (list, tuple)):
            owner_orgs = owner_orgs.split(',')
        self.owner_orgs = owner_orgs

    def _get_package_payload_factory(self, payload_method, file_obj):
        # one payload per (row, owner org) pair; the 'national:' prefix is
        # stripped before being used as owner_org/locations
        reader = csv.DictReader(file_obj, delimiter=',')
        norm = lambda n: n.replace(self.NATIONAL_KEY, '')
        for row in reader:
            for orgname in self.owner_orgs:
                row.setdefault('owner_org', norm(orgname))
                row.setdefault('locations', norm(orgname))
                yield payload_method(row, orgname)

    def _build_package_payload(self, row_dict, orgname):
        ## required package attributes:
        #  name, private, state:active, type:dataset, owner_org,
        #  sector_id, locations

        # adjust title: prefix with the state's full name for state-level
        # (non-national) datasets.
        # NOTE(review): assumes context.national_states[orgname] exposes
        # .name (full name) and .code (state code) — confirm against caller.
        if not orgname.startswith(self.NATIONAL_KEY):
            title = row_dict.pop('title')
            row_dict['title'] = '{} {}'.format(
                self.context.national_states[orgname].name,
                title
            )
        row_dict.setdefault('type', 'dataset')
        row_dict.setdefault('state', 'active')
        row_dict.setdefault('private', 'false')
        row_dict.setdefault('name', slugify(row_dict.get('title')))
        # use sector_id to define sector
        sector_id = row_dict.get('sector_id', '')
        row_dict['groups'] = [{'name': sector_id}]
        ## build resource
        res_dict = self._build_resource_payload(row_dict, orgname)
        if res_dict:
            row_dict['resources'] = [res_dict]
        return row_dict

    def _build_resource_payload(self, row_dict, orgname):
        ## required resource attributes
        #  res:name, res:url,
        ## optinal resource attributes
        #  res:description
        # collect the 'res:<field>' columns with non-empty values
        res_dict = {
            k[4:]: row_dict[k]
            for k in row_dict.keys()
            if k.startswith('res:') and k[4:] and row_dict[k]
        }
        # no resource without at least a url
        if not res_dict or 'url' not in res_dict:
            return

        # if name not provided use package title (minus the state prefix)
        org_fullname = self.context.national_states[orgname].name
        pkg_title = row_dict.get('title')
        if org_fullname in pkg_title:
            pkg_title = pkg_title.replace(org_fullname, '').strip()
        res_dict.setdefault('name', pkg_title)

        # process url further
        built_url = self._build_resource_url(res_dict['url'], orgname)
        res_dict['url'] = built_url
        return res_dict

    def _build_resource_url(self, res_url, orgname):
        """Return the final resource URL.

        An http(s) ``res_url`` is used as-is (unquoted); otherwise it is
        interpreted as ``<typeName>;<CQL_FILTER>`` and combined with the
        GeoServer base URL and query params from cli options/config.
        """
        built_url = None
        if res_url.startswith('http'):
            built_url = furl(unquote(res_url))
        else:
            config_name = 'grid-geoserver-urlbase'
            # cli-provided urlbase wins over the config file
            urlbase = self.urlbase or self.context.get_config(config_name)
            if not urlbase:
                raise CKANTAError(
                    "Please provide '-u/--urlbase' option or set "
                    "'{0}' in the config file".format(config_name)
                )
            built_url = furl(urlbase)
            # add in what we currently have as res_url
            for (key, value) in zip(
                ('typeName', 'CQL_FILTER'),
                [p.strip() for p in res_url.split(';')]
            ):
                built_url.args[key] = value
            # add in other stuff from config (only if not already present)
            for qryparam in (
                'service', 'version', 'request', 'outputFormat', 'authkey'
            ):
                if qryparam not in built_url.args:
                    conf_key = 'grid-geoserver-{}'.format(qryparam)
                    value = self.context.get_config(conf_key)
                    if not value:
                        raise CKANTAError(
                            "Please set '{0}' in the config file".format(conf_key)
                        )
                    built_url.args[qryparam] = value
        # overwrite query params provide on the cli
        for (param_key, param_value) in (
            ('outputFormat', self.format), ('authkey', self.authkey)
        ):
            if param_value:
                built_url.args[param_key] = param_value
        # replace state_code in CQL_FILTER
        # NOTE(review): presumably every URL reaching this point carries a
        # CQL_FILTER query param; a plain http URL without one would raise
        # KeyError here — confirm against callers.
        cql_filter = built_url.args['CQL_FILTER']
        state_code = self.context.national_states[orgname].code
        if self.VARTAG_STATE in cql_filter:
            cql_filter = cql_filter.replace(self.VARTAG_STATE, state_code)
            built_url.args['CQL_FILTER'] = cql_filter
        else:
            match = self.REPTTN_STATE.search(cql_filter)
            if match:
                found = match.groups()[0]
                cql_filter = cql_filter.replace(found, state_code)
                built_url.args['CQL_FILTER'] = cql_filter
        return built_url.url

    def execute(self, as_get=True):
        """Create one dataset per generated payload; returns per-row
        results plus a total/passed/failed summary.

        NOTE(review): mirrors UploadCommand.execute but reads from
        ``self.infile``; ``as_get`` is likewise ignored (always POSTs).
        """
        file_obj = self.infile
        target_object = self.action_args.pop('object')
        action_name = '{}_create'.format(target_object)
        method_name = '_build_{}_payload'.format(target_object)
        if not hasattr(self, method_name):
            errmsg = 'Payload builder method not found: {}'
            raise CommandError(errmsg.format(method_name))
        payload_method = getattr(self, method_name)
        method_name = '_get_{}_payload_factory'.format(target_object)
        if not hasattr(self, method_name):
            errmsg = 'Payload factory method not found: {}'
            raise CommandError(errmsg.format(method_name))
        factory_method = getattr(self, method_name)
        _log.debug('action: {}, payload-method: {}, payload-factory: {}'.format(
            action_name, payload_method.__name__, factory_method.__name__)
        )
        factory = factory_method(payload_method, file_obj)
        passed, action_result = (0, [])
        for payload in factory:
            _log.debug('{} payload: {}'.format(target_object, payload))
            try:
                self.api_client(action_name, payload, as_get=False)
                action_result.append('+ {}'.format(payload.get('name', '?')))
                passed += 1
            except Exception as ex:
                _log.error('API request failed. {}'.format(ex))
                action_result.append('x {}'.format(payload.get('name', '?')))
        total_items = len(action_result)
        return {
            'result': action_result,
            'summary': {
                'total': total_items, 'passed': passed,
                'failed': total_items - passed
            }
        }
class PurgeCommand(CommandBase):
    """Purge existing objects on a CKAN instance."""

    TARGET_OBJECTS = ('dataset', 'group')

    def __init__(self, context, object, infile, ids):
        super().__init__(context, object=object)
        self.infile = infile
        self.ids = ids

    def execute(self, as_get=False):
        """Purge every requested id; return one status line per object.

        Lines are prefixed ``+`` when the purge call succeeded and ``.``
        when it failed.
        """
        # "package" is an alias for "dataset" in the purge action names.
        obj_kind = self.action_args.pop('object').replace('package', 'dataset')
        action_name = '{}_purge'.format(obj_kind)
        # Ids may arrive as repeated options and/or comma-separated lists;
        # flatten everything and drop empty tokens.
        wanted = [
            token
            for raw in self.ids
            for token in raw.split(',')
            if token and token.strip() != ""
        ]
        # Additional ids may be supplied one-per-line through the input file.
        if self.infile:
            wanted.extend(line.strip() for line in self.infile.readlines())
        outcome = []
        for obj_id in wanted:
            try:
                self.api_client(action_name, {'id': obj_id}, as_get=as_get)
                outcome.append('+ {}'.format(obj_id))
            except Exception:
                outcome.append('. {}'.format(obj_id))
        return outcome
| [
"[email protected]"
]
| |
1081b155b8d6b430076939dd4c8bec7a57c484c7 | 33b5ef4f67e9c36d45990506e6f9f39d573ce730 | /folders/python/instagram/63def.py | 23caa9ce83278630e81abd66b2d763746fdb6fc7 | []
| no_license | dineshkumarkummara/my-basic-programs-in-java-and-python | 54f271e891e8d9dbdf479a9617e9355cbd0819e9 | b8a4cf455f4a057e382f7dda7581fad5b2f1e616 | refs/heads/master | 2022-12-12T06:02:55.558763 | 2020-08-19T12:14:39 | 2020-08-19T12:14:39 | 283,415,087 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 245 | py | #Creating a function in Python. Use the "def" keyword,
# give a function a name and list its arguments.
def greet(name):
    """Print a greeting that introduces *name*."""
    # f-string replaces the old '"..." + name + ""' concatenation,
    # which appended a pointless empty string.
    print(f"hello my name is {name}")


greet("sai")
greet("manu")
# "sai" and "manu" are the arguments passed to the greet function
#greet is a function name | [
"[email protected]"
]
| |
484d121c7184af439cc4bf82e09a9899d8150546 | 8f9a06f8c35a983df918a9418710bf1a81392819 | /backend/sprint_waste_servic_18730/settings.py | 350c0cedc85e8b474ac4bc6c36bb5f0a9a7ddfd5 | []
| no_license | crowdbotics-apps/sprint-waste-servic-18730 | c43e8543a225e37a34b1fbe6d3ea2645d3d493bf | 379a590074055c2b1066fda2932b1af11674b037 | refs/heads/master | 2022-11-13T21:53:35.860288 | 2020-07-08T21:34:46 | 2020-07-08T21:34:46 | 278,198,864 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,827 | py | """
Django settings for sprint_waste_servic_18730 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ

# Single env reader used for every externally-configurable setting below.
env = environ.Env()

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")

ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1

# Trust the proxy's X-Forwarded-Proto header when deciding if a request
# was HTTPS (typical behind Heroku/other reverse proxies).
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sites'
]
LOCAL_APPS = [
    'home',
    'users.apps.UsersConfig',
]
THIRD_PARTY_APPS = [
    'rest_framework',
    'rest_framework.authtoken',
    'rest_auth',
    'rest_auth.registration',
    'bootstrap4',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
    'allauth.socialaccount.providers.google',
    'django_extensions',
    'drf_yasg',
    # start fcm_django push notifications
    'fcm_django',
    # end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'sprint_waste_servic_18730.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'sprint_waste_servic_18730.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases

# SQLite fallback for local development; replaced below when DATABASE_URL
# is present in the environment.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
if env.str("DATABASE_URL", default=None):
    DATABASES = {
        'default': env.db()
    }

# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/

LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/

STATIC_URL = '/static/'

# NOTE(review): WhiteNoise docs recommend placing this middleware directly
# after SecurityMiddleware rather than appending it last — confirm.
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']

AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend',
    'allauth.account.auth_backends.AuthenticationBackend'
)

STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'static')
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'

# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)

REST_AUTH_SERIALIZERS = {
    # Replace password reset serializer to fix 500 error
    "PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
    # Use custom serializer that has no username and matches web signup
    "REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}

# Custom user model
AUTH_USER_MODEL = "users.User"

# Outgoing mail via SendGrid SMTP; credentials come from the environment.
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True

# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {
    "FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")
}
# end fcm_django push notifications

if DEBUG:
    # output email to console instead of sending
    EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
| [
"[email protected]"
]
| |
25e5547093fa72a5ff91fdf5c2545ec79a793127 | e5d059896640e25a57f29f5ec972c114f8ef5866 | /src/scs_analysis/socket_receiver.py | 68d886c62b7ddd4423cc75d222d28250d1d07be7 | [
"MIT"
]
| permissive | tonybushido/scs_analysis | 10add7b13cee29e1445ea18240bdb08e3bc908a4 | 1121be19c83b0d616772da42ea90623d6f6573c4 | refs/heads/master | 2021-01-03T03:11:31.474595 | 2020-02-11T14:27:32 | 2020-02-11T14:27:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,516 | py | #!/usr/bin/env python3
"""
Created on 18 Aug 2016
@author: Bruno Beloff ([email protected])
source repo: scs_analysis
DESCRIPTION
The socket_receiver utility is used to accept data via a Unix socket, with data sourced from the same host, or
another host on the same local area network. A socket_sender utility is provided for the purpose of sourcing data,
as part of the scs_dev package.
The socket_receiver utility should be started before socket_sender. When socket_sender terminates, socket_receiver
will also terminate.
If a port number is not specified, then port 2000 is used.
SYNOPSIS
socket_receiver.py [-p PORT] [-v]
EXAMPLES
socket_receiver.py -p 2002
SEE ALSO
scs_analysis/uds_receiver
scs_dev/socket_sender
BUGS
It is possible to create scenarios where a port becomes orphaned. Depending on host operating systems, orphaned ports
may take time to be garbage collected.
"""
import sys
from scs_analysis.cmd.cmd_socket_receiver import CmdSocketReceiver
from scs_host.comms.network_socket import NetworkSocket
# --------------------------------------------------------------------------------------------------------------------
if __name__ == '__main__':

    # ------------------------------------------------------------------
    # cmd... (parse command-line options: --port / --verbose)

    cmd = CmdSocketReceiver()

    if cmd.verbose:
        print("socket_receiver: %s" % cmd, file=sys.stderr)

    # Declared before the try so the finally clause can always test it.
    receiver = None

    try:
        # --------------------------------------------------------------
        # resources... (bind the listening socket on the requested port)

        receiver = NetworkSocket('', cmd.port)

        if cmd.verbose:
            print("socket_receiver: %s" % receiver, file=sys.stderr)
            sys.stderr.flush()

        # --------------------------------------------------------------
        # run... (echo each received message to stdout, then acknowledge)

        for message in receiver.read():
            print(message)
            sys.stdout.flush()

            # Acknowledge only after the message has been flushed.
            receiver.ack()

    # ------------------------------------------------------------------
    # end...

    except KeyboardInterrupt:
        if cmd.verbose:
            print("socket_receiver: KeyboardInterrupt", file=sys.stderr)

    # ------------------------------------------------------------------
    # close... (always release the socket, even on error/interrupt)

    finally:
        if receiver:
            receiver.close()
| [
"[email protected]"
]
| |
559053148361b7bb8b316ba9e69c87eb3b307856 | 08eea46f91eb71972b66cc34a620df2a2d15e6a7 | /Advanced/Theano/Course/MaxPool.py | fa657097f96c4f937a1bd0ac9f0b3d0150916605 | []
| no_license | jsa4000/Getting-Started-Python | ab2f1ce51c78ce870560ab466c8408c9f81717e5 | 8f7f107a93bb7578a00d531123ee7f5db61d807e | refs/heads/master | 2021-01-11T14:19:18.123093 | 2019-01-12T07:47:51 | 2019-01-12T07:47:51 | 81,347,158 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,543 | py | import theano
import theano.tensor as T
from theano.tensor.signal import downsample
import numpy

# Now what the Max Pooling algorithm does in a matrix.
# NOTE(review): ``input`` shadows the Python builtin of the same name.
input = T.dtensor4('input')
maxpool_shape = (2, 2)
# Pooling is applied over the LAST two axes of the 4-D tensor.
pool_out = downsample.max_pool_2d(input, maxpool_shape, ignore_border=True)
f = theano.function([input],pool_out)
invals = numpy.random.RandomState(1).rand(3, 2, 5, 5) # Max pool will take the last two indexes for the pooling
#print invals
# Maxpool takes a matrix (multiple features, convolutions, etc.), say 10x10.
# For every 2-D slice [m, n, :, :] of the 4-D input it generates a new,
# downsampled matrix according to maxpool_shape: the input is divided into
# maxpool_shape tiles and the maximum value of each tile is kept.
print 'With ignore_border set to True:'
print 'invals[0, 0, :, :] =\n', invals[0, 0, :, :]
print 'output[0, 0, :, :] =\n', f(invals)[0, 0, :, :]
pool_out = downsample.max_pool_2d(input, maxpool_shape, ignore_border=False)
f = theano.function([input],pool_out)
print 'With ignore_border set to False:'
print 'invals[1, 0, :, :] =\n ', invals[1, 0, :, :]
print 'output[1, 0, :, :] =\n ', f(invals)[1, 0, :, :]
# Important note:
# - If the matrix is 31 x 31 then max pool with shape (2,2) generates a new
#   matrix of 15x15, i.e. only the integer part of the division: int(31/2) = 15
# - If the matrix is 31 x 31 then max pool with shape (3,3) generates a new
#   matrix of 10x10 -> int(31/3) = 10
"[email protected]"
]
| |
bb3c3f608d70d420e3a814c30207c034df5c72ea | 508321d683975b2339e5292202f3b7a51bfbe22d | /Userset.vim/ftplugin/python/CompletePack/PySide2/QtGui/QMatrix4x4.py | 3a5014bf558cdafbdfc83ec79abdb7af136dd07e | []
| no_license | cundesi/vimSetSa | 4947d97bcfe89e27fd2727423112bb37aac402e2 | 0d3f9e5724b471ab21aa1199cc3b4676e30f8aab | refs/heads/master | 2020-03-28T05:54:44.721896 | 2018-08-31T07:23:41 | 2018-08-31T07:23:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,756 | py | # encoding: utf-8
# module PySide2.QtGui
# from C:\Program Files\Autodesk\Maya2017\Python\lib\site-packages\PySide2\QtGui.pyd
# by generator 1.145
# no doc
# imports
import PySide2.QtCore as __PySide2_QtCore
import Shiboken as __Shiboken
class QMatrix4x4(__Shiboken.Object):
# no doc
def column(self, *args, **kwargs): # real signature unknown
pass
def copyDataTo(self, *args, **kwargs): # real signature unknown
pass
def data(self, *args, **kwargs): # real signature unknown
pass
def determinant(self, *args, **kwargs): # real signature unknown
pass
def fill(self, *args, **kwargs): # real signature unknown
pass
def flipCoordinates(self, *args, **kwargs): # real signature unknown
pass
def frustum(self, *args, **kwargs): # real signature unknown
pass
def inverted(self, *args, **kwargs): # real signature unknown
pass
def isAffine(self, *args, **kwargs): # real signature unknown
pass
def isIdentity(self, *args, **kwargs): # real signature unknown
pass
def lookAt(self, *args, **kwargs): # real signature unknown
pass
def map(self, *args, **kwargs): # real signature unknown
pass
def mapRect(self, *args, **kwargs): # real signature unknown
pass
def mapVector(self, *args, **kwargs): # real signature unknown
pass
def normalMatrix(self, *args, **kwargs): # real signature unknown
pass
def optimize(self, *args, **kwargs): # real signature unknown
pass
def ortho(self, *args, **kwargs): # real signature unknown
pass
def perspective(self, *args, **kwargs): # real signature unknown
pass
def rotate(self, *args, **kwargs): # real signature unknown
pass
def row(self, *args, **kwargs): # real signature unknown
pass
def scale(self, *args, **kwargs): # real signature unknown
pass
def setColumn(self, *args, **kwargs): # real signature unknown
pass
def setRow(self, *args, **kwargs): # real signature unknown
pass
def setToIdentity(self, *args, **kwargs): # real signature unknown
pass
def toAffine(self, *args, **kwargs): # real signature unknown
pass
def toTransform(self, *args, **kwargs): # real signature unknown
pass
def translate(self, *args, **kwargs): # real signature unknown
pass
def transposed(self, *args, **kwargs): # real signature unknown
pass
def viewport(self, *args, **kwargs): # real signature unknown
pass
def __add__(self, y): # real signature unknown; restored from __doc__
""" x.__add__(y) <==> x+y """
pass
def __copy__(self, *args, **kwargs): # real signature unknown
pass
def __div__(self, y): # real signature unknown; restored from __doc__
""" x.__div__(y) <==> x/y """
pass
def __eq__(self, y): # real signature unknown; restored from __doc__
""" x.__eq__(y) <==> x==y """
pass
def __getitem__(self, y): # real signature unknown; restored from __doc__
""" x.__getitem__(y) <==> x[y] """
pass
def __ge__(self, y): # real signature unknown; restored from __doc__
""" x.__ge__(y) <==> x>=y """
pass
def __gt__(self, y): # real signature unknown; restored from __doc__
""" x.__gt__(y) <==> x>y """
pass
def __iadd__(self, y): # real signature unknown; restored from __doc__
""" x.__iadd__(y) <==> x+=y """
pass
def __idiv__(self, y): # real signature unknown; restored from __doc__
""" x.__idiv__(y) <==> x/=y """
pass
def __imul__(self, y): # real signature unknown; restored from __doc__
""" x.__imul__(y) <==> x*=y """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __isub__(self, y): # real signature unknown; restored from __doc__
""" x.__isub__(y) <==> x-=y """
pass
def __le__(self, y): # real signature unknown; restored from __doc__
""" x.__le__(y) <==> x<=y """
pass
def __lshift__(self, y): # real signature unknown; restored from __doc__
""" x.__lshift__(y) <==> x<<y """
pass
def __lt__(self, y): # real signature unknown; restored from __doc__
""" x.__lt__(y) <==> x<y """
pass
def __mul__(self, y): # real signature unknown; restored from __doc__
""" x.__mul__(y) <==> x*y """
pass
def __neg__(self): # real signature unknown; restored from __doc__
""" x.__neg__() <==> -x """
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __ne__(self, y): # real signature unknown; restored from __doc__
""" x.__ne__(y) <==> x!=y """
pass
def __radd__(self, y): # real signature unknown; restored from __doc__
""" x.__radd__(y) <==> y+x """
pass
def __rdiv__(self, y): # real signature unknown; restored from __doc__
""" x.__rdiv__(y) <==> y/x """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
def __rlshift__(self, y): # real signature unknown; restored from __doc__
""" x.__rlshift__(y) <==> y<<x """
pass
def __rmul__(self, y): # real signature unknown; restored from __doc__
""" x.__rmul__(y) <==> y*x """
pass
def __rrshift__(self, y): # real signature unknown; restored from __doc__
""" x.__rrshift__(y) <==> y>>x """
pass
def __rshift__(self, y): # real signature unknown; restored from __doc__
""" x.__rshift__(y) <==> x>>y """
pass
def __rsub__(self, y): # real signature unknown; restored from __doc__
""" x.__rsub__(y) <==> y-x """
pass
def __rtruediv__(self, y): # real signature unknown; restored from __doc__
""" x.__rtruediv__(y) <==> y/x """
pass
def __sub__(self, y): # real signature unknown; restored from __doc__
""" x.__sub__(y) <==> x-y """
pass
def __truediv__(self, y): # real signature unknown; restored from __doc__
""" x.__truediv__(y) <==> x/y """
pass
| [
"[email protected]"
]
| |
2b94ca5f26c8983462ef53b76ee77557a6b751e3 | 32904d4841d104143ba0f41cc3aeb749e470f546 | /backend/django/api/urls.py | 6e5dd09bbd5b11925232aa849f7d72ba6e996239 | []
| no_license | aurthurm/dispatrace-api-vuejs | 20ec5deee015e69bce7a64dc2d89ccae8941b800 | 56d122318af27ff64755fc515345974631d3026f | refs/heads/master | 2023-01-23T23:03:15.438339 | 2020-10-20T22:09:29 | 2020-10-20T22:09:29 | 219,028,985 | 0 | 1 | null | 2022-12-22T18:31:38 | 2019-11-01T17:08:35 | Vue | UTF-8 | Python | false | false | 877 | py | from django.urls import path, include
import api.accounts.routes, api.notices.routes, api.memos.routes
from rest_framework_simplejwt.views import (
    TokenObtainPairView,
    TokenRefreshView,
    TokenVerifyView
)
from .views import ExtraTokenObtainPairView, get_user_data

# URL namespace for ``reverse()`` / ``{% url %}`` lookups (e.g. "api:user").
app_name = 'api'

urlpatterns = [
    # Sub-routers for each app's API endpoints.
    path('accounts/', include(api.accounts.routes)),
    path('notices/', include(api.notices.routes)),
    path('memos/', include(api.memos.routes)),
    # JWT AUTHENTICATION
    path('token/', TokenObtainPairView.as_view(), name='token_obtain_pair'),
    path('token/extras/', ExtraTokenObtainPairView.as_view(), name="token-extras"), # MODIFIED
    path('token/verify/', TokenVerifyView.as_view(), name='token_verify'),
    path('refresh/', TokenRefreshView.as_view(), name='token_refresh'),
    # Profile data for the authenticated user.
    path('user/', get_user_data, name='user'),
]
"[email protected]"
]
| |
093c22edc372fda95f2e5cdb6c60b226b694b071 | 7c81f9ea4c77007435781cc7fab991df50747c53 | /setup.py | d581e3ec5308d8ee33e528dbc682836b3c7dc998 | [
"MIT"
]
| permissive | ArutyunovG/netron | dc259584f552e1ef2808cc7a0fc552d6681076c4 | 07f842e26d3618005ea75a23dd41313ddcfc28d0 | refs/heads/master | 2022-06-11T17:52:08.567397 | 2020-05-02T03:53:23 | 2020-05-02T03:53:23 | 260,730,924 | 0 | 0 | MIT | 2020-05-02T16:42:28 | 2020-05-02T16:42:27 | null | UTF-8 | Python | false | false | 6,872 | py | #!/usr/bin/env python
import distutils
import io
import json
import os
import setuptools
import setuptools.command.build_py
import distutils.command.build
node_dependencies = [
( 'netron', [
'node_modules/d3/dist/d3.min.js',
'node_modules/dagre/dist/dagre.min.js',
'node_modules/marked/marked.min.js',
'node_modules/pako/dist/pako.min.js',
'node_modules/long/dist/long.js',
'node_modules/protobufjs/dist/protobuf.min.js',
'node_modules/protobufjs/ext/prototxt/prototxt.js',
'node_modules/flatbuffers/js/flatbuffers.js' ] )
]
class build(distutils.command.build.build):
    # Stock build options plus a custom ``--version`` flag.
    user_options = distutils.command.build.build.user_options + [ ('version', None, 'version' ) ]

    def initialize_options(self):
        distutils.command.build.build.initialize_options(self)
        self.version = None

    def finalize_options(self):
        distutils.command.build.build.finalize_options(self)

    def run(self):
        # Hand the flag to the build_py sub-command (via class attribute) so
        # it knows whether to rewrite the generated __version__ module.
        build_py.version = bool(self.version)
        return distutils.command.build.build.run(self)
class build_py(setuptools.command.build_py.build_py):
    # Custom ``--version`` flag mirroring the custom ``build`` command above.
    user_options = setuptools.command.build_py.build_py.user_options + [ ('version', None, 'version' ) ]

    def initialize_options(self):
        setuptools.command.build_py.build_py.initialize_options(self)
        self.version = None

    def finalize_options(self):
        setuptools.command.build_py.build_py.finalize_options(self)

    def run(self):
        setuptools.command.build_py.build_py.run(self)
        # Copy the bundled node_modules JavaScript dependencies into the
        # built package directory so they ship with the wheel/sdist.
        for target, files in node_dependencies:
            target = os.path.join(self.build_lib, target)
            if not os.path.exists(target):
                os.makedirs(target)
            for file in files:
                self.copy_file(file, target)

    def build_module(self, module, module_file, package):
        setuptools.command.build_py.build_py.build_module(self, module, module_file, package)
        # When building with --version, overwrite the __version__ module in
        # the build tree with the version taken from package.json.
        if build_py.version and module == '__version__':
            package = package.split('.')
            outfile = self.get_module_outfile(self.build_lib, package, module)
            with open(outfile, 'w+') as f:
                f.write("__version__ = '" + package_version() + "'\n")
def package_version():
    """Return the version string from the package.json next to this script."""
    here = os.path.realpath(os.path.dirname(__file__))
    manifest_path = os.path.join(here, 'package.json')
    with open(manifest_path) as fp:
        manifest = json.load(fp)
    return manifest['version']
setuptools.setup(
name="netron",
version=package_version(),
description="Viewer for neural network, deep learning and machine learning models",
long_description='Netron is a viewer for neural network, deep learning and machine learning models.\n\n' +
'Netron supports **ONNX** (`.onnx`, `.pb`), **Keras** (`.h5`, `.keras`), **Core ML** (`.mlmodel`), **Caffe** (`.caffemodel`, `.prototxt`), **Caffe2** (`predict_net.pb`), **Darknet** (`.cfg`), **MXNet** (`.model`, `-symbol.json`), ncnn (`.param`) and **TensorFlow Lite** (`.tflite`). Netron has experimental support for **TorchScript** (`.pt`, `.pth`), **PyTorch** (`.pt`, `.pth`), **Torch** (`.t7`), **ArmNN** (`.armnn`), **BigDL** (`.bigdl`, `.model`), **Chainer** (`.npz`, `.h5`), **CNTK** (`.model`, `.cntk`), **Deeplearning4j** (`.zip`), **PaddlePaddle** (`__model__`), **MediaPipe** (`.pbtxt`), **ML.NET** (`.zip`), MNN (`.mnn`), **OpenVINO** (`.xml`), **scikit-learn** (`.pkl`), **Tengine** (`.tmfile`), **TensorFlow.js** (`model.json`, `.pb`) and **TensorFlow** (`.pb`, `.meta`, `.pbtxt`, `.ckpt`, `.index`).',
keywords=[
'onnx', 'keras', 'tensorflow', 'tflite', 'coreml', 'mxnet', 'caffe', 'caffe2', 'torchscript', 'pytorch', 'ncnn', 'mnn' 'openvino', 'darknet', 'paddlepaddle', 'chainer',
'artificial intelligence', 'machine learning', 'deep learning', 'neural network',
'visualizer', 'viewer'
],
license="MIT",
cmdclass={
'build': build,
'build_py': build_py
},
package_dir={
'netron': 'src'
},
packages=[
'netron'
],
package_data={
'netron': [
'favicon.ico', 'icon.png',
'base.js',
'numpy.js', 'pickle.js', 'hdf5.js', 'bson.js',
'zip.js', 'tar.js', 'gzip.js',
'armnn.js', 'armnn-metadata.json', 'armnn-schema.js',
'bigdl.js', 'bigdl-metadata.json', 'bigdl-proto.js',
'caffe.js', 'caffe-metadata.json', 'caffe-proto.js',
'caffe2.js', 'caffe2-metadata.json', 'caffe2-proto.js',
'chainer.js',
'cntk.js', 'cntk-metadata.json', 'cntk-proto.js',
'coreml.js', 'coreml-metadata.json', 'coreml-proto.js',
'darknet.js', 'darknet-metadata.json',
'dl4j.js', 'dl4j-metadata.json',
'flux.js', 'flux-metadata.json',
'keras.js', 'keras-metadata.json',
'mediapipe.js',
'mlnet.js', 'mlnet-metadata.json',
'mnn.js', 'mnn-metadata.json', 'mnn-schema.js',
'mxnet.js', 'mxnet-metadata.json',
'ncnn.js', 'ncnn-metadata.json',
'onnx.js', 'onnx-metadata.json', 'onnx-proto.js',
'openvino.js', 'openvino-metadata.json', 'openvino-parser.js',
'paddle.js', 'paddle-metadata.json', 'paddle-proto.js',
'pytorch.js', 'pytorch-metadata.json', 'python.js',
'sklearn.js', 'sklearn-metadata.json',
'tengine.js', 'tengine-metadata.json',
'tf.js', 'tf-metadata.json', 'tf-proto.js',
'tflite.js', 'tflite-metadata.json', 'tflite-schema.js',
'torch.js', 'torch-metadata.json',
'index.html', 'index.js',
'view-grapher.css', 'view-grapher.js',
'view-sidebar.css', 'view-sidebar.js',
'view.js',
'server.py'
]
},
install_requires=[],
author='Lutz Roeder',
author_email='[email protected]',
url='https://github.com/lutzroeder/netron',
entry_points={
'console_scripts': [ 'netron = netron:main' ]
},
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Visualization'
]
) | [
"[email protected]"
]
| |
b9d25a7978535c0c6e4353d0f3cfda7045e14a4d | 250962c80383ecf9c2f94e2874c1e1f961f6a181 | /escpos/showcase.py | 425709a2b35dd8b0fe2d45b7933843d6b9107c5c | [
"Apache-2.0"
]
| permissive | kmee/pyescpos | aa59d6f2d1a9c99d3d9a55da1e1c543c49105da3 | b0a0040cd770c1658258a870caca1a33ff010460 | refs/heads/master | 2023-07-07T09:02:41.581454 | 2022-04-14T19:05:17 | 2022-04-14T19:05:17 | 106,326,996 | 0 | 1 | Apache-2.0 | 2022-04-20T18:29:21 | 2017-10-09T19:34:38 | Python | UTF-8 | Python | false | false | 8,742 | py | # -*- coding: utf-8 -*-
#
# escpos/showcase.py
#
# Copyright 2020 Base4 Sistemas Ltda ME
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from __future__ import unicode_literals
import math
from datetime import datetime
from . import barcode
from .impl import epson
def showcase(printer, **kwargs):
    """Run every printing showcase in sequence, cutting after each one."""
    # Banner: model name (expanded) and vendor name, centered.
    printer.justify_center()
    printer.set_expanded(True)
    printer.text(printer.model.name)
    printer.set_expanded(False)
    printer.text(printer.model.vendor)
    printer.justify_left()
    printer.lf()
    # Each entry is (section function, whether kwargs are forwarded to it).
    sections = (
        (fonts_showcase, True),
        (modes_and_alignment_showcase, True),
        (text_size_showcase, True),
        (rulers_showcase, False),
        (receipt_showcase, False),
        (barcode_showcase, True),
        (qrcode_showcase, True),
    )
    for section, forward_kwargs in sections:
        if forward_kwargs:
            section(printer, **kwargs)
        else:
            section(printer)
        printer.cut()
def fonts_showcase(printer, **kwargs):
    """Print the name of each available font, rendered in that font."""
    _header(printer, 'Fonts')
    available = kwargs.get('font_set', epson.AVAILABLE_FONTS)
    for font_param, font_name in available:
        printer.init()
        printer.set_font(font_param)
        printer.text(font_name)
    printer.lf()
def modes_and_alignment_showcase(printer, **kwargs):
    """A showcase for font modes (normal, condensed, emphasized and expanded)
    and alignment (left, centered and right alignment).

    Extra keyword arguments are accepted (and ignored) so that
    :func:`showcase` can forward its ``**kwargs`` uniformly to every
    section, as it already does for the other ``*_showcase`` functions;
    previously this function took only ``printer`` and raised TypeError
    whenever any keyword argument was supplied.
    """
    _header(printer, 'Modes and Alignment')

    def print_modes(title):
        # Expanded title line followed by one sample line per font mode.
        printer.set_expanded(True)
        printer.text(title)
        printer.set_expanded(False)
        printer.text('Normal mode')
        printer.set_condensed(True)
        printer.text('Condensed mode')
        printer.set_condensed(False)
        printer.set_emphasized(True)
        printer.text('Emphasized mode')
        printer.set_emphasized(False)

    printer.init()
    printer.justify_right()
    print_modes('Right aligned')
    printer.lf()
    printer.justify_center()
    print_modes('Centered')
    printer.lf()
    printer.justify_left()
    print_modes('Left aligned')
    printer.lf()
def text_size_showcase(printer, **kwargs):
    """A showcase of various text sizes.

    :param str text: Any text eight characters long. If it's longer than
        eight chars it will be truncated. If less than eight chars it will
        be completed with "X"s.
    """
    text = kwargs.get('text', 'SPAMEGGS')
    letters = text[:8].ljust(8, 'X')
    _header(printer, 'Text Size')
    printer.init()
    # First row: maximum height (7), width growing per letter.
    for w, c in zip(range(8), letters):
        printer.set_text_size(w, 7)
        printer.textout(c)
    printer.lf()
    # Second row: maximum width (7), height growing per letter.
    # (The original relied on the leaked loop variable ``w`` — still 7
    # after the first loop; the constant is now explicit.)
    for h, c in zip(range(8), letters):
        printer.set_text_size(7, h)
        printer.textout(c)
    printer.lf(2)
def rulers_showcase(printer):
    """Print a measuring ruler at each supported column width."""
    cols = printer.feature.columns
    widest = max(cols.normal, cols.condensed, cols.expanded)
    # One 10-char pattern per column guarantees the ruler is long enough
    # for any width; each line below slices it to the exact width.
    pattern = '....:....!' * widest
    _header(printer, 'Rulers')
    printer.init()

    def ruled_line(title, width, toggle=None):
        # Label line, then the ruler itself, optionally wrapped in a
        # mode toggle (condensed/expanded).
        printer.text('{:s} ({:d} columns)'.format(title, width))
        if toggle is not None:
            toggle(True)
        printer.text(pattern[:width])
        if toggle is not None:
            toggle(False)
        printer.lf()

    ruled_line('Normal', cols.normal)
    ruled_line('Condensed', cols.condensed, printer.set_condensed)
    ruled_line('Expanded', cols.expanded, printer.set_expanded)
def receipt_showcase(printer):
    """A showcase of a fictional POS receipt."""
    ruler_single = _get_ruler(printer)
    ruler_double = _get_ruler(printer, '=')
    printer.init()
    # Header: double rule, expanded centered title, single rule.
    printer.text(ruler_double)
    printer.set_expanded(True)
    printer.justify_center()
    printer.text('RECEIPT #5678')
    printer.justify_left()
    printer.set_expanded(False)
    printer.text(ruler_single)
    printer.text('{:%x %X} Session #{:d}'.format(datetime.now(), 42))
    # Six columns: id, product, qty, 'x', unit price, subtotal; widths are
    # fractions of the condensed column count, alignments one char each.
    item_mask = _build_item_mask(
        printer.feature.columns.condensed,
        alignments='><>^>>',
        column_widths=[
            0.1,
            0.4,
            0.15,
            0.05,
            0.15,
            0.15,
        ]
    )
    # First row is the column header; the rest are sample line items.
    data = (
        ('ID', 'Product', 'Qty', '', 'Price', 'Subtotal'),
        ('1234', 'SAMPLE', '2', 'x', '0.25', '0.50'),
        ('1235', 'OTHER SAMPLE', '1', 'x', '1.50', '1.50'),
        ('1237', 'ANOTHER ONE', '3', 'x', '0.75', '2.25'),
    )
    printer.set_condensed(True)
    for row in data:
        printer.text(item_mask.format(*row))
    printer.set_condensed(False)
    printer.text(ruler_single)
    printer.set_emphasized(True)
    printer.text('TOTAL 4.25')
    printer.set_emphasized(False)
    printer.text(ruler_double)
    printer.lf()
def barcode_showcase(printer, **kwargs):
"""A showcase of 1-dimensional barcodes."""
barcode_height = kwargs.get('barcode_height', 120)
barcode_width = kwargs.get('barcode_width', barcode.BARCODE_NORMAL_WIDTH)
barcode_hri = kwargs.get('barcode_hri', barcode.BARCODE_HRI_BOTTOM)
barcodes = (
('EAN-8', 'ean8', '12345670'),
('EAN-13', 'ean13', '1234567890128'),
('Code128-A', 'code128', '12345'),
)
_header(printer, 'Barcodes')
printer.init()
for title, method, data in barcodes:
printer.set_emphasized(True)
printer.text(title)
printer.set_emphasized(False)
getattr(printer, method)(
data,
barcode_hri=barcode_hri,
barcode_height=barcode_height,
barcode_width=barcode_width
)
printer.lf()
def qrcode_showcase(printer, **kwargs):
    """A showcase of QRCodes in various configurations.

    Prints one QRCode with all defaults, then one per supported module
    size (ECC level fixed at L), then one per supported error correction
    level (module size fixed at 4).

    Optional keyword argument ``data``: the payload to encode; defaults
    to the project URL.
    """
    # Fixed typo: the catch-all parameter was named ``kwags``; renaming it
    # is interface-safe since callers cannot pass a ``**`` dict by name.
    data = kwargs.get('data', 'https://github.com/base4sistemas/pyescpos')
    _header(printer, 'QRCode')
    printer.init()
    # showcase all default values
    printer.text('QRCode (all defaults)')
    printer.qrcode(data)
    printer.lf()

    # showcase all possible module size variations
    def _qrcode_ecc_level_l(module_size, title):
        # One QRCode at the given module size, ECC level fixed at L.
        printer.text('QRCode')
        printer.text('Module size: {!r} ({})'.format(module_size, title))
        printer.text(' ECC level: L')
        printer.qrcode(
            data,
            qrcode_module_size=module_size,
            qrcode_ecc_level=barcode.QRCODE_ERROR_CORRECTION_L
        )
        printer.lf()

    for value, title in barcode.QRCODE_MODULE_SIZES:
        _qrcode_ecc_level_l(value, title)

    # showcase all possible error correction level variations
    def _qrcode_module_size_4(ecc_level, ecc_title):
        # One QRCode at the given ECC level, module size fixed at 4.
        printer.text('QRCode')
        printer.text('Module size: 4')
        printer.text(' ECC level: {!r} ({})'.format(ecc_level, ecc_title))
        printer.qrcode(
            data,
            qrcode_module_size=barcode.QRCODE_MODULE_SIZE_4,
            qrcode_ecc_level=ecc_level
        )
        printer.lf()

    for value, title in barcode.QRCODE_ERROR_CORRECTION_LEVELS:
        _qrcode_module_size_4(value, title)
def _header(printer, title):
    """Print *title* centered between two full-width rulers."""
    ruler = _get_ruler(printer)
    printer.init()
    printer.text(ruler)
    printer.justify_center()
    printer.text(title)
    # Restore left justification for whatever is printed next.
    printer.justify_left()
    printer.text(ruler)
def _get_ruler(printer, char='-'):
return char * printer.feature.columns.normal
def _build_item_mask(width, alignments=None, column_widths=None, gap=1):
# <alignments> str, for example "<>^" (left, right, center)
# <column_widths> list(float, ...)
if len(alignments) != len(column_widths):
raise ValueError('Alignment spec and number of columns must match')
if sum(column_widths) > 100:
raise ValueError('Sum of column widths must not be greater than 100%')
width = width - (len(alignments) * gap) - gap
columns = []
for i, perc in enumerate(column_widths):
col_len = int(math.ceil(perc * width))
columns.append('{{:{:s}{:d}s}}'.format(alignments[i], col_len))
return (' ' * gap).join(columns)
| [
"[email protected]"
]
| |
440ed281f6b51de91c0e3b9136e205f7ba4f3c15 | 7c47e106c9ec85a7239c84c55ad5f20972edefcf | /plotting.py | ead3aaf6df3bdf93cf92df55b0cdd303471d0293 | []
| no_license | anasthesia/pyBBN | 11813717ad5023a9b29f9594ccde93fbc2d5a0c9 | 0e88604b765eb5ce2f196909c65cf2af11a8cc2f | refs/heads/master | 2021-01-21T03:37:46.309318 | 2016-05-10T12:03:50 | 2016-05-10T12:03:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,435 | py | # -*- coding: utf-8 -*-
import os
import time
import itertools
import numpy
import pandas
import matplotlib.pyplot as plt
from common import UNITS, GRID, statistics as STATISTICS
from common.utils import ring_deque, getboolenv
def monitor_datafile(datafolder, timer=1):
    """Poll ``evolution.pickle`` in *datafolder* and live-plot new records.

    Re-reads the pickled pandas DataFrame whenever its mtime changes and
    plots only the rows appended since the last poll.  If the file got
    shorter (a fresh simulation run), all figures are closed and
    plotting restarts from scratch.  Runs forever; Python 2 syntax.

    :param datafolder: folder containing ``evolution.pickle``
    :param timer: polling interval in seconds
    """
    datafile = os.path.join(datafolder, 'evolution.pickle')

    def plot_backlog(data, last_datalen):
        # Replay every not-yet-plotted row; redraw the canvas only once.
        i = last_datalen + 1
        while i < len(data):
            plotting.plot(data[:i], redraw=False)
            i += 1
        last_datalen = len(data)
        plotting.redraw()
        return last_datalen

    plt.ion()
    plotting = Plotting()

    data = pandas.read_pickle(datafile)
    last_datalen = plot_backlog(data, 0)
    last_mtime = os.stat(datafile).st_mtime

    while True:
        mtime = os.stat(datafile).st_mtime
        if mtime > last_mtime:
            try:
                data = pandas.read_pickle(datafile)
                if len(data) < last_datalen:
                    # Shorter file means a new run started: reset everything.
                    print "Datafile is shorter than before, clearing the output"
                    plt.close('all')
                    plotting = Plotting()
                    last_datalen = 0
                last_datalen = plot_backlog(data, last_datalen)
                print "Plotting done at", mtime
            except Exception as e:
                # Best effort: the file may be mid-write; retry on next poll.
                print e
        last_mtime = mtime
        time.sleep(timer)
class Plotting(object):
    """Live visualization of the cosmological evolution.

    Maintains a 2x2 figure of global parameters (T, a, a*T, N_eff) and,
    optionally, a second figure with per-particle monitors registered via
    :meth:`monitor`.
    """

    # List of (particle, ParticleMonitor) pairs; None until monitor() is used.
    particles = None

    def __init__(self, show=True):
        """ Initialize plots, setup basic styling """
        self.show_plots = getboolenv("SHOW_PLOTS", show)

        self.params_figure, self.plots = plt.subplots(2, 2, num=1)
        # Flatten the 2x2 axes array into a flat list for index access.
        self.plots = list(itertools.chain(*self.plots))
        self.params_figure.subplots_adjust(hspace=0.5, wspace=0.5)

        # DataFrame column plotted on each subplot, and the unit divisor
        # used to convert stored values for display.
        self.plot_map = ['T', 'a', 'aT', 'N_eff']
        self.divider_map = [UNITS.MeV, 1, UNITS.MeV, 1]

        self.plots[0].set_title("Temperature")
        self.plots[0].set_xlabel("time, s")
        self.plots[0].set_xscale("log")
        self.plots[0].set_yscale("log")
        self.plots[0].set_ylabel("T, MeV")
        # NOTE(review): duplicate set_yscale("log") call kept verbatim.
        self.plots[0].set_yscale("log")

        self.plots[1].set_title("Scale factor")
        self.plots[1].set_xlabel("time, s")
        self.plots[1].set_xscale("log")
        self.plots[1].set_yscale("log")
        self.plots[1].set_ylabel("a, 1")
        self.plots[1].set_ylim(0, 1)

        self.plots[2].set_title("T * a")
        self.plots[2].set_xlabel("time, s")
        self.plots[2].set_xscale("log")
        self.plots[2].set_ylabel("T * a, MeV")
        self.plots[2].set_ylim(1, 1.1)

        self.plots[3].set_title("N_eff")
        self.plots[3].set_xlabel("time, s")
        self.plots[3].invert_xaxis()
        self.plots[3].set_xscale("log")
        self.plots[3].set_ylabel("N_eff")

        # One Line2D and one bounded history buffer per subplot.
        self.lines = []
        self.plots_data = []
        self.times = ring_deque([], 1e6)
        for plot in self.plots:
            self.lines.append(plot.plot([], [], 'b-')[0])
            self.plots_data.append(ring_deque([], 1e6))

        if self.show_plots:
            self.params_figure.show()

    def redraw(self):
        """Redraw the parameters figure and, if present, the particles figure."""
        self.params_figure.canvas.draw()
        if self.particles:
            self.particles_figure.canvas.draw()

    def save(self, filename):
        """ Save cosmological and monitored particles plots to the file in the same folder as \
        `filename` """
        folder = os.path.split(filename)[0]
        self.params_figure.savefig(os.path.join(folder, 'plots.svg'))
        if self.particles:
            self.particles_figure.savefig(os.path.join(folder, 'particles.svg'))

    def monitor(self, map):
        """ Setup the detailed distribution function and energy density plots for specific \
        particle species """
        # NOTE(review): parameter `map` shadows the builtin; it is a sequence
        # of (particle, monitor_class) pairs, one subplot row per pair.
        self.particles_figure, self.particles_plots = plt.subplots(len(map), 2, num=2)
        self.particles_figure.subplots_adjust(hspace=0.5, wspace=0.5)
        self.particles = self.particles if self.particles else []
        for i, (particle, monitor) in enumerate(map):
            self.particles.append((particle, monitor(particle, self.particles_plots[i])))
        if self.show_plots:
            self.particles_figure.show()

    def plot(self, data, redraw=True):
        """ Plot cosmological parameters and monitored particles distribution functions """
        last_t = data['t'].iloc[-1] / UNITS.s
        # Log-scale x axis cannot show t == 0.
        if last_t == 0:
            return
        self.times.append(last_t)
        for i, plot in enumerate(self.plots):
            _, xmax = plot.get_xlim()
            ymin, ymax = plot.get_ylim()
            # Grow axis limits as new data falls outside the current view.
            if last_t >= xmax:
                plot.set_xlim(self.times[0], last_t * 1.1)

            last_data = data[self.plot_map[i]].iloc[-1] / self.divider_map[i]
            self.plots_data[i].append(last_data)

            if last_data >= ymax:
                plot.set_ylim(self.plots_data[i].min, 1.1 * last_data)
            if last_data <= ymin:
                plot.set_ylim(last_data / 1.1, self.plots_data[i].max)

            self.lines[i].set_data(self.times, self.plots_data[i])

        if self.particles:
            for i, (_, monitor) in enumerate(self.particles):
                monitor.plot(data)

        if redraw:
            self.redraw()
class ParticleMonitor(object):
    """Abstract base class for per-particle diagnostic plots.

    Subclasses must implement ``__init__`` and ``plot``.  The base class
    records everything drawn through :meth:`scatter` and
    :meth:`plot_function` in ``self.data``, keyed by subplot index.
    """

    # Plotted history, keyed by subplot index; created lazily on first use.
    data = None

    def __init__(self, particle, plots):
        raise NotImplementedError()

    def plot(self, data):
        raise NotImplementedError()

    def scatter(self, plot, x, y, *args, **kwargs):
        """Scatter a single (x, y) point on subplot *plot* and record it."""
        if not self.data:
            self.data = {}
        series = self.data.setdefault(plot, [[], []])
        self.plots[plot].scatter(x, y, *args, **kwargs)
        series[0].append(x)
        series[1].append(y)

    def plot_function(self, plot, t, grid, foo, *args, **kwargs):
        """Plot curve *foo* over *grid* on subplot *plot*, recorded with time *t*."""
        if not self.data:
            self.data = {}
        series = self.data.setdefault(plot, [[], [], []])
        self.plots[plot].plot(grid, foo, *args, **kwargs)
        series[0].append(t)
        series[1].append(grid)
        series[2].append(foo)
class RadiationParticleMonitor(ParticleMonitor):
    """Tracks the ratio of a particle's energy density and distribution
    function to the equilibrium (massless) expectations at its own
    comoving temperature."""

    def __init__(self, particle, plots):
        self.particle, self.plots = particle, plots

        # Left subplot: energy density ratio vs temperature (T decreases
        # with time, hence the inverted x axis).
        self.plots[0].set_title(particle.name)
        self.plots[0].set_xlabel("T, MeV")
        self.plots[0].invert_xaxis()
        self.plots[0].set_xscale("log")
        self.plots[0].set_ylabel("rho/rho_eq")

        # Right subplot: distribution function ratio over the momentum grid.
        self.plots[1].set_xlabel("y, MeV")
        self.plots[1].set_xlim(self.particle.grid.MIN_MOMENTUM / UNITS.MeV,
                               self.particle.grid.MAX_MOMENTUM / UNITS.MeV)
        self.plots[1].set_ylabel("f/f_eq")

    def comparison_distributions(self, data):
        """Return ``((T, rho/rho_eq), f_eq)`` used as the comparison baseline."""
        T = self.particle.params.T
        aT = self.particle.aT

        # Energy density normalized by that of a massless equilibrium species
        # at the particle's comoving temperature aT/a; fermions carry the
        # usual 7/8 statistical factor.
        rhoeq = self.particle.energy_density / (
            self.particle.dof * numpy.pi**2 / 30. * (aT / self.particle.params.a)**4
            * (7./8. if self.particle.statistics == STATISTICS.FERMION else 1.)
        )
        feq = self.particle.equilibrium_distribution(aT=aT)
        return (T, rhoeq), feq

    def plot(self, data):
        (T, rhoeq), feq = self.comparison_distributions(data)

        if not self.particle.in_equilibrium:
            ratio = numpy.vectorize(self.particle.distribution)(self.particle.grid.TEMPLATE) / feq
        else:
            # In equilibrium both ratios are identically 1 by construction.
            ratio = numpy.ones(self.particle.grid.TEMPLATE.shape)
            rhoeq = 1.

        self.scatter(0, T / UNITS.MeV, rhoeq, s=1)

        # Fade previously drawn distribution snapshots before adding a new one.
        age_lines(self.plots[1].get_axes().lines)
        self.plot_function(1, T, self.particle.grid.TEMPLATE / UNITS.MeV, ratio)
class EquilibriumRadiationParticleMonitor(RadiationParticleMonitor):
    """Variant comparing against equilibrium at the plasma temperature
    (``params.T`` / ``params.aT``) rather than the particle's own ``aT``."""

    def comparison_distributions(self, data):
        T = self.particle.params.T
        aT = self.particle.params.aT
        # T = data['T'].iloc[-1]
        # aT = data['aT'].iloc[-1]

        # Normalization by the massless equilibrium energy density at the
        # plasma temperature T (7/8 factor for fermions).
        rhoeq = self.particle.energy_density / (
            self.particle.dof * numpy.pi**2 / 30 * T**4
            * (7./8. if self.particle.statistics == STATISTICS.FERMION else 1.)
        )
        feq = self.particle.equilibrium_distribution(aT=aT)
        return (T, rhoeq), feq
class EffectiveTemperatureRadiationPartileMonitor(RadiationParticleMonitor):
    """Variant comparing against equilibrium at the effective temperature
    inferred from the particle's actual energy density.

    NOTE(review): "Partile" in the class name is a typo for "Particle";
    kept because external code may reference this name.
    """

    def comparison_distributions(self, data):
        rho = self.particle.energy_density
        # Massless equilibrium prefactor (7/8 for fermions): rho_eq = const * T^4.
        const = (
            self.particle.dof * numpy.pi**2 / 30.
            * (7./8. if self.particle.statistics == STATISTICS.FERMION else 1.)
        )
        T = self.particle.params.T
        # Effective temperature of a massless equilibrium species carrying
        # the same energy density.
        T_eff = (rho / const)**0.25
        aT = T_eff * self.particle.params.a

        rhoeq = rho / const / (self.particle.aT / self.particle.params.a)**4
        feq = self.particle.equilibrium_distribution(aT=aT)

        self.plots[1].set_title("T ~ {:3e}".format(T_eff / UNITS.MeV))
        return (T, rhoeq), feq
class MassiveParticleMonitor(ParticleMonitor):
    """Tracks a massive species: energy per particle in units of its mass,
    and the momentum-weighted deviation of f from equilibrium."""

    def __init__(self, particle, plots):
        self.particle, self.plots = particle, plots

        # Left subplot: rho / (n * M) vs temperature (inverted: T falls with time).
        self.plots[0].set_title(particle.name)
        self.plots[0].invert_xaxis()
        self.plots[0].set_xlabel("T, MeV")
        self.plots[0].set_xscale("log")
        self.plots[0].set_ylabel("rho/(n M)")

        # Right subplot: (f - f_eq) * y^2 over the momentum grid.
        self.plots[1].set_xlabel("y, MeV")
        self.plots[1].set_xlim(self.particle.grid.MIN_MOMENTUM / UNITS.MeV,
                               self.particle.grid.MAX_MOMENTUM / UNITS.MeV)
        self.plots[1].set_ylabel("(f-f_eq) y^2")

    def plot(self, data):
        T = data['T'].iloc[-1]
        # Local import to avoid a circular dependency at module import time
        # -- TODO confirm; kept as in the original.
        from particles.NonEqParticle import energy_density, density

        self.scatter(0, T / UNITS.MeV,
                     energy_density(self.particle) / (self.particle.mass * density(self.particle)),
                     s=1)

        # Fade older curves before drawing the current snapshot.
        age_lines(self.plots[1].get_axes().lines)

        yy = self.particle.grid.TEMPLATE * self.particle.grid.TEMPLATE / UNITS.MeV**2
        f = self.particle._distribution
        feq = self.particle.equilibrium_distribution()
        self.plot_function(1, T, self.particle.grid.TEMPLATE / UNITS.MeV, yy*(f-feq))
class EquilibrationMonitor(ParticleMonitor):
    """Tracks how strongly collisions drive a species towards equilibrium:
    the largest collision-integral magnitude and its numerator, both as
    functions of the scale factor."""

    def __init__(self, particle, plots):
        self.particle, self.plots = particle, plots

        self.plots[0].set_title(particle.name)
        self.plots[0].set_xlabel("a")
        self.plots[0].set_xscale("log")
        self.plots[0].set_ylabel("max|I|")

        self.plots[1].set_xlabel("a")
        self.plots[1].set_xscale("log")
        self.plots[1].set_ylabel("numerator, MeV^-1")

    def plot(self, data):
        a = data['a'].iloc[-1]
        # Local import kept as in the original (presumably avoids an import
        # cycle -- TODO confirm).
        from particles.NonEqParticle import numerator
        self.scatter(0, a, numpy.max(numpy.fabs(self.particle.collision_integral)) * UNITS.MeV, s=1)
        self.scatter(1, a, numerator(self.particle) * UNITS.MeV, s=1)
class AbundanceMonitor(ParticleMonitor):
    """Tracks a species' share of the total energy density and its comoving
    number density ``n a^3`` as functions of temperature."""

    def __init__(self, particle, plots):
        self.particle, self.plots = particle, plots

        self.plots[0].set_title(particle.name)
        self.plots[0].set_xlabel("T, MeV")
        self.plots[0].invert_xaxis()
        self.plots[0].set_xscale("log")
        self.plots[0].set_yscale("log")
        self.plots[0].set_ylabel("rho fraction")

        self.plots[1].set_xlabel("T, MeV")
        self.plots[1].invert_xaxis()
        self.plots[1].set_xscale("log")
        self.plots[1].set_yscale("log")
        self.plots[1].set_ylabel("n a^3")

    def plot(self, data):
        T = data['T'].iloc[-1]
        total_rho = data['rho'].iloc[-1]
        rho = self.particle.energy_density
        # Fraction of the total energy budget carried by this species.
        self.scatter(0, T / UNITS.MeV, rho / total_rho, s=1)

        # Comoving number density (constant for a decoupled, stable species).
        density = self.particle.density * self.particle.params.a**3 / UNITS.MeV**3
        self.scatter(1, T / UNITS.MeV, density, s=1)
class DensityAndEnergyMonitor(ParticleMonitor):
    """Tracks a species' comoving energy density ``rho a^4`` and comoving
    number density ``n a^3`` as functions of temperature."""

    def __init__(self, particle, plots):
        self.particle, self.plots = particle, plots

        self.plots[0].set_title(particle.name)
        self.plots[0].set_xlabel("T, MeV")
        self.plots[0].invert_xaxis()
        self.plots[0].set_xscale("log")
        self.plots[0].set_ylabel("rho a^4")

        self.plots[1].set_xlabel("T, MeV")
        self.plots[1].invert_xaxis()
        self.plots[1].set_xscale("log")
        self.plots[1].set_ylabel("n a^3")

    def plot(self, data):
        T = data['T'].iloc[-1]
        # Comoving energy density (constant for free-streaming radiation).
        rho = self.particle.energy_density * self.particle.params.a**4 / UNITS.MeV**4
        self.scatter(0, T / UNITS.MeV, rho, s=1)

        density = self.particle.density * self.particle.params.a**3 / UNITS.MeV**3
        self.scatter(1, T / UNITS.MeV, density, s=1)
def age_lines(lines):
    """ Slightly decrease the opacity of plotted lines until they are barely visible.\
    Then, remove them. Saves up on memory and clears the view of the plots.

    Each call multiplies every line's alpha by 0.8 (a missing alpha counts
    as 1.0); once the alpha drops below 0.1 the line is removed.

    :param lines: mutable sequence of matplotlib line artists (typically
        the live ``axes.lines`` list)
    """
    # Iterate over a snapshot: ``line.remove()`` mutates the live
    # ``axes.lines`` list we were handed, which would otherwise make the
    # loop skip the element following every removed line.
    for line in list(lines):
        alpha = line.get_alpha() or 1.
        if alpha < 0.1:
            line.remove()
        else:
            line.set_alpha(alpha * 0.8)
def plot_integrand(integrand, name, p0, filename=__file__):
    """ Save a 3D plot of the distribution function integrand into a file.

    Evaluates ``integrand([p1, p2])`` on the full GRID x GRID momentum mesh,
    draws the surface plus projected contour maps, and saves the figure as
    ``logs/plt_<p0 in MeV>.svg`` next to *filename*.

    Note: ``filename`` defaults to this module's path, bound at import time.
    """
    from matplotlib import cm
    # Imported for its side effect of registering the '3d' projection.
    from mpl_toolkits.mplot3d import Axes3D
    fig = plt.figure(3)
    ax = fig.gca(projection='3d')
    plt.cla()
    X, Y = numpy.meshgrid(GRID.TEMPLATE, GRID.TEMPLATE)
    # Evaluate the integrand pointwise over the flattened mesh, then
    # restore the 2D shape.
    Z = numpy.array([integrand([x, y]) for x, y in zip(numpy.ravel(X), numpy.ravel(Y))])\
        .reshape(X.shape)

    ax.plot_surface(X, Y, Z, rstride=1, cstride=1, alpha=0.1)
    # Contour projections on the bottom and the two side walls.
    ax.contourf(X, Y, Z, zdir='z', offset=numpy.amin(Z), cmap=cm.coolwarm)
    ax.contourf(X, Y, Z, zdir='x', offset=ax.get_xlim()[0], cmap=cm.coolwarm)
    ax.contourf(X, Y, Z, zdir='y', offset=ax.get_ylim()[1], cmap=cm.coolwarm)

    ax.set_xlabel('p1')
    ax.set_ylabel('p2')
    ax.set_title('{} p0 = {}'.format(name, p0 / UNITS.MeV))

    plt.savefig(os.path.join(os.path.split(filename)[0], 'logs/plt_{}.svg'.format(p0 / UNITS.MeV)))
def plot_points(points, name):
    """Draw a scatter plot for a number of `points` tuples `(x, y)`"""
    plt.figure(4)
    plt.title(name)
    xs, ys = zip(*points)
    plt.scatter(xs, ys)
    plt.show()
if __name__ == '__main__':
    # Stand-alone usage: watch a simulation output folder and live-plot it.
    import argparse
    parser = argparse.ArgumentParser(description='Monitor data files and plot')
    # Folder containing the 'evolution.pickle' produced by a run.
    parser.add_argument('--folder', required=True)
    args = parser.parse_args()

    monitor_datafile(args.folder)
| [
"[email protected]"
]
| |
5af60b53e31f670cd9204c30adfae796e822d773 | c0b039e2fe126f8675d6c0bd351d7e58570afbd9 | /capstone/Lib/site-packages/matplotlib/font_manager.py | 10e9f0eb2c3aa63fa8d1e5ff6dc60a6f56afa071 | []
| no_license | sungjin0757/Inu-Capstone-2021 | 4cdd8c8c3963600a24ac69f0a4ffa62d77be7483 | c1ab66c89bb25504b6e9203d3646c7fb51101224 | refs/heads/master | 2023-07-08T15:46:18.673391 | 2021-08-09T12:57:25 | 2021-08-09T12:57:25 | 393,968,655 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 51,026 | py | """
A module for finding, managing, and using fonts across platforms.
This module provides a single `FontManager` instance that can
be shared across backends and platforms. The `findfont`
function returns the best TrueType (TTF) font file in the local or
system font path that matches the specified `FontProperties`
instance. The `FontManager` also handles Adobe Font Metrics
(AFM) font files for use by the PostScript backend.
The design is based on the `W3C Cascading Style Sheet, Level 1 (CSS1)
font specification <http://www.w3.org/TR/1998/REC-CSS2-19980512/>`_.
Future versions may implement the Level 2 or 2.1 specifications.
"""
# KNOWN ISSUES
#
# - documentation
# - font variant is untested
# - font stretch is incomplete
# - font size is incomplete
# - default font algorithm needs improvement and testing
# - setWeights function needs improvement
# - 'light' is an invalid weight value, remove it.
from functools import lru_cache
import json
import logging
from numbers import Number
import os
from pathlib import Path
import re
import subprocess
import sys
try:
import threading
from threading import Timer
except ImportError:
import dummy_threading as threading
from dummy_threading import Timer
import matplotlib as mpl
from matplotlib import _api, afm, cbook, ft2font, rcParams
from matplotlib.fontconfig_pattern import (
parse_fontconfig_pattern, generate_fontconfig_pattern)
from matplotlib.rcsetup import _validators
_log = logging.getLogger(__name__)
# CSS-style relative font-size names mapped to scaling factors applied to
# the default font size.
font_scalings = {
    'xx-small': 0.579,
    'x-small': 0.694,
    'small': 0.833,
    'medium': 1.0,
    'large': 1.200,
    'x-large': 1.440,
    'xx-large': 1.728,
    'larger': 1.2,
    'smaller': 0.833,
    None: 1.0,
}

# CSS font-stretch keywords mapped to numeric (100-900) stretch values.
stretch_dict = {
    'ultra-condensed': 100,
    'extra-condensed': 200,
    'condensed': 300,
    'semi-condensed': 400,
    'normal': 500,
    'semi-expanded': 600,
    'semi-extended': 600,
    'expanded': 700,
    'extended': 700,
    'extra-expanded': 800,
    'extra-extended': 800,
    'ultra-expanded': 900,
    'ultra-extended': 900,
}

# Font-weight keywords mapped to numeric (100-900) weight values.
weight_dict = {
    'ultralight': 100,
    'light': 200,
    'normal': 400,
    'regular': 400,
    'book': 400,
    'medium': 500,
    'roman': 500,
    'semibold': 600,
    'demibold': 600,
    'demi': 600,
    'bold': 700,
    'heavy': 800,
    'extra bold': 800,
    'black': 900,
}

_weight_regexes = [
    # From fontconfig's FcFreeTypeQueryFaceInternal; not the same as
    # weight_dict!  Order matters: longer/more specific names must be
    # matched before their substrings (e.g. "demibold" before "bold").
    ("thin", 100),
    ("extralight", 200),
    ("ultralight", 200),
    ("demilight", 350),
    ("semilight", 350),
    ("light", 300),  # Needs to come *after* demi/semilight!
    ("book", 380),
    ("regular", 400),
    ("normal", 400),
    ("medium", 500),
    ("demibold", 600),
    ("demi", 600),
    ("semibold", 600),
    ("extrabold", 800),
    ("superbold", 800),
    ("ultrabold", 800),
    ("bold", 700),  # Needs to come *after* extra/super/ultrabold!
    ("ultrablack", 1000),
    ("superblack", 1000),
    ("extrablack", 1000),
    (r"\bultra", 1000),
    ("black", 900),  # Needs to come *after* ultra/super/extrablack!
    ("heavy", 900),
]

# Generic family aliases accepted in place of a concrete font name.
font_family_aliases = {
    'serif',
    'sans-serif',
    'sans serif',
    'cursive',
    'fantasy',
    'monospace',
    'sans',
}

# OS Font paths
MSFolders = \
    r'Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders'
MSFontDirectories = [
    r'SOFTWARE\Microsoft\Windows NT\CurrentVersion\Fonts',
    r'SOFTWARE\Microsoft\Windows\CurrentVersion\Fonts']
MSUserFontDirectories = [
    str(Path.home() / 'AppData/Local/Microsoft/Windows/Fonts'),
    str(Path.home() / 'AppData/Roaming/Microsoft/Windows/Fonts'),
]
X11FontDirectories = [
    # an old standard installation point
    "/usr/X11R6/lib/X11/fonts/TTF/",
    "/usr/X11/lib/X11/fonts",
    # here is the new standard location for fonts
    "/usr/share/fonts/",
    # documented as a good place to install new fonts
    "/usr/local/share/fonts/",
    # common application, not really useful
    "/usr/lib/openoffice/share/fonts/truetype/",
    # user fonts
    str((Path(os.environ.get('XDG_DATA_HOME') or Path.home() / ".local/share"))
        / "fonts"),
    str(Path.home() / ".fonts"),
]
OSXFontDirectories = [
    "/Library/Fonts/",
    "/Network/Library/Fonts/",
    "/System/Library/Fonts/",
    # fonts installed via MacPorts
    "/opt/local/share/fonts",
    # user fonts
    str(Path.home() / "Library/Fonts"),
]
@lru_cache(64)
def _cached_realpath(path):
return os.path.realpath(path)
def get_fontext_synonyms(fontext):
    """
    Return the list of file extensions that are synonyms for the given
    file extension *fontext*.
    """
    synonyms = {
        'afm': ['afm'],
        'otf': ['otf', 'ttc', 'ttf'],
        'ttc': ['otf', 'ttc', 'ttf'],
        'ttf': ['otf', 'ttc', 'ttf'],
    }
    return synonyms[fontext]
def list_fonts(directory, extensions):
    """
    Return a list of all fonts matching any of the extensions, found
    recursively under the directory.
    """
    wanted = {"." + ext for ext in extensions}
    found = []
    # os.walk ignores access errors, unlike Path.glob.
    for dirpath, _, filenames in os.walk(directory):
        for filename in filenames:
            if Path(filename).suffix.lower() in wanted:
                found.append(os.path.join(dirpath, filename))
    return found
def win32FontDirectory():
    r"""
    Return the user-specified font directory for Win32.  This is
    looked up from the registry key ::

      \\HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders\Fonts

    If the key is not found, ``%WINDIR%\Fonts`` will be returned.
    """
    # Local import: winreg only exists on Windows.
    import winreg
    try:
        with winreg.OpenKey(winreg.HKEY_CURRENT_USER, MSFolders) as user:
            return winreg.QueryValueEx(user, 'Fonts')[0]
    except OSError:
        # Registry key missing or unreadable: fall back to the system dir.
        return os.path.join(os.environ['WINDIR'], 'Fonts')
def _win32RegistryFonts(reg_domain, base_dir):
    r"""
    Search for fonts in the Windows registry.

    Parameters
    ----------
    reg_domain : int
        The top level registry domain (e.g. HKEY_LOCAL_MACHINE).

    base_dir : str
        The path to the folder where the font files are usually located (e.g.
        C:\Windows\Fonts). If only the filename of the font is stored in the
        registry, the absolute path is built relative to this base directory.

    Returns
    -------
    `set`
        `pathlib.Path` objects with the absolute path to the font files found.

    """
    import winreg
    items = set()

    for reg_path in MSFontDirectories:
        try:
            with winreg.OpenKey(reg_domain, reg_path) as local:
                for j in range(winreg.QueryInfoKey(local)[1]):
                    # value may contain the filename of the font or its
                    # absolute path.
                    key, value, tp = winreg.EnumValue(local, j)
                    if not isinstance(value, str):
                        continue

                    # Work around for https://bugs.python.org/issue25778, which
                    # is fixed in Py>=3.6.1.
                    value = value.split("\0", 1)[0]

                    try:
                        # If value contains already an absolute path, then it
                        # is not changed further.
                        path = Path(base_dir, value).resolve()
                    except RuntimeError:
                        # Don't fail with invalid entries.
                        continue

                    items.add(path)
        except (OSError, MemoryError):
            # Missing registry path or enumeration failure: skip this root.
            continue

    return items
def win32InstalledFonts(directory=None, fontext='ttf'):
    """
    Search for fonts in the specified font directory, or use the
    system directories if none given. Additionally, it is searched for user
    fonts installed. A list of TrueType font filenames are returned by default,
    or AFM fonts if *fontext* == 'afm'.
    """
    import winreg

    if directory is None:
        directory = win32FontDirectory()

    # Accepted suffixes, e.g. ['.otf', '.ttc', '.ttf'].
    fontext = ['.' + ext for ext in get_fontext_synonyms(fontext)]

    items = set()

    # System fonts
    items.update(_win32RegistryFonts(winreg.HKEY_LOCAL_MACHINE, directory))

    # User fonts
    for userdir in MSUserFontDirectories:
        items.update(_win32RegistryFonts(winreg.HKEY_CURRENT_USER, userdir))

    # Keep only paths with matching file extension.
    return [str(path) for path in items if path.suffix.lower() in fontext]
@lru_cache()
def _call_fc_list():
    """Cache and list the font filenames known to `fc-list`."""
    try:
        help_text = subprocess.check_output(['fc-list', '--help'])
        if b'--format' not in help_text:
            # fontconfig 2.7 implemented --format.
            _log.warning(
                'Matplotlib needs fontconfig>=2.7 to query system fonts.')
            return []
        out = subprocess.check_output(['fc-list', '--format=%{file}\\n'])
    except (OSError, subprocess.CalledProcessError):
        # fc-list missing or failing: behave as if no fonts are known.
        return []
    return [os.fsdecode(fname) for fname in out.split(b'\n')]
def get_fontconfig_fonts(fontext='ttf'):
    """List font filenames known to `fc-list` having the given extension."""
    accepted = {'.' + ext for ext in get_fontext_synonyms(fontext)}
    return [fname for fname in _call_fc_list()
            if Path(fname).suffix.lower() in accepted]
def findSystemFonts(fontpaths=None, fontext='ttf'):
    """
    Search for fonts in the specified font paths.  If no paths are
    given, will use a standard set of system paths, as well as the
    list of fonts tracked by fontconfig if fontconfig is installed and
    available.  A list of TrueType fonts are returned by default with
    AFM fonts as an option.
    """
    fontfiles = set()
    fontexts = get_fontext_synonyms(fontext)

    if fontpaths is None:
        if sys.platform == 'win32':
            fontpaths = MSUserFontDirectories + [win32FontDirectory()]
            # now get all installed fonts directly...
            fontfiles.update(win32InstalledFonts(fontext=fontext))
        else:
            fontpaths = X11FontDirectories
            if sys.platform == 'darwin':
                # macOS searches both the X11 and the native locations.
                fontpaths = [*X11FontDirectories, *OSXFontDirectories]
            # Also include whatever fontconfig knows about, if available.
            fontfiles.update(get_fontconfig_fonts(fontext))

    elif isinstance(fontpaths, str):
        fontpaths = [fontpaths]

    for path in fontpaths:
        fontfiles.update(map(os.path.abspath, list_fonts(path, fontexts)))

    # Registry/fontconfig entries may be stale; keep only existing files.
    return [fname for fname in fontfiles if os.path.exists(fname)]
class FontEntry:
    """
    A class for storing Font properties. It is used when populating
    the font lookup dictionary.
    """

    def __init__(self,
                 fname='',
                 name='',
                 style='normal',
                 variant='normal',
                 weight='normal',
                 stretch='normal',
                 size='medium',
                 ):
        self.fname = fname
        self.name = name
        self.style = style
        self.variant = variant
        self.weight = weight
        self.stretch = stretch
        # Normalize numeric sizes to a canonical float string; symbolic
        # sizes (e.g. 'scalable', 'medium') are kept verbatim.
        try:
            self.size = str(float(size))
        except ValueError:
            self.size = size

    def __repr__(self):
        basename = os.path.basename(self.fname)
        return ("<Font '%s' (%s) %s %s %s %s>"
                % (self.name, basename, self.style, self.variant,
                   self.weight, self.stretch))
def ttfFontProperty(font):
    """
    Extract information from a TrueType font file.

    Parameters
    ----------
    font : `.FT2Font`
        The TrueType font file from which information will be extracted.

    Returns
    -------
    `FontEntry`
        The extracted font properties.

    Raises
    ------
    NotImplementedError
        If the font is not scalable.
    """
    name = font.family_name

    #  Styles are: italic, oblique, and normal (default)
    sfnt = font.get_sfnt()
    # SFNT 'name' table keys: (platform id, encoding id, language id).
    mac_key = (1, # platform: macintosh
               0, # id: roman
               0) # langid: english
    ms_key = (3, # platform: microsoft
              1, # id: unicode_cs
              0x0409) # langid: english_united_states

    # These tables are actually mac_roman-encoded, but mac_roman support may be
    # missing in some alternative Python implementations and we are only going
    # to look for ASCII substrings, where any ASCII-compatible encoding works
    # - or big-endian UTF-16, since important Microsoft fonts use that.
    sfnt2 = (sfnt.get((*mac_key, 2), b'').decode('latin-1').lower() or
             sfnt.get((*ms_key, 2), b'').decode('utf_16_be').lower())
    sfnt4 = (sfnt.get((*mac_key, 4), b'').decode('latin-1').lower() or
             sfnt.get((*ms_key, 4), b'').decode('utf_16_be').lower())

    if sfnt4.find('oblique') >= 0:
        style = 'oblique'
    elif sfnt4.find('italic') >= 0:
        style = 'italic'
    elif sfnt2.find('regular') >= 0:
        style = 'normal'
    elif font.style_flags & ft2font.ITALIC:
        style = 'italic'
    else:
        style = 'normal'

    #  Variants are: small-caps and normal (default)
    #  !!!!  Untested
    if name.lower() in ['capitals', 'small-caps']:
        variant = 'small-caps'
    else:
        variant = 'normal'

    # The weight-guessing algorithm is directly translated from fontconfig
    # 2.13.1's FcFreeTypeQueryFaceInternal (fcfreetype.c).
    wws_subfamily = 22
    typographic_subfamily = 16
    font_subfamily = 2
    styles = [
        sfnt.get((*mac_key, wws_subfamily), b'').decode('latin-1'),
        sfnt.get((*mac_key, typographic_subfamily), b'').decode('latin-1'),
        sfnt.get((*mac_key, font_subfamily), b'').decode('latin-1'),
        sfnt.get((*ms_key, wws_subfamily), b'').decode('utf-16-be'),
        sfnt.get((*ms_key, typographic_subfamily), b'').decode('utf-16-be'),
        sfnt.get((*ms_key, font_subfamily), b'').decode('utf-16-be'),
    ]
    # Drop empty entries; fall back to FreeType's style name if all empty.
    styles = [*filter(None, styles)] or [font.style_name]

    def get_weight():  # From fontconfig's FcFreeTypeQueryFaceInternal.
        # OS/2 table weight.
        os2 = font.get_sfnt_table("OS/2")
        if os2 and os2["version"] != 0xffff:
            return os2["usWeightClass"]
        # PostScript font info weight.
        try:
            ps_font_info_weight = (
                font.get_ps_font_info()["weight"].replace(" ", "") or "")
        except ValueError:
            pass
        else:
            for regex, weight in _weight_regexes:
                if re.fullmatch(regex, ps_font_info_weight, re.I):
                    return weight
        # Style name weight.
        for style in styles:
            style = style.replace(" ", "")
            for regex, weight in _weight_regexes:
                if re.search(regex, style, re.I):
                    return weight
        if font.style_flags & ft2font.BOLD:
            return 700  # "bold"
        return 500  # "medium", not "regular"!

    weight = int(get_weight())

    #  Stretch can be absolute and relative
    #  Absolute stretches are: ultra-condensed, extra-condensed, condensed,
    #    semi-condensed, normal, semi-expanded, expanded, extra-expanded,
    #    and ultra-expanded.
    #  Relative stretches are: wider, narrower
    #  Child value is: inherit
    # NOTE(review): the 'demi cond' branch below appears unreachable, since
    # any name containing 'demi cond' also contains 'cond' and is caught by
    # the first branch -- TODO confirm intended ordering.
    if any(word in sfnt4 for word in ['narrow', 'condensed', 'cond']):
        stretch = 'condensed'
    elif 'demi cond' in sfnt4:
        stretch = 'semi-condensed'
    elif any(word in sfnt4 for word in ['wide', 'expanded', 'extended']):
        stretch = 'expanded'
    else:
        stretch = 'normal'

    #  Sizes can be absolute and relative.
    #  Absolute sizes are: xx-small, x-small, small, medium, large, x-large,
    #    and xx-large.
    #  Relative sizes are: larger, smaller
    #  Length value is an absolute font size, e.g., 12pt
    #  Percentage values are in 'em's.  Most robust specification.
    if not font.scalable:
        raise NotImplementedError("Non-scalable fonts are not supported")
    size = 'scalable'

    return FontEntry(font.fname, name, style, variant, weight, stretch, size)
def afmFontProperty(fontpath, font):
    """
    Extract information from an AFM font file.

    Parameters
    ----------
    fontpath : str
        Path stored as ``fname`` in the returned entry.
    font : `.AFM`
        The AFM font file from which information will be extracted.

    Returns
    -------
    `FontEntry`
        The extracted font properties.
    """
    name = font.get_familyname()
    fontname = font.get_fontname().lower()

    #  Styles are: italic, oblique, and normal (default)
    if font.get_angle() != 0 or 'italic' in name.lower():
        style = 'italic'
    elif 'oblique' in name.lower():
        style = 'oblique'
    else:
        style = 'normal'

    #  Variants are: small-caps and normal (default)
    #  !!!!  Untested
    if name.lower() in ['capitals', 'small-caps']:
        variant = 'small-caps'
    else:
        variant = 'normal'

    # Unknown AFM weight names fall back to 'normal'.
    weight = font.get_weight().lower()
    if weight not in weight_dict:
        weight = 'normal'

    #  Stretch can be absolute and relative
    #  Absolute stretches are: ultra-condensed, extra-condensed, condensed,
    #    semi-condensed, normal, semi-expanded, expanded, extra-expanded,
    #    and ultra-expanded.
    #  Relative stretches are: wider, narrower
    #  Child value is: inherit
    # Order matters here: 'demi cond' must be tested before the bare
    # 'cond' substring.
    if 'demi cond' in fontname:
        stretch = 'semi-condensed'
    elif any(word in fontname for word in ['narrow', 'cond']):
        stretch = 'condensed'
    elif any(word in fontname for word in ['wide', 'expanded', 'extended']):
        stretch = 'expanded'
    else:
        stretch = 'normal'

    #  Sizes can be absolute and relative.
    #  Absolute sizes are: xx-small, x-small, small, medium, large, x-large,
    #    and xx-large.
    #  Relative sizes are: larger, smaller
    #  Length value is an absolute font size, e.g., 12pt
    #  Percentage values are in 'em's.  Most robust specification.

    #  All AFM fonts are apparently scalable.
    size = 'scalable'

    return FontEntry(fontpath, name, style, variant, weight, stretch, size)
class FontProperties:
"""
A class for storing and manipulating font properties.
The font properties are the six properties described in the
`W3C Cascading Style Sheet, Level 1
<http://www.w3.org/TR/1998/REC-CSS2-19980512/>`_ font
specification and *math_fontfamily* for math fonts:
- family: A list of font names in decreasing order of priority.
The items may include a generic font family name, either
'sans-serif' (default), 'serif', 'cursive', 'fantasy', or 'monospace'.
In that case, the actual font to be used will be looked up
from the associated rcParam.
- style: Either 'normal' (default), 'italic' or 'oblique'.
- variant: Either 'normal' (default) or 'small-caps'.
- stretch: A numeric value in the range 0-1000 or one of
'ultra-condensed', 'extra-condensed', 'condensed',
'semi-condensed', 'normal' (default), 'semi-expanded', 'expanded',
'extra-expanded' or 'ultra-expanded'.
- weight: A numeric value in the range 0-1000 or one of
'ultralight', 'light', 'normal' (default), 'regular', 'book', 'medium',
'roman', 'semibold', 'demibold', 'demi', 'bold', 'heavy',
'extra bold', 'black'.
- size: Either an relative value of 'xx-small', 'x-small',
'small', 'medium', 'large', 'x-large', 'xx-large' or an
absolute font size, e.g., 10 (default).
- math_fontfamily: The family of fonts used to render math text; overrides
:rc:`mathtext.fontset`. Supported values are the same as the ones
supported by :rc:`mathtext.fontset`: 'dejavusans', 'dejavuserif', 'cm',
'stix', 'stixsans' and 'custom'.
Alternatively, a font may be specified using the absolute path to a font
file, by using the *fname* kwarg. However, in this case, it is typically
simpler to just pass the path (as a `pathlib.Path`, not a `str`) to the
*font* kwarg of the `.Text` object.
The preferred usage of font sizes is to use the relative values,
e.g., 'large', instead of absolute font sizes, e.g., 12. This
approach allows all text sizes to be made larger or smaller based
on the font manager's default font size.
This class will also accept a fontconfig_ pattern_, if it is the only
argument provided. This support does not depend on fontconfig; we are
merely borrowing its pattern syntax for use here.
.. _fontconfig: https://www.freedesktop.org/wiki/Software/fontconfig/
.. _pattern:
https://www.freedesktop.org/software/fontconfig/fontconfig-user.html
Note that Matplotlib's internal font manager and fontconfig use a
different algorithm to lookup fonts, so the results of the same pattern
may be different in Matplotlib than in other applications that use
fontconfig.
"""
    def __init__(self,
                 family = None,
                 style = None,
                 variant= None,
                 weight = None,
                 stretch= None,
                 size = None,
                 fname = None,  # if set, it's a hardcoded filename to use
                 math_fontfamily = None,
                 ):
        # Seed every property from the rc defaults first; the explicit
        # arguments (if any) override them via the set_* calls below.
        self._family = _normalize_font_family(rcParams['font.family'])
        self._slant = rcParams['font.style']
        self._variant = rcParams['font.variant']
        self._weight = rcParams['font.weight']
        self._stretch = rcParams['font.stretch']
        self._size = rcParams['font.size']
        self._file = None
        self._math_fontfamily = None
        if isinstance(family, str):
            # Treat family as a fontconfig pattern if it is the only
            # parameter provided.
            if (style is None and variant is None and weight is None and
                    stretch is None and size is None and fname is None):
                self.set_fontconfig_pattern(family)
                return
        self.set_family(family)
        self.set_style(style)
        self.set_variant(variant)
        self.set_weight(weight)
        self.set_stretch(stretch)
        self.set_file(fname)
        self.set_size(size)
        self.set_math_fontfamily(math_fontfamily)
@classmethod
def _from_any(cls, arg):
"""
Generic constructor which can build a `.FontProperties` from any of the
following:
- a `.FontProperties`: it is passed through as is;
- `None`: a `.FontProperties` using rc values is used;
- an `os.PathLike`: it is used as path to the font file;
- a `str`: it is parsed as a fontconfig pattern;
- a `dict`: it is passed as ``**kwargs`` to `.FontProperties`.
"""
if isinstance(arg, cls):
return arg
elif arg is None:
return cls()
elif isinstance(arg, os.PathLike):
return cls(fname=arg)
elif isinstance(arg, str):
return cls(arg)
else:
return cls(**arg)
    def __hash__(self):
        # Hash over the full tuple of tracked properties, so two
        # FontProperties with identical settings hash identically (this is
        # what makes findfont's lru_cache effective).
        l = (tuple(self.get_family()),
             self.get_slant(),
             self.get_variant(),
             self.get_weight(),
             self.get_stretch(),
             self.get_size_in_points(),
             self.get_file(),
             self.get_math_fontfamily())
        return hash(l)
    def __eq__(self, other):
        # Equality is defined via the hash, i.e. all tracked properties.
        return hash(self) == hash(other)
    def __str__(self):
        # The string form is the fontconfig pattern for these properties.
        return self.get_fontconfig_pattern()
    def get_family(self):
        """
        Return a list of font names that comprise the font family.
        """
        return self._family
    def get_name(self):
        """
        Return the name of the font that best matches the font properties.
        """
        return get_font(findfont(self)).family_name
    def get_style(self):
        """
        Return the font style. Values are: 'normal', 'italic' or 'oblique'.
        """
        return self._slant
    get_slant = get_style
    def get_variant(self):
        """
        Return the font variant. Values are: 'normal' or 'small-caps'.
        """
        return self._variant
    def get_weight(self):
        """
        Return the font weight. Values are: A numeric value in the
        range 0-1000 or one of 'light', 'normal', 'regular', 'book',
        'medium', 'roman', 'semibold', 'demibold', 'demi', 'bold',
        'heavy', 'extra bold', 'black'
        """
        return self._weight
    def get_stretch(self):
        """
        Return the font stretch or width. Options are: 'ultra-condensed',
        'extra-condensed', 'condensed', 'semi-condensed', 'normal',
        'semi-expanded', 'expanded', 'extra-expanded', 'ultra-expanded'.
        """
        return self._stretch
    def get_size(self):
        """
        Return the font size.
        """
        return self._size
    def get_size_in_points(self):
        # Alias for get_size: sizes are always stored in points.
        return self._size
    def get_file(self):
        """
        Return the filename of the associated font.
        """
        return self._file
    def get_fontconfig_pattern(self):
        """
        Get a fontconfig_ pattern_ suitable for looking up the font as
        specified with fontconfig's ``fc-match`` utility.
        This support does not depend on fontconfig; we are merely borrowing its
        pattern syntax for use here.
        """
        return generate_fontconfig_pattern(self)
def set_family(self, family):
"""
Change the font family. May be either an alias (generic name
is CSS parlance), such as: 'serif', 'sans-serif', 'cursive',
'fantasy', or 'monospace', a real font name or a list of real
font names. Real font names are not supported when
:rc:`text.usetex` is `True`.
"""
if family is None:
family = rcParams['font.family']
self._family = _normalize_font_family(family)
set_name = set_family
    def set_style(self, style):
        """
        Set the font style. Values are: 'normal', 'italic' or 'oblique'.
        `None` selects :rc:`font.style`.
        """
        if style is None:
            style = rcParams['font.style']
        # Reject anything outside the three allowed values.
        _api.check_in_list(['normal', 'italic', 'oblique'], style=style)
        self._slant = style
    set_slant = set_style
def set_variant(self, variant):
"""
Set the font variant. Values are: 'normal' or 'small-caps'.
"""
if variant is None:
variant = rcParams['font.variant']
_api.check_in_list(['normal', 'small-caps'], variant=variant)
self._variant = variant
def set_weight(self, weight):
"""
Set the font weight. May be either a numeric value in the
range 0-1000 or one of 'ultralight', 'light', 'normal',
'regular', 'book', 'medium', 'roman', 'semibold', 'demibold',
'demi', 'bold', 'heavy', 'extra bold', 'black'
"""
if weight is None:
weight = rcParams['font.weight']
try:
weight = int(weight)
if weight < 0 or weight > 1000:
raise ValueError()
except ValueError:
if weight not in weight_dict:
raise ValueError("weight is invalid")
self._weight = weight
    def set_stretch(self, stretch):
        """
        Set the font stretch or width. Options are: 'ultra-condensed',
        'extra-condensed', 'condensed', 'semi-condensed', 'normal',
        'semi-expanded', 'expanded', 'extra-expanded' or
        'ultra-expanded', or a numeric value in the range 0-1000.
        `None` selects :rc:`font.stretch`.
        """
        if stretch is None:
            stretch = rcParams['font.stretch']
        try:
            # Numeric path: accept any int in [0, 1000].
            stretch = int(stretch)
            if stretch < 0 or stretch > 1000:
                raise ValueError()
        except ValueError as err:
            # Named path: must be one of the known stretch keywords.
            if stretch not in stretch_dict:
                raise ValueError("stretch is invalid") from err
        self._stretch = stretch
    def set_size(self, size):
        """
        Set the font size. Either a relative value of 'xx-small',
        'x-small', 'small', 'medium', 'large', 'x-large', 'xx-large'
        or an absolute font size, e.g., 12.  `None` selects
        :rc:`font.size`.
        """
        if size is None:
            size = rcParams['font.size']
        try:
            size = float(size)
        except ValueError:
            # Not a number: try the named relative sizes, which scale the
            # default size.
            try:
                scale = font_scalings[size]
            except KeyError as err:
                raise ValueError(
                    "Size is invalid. Valid font size are "
                    + ", ".join(map(str, font_scalings))) from err
            else:
                size = scale * FontManager.get_default_size()
        if size < 1.0:
            # FreeType cannot render sizes below 1 pt; clamp and log.
            _log.info('Fontsize %1.2f < 1.0 pt not allowed by FreeType. '
                      'Setting fontsize = 1 pt', size)
            size = 1.0
        self._size = size
def set_file(self, file):
"""
Set the filename of the fontfile to use. In this case, all
other properties will be ignored.
"""
self._file = os.fspath(file) if file is not None else None
def set_fontconfig_pattern(self, pattern):
"""
Set the properties by parsing a fontconfig_ *pattern*.
This support does not depend on fontconfig; we are merely borrowing its
pattern syntax for use here.
"""
for key, val in parse_fontconfig_pattern(pattern).items():
if type(val) == list:
getattr(self, "set_" + key)(val[0])
else:
getattr(self, "set_" + key)(val)
    def get_math_fontfamily(self):
        """
        Return the name of the font family used for math text.
        The default font is :rc:`mathtext.fontset`.
        """
        return self._math_fontfamily
    def set_math_fontfamily(self, fontfamily):
        """
        Set the font family for text in math mode.
        If not set explicitly, :rc:`mathtext.fontset` will be used.
        Parameters
        ----------
        fontfamily : str
            The name of the font family.
            Available font families are defined in the
            matplotlibrc.template file
            :ref:`here <customizing-with-matplotlibrc-files>`
        See Also
        --------
        .text.Text.get_math_fontfamily
        """
        if fontfamily is None:
            fontfamily = rcParams['mathtext.fontset']
        else:
            valid_fonts = _validators['mathtext.fontset'].valid.values()
            # _check_in_list() Validates the parameter math_fontfamily as
            # if it were passed to rcParams['mathtext.fontset']
            _api.check_in_list(valid_fonts, math_fontfamily=fontfamily)
        self._math_fontfamily = fontfamily
    def copy(self):
        """Return a copy of self."""
        # Shallow copy: build a fresh default instance, then overwrite all
        # of its attributes with this instance's current values.
        new = type(self)()
        vars(new).update(vars(self))
        return new
class _JSONEncoder(json.JSONEncoder):
    """JSON encoder that can serialize `FontManager` and `FontEntry`."""
    def default(self, o):
        # Tag serialized objects with a __class__ marker so that
        # _json_decode() can rebuild the correct type on load.
        if isinstance(o, FontManager):
            return dict(o.__dict__, __class__='FontManager')
        elif isinstance(o, FontEntry):
            d = dict(o.__dict__, __class__='FontEntry')
            try:
                # Cache paths of fonts shipped with Matplotlib relative to the
                # Matplotlib data path, which helps in the presence of venvs.
                d["fname"] = str(
                    Path(d["fname"]).relative_to(mpl.get_data_path()))
            except ValueError:
                # Font is not under the data path: keep the absolute path.
                pass
            return d
        else:
            return super().default(o)
def _json_decode(o):
    """Object hook for `json.load`: rebuild objects tagged by `_JSONEncoder`."""
    cls = o.pop('__class__', None)
    if cls is None:
        # Untagged plain dict: return unchanged.
        return o
    elif cls == 'FontManager':
        # Bypass __init__ (which would rescan the system for fonts) and
        # restore the attributes directly.
        r = FontManager.__new__(FontManager)
        r.__dict__.update(o)
        return r
    elif cls == 'FontEntry':
        r = FontEntry.__new__(FontEntry)
        r.__dict__.update(o)
        if not os.path.isabs(r.fname):
            # Relative paths were stored relative to the Matplotlib data
            # path by _JSONEncoder; re-anchor them.
            r.fname = os.path.join(mpl.get_data_path(), r.fname)
        return r
    else:
        raise ValueError("Don't know how to deserialize __class__=%s" % cls)
def json_dump(data, filename):
    """
    Dump `FontManager` *data* as JSON to the file named *filename*.

    See Also
    --------
    json_load

    Notes
    -----
    File paths that are children of the Matplotlib data path (typically, fonts
    shipped with Matplotlib) are stored relative to that data path (to remain
    valid across virtualenvs).

    This function temporarily locks the output file to prevent multiple
    processes from overwriting one another's output.
    """
    with cbook._lock_path(filename), open(filename, 'w') as fh:
        try:
            json.dump(data, fh, cls=_JSONEncoder, indent=2)
        except OSError as e:
            # Writing the cache is best-effort: warn but never fail the
            # caller.  Lazy %-style args let the logger skip formatting
            # when the warning is filtered.
            _log.warning('Could not save font_manager cache %s', e)
def json_load(filename):
    """
    Load a `FontManager` from the JSON file named *filename*.

    Raises whatever `open` or `json.load` raise on a missing or corrupt
    cache file; callers (see `_load_fontmanager`) treat any failure as
    "rebuild the cache".

    See Also
    --------
    json_dump
    """
    with open(filename, 'r') as fh:
        return json.load(fh, object_hook=_json_decode)
def _normalize_font_family(family):
    """Return *family* as a list, wrapping a bare string in a 1-element list."""
    if isinstance(family, str):
        return [family]
    return family
class FontManager:
    """
    On import, the `FontManager` singleton instance creates a list of ttf and
    afm fonts and caches their `FontProperties`. The `FontManager.findfont`
    method does a nearest neighbor search to find the font that most closely
    matches the specification. If no good enough match is found, the default
    font is returned.
    """
    # Increment this version number whenever the font cache data
    # format or behavior has changed and requires a existing font
    # cache files to be rebuilt.
    __version__ = 330
    def __init__(self, size=None, weight='normal'):
        """Build the font lists by scanning bundled and system font dirs."""
        self._version = self.__version__
        self.__default_weight = weight
        self.default_size = size
        # Directories of fonts shipped with Matplotlib.
        paths = [cbook._get_data_path('fonts', subdir)
                 for subdir in ['ttf', 'afm', 'pdfcorefonts']]
        # Create list of font paths
        for pathname in ['TTFPATH', 'AFMPATH']:
            if pathname in os.environ:
                ttfpath = os.environ[pathname]
                if ttfpath.find(';') >= 0:  # win32 style
                    paths.extend(ttfpath.split(';'))
                elif ttfpath.find(':') >= 0:  # unix style
                    paths.extend(ttfpath.split(':'))
                else:
                    paths.append(ttfpath)
                _api.warn_deprecated(
                    "3.3", name=pathname, obj_type="environment variable",
                    alternative="FontManager.addfont()")
        _log.debug('font search path %s', str(paths))
        # Load TrueType fonts and create font dictionary.
        self.defaultFamily = {
            'ttf': 'DejaVu Sans',
            'afm': 'Helvetica'}
        self.afmlist = []
        self.ttflist = []
        # Delay the warning by 5s.
        timer = Timer(5, lambda: _log.warning(
            'Matplotlib is building the font cache; this may take a moment.'))
        timer.start()
        try:
            for fontext in ["afm", "ttf"]:
                for path in [*findSystemFonts(paths, fontext=fontext),
                             *findSystemFonts(fontext=fontext)]:
                    try:
                        self.addfont(path)
                    except OSError as exc:
                        _log.info("Failed to open font file %s: %s", path, exc)
                    except Exception as exc:
                        _log.info("Failed to extract font properties from %s: "
                                  "%s", path, exc)
        finally:
            timer.cancel()
    def addfont(self, path):
        """
        Cache the properties of the font at *path* to make it available to the
        `FontManager`. The type of font is inferred from the path suffix.
        Parameters
        ----------
        path : str or path-like
        """
        if Path(path).suffix.lower() == ".afm":
            with open(path, "rb") as fh:
                font = afm.AFM(fh)
            prop = afmFontProperty(path, font)
            self.afmlist.append(prop)
        else:
            font = ft2font.FT2Font(path)
            prop = ttfFontProperty(font)
            self.ttflist.append(prop)
    @property
    def defaultFont(self):
        # Lazily evaluated (findfont then caches the result) to avoid including
        # the venv path in the json serialization.
        return {ext: self.findfont(family, fontext=ext)
                for ext, family in self.defaultFamily.items()}
    def get_default_weight(self):
        """
        Return the default font weight.
        """
        return self.__default_weight
    @staticmethod
    def get_default_size():
        """
        Return the default font size.
        """
        return rcParams['font.size']
    def set_default_weight(self, weight):
        """
        Set the default font weight.  The initial value is 'normal'.
        """
        self.__default_weight = weight
    @staticmethod
    def _expand_aliases(family):
        # Map a CSS generic alias ('serif', 'sans-serif', ...) to the
        # user-configured list of concrete families in rcParams.
        if family in ('sans', 'sans serif'):
            family = 'sans-serif'
        return rcParams['font.' + family]
    # Each of the scoring functions below should return a value between
    # 0.0 (perfect match) and 1.0 (terrible match)
    def score_family(self, families, family2):
        """
        Return a match score between the list of font families in
        *families* and the font family name *family2*.
        An exact match at the head of the list returns 0.0.
        A match further down the list will return between 0 and 1.
        No match will return 1.0.
        """
        if not isinstance(families, (list, tuple)):
            families = [families]
        elif len(families) == 0:
            return 1.0
        family2 = family2.lower()
        step = 1 / len(families)
        for i, family1 in enumerate(families):
            family1 = family1.lower()
            if family1 in font_family_aliases:
                # Generic alias: score by position of family2 within the
                # alias's expansion list.
                options = [*map(str.lower, self._expand_aliases(family1))]
                if family2 in options:
                    idx = options.index(family2)
                    return (i + (idx / len(options))) * step
            elif family1 == family2:
                # The score should be weighted by where in the
                # list the font was found.
                return i * step
        return 1.0
    def score_style(self, style1, style2):
        """
        Return a match score between *style1* and *style2*.
        An exact match returns 0.0.
        A match between 'italic' and 'oblique' returns 0.1.
        No match returns 1.0.
        """
        if style1 == style2:
            return 0.0
        elif (style1 in ('italic', 'oblique')
              and style2 in ('italic', 'oblique')):
            return 0.1
        return 1.0
    def score_variant(self, variant1, variant2):
        """
        Return a match score between *variant1* and *variant2*.
        An exact match returns 0.0, otherwise 1.0.
        """
        if variant1 == variant2:
            return 0.0
        else:
            return 1.0
    def score_stretch(self, stretch1, stretch2):
        """
        Return a match score between *stretch1* and *stretch2*.
        The result is the absolute value of the difference between the
        CSS numeric values of *stretch1* and *stretch2*, normalized
        between 0.0 and 1.0.
        """
        try:
            stretchval1 = int(stretch1)
        except ValueError:
            # Unknown names fall back to 500 ('normal'-ish midpoint).
            stretchval1 = stretch_dict.get(stretch1, 500)
        try:
            stretchval2 = int(stretch2)
        except ValueError:
            stretchval2 = stretch_dict.get(stretch2, 500)
        return abs(stretchval1 - stretchval2) / 1000.0
    def score_weight(self, weight1, weight2):
        """
        Return a match score between *weight1* and *weight2*.
        The result is 0.0 if both weight1 and weight 2 are given as strings
        and have the same value.
        Otherwise, the result is the absolute value of the difference between
        the CSS numeric values of *weight1* and *weight2*, normalized between
        0.05 and 1.0.
        """
        # exact match of the weight names, e.g. weight1 == weight2 == "regular"
        if cbook._str_equal(weight1, weight2):
            return 0.0
        w1 = weight1 if isinstance(weight1, Number) else weight_dict[weight1]
        w2 = weight2 if isinstance(weight2, Number) else weight_dict[weight2]
        return 0.95 * (abs(w1 - w2) / 1000) + 0.05
    def score_size(self, size1, size2):
        """
        Return a match score between *size1* and *size2*.
        If *size2* (the size specified in the font file) is 'scalable', this
        function always returns 0.0, since any font size can be generated.
        Otherwise, the result is the absolute distance between *size1* and
        *size2*, normalized so that the usual range of font sizes (6pt -
        72pt) will lie between 0.0 and 1.0.
        """
        if size2 == 'scalable':
            return 0.0
        # Size value should have already been resolved to points; fall back
        # to the named scalings if not.
        try:
            sizeval1 = float(size1)
        except ValueError:
            sizeval1 = self.default_size * font_scalings[size1]
        try:
            sizeval2 = float(size2)
        except ValueError:
            return 1.0
        return abs(sizeval1 - sizeval2) / 72
    def findfont(self, prop, fontext='ttf', directory=None,
                 fallback_to_default=True, rebuild_if_missing=True):
        """
        Find a font that most closely matches the given font properties.
        Parameters
        ----------
        prop : str or `~matplotlib.font_manager.FontProperties`
            The font properties to search for. This can be either a
            `.FontProperties` object or a string defining a
            `fontconfig patterns`_.
        fontext : {'ttf', 'afm'}, default: 'ttf'
            The extension of the font file:
            - 'ttf': TrueType and OpenType fonts (.ttf, .ttc, .otf)
            - 'afm': Adobe Font Metrics (.afm)
        directory : str, optional
            If given, only search this directory and its subdirectories.
        fallback_to_default : bool
            If True, will fallback to the default font family (usually
            "DejaVu Sans" or "Helvetica") if the first lookup hard-fails.
        rebuild_if_missing : bool
            Whether to rebuild the font cache and search again if the first
            match appears to point to a nonexisting font (i.e., the font cache
            contains outdated entries).
        Returns
        -------
        str
            The filename of the best matching font.
        Notes
        -----
        This performs a nearest neighbor search. Each font is given a
        similarity score to the target font properties. The first font with
        the highest score is returned. If no matches below a certain
        threshold are found, the default font (usually DejaVu Sans) is
        returned.
        The result is cached, so subsequent lookups don't have to
        perform the O(n) nearest neighbor search.
        See the `W3C Cascading Style Sheet, Level 1
        <http://www.w3.org/TR/1998/REC-CSS2-19980512/>`_ documentation
        for a description of the font finding algorithm.
        .. _fontconfig patterns:
           https://www.freedesktop.org/software/fontconfig/fontconfig-user.html
        """
        # Pass the relevant rcParams (and the font manager, as `self`) to
        # _findfont_cached so to prevent using a stale cache entry after an
        # rcParam was changed.
        rc_params = tuple(tuple(rcParams[key]) for key in [
            "font.serif", "font.sans-serif", "font.cursive", "font.fantasy",
            "font.monospace"])
        return self._findfont_cached(
            prop, fontext, directory, fallback_to_default, rebuild_if_missing,
            rc_params)
    @lru_cache()
    def _findfont_cached(self, prop, fontext, directory, fallback_to_default,
                         rebuild_if_missing, rc_params):
        # Uncached implementation of findfont(); *rc_params* is unused here
        # but participates in the lru_cache key (see findfont above).
        prop = FontProperties._from_any(prop)
        fname = prop.get_file()
        if fname is not None:
            # An explicit font file short-circuits the whole search.
            return fname
        if fontext == 'afm':
            fontlist = self.afmlist
        else:
            fontlist = self.ttflist
        # Lower score is better; start above any achievable score.
        best_score = 1e64
        best_font = None
        _log.debug('findfont: Matching %s.', prop)
        for font in fontlist:
            if (directory is not None and
                    Path(directory) not in Path(font.fname).parents):
                continue
            # Matching family should have top priority, so multiply it by 10.
            score = (self.score_family(prop.get_family(), font.name) * 10
                     + self.score_style(prop.get_style(), font.style)
                     + self.score_variant(prop.get_variant(), font.variant)
                     + self.score_weight(prop.get_weight(), font.weight)
                     + self.score_stretch(prop.get_stretch(), font.stretch)
                     + self.score_size(prop.get_size(), font.size))
            _log.debug('findfont: score(%s) = %s', font, score)
            if score < best_score:
                best_score = score
                best_font = font
            if score == 0:
                break
        if best_font is None or best_score >= 10.0:
            # Score >= 10 means even the family did not match.
            if fallback_to_default:
                _log.warning(
                    'findfont: Font family %s not found. Falling back to %s.',
                    prop.get_family(), self.defaultFamily[fontext])
                for family in map(str.lower, prop.get_family()):
                    if family in font_family_aliases:
                        _log.warning(
                            "findfont: Generic family %r not found because "
                            "none of the following families were found: %s",
                            family, ", ".join(self._expand_aliases(family)))
                default_prop = prop.copy()
                default_prop.set_family(self.defaultFamily[fontext])
                return self.findfont(default_prop, fontext, directory,
                                     fallback_to_default=False)
            else:
                raise ValueError(f"Failed to find font {prop}, and fallback "
                                 f"to the default font was disabled")
        else:
            _log.debug('findfont: Matching %s to %s (%r) with score of %f.',
                       prop, best_font.name, best_font.fname, best_score)
            result = best_font.fname
        if not os.path.isfile(result):
            if rebuild_if_missing:
                _log.info(
                    'findfont: Found a missing font file. Rebuilding cache.')
                new_fm = _load_fontmanager(try_read_cache=False)
                # Replace self by the new fontmanager, because users may have
                # a reference to this specific instance.
                # TODO: _load_fontmanager should really be (used by) a method
                # modifying the instance in place.
                vars(self).update(vars(new_fm))
                return self.findfont(
                    prop, fontext, directory, rebuild_if_missing=False)
            else:
                raise ValueError("No valid font could be found")
        return _cached_realpath(result)
@lru_cache()
def is_opentype_cff_font(filename):
    """
    Return whether the given font is a Postscript Compact Font Format Font
    embedded in an OpenType wrapper. Used by the PostScript and PDF backends
    that can not subset these fonts.
    """
    # Only .otf files can carry a CFF table; everything else is a fast no.
    ext = os.path.splitext(filename)[1].lower()
    if ext != '.otf':
        return False
    # CFF-flavored OpenType files start with the 'OTTO' sfnt tag.
    with open(filename, 'rb') as fd:
        return fd.read(4) == b"OTTO"
@lru_cache(64)
def _get_font(filename, hinting_factor, *, _kerning_factor, thread_id):
    # LRU-cached FT2Font constructor.  *thread_id* takes part in the cache
    # key so each thread gets its own FT2Font instance (see get_font below).
    return ft2font.FT2Font(
        filename, hinting_factor, _kerning_factor=_kerning_factor)
# FT2Font objects cannot be used across fork()s because they reference the same
# FT_Library object. While invalidating *all* existing FT2Fonts after a fork
# would be too complicated to be worth it, the main way FT2Fonts get reused is
# via the cache of _get_font, which we can empty upon forking (in Py3.7+).
# (os.register_at_fork only exists on platforms that support fork().)
if hasattr(os, "register_at_fork"):
    os.register_at_fork(after_in_child=_get_font.cache_clear)
def get_font(filename, hinting_factor=None):
    """Return a (cached) FT2Font for *filename*, honoring the hinting rc."""
    # Resolving the path avoids embedding the font twice in pdf/ps output if a
    # single font is selected using two different relative paths.
    filename = _cached_realpath(filename)
    if hinting_factor is None:
        hinting_factor = rcParams['text.hinting_factor']
    # also key on the thread ID to prevent segfaults with multi-threading
    return _get_font(filename, hinting_factor,
                     _kerning_factor=rcParams['text.kerning_factor'],
                     thread_id=threading.get_ident())
def _load_fontmanager(*, try_read_cache=True):
    """
    Return a `FontManager`, preferably from the on-disk JSON cache.

    If *try_read_cache* is false, or the cache is missing/corrupt/stale
    (version mismatch), a fresh `FontManager` is built and written back to
    the cache.
    """
    fm_path = Path(
        mpl.get_cachedir(), f"fontlist-v{FontManager.__version__}.json")
    if try_read_cache:
        try:
            fm = json_load(fm_path)
        except Exception as exc:
            # Cache unreadable: note why, then fall through and rebuild it
            # instead of silently discarding the error.
            _log.debug("Could not load font cache %s: %s", fm_path, exc)
        else:
            if getattr(fm, "_version", object()) == FontManager.__version__:
                _log.debug("Using fontManager instance from %s", fm_path)
                return fm
    fm = FontManager()
    json_dump(fm, fm_path)
    _log.info("generated new fontManager")
    return fm
# Module-level singleton and the public findfont() alias bound to it.
fontManager = _load_fontmanager()
findfont = fontManager.findfont
| [
"[email protected]"
]
| |
e7abdc3057615b0c4df8020ffafd22f81321db03 | e82b761f53d6a3ae023ee65a219eea38e66946a0 | /All_In_One/addons/Zero_Brush/Zero_Brush.py | 0b40c4565af51d6f184f0fa1f0620cc26249525c | []
| no_license | 2434325680/Learnbgame | f3a050c28df588cbb3b14e1067a58221252e2e40 | 7b796d30dfd22b7706a93e4419ed913d18d29a44 | refs/heads/master | 2023-08-22T23:59:55.711050 | 2021-10-17T07:26:07 | 2021-10-17T07:26:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 348,557 | py | #===============================================================
# Copyright (C)2012-2016 Blender Sensei (Seth Fentress: Licensor)
# For more information visit Blendersensei.com
#
# ####BEGIN LICENSE BLOCK #####
# By using this software (from this point on referred to as "The
# Software", you (the Licensee) agree to the terms of its license.
# The conditions of this license are as follows:
#
# • You may not redistribute, share or sell this software.
# • The content you create with this software may be used
# for personal or commercial use.
# • The Software is provided under an AS-IS basis. Licensor shall
# never, and without any limit, be liable for any damage, cost,
# expense or any other payment incurred by Licensee as a result
# of Software’s actions, failure, bugs and/or any other interaction
# between The Software and Licensee’s end-equipment, computers,
# other software or any 3rd party, end-equipment or services.
# ####END LICENSE BLOCK #####
#
#===============================================================
# Blender add-on metadata, read by Blender's add-on manager.
bl_info = {
    "name": "Zero Brush",
    "author": "Blender Sensei (Seth Fentress) Blendersensei.com",
    "version": (1, 8, 3),
    "location": "Properties shelf & Tool shelf of Sensei tab (SPACE: brush menu | Q: options)",
    "description": "Zero button texture painting by Blender Sensei",
    "wiki_url": "https://blendersensei.com/zero-brush",
    "category": "Sensei Format"}
import zlib
i_0 = b'x\xda\xa5X\xddo\xdb6\x10\xffWn}\xd8\x1a\xc0\xf0\x80\xf5e\xe8[\x1c\xa7m\xb0\xb8)\xec$\x05\xf6"\xd0\x12m\xb1\x91H\x8d\xa4\xec\xaa\xe8\x1f\xbf;~H\x94\x94t\x05\xf6\x92Xw\xe2}\xdf\xef\x8eR\xfb/<\xb7\xcbo\xfb\xaca\xda\x8a\xbc\xe2Y\xc1-\x13Ufx\x85\x9c\xec\x8f?~\xf93\xfb\x14x\xb0v<\xd8%<\xff\x1bl\xc9\xc1v\r\x07u\x80(\n\xf2RT\x85\xe6\x12\xbcL\xb0\n\x84\xcc5g\x06\xdf\xd3H\r\xbf_\xf7\'x!,\xd4\xaa\xe0\x0bP\xce\xb4\xf0\x80B\n\xae/\xbc\xca\r\xb3\\\x0bV\xc1]c\x85\x92\xc6Sk.[r\xa4\x0e\xdcL\x05.1&\x07U\xc35s\\\xa8\xc4\x13G\xf1\xb5:\ty\x84x\xd8\x90}\xed\t\x9f\x1b\x03\x07\xadj`\xe0#\x82\x87\x96^\xda\xa51\xe2(!\n\x1d[\xc1\x8ab\xb0\xc4\xbf^\x14($\xd2(\x16\x143/\x94\x17\xc1\xdd\xe8\xb6\xd3_sS.GQ^u/\xa8\x0b\xd9\xdaw\x13\xa5\xe1\x1c\xab\xaa^\xb0-\x19\x124\x87\xd6\x90\xc7dDo\x94\xf3\x94(\x9506\xa8V}\x89$\x91\x8d\xe1\x9b%d\xcc\xd8R\\\xb9S?D\xb6W2\xf1\xdd\x8c\x15\x9aC\xc6\x9a\xa6\xea2\xcb\xbf\x06g\xde\xfd\x05\x97D\x82{\xfe\xd5\xb6\x9a{\xf2\x95\x92\'\xae-\x88\x9a\x1d\xb9\x81B\xab\xa6!\xa1\x12C\x1c\x9d\xeeu\x1e\x04V\xd9^\xab\xb3\xe1\x1a\x8b\x11_\xe9\r\x0b\xea\xad\x97M\x0eW\x8a\x15\xd9^\xb7\xa6\xf4\xac\xd5\xf0\xf3\x16Y\xc0\xa4W\x8a\x05,\x8f\x0b\xf8\xd2p\xfck\xc5\xe1\xb0\x00n\xf3\xe5\xf2\x02\x98\xc1\x9c\'\x12\x9e\x15\xceM">>x\x05pP\x15\x16>uUp\xefG\xaaF\xd2\xc6\x05\xe2m\x18Z\xe1\xef\x158u\x80\xd1#\xa3\xa6\xb9\xf6\xef\xe7\xcf01/\xb9\x8f\xf88\xfc\xf7\n6<\xfa\x19\x89RIW\xc6}"|\xc8\xfb\xf7\x06\x85\x8d\xe6\xb90\x1cq\x08_\xe9m\xdc*\x8b\xe9\x81G\xc1\xcf\x9ev\xa7m\xa9\x8e\x9a5\xa5\xc8\xb1\xac:\xd0\xfe\x8d\x13\xbe\x01gaK\xf8\xbc\xd8-.\x17kL\x0e\xc6Oc\xa6\xe1\x89w\xe6\xc7\xear\xab\xab^\xe7\'\xcf\x82O\xc4r\xf1\xd1\xaa\x9a\xf5\x02\xc2R\xb6o\xad\xedk\x1dO\x86n\xdb\x10/m?"\xbc\x85;\x0fh\xdfa\x97\xb7UC?\xbc\x06\xfa\x1f\xe0\xef{\x844\xfc\xf5\xae\xad*\x83\x00\xc9\xe5T7\xab\xf2\x92\xd7\xdd$T\x97\x9e\xea\x85N\x8f\x18n3\x86\xe0u\xe2Y\xc5:\xae\xa3y\x16.\x1d5v\x14\xdc\x0e\xdc\xc0\t%\x0b\xc99\xa7\x98\xc4b\xb7\xcfRvM4\r\x9f\xa2\xdd\x1f\xb0\x82\xc1\x94\xe2`\t\xf3\xdc\tp\'\x16p\xe6\xe2X\xe2\x7f*\x15\xfe\xd5e\xacf\
xe6\xc9\xa1\xee<\xe0h\xbbM\xfb~\xa3\x06\xbb\x13\xca\xc8^h\x1b7i\xd4y\x16\xc5\x02sc\'"\xd7\x8e6\x16\x1ah\xcf\x84a\x90\xa5\xb9\xebf,\xc8\xcc\xb7\xe9P\xc0\x8e\x83\xc9\xa9\xe0&\xe1\x042\x01#\xca\xe5Z\xbaZ\xa6\xe9\x87\xc8\x15:}\xcf\xf2\'\xdf/\xab\xca\x8d\xbfYRYLg\x90\xbaC\x02l:\x9f\xc4@\xbb\xaa\x04\x8a\xb1\xa5@\x81\xfc\xa0\xd0\x03<F\xc0\xdf\xa9V{8D\r$\n\xc75\x93\xa4\xb8f\x85#Jl)/~6\n|\x14\x08US|tz\xe1\x1dam\x02\x95W8\xe41\x80\x88\x83\x94z\x9f\x16\x87\xc7\xce\x82\x18W\'\xc6\xbb>\xd3\xb6o\xeb&\xb3*\x93J\xd7lh\xd4\r\xd7\x88\xbd\x1f\x1d1N"G:\t#\xf6\x84\xf3x\xceU\x95?\x18\x9c\xf11\xc5y\x8eQ\xc0\x97R\x1e\xbc\xaeY\x07\x96=\xd1p\xc2\xaaDz-dk\xb9\xb9X\x0e\xb8\x89\xec\x99\x85=\x8d\xb84\x18\xc2\xfa\x82\xc5\xd7\xef\r\x84\xe2\x11\x07\x7fFQ\x1c\xfa\xf4\x96\xc7\x85\t\xa6\x93RZ\xbaF\xcbZ\xd22\xdc\x18\x0c\'b\x9d\xfa\xda\xa5C!\xd4\x84\x82B\x98\x06\x1d\xc7%\x0b3 \xe4\x01\xc3\xb2W\xad\xf5\xe5\xe2\xf7\xa7\xe5KR\xff\x9f<r\xaf3\x96\xd7\x93=\xc6\xd3T\xba\xd8\xf9\xbf\x04\xf0o\xd6C\xed\xcd\xb7\xbbtQ\xc1\xc0\xb4R\xfc\xd3\xf2\xa1X(\x8a\x0f\t\xcd\x11\xe2\xf0_\xc4:4\xf0k\xec?/\x00^c\xd3x\xc0\xa2\xae\xc1,\x16m\x83\xfe\xb2dq\xb9\x98\xe3\x01\x01n{\xea\xbb\x1d\x1f\xe1\xe1\x11\x8dhRJ\xa8\x11\xbfg\xbaB\xc5!\xc0\xf4K\x18\xe30\x00\x07\xd7)A\x17\xa2\xd1\xc4:\xf5\x13\xf29Z2\xf7\xfc\xbcS\x95\xd2\xc3Zz5<\xdeS\xa6py\xa5\xbdE\xf3c[1=AT\xecZ\xc8\xb1\xc0\x9d$P\xf2y\r\xaee{\x05\xab\xfe)\x95\xef\xe5\xb9\xa1\xcd\xe0\x15\x9dx\x85\xd1\xa6\xb4\xbcY\x03?\x1cP\xe0\xc5O\xaa3\r\xcf\xc9\xd6A\xe5nDI\xd5\xc6w\xa97_\xedJ!\x85\xc4\x92~\xe5rp\xc6\xb5\xa2\x04\x04|\xe3\x14{\xa5\xe7\x92\xa3\xf7\xf4|f\xd2_up\x9eI\xfe\xbc)V3i\xf0B\xc3\xe3L$k\xee\xa7Dg\xd0Y \xfech\xa5\x87\xc2P\x0eBF\xfc\xa2\x8a@[\x12S\xb0\xfc\x84\x9d\xef\x03M\xc92\x9a\x9c\x83\xc2K\xa2a\xbdE\x9a\xd37\xf1k\xb04\xef\xfc\xba\x9c\x9a\xe1\xbc>\xc5\x84\x97J\x99\x17\x1c>V\xea<(~\x9f>\xb9,\xd3\x8c\x07^\x0b\xeb\x1a\xc8%}\t\x0f8\n\xdc>\xaet\xce1\x10\x88`\xdd\xf2y\xf9)\xea\x93\x86\x8f\xc9s\xdc3IT\xb8d\x0e\xa3\xa5n++\xa8\xa5qY\x13\x07\x11\x17~\x16!\xbf\x8e\xdd\x98 
G8\xe2\xeeo\x03L\xf8\x8d\x8daN\xc6j\'7\x98~\xb2\xf4\xafG\x06e\xd2XFG\x1c\x0f#1\x9b\xe6\x8e\x91\x15\x9dd\xb5\xc8\xc3@\xf7\x9b\xe2\xda\xd3hG\x08;\xc9@\x80#\x97t\xe9\xe2~\xcff>\xc1^\x18\xbc\xd6\xfc\x9f\xd6\x85@s\xab\x1al\xf3cG\x89\x958\xf8\xa5\xc0\xbbB1\x83.\xd3&Q\x08[\x95\x8e7\xff`U\xbb/\xc4I\x14|T/\x04\x93\x0e\xf7}\x1e0(/\xba\xda\xcb\x1f\xf6\xb2[l\xc1\x91\x92-/\xda|\xa4\xe17\x13\xdd*\x92\xf7\xa6\xa0[\x8cd\x10\xca\xaeg\x14\xba\xa7\x8c\xc8\x83\x90\x18\xcd\x8c\x96\x95l\xb8\xa8\xb82X\xb1\xd8\x03\xfe\x1a\xea\xcdAO\x87\xef\x1bT\x88-\x01p\xd5\xb9\xef\t\xa4\t/\xc55\x93\xd8\xfa1}\x93\xbd!-p7\xec7\xb1p\xd3R\xf7*\xfb\x9a\x1e\x85\xc2\xdf3\x87\xb2\x86I/\xff\xe7\xb6\x91\x80\t\x16\xbe!d(L\x02\xa6\x13\xc2\x02r\xd5t\x0b\xff\x11\xc7\xad\xc7\xfd\x17\x9cp6i\xc3 \xdb\x1f\xf6f\xbbB\xc3k\x15:oj\xa5l9,\x05\x8e\n\xbb\x84:;"\xa8\xf9\x0c\x9f\x9e\xb9I\xc9IA\xfbC\xa3\xbb\xda\xca=\xc4\x8f[\x0e\xe3i\n!J\x1c8a4\xe2;k\xb8y\x0b\xdb\xeb\xf7\x0f\xb7\x97[Xm\x1fv\x1f~\x87\xab\x87\xed\xe3\xf5\xefp{\xf3\x11\xff^\xddl\xafn\xaf\xa7\xear<\x8ec\x1a\xd7\x18\x99\xc7\xf2\xbar4\xb4sN\x0b\xef9\x80p\x05\xec/\xb4\xb4-:\xcc\xc0\x1e\x7f\xc6#/\xc7t2\xe2\x04\xfe\x8a\xe2\x83\x8b7\x9b\xcb\xf7\xd7\xd9\xf5\xfa\xe6\xfen\x1bF\xc0\xdd\xdd\xedn*\xcb\xed:\x19]<\xfap\xba{\n\\\xd3\x97\xb8\xf0q\xa0e\x95\xf8\xc6\x86\x05\xf4\xbd\xf2\x97\x13\xb2\x10\xdb\x1e#\xb7\xe7\xf6\x8c7UW\xfe8\xbc\xe9\xbe\xee\xb8\xf4\xec?\x91\x90\x8a\xb8d\xfc\xcd\xb5Jo\x08\x89e;.\r\x17?\xe1B\xfa\xa2[\xca\xd1\x9e{\x15\x95\xcf\xfb,\x7f\xa2\x1b\xc4\x9b"\x1b\xb6\xa2\xe8Fh\xdb\xd1\xd1\x9f4\xf1\xf1\xe6\xfas\xf6f\xfdc\xeb^\x165e\x8d\xc4=\xdcL\xbf\xbb\xd8\x10\xffU\xc5d\x98\xeb\x0f\x92\x9d\x10\x00\x86\xd1\xe4?\x19\xd0\xdb\x94]\x89y\xa5oy\x84jC\x82\x91\n\xb7#j\x7f\xa3\x0f7\t\x7f3\xcfj\xa1\xb5\xd2\xd3\x0b\xc9g\xc7\x84M\xc2\xfc\x17\xf5\xb3\xd0\xf4'
i_1 = b'x\xda\x03\x00\x00\x00\x00\x01'
# Decode the zlib-compressed, obfuscated string tables i_0/i_1 into lists.
# str() of the decompressed bytes yields the repr "b'...'"; [2:] strips the
# leading "b'" (the trailing quote remains attached to the final entry).
# '_22!8_' is the delimiter between table entries.
a = str(zlib.decompress(i_0))[2:]
i_0 = a.split('_22!8_')
b = str(zlib.decompress(i_1))[2:]
i_1 = b.split('_22!8_')
import bpy, os, addon_utils
from bpy.props import*
from bpy_extras.io_utils import ImportHelper
from bpy.types import Menu, Panel
from bl_ui.properties_paint_common import (
UnifiedPaintPanel,
brush_texture_settings,
)
default, state = addon_utils.check("Zero_Brush_Lite")
if state:
try:
addon_utils.disable("Sensei_Keys", default_set = True)
addon_utils.disable("Zero_Brush_Lite", default_set = True)
addon_utils.enable("Zero_Brush", default_set = True)
addon_utils.enable("Sensei_Keys", default_set = True)
except:
pass
try:
bpy.ops.wm.save_userpref('INVOKE_DEFAULT')
except:
pass
def fu0():
    """Remove all unused (zero-user) datablocks from the current blend data.

    Walks the main datablock collections and deletes every entry with no
    users.  Cleanup is best-effort: any failure (including the collection
    mutating while it is iterated) is ignored rather than propagated to
    the caller.
    """
    d = bpy.data
    collections = [d.images, d.textures, d.node_groups,
                   d.materials, d.worlds, d.meshes, d.objects]
    for coll in collections:
        try:
            for block in coll:
                if block.users < 1:
                    try:
                        coll.remove(block, do_unlink=True)
                    except Exception:
                        # Fall back to the plain remove() signature if
                        # do_unlink is rejected by this Blender API.
                        try:
                            coll.remove(block)
                        except Exception:
                            pass
        except Exception:
            # Use `except Exception` (not a bare except) so cleanup never
            # swallows KeyboardInterrupt/SystemExit.
            pass
def fu1(mode):
    """Map a context-mode string to a mode_set() value and switch to it."""
    # Checks run in order, each against the possibly-already-remapped
    # string, exactly mirroring the original sequential substring tests
    # (e.g. 'PARTICLE_EDIT' matches 'EDIT' first and becomes 'EDIT').
    remap = (
        ('EDIT', 'EDIT'),
        ('TEXTURE', 'TEXTURE_PAINT'),
        ('VERTEX', 'VERTEX_PAINT'),
        ('WEIGHT', 'WEIGHT_PAINT'),
        ('PARTICLE', 'PARTICLE_EDIT'),
    )
    for token, target in remap:
        if token in mode:
            mode = target
    try:
        bpy.ops.object.mode_set(mode=mode)
    except:
        print('unable to return to previous mode')
    return
def fu2(fn):
    """Sanitize a file name into a Zero Brush brush name.

    Strips punctuation, turns dashes/underscores into spaces, compacts and
    truncates long names, and returns the result title-cased with the
    leading '°' marker Zero Brush uses to tag its own brushes.  Empty
    input yields the default '°Mybrush'.
    """
    # Characters that are dropped outright (the set includes both the
    # backslash and the '|' character).
    disallowed = ',<>:""[]{}()/\\|!@$%^&*,.?'
    newName = ''.join(x for x in fn if x not in disallowed)
    newName = newName.replace("-", " ").replace("_", " ")
    # Long names: squeeze out the spaces and truncate.
    if len(newName) > 20:
        newName = newName.replace(" ", "")[:18]
    # Replaces the original char-by-char loop: strip leading spaces.
    newName = newName.lstrip(' ')
    if not newName:
        newName = "myBrush"
    return "°" + newName.title()
def fu3(brush, tex, fn, filepath, brushes):
    """Configure a freshly created brush around image texture ``tex``.

    ``fn`` is the source display name, ``filepath`` the icon image path,
    and ``brushes`` a flag: 1 means the caller already named the brush
    (batch load), 0 means derive the name from ``fn`` via fu2().  Extra
    tweaks are applied per scene.zbLoadBrushType ('decal'/'stencil'/'hard').
    """
    # Reset the add-on's custom scale/hardness properties to defaults.
    brush.zb_texscale = 50
    brush.zb_hardness = 50
    brush.zb_texscale_x = 0
    brush.zb_texscale_y = 0
    brush.zb_texscale_z = 0
    tool_settings = bpy.context.tool_settings
    wm = bpy.context.window_manager
    scene = bpy.context.scene
    mode = bpy.context.mode
    if brushes == 1:
        newName = brush.name
    else:
        newName = fu2(fn)
    brush.name = newName
    brush.texture = tex
    brush.use_custom_icon = True
    brush.icon_filepath = filepath
    # Baseline stroke setup; wrapped because not every brush type exposes
    # all of these attributes.
    try:
        brush.stroke_method = 'DOTS'
        brush.texture_slot.tex_paint_map_mode = 'TILED'
        brush.texture_overlay_alpha = 30
        brush.mask_overlay_alpha = 30
        tool_settings.image_paint.input_samples = 1
        tool_settings.sculpt.input_samples = 1
    except:
        pass
    if scene.zbLoadBrushType == 'decal':
        brush.name = '°Decal ' + newName[1:]
        try:
            brush.stroke_method = 'DRAG_DOT'
        except:
            pass
        ts = brush.texture_slot
        ts.tex_paint_map_mode = 'VIEW_PLANE'
        ts.scale.xyz = 1.25
        img = brush.texture.image
        x = img.size[0]
        y = img.size[1]
        if x != y:
            # Non-square image: stretch the smaller axis so the decal
            # keeps the image's aspect ratio on screen.
            lengths = [x,y]
            smallest = min(lengths)
            largest = max(lengths)
            percent = largest / smallest
            if x == smallest:
                ts.scale[0] = ts.scale[0] * percent
            if y == smallest:
                ts.scale[1] = ts.scale[1] * percent
        brush.texture_overlay_alpha = 100
        brush.use_pressure_strength = False
        tex.extension = 'CLIP'
        brush.zb_hardness = 100
        if mode == "SCULPT":
            tex.use_color_ramp = True
            brush.texture.filter_size = 14
    if scene.zbLoadBrushType == 'stencil':
        brush.name = '°Stencil ' + newName[1:]
        brush.stroke_method = 'DOTS'
        brush.texture_slot.tex_paint_map_mode = 'STENCIL'
        brush.texture_slot.scale.xyz = 1
        brush.use_pressure_strength = False
        brush.texture_overlay_alpha = 30
        brush.mask_overlay_alpha = 50
        bpy.ops.object.zb_center_stencil()
    if scene.zbLoadBrushType == 'hard':
        brush.use_pressure_strength = False
        brush.zb_hardness = 100
        if bpy.context.mode == 'SCULPT':
            bpy.ops.brush.curve_preset(shape='LINE')
            brush.texture.filter_eccentricity = 64
            if scene.zbLoadBrushType != 'decal':
                brush.texture.filter_size = 2
        else:
            bpy.ops.brush.curve_preset(shape='MAX')
def fu4(context, filepath):
    """Load a single image file as a new paint/sculpt brush.

    Reuses an existing bpy.data image whose name appears in ``filepath``;
    otherwise loads the file.  Creates a mode-appropriate brush and hands
    final configuration to fu3().  Directories are ignored.
    """
    wm = bpy.context.window_manager
    scene = bpy.context.scene
    settings = bpy.context.tool_settings
    mode = bpy.context.mode
    aType = bpy.context.area.type
    if aType == 'IMAGE_EDITOR':
        # Temporarily hop to the 3D view so brush/mode operators work;
        # restored at the end.
        bpy.context.area.type = 'VIEW_3D'
        bpy.ops.object.mode_set(mode='TEXTURE_PAINT')
        mode = bpy.context.mode
    if os.path.isdir(filepath):
        return
    else:
        fn = bpy.path.display_name_from_filepath(filepath)
        imgExists = False
        for image in bpy.data.images:
            if image.name in filepath:
                img = image
                imgExists = True
                break
        if imgExists == False:
            img = bpy.data.images.load(filepath)
        img.use_fake_user = True
        tex = bpy.data.textures.new(name =fn, type='IMAGE')
        tex.image = img
        if 'PAINT' in mode:
            if mode == 'PAINT_TEXTURE':
                bpy.ops.brush.add()
                brush = bpy.context.tool_settings.image_paint.brush
                bpy.data.brushes[brush.name].image_tool = 'DRAW'
            else:
                bpy.ops.brush.add()
                brush = bpy.context.tool_settings.vertex_paint.brush
                bpy.data.brushes[brush.name].vertex_tool = 'MIX'
            bpy.ops.brush.curve_preset(shape='SHARP')
            brush.color = (1, 1, 1)
            brush.strength = 1
        if 'SCULPT' in mode:
            bpy.ops.brush.add()
            brush = bpy.context.tool_settings.sculpt.brush
            bpy.data.brushes[brush.name].sculpt_tool = 'DRAW'
            bpy.ops.brush.curve_preset(shape='SHARP')
            brush.strength = 0.125
            brush.auto_smooth_factor = 0.15
        if scene.zbLoadImgSculpt:
            # Make the brush usable in the opposite mode as well.
            if 'SCULPT' in mode:
                brush.use_paint_image = True
                brush.use_paint_vertex = True
            else:
                brush.use_paint_sculpt = True
        brushes = 0
        fu3(brush, tex, fn, filepath, brushes)
        fu1(mode)
        if aType == 'IMAGE_EDITOR':
            bpy.context.area.type = aType
            bpy.ops.object.mode_set(mode='EDIT')
        return {'FINISHED'}
class zbParticleDetailSelect(bpy.types.Operator):
    """Store the chosen particle detail level on the scene."""
    bl_idname =i_0[0]
    bl_label =i_0[1]
    bl_description =i_0[2]
    detailSelect = bpy.props.StringProperty()
    def execute(self,context):
        # Persist the selection so the UI panels can read it back.
        chosen = self.detailSelect
        bpy.context.scene.zbParticleDetailSelect = chosen
        return{'FINISHED'}
class cl0(bpy.types.Menu):
    """Material-operations menu: add/remove materials and UV maps,
    select-by-material, and conversion helpers for the active object."""
    bl_label =i_0[3]
    bl_idname =i_0[4]
    bl_description =i_0[5]
    def draw(self, context):
        layout = self.layout
        col = layout.column()
        ob = bpy.context.active_object
        if ob:
            mat = ob.active_material
            col.menu('menu.zb_add_material',icon='ZOOMIN')
            col.operator('object.zb_material_operations',
                text='Remove Materials',icon='ZOOMOUT').op = 'REMOVE_MATERIALS'
            col.operator('object.zb_material_operations',
                text='Remove UV Maps',icon='ZOOMOUT').op = 'REMOVE_UVMAPS'
            col.menu('menu.zb_select_by_material',icon='HAND')
            if ob.active_material:
                col.separator()
                try:
                    col.operator("object.zb_make_unique", text="Make Unique",
                        icon='MATERIAL')
                except:
                    pass
            else:
                # No material: offer converting a baked UV image into one.
                try:
                    if ob.data.uv_textures.active.data[0].image:
                        col.separator()
                        col.operator('object.sf_apply_tex',text='Convert UV To Material',
                            icon = 'MATERIAL')
                except:
                    pass
        else:
            col.label('Material Operations (select object first)')
class cl1(bpy.types.Menu):
    """Menu listing every material in the file for assignment."""
    bl_label =i_0[6]
    bl_idname =i_0[7]
    bl_description =i_0[8]
    def draw(self, context):
        column = self.layout.column()
        materials = bpy.data.materials
        if not len(materials) > 0:
            column.label('No Materils Found In File',icon='MATERIAL')
            return
        for material in materials:
            entry = column.operator('object.zb_material_operations',
                text=material.name, icon='MATERIAL')
            entry.op = material.name + '__ZB__ADD_MATERIAL'
class cl2(bpy.types.Menu):
    """Menu that selects scene objects by the material they use."""
    bl_label =i_0[9]
    bl_idname =i_0[10]
    bl_description =i_0[11]
    def draw(self, context):
        column = self.layout.column()
        materials = bpy.data.materials
        if not len(materials) > 0:
            column.label('No Materils To Select From',icon='MATERIAL')
            return
        for material in materials:
            entry = column.operator('object.zb_material_operations',
                text=material.name, icon='MATERIAL')
            entry.op = material.name + '__ZB__SELECT_BY_MATERIAL'
class cl3(bpy.types.Operator):
    """Dispatcher for material operations; ``op`` encodes both the action
    and (for ADD/SELECT) the material name via a '__ZB__...' suffix."""
    bl_idname =i_0[12]
    bl_label =i_0[13]
    bl_description =i_0[14]
    op = bpy.props.StringProperty()
    def execute(self,context):
        scene = bpy.context.scene
        mode = bpy.context.mode
        wm = bpy.context.window_manager
        ob = bpy.context.active_object
        selected = bpy.context.selected_objects
        op = self.op
        if 'ADD_MATERIAL' in op:
            mName = op.split('__ZB__ADD_MATERIAL')[0]
            mat = bpy.data.materials[mName]
            if 'EDIT' in mode:
                # Edit mode: assign the material to the selected faces.
                if not ob.active_material:
                    bpy.ops.object.material_slot_add()
                if mName not in ob.data.materials:
                    ob.data.materials.append(mat)
                # Walk slots until the active material is the target one.
                i=0
                for m in ob.data.materials:
                    if ob.active_material == mat:
                        break
                    else:
                        ob.active_material_index = i
                        i+=1
                bpy.ops.object.material_slot_assign()
            else:
                for ob in selected:
                    ob.active_material = mat
        if 'REMOVE_MATERIALS' in op:
            activeOb = bpy.context.active_object
            for obj in selected:
                if hasattr(obj.data,'materials'):
                    scene.objects.active = obj
                    for m in obj.data.materials:
                        try:
                            bpy.ops.object.material_slot_remove()
                        except:
                            print('Could not remove material for',obj.name)
            scene.objects.active = ob
        if 'REMOVE_UVMAPS' in op:
            activeOb = bpy.context.active_object
            for obj in selected:
                scene.objects.active = obj
                if hasattr(obj.data,'uv_textures'):
                    for uv in obj.data.uv_textures:
                        try:
                            bpy.ops.mesh.uv_texture_remove()
                        except:
                            print('Could not remove uv map for',obj.name)
            scene.objects.active = ob
        if 'SELECT_BY_MATERIAL' in op:
            mName = op.split('__ZB__SELECT_BY_MATERIAL')[0]
            # Select exactly the objects using the material; deselect the rest.
            for ob in bpy.data.objects:
                if hasattr(ob,'active_material'):
                    if hasattr(ob,'data'):
                        if hasattr(ob.data,'materials'):
                            if mName in ob.data.materials:
                                ob.select = True
                            else:
                                ob.select = False
                        else:
                            ob.select = False
                    else:
                        ob.select = False
                else:
                    ob.select = False
        return{'FINISHED'}
class cl4(bpy.types.Operator):
    """For each selected mesh with a UV image, create a ZB color paint
    layer and point both the texture slot and the 'Image Texture zbColor'
    node at that image."""
    bl_idname =i_0[15]
    bl_label =i_0[16]
    bl_description =i_0[17]
    bl_options = {'REGISTER', 'UNDO'}
    def execute(self, context):
        scene = bpy.context.scene
        renEng = scene.render.engine
        scene = bpy.context.scene
        selected = bpy.context.selected_objects
        mode = bpy.context.mode
        for ob in selected:
            if ob.type == 'MESH':
                scene.objects.active = ob
                if ob.data.uv_textures:
                    img = ob.data.uv_textures.active.data[0].image
                    bpy.ops.object.zb_paint_color()
                    mat = ob.active_material
                    mat.active_texture.image = img
                    imgNode = mat.node_tree.nodes['Image Texture zbColor']
                    imgNode.image = img
        # Return to whatever mode the user was in.
        fu1(mode)
        return {'FINISHED'}
def fu5(context, filepath):
    """Batch-load every image in ``filepath``'s directory as brushes.

    Each file becomes an image + texture + mode-appropriate brush, then
    fu3() finishes configuration.  File names are assumed to carry a
    3-character prefix that is stripped (``f[3:]``).
    """
    wm = bpy.context.window_manager
    scene = bpy.context.scene
    aType = bpy.context.area.type
    settings = bpy.context.tool_settings
    mode = bpy.context.mode
    if aType == 'IMAGE_EDITOR':
        # Hop to the 3D view so brush operators work; restored at the end.
        bpy.context.area.type = 'VIEW_3D'
        bpy.ops.object.mode_set(mode='TEXTURE_PAINT')
        mode = bpy.context.mode
    if os.path.isdir(filepath):
        directory = filepath
    else:
        li = filepath.split(os.sep)
        directory = filepath.rstrip(li[-1])
    files = os.listdir(directory)
    for f in files:
        imgExists = False
        for blendImage in bpy.data.images:
            if blendImage.name in f:
                imgExists = True
                img = blendImage
                fn = f[3:]
                tex = bpy.data.textures.new(name =fn, type='IMAGE')
                tex.use_fake_user =True
                tex.image = img
                break
        if imgExists == False:
            # NOTE(review): if this load fails, `tex`/`fn` keep the values
            # from the previous iteration and the brush below reuses them —
            # confirm whether non-image files in the folder are expected.
            try:
                fn = f[3:]
                img = bpy.data.images.load(filepath = directory +os.sep + f)
                img.use_fake_user =True
                tex = bpy.data.textures.new(name =fn, type='IMAGE')
                tex.use_fake_user =True
                tex.image = img
            except:
                pass
        if bpy.context.mode.startswith('PAINT'):
            if mode == 'PAINT_TEXTURE':
                bpy.ops.object.mode_set(mode='TEXTURE_PAINT')
                bpy.ops.brush.add()
                brush = bpy.context.tool_settings.image_paint.brush
                bpy.data.brushes[brush.name].image_tool = 'DRAW'
            else:
                bpy.ops.object.mode_set(mode='VERTEX_PAINT')
                bpy.ops.brush.add()
                brush = bpy.context.tool_settings.vertex_paint.brush
                bpy.data.brushes[brush.name].vertex_tool = 'MIX'
                bpy.ops.brush.curve_preset(shape='SHARP')
                brush.color = (1, 1, 1)
        if bpy.context.mode.startswith('SCULPT') is True:
            bpy.ops.object.mode_set(mode='SCULPT')
            bpy.ops.brush.add()
            brush = bpy.context.tool_settings.sculpt.brush
            bpy.data.brushes[brush.name].sculpt_tool = 'DRAW'
            bpy.ops.brush.curve_preset(shape='SHARP')
            brush.strength = 0.125
            brush.auto_smooth_factor = 0.15
        fn = bpy.path.display_name_from_filepath(directory +os.sep + f)
        newName = fu2(fn)
        brush.name = newName
        brush.texture = tex
        brush.use_custom_icon = True
        brush.icon_filepath = directory +os.sep + f
        brushes = 1
        if scene.zbLoadImgSculpt:
            if 'SCULPT' in mode:
                brush.use_paint_image = True
                brush.use_paint_vertex = True
            else:
                brush.use_paint_sculpt = True
        fu3(brush, tex, fn, filepath, brushes)
    fu1(mode)
    if aType == 'IMAGE_EDITOR':
        bpy.context.area.type = aType
        bpy.ops.object.mode_set(mode='EDIT')
    return {'FINISHED'}
class zbLoadBrush(bpy.types.Operator, ImportHelper):
    """File-browser operator: load the picked image file as a new brush."""
    bl_idname =i_0[18]
    bl_label =i_0[19]
    bl_description =i_0[20]
    @classmethod
    def poll(cls, context):
        # Requires an active object to work against.
        return context.active_object is not None
    def execute(self, context):
        # All the heavy lifting lives in the module-level loader.
        chosen_path = self.filepath
        return fu4(context, chosen_path)
class cl5(bpy.types.Operator, ImportHelper):
    """File-browser operator: batch-load a folder of images as brushes."""
    bl_idname =i_0[21]
    bl_label =i_0[22]
    bl_description =i_0[23]
    @classmethod
    def poll(cls, context):
        # Requires an active object to work against.
        return context.active_object is not None
    def execute(self, context):
        # The folder loader handles directory resolution itself.
        chosen_path = self.filepath
        return fu5(context, chosen_path)
class cl6(bpy.types.Menu):
    """Pop-up brush picker: lists brushes valid for the current mode in a
    three-column grid; each button fires object.zb_brush_context."""
    bl_label =i_0[24]
    bl_idname =i_0[25]
    def draw(self, context):
        mode = bpy.context.mode
        ts = bpy.context.tool_settings
        aType = bpy.context.area.type
        layout = self.layout
        flow = layout.column_flow(columns=3)
        for brush in bpy.data.brushes:
            if mode == 'PAINT_TEXTURE' or aType == 'IMAGE_EDITOR':
                if brush.use_paint_image:
                    row = flow.row(align=True)
                    row.operator('object.zb_brush_context', text= brush.name,
                        icon_value=layout.icon(brush)).brushName = brush.name
            if mode == 'SCULPT':
                if brush.use_paint_sculpt:
                    row = flow.row(align=True)
                    row.operator('object.zb_brush_context', text= brush.name,
                        icon_value=layout.icon(brush)).brushName = brush.name
            if mode == 'PAINT_WEIGHT':
                if brush.use_paint_weight:
                    row = flow.row(align=True)
                    row.operator('object.zb_brush_context', text= brush.name,
                        icon_value=layout.icon(brush)).brushName = brush.name
            if mode == 'PAINT_VERTEX':
                if brush.use_paint_vertex:
                    row = flow.row(align=True)
                    row.operator('object.zb_brush_context', text= brush.name,
                        icon_value=layout.icon(brush)).brushName = brush.name
            # NOTE(review): this PARTICLE section sits inside the brushes
            # loop, so the six tool buttons repeat once per brush; it looks
            # like it was meant to be outside the loop — confirm.
            if mode == 'PARTICLE':
                settings = ts.particle_edit
                brush = settings.brush
                tool = settings.tool
                tools = ['COMB','SMOOTH','ADD','LENGTH','PUFF','CUT']
                for tool in tools:
                    row = flow.row()
                    row.operator('object.zb_brush_context',
                        text=tool.title(), icon='PARTICLEMODE').brushName = tool
class cl7(bpy.types.Operator):
    """Switch the active brush/tool by name for the current mode, applying
    per-brush viewport and stroke tweaks in texture paint."""
    bl_label =i_0[26]
    bl_idname =i_0[27]
    brushName = bpy.props.StringProperty()
    def execute(self,context):
        scene = bpy.context.scene
        ts = bpy.context.tool_settings
        sd = bpy.context.space_data
        ups = ts.unified_paint_settings
        aType = bpy.context.area.type
        mode = bpy.context.mode
        wm = bpy.context.window_manager
        if mode == 'PAINT_TEXTURE' or aType == 'IMAGE_EDITOR':
            brush = bpy.data.brushes[self.brushName]
            paint = ts.image_paint
            if 'Fill' not in brush.name:
                wm.zbGradientSwitch = False
            else:
                brush.use_gradient = False
            ts.image_paint.brush = brush
            sd = bpy.context.space_data
            if scene.zbDistractionFreeMode:
                if hasattr(sd, 'show_only_render'):
                    # The clone brush needs regular display to pick sources.
                    if 'Clone' in brush.name:
                        sd.show_only_render = False
                        self.report({'INFO'}, "Distraction Free Mode disabled when using Clone brush")
                    else:
                        sd.show_only_render = True
            if 'Smear' in brush.name:
                brush.stroke_method = 'DOTS'
            if 'Soften' in brush.name:
                brush.stroke_method = 'DOTS'
            if 'Clone' in brush.name:
                brush.stroke_method = 'DOTS'
            if 'Mask' in brush.name:
                # Ensure a stencil image exists before entering mask view.
                if not paint.stencil_image:
                    bpy.ops.image.new(gen_context='PAINT_STENCIL')
                wm.zbViewMaskMode = 1
            else:
                if aType == 'VIEW_3D':
                    if sd.viewport_shade != 'TEXTURED':
                        sd.viewport_shade = 'MATERIAL'
            if 'Draw' in brush.name or 'Pen' in brush.name:
                paint.input_samples = 3
            else:
                paint.input_samples = 1
            if 'Graphic Pen' in brush.name:
                ups.use_pressure_size = True
            else:
                ups.use_pressure_size = False
        if mode == 'SCULPT':
            brush = bpy.data.brushes[self.brushName]
            ts.sculpt.brush = brush
        if mode == 'PARTICLE':
            # In particle edit, brushName is a tool enum, not a brush.
            settings = ts.particle_edit
            settings.tool = self.brushName
            bpy.context.area.tag_redraw()
        if mode == 'PAINT_WEIGHT':
            sd.viewport_shade = 'SOLID'
            brush = bpy.data.brushes[self.brushName]
            ts.weight_paint.brush = brush
        if mode == 'PAINT_VERTEX':
            brush = bpy.data.brushes[self.brushName]
            ts.vertex_paint.brush = brush
        return{'FINISHED'}
def fu6(self,context):
    """Callback for mask-view toggling: normalize the active brush (full
    strength, no pressure), pick a viewport shade for the new state, and
    restore the default Draw brush for the current mode."""
    wm = bpy.context.window_manager
    scene = bpy.context.scene
    sd = bpy.context.space_data
    in_sculpt = bpy.context.mode.startswith('SCULPT')
    in_paint = bpy.context.mode.startswith('PAINT')
    engine = scene.render.engine
    paint_slot = None
    if in_sculpt:
        paint_slot = bpy.context.tool_settings.sculpt
    if in_paint:
        paint_slot = bpy.context.tool_settings.image_paint
    try:
        # Mask work wants a fully opaque, pressure-free brush.
        paint_slot.brush.strength = 1
        paint_slot.brush.use_pressure_strength = False
    except:
        pass
    if wm.zbViewMaskMode:
        sd.viewport_shade = 'SOLID' if engine == 'BLENDER_RENDER' else 'TEXTURED'
        if in_sculpt:
            sd.viewport_shade = 'SOLID'
    else:
        if sd.viewport_shade != 'TEXTURED':
            sd.viewport_shade = 'MATERIAL'
    if in_sculpt:
        try:
            paint_slot.brush = bpy.data.brushes['SculptDraw']
        except:
            pass
    if in_paint:
        try:
            paint_slot.brush = bpy.data.brushes['Draw']
        except:
            pass
def fu7():
    """Bake a Skin-modifier mesh into a rigged mesh.

    Applies any Mirror modifier, generates an armature from the Skin
    modifier (if none exists yet), configures auto-IK and a pose library,
    hides the armature object, applies the Skin modifier, cleans up the
    resulting geometry, and moves the Armature modifier to the top of the
    stack.  Operates on the active object; operator order matters here.
    """
    scene = bpy.context.scene
    bpy.ops.object.mode_set(mode='OBJECT')
    realOb = bpy.context.active_object
    mirror = len([mod for mod in realOb.modifiers if mod.type == 'MIRROR'])
    if mirror:
        try:
            bpy.ops.object.modifier_apply(apply_as='DATA', modifier="Mirror")
        except:
            pass
    armature = len([mod for mod in realOb.modifiers if mod.type == 'ARMATURE'])
    if armature < 1:
        bpy.ops.object.skin_armature_create(modifier="Skin")
    # The generated armature becomes the active object.
    ob = bpy.context.active_object
    bones = bpy.context.object.data
    bones.use_auto_ik = True
    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.armature.select_all(action='SELECT')
    bpy.ops.armature.calculate_roll(type='GLOBAL_POS_Z')
    bpy.ops.object.mode_set(mode='POSE')
    if not bpy.context.active_object.pose_library:
        bpy.ops.poselib.new()
    bpy.ops.object.mode_set(mode='OBJECT')
    ob.name = realOb.name + "sBones"
    bpy.context.active_object.select = False
    bpy.context.object.hide = True
    scene.objects.active = realOb
    try:
        bpy.ops.object.modifier_apply(apply_as='DATA', modifier="Skin")
    except:
        pass
    realOb.select = True
    bpy.context.space_data.use_occlude_geometry = True
    bpy.ops.object.mode_set(mode='EDIT')
    # Clean up the skin-generated topology.
    bpy.ops.mesh.normals_make_consistent(inside=False)
    bpy.ops.mesh.vert_connect_nonplanar()
    bpy.ops.mesh.tris_convert_to_quads()
    bpy.ops.object.mode_set(mode='OBJECT')
    # Brute-force the Armature modifier to the top of the stack.
    for x in range(20):
        bpy.ops.object.modifier_move_up(modifier="Armature")
    return{'FINISHED'}
def zbDistractionFreeMode(self,context):
    """Option callback: mirror the distraction-free flag onto the
    viewport's render-only display while sculpting or texture painting."""
    if bpy.context.mode in {'SCULPT', 'PAINT_TEXTURE'}:
        bpy.context.space_data.show_only_render = bool(self.zbDistractionFreeMode)
    return
class cl8(bpy.types.Operator):
    """Convert selected non-mesh objects (metaballs, text, curves...) to
    meshes, with extra cleanup for metaballs and UVs for text."""
    bl_idname =i_0[28]
    bl_label =i_0[29]
    bl_description =i_0[30]
    def execute(self,context):
        mode = bpy.context.mode
        bpy.ops.object.mode_set(mode='OBJECT')
        sel = bpy.context.selected_objects
        for ob in sel:
            removeMeta = False
            if ob.type == 'META':
                removeMeta = True
            try:
                bpy.ops.object.convert(target='MESH')
                if removeMeta:
                    # Unlink leftover metaball objects and clean the mesh
                    # the conversion produced.
                    for obj in bpy.data.objects:
                        if obj.type == 'META':
                            bpy.context.scene.objects.unlink(obj)
                    for obj in bpy.data.objects:
                        if 'MBall' in obj.name:
                            obj.select = True
                            break
                    bpy.ops.object.mode_set(mode='EDIT')
                    bpy.ops.mesh.select_all(action='SELECT')
                    bpy.ops.mesh.remove_doubles()
                    bpy.ops.mesh.select_all(action='SELECT')
                    bpy.ops.mesh.tris_convert_to_quads()
                    bpy.ops.mesh.tris_convert_to_quads()
                    bpy.ops.mesh.beautify_fill()
                if ob.type == 'FONT':
                    bpy.ops.mesh.select_all(action='SELECT')
                    bpy.ops.uv.smart_project(island_margin = 0.03)
            except:
                pass
        fu1(mode)
        return{'FINISHED'}
def zbPrecisePaintOption(self,context):
    """Update callback for the 'precise paint' option.

    When enabled, registers addon keymaps binding W/S/A/D (snap-view
    navigation) and the arrow keys (view/stencil panning, with and
    without Ctrl) in the 'Image Paint' and 'Sculpt' modal maps.  When
    disabled, deactivates every 'ZB Precise Paint' keymap item.

    Bug fixed: the guard flag was written as ``needToSetUp == False`` (a
    discarded comparison, not an assignment), so the duplicate-keymap
    check never took effect and the maps were re-created on every enable.
    """
    wm = bpy.context.window_manager
    if self.zbPrecisePaintOption:
        needToSetUp = True
        for keymap in wm.keyconfigs.addon.keymaps:
            for key in keymap.keymap_items:
                # NOTE(review): sentinel kept from the original — confirm
                # 'ZB Rotate View' matches a label these keymaps create.
                if 'ZB Rotate View' in key.name:
                    needToSetUp = False  # was '==': a no-op comparison
        if needToSetUp:
            arrows = ('UP_ARROW', 'DOWN_ARROW', 'LEFT_ARROW', 'RIGHT_ARROW')
            for map_name in ('Image Paint', 'Sculpt'):
                km = wm.keyconfigs.addon.keymaps.new(name=map_name,
                    space_type='EMPTY')
                for letter in ('W', 'S', 'A', 'D'):
                    km.keymap_items.new("object.zb_precise_paint",
                        letter, 'PRESS')
                for arrow in arrows:
                    km.keymap_items.new("object.zb_precise_paint_ctrl",
                        arrow, 'PRESS', ctrl=True)
                for arrow in arrows:
                    km.keymap_items.new("object.zb_precise_paint_ctrl",
                        arrow, 'PRESS')
                # Track both keymaps for unregistration (the original only
                # appended the last one, leaking the 'Image Paint' map).
                addon_keymaps.append(km)
    else:
        for keymap in wm.keyconfigs.addon.keymaps:
            for key in keymap.keymap_items:
                if 'ZB Precise Paint' in key.name:
                    key.active = False
    return
class zbPrecisePaint(bpy.types.Operator):
    """Modal 'precise paint' navigator.

    While running, W/S/A/D snap the view to front/back/left/right in
    orthographic projection, arrows pan (view or stencil with Ctrl),
    X/F open radial controls for size/angle/strength, Space opens the
    brush menu, and middle-mouse/ESC/number keys end the modal and
    restore the saved perspective.
    """
    bl_idname =i_0[31]
    bl_label =i_0[32]
    bl_description =i_0[33]
    # counter debounces key repeats; perspChoice remembers the user's
    # projection so it can be restored on exit.
    counter = bpy.props.IntProperty()
    perspChoice = bpy.props.StringProperty(default='PERSP')
    def modal(self, context, event):
        # --- W/S/A/D: snap to an orthographic axis view and orbit ---
        if event.type == 'W':
            view3D = bpy.context.space_data.region_3d
            if self.counter < 1:
                self.perspChoice = view3D.view_perspective
                bpy.ops.view3d.view_selected('INVOKE_DEFAULT')
                bpy.ops.view3d.viewnumpad('INVOKE_DEFAULT', type='FRONT')
            if self.counter%2 == 0:
                view3D.view_perspective = 'ORTHO'
                bpy.ops.view3d.view_orbit(type='ORBITDOWN')
                view3D.view_perspective = 'ORTHO'
            self.counter += 1
        if event.type == 'S':
            view3D = bpy.context.space_data.region_3d
            if self.counter < 1:
                self.perspChoice = view3D.view_perspective
                bpy.ops.view3d.view_selected('INVOKE_DEFAULT')
                bpy.ops.view3d.viewnumpad('INVOKE_DEFAULT', type='BACK')
            if self.counter%2 == 0:
                view3D.view_perspective = 'ORTHO'
                bpy.ops.view3d.view_orbit(type='ORBITUP')
                view3D.view_perspective = 'ORTHO'
            self.counter += 1
        if event.type == 'A':
            view3D = bpy.context.space_data.region_3d
            if self.counter < 1:
                self.perspChoice = view3D.view_perspective
                bpy.ops.view3d.view_selected('INVOKE_DEFAULT')
                bpy.ops.view3d.viewnumpad('INVOKE_DEFAULT', type='LEFT')
            if self.counter%2 == 0:
                view3D.view_perspective = 'ORTHO'
                bpy.ops.view3d.view_orbit(type='ORBITRIGHT')
                view3D.view_perspective = 'ORTHO'
            self.counter += 1
        if event.type == 'D':
            view3D = bpy.context.space_data.region_3d
            if self.counter < 1:
                self.perspChoice = view3D.view_perspective
                bpy.ops.view3d.view_selected('INVOKE_DEFAULT')
                bpy.ops.view3d.viewnumpad('INVOKE_DEFAULT', type='RIGHT')
            if self.counter%2 == 0:
                view3D.view_perspective = 'ORTHO'
                bpy.ops.view3d.view_orbit(type='ORBITLEFT')
                view3D.view_perspective = 'ORTHO'
            self.counter += 1
        # --- painting passthrough ---
        if event.type == 'LEFTMOUSE':
            if event.value == 'PRESS':
                if 'SCULPT' in bpy.context.mode:
                    bpy.ops.sculpt.brush_stroke('INVOKE_DEFAULT')
                else:
                    bpy.ops.paint.image_paint('INVOKE_DEFAULT')
        # --- X/F: radial control for brush size ---
        if event.type in {'X', 'F'}:
            if event.value == 'PRESS':
                if 'SCULPT' in bpy.context.mode:
                    bpy.ops.wm.radial_control('INVOKE_DEFAULT',
                        data_path_primary = 'tool_settings.sculpt.brush.size',
                        rotation_path = 'tool_settings.sculpt.brush.texture_slot.angle',
                        color_path = 'tool_settings.sculpt.brush.cursor_color_add',
                        zoom_path = 'space_data.zoom',
                        image_id = 'tool_settings.sculpt.brush',
                        secondary_tex = False)
                else:
                    bpy.ops.wm.radial_control('INVOKE_DEFAULT',
                        data_path_primary = 'tool_settings.image_paint.brush.size',
                        data_path_secondary = 'tool_settings.unified_paint_settings.size',
                        use_secondary = 'tool_settings.unified_paint_settings.use_unified_size',
                        rotation_path = 'tool_settings.image_paint.brush.mask_texture_slot.angle',
                        color_path = 'tool_settings.image_paint.brush.cursor_color_add',
                        fill_color_path = 'tool_settings.image_paint.brush.color',
                        zoom_path = 'space_data.zoom',
                        image_id = 'tool_settings.image_paint.brush',
                        secondary_tex = False)
        # --- Ctrl combos: angle radial control, undo, stencil nudging ---
        if event.ctrl:
            if event.type in {'X', 'F'}:
                if event.value == 'PRESS':
                    if 'SCULPT' in bpy.context.mode:
                        bpy.ops.wm.radial_control('INVOKE_DEFAULT',
                            data_path_primary = 'tool_settings.sculpt.brush.texture_slot.angle',
                            rotation_path = 'tool_settings.sculpt.brush.texture_slot.angle',
                            color_path = 'tool_settings.sculpt.brush.cursor_color_add',
                            image_id = 'tool_settings.sculpt.brush',
                            secondary_tex=False)
                    else:
                        bpy.ops.wm.radial_control('INVOKE_DEFAULT',
                            data_path_primary = 'tool_settings.image_paint.brush.texture_slot.angle',
                            rotation_path = 'tool_settings.image_paint.brush.texture_slot.angle',
                            color_path = 'tool_settings.image_paint.brush.cursor_color_add',
                            fill_color_path = 'tool_settings.image_paint.brush.color',
                            image_id = 'tool_settings.image_paint.brush',
                            secondary_tex=False)
            if event.type == 'Z':
                if self.counter%2 == 0:
                    bpy.ops.ed.undo()
                    self.counter+= 1
            try:
                if 'SCULPT' in bpy.context.mode:
                    brush = bpy.context.tool_settings.sculpt.brush
                else:
                    brush = bpy.context.tool_settings.image_paint.brush
                if event.type == 'DOWN_ARROW':
                    brush.stencil_pos.y -= 0.1
                if event.type == 'UP_ARROW':
                    brush.stencil_pos.y += 0.1
                if event.type == 'RIGHT_ARROW':
                    brush.stencil_pos.x += 0.1
                if event.type == 'LEFT_ARROW':
                    brush.stencil_pos.x -= 0.1
            except:
                pass
        # --- Shift+X/F: radial control for brush strength ---
        if event.shift:
            if event.type in {'X', 'F'}:
                if event.value == 'PRESS':
                    if 'SCULPT' in bpy.context.mode:
                        bpy.ops.wm.radial_control('INVOKE_DEFAULT',
                            data_path_primary = 'tool_settings.sculpt.brush.strength',
                            rotation_path = 'tool_settings.sculpt.brush.texture_slot.angle',
                            color_path = 'tool_settings.sculpt.brush.cursor_color_add',
                            image_id = 'tool_settings.sculpt.brush',
                            secondary_tex = False)
                    else:
                        bpy.ops.wm.radial_control('INVOKE_DEFAULT',
                            data_path_primary = 'tool_settings.image_paint.brush.strength',
                            data_path_secondary = 'tool_settings.unified_paint_settings.strength',
                            use_secondary = 'tool_settings.unified_paint_settings.use_unified_strength',
                            rotation_path = 'tool_settings.image_paint.brush.mask_texture_slot.angle',
                            color_path = 'tool_settings.image_paint.brush.cursor_color_add',
                            fill_color_path = 'tool_settings.image_paint.brush.color',
                            image_id = 'tool_settings.image_paint.brush',
                            secondary_tex = False)
        # --- Ctrl navigation: zoom, pan, stencil scale ---
        if event.ctrl:
            if event.alt:
                if event.type == 'SPACE':
                    if event.value == 'PRESS':
                        bpy.ops.view3d.zoom('INVOKE_DEFAULT')
            if event.type == 'MIDDLEMOUSE':
                if event.value == 'PRESS':
                    bpy.ops.view3d.move('INVOKE_DEFAULT')
            if event.type == 'RIGHTMOUSE':
                bpy.ops.brush.stencil_control('INVOKE_DEFAULT', mode='SCALE')
            if event.type == 'WHEELUPMOUSE':
                bpy.ops.view3d.zoom(delta = 1)
            if event.type == 'WHEELDOWNMOUSE':
                bpy.ops.view3d.zoom(delta = -1)
        # Right mouse: move the stencil, or exit if no stencil is mapped.
        if event.type == 'RIGHTMOUSE':
            if 'SCULPT' in bpy.context.mode:
                brush = context.tool_settings.sculpt.brush
            else:
                brush = context.tool_settings.image_paint.brush
            if brush.texture_slot.tex_paint_map_mode == 'STENCIL':
                if event.value == 'PRESS':
                    bpy.ops.brush.stencil_control('INVOKE_DEFAULT', mode='TRANSLATION')
            else:
                self.counter = 0
                view3D = bpy.context.space_data.region_3d
                view3D.view_perspective = self.perspChoice
                return{'FINISHED'}
        if event.type =='SPACE':
            if event.value == 'RELEASE':
                bpy.ops.wm.call_menu(name='menu.zb_brush_menu')
        # Plain arrows: pan the view (debounced by counter parity).
        if event.ctrl == False:
            if event.type == 'DOWN_ARROW':
                if self.counter%2 == 0:
                    bpy.ops.view3d.view_pan(type='PANDOWN')
                self.counter += 1
            if event.type == 'UP_ARROW':
                if self.counter%2 == 0:
                    bpy.ops.view3d.view_pan(type='PANUP')
                self.counter += 1
            if event.type == 'RIGHT_ARROW':
                if self.counter%2 == 0:
                    bpy.ops.view3d.view_pan(type='PANRIGHT')
                self.counter += 1
            if event.type == 'LEFT_ARROW':
                if self.counter%2 == 0:
                    bpy.ops.view3d.view_pan(type='PANLEFT')
                self.counter += 1
        # Middle mouse without shift: restore perspective, rotate, exit.
        if event.type == 'MIDDLEMOUSE':
            if event.value == 'PRESS':
                if event.shift == False:
                    self.counter = 0
                    view3D = bpy.context.space_data.region_3d
                    view3D.view_perspective = self.perspChoice
                    bpy.ops.view3d.rotate('INVOKE_DEFAULT')
                    return{'FINISHED'}
        # ESC or any number key: restore perspective and exit.
        if event.type in {'ESC','ZERO','ONE','TWO','THREE',
            'FOUR','FIVE','SIX','SEVEN','EIGHT','NINE'}:
            self.counter = 0
            view3D = bpy.context.space_data.region_3d
            view3D.view_perspective = self.perspChoice
            return {'FINISHED'}
        return {'RUNNING_MODAL'}
    def invoke(self, context, event):
        context.window_manager.modal_handler_add(self)
        return {'RUNNING_MODAL'}
class cl9(bpy.types.Operator):
    """One-shot modal: an arrow key pans the view, or nudges the brush
    stencil by 0.1 when Ctrl is held, then finishes."""
    bl_idname =i_0[34]
    bl_label =i_0[35]
    bl_description = "Pan brush stencil up, down, left, right with\
    very tight precision"
    def modal(self, context, event):
        if event.ctrl:
            # Ctrl+arrow: move the image-paint stencil; guarded because
            # the brush may have no stencil slot.
            try:
                brush = bpy.context.tool_settings.image_paint.brush
                if event.type == 'DOWN_ARROW':
                    brush.stencil_pos.y -= 0.1
                    return {'FINISHED'}
                if event.type == 'UP_ARROW':
                    brush.stencil_pos.y += 0.1
                    return {'FINISHED'}
                if event.type == 'RIGHT_ARROW':
                    brush.stencil_pos.x += 0.1
                    return {'FINISHED'}
                if event.type == 'LEFT_ARROW':
                    brush.stencil_pos.x -= 0.1
                    return {'FINISHED'}
            except:
                pass
        if event.ctrl == False:
            # Plain arrow: pan the 3D view one step.
            if event.type == 'DOWN_ARROW':
                bpy.ops.view3d.view_pan(type='PANDOWN')
                return {'FINISHED'}
            if event.type == 'UP_ARROW':
                bpy.ops.view3d.view_pan(type='PANUP')
                return {'FINISHED'}
            if event.type == 'RIGHT_ARROW':
                bpy.ops.view3d.view_pan(type='PANRIGHT')
                return {'FINISHED'}
            if event.type == 'LEFT_ARROW':
                bpy.ops.view3d.view_pan(type='PANLEFT')
                return {'FINISHED'}
        return {'RUNNING_MODAL'}
    def invoke(self, context, event):
        context.window_manager.modal_handler_add(self)
        return {'RUNNING_MODAL'}
class cl10(bpy.types.Operator):
    """Mode-switch operator.  ``modeButton`` picks the target: 1 Object,
    2 Sculpt, 3 Texture Paint, 4 Particle Edit, 6 fullscreen toggle.
    Handles conversions, auto-setup of paint layers, and viewport state."""
    bl_idname =i_0[36]
    bl_label =i_0[37]
    bl_description =i_0[38]
    modeButton = bpy.props.IntProperty()
    @classmethod
    def poll(cls, context):
        # Disallow on linked (library) objects/data, which can't be edited;
        # attribute access raises when there is no library link.
        ob = bpy.context.active_object
        abortOp = False
        try:
            ob.library.users
            abortOp = True
        except:
            try:
                ob.data.library.users
                abortOp = True
            except:
                pass
        return abortOp == False
    def execute(self, context):
        wm = bpy.context.window_manager
        scene = bpy.context.scene
        re = scene.render.engine
        mode = bpy.context.mode
        sd = bpy.context.space_data
        userMsg = ''
        try:
            ob = bpy.context.active_object
            mat = ob.active_material
        except:
            ob = 0
            mat = 0
        if scene.zbDistractionFreeMode:
            # Render-only display while entering sculpt/paint modes.
            try:
                if ob.type == 'MESH':
                    if self.modeButton == 2 or self.modeButton == 3:
                        if "PAINT" not in mode and "SCULPT" not in mode:
                            sd.show_only_render = True
                            ob.hide_render = False
                    else:
                        if self.modeButton < 5:
                            sd.show_only_render = False
            except:
                pass
        if ob:
            if self.modeButton != 3 and self.modeButton < 5:
                # Leaving texture paint: persist layers if auto-save is on.
                if bpy.context.image_paint_object:
                    if scene.zbAutoSaveLayers:
                        bpy.ops.object.zb_save_layers(save_only_active=True)
                        userMsg = "Paint layers saved."
            if self.modeButton == 1:
                try:
                    ob.modifiers["Multires"].show_viewport = True
                except:
                    pass
                bpy.ops.object.mode_set(mode='OBJECT')
            elif self.modeButton == 2:
                # Sculpt mode: convert non-meshes first, then enter sculpt.
                if scene.zbDisableShadows:
                    scene.zbDisableShadows = True
                else:
                    scene.zbDisableShadows = False
                if ob.type != 'MESH':
                    mType = ['SURFACE','FONT','META','CURVE']
                    for m in mType:
                        if m in ob.type:
                            bpy.ops.object.sf_convert()
                            ob = bpy.context.active_object
                            break
                if ob.type == 'MESH':
                    try:
                        ob.modifiers["Multires"].show_viewport = True
                    except:
                        pass
                    try:
                        bpy.ops.object.transform_apply(scale=True)
                    except:
                        pass
                    bpy.ops.object.mode_set(mode='SCULPT')
                    if scene.zbFastMode:
                        sd.viewport_shade = 'SOLID'
                else:
                    userMsg = "This mode is only for mesh objects."
            elif self.modeButton == 3:
                # Texture paint: ensure material/paint layers exist first.
                if scene.zbDisableShadows:
                    scene.zbDisableShadows = True
                else:
                    scene.zbDisableShadows = False
                # Re-assigning triggers the option's update callback.
                scene.zbPrecisePaintOption = scene.zbPrecisePaintOption
                if ob.type != 'MESH':
                    mType = ['SURFACE','FONT','META','CURVE']
                    for m in mType:
                        if m in ob.type:
                            bpy.ops.object.sf_convert()
                            ob = bpy.context.active_object
                            break
                if ob.type == 'MESH':
                    if re == 'CYCLES':
                        if scene.zbGoCycles == False:
                            scene.zbGoCycles = True
                    else:
                        if scene.zbGoCycles:
                            scene.zbGoCycles = False
                    skin = len([mod for mod in ob.modifiers if mod.type == 'SKIN'])
                    if skin:
                        fu7()
                        userMsg = "APPLIED SKIN: Click 'EYE' icon next to" + ob.name + " in the outliner to see it."
                    # Detect whether the object already has paintable layers.
                    hasPaint = False
                    if ob.data.uv_textures:
                        if mat is not None:
                            for ts in mat.texture_slots:
                                if ts is not None:
                                    try:
                                        if ts.texture.type == 'IMAGE':
                                            if ts.texture.image:
                                                hasPaint = True
                                                break
                                    except:
                                        pass
                        if hasPaint == False:
                            if mat.use_nodes:
                                if mat.node_tree.nodes:
                                    if 'Image Texture' in mat.node_tree.nodes:
                                        hasPaint = True
                    if scene.zbAState:
                        # Alchemy mode: convert the material for painting
                        # unless it is already a ZB-painted material.
                        needsPaint = True
                        fu8(ob)
                        mat = ob.active_material
                        if mat:
                            if 'ZB Painted' in mat.name:
                                needsPaint = False
                            if hasattr(mat.node_tree, 'nodes'):
                                if 'Image Texture zbColor' in mat.node_tree.nodes:
                                    needsPaint = False
                        if needsPaint:
                            hasPaint = True
                            if re == 'BLENDER_RENDER':
                                if ' Painted' in mat.name:
                                    userMsg = 'Can not paint Blender Render materials after Alchemy material painting.'
                                    sd.show_only_render = False
                                else:
                                    bpy.ops.object.zb_alchemy_paint()
                            if re == 'CYCLES':
                                if 'ZBA Painted' in mat.name:
                                    userMsg = 'Material to complex, can no longer texture paint'
                                    sd.show_only_render = False
                                else:
                                    bpy.ops.object.zb_alchemy_paint()
                            fu11()
                        if hasPaint:
                            try:
                                scene.game_settings.material_mode = 'GLSL'
                            except:
                                pass
                        else:
                            if scene.zbAState == False:
                                # Point the active slot at the first empty
                                # one before creating the color layer.
                                for sl in ob.material_slots:
                                    if not sl.material:
                                        i = 0
                                        for sl in ob.material_slots:
                                            ob.active_material_index = i
                                            if not sl.material:
                                                break
                                            else:
                                                i += 1
                                        break
                                bpy.ops.object.zb_paint_color()
                    if sd.viewport_shade != 'TEXTURED':
                        sd.viewport_shade = 'MATERIAL'
                    bpy.ops.object.mode_set(mode='TEXTURE_PAINT')
                    try:
                        ob = bpy.context.active_object
                        for mod in ob.modifiers:
                            if mod.type == 'BEVEL':
                                if mod.show_viewport == True:
                                    bpy.ops.object.mode_set(mode='OBJECT')
                                    mod.show_viewport = True
                                    if userMsg == '':
                                        bpy.ops.object.mode_set(mode='TEXTURE_PAINT')
                                    break
                            if mod.type == 'MULTIRES':
                                mod.show_viewport = True
                    except:
                        pass
                else:
                    userMsg = "This mode is only for mesh objects."
            elif self.modeButton == 4:
                # Particle edit: bake skin, ensure a strand system exists.
                if scene.zbFastMode:
                    try:
                        ob.modifiers["Multires"].show_viewport = False
                    except:
                        pass
                skin = len([mod for mod in ob.modifiers if mod.type == 'SKIN'])
                if skin:
                    fu7()
                    userMsg = "APPLIED SKIN: Click 'EYE' icon next to" + ob.name + " in the outliner to see it."
                report = 0
                for type in ob.modifiers:
                    try:
                        if "Bones" not in type.object.name:
                            report = 1
                            break
                    except:
                        pass
                if report == 1:
                    userMsg = "Object already has armature. None created for it."
                try:
                    if bpy.context.active_object.type == 'MESH':
                        if context.active_object.particle_systems.active is None:
                            bpy.ops.object.zb_add_strands(option = 'NEW')
                        bpy.ops.object.mode_set(mode='PARTICLE_EDIT')
                    else:
                        userMsg = "This mode is not available for this type of object."
                except:
                    pass
        else:
            if ob:
                if bpy.context.mode != 'POSE':
                    bpy.ops.object.mode_set(mode='OBJECT')
            if self.modeButton != 1:
                if self.modeButton != 6:
                    userMsg = "This mode is only for mesh objects."
        if scene.zbDisableShadows:
            # Restore ray shadows when leaving sculpt/paint modes.
            if self.modeButton > 3 or self.modeButton < 2:
                for lamp in bpy.data.lamps:
                    try:
                        lamp.shadow_method = 'RAY_SHADOW'
                    except:
                        pass
        if self.modeButton == 6:
            bpy.ops.screen.screen_full_area()
        # Decal brushes shared between paint and sculpt need their color
        # ramp toggled per-mode.
        for brush in bpy.data.brushes:
            if 'Decal'.lower() in brush.name.lower():
                if brush.use_paint_sculpt:
                    if brush.use_paint_image:
                        tex = brush.texture
                        if mode == 'PAINT_TEXTURE':
                            tex.use_color_ramp = True
                        if mode == 'SCULPT':
                            tex.use_color_ramp = False
        if userMsg:
            self.report({'INFO'}, userMsg)
        return{'FINISHED'}
class cl11(bpy.types.Operator):
    """Convert the active object's material into a paintable ' ZB Painted'
    material.

    Blender Internal: renames the material, ensures a UV map exists and
    adds a Color paint layer.  Cycles: wraps the material's existing node
    tree into a reusable node group ('... Alchemy Material ZB'), adds the
    ZB paint nodes in a new material slot, then re-links the group's
    Surface/Volume/Displacement outputs so painting mixes with the
    original shader.  The pre-conversion material slot is removed.
    """
    bl_idname =i_0[39]
    bl_label =i_0[40]
    bl_options = {'REGISTER', 'UNDO'}
    def execute(self,context):
        wm = bpy.context.window_manager
        scene = bpy.context.scene
        renEng = scene.render.engine
        ob = bpy.context.active_object
        fu8(ob)  # purge empty material slots first
        mat = ob.active_material
        origMat = mat.name
        newMatName = ' ZB Painted'
        dispMeth = ''
        # Remember the Cycles displacement method so it can be restored after
        # the material is rebuilt (attribute only exists on newer builds).
        if hasattr(mat.cycles,'displacement_method'):
            dispMeth = mat.cycles.displacement_method
        if renEng == 'BLENDER_RENDER':
            mat.name = mat.name + newMatName
            # BUGFIX: was "if ob.data.uv_textures is False" which is never
            # true for a bpy collection; unwrap only when no UV map exists.
            if not ob.data.uv_textures:
                bpy.ops.object.mode_set(mode='EDIT')
                useSeams = False
                for edge in ob.data.edges:
                    if edge.use_seam:
                        useSeams = True
                        bpy.ops.uv.unwrap()
                        break
                if useSeams is False:
                    bpy.ops.mesh.select_all(action='SELECT')
                    if scene.zbUseLightMap:
                        bpy.ops.uv.lightmap_pack(PREF_MARGIN_DIV=0.3)
                    else:
                        bpy.ops.uv.smart_project(island_margin = 0.03,
                            angle_limit = 45)
            if mat.use_diffuse_ramp:
                mat.diffuse_ramp_blend = 'DIVIDE'
            # Add the Color paint layer via the shared helper.
            layerType = "Color"
            texCol = 0.9
            texOpas = 0
            alphaChoice = True
            normalChoice = True
            mTex = texLayerAdder(layerType, texCol, texOpas, alphaChoice,
                normalChoice)
        else:
            # --- Cycles: capture the current node tree into a group ---
            aType = bpy.context.area.type
            bpy.context.area.type = 'NODE_EDITOR'
            bpy.context.space_data.tree_type = 'ShaderNodeTree'
            bpy.context.space_data.shader_type = 'OBJECT'
            bpy.ops.wm.redraw_timer(type='DRAW_WIN', iterations=0)
            activeMat = mat
            # Record which sockets feed the material output so the group's
            # matching outputs can be re-linked later (0 = not connected).
            g1_vol = 0
            g1_dis = 0
            g1_surf = 0
            for node in activeMat.node_tree.nodes:
                if node.type == 'OUTPUT_MATERIAL':
                    node.select = False
                    if node.inputs['Surface'].links:
                        l = node.inputs['Surface'].links[0]
                        g1_surf = l.from_socket.name
                    if node.inputs['Volume'].links:
                        l = node.inputs['Volume'].links[0]
                        g1_vol = l.from_socket.name
                    if node.inputs['Displacement'].links:
                        l = node.inputs['Displacement'].links[0]
                        g1_dis = l.from_socket.name
                else:
                    node.select = True
            # Group every selected node; detect the freshly created group by
            # comparing against the pre-existing group names.
            allGroups = []
            for g in bpy.data.node_groups:
                allGroups.append(g.name)
            bpy.ops.node.group_make()
            bpy.ops.node.tree_path_parent()
            bpy.ops.node.group_ungroup()
            for group in bpy.data.node_groups:
                if group.name not in allGroups:
                    group01 = group
                    group01.tag = True
                    break
            group01.name = activeMat.name + ' Alchemy Material ZB'
            bpy.context.area.type = aType
            # New slot for the ZB paint material plus a Color layer.
            bpy.ops.object.material_slot_add()
            layerType = "Color"
            texCol = 0.9
            texOpas = 0
            alphaChoice = True
            normalChoice = True
            mTex = texLayerAdder(layerType, texCol, texOpas, alphaChoice,
                normalChoice)
            mat = ob.active_material
            if ' Painted' in mat.name:
                # BUGFIX: was a bare call to split() (NameError); split the
                # material's own name to drop the old ' Painted' suffix.
                mn1 = mat.name.split(' Painted')[0]
                mat.name = mn1 + newMatName
            else:
                mat.name = mat.name + newMatName
            # Remove the original (pre-conversion) material slot.
            # BUGFIX: the old loop advanced the index *after* the check, so it
            # tested slot 0 twice and never tested the last slot.
            for i, slot in enumerate(ob.material_slots):
                ob.active_material_index = i
                if ob.active_material and ob.active_material.name == origMat:
                    bpy.ops.object.material_slot_remove()
                    break
            # Make the new ' ZB Painted' slot active.
            # BUGFIX: the old loop assigned slot *objects* to
            # active_material_index (a TypeError swallowed by try/except), so
            # the active slot never actually changed.
            for i, slot in enumerate(ob.material_slots):
                if slot.material and ' ZB Painted' in slot.material.name:
                    ob.active_material_index = i
                    mat = ob.active_material
                    break
            # Locate the ZB paint nodes created by texLayerAdder.
            nodes = mat.node_tree.nodes
            for node in nodes:
                if node.type == 'OUTPUT_MATERIAL':
                    nodeOutput = node
                if node.name == 'Image Texture zbColor':
                    nodeImgTex = node
                if node.name == 'Diffuse BSDF zbColor':
                    nodeZBColor = node
                if node.name == 'Mixed1':
                    nodeMixed1 = node
                if node.name == 'Mixed8':
                    nodeMixed8 = node
                if node.name == 'Bright/Contrast zbColor':
                    nodeImgOut = node
                if node.name == 'Math zbBump':
                    nodeZBBump = node
                if node.name == 'Math zbDisplace':
                    nodeZBDisplace = node
            # Insert the group plus mixing nodes and wire everything up.
            nodeGroup = nodes.new(type = 'ShaderNodeGroup')
            nodeGroup.node_tree = group01
            nodeGroup.location = (-50, 225)
            nodeInvert = nodes.new(type = 'ShaderNodeInvert')
            nodeInvert.location = (-50, 105)
            nodeMixed0 = nodes.new(type = 'ShaderNodeMixShader')
            nodeMixed0.label = 'Mixed0'
            nodeMixed0.name = 'Mixed0'
            nodeMixed0.location = (505, 135)
            nodeBW = nodes.new(type = 'ShaderNodeRGBToBW')
            nodeBW.location = (2250, -125)
            nodeMixDis = nodes.new(type = 'ShaderNodeMixRGB')
            nodeMixDis.name = 'Mix Alchemy Displacement'
            nodeMixDis.label = 'Mix Alchemy Displacement'
            nodeMixDis.location = (2500, -288)
            link = mat.node_tree.links
            link.new(nodeGroup.outputs[g1_surf], nodeMixed0.inputs[2])
            link.new(nodeMixed0.outputs['Shader'], nodeMixed1.inputs[1])
            link.new(nodeZBColor.outputs['BSDF'], nodeMixed0.inputs[1])
            link.new(nodeImgTex.outputs['Alpha'], nodeInvert.inputs['Color'])
            link.new(nodeInvert.outputs['Color'], nodeMixed0.inputs['Fac'])
            link.new(nodeImgOut.outputs['Color'], nodeBW.inputs['Color'])
            link.new(nodeMixDis.outputs['Color'],nodeZBDisplace.inputs[1])
            if nodeZBDisplace.inputs[1].links[0].from_node.name == 'Math zbBump':
                link.new(nodeZBBump.outputs['Value'], nodeMixDis.inputs[1])
            if g1_dis:
                link.new(nodeBW.outputs['Val'], nodeMixDis.inputs['Fac'])
                link.new(nodeGroup.outputs[g1_dis], nodeMixDis.inputs[1])
                link.new(nodeGroup.outputs[g1_dis], nodeMixDis.inputs[2])
            if g1_vol:
                try:
                    if nodeGroup.outputs[g1_vol] == nodeGroup.outputs[0]:
                        # BUGFIX: compare socket *names*; comparing a socket
                        # object to a string is always False.
                        if nodeGroup.outputs[1].name == 'Displacement':
                            link.new(nodeGroup.outputs[2],nodeOutput.inputs['Volume'])
                        else:
                            link.new(nodeGroup.outputs[1],nodeOutput.inputs['Volume'])
                    else:
                        link.new(nodeGroup.outputs[g1_vol],nodeOutput.inputs['Volume'])
                except:
                    pass
            # Drop any leftover slots that are not the painted material.
            i = 0
            for mat in ob.data.materials:
                ob.active_material_index = i
                if 'ZB Painted' not in ob.active_material.name:
                    bpy.ops.object.material_slot_remove()
                else:
                    i += 1
        if dispMeth:
            mat.cycles.displacement_method = dispMeth
        return{'FINISHED'}
def fu8(ob):
    """Remove every material slot of *ob* that holds no material.

    Walks the slots by index, deleting empty ones in place, and leaves
    slot 0 active when done.
    """
    idx = 0
    for slot in ob.material_slots:
        ob.active_material_index = idx
        if hasattr(ob.active_material, 'name'):
            # Slot holds a real material; keep it and move on.
            idx += 1
        else:
            print('removing material slot', idx)
            bpy.ops.object.material_slot_remove()
    ob.active_material_index = 0
def fu9(self, context):
    """Update handler for the zbFastMode toggle.

    Fast mode ON: lowers viewport cost (render simplify, tiny lamp shadow
    buffers with the previous size cached in wm.zbLampBufferSize, hidden
    bevel modifiers with flat shading, BOUNDBOX shading on the smaller
    extra 3D view).  Fast mode OFF: restores all of those settings.  The
    caller's selection and active object are preserved either way.
    """
    wm = bpy.context.window_manager
    scene = bpy.context.scene
    sd = bpy.context.space_data
    ren = scene.render
    # Remember selection state; the bevel/shading passes below clobber it.
    origSelected = bpy.context.selected_objects
    origActive = bpy.context.active_object.name
    if self.zbFastMode:
        if sd.viewport_shade == 'MATERIAL':
            sd.viewport_shade = 'TEXTURED'
        ren.use_simplify = True
        # zbLampBufferSize == 0 means "not cached yet": apply the cheap
        # simplify defaults only once.
        if wm.zbLampBufferSize == 0:
            ren.simplify_child_particles = 0.0
            ren.simplify_shadow_samples = 1
            ren.simplify_ao_sss = 0.15
            ren.simplify_subdivision = 3
        for lamp in bpy.data.lamps:
            if lamp.type != 'HEMI':
                if lamp.shadow_buffer_size > 0:
                    # 128 is the "already shrunk" marker; don't overwrite the
                    # cached original size with it.
                    if lamp.shadow_buffer_size != 128:
                        wm.zbLampBufferSize = lamp.shadow_buffer_size
                        lamp.shadow_buffer_size = 128
        scene.tool_settings.sculpt.show_low_resolution = True
        if context.sculpt_object:
            sd.viewport_shade = 'SOLID'
        # Multires display is too heavy while editing particles in fast mode.
        if context.particle_edit_object:
            try:
                bpy.context.object.modifiers["Multires"].show_viewport = False
            except:
                pass
        else:
            try:
                bpy.context.object.modifiers["Multires"].show_viewport = True
            except:
                pass
        # Hide bevel modifiers and flat-shade the objects that carry one.
        try:
            sel = bpy.context.selected_objects
            bpy.ops.object.select_all(action='DESELECT')
            for ob in bpy.data.objects:
                for mod in ob.modifiers:
                    if mod.type == 'BEVEL':
                        mod.show_viewport = False
                        ob.select = True
            bpy.ops.object.shade_flat()
            bpy.ops.object.select_all(action='DESELECT')
            for ob in sel:
                ob.select = True
        except:
            pass
        if len(bpy.context.screen.areas) < 3:
            bpy.ops.screen.screen_full_area()
        # On the stock layouts, drop the smaller extra 3D view to BOUNDBOX and
        # enable backface culling on the main one.
        if bpy.context.screen.name in {'Blender','Hacker'}:
            v3d_list = [area for area in bpy.context.screen.areas if area.type == 'VIEW_3D']
            if v3d_list:
                if len(v3d_list) > 1:
                    smallest_area = min(v3d_list, key=lambda area: area.width * area.height)
                    smallest_area.spaces.active.viewport_shade = 'BOUNDBOX'
                largest_area = max(v3d_list, key=lambda area: area.width * area.height)
                largest_area.spaces.active.show_backface_culling = True
    else:
        # Fast mode off: restore the richer viewport settings.
        ren.use_simplify = False
        scene.tool_settings.sculpt.show_low_resolution = False
        if sd.viewport_shade == 'TEXTURED':
            sd.viewport_shade = 'MATERIAL'
        try:
            ob = bpy.context.active_object
            for mod in ob.modifiers:
                if mod.type == 'MULTIRES':
                    mod.show_viewport = True
                    break
        except:
            pass
        # Restore each lamp's cached shadow buffer size.
        for lamp in bpy.data.lamps:
            if lamp.type != 'HEMI':
                if lamp.shadow_buffer_size > 0:
                    lamp.shadow_buffer_size = wm.zbLampBufferSize
        # Re-show bevel modifiers and smooth-shade their objects again.
        try:
            sel = bpy.context.selected_objects
            bpy.ops.object.select_all(action='DESELECT')
            for ob in bpy.data.objects:
                for mod in ob.modifiers:
                    if mod.type == 'BEVEL':
                        mod.show_viewport = True
                        ob.select = True
            bpy.ops.object.shade_smooth()
            bpy.ops.object.select_all(action='DESELECT')
            for ob in sel:
                ob.select = True
        except:
            pass
        if len(bpy.context.screen.areas) < 3:
            bpy.ops.screen.screen_full_area()
        for screen in bpy.data.screens:
            for area in screen.areas:
                if area.type == 'VIEW_3D':
                    if area.spaces.active.viewport_shade == 'BOUNDBOX':
                        area.spaces.active.viewport_shade = 'MATERIAL'
                    area.spaces.active.show_backface_culling = False
    # Put the caller's selection and active object back.
    for obj in bpy.context.selected_objects:
        obj.select = False
    for obj in origSelected:
        obj.select = True
    scene.objects.active = bpy.data.objects[origActive]
def fu10(ob):
    """Make sure *ob* has a UV map.

    Keeps an existing map untouched; unwraps along marked seams when any
    exist; otherwise auto-unwraps with lightmap pack or smart project
    depending on scene.zbUseLightMap.
    """
    scene = bpy.context.scene
    ob.update_from_editmode()
    mesh = ob.data
    if ob.data.uv_textures:
        print('Used', ob.name + "'s existing uv map")
        return
    has_seams = any(e.use_seam for e in mesh.edges)
    if has_seams:
        # Unwrap respecting the user's seams (requires edit mode).
        if bpy.context.mode != 'EDIT_MESH':
            bpy.ops.object.editmode_toggle()
        bpy.ops.mesh.select_all(action='SELECT')
        bpy.ops.uv.unwrap()
        bpy.ops.object.editmode_toggle()
    elif scene.zbUseLightMap:
        bpy.ops.uv.lightmap_pack(PREF_MARGIN_DIV=0.3)
    else:
        bpy.ops.uv.smart_project(island_margin=0.03, angle_limit=15)
def fu11():
    """Sync the material's paint-slot selection to the active texture's image.

    Silently does nothing when there is no active object, material, or
    image (any failure is swallowed).
    """
    try:
        ob = bpy.context.active_object
        mat = ob.active_material
        target_img = mat.active_texture.image
        for slot_idx, paint_img in enumerate(mat.texture_paint_images):
            if paint_img == target_img:
                mat.paint_active_slot = slot_idx
                break
    except:
        pass
def fu12(context,tn):
    """Activate texture slot *tn* on the active object's material.

    Enables the slot, guarantees a UV layer exists, assigns the slot's
    image to the faces using this material (Blender Internal viewport
    texturing), then mirrors the change into the matching Cycles image
    node and the paint-slot selection.  All failures are ignored, so this
    is safe to call from UI code with partially set-up materials.
    """
    try:
        ob = bpy.context.active_object
        me = ob.data
        mat = ob.active_material
        mat.active_texture_index = tn
        ts = mat.texture_slots[tn]
        try:
            ts.use = True
        except:
            pass
        if not me.uv_textures:
            bpy.ops.mesh.uv_texture_add()
        # Choose the UV layer this slot is mapped to (fall back to active).
        if ts.texture_coords == 'UV':
            if ts.uv_layer:
                uvtex = me.uv_textures[ts.uv_layer]
            else:
                uvtex = me.uv_textures.active
            me.uv_textures.active= uvtex
        else:
            uvtex = me.uv_textures.active
        uvtex = uvtex.data.values()
        img = ts.texture.image
        m_id = ob.active_material_index
        # Assign (or clear) the image on every face that uses this material.
        if img:
            for f in me.polygons:
                if f.material_index == m_id:
                    uvtex[f.index].image = img
        else:
            for f in me.polygons:
                if f.material_index == m_id:
                    uvtex[f.index].image = None
        fu11()
        me.update()
    except:
        pass
    # Mirror into the Cycles node tree: the image name encodes the layer
    # type (Color / Bump / Specular / Glow / Alpha_Mask).
    try:
        if "color" in img.name.lower() :
            node = mat.node_tree.nodes['Image Texture zbColor']
        if "bump" in img.name.lower() :
            node = mat.node_tree.nodes['Image Texture zbBump']
        if "specular" in img.name.lower() :
            node = mat.node_tree.nodes['Image Texture zbSpecular']
        if "glow" in img.name.lower() :
            node = mat.node_tree.nodes['Image Texture zbGlow']
        if "alpha_mask" in img.name.lower() :
            node = mat.node_tree.nodes['Image Texture zbAlpha_Mask']
        node.image = img
        node_tree = bpy.data.materials[mat.name].node_tree
        node_tree.nodes.active = node
        me.update()
    except:
        pass
    return
class cl12(bpy.types.Operator):
    """Operator: make the texture slot at ``tex_index`` the active paint layer
    (delegates all the work to fu12)."""
    bl_idname =i_0[41]
    bl_label =i_0[42]
    bl_description =i_0[43]
    tex_index = IntProperty(name = 'tex_index', default = 0)
    @classmethod
    def poll(cls, context):
        # Only meaningful when the scene has an active object.
        return context.active_object is not None
    def execute(self, context):
        fu12(context, self.tex_index)
        return {'FINISHED'}
class cl13(bpy.types.Operator):
    """Modal 'erase' stroke operator.

    invoke() flips the current brush into an erasing mode (ERASE_ALPHA or
    SUB for texture paint, SUB for weight paint, white MIX/BLUR for vertex
    paint), caches the previous blend in wm.zbLastBrushBlend, and starts
    the paint stroke.  modal() restores the brush settings on mouse
    release.
    """
    bl_idname =i_0[44]
    bl_label =i_0[45]
    bl_description =i_0[46]
    def modal(self, context, event):
        paint = bpy.context.mode.startswith('PAINT_TEXTURE')
        weight = bpy.context.mode.startswith('PAINT_WEIGHT')
        vertex = bpy.context.mode.startswith('PAINT_VERTEX')
        aType = bpy.context.area.type
        wm = bpy.context.window_manager
        if event.value == 'RELEASE':
            # Stroke finished: put the brush back how invoke() found it.
            if paint or aType == 'IMAGE_EDITOR':
                brush = bpy.context.tool_settings.image_paint.brush
                brushBlend = wm.zbLastBrushBlend
                # Never restore into ERASE_ALPHA; fall back to MIX.
                if brushBlend == "ERASE_ALPHA":
                    brushBlend = "MIX"
                try:
                    bpy.context.tool_settings.image_paint.brush.blend = brushBlend
                except:
                    pass
                if brush is not None:
                    if 'Mask' in brush.name:
                        brush.weight = 1
            if weight:
                brush = bpy.context.tool_settings.weight_paint.brush
                brush.vertex_tool = 'ADD'
            if vertex:
                brush = bpy.context.tool_settings.vertex_paint.brush
                brush.vertex_tool = 'MIX'
                # Ctrl keeps the white erase color; otherwise back to black.
                if event.ctrl is False:
                    brush.color = (0,0,0)
            return {'FINISHED'}
        return {'RUNNING_MODAL'}
    def invoke(self, context, event):
        context.window_manager.modal_handler_add(self)
        paint = bpy.context.mode.startswith('PAINT_TEXTURE')
        weight = bpy.context.mode.startswith('PAINT_WEIGHT')
        vertex = bpy.context.mode.startswith('PAINT_VERTEX')
        aType = bpy.context.area.type
        wm = bpy.context.window_manager
        try:
            if paint or aType == 'IMAGE_EDITOR':
                ob = bpy.context.active_object
                img = 0
                try:
                    mat = ob.active_material
                    img = mat.active_texture.image
                except:
                    pass
                brush = bpy.context.tool_settings.image_paint.brush
                # Cache the current blend so modal() can restore it.
                brushBlend = brush.blend
                wm.zbLastBrushBlend = brushBlend
                if 'Mask' in brush.name:
                    brush.weight = 0
                else:
                    # Bump layers erase by subtracting; others erase alpha.
                    if img:
                        if 'Bump' in img.name:
                            brush.blend = 'SUB'
                        else:
                            brush.blend = 'ERASE_ALPHA'
                    else:
                        brush.blend = 'ERASE_ALPHA'
            if weight:
                brush = bpy.context.tool_settings.weight_paint.brush
                brush.vertex_tool = 'SUB'
            if vertex:
                brush = bpy.context.tool_settings.vertex_paint.brush
                if event.ctrl:
                    brush.vertex_tool = 'BLUR'
                else:
                    brush.vertex_tool = 'MIX'
                    brush.color = (1,1,1)
        except:
            pass
        # Kick off the actual paint stroke for the current mode.
        if paint or aType == 'IMAGE_EDITOR':
            bpy.ops.paint.image_paint('INVOKE_DEFAULT')
        if weight:
            bpy.ops.paint.weight_paint('INVOKE_DEFAULT')
        if vertex:
            bpy.ops.paint.vertex_paint('INVOKE_DEFAULT')
        return {'RUNNING_MODAL'}
class cl14(bpy.types.Operator):
    """Move the active texture slot up or down in the material's stack.

    tex_move_up / tex_move_down act as booleans (1 = requested).  The move
    is repeated (bounded by a safety counter of 16) so the layer skips
    over empty or non-UV slots until it lands beside a real paint layer.
    """
    bl_idname =i_0[47]
    bl_label =i_0[48]
    bl_description =i_0[49]
    tex_move_up = IntProperty(default = 0)
    tex_move_down = IntProperty(default = 0)
    def execute(self, context):
        try:
            ob = bpy.context.active_object
            mat = ob.active_material
            slots = mat.texture_slots
            index = mat.active_texture_index
            ts = slots[index]
            # texture.slot_move acts on the slot passed via a context copy.
            ctx = bpy.context.copy()
            ctx['texture_slot'] = ts
        except:
            pass
        moveValue = 0
        moveType = "NONE"
        if self.tex_move_up == 1:
            if index > 0:
                moveType = "UP"
                moveValue = -1
        if self.tex_move_down == 1:
            if index < 17:
                moveType = "DOWN"
                moveValue = 1
        if moveType != "NONE":
            safety = 0
            while safety < 16:
                safety += 1
                try:
                    # Peek at the slot we are about to move into.
                    tex = slots[mat.active_texture_index + moveValue]
                    if tex is None or tex.texture_coords != 'UV':
                        # Empty / non-UV slot: move through it and keep going.
                        bpy.ops.texture.slot_move(ctx, type= moveType)
                    else:
                        # Landed beside a real layer: final move, then stop.
                        bpy.ops.texture.slot_move(ctx, type= moveType)
                        self.tex_move_down = 0
                        self.tex_move_up = 0
                        safety = 17
                except:
                    pass
        # Re-sync the paint slot selection with the moved layer.
        fu11()
        return {'FINISHED'}
class cl15(bpy.types.Operator):
    """Delete the paint layer living in texture slot ``tex_kill``.

    Removes the slot's image datablock and texture, resets material
    settings the layer was driving (transparency, specular), neutralizes
    the matching Cycles node, then activates the highest remaining layer
    and keeps the Cycles color node populated.
    """
    bl_idname =i_0[50]
    bl_label =i_0[51]
    bl_description =i_0[52]
    tex_kill = IntProperty(name="tex_kill", default = 0)
    def execute(self, context):
        ob = bpy.context.active_object
        mat = ob.active_material
        ts = mat.texture_slots[self.tex_kill]
        texName = mat.texture_slots[self.tex_kill].name
        # Undo material-level effects this layer was responsible for.
        if ts.use_map_alpha == True:
            mat.use_transparency = False
        if ts.use_map_color_spec == True:
            mat.specular_color = (1, 1, 1)
            mat.specular_intensity = 0.5
        # Neutralize the matching Cycles nodes (keyed by image name).
        try:
            if "specular" in ts.texture.image.name.lower():
                node = mat.node_tree.nodes['Image Texture zbSpecular']
                node = mat.node_tree.nodes['Math zbSpecular']
                node.inputs[1].default_value = 0
            if "glow" in ts.texture.image.name.lower():
                node = mat.node_tree.nodes['Image Texture zbGlow']
                node = mat.node_tree.nodes['Math zbGlow']
                node.inputs[1].default_value = 0
            if "alpha_mask" in ts.texture.image.name.lower():
                node = mat.node_tree.nodes['Image Texture zbAlpha_Mask']
                node.mute = True
            if "transparent" in ts.texture.image.name.lower():
                if len(mat.texture_slots.items()) == 1:
                    node = mat.node_tree.nodes['Image Texture zbColor']
                    node.mute = True
        except:
            pass
        # Remove image + texture, then clear the slot itself.
        try:
            if self.tex_kill > -1:
                if ts:
                    if mat.texture_slots[self.tex_kill]:
                        imgDie = mat.texture_slots[self.tex_kill].texture.image
                        try:
                            # do_unlink only exists on newer Blender builds;
                            # fall back to manual user clearing.
                            bpy.data.images.remove(imgDie, do_unlink = True)
                        except:
                            if imgDie.users > 0:
                                imgDie.user_clear()
                            bpy.data.images.remove(imgDie)
                        ts.texture = None
                        mat.texture_slots.clear(self.tex_kill)
                        # If the deleted layer was the active one, activate
                        # the highest remaining slot.
                        if self.tex_kill == mat.active_texture_index:
                            x = 17
                            while x > -1:
                                if mat.texture_slots[x]:
                                    bpy.ops.object.zb_set_active_layer(tex_index=x)
                                    break
                                x -= 1
        except:
            pass
        fu0()
        # Keep the Cycles color node populated: if it just lost its image,
        # feed it the first remaining 'Color' layer.
        if hasattr(mat.node_tree,'nodes'):
            nodes = mat.node_tree.nodes
            if 'Image Texture zbColor' in nodes:
                colNode = nodes['Image Texture zbColor']
                if not colNode.image:
                    for slot in mat.texture_slots:
                        try:
                            if slot is not None:
                                if slot.texture:
                                    if slot.texture.type == 'IMAGE':
                                        if slot.texture.image:
                                            img = slot.texture.image
                                            if 'Color' in slot.texture.image.name:
                                                colNode.image = img
                                                break
                        except:
                            pass
        return {'FINISHED'}
class cl16(bpy.types.Operator):
    """Reload every image datablock from disk and refresh all 3D viewports."""
    bl_idname =i_0[53]
    bl_label =i_0[54]
    bl_description =i_0[55]
    def execute(self,context):
        for image in bpy.data.images:
            image.reload()
        # Force the 3D views to pick up the reloaded pixels.
        view3d_areas = [a for a in bpy.context.screen.areas if a.type == 'VIEW_3D']
        for area in view3d_areas:
            area.tag_redraw()
        return{'FINISHED'}
def fu13(self,context):
    """Update callback: choosing a save folder turns on hard-drive saving
    and flags that the next save must use the newly chosen path."""
    bpy.context.scene.zbSaveToHardDrive = True
    bpy.context.window_manager.zbNewSavePath = True
    return
def zbSaveType(self,context):
    """Update callback: changing the save file type forces the next save to
    re-resolve its output path."""
    bpy.context.window_manager.zbNewSavePath = True
    return
class cl17(bpy.types.Operator):
    """Save paint layers.

    With scene.zbSaveToHardDrive the images are written to disk under
    wm.zbSaveImagePath (a path is auto-discovered from existing image
    filepaths when unset); otherwise images are packed into the .blend.
    save_only_active restricts the save to the active object's material.
    """
    bl_idname =i_0[56]
    bl_label =i_0[57]
    bl_description =i_0[58]
    # When True, only the active object's layers are saved/packed.
    save_only_active = bpy.props.BoolProperty()
    def execute(self, context):
        wm = bpy.context.window_manager
        scene = bpy.context.scene
        mode = bpy.context.mode
        userMsg = ''
        aType = 0
        try:
            aType = bpy.context.area.type
        except:
            pass
        try:
            # NOTE(review): Scene doesn't normally expose 'active_object', so
            # this branch presumably always runs to guarantee an active
            # object exists -- verify against the Blender API in use.
            if hasattr(scene,'active_object') is False:
                for obj in bpy.context.selected_objects:
                    scene.objects.active = obj
                    break
            ob = bpy.context.active_object
            mat = ob.active_material
            if mat:
                if scene.zbSaveToHardDrive:
                    # No save folder chosen yet: try to discover one from the
                    # active material's image filepaths...
                    if len(wm.zbSaveImagePath) < 3:
                        for ts in mat.texture_slots:
                            if ts is not None:
                                try:
                                    if ts.texture_coords == 'UV':
                                        if ts.texture.type == 'IMAGE':
                                            try:
                                                imgPath = bpy.path.abspath(ts.texture.image.filepath)
                                                if "\\..\\" not in imgPath:
                                                    imgDir = os.path.dirname(imgPath)
                                                    if len(imgDir) > 3:
                                                        wm.zbSaveImagePath = imgDir + "\\"
                                                        break
                                                    break
                                            except:
                                                pass
                                except:
                                    pass
                    # ...and failing that, from any object's material.
                    if len(wm.zbSaveImagePath) < 3:
                        for ob in bpy.data.objects:
                            if ob.active_material:
                                for ts in ob.active_material.texture_slots:
                                    try:
                                        if ts.texture_coords == 'UV':
                                            if ts.texture.image:
                                                imgPath = bpy.path.abspath(ts.texture.image.filepath)
                                                if len(imgPath) > 3:
                                                    if "\\..\\" not in imgPath:
                                                        imgDir = os.path.dirname(imgPath)
                                                        if len(imgDir) > 3:
                                                            wm.zbSaveImagePath = imgDir + "\\"
                                                            break
                                    except:
                                        pass
                    if len(wm.zbSaveImagePath) < 3:
                        userMsg = ("You must select a folder before saving to hard drive")
                    else:
                        # Normalize image names so they carry their layer
                        # type (Color/Bump/...) before writing files.
                        for ts in mat.texture_slots:
                            if ts is not None:
                                try:
                                    ts.texture.image
                                    texName = ts.texture.name
                                    imgName = ts.texture.image.name
                                    types = ['Color', 'Bump', 'Specular', 'Glow',
                                        'Transparent', 'Alpha_Mask']
                                    for type in types:
                                        if type.lower() in imgName.lower():
                                            if type.lower() in texName.lower():
                                                newName = texName
                                            else:
                                                newName = texName + type
                                            ts.texture.image.name = newName
                                except:
                                    pass
                        # Resolve the save path to absolute without losing the
                        # "new path chosen" flag (the property update resets it).
                        newSaveState = wm.zbNewSavePath
                        zbsip = wm.zbSaveImagePath
                        wm.zbSaveImagePath = bpy.path.abspath(zbsip)
                        wm.zbNewSavePath = newSaveState
                        # image.save/save_as need an IMAGE_EDITOR context.
                        aType = bpy.context.area.type
                        bpy.context.area.type = 'IMAGE_EDITOR'
                        i = 0
                        for slot in mat.texture_paint_slots:
                            newSaveLocation = True
                            img = mat.texture_paint_images[i]
                            mat.paint_active_slot = i
                            # Relative "up-dir" paths are unusable; clear them.
                            if "\\..\\" in img.filepath:
                                img.filepath_raw = ""
                            if len(img.filepath) > 3:
                                if wm.zbNewSavePath == False:
                                    print("Saving image at it's previous save location")
                                    newSaveLocation = False
                                    bpy.ops.image.save()
                            if self.save_only_active:
                                if not img.packed_file:
                                    newSaveLocation = False
                            if newSaveLocation:
                                print('Saving image using selected folder location')
                                override = bpy.context.copy()
                                override['edit_image'] = img
                                bpy.ops.image.pack(override, as_png = True)
                                # Map the UI extension to Blender's format enum.
                                ff = wm.zbSaveType
                                ff = ff.replace(".","")
                                if ff == 'TGA':
                                    ff = 'TARGA'
                                img.file_format = ff
                                path = bpy.path.abspath(wm.zbSaveImagePath)
                                imgPath = path + img.name + wm.zbSaveType
                                bpy.ops.image.save_as(filepath = imgPath)
                                if img.packed_file:
                                    bpy.ops.image.unpack(method = 'WRITE_ORIGINAL')
                                img.filepath_raw = bpy.path.abspath(imgPath)
                                # Remove the stray copy unpack may have left
                                # next to the .blend file.
                                try:
                                    blendFilePath = bpy.data.filepath
                                    blendFileDir = os.path.dirname(blendFilePath)
                                    imgToX = os.path.join(blendFileDir, img.name)
                                    if os.path.exists(imgToX):
                                        os.remove(imgToX)
                                except:
                                    pass
                            i += 1
                        if aType:
                            bpy.context.area.type = aType
                else:
                    # Not saving to disk: pack images into the .blend instead.
                    if self.save_only_active:
                        layersAmount = 0
                        for ts in mat.texture_slots:
                            if ts is not None:
                                try:
                                    if ts.texture.type == 'IMAGE':
                                        if ts.texture_coords == 'UV':
                                            image = ts.texture.image
                                            override = bpy.context.copy()
                                            override['edit_image'] = image
                                            bpy.ops.image.pack(override, as_png = True)
                                            layersAmount += 1
                                except:
                                    pass
                        if mode != 'PAINT_TEXTURE':
                            userMsg = "Paint layers saved."
                    else:
                        layersAmount = 0
                        for ob in bpy.data.objects:
                            mat = ob.active_material
                            if mat:
                                for ts in mat.texture_slots:
                                    if ts is not None:
                                        try:
                                            if ts.texture.type == 'IMAGE':
                                                if ts.texture_coords == 'UV':
                                                    image = ts.texture.image
                                                    override = bpy.context.copy()
                                                    override['edit_image'] = image
                                                    bpy.ops.image.pack(override, as_png = True)
                                                    layersAmount += 1
                                        except:
                                            pass
                        userMsg = "All paint layers saved."
        except:
            userMsg = "Had trouble saving all images."
        if userMsg:
            self.report({'INFO'}, userMsg)
        # A completed save consumes the "new save path" flag.
        if wm.zbNewSavePath:
            wm.zbNewSavePath = False
        return {'FINISHED'}
class cl18(bpy.types.Operator):
    """Paint a brush texture via a cylinder-projected proxy copy, then bake
    the result back onto the real mesh.

    action:
        START  - duplicate the object as '<name[:4]>_zb_proxy' with a
                 temporary material showing the brush image, hide the
                 original.
        FLIP   - re-run the cylinder projection on the proxy.
        CANCEL - delete the proxy and restore the original object.
        FINISH - bake the proxy's texture onto the original's active
                 paint layer, then remove the proxy.
    """
    bl_idname =i_0[59]
    bl_label =i_0[60]
    bl_description =i_0[61]
    action = bpy.props.StringProperty(default='START')
    @classmethod
    def poll(cls, context):
        return bpy.context.active_object.type == 'MESH'
    def execute(self, context):
        wm = bpy.context.window_manager
        scene = bpy.context.scene
        renEng = scene.render.engine
        ob = bpy.context.active_object
        mat = ob.active_material
        proxy = 0
        # Fallback: with no proxy in the scene (the START case), the active
        # object is the main object.
        mainOb = ob
        for obj in bpy.data.objects:
            if '_zb_proxy' in obj.name:
                proxy = obj
                # BUGFIX: was str.strip('_zb_proxy'), which strips any of
                # those *characters* from both ends (e.g. 'Suzy_zb_proxy'
                # -> 'Su'); cut the literal suffix instead.
                mainObName = obj.name.replace('_zb_proxy', '')
                if mainObName in bpy.data.objects:
                    mainOb = bpy.data.objects[mainObName]
                else:
                    # Proxy names are built from only the first 4 characters
                    # of the source object's name, so fall back to a prefix
                    # match against the non-proxy objects.
                    for cand in bpy.data.objects:
                        if cand.name.startswith(mainObName):
                            if '_zb_proxy' not in cand.name:
                                mainOb = cand
                                break
        if self.action == 'START':
            bpy.ops.object.zb_save_layers(save_only_active = True)
            # Tag the mesh so CANCEL/FINISH know to switch back to Cycles.
            ob.data.tag = False
            if renEng == 'CYCLES':
                ob.data.tag = True
                scene.zbGoCycles = False
                fu18()
            brush = bpy.context.tool_settings.image_paint.brush
            img = brush.texture.image
            newName = ob.name[:4] + 'BrushColor'
            bpy.ops.object.zb_paint_color()
            mat.active_texture.name = newName
            mat.active_texture.image.name = newName
            bpy.ops.object.mode_set(mode='OBJECT')
            # Build the proxy: duplicate, give it its own material showing
            # only the brush image, and cylinder-project its UVs.
            bpy.ops.object.duplicate()
            proxy = ob.name[:4] + '_zb_proxy'
            ob = bpy.context.active_object
            ob.name = proxy
            ob.data.name = proxy
            ob = bpy.context.active_object
            bpy.ops.object.material_slot_remove()
            newMat = bpy.data.materials.new(name=proxy)
            ob.active_material = newMat
            mat = ob.active_material
            newTex = bpy.data.textures.new(name=proxy, type='IMAGE')
            mat.active_texture = newTex
            newTex.image = img
            bpy.ops.object.mode_set(mode='EDIT')
            bpy.ops.mesh.select_all(action='SELECT')
            bpy.ops.uv.cylinder_project()
            for uv_face in ob.data.uv_textures.active.data:
                uv_face.image = img
            # Stretch the brush image around the cylinder projection.
            mat.texture_slots[proxy].scale[0] = 5
            mat.texture_slots[proxy].scale[1] = 1
            mainOb.hide = True
            wm.showBrushLayerOptions = True
            bpy.ops.object.mode_set(mode='OBJECT')
        if self.action == 'CANCEL':
            if mainOb.data.tag:
                print('data tag is true')
                scene.zbGoCycles = True
            wm.showBrushLayerOptions = False
            # Unhide the original, then delete the proxy (detaching its
            # texture/material first so the shared brush image survives).
            scene.objects.active = mainOb
            mainOb.select = True
            mainOb.hide = False
            mainOb.select = False
            scene.objects.active = proxy
            for ts in proxy.active_material.texture_slots:
                if hasattr(ts,'texture'):
                    if ts.texture:
                        ts.texture = None
            for ms in proxy.material_slots:
                if hasattr(ms,'material'):
                    if ms.material:
                        ms.material = None
            bpy.ops.object.delete()
            mainOb.select = True
            scene.objects.active = mainOb
            # Find and remove the temporary brush paint layer.
            # NOTE(review): START names its layer '...BrushColor', so
            # 'BrushLayer' may never match and n falls through to the slot
            # count (capped at 17) -- verify the intended layer name.
            n = 0
            for ts in mainOb.active_material.texture_slots:
                try:
                    if 'BrushLayer' in ts.texture.name:
                        break
                except:
                    pass
                n += 1
            if n > 17:
                n = 17
            bpy.ops.object.zb_delete_texture(tex_kill=n)
            bpy.ops.object.mode_set(mode='TEXTURE_PAINT')
        if self.action == 'FLIP':
            # Re-project the proxy's UVs so the brush image faces the view.
            for obj in bpy.data.objects:
                if '_zb_proxy' in obj.name:
                    ob = obj
                    scene.objects.active = ob
                    break
            brush = bpy.context.tool_settings.image_paint.brush
            img = brush.texture.image
            bpy.ops.object.mode_set(mode='EDIT')
            bpy.ops.mesh.select_all(action='SELECT')
            bpy.ops.uv.cylinder_project()
            for uv_face in ob.data.uv_textures.active.data:
                uv_face.image = img
            bpy.ops.object.mode_set(mode='OBJECT')
            self.flip = False
        if self.action == 'FINISH':
            wm.showBrushLayerOptions = False
            for ob in bpy.data.objects:
                if '_zb_proxy' in ob.name:
                    proxy = ob
                    ob.hide = False
                    ob.hide_render = False
                    ob.select = True
                else:
                    ob.select = False
            mainOb.hide = False
            mainOb.hide_render = False
            # The bake needs the original in the proxy's spot; remember its
            # own transform so it can be restored afterwards.
            mainOb_location = mainOb.location.xyz
            mainOb_scale = mainOb.scale.xyz
            mainOb_rotX = mainOb.rotation_euler[0]
            mainOb_rotY = mainOb.rotation_euler[1]
            mainOb_rotZ = mainOb.rotation_euler[2]
            mainOb.location = proxy.location
            mainOb.rotation_euler = proxy.rotation_euler
            mainOb.scale = proxy.scale
            scene.objects.active = mainOb
            mainOb.select = True
            mainOb.hide = False
            # Bake proxy texture -> active paint layer of the original.
            scene.render.bake_type = 'TEXTURE'
            scene.render.use_bake_selected_to_active = True
            scene.render.bake_distance = 1
            mat = mainOb.active_material
            activeTex = mat.active_texture
            mat.active_texture = None
            bpy.ops.object.bake_image()
            mat.active_texture = activeTex
            img = activeTex.image
            override = bpy.context.copy()
            override['edit_image'] = img
            bpy.ops.image.pack(override, as_png = True)
            # Delete the proxy (detach shared data first).
            mainOb.select = False
            scene.objects.active = proxy
            for ts in proxy.active_material.texture_slots:
                if hasattr(ts,'texture'):
                    if ts.texture:
                        ts.texture = None
            for ms in proxy.material_slots:
                if hasattr(ms,'material'):
                    if ms.material:
                        ms.material = None
            bpy.ops.object.delete()
            mainOb.select = True
            scene.objects.active = mainOb
            mat = mainOb.active_material
            tex = mat.active_texture
            ts = mat.texture_slots[tex.name]
            ts.use_map_normal = True
            ts.normal_factor = 0
            # Restore the original transform.
            mainOb.location = mainOb_location.xyz
            mainOb.scale = mainOb_scale.xyz
            mainOb.rotation_euler[0] = mainOb_rotX
            mainOb.rotation_euler[1] = mainOb_rotY
            mainOb.rotation_euler[2] = mainOb_rotZ
            if mainOb.data.tag:
                scene.zbGoCycles = True
                mainOb.data.tag = False
            bpy.ops.object.mode_set(mode='TEXTURE_PAINT')
            fu0()
        return{'FINISHED'}
class cl19(bpy.types.Operator):
    """Bake the surface detail of every object sharing the active material
    into a tangent-space normal map and load it as the topmost paint layer.
    """
    bl_idname =i_0[62]
    bl_label =i_0[63]
    bl_description =i_0[64]
    def execute(self,context):
        scene = bpy.context.scene
        mode = bpy.context.mode
        re = scene.render.engine
        ob = bpy.context.active_object
        bpy.ops.object.zb_save_layers(save_only_active=True)
        bpy.ops.object.mode_set(mode='OBJECT')
        # Normal baking here relies on Blender Internal; temporarily drop
        # out of Cycles and restore it at the end.
        if re == 'CYCLES':
            scene.zbGoCycles = False
        newName = ob.name[:4] + 'Normal'
        img = bpy.data.images.new(name= newName, width= scene.zbImgSize,
            height = scene.zbImgSizeH)
        # Select every object sharing this material and point its active UV
        # layer at the bake target image.
        for obj in bpy.data.objects:
            if hasattr(obj,'active_material'):
                if obj.active_material == ob.active_material:
                    obj.select = True
                    if obj.data.uv_textures:
                        # BUGFIX: was iterating ob (the active object), so
                        # sibling objects never had their UV faces assigned
                        # the bake image.
                        for uv_face in obj.data.uv_textures.active.data:
                            uv_face.image = img
                else:
                    obj.select = False
        override = bpy.context.copy()
        override['edit_image'] = img
        bpy.ops.image.pack(override, as_png = True)
        scene.render.use_bake_multires = False
        scene.render.bake_type = 'NORMALS'
        bpy.ops.object.bake_image()
        bpy.ops.object.zb_paint_normal()
        # Move the fresh layer to the top of the (max 18 slot) stack.
        for i in range(16):
            bpy.ops.object.zb_move_texture(tex_move_up=1)
        mat = ob.active_material
        tex = mat.active_texture
        tex.image = img
        tex.use_normal_map = True
        mat.texture_slots[tex.name].use_map_color_diffuse = False
        mat.texture_slots[tex.name].normal_factor = 5
        fu21(mat)
        # Avoid double-shading: mute competing bump/normal influences so
        # only the baked normal map drives the surface detail.
        try:
            for slot in mat.texture_slots:
                try:
                    if slot.use_map_normal:
                        if slot.texture.use_normal_map == False:
                            if slot.use_map_color_diffuse == False:
                                slot.use = False
                except:
                    pass
                try:
                    if slot.use_map_color_diffuse:
                        if slot.use_map_normal:
                            slot.normal_factor = 0
                except:
                    pass
                try:
                    if slot.texture.use_normal_map:
                        if slot.texture.name != mat.active_texture.name:
                            slot.use = False
                except:
                    pass
        except:
            pass
        if re == 'CYCLES':
            scene.zbGoCycles = True
        fu1(mode)
        fu0()
        return{'FINISHED'}
class zbBake(bpy.types.Operator):
    """Main bake operator: bakes the given bake_type for the selection.

    Handles both render engines, optional bake-to-single-map mode
    (shared UV map across objects), multires normal/displacement bakes,
    and traditional high-to-low-poly bakes. The heavy lifting is split
    across fu14-fu21. Restores engine/mode/selection when finished.
    """
    bl_label =i_0[65]
    bl_idname =i_0[66]
    bl_description =i_0[67]
    # Which bake to perform, e.g. 'AO', 'NORMAL', 'TEXTURE', 'MERGE_NORMALS'.
    bake_type = bpy.props.StringProperty(default='')
    def execute(self,context):
        wm = bpy.context.window_manager
        scene = bpy.context.scene
        mode = bpy.context.mode
        renEng = scene.render.engine
        bake_type = self.bake_type
        bt = bake_type
        mat = 0
        # Close any Alchemy vertex-paint session, then validate selection.
        fu19()
        abort, mSize, userMessage = fu20(bake_type)
        selected = bpy.context.selected_objects
        ob = bpy.context.active_object
        mainOb = 0
        origActive = ob.name
        activeTex = 0
        if abort is False:
            # Decide whether this is a multires-style bake (normal/disp/deriv
            # only when a multires modifier exists) or a regular bake.
            multiresNormalBake = False
            if 'NORMAL' in bt or 'DISPLACEMENT' in bt or 'DERIVATIVE' in bt:
                for obj in selected:
                    for mod in obj.modifiers:
                        if mod.type == 'MULTIRES':
                            multiresNormalBake = True
                            break
                    if multiresNormalBake:
                        break
                if bt == 'MERGE_NORMALS':
                    multiresNormalBake = True
            else:
                multiresNormalBake = True
            regularBake = True
            origSelected = 0
            orig_zbBakeSingle_state = scene.zbBakeSingle
            # Objects already baked to a shared map force bake-to-single so
            # everything using that material stays in sync.
            if 'Shared-UVMap' in ob.data.uv_textures:
                scene.zbBakeSingle = True
                sharedMat = ob.active_material
                if ob.active_material:
                    for obj in bpy.data.objects:
                        if obj.active_material == ob.active_material:
                            print(' ',obj.name)
                            obj.select = True
                        else:
                            obj.select = False
                    selected = bpy.context.selected_objects
            else:
                if len(selected) == 1:
                    scene.zbBakeSingle = False
            if scene.zbBakeSingle:
                if ob.data.uv_textures:
                    # fu16 runs the whole bake-to-single pipeline itself.
                    regularBake = False
                    ob = fu16(selected,bt,mSize)
                else:
                    # No UVs yet: unwrap everything into one shared map.
                    if scene.zbUseLightMap:
                        bpy.ops.uv.lightmap_pack(PREF_CONTEXT='ALL_OBJECTS',
                            PREF_PACK_IN_ONE=True,PREF_NEW_UVLAYER=True,
                            PREF_MARGIN_DIV = .3)
                    else:
                        bpy.ops.uv.smart_project(island_margin = 0.03,
                            angle_limit = 45, stretch_to_bounds = False)
                    for obj in selected:
                        obj.data.uv_textures.active.name = 'Shared-UVMap'
            if regularBake:
                # Work on a joined duplicate; originals are hidden and
                # restored (with the baked material) at the end.
                print('Used Regular Bake')
                bpy.ops.object.duplicate()
                origSelected = selected
                for obj in origSelected:
                    obj.hide_render = True
                    obj.hide = True
                if 'NORM' not in bt and 'DISP' not in bt and 'DERIV' not in bt:
                    for obj in bpy.context.selected_objects:
                        scene.objects.active = obj
                        for mod in obj.modifiers:
                            try:
                                bpy.ops.object.modifier_apply(apply_as='DATA',
                                    modifier = mod.name)
                            except:
                                pass
                    bpy.ops.object.join()
                selected = bpy.context.selected_objects
                ob = bpy.context.active_object
            if multiresNormalBake:
                if regularBake:
                    for ob in selected:
                        bakeAllProceed = True
                        if scene.zbBakeSelectedToActive:
                            if ob != bpy.context.active_object:
                                bakeAllProceed = False
                        if bakeAllProceed:
                            layerTypeChoice = 'Color'
                            if 'SPEC' in bt:
                                layerTypeChoice = 'Specular'
                            # Adaptive subdivision breaks baking; restore later.
                            adaptiveState = ob.cycles.use_adaptive_subdivision
                            ob.cycles.use_adaptive_subdivision = False
                            scene.objects.active = ob
                            for obj in bpy.data.objects:
                                if obj != bpy.context.active_object:
                                    obj.select = False
                                else:
                                    obj.select = True
                            if 'NORM' in bt or 'DISP' in bt or 'DERIV' in bt:
                                if bt == 'MERGE_NORMALS':
                                    bpy.ops.object.zb_bump_to_normal()
                                else:
                                    quit = True
                                    for mod in ob.modifiers:
                                        if mod.type == 'MULTIRES':
                                            quit = False
                                            bpy.ops.object.zb_bake_normal()
                                            break
                                    if quit:
                                        userMessage = 'Selected objects must have multires modifiers for this kind of baking.'
                                        print(userMessage)
                            else:
                                img = 0
                                adjustNonZB = False
                                addLayer = True
                                multiMatPlusUV = False
                                selectedToActive = False
                                if len(ob.data.materials) > 1:
                                    # Multi-material object: bake from a
                                    # stripped duplicate onto the original.
                                    selectedToActive = True
                                    if ob.data.uv_textures:
                                        uvMap = ob.data.uv_textures.active
                                        if uvMap.name != 'Shared-UVMap':
                                            multiMatPlusUV = True
                                    else:
                                        fu10(ob)
                                    mainOb = bpy.data.objects[ob.name]
                                    bpy.ops.object.duplicate()
                                    ob = bpy.context.active_object
                                    for m in ob.data.materials:
                                        bpy.ops.object.material_slot_remove()
                                    mainOb.select = True
                                    scene.render.use_bake_selected_to_active = True
                                if ob.active_material:
                                    # Copy non-ZB / shared materials so the
                                    # bake never mutates someone else's setup.
                                    mat = ob.active_material
                                    makeMatCopy = False
                                    if hasattr(mat.node_tree,'nodes'):
                                        nodes = mat.node_tree.nodes
                                        if 'Image Texture zbColor' not in nodes:
                                            makeMatCopy = True
                                    else:
                                        makeMatCopy = True
                                    if mat.users > 1:
                                        makeMatCopy = True
                                    if makeMatCopy:
                                        adjustNonZB = True
                                        mat.use_fake_user = True
                                        matCopy = mat.copy()
                                        ob.active_material = matCopy
                                        matCopy.name = ob.name.strip() + ' Baked'
                                        mat = matCopy
                                        if renEng == 'CYCLES':
                                            mat.name = 'zbConduit'
                                else:
                                    # No material: bake straight into a fresh
                                    # packed image via a conduit node.
                                    addLayer = False
                                    fu10(ob)
                                    w = round(scene.zbImgSize)
                                    h = round(scene.zbImgSizeH)
                                    layerName = ob.name[:4] + 'Color' + bt
                                    img = bpy.data.images.new(layerName, scene.zbImgSize,
                                        scene.zbImgSizeH, alpha= True)
                                    img.pixels[:] = (0, 0, 0, 0) * w * h
                                    override = bpy.context.copy()
                                    override['edit_image'] = img
                                    bpy.ops.image.pack(override, as_png = True)
                                    for uv_face in ob.data.uv_textures.active.data:
                                        uv_face.image = img
                                    if mat.use_nodes is False:
                                        mat.use_nodes = True
                                    nodes = mat.node_tree.nodes
                                    node = nodes.new(type = 'ShaderNodeTexImage')
                                    node.label = 'zbConduit'
                                    node.name = node.label
                                    node.image = img
                                    node.select = True
                                    nodes.active = node
                                imgTexNode = 0
                                if addLayer:
                                    if multiMatPlusUV:
                                        proceed = False
                                        for uv_face in ob.data.uv_textures.active.data:
                                            if uv_face.image:
                                                proceed = True
                                                break
                                        if proceed:
                                            for uv in ob.data.uv_textures:
                                                bpy.ops.mesh.uv_texture_remove()
                                    mat,tex,activeTex,newTexName,imgTexNode,zbConduit_AL = fu15(layerTypeChoice,ob,renEng,bt)
                                # Only keep multires baking if a leveled
                                # multires modifier actually exists.
                                if scene.render.use_bake_multires:
                                    if ob.modifiers:
                                        useMulti = False
                                        for mod in ob.modifiers:
                                            if mod.type == 'MULTIRES':
                                                if mod.levels > 0:
                                                    useMulti = True
                                                else:
                                                    bpy.ops.object.modifier_remove(modifier=mod.name)
                                        if useMulti is False:
                                            scene.render.use_bake_multires = False
                                    else:
                                        scene.render.use_bake_multires = False
                                bk = scene.render
                                cbk = bk.bake
                                if scene.zbUserBakeSettings:
                                    cageExtrude = cbk.cage_extrusion
                                    mSize = cbk.margin
                                    useClear = False
                                else:
                                    cageExtrude = .001
                                    useClear = False
                                if scene.zbBakeSelectedToActive:
                                    selectedToActive = True
                                    bk.use_bake_selected_to_active = True
                                    for obj in selected:
                                        obj.select = True
                                    if scene.zbUserBakeSettings is False:
                                        cageExtrude = 1
                                else:
                                    bk.use_bake_selected_to_active = False
                                # The actual bake call for each engine.
                                if renEng == 'CYCLES':
                                    bpy.ops.object.bake(type=bake_type, margin=mSize,
                                        use_selected_to_active = selectedToActive, use_clear = useClear,
                                        cage_extrusion = cageExtrude)
                                else:
                                    bpy.ops.object.bake_image()
                                if addLayer:
                                    fu15Finish(mat,imgTexNode,activeTex,newTexName,bt,zbConduit_AL)
                                else:
                                    if renEng == 'CYCLES':
                                        if img:
                                            # Turn the conduit image into a
                                            # proper ZB color layer/material.
                                            ob.active_material = None
                                            bpy.ops.object.zb_paint_color()
                                            mat = ob.active_material
                                            mat.name = mat.name.strip() + ' Baked'
                                            tex = mat.active_texture
                                            tex.image = img
                                            tex.name = img.name
                                            imgTexNode = mat.node_tree.nodes['Image Texture zbColor']
                                            imgTexNode.image = img
                                if adjustNonZB:
                                    # Strip procedural textures from copied
                                    # non-ZB materials; only images remain.
                                    i = 0
                                    for ts in mat.texture_slots:
                                        if hasattr(ts,'texture'):
                                            if hasattr(ts.texture,'type'):
                                                if ts.texture.type != 'IMAGE':
                                                    tex = ts.texture
                                                    mat.texture_slots[i].texture = None
                                        i+=1
                                if activeTex:
                                    if bake_type == 'SHADOW' or bake_type == 'AO':
                                        # Shadow/AO layers multiply over color.
                                        slot = mat.texture_slots[activeTex]
                                        slot.blend_type = 'MULTIPLY'
                                        slot.use_map_normal = True
                                        slot.normal_factor = 0
                                if selectedToActive:
                                    # Hand the baked material back to the real
                                    # object and delete the stripped duplicate.
                                    bpy.ops.object.mode_set(mode='OBJECT')
                                    matName = mat.name.split('.')[0]
                                    matName = matName + ' Baked'
                                    mat.name = matName
                                    if mainOb:
                                        scene.objects.active = mainOb
                                        for m in mainOb.data.materials:
                                            bpy.ops.object.material_slot_remove()
                                        mainOb.active_material = mat
                                        mainOb.select = False
                                        scene.objects.active = ob
                                        bpy.ops.object.delete()
                                        scene.objects.active = mainOb
                                        mainOb.select = True
                                        ob = mainOb
                                        if multiMatPlusUV:
                                            proceed = False
                                            for uv_face in ob.data.uv_textures.active.data:
                                                if uv_face.image:
                                                    proceed = True
                                                    break
                                            if proceed:
                                                for uv in ob.data.uv_textures:
                                                    bpy.ops.mesh.uv_texture_remove()
                                            fu10(ob)
                                bpy.ops.object.mode_set(mode='OBJECT')
                                ob.cycles.use_adaptive_subdivision = adaptiveState
                                if selected[-1] != ob:
                                    print('')
                                if scene.zbBakeSelectedToActive:
                                    scene.zbBakeSelectedToActive = False
                                    break
                else:
                    # Traditional high-poly -> low-poly bake: exactly two
                    # objects, always via the Blender Render bake path.
                    if len(selected) == 2:
                        scene.render.engine = 'BLENDER_RENDER'
                        if 'NORMAL' in bake_type:
                            scene.render.bake_type = 'NORMALS'
                        if 'DISPLACEMENT' in bake_type:
                            scene.render.bake_type = 'DISPLACEMENT'
                        if 'DERIVATIVE' in bake_type:
                            scene.render.bake_type = 'DERIVATIVE'
                        scene.render.use_bake_clear = False
                        scene.render.use_bake_multires = False
                        scene.render.use_bake_selected_to_active = True
                        scene.render.bake_distance = 1.5
                        if bt != 'DISPLACEMENT' and bt != 'DERIVATIVE':
                            if not ob.active_material:
                                bpy.ops.object.zb_paint_color()
                            bpy.ops.object.zb_paint_normal()
                        mat = ob.active_material
                        tex = mat.active_texture
                        ts = mat.texture_slots[tex.name]
                        if 'DISPLACEMENT' not in bt:
                            for i in range(16):
                                bpy.ops.object.zb_move_texture(tex_move_up=1)
                            tex.use_normal_map = True
                            ts.normal_factor = 5
                            if bt == 'DERIVATIVE':
                                ts.normal_factor = .4
                        ts.use_map_color_diffuse = False
                        # Guarantee a render-visible lamp exists for the bake.
                        fu18()
                        activeTex = mat.active_texture_index
                        mat.use_textures[activeTex] = False
                        bpy.ops.object.mode_set(mode='OBJECT')
                        bpy.ops.object.bake_image()
                        mat.use_textures[activeTex] = True
                        if 'NORMAL' in bt:
                            fu21(mat)
                            ts.use_map_color_diffuse = False
                            ts.texture.use_normal_map = True
                            ts.normal_factor = 5
                    else:
                        bpy.ops.object.zb_message('INVOKE_DEFAULT',
                            message = 'TRADITIONAL_NORMALS_BAKE')
            if origSelected:
                # Restore hidden originals, give them the baked material,
                # and delete the working duplicate.
                for obj in origSelected:
                    obj.hide_render = False
                    obj.hide = False
                    obj.select = True
                    scene.objects.active = obj
                    for m in obj.data.materials:
                        bpy.ops.object.material_slot_remove()
                    if mat:
                        obj.active_material = mat
                    else:
                        obj.active_material = ob.active_material
                    obj.select = False
                ob.select = True
                scene.objects.active = ob
                bpy.ops.object.delete()
                for obj in origSelected:
                    obj.select = True
                    scene.objects.active = obj
            if bt == 'TEXTURE':
                # After a merge bake, hide the source color layers so only
                # the merged result shows.
                mat = bpy.context.active_object.active_material
                for ts in mat.texture_slots:
                    if hasattr(ts,'texture'):
                        try:
                            tex = ts.texture
                            if 'merged' not in tex.name.lower():
                                if 'color' in tex.image.name.lower():
                                    mat.texture_slots[tex.name].use = False
                        except:
                            pass
            for obj in bpy.context.selected_objects:
                obj.cycles.use_adaptive_subdivision = False
            bpy.ops.object.zb_save_layers(save_only_active=True)
            # Restore engine, selection, active object, and bake-single flag.
            scene.render.engine = renEng
            for ob in selected:
                ob.select = True
            scene.zbBakeSingle = orig_zbBakeSingle_state
            scene.objects.active = bpy.data.objects[origActive]
            if renEng == 'CYCLES':
                scene.zbGoCycles = True
            else:
                scene.zbGoCycles = False
            if renEng == 'CYCLES':
                if scene.zbQuickLights:
                    for ob in bpy.data.objects:
                        if 'zbCyclesLight' in ob.name:
                            ob.hide = True
        fu1(mode)
        if userMessage:
            self.report({'INFO'}, userMessage)
        return{'FINISHED'}
class cl20(bpy.types.Menu):
    """Menu listing every bake type available for the current render engine.

    Each entry invokes the zb_message confirmation operator with the raw
    bake-type identifier as its message.
    """
    bl_label =i_0[68]
    bl_idname =i_0[69]
    description = "Select a bake type to bake"
    def draw(self, context):
        scene = bpy.context.scene
        renEng = scene.render.engine
        layout = self.layout
        if not bpy.context.active_object:
            layout.label('There is no active object to bake')
            return
        # Pick the bake-type list that matches the active engine.
        if renEng == 'CYCLES':
            options = ['COMBINED', 'AO', 'SHADOW', 'NORMAL', 'UV', 'EMIT',
                'ENVIRONMENT', 'DIFFUSE', 'GLOSSY', 'TRANSMISSION', 'SUBSURFACE']
        else:
            options = ['FULL', 'AO', 'SHADOW', 'NORMALS', 'TEXTURE',
                'DISPLACEMENT', 'DERIVATIVE', 'VERTEX_COLORS', 'EMIT', 'ALPHA',
                'MIRROR_INTENSITY', 'MIRROR_COLOR', 'SPEC_INTENSITY', 'SPEC_COLOR']
        # Friendlier display names for a few special entries.
        prettier = {'AO': 'Ambient Occlusion',
                    'Full': 'Full Material Bake',
                    'Combined': 'Full Bake (combined)',
                    'Texture': 'Textures (merge)'}
        for type in options:
            caption = type.replace('_', ' ').title()
            if len(caption) < 3:
                caption = caption.upper()
            caption = prettier.get(caption, caption)
            layout.operator('object.zb_message', text=caption).message = type
def fu14():
    """Equalize multires usage across the selection before a BR bake.

    If any selected object carries a multires modifier, multires baking
    is enabled and every selected object that lacks one gets a multires
    modifier with a single subdivision, so all objects bake compatibly.
    The original active object is restored afterwards.
    """
    scene = bpy.context.scene
    selected = bpy.context.selected_objects
    origActive = bpy.context.active_object.name
    # Enable multires baking only if at least one object actually uses it.
    scene.render.use_bake_multires = False
    for obj in selected:
        for mod in obj.modifiers:
            if mod.type == 'MULTIRES':
                scene.render.use_bake_multires = True
                break
        if scene.render.use_bake_multires:
            break
    if scene.render.use_bake_multires:
        for obj in selected:
            scene.objects.active = obj
            needsMultires = True
            if obj.modifiers:
                for mod in obj.modifiers:
                    if mod.type == 'MULTIRES':
                        needsMultires = False
                        break
            if needsMultires:
                # Add a single-level multires so this object can bake
                # alongside the objects that already have one.
                mod = obj.modifiers.new(name='Multires', type='MULTIRES')
                bpy.ops.object.multires_subdivide(modifier=mod.name)
            # Collapse the modifier panel to keep the UI stack tidy.
            for mod in obj.modifiers:
                if mod.type == 'MULTIRES':
                    mod.show_expanded = False
                    break
            if obj == selected[-1]:
                print('')
    scene.objects.active = bpy.data.objects[origActive]
def fu15(layerTypeChoice,ob,renEng,bt):
    """Create the destination ZB layer for a bake and prep it to receive
    the result; fu15Finish() undoes the temporary state afterwards.

    Adds a new layer of the requested type, renames its texture/image after
    the bake type, hides the slot during baking and, in Cycles, mutes the
    real color node while an extra 'zbConduit' image node becomes the
    active bake target. Returns everything fu15Finish needs.
    """
    # texLayerAdder is the shared ZB layer factory (defined elsewhere).
    texLayerAdder(layerType=layerTypeChoice, texCol = 1,
        texOpas = 0, alphaChoice = True, normalChoice = True)
    mat = ob.active_material
    tex = mat.active_texture
    newTexName = bt
    if newTexName == 'TEXTURE':
        # A 'TEXTURE' bake is ZB's merge-layers operation.
        newTexName = 'MERGED'
    newTexName = tex.name.split('.')[0] + ' ' + newTexName
    tex.name = newTexName
    tex.image.name = newTexName
    # Disable the slot so the new layer doesn't feed back into its own bake.
    activeTex = mat.active_texture_index
    mat.use_textures[activeTex] = False
    zbConduit_AL = 0
    imgTexNode = 0
    if renEng == 'CYCLES':
        # Cycles bakes into the selected/active image node: mute the real
        # color node and make a throwaway conduit node the bake target.
        nodes = mat.node_tree.nodes
        imgTexNode = nodes['Image Texture zbColor']
        imgTexNode.mute = True
        for node in nodes:
            node.select = False
        zbConduit_AL = nodes.new(type = 'ShaderNodeTexImage')
        zbConduit_AL.label = 'zbConduit'
        zbConduit_AL.name = zbConduit_AL.label
        zbConduit_AL.image = tex.image
        zbConduit_AL.select = True
        nodes.active = zbConduit_AL
    return mat,tex,activeTex,newTexName,imgTexNode,zbConduit_AL
def fu15Finish(mat,imgTexNode,activeTex,newTexName,bt,zbConduit_AL):
    """Undo fu15()'s temporary bake state and finalize the new layer.

    Re-enables the layer slot hidden during baking, removes the Cycles
    conduit node, flips spec-bake slots to specular influence, and for
    merged bakes hides every other image color layer.
    """
    mat.use_textures[activeTex] = True
    if imgTexNode:
        # Cycles path: restore the real color node, drop the conduit.
        imgTexNode.mute = False
        mat.node_tree.nodes.remove(zbConduit_AL)
    if 'SPEC' in bt:
        slot = mat.texture_slots[mat.active_texture_index]
        slot.use_map_color_diffuse = False
        slot.use_map_normal = False
        slot.use_map_color_spec = True
    if 'MERGED' in newTexName:
        # Hide the source color layers so only the merged result shows.
        for index, slot in enumerate(mat.texture_slots):
            if not hasattr(slot, 'texture'):
                continue
            if slot.texture is None:
                continue
            if slot.texture.type != 'IMAGE':
                continue
            if slot.texture == mat.active_texture:
                continue
            if not slot.texture.image:
                continue
            if 'Color' in slot.texture.image.name:
                mat.texture_slots[index].use = False
def fu16(selected,bt,mSize):
    """Bake-to-single pipeline: bake all selected objects onto one shared
    UV map / image / material.

    Builds a joined 'Bake Source Object' duplicate to bake from, unwraps
    the originals into a 'Shared-UVMap' when needed, performs the bake
    (normal/displacement bakes go through the BR multires path, everything
    else through fu15/fu15Finish), then hands the result back to the
    originals and deletes the temporary objects.
    """
    bpy.ops.object.mode_set(mode='OBJECT')
    scene = bpy.context.scene
    renEng = scene.render.engine
    ob = bpy.context.active_object
    origMat = ob.active_material
    origSelected = selected
    origActive = ob.name
    # First bake to a shared map? Then a new material must be built.
    firstMultiUVBake = False
    if 'Shared-UVMap' not in ob.data.uv_textures:
        firstMultiUVBake = True
    useNormal = False
    if 'NORM' in bt or 'DISP' in bt or 'DERIV' in bt:
        useNormal = True
    # Duplicate + join the selection into the bake source object.
    bpy.ops.object.duplicate()
    fu17(useNormal)
    artObject = bpy.context.active_object
    artObject.name = 'Bake Source Object'
    artObject.select = False
    needsUnwrap = True
    # Strip materials and stale UV maps from the originals.
    for obj in origSelected:
        obj.select = True
        scene.objects.active = obj
        for mat in obj.data.materials:
            bpy.ops.object.material_slot_remove()
        for uvMap in obj.data.uv_textures:
            if 'Shared-UVMap' in uvMap.name:
                needsUnwrap = False
            else:
                bpy.ops.mesh.uv_texture_remove()
    scene.objects.active = bpy.data.objects[origActive]
    if needsUnwrap:
        if scene.zbUseLightMap:
            bpy.ops.uv.lightmap_pack(PREF_CONTEXT='ALL_OBJECTS',
                PREF_PACK_IN_ONE=True,PREF_NEW_UVLAYER=True,
                PREF_MARGIN_DIV = .3)
        else:
            bpy.ops.uv.smart_project(island_margin = 0.03,
                angle_limit = 45, stretch_to_bounds = False)
        for obj in bpy.context.selected_objects:
            obj.data.uv_textures.active.name = 'Shared-UVMap'
    if useNormal:
        scene.objects.active = artObject
    else:
        # Non-normal bakes need a second joined duplicate as bake target.
        bpy.ops.object.duplicate()
        fu17(useNormal)
        bakeProxy = bpy.context.active_object
    ob = bpy.context.active_object
    for obj in origSelected:
        obj.hide = True
        obj.hide_render = True
    artObject.select = True
    if useNormal:
        # Normal/displacement bakes always run on the BR engine.
        scene.render.engine = 'BLENDER_RENDER'
        if bt == 'MERGE_NORMALS':
            bpy.ops.object.zb_bump_to_normal()
        else:
            bpy.ops.object.zb_bake_normal()
    else:
        layerTypeChoice = 'Color'
        if 'SPEC' in bt:
            layerTypeChoice = 'Specular'
        mat,tex,activeTex,newTexName,imgTexNode,zbConduit_AL = fu15(layerTypeChoice,ob,renEng,bt)
        if renEng == 'CYCLES':
            bpy.ops.object.bake(type=bt, margin=mSize,
                use_selected_to_active = True, use_clear = False,
                cage_extrusion = .001)
        else:
            scene.render.use_bake_selected_to_active = True
            scene.render.use_bake_multires = False
            for obj in bpy.context.selected_objects:
                for mod in obj.modifiers:
                    if mod.type == 'MULTIRES':
                        scene.render.use_bake_multires = True
                        break
                if scene.render.use_bake_multires:
                    break
            bpy.ops.object.bake_image()
        fu15Finish(mat,imgTexNode,activeTex,newTexName,bt,zbConduit_AL)
    if firstMultiUVBake:
        # First shared bake: originals adopt the freshly baked material.
        print('Adjusting Materials For First (Bake To Single)')
        finalMat = bakeProxy.active_material
        finalMat.name = ob.name.split('.')[0] + ' Baked'
        for obj in origSelected:
            obj.active_material = finalMat
            obj.hide = False
            obj.hide_render = False
            obj.select = False
        bpy.ops.object.mode_set(mode='OBJECT')
        bpy.ops.object.delete()
        for obj in origSelected:
            scene.objects.active = obj
            obj.select = True
    else:
        # Re-bake onto an existing shared material: splice the new baked
        # layer into the originals' material and clean up the proxies.
        print('Adjusting Materials For (Bake To Single)')
        for obj in origSelected:
            scene.objects.active = obj
            obj.select = False
            obj.hide = False
            obj.hide_render = False
            obj.active_material = origMat
        scene.objects.active = bpy.data.objects[origActive]
        if useNormal:
            bpMat = artObject.active_material
            if 'DISP' not in bt and 'DERIV' not in bt:
                texIndex = mat.active_texture_index
                activeSlot = mat.texture_slots[texIndex]
                activeSlot.use_map_color_diffuse = False
                activeSlot.use_map_normal = True
                activeSlot.normal_factor = 5
                fu21(mat)
        else:
            bpMat = bakeProxy.active_material
        # Find the first empty slot to hold the transplanted texture.
        i=0
        ob = bpy.context.active_object
        for ts in ob.active_material.texture_slots:
            if hasattr(ts,'texture') is False:
                ob.active_material.active_texture_index = i
                break
            i+=1
        mat = ob.active_material
        mat.active_texture = bpMat.active_texture
        tex = ob.active_material.active_texture
        img = tex.image
        if 'SHADOW' in bt or 'AO' in bt:
            slot = mat.texture_slots[tex.name]
            slot.blend_type = 'MULTIPLY'
            slot.use_map_normal = True
            slot.normal_factor = 0
        # Slide the new layer to its place in the stack.
        for ts in mat.texture_slots:
            if useNormal:
                bpy.ops.object.zb_move_texture(tex_move_up=1)
            else:
                bpy.ops.object.zb_move_texture(tex_move_down=1)
        bpy.ops.object.zb_set_active_layer(tex_index=mat.active_texture_index)
        if useNormal is False:
            bakeProxy.select = True
            bpMat.user_clear()
        artObject.select = True
        bpy.ops.object.mode_set(mode='OBJECT')
        bpy.ops.object.delete()
        for obj in origSelected:
            obj.select = True
        scene.objects.active = bpy.data.objects[origActive]
def fu17(useNormal):
    """Flatten a duplicated selection into one joined bake object.

    Applies every applicable modifier on each selected object; when
    useNormal is True, multires modifiers are preserved (adding one where
    missing) because the multires normal bake needs them. Ends by joining
    the selection into the active object.
    """
    scene = bpy.context.scene
    selected = bpy.context.selected_objects
    origActive = bpy.context.active_object.name
    if useNormal:
        for obj in selected:
            scene.objects.active = obj
            needMulti = True
            for mod in obj.modifiers:
                if mod.type == 'MULTIRES':
                    # Keep multires: the normal bake reads from it.
                    needMulti = False
                else:
                    # Best effort: some modifiers can't be applied.
                    try:
                        bpy.ops.object.modifier_apply(apply_as='DATA',
                            modifier = mod.name)
                    except:
                        pass
            if needMulti:
                bpy.ops.object.modifier_add(type='MULTIRES')
    else:
        for obj in selected:
            scene.objects.active = obj
            for mod in obj.modifiers:
                try:
                    bpy.ops.object.modifier_apply(apply_as='DATA',
                        modifier = mod.name)
                except:
                    pass
    scene.objects.active = bpy.data.objects[origActive]
    bpy.context.active_object.select = True
    bpy.ops.object.join()
def fu18():
    """Ensure at least one render-visible lamp exists for BR bake types
    that require scene lighting (e.g. FULL, SHADOW).

    Creates a hemi lamp if none exist, otherwise unhides the first lamp
    found when every lamp is hidden from rendering.
    """
    if len(bpy.data.lamps) < 1:
        newLamp = bpy.data.lamps.new(name='Basic Lamp',type ='HEMI')
        # BUGFIX: a lamp datablock alone never renders — it must be wrapped
        # in an object and linked into the scene (2.7x API).
        lampOb = bpy.data.objects.new(name='Basic Lamp', object_data=newLamp)
        bpy.context.scene.objects.link(lampOb)
        # The explanatory message was previously composed but never shown.
        txt1 = 'No lamps were found in the scene, so one was created '
        txt2 = 'because it was required for this type of baking process.'
        print(txt1 + txt2)
    else:
        visibleLamp = False
        for ob in bpy.data.objects:
            if ob.type == 'LAMP':
                if ob.hide_render is False:
                    visibleLamp = True
                    break
        if visibleLamp == False:
            # All lamps hidden from render: re-enable the first one.
            for ob in bpy.data.objects:
                if ob.type == 'LAMP':
                    ob.hide_render = False
                    break
def fu19():
    """Cleanly finish any active Alchemy material-library vertex-paint
    session before a bake starts, best effort.

    BUGFIX: the original referenced an undefined name ``mode`` which
    raised NameError inside the bare try/except, so the finish step
    silently never ran. ``mode`` is now taken from bpy.context.
    """
    wm = bpy.context.window_manager
    try:
        default, state = addon_utils.check("Alchemy")
        if state:
            ob = bpy.context.active_object
            mode = bpy.context.mode  # was undefined before this fix
            if wm.sfMatLibOn:
                if mode == 'PAINT_VERTEX':
                    if ob.active_material:
                        if mode != 'OBJECT':
                            bpy.ops.object.sf_matlib_mat_paint(func="finish_painting")
                            wm.sfMatLibOn = False
                            # Drop the ' Painted' suffix Alchemy appended.
                            if ' Painted' in ob.data.materials[0].name:
                                mName = ob.data.materials[0].name
                                ob.data.materials[0].name = mName[:-8]
    except:
        # Deliberate best effort: Alchemy may be absent or mid-state.
        pass
def fu20(bake_type):
    """Validate the selection and configure bake settings before a bake.

    Deselects objects that can't be baked (no materials, no UVs, no
    faces), converts convertible non-mesh types, aborts on group
    instances or unbakeable types, computes the bake margin, and applies
    common BR bake settings.

    Returns (abort, mSize, userMessage).
    """
    bpy.ops.object.mode_set(mode='OBJECT')
    selected = bpy.context.selected_objects
    scene = bpy.context.scene
    wm = bpy.context.window_manager
    renEng = scene.render.engine
    mode = bpy.context.mode
    if scene.zbFastMode:
        scene.zbFastMode = False
    if mode == 'PAINT_TEXTURE':
        bpy.ops.object.zb_save_layers(save_only_active=True)
    if renEng == 'CYCLES':
        # Quick lights must be visible (but unselected) to light the bake.
        if scene.zbQuickLights:
            for obj in bpy.data.objects:
                if 'zbCyclesLight' in obj.name:
                    obj.hide = False
                    obj.select = False
    faces = 0
    abort = False
    userMessage = ''
    # Non-mesh types that can be converted to mesh for baking.
    mType = ['SURFACE','FONT','META','CURVE']
    for obj in selected:
        # Drop objects that can't hold materials or UV maps.
        try:
            if hasattr(obj.data,'materials') is False:
                print(obj.name,'Does not use materials. Deselecting for bake.')
                obj.select = False
            if obj.select != False:
                if hasattr(obj.data,'uv_textures') is False:
                    print(obj.name,'Does not use uv mapping. Deselecting for bake.')
                    obj.select = False
        except:
            pass
        # Abort outright on types that can never bake (group instances,
        # lamps, cameras, etc.).
        try:
            badEgg = True
            for m in mType:
                if m not in obj.type:
                    badEgg = False
            if badEgg:
                if obj.dupli_type == 'GROUP':
                    userMessage = "Group instances must be made unique before baking"
                    abort = True
                    break
                else:
                    userMessage = obj.type + ' type objects can not be baked'
                    abort = True
                    break
        except:
            abort = True
        # Convert convertible types to mesh; drop empty meshes.
        try:
            if obj.type != 'MESH':
                for m in mType:
                    if m in obj.type:
                        bpy.ops.object.sf_convert()
                        break
            if obj.type != 'MESH':
                obj.select = False
            else:
                if len(obj.data.polygons) < 1:
                    obj.select = False
                else:
                    faces += len(obj.data.polygons)
        except:
            abort = True
        # Solidify would double surfaces in the bake; hide it.
        try:
            if obj.modifiers:
                for mod in obj.modifiers:
                    if mod.type == 'SOLIDIFY':
                        mod.show_render = False
                        mod.show_viewport = False
        except:
            pass
    if len(bpy.context.selected_objects) < 1:
        abort = True
    if abort:
        if userMessage == '':
            userMessage = 'Can not bake'
    if renEng == 'BLENDER_RENDER':
        if bake_type == 'MERGE_NORMALS':
            bake_type = 'NORMALS'
        scene.render.bake_type = bake_type
    # Margin scales with image size unless the user overrides it.
    if scene.zbUserBakeSettings:
        mSize = scene.render.bake.margin
    else:
        mSize = 5
        if scene.zbImgSize < 513:
            mSize = 2
        if scene.zbImgSize > 1023:
            mSize = 8
        if scene.zbImgSize > 2047:
            mSize = 12
    scene.render.bake_bias = .015
    scene.render.use_bake_clear = False
    scene.render.bake_margin = mSize
    scene.render.bake_distance = 1
    scene.render.bake_samples = 512
    if scene.zbBakeSelectedToActive is False:
        if renEng != 'CYCLES':
            if scene.zbBakeSingle:
                # Equalize multires usage across the selection.
                fu14()
    return abort, mSize, userMessage
def fu21(mat):
    """Wire the material's active normal texture into its Cycles node tree.

    Reuses an existing 'zbNormal' node pair when present; otherwise builds
    an Image Texture + Normal Map node chain feeding the Diffuse BSDF's
    Normal input. Toggling zbGoCycles forces ZB's node setup to exist
    when the material lacks the expected nodes.
    """
    scene = bpy.context.scene
    ob = bpy.context.active_object
    tex = mat.active_texture
    needNodes = True
    if hasattr(mat.node_tree,'nodes'):
        if 'Normal Map zbNormal' in mat.node_tree.nodes:
            # Node pair already exists: just swap in the new image.
            mat.node_tree.nodes['Image Texture zbNormal'].image = tex.image
            needNodes = False
        if 'Diffuse BSDF zbColor' not in mat.node_tree.nodes:
            # Missing ZB base nodes: trigger ZB's Cycles conversion, then
            # return to Blender Render.
            if scene.render.engine == 'BLENDER_RENDER':
                scene.zbGoCycles = True
                scene.render.engine = 'BLENDER_RENDER'
    else:
        if scene.render.engine == 'BLENDER_RENDER':
            scene.zbGoCycles = True
            scene.render.engine = 'BLENDER_RENDER'
    if needNodes:
        # Build the Image Texture -> Normal Map -> Diffuse BSDF chain.
        node = mat.node_tree.nodes.new(type = 'ShaderNodeTexImage')
        node.label = node.name + ' zb' + 'Normal'
        node.name = node.label
        nodeTex = node
        nodeTex.color_space = 'NONE'
        nodeTex.image = tex.image
        node = mat.node_tree.nodes.new(type = 'ShaderNodeNormalMap')
        node.label = node.name + ' zb' + 'Normal'
        node.name = node.label
        nodeNormal = node
        nodeNormal.uv_map = 'UVMap'
        nodeNormal.inputs[0].default_value = 5
        nodeDifCol = mat.node_tree.nodes['Diffuse BSDF zbColor']
        mat.node_tree.links.new(nodeTex.outputs['Color'], nodeNormal.inputs['Color'])
        mat.node_tree.links.new(nodeNormal.outputs['Normal'], nodeDifCol.inputs['Normal'])
        mat.node_tree.nodes[nodeTex.name].hide = True
        mat.node_tree.nodes[nodeNormal.name].hide = True
        nodeTex.location = (120, -105)
        nodeNormal.location = (120, -160)
    # Point the node at the object's actual active UV map, best effort.
    try:
        normalMapNode = mat.node_tree.nodes['Normal Map zbNormal']
        normalMapNode.uv_map = ob.data.uv_textures.active.name
    except:
        pass
class cl21(bpy.types.Operator):
    """Thin forwarding operator: relays its msg to the zb_message popup.

    Always returns CANCELLED so it never registers an undo step.
    """
    bl_idname =i_0[70]
    bl_label =i_0[71]
    bl_description =i_0[72]
    msg = bpy.props.StringProperty()
    def execute(self,context):
        message = self.msg
        bpy.ops.object.zb_message('INVOKE_DEFAULT', message=message)
        return {'CANCELLED'}
def fu22(message):
    """Dispatch a confirmed popup message to the operator it names.

    Bake-type messages run zb_bake directly; the merge/brush pseudo-types
    map to their corresponding operators.
    """
    # Union of every BR and Cycles bake type the popup can confirm.
    bakeable = {
        'FULL', 'AO', 'SHADOW', 'NORMALS', 'TEXTURE',
        'DISPLACEMENT', 'DERIVATIVE', 'VERTEX_COLORS', 'EMIT', 'ALPHA',
        'MIRROR_INTENSITY', 'MIRROR_COLOR', 'SPEC_INTENSITY', 'SPEC_COLOR',
        'COMBINED', 'NORMAL', 'UV', 'ENVIRONMENT', 'DIFFUSE', 'GLOSSY',
        'TRANSMISSION', 'SUBSURFACE',
    }
    if message in bakeable:
        bpy.ops.object.zb_bake(bake_type=message)
    if message == "MERGE_NORMALS":
        bpy.ops.object.zb_bake(bake_type='MERGE_NORMALS')
    if message == "MERGE_LAYERS":
        bpy.ops.object.zb_bake(bake_type='TEXTURE')
    if message == 'LAYER_FROM_BRUSH':
        bpy.ops.object.zb_layer_from_brush(action='START')
class cl22(bpy.types.Operator):
    """Confirmation dialog shown before bake/merge/layer operations.

    draw() renders a context-specific explanation for the pending
    operation; confirming (execute) dispatches via fu22. invoke() skips
    the dialog entirely when the user disabled popups.
    """
    bl_idname =i_0[73]
    bl_label =i_0[74]
    bl_description =i_0[75]
    # The pending operation's identifier (bake type or pseudo-message).
    message = bpy.props.StringProperty()
    def execute(self, context):
        # User confirmed the dialog: run the requested operation.
        fu22(self.message)
        return {'FINISHED'}
    def draw(self, context):
        scene = bpy.context.scene
        renEng = scene.render.engine
        message = self.message
        layout = self.layout
        sub = layout.column()
        sub.scale_y = 0.75
        brBakeTypes = ['FULL', 'AO', 'SHADOW', 'NORMALS', 'TEXTURE',
            'DISPLACEMENT', 'DERIVATIVE', 'VERTEX_COLORS', 'EMIT', 'ALPHA',
            'MIRROR_INTENSITY', 'MIRROR_COLOR', 'SPEC_INTENSITY', 'SPEC_COLOR']
        cyBakeTypes = ['COMBINED', 'AO', 'SHADOW', 'NORMAL', 'UV', 'EMIT',
            'ENVIRONMENT', 'DIFFUSE', 'GLOSSY', 'TRANSMISSION', 'SUBSURFACE']
        # Whether the active object's normal bake would use multires.
        useMultires = False
        if 'NORMAL' in message:
            try:
                ob = bpy.context.active_object
                for mod in ob.modifiers:
                    if mod.type == 'MULTIRES':
                        useMultires = True
                        break
            except:
                pass
        if message in brBakeTypes or message in cyBakeTypes:
            # Show a specific explanation when one applies, otherwise the
            # generic bake notice at the bottom.
            defaultMessage = True
            if renEng == 'CYCLES':
                if message == 'SHADOW':
                    defaultMessage = False
                    sub.label('"SHADOW" BAKE:',icon='SOLO_ON')
                    sub.label('Shadow maps baked in Cycles can only use light from')
                    sub.label('actual Blender lamps to create shadows (this is default')
                    sub.label('Blender behaviour). Light from your environment or')
                    sub.label('materials using emission will not render in the bake.')
                    sub.label('')
                    sub.label('Press Shift-A > Lamp and add a lamp for shadow bakes')
                    sub.label('to work. Or else perform a full bake (combined) to use')
                    sub.label('regular Cycles lighting in your bake.')
            if message == 'ENVIRONMENT':
                defaultMessage = False
                sub.label('"ENVIRONMENT" BAKE:',icon='SOLO_ON')
                sub.label('This type of bake will only bake light coming from')
                sub.label('your world environment on to your selection. So if')
                sub.label('you have an environment image attached to your world')
                sub.label('output such as clouds, this will show up on what ever')
                sub.label('you bake.')
            if message == 'VERTEX_COLORS':
                defaultMessage = False
                sub.label('"VERTEX COLORS" BAKE:',icon='SOLO_ON')
                sub.label("This will bake the object's vertex colors to an")
                sub.label("image. If you haven't painted vertex colors onto")
                sub.label('an object, then the bake will appear totally black.')
            if message == 'DISPLACEMENT':
                defaultMessage = False
                sub.label('"DISPLACEMENT" BAKE:',icon='SOLO_ON')
                sub.label('The baked displacement map will not be integrated')
                sub.label("into the default ZB layer system. To use this map")
                sub.label('after it is baked, add a displace modifier to a similar')
                sub.label('shaped object as the one baked. Then set the displace')
                sub.label("modifier's texture to the baked image.")
                sub.label('')
                sub.label('The object must have a lot of faces for the baked map')
                sub.label('to work or else first add a subsurface modifier to the')
                sub.label('object before adding the displace modifier. If the')
                sub.label('object is using the same uv map as the one baked, be')
                sub.label('sure to set the modifiers "Tex Coordinates" to "UV".')
            if scene.zbBakeSingle:
                if defaultMessage is False:
                    sub.label('')
                defaultMessage = False
                if useMultires == False:
                    sub.label('"Bake To Single" is activated. All of the selected',
                        icon='SOLO_ON')
                    sub.label('objects will bake to a single map and material. Since')
                    sub.label('these objects share a map, their resolution will lower')
                    sub.label('so you may wish to increase the "New Layer Size" in')
                    sub.label('the settings before baking.')
                    sub.label('')
                    sub.label('When the bake is finished, all selected objects will')
                    sub.label('posses the same material (though the bake will reflect')
                    sub.label('the materials they were using). To bake multiple objects')
                    sub.label('(but bake them to individual maps & materials), uncheck')
                    sub.label('the "Bake To Single" option in the ZB options menu.')
                try:
                    ob = bpy.context.active_object
                    if ob.data.uv_textures:
                        if 'Shared-UVMap' not in ob.data.uv_textures.active.name:
                            sub.label('')
                            sub.label('The objects you are about to bake have uv maps.',icon ='SOLO_ON')
                            sub.label('Baking them to one image will construct a new merged')
                            sub.label("uv-map for all objects in the selection. If you manually")
                            sub.label('unwrapped these objects and intend to paint them in')
                            sub.label('other software, it would probably be best to disable')
                            sub.label('"Bake To Single" before baking so that the objects being')
                            sub.label('baked keep their own uv maps, images and materials.')
                except:
                    pass
            if 'NORMAL' in message:
                defaultMessage = False
                try:
                    ob = bpy.context.active_object
                    if ob.data.uv_textures:
                        if 'Shared-UVMap' not in ob.data.uv_textures.active.name:
                            if len(bpy.context.selected_objects) > 1:
                                sub.label('')
                except:
                    pass
                # Normal bake needs either no material or a ZB-managed one.
                abortNormalBake = False
                if ob.active_material:
                    if not ob.data.uv_textures:
                        abortNormalBake = True
                if abortNormalBake:
                    sub.label('You can bake normals for objects with no materials',
                        icon='ERROR')
                    sub.label('or objects that have already been baked or painted')
                    sub.label('but not objects with basic materials which have not')
                    sub.label('been either ZB painted or baked first. Switch to object')
                    sub.label('mode and perform a full bake first.')
                    # Turn the confirm into a no-op.
                    self.message = 'CANCEL'
                else:
                    if useMultires:
                        sub.label('This will convert the sculpt detail of your mesh',icon='SOLO_ON')
                        sub.label('into a normal map that will be applied to your')
                        sub.label('material and be visible in both BR & Cycles. This')
                        sub.label("method of normal baking uses an object's multires")
                        sub.label('modifier to bake from.')
                    else:
                        sub.label('This will bake detail from the high-poly mesh onto the',icon='SOLO_ON')
                        sub.label('low-poly mesh. Make sure the objects are in the same')
                        sub.label('position and that the high-poly mesh was selected first')
                        sub.label('then shift-select the object you want the detail baked to.')
                        sub.label('')
                        sub.label(' When the bake is complete you can press the ZB tex-')
                        sub.label('-ture paint button to enter texture paint mode and')
                        sub.label('adjust the normal value of the layer. You can control')
                        sub.label('the size of the normal map by adjusting the "New Layer')
                        sub.label('Width & Height" values in the options before you bake.')
            if defaultMessage:
                sub.label('Baking is a rendering process which may take a few')
                sub.label('minutes to up to an hour depending on the size. You')
                sub.label('can control the size of the bake image by adjusting')
                sub.label('the "New Layer Width & Height" values in the options')
                sub.label('menu as well as the "Bake Quality" before baking.')
            sub.label('')
            sub.prop(scene,'zbDisablePopup',text='Disable Messages')
            sub.label('(enable in options menu)')
            sub.label('')
            sub.label('PROCEED WITH BAKE?')
        if message == 'TRADITIONAL_NORMALS_BAKE':
            sub.label('You must first select the high-poly model you want',
                icon='ERROR')
            sub.label('to acquire the detail from, then shift-select the')
            sub.label('lower-poly model you want to bake the map onto')
            sub.label('before performing a "Normal","Displacement" or')
            sub.label('"Derivative" bake. The two objects should also be')
            sub.label('in the same location (or very close).')
            sub.label('')
            sub.label('You can only bake one map at a time when using')
            sub.label('this traditional method. This means you should')
            sub.label('only have two objects selected. You can also bake')
            sub.label('a normal or displacement map for objects using')
            sub.label("multires-modifiers. In that case, you would only")
            sub.label('have to select the object with the multires')
            sub.label('modifier before baking and you could bake multiple')
            sub.label('objects at a time.')
            sub.label('')
        if message == 'MERGE_NORMALS':
            sub.label("This will merge (and convert) all of the material's")
            sub.label("bump & normal layers into a new normal map (includ-")
            sub.label('-ing bump effects from color layers). This is a bake')
            sub.label('process which may take a few minutes or longer.')
            sub.label('')
            sub.label('Use the "New Layer Size" option to define how big you')
            sub.label('want the map to be, or exclude layers from the merge')
            sub.label('by clicking the eye icon next to them (while in texture')
            sub.label('paint mode) to hide them before baking. No layers will')
            sub.label('be deleted, just a new one created.')
            sub.label('')
            sub.label('PROCEED WITH MERGER?')
        if message == 'MERGE_LAYERS':
            sub.label("This will merge all of the material's color (diffuse),")
            sub.label("layers into a new single color layer. You can use the")
            sub.label('"New Layer Size" option to define how big you want')
            sub.label('the new layer to be before merging, or exclude layers')
            sub.label('by clicking the eye icon next to them to hide them.')
            sub.label('')
            sub.label('Once finished, the layers used to create the merged')
            sub.label('layer will be hidden displaying only the merged layer.')
            sub.label('No layers will be deleted, just a new one created.')
            sub.label('This process may take a few minutes to complete.')
            sub.label('')
            sub.label('PROCEED WITH MERGER?')
        if message == 'LAYER_FROM_BRUSH':
            sub.label('If you have a texture brush loaded, this option will')
            sub.label('allow you to create a new layer from the brush image.')
            sub.label('This is especially useful when using seamless texture')
            sub.label('brushes like those found at blendersensei.com')
            sub.label('')
            sub.label('This happens in two parts. The first part lets you')
            sub.label('adjust the position and scale of the repeating image.')
            sub.label('There is also a button to re-apply the brush image')
            sub.label('from your current view. When you are satisfied with')
            sub.label('the results press "Finish Brush Layer" to bake the')
            sub.label('final image as a new, regular Zero Brush layer.')
            sub.label('')
            sub.label('NOTE: If you are in Cycles, you will be temporarily')
            sub.label('transported to Blender Render while adjusting your')
            sub.label('new layer. You will return to your previous render')
            sub.label('engine after pressing "Finish Brush Layer".')
            sub.label('')
            sub.label('CREATE A LAYER FROM THE BRUSH IMAGE?')
    def invoke(self, context, event):
        scene = bpy.context.scene
        if scene.zbDisablePopup:
            # Popups disabled: run immediately, no dialog.
            fu22(self.message)
            return {'FINISHED'}
        else:
            width = 325
            height = bpy.context.window.height/2
            return context.window_manager.invoke_props_dialog(self, width, height)
class cl23(bpy.types.Menu):
    # Menu of Zero Brush system/behavior toggles ("ZB Options").
    # bl_label / bl_idname come from the obfuscated string table i_0.
    bl_label = i_0[76]
    bl_idname = i_0[77]

    def draw(self, context):
        """Draw a single column of add-on option checkboxes."""
        scene = bpy.context.scene
        re = scene.render.engine          # unused here; kept as-is
        wm = bpy.context.window_manager   # unused here; kept as-is
        layout = self.layout
        col = layout.column()
        col.prop(scene, 'zbAutoSaveLayers', text= 'Autosave Layers')
        col.prop(scene, 'zbSaveWhenSave', text= 'Autosave (with file)')
        col.prop(scene, 'zbPrecisePaintOption', text = 'Precise Paint Mode')
        col.prop(scene, 'zbDistractionFreeMode', text = 'Distraction Free')
        col.prop(scene, "zbDisableShadows", text = "Disable Shadows")
        col.prop(scene, 'zbDisablePopup', text='Disable Messages')
        col.prop(scene, 'zbUserBakeSettings')
        col.prop(scene, 'zbUseLightMap', text = 'Use Lightmap UVs')
        col.prop(scene, 'zbLoadImgSculpt', text = 'Texture & Sculpt')
        col.prop(scene, 'zbAutoConvertCycles', text = 'Auto Convert Cycles')
        col.prop(scene, 'zbAutoConvertLamps', text= 'Auto Convert Lamps')
        col.prop(scene, 'zbAutoGPURender')
        # Only expose the Alchemy toggle when the Alchemy add-on is enabled.
        default, state = addon_utils.check("Alchemy")
        if state:
            col.prop(scene, 'zbAState', text='Alchemy Paint')
class cl24(bpy.types.Menu):
    # Main Zero Brush utility menu: options, baking, and layer tools.
    bl_label = i_0[78]
    bl_idname = i_0[79]

    def draw(self, context):
        """Draw the ZB Options / bake / layer-options column."""
        scene = bpy.context.scene
        re = scene.render.engine
        wm = bpy.context.window_manager
        mode = bpy.context.mode           # NOTE(review): unused; context.mode read directly below
        tSet = bpy.context.tool_settings
        ob = bpy.context.active_object
        ncIcon = 'LINK'
        hIcon = 'INLINK'
        layout = self.layout
        col = layout.column()
        # active_object may be None — fall back to a falsy placeholder.
        try:
            mat = ob.active_material
        except:
            mat = 0
        col.menu("menu.zb_system_options", text = 'ZB Options', icon=hIcon)
        col.operator('object.zb_image_edit', text='Image Editor',
            icon=ncIcon).func = 'to_image_editor'
        col.prop(scene, "zbGoCycles", text="Use Cycles")
        if re == 'CYCLES':
            col.prop(scene, "zbQuickLights", text = "Quick Lights")
            col.prop(scene, "zbFastMode", text="Fast Mode")
        col.separator()
        col.menu('menu.zb_bake_type_select', text='Bake Selected', icon=hIcon)
        col.prop(scene, 'zbBakeSelectedToActive', text='Sel-to-Active')
        col.prop(scene, 'zbBakeSingle', text = 'Bake To Single')
        # Bake quality comes from Cycles samples or BI gather samples.
        if re == 'CYCLES':
            scene = context.scene
            cscene = scene.cycles
            col.prop(cscene, "samples", text=" Bake Quality")
        else:
            ls = scene.world.light_settings
            col.prop(ls, "samples", text=" Bake Quality")
        col.separator()
        col.menu("menu.zb_material_options_menu", text = 'Material Options', icon=hIcon)
        col.separator()
        col.label('Layer Options', icon=hIcon)
        col.operator("object.zb_save_layers", text="Save My Layers",
            icon=ncIcon)
        # Layer-merge tools only while texture painting.
        if context.mode == 'PAINT_TEXTURE':
            if tSet.image_paint.brush.texture:
                col.operator("object.zb_message_proxy", text = 'Layer From Brush',
                    icon=ncIcon).msg = 'LAYER_FROM_BRUSH'
            if mat:
                # NOTE(review): merge options assumed BI-only — confirm nesting.
                if re != 'CYCLES':
                    col.operator('object.zb_message_proxy', text='Merge Layers',
                        icon=ncIcon).msg = 'MERGE_LAYERS'
                    col.operator('object.zb_message_proxy', text='Merge Normals',
                        icon=ncIcon).msg = 'MERGE_NORMALS'
        col.prop(wm, "zbUseBrushColor", text= "Color New Layers")
        col.prop(scene, "zbImgSize", text=' New Layer Width')
        col.prop(scene, "zbImgSizeH", text=' New Layer Height')
class cl25(bpy.types.Operator):
    # Operator: make selected objects' data/materials/textures single-user
    # so each object can be painted independently.
    bl_idname = i_0[80]
    bl_label = i_0[81]
    bl_description = i_0[82]

    def execute(self, context):
        scene = bpy.context.scene
        ob = bpy.context.active_object
        mat = ob.active_material
        # Group-instance empties: realize the duplis first so their data can
        # be made unique, then delete the leftover group empties.
        try:
            if ob.dupli_type == 'GROUP':
                oldName = ob.dupli_group.name   # NOTE(review): unused afterwards
                newGroupSel = []
                empty = 0
                bpy.ops.object.duplicates_make_real()
                sel = bpy.context.selected_objects
                for ob in sel:
                    if ob.type == 'EMPTY':
                        empty = ob
                        ob.select = False
                newGroupSel = bpy.context.selected_objects
                bpy.ops.object.select_all(action='DESELECT')
                for ob in bpy.data.objects:
                    if ob.type == 'EMPTY':
                        if 'group' in ob.name:
                            ob.select = True
                bpy.ops.object.delete()
                for ob in newGroupSel:
                    ob.select = True
                    ob.data = ob.data.copy()    # single-user mesh data
        except:
            pass
        sel = bpy.context.selected_objects
        mode = bpy.context.mode
        if 'PAINT' in mode:
            bpy.ops.object.mode_set(mode='OBJECT')
        # Per object: copy the material, its textures and images so nothing
        # is shared any more.
        for ob in sel:
            bpy.ops.object.select_all(action='DESELECT')
            scene.objects.active = ob
            ob.select = True
            mat = ob.active_material
            if mat:
                # NOTE(review): copying when users < 2 looks inverted but is
                # kept exactly as authored — confirm intent before changing.
                if mat.users < 2:
                    ob.active_material = mat.copy()
                bpy.ops.object.zb_save_layers(save_only_active=True)
                slot = ob.active_material_index
                newMat = ob.active_material.copy()
                ob.material_slots[slot].material = newMat
                mat = bpy.context.active_object.active_material
                for slot in mat.texture_slots:
                    try:
                        newTex = slot.texture.copy()
                        newImg = slot.texture.image.copy()
                        slot.texture = newTex
                        slot.texture.image = newImg
                    except:
                        pass
                try:
                    tn = mat.active_texture_index
                    fu12(context, tn)
                except:
                    pass
        # Restore the original selection when we started in object mode.
        if mode == 'OBJECT':
            bpy.ops.object.select_all(action='DESELECT')
            for ob in sel:
                ob.select = True
        fu1(mode)
        scene.update_tag()
        scene.update()
        return{'FINISHED'}
def fu23(self, context):
    """Update callback: sync paint-through brush occlusion and global shadows.

    Paint-through ON disables occlusion/normal-falloff/backface-culling so
    strokes reach hidden faces; "Disable Shadows" flips every lamp's shadow
    method off (and back to ray shadows when cleared).
    """
    scene = bpy.context.scene
    wm = bpy.context.window_manager
    paint = scene.tool_settings.image_paint
    # Kept from the original: touching active_material here (side effect if
    # there is no active object/material).
    mat = bpy.context.object.active_material
    occlude = not (wm.zbPaintThrough == True)
    paint.use_occlude = occlude
    paint.use_normal_falloff = occlude
    paint.use_backface_culling = occlude
    if scene.zbDisableShadows:
        for lamp in bpy.data.lamps:
            casts = False
            try:
                # Not every lamp type exposes shadow_method.
                casts = lamp.shadow_method in ('RAY_SHADOW', 'BUFFER_SHADOW')
            except:
                pass
            if casts:
                lamp.shadow_method = 'NOSHADOW'
    else:
        for lamp in bpy.data.lamps:
            try:
                lamp.shadow_method = 'RAY_SHADOW'
            except:
                pass
def fu24(self, context):
    """Update callback for the "Quick Lights" toggle.

    ON: swaps in a black "ZB Quick Lights" world (remembering the previous
    one in scene.zbLastWorld) and builds four hidden emissive planes named
    "zbCyclesLight*" parented to the first one.
    OFF: restores the remembered world and removes the quick-light world,
    materials and objects again.

    Fixes vs. original:
    * ``bpy.data.worlds.remove()`` was called without the world argument in
      the fallback removal path (TypeError, silently swallowed by the outer
      try — the world was never removed). Now ``remove(x)``.
    * ``bpy.context.data.worlds.new`` is not a valid path; corrected to
      ``bpy.data.worlds.new`` in the no-world fallback.
    """
    scene = bpy.context.scene
    mode = bpy.context.mode
    sel = bpy.context.selected_objects
    if self.zbQuickLights:
        # Make sure some world exists before we read from it.
        world = scene.world
        if not world:
            bpy.ops.world.new()
            world = bpy.data.worlds[0]
            scene.world = world
        worldWasUsingMist = False
        if world.mist_settings.use_mist:
            worldWasUsingMist = True
        # Remember the world to restore later; never remember the quick-light
        # world itself.
        try:
            wName = scene.world.name
            scene.zbLastWorld = wName
            if 'ZB Quick Lights' in wName:
                try:
                    scene.zbLastWorld = bpy.data.worlds[2].name
                except:
                    scene.zbLastWorld = bpy.data.worlds[0].name
        except:
            # BUGFIX: was bpy.context.data.worlds.new(...)
            w = bpy.data.worlds.new('Basic World')
            scene.zbLastWorld = w.name
        try:
            bpy.ops.object.mode_set(mode='OBJECT')
            activeOb = bpy.context.active_object
            bpy.ops.object.select_all(action='DESELECT')
        except:
            pass
        # Reuse an existing quick-light world or create a pitch-black one.
        needWorld = False
        for world in bpy.data.worlds:
            if 'ZB Quick Lights' in world.name:
                needWorld = False
                zbQuickLights = world
                scene.world = bpy.data.worlds[world.name]
                break
            else:
                needWorld = True
        if needWorld:
            zbQuickLights = bpy.data.worlds.new("ZB Quick Lights")
            zbQuickLights.horizon_color = [0, 0, 0]
            zbQuickLights.zenith_color = [0, 0, 0]
            zbQuickLights.ambient_color = [0, 0, 0]
            scene.world = zbQuickLights
        zbQuickLights.use_nodes = True
        zbQuickLights.use_fake_user = True
        bg = zbQuickLights.node_tree.nodes['Background']
        bg.inputs[0].default_value = (0, 0, 0, 1)
        # Build the emissive light planes once.
        needPlanes = False
        if 'zbCyclesLight' not in bpy.data.objects:
            needPlanes = True
        if needPlanes:
            scene.cursor_location.xyz = [0, 0, 0]
            bpy.ops.mesh.primitive_plane_add()
            bpy.context.active_object.name = "zbCyclesLight"
            ob = bpy.context.active_object
            zbCyclesLight = bpy.context.active_object
            zbCyclesLight.scale = [17, 17, 17]
            zbCyclesLight.location.z = 100
            zbCyclesLight.active_material = bpy.data.materials.new("zbCyclesLight")
            # Subdivide then dissolve so the plane has soft-area geometry.
            bpy.ops.object.modifier_add(type='SUBSURF')
            zbCyclesLight.modifiers["Subsurf"].levels = 3
            bpy.ops.object.modifier_apply(apply_as='DATA', modifier="Subsurf")
            bpy.ops.object.mode_set(mode='EDIT')
            bpy.ops.mesh.select_all(action='SELECT')
            bpy.ops.mesh.dissolve_limited()
            bpy.ops.object.mode_set(mode='OBJECT')
            # Emission-only material, invisible to camera and shadows.
            mat = ob.active_material
            mat.use_nodes = True
            mat.node_tree.nodes.clear()
            nodeEmission = mat.node_tree.nodes.new(type='ShaderNodeEmission')
            nodeEmission.inputs[1].default_value = 20
            nodeOutput = mat.node_tree.nodes.new(type='ShaderNodeOutputMaterial')
            mat.node_tree.links.new(nodeEmission.outputs['Emission'],
                nodeOutput.inputs['Surface'])
            nodeOutput.location = (250, 0)
            ob.cycles_visibility.camera = False
            ob.cycles_visibility.shadow = False
            # Three more planes around the subject (key/fill/back).
            bpy.ops.object.duplicate_move()
            ob = bpy.context.active_object
            ob.location.xyz = [-75, -65, 15]
            ob.rotation_euler.y = 115
            ob.rotation_euler.z = 45
            ob.scale = [15, 15, 15]
            bpy.ops.object.duplicate_move()
            ob = bpy.context.active_object
            ob.location.xyz = [75, -65, 15]
            ob.rotation_euler.y = -115
            ob.rotation_euler.z = -45
            bpy.ops.object.duplicate_move()
            ob = bpy.context.active_object
            ob.location.xyz = [0, 100, 15]
            ob.rotation_euler[2] = 1.5708
            # Parent all planes to the top one.
            for ob in bpy.data.objects:
                if "zbCyclesLight" in ob.name:
                    scene.objects.active = ob
                    ob.select = True
            scene.objects.active = bpy.data.objects["zbCyclesLight"]
            bpy.ops.object.parent_set(type='OBJECT')
        # Keep the light rig hidden in the viewport; cl27 unhides it for
        # rendered preview.
        for ob in bpy.data.objects:
            if "zbCyclesLight" in ob.name:
                ob.select = False
                ob.hide = True
        try:
            scene.objects.active = activeOb
            scene.objects.active.select = True
        except:
            pass
        try:
            if worldWasUsingMist:
                bpy.ops.object.sf_cam_options(func="mist")
        except:
            pass
    else:
        # Restore the previous world (or any non-quick-light world).
        if scene.zbLastWorld:
            lastWorld = scene.zbLastWorld
            if lastWorld in bpy.data.worlds:
                scene.world = bpy.data.worlds[lastWorld]
            else:
                for world in bpy.data.worlds:
                    if 'ZB Quick Lights' not in world.name:
                        scene.world = world
                        break
        if scene.world.name == 'ZB Quick Lights':
            bpy.ops.world.new()
            for world in bpy.data.worlds:
                if world.name != 'ZB Quick Lights':
                    scene.world = world
                    break
        # Remove the quick-light materials.
        try:
            activeOb = bpy.context.active_object
            bpy.ops.object.mode_set(mode='OBJECT')
            bpy.ops.object.select_all(action='DESELECT')
            for mat in bpy.data.materials:
                if "zbCyclesLight" in mat.name:
                    try:
                        bpy.data.materials.remove(mat, do_unlink=True)
                    except:
                        # Older API without do_unlink.
                        if mat.users:
                            mat.user_clear()
                        bpy.data.materials.remove(mat)
        except:
            pass
        # Remove the quick-light world.
        try:
            if 'ZB Quick Lights' in bpy.data.worlds:
                x = bpy.data.worlds["ZB Quick Lights"]
                try:
                    bpy.data.worlds.remove(x, do_unlink=True)
                except:
                    if x.users:
                        x.user_clear()
                    # BUGFIX: was bpy.data.worlds.remove() with no argument.
                    bpy.data.worlds.remove(x)
        except:
            pass
        # Unlink the light planes from the scene.
        for ob in bpy.data.objects:
            if "zbCyclesLight" in ob.name:
                ob.select = False
                scene.objects.unlink(ob)
        fu0()
        try:
            bpy.ops.object.select_all(action='DESELECT')
            scene.objects.active = activeOb
            scene.objects.active.select = True
        except:
            pass
    try:
        fu1(mode)
    except:
        pass
    for ob in sel:
        ob.select = True
class cl26(bpy.types.Operator):
    # Operator: re-unwrap the active object and optionally reset its single
    # paint layer.
    bl_idname = i_0[83]
    bl_label = i_0[84]
    bl_description = i_0[85]
    # When True, also delete and recreate the color layer (only if it is
    # the material's only layer).
    resetLayer = bpy.props.BoolProperty(default = True)

    def execute(self, context):
        wm = bpy.context.window_manager   # unused here; kept as-is
        scene = bpy.context.scene         # unused here; kept as-is
        mode = bpy.context.mode
        userMsg = ''
        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.select_all(action='SELECT')
        ob = bpy.context.active_object
        mesh = ob.data                    # unused here; kept as-is
        fu10(ob)                          # re-unwrap helper
        if self.resetLayer:
            layers = len(ob.active_material.texture_paint_images)
            if layers == 1:
                # Find the index of the first slot that actually has an image.
                layer = 0
                for ts in ob.active_material.texture_slots:
                    try:
                        ts.texture.image
                        break
                    except:
                        pass
                    layer += 1
                bpy.ops.object.zb_delete_texture(tex_kill=layer)
                bpy.ops.object.zb_paint_color()
                userMsg = "Reset uvs and layer (this will also reset your image if only one layer)"
        fu1(mode)                         # restore the previous mode
        if userMsg:
            self.report({'INFO'}, userMsg)
        return{'FINISHED'}
class cl27(bpy.types.Operator):
    # Operator: toggle rendered-viewport preview, juggling quick lights,
    # lamp visibility, and the dual-viewport "Shader" screen layout.
    bl_idname = i_0[86]
    bl_label = i_0[87]
    bl_description = i_0[88]
    # Remembers the shade mode to restore when preview is toggled off.
    vpShade = bpy.props.StringProperty(default='MATERIAL')

    def execute(self, context):
        wm = bpy.data.window_managers["WinMan"]
        scene = bpy.context.scene
        mode = bpy.context.mode
        re = scene.render.engine
        farthestLeft = 0
        farthestLeftShade = 0
        # On the "Shader" screen there are two 3D views: render in the right
        # one, drop the left one to bounding-box to save GPU time.
        if bpy.context.screen.name == 'Shader':
            v3d_list = [area for area in bpy.context.screen.areas if area.type == 'VIEW_3D']
            if v3d_list:
                farthestRight = max(v3d_list, key=lambda area: area.x)
                sd = farthestRight.spaces[0]
                farthestLeft = min(v3d_list, key=lambda area: area.x)
                farthestLeftShade = farthestLeft.spaces[0].viewport_shade
                farthestLeft.spaces[0].viewport_shade = 'BOUNDBOX'
        else:
            sd = bpy.context.space_data
        # Decide toggle direction from current shade mode / paused preview.
        turnOn = False
        if sd.viewport_shade != 'RENDERED':
            turnOn = True
        if bpy.context.screen.name == 'Shader':
            if scene.cycles.preview_pause:
                turnOn = True
        if turnOn:
            if re == 'CYCLES':
                # Mode toggle nudges Cycles to refresh experimental features.
                try:
                    if scene.cycles.feature_set == 'EXPERIMENTAL':
                        bpy.ops.object.editmode_toggle()
                        bpy.ops.object.editmode_toggle()
                except:
                    pass
            # Remember what to restore later.
            if farthestLeft == 0:
                self.vpShade = sd.viewport_shade
            else:
                if farthestLeftShade:
                    self.vpShade = farthestLeftShade
            if mode == 'PARTICLE':
                bpy.ops.object.mode_set(mode='OBJECT')
                wm.zbLastObjectMode = mode
                self.report({'INFO'},
                    "Blender does not support render preview in particle mode (temporarily switched modes).")
            scene.cycles.preview_pause = False
            # Hide BI helper lamps from the Cycles render.
            if re == 'CYCLES':
                if scene.zbQuickLights:
                    try:
                        scnLight = bpy.data.objects['-Scene Light']
                        shadows = bpy.data.objects['-Shadows']
                        scnLight.hide_render = True
                        shadows.hide_render = True
                        shadows.hide = True
                        scnLight.hide = True
                    except:
                        pass
            for ob in bpy.data.objects:
                if ob.type == 'LAMP':
                    if re != 'CYCLES':
                        if not ob.hide_render:
                            ob.hide = False
                    else:
                        if scene.zbAutoConvertLamps:
                            if not ob.hide_render:
                                ob.hide = False
            # Reveal the quick-light planes on all layers for the preview.
            if re == 'CYCLES':
                if scene.zbQuickLights:
                    for ob in bpy.data.objects:
                        if 'zbCyclesLight' in ob.name:
                            ob.hide = False
                            for i in range(20):
                                ob.layers[i] = True
            try:
                sd.use_render_border = True
                sd.viewport_shade = 'RENDERED'
            except:
                pass
        else:
            # Turning preview off: re-hide quick lights and restore shading.
            if re == 'CYCLES':
                if scene.zbQuickLights == True:
                    for ob in bpy.data.objects:
                        if "zbCyclesLight" in ob.name:
                            ob.hide = True
                            for i in range(19):
                                ob.layers[i] = False
                            ob.layers[0] = True
                            ob.layers[19] = False
            for ob in bpy.data.objects:
                if ob.type == 'LAMP':
                    if "Shadows" in ob.name:
                        ob.hide = True
                    if "-Scene Light" in ob.name:
                        ob.hide = True
            if bpy.context.screen.name != 'Shader':
                sd.viewport_shade = self.vpShade
            else:
                if re != 'CYCLES':
                    sd.viewport_shade = self.vpShade
            sd.use_render_border = False
            scene.cycles.preview_pause = True
            if farthestLeft != 0:
                farthestLeft = min(v3d_list, key=lambda area: area.x)
                farthestLeft.spaces[0].viewport_shade = self.vpShade
            # Return to particle mode if we bailed out of it above.
            if wm.zbLastObjectMode == 'PARTICLE':
                wm.zbLastObjectMode = ''
                bpy.ops.object.mode_set(mode='PARTICLE_EDIT')
        fu1(mode)
        return{'FINISHED'}
def texLayerAdder(layerType, texCol, texOpas, alphaChoice,
    normalChoice):
    """Create a new paint layer: image + BI texture slot + Cycles node group.

    layerType -- one of "Color", "Bump", "Specular", "Glow", "Transparent",
        "Alpha_Mask"; selects which node sub-graph is built/reused.
    texCol / texOpas -- initial fill gray value and alpha of the new image.
    alphaChoice -- create the image with an alpha channel.
    normalChoice -- also enable normal mapping on the BI texture slot.

    Returns the new material texture slot (mTex).
    NOTE(review): if the big try-block below fails early, mTex/img never get
    bound and the code after it raises NameError -- confirm this is intended
    best-effort behavior.
    """
    # Temporarily disable datablock-name translation so generated names
    # ("<ob prefix><layerType>") stay predictable.
    try:
        sys = bpy.context.user_preferences.system
        fontState = sys.use_translate_new_dataname
        sys.use_translate_new_dataname = False
    except:
        pass
    wm = bpy.context.window_manager
    scene = bpy.context.scene
    sd = bpy.context.space_data
    re = scene.render.engine
    ob = bpy.context.active_object
    mat = ob.active_material
    try:
        # Transparency layers: show alpha in viewport and purge any previous
        # alpha-mapped texture slot first.
        if layerType == "Transparent" or layerType == "Alpha_Mask":
            ob.show_transparent = True
            if mat:
                n = 0
                for ts in mat.texture_slots:
                    if ts is not None:
                        try:
                            if ts.use_map_alpha == True:
                                ts.texture.image = None
                                ts.texture = None
                                mat.texture_slots.clear(n)
                        except:
                            pass
                    n += 1
            # When converting to an alpha mask, re-link the existing
            # transparent chain back to the plain color path.
            try:
                if layerType == 'Alpha_Mask':
                    for node in bpy.data.materials[mat.name].node_tree.nodes:
                        if 'zbTransparent' in node.name:
                            tree = bpy.data.materials[mat.name].node_tree
                            mixed5 = tree.nodes['Mixed5']
                            mixed6 = tree.nodes['Mixed6']
                            mat.node_tree.links.new(mixed5.outputs['Shader'],
                                mixed6.inputs[1])
                            nodeColor = tree.nodes['Image Texture zbColor']
                            for img in bpy.data.images:
                                if "Color" in img.name:
                                    if ob.name[:4] in img.name:
                                        nodeColor.image = img
                                        break
            except:
                pass
        # Make sure a material with Zero Brush defaults exists.
        if mat is None or "None" in mat.name:
            mat = bpy.data.materials.new(ob.name)
            mat.diffuse_shader = 'LAMBERT'
            mat.darkness = 0.8
            mat.strand.use_tangent_shading = False
            mat.strand.root_size = 2.5
            mat.strand.tip_size = 0.25
            mat.strand.width_fade = 0.5
            ob.active_material = mat
        # Build the shared Mixed1..Mixed8 mix-shader chain once per material
        # (presence of 'Mixed1' is the marker that it already exists).
        try:
            node = mat.node_tree.nodes['Mixed1']
        except:
            mat.use_nodes = True
            mat.node_tree.nodes.clear()
            mixedTotal = 8
            locX = 250
            mixedList = []
            for mixed in range(1, mixedTotal + 1):
                x = mixed
                mixed = mat.node_tree.nodes.new(type="ShaderNodeMixShader")
                mixed.name = "Mixed" + str(x)
                mixed.label = mixed.name
                mixedList.append(mixed)
                mixed.inputs['Fac'].default_value = 0
                locX += 250
                mixed.location = (locX, 0)
            nodeOutput = mat.node_tree.nodes.new(type = 'ShaderNodeOutputMaterial')
            nodeOutput.location = (locX + 250, 0)
            node = mat.node_tree.nodes.new(type = 'ShaderNodeMath')
            node.label = node.name + ' zbDisplace'
            node.name = node.label
            nodeMath = node
            nodeMath.location = (locX + 250, -120)
            mat.node_tree.links.new(nodeMath.outputs['Value'],
                nodeOutput.inputs['Displacement'])
            # Chain each mix shader into the next; the last feeds the output.
            x = 0
            for mixed in mixedList:
                x += 1
                if x < mixedTotal:
                    mixedNext = mixedList[x]
                    mat.node_tree.links.new(mixed.outputs['Shader'],
                        mixedNext.inputs['Shader'])
                else:
                    mat.node_tree.links.new(mixed.outputs['Shader'],
                        nodeOutput.inputs['Surface'])
                if "5" in mixed.name:
                    mat.node_tree.links.new(mixed.outputs['Shader'],
                        mixedNext.inputs[2])
        # Create, pack and fill the new image datablock.
        w = round(scene.zbImgSize)
        h = round(scene.zbImgSizeH)
        layerName = ob.name[:4]
        img = bpy.data.images.new(layerName + layerType, scene.zbImgSize, scene.zbImgSizeH, alpha= alphaChoice)
        override = bpy.context.copy()
        override['edit_image'] = img
        bpy.ops.image.pack(override, as_png = True)
        img.pixels[:] = (texCol, texCol, texCol, texOpas) * w * h
        # Optionally pre-fill with the brush color (not for Bump /
        # Transparent / Alpha_Mask layers).
        try:
            brushCol = bpy.context.tool_settings.image_paint.brush.color
            if wm.zbUseBrushColor:
                l = layerType
                go = 0
                if "Bu" not in l:
                    if "Tr" not in l:
                        if "Ma" not in l:
                            go = 1
                if go:
                    img.pixels[:] = (brushCol.r, brushCol.g, brushCol.b, 1) * w * h
        except:
            pass
        # Blender-Internal texture + slot wiring.
        cTexName = layerName + layerType
        cTex = bpy.data.textures.new(name = cTexName, type = 'IMAGE')
        activeTex = -1
        for ts in mat.texture_slots:
            activeTex += 1
            if ts is None:
                break
        mTex = mat.texture_slots.add()
        mTex.texture = cTex
        mTex.texture_coords = 'UV'
        if normalChoice == True:
            mTex.use_map_normal = True
            mTex.bump_method = 'BUMP_MEDIUM_QUALITY'
            mTex.normal_factor = 0.0
        cTex.image = img
        # Assign the image to every UV face so it is visible/paintable.
        bpy.ops.object.mode_set(mode = 'EDIT')
        bpy.ops.mesh.select_all(action='SELECT')
        fu10(ob)
        for uv_face in ob.data.uv_textures.active.data:
            uv_face.image = img
        try:
            if sd.viewport_shade != 'TEXTURED':
                sd.viewport_shade = 'MATERIAL'
                scene.game_settings.material_mode = 'GLSL'
        except:
            pass
        bpy.ops.object.mode_set(mode = 'TEXTURE_PAINT')
        # Larger images need more seam bleed.
        bleed = 2
        if w > 512:
            bleed = 5
        if w > 1024:
            bleed = 6
        if w > 2048:
            bleed = 8
        scene.tool_settings.image_paint.seam_bleed = bleed
        bpy.ops.object.zb_set_active_layer(tex_index=activeTex)
        # Push the new slot to the bottom of the texture stack.
        slots = mat.texture_slots
        ts = slots[mat.active_texture_index]
        ctx = bpy.context.copy()
        ctx['texture_slot'] = ts
        x = 0
        while x < 17:
            bpy.ops.texture.slot_move(ctx, type='DOWN')
            x += 1
    except:
        pass
    tn = mat.active_texture_index
    context = bpy.context
    fu12(context, tn)
    # ---- Per-layer-type Cycles node setup ------------------------------
    if layerType == "Color":
        try:
            nodeTex = mat.node_tree.nodes['Image Texture zbColor']
            nodeTex.mute = False
        except:
            node = mat.node_tree.nodes.new(type = 'ShaderNodeTexImage')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeTex = node
            node = mat.node_tree.nodes.new(type = 'ShaderNodeMixRGB')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeMixRGB = node
            nodeMixRGB.blend_type = 'MIX'
            nodeMixRGB.inputs['Fac'].default_value = 1
            node = mat.node_tree.nodes.new(type = 'ShaderNodeBrightContrast')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeBright = node
            node = mat.node_tree.nodes.new(type = 'ShaderNodeBsdfDiffuse')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeDiffuse = node
            node = mat.node_tree.nodes.new(type = 'ShaderNodeBump')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeBump = node
            nodeBump.inputs[1].default_value = 0.015
            nodeBump.invert = True
            node = mat.node_tree.nodes.new(type='ShaderNodeRGBToBW')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeBW = node
            node = mat.node_tree.nodes.new(type='ShaderNodeBsdfGlossy')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeGloss = node
            node = mat.node_tree.nodes.new(type='ShaderNodeMath')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeMath = node
            nodeMath.inputs[1].default_value = 0
            nodeMath.operation = 'MULTIPLY'
            node = mat.node_tree.nodes.new(type='ShaderNodeEmission')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeEmission = node
            nodeEmission.inputs[1].default_value = 10
            node = mat.node_tree.nodes.new(type = 'ShaderNodeBsdfTransparent')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeAlpha = node
            # Collapse the helper nodes in the editor.
            mat.node_tree.nodes[nodeBump.name].hide = True
            mat.node_tree.nodes[nodeBW.name].hide = True
            mat.node_tree.nodes[nodeGloss.name].hide = True
            mat.node_tree.nodes[nodeMath.name].hide = True
            mat.node_tree.nodes[nodeBright.name].hide = True
            mat.node_tree.nodes[nodeMixRGB.name].hide = True
            nodeTex.location = (-50, 0)
            nodeMixRGB.location = (120, -5)
            nodeBright.location = (120, -55)
            nodeDiffuse.location = (250, 0)
            nodeMath.location = (250, -130)
            nodeBW.location = (250, -170)
            nodeBump.location = (500, -130)
            nodeGloss.location = (500, -170)
            nodeEmission.location = (750, -130)
            nodeAlpha.location = (1000, -130)
            colorMixed1 = mat.node_tree.nodes['Mixed1']
            colorMixed2 = mat.node_tree.nodes['Mixed2']
            colorMixed3 = mat.node_tree.nodes['Mixed3']
            nodeMath2 = mat.node_tree.nodes['Math zbDisplace']
            nodeMath2.inputs[0].default_value = 0
            mat.node_tree.links.new(nodeTex.outputs['Color'], nodeMixRGB.inputs['Color2'])
            mat.node_tree.links.new(nodeMixRGB.outputs['Color'], nodeBright.inputs['Color'])
            mat.node_tree.links.new(nodeBright.outputs['Color'], nodeDiffuse.inputs['Color'])
            mat.node_tree.links.new(nodeDiffuse.outputs['BSDF'], colorMixed1.inputs['Shader'])
            # NOTE(review): the line above is duplicated in the original;
            # kept byte-identical (relinking the same sockets is a no-op).
            mat.node_tree.links.new(nodeDiffuse.outputs['BSDF'], colorMixed1.inputs['Shader'])
            mat.node_tree.links.new(nodeBright.outputs['Color'], nodeBW.inputs['Color'])
            mat.node_tree.links.new(nodeBW.outputs['Val'], nodeMath.inputs[0])
            mat.node_tree.links.new(nodeMath.outputs['Value'], nodeMath2.inputs[0])
            mat.node_tree.links.new(nodeBW.outputs['Val'], nodeGloss.inputs['Color'])
            mat.node_tree.links.new(nodeBW.outputs['Val'], nodeBump.inputs['Strength'])
            mat.node_tree.links.new(nodeBW.outputs['Val'], nodeBump.inputs['Height'])
            mat.node_tree.links.new(nodeBump.outputs['Normal'], nodeGloss.inputs['Normal'])
            mat.node_tree.links.new(nodeGloss.outputs['BSDF'], colorMixed1.inputs[2])
            mat.node_tree.links.new(nodeBright.outputs['Color'], nodeEmission.inputs['Color'])
            mat.node_tree.links.new(nodeEmission.outputs['Emission'], colorMixed2.inputs[2])
            mat.node_tree.links.new(nodeAlpha.outputs['BSDF'], colorMixed3.inputs[2])
            mat.node_tree.links.new(colorMixed2.outputs['Shader'], nodeAlpha.inputs['Color'])
        nodeTex.image = img
        node_tree = bpy.data.materials[mat.name].node_tree
        node_tree.nodes.active = nodeTex
    if layerType == "Bump":
        try:
            nodeTex = mat.node_tree.nodes['Image Texture zbBump']
        except:
            node = mat.node_tree.nodes.new(type = 'ShaderNodeTexImage')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeTex = node
            nodeTex.color_space = 'NONE'
            node = mat.node_tree.nodes.new(type = 'ShaderNodeBrightContrast')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeBright = node
            node = mat.node_tree.nodes.new(type = 'ShaderNodeRGBToBW')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeBW = node
            node = mat.node_tree.nodes.new(type='ShaderNodeMath')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            node.inputs[1].default_value = 2.5
            nodeMath = node
            nodeMath.operation = 'MULTIPLY'
            nodeOutput = mat.node_tree.nodes['Material Output']
            nodeMath2 = mat.node_tree.nodes['Math zbDisplace']
            mat.node_tree.links.new(nodeTex.outputs['Color'], nodeBright.inputs['Color'])
            mat.node_tree.links.new(nodeBright.outputs['Color'], nodeBW.inputs['Color'])
            mat.node_tree.links.new(nodeBW.outputs['Val'], nodeMath.inputs[0])
            mat.node_tree.links.new(nodeMath.outputs['Value'], nodeMath2.inputs[1])
            nodeTex.location = (-50, -260)
            nodeBright.location = (120, -260)
            nodeBW.location = (120, -390)
            nodeMath.location = (285, -260)
        nodeTex.image = img
        node_tree = bpy.data.materials[mat.name].node_tree
        node_tree.nodes.active = nodeTex
        # Bump painting is grayscale; start with a white brush.
        try:
            brush = bpy.context.tool_settings.image_paint.brush
            brush.color = (1,1,1)
        except:
            pass
    if layerType == "Specular":
        try:
            nodeTex = mat.node_tree.nodes['Image Texture zbSpecular']
            mat.node_tree.nodes['Math zbSpecular'].inputs[1].default_value = 1
        except:
            node = mat.node_tree.nodes.new(type = 'ShaderNodeTexImage')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeTex = node
            node = mat.node_tree.nodes.new(type = 'ShaderNodeMixRGB')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeMixRGB = node
            nodeMixRGB.blend_type = 'MIX'
            nodeMixRGB.inputs['Fac'].default_value = 1
            node = mat.node_tree.nodes.new(type = 'ShaderNodeBrightContrast')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeBright = node
            node = mat.node_tree.nodes.new(type='ShaderNodeBsdfGlossy')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeGloss = node
            node = mat.node_tree.nodes.new(type='ShaderNodeRGBToBW')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeBW = node
            node = mat.node_tree.nodes.new(type='ShaderNodeMath')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeMath = node
            nodeMath.operation = 'MULTIPLY'
            nodeMath.inputs[1].default_value = 1
            specularMixed5 = mat.node_tree.nodes['Mixed5']
            mat.node_tree.links.new(nodeTex.outputs['Color'], nodeMixRGB.inputs['Color2'])
            mat.node_tree.links.new(nodeMixRGB.outputs['Color'], nodeBright.inputs['Color'])
            mat.node_tree.links.new(nodeBright.outputs['Color'], nodeBW.inputs['Color'])
            mat.node_tree.links.new(nodeBright.outputs['Color'], nodeGloss.inputs['Color'])
            mat.node_tree.links.new(nodeBW.outputs['Val'], nodeMath.inputs['Value'])
            mat.node_tree.links.new(nodeGloss.outputs['BSDF'], specularMixed5.inputs[2])
            mat.node_tree.links.new(nodeMath.outputs['Value'], specularMixed5.inputs['Fac'])
            nodeTex.location = (-50, -515)
            nodeMixRGB.location = (120, -520)
            nodeBright.location = (120, -570)
            nodeBW.location = (250, -515)
            nodeMath.location = (250, -605)
            nodeGloss.location = (415, -515)
            mat.node_tree.nodes[nodeBright.name].hide = True
            mat.node_tree.nodes[nodeMixRGB.name].hide = True
        nodeTex.image = img
        node_tree = bpy.data.materials[mat.name].node_tree
        node_tree.nodes.active = nodeTex
    if layerType == "Glow":
        try:
            nodeTex = mat.node_tree.nodes['Image Texture zbGlow']
            mat.node_tree.nodes['Math zbGlow'].inputs[1].default_value = 6.5
        except:
            node = mat.node_tree.nodes.new(type = 'ShaderNodeTexImage')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeTex = node
            node = mat.node_tree.nodes.new(type = 'ShaderNodeMixRGB')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeMixRGB = node
            nodeMixRGB.blend_type = 'MIX'
            nodeMixRGB.inputs['Fac'].default_value = 1
            node = mat.node_tree.nodes.new(type = 'ShaderNodeBrightContrast')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeBright = node
            node = mat.node_tree.nodes.new(type='ShaderNodeEmission')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            node.inputs[1].default_value = 6.5
            nodeEmission = node
            node = mat.node_tree.nodes.new(type='ShaderNodeRGBToBW')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeBW = node
            node = mat.node_tree.nodes.new(type='ShaderNodeMath')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeMath = node
            nodeMath.operation = 'MULTIPLY'
            nodeMath.inputs[1].default_value = 6.5
            glowMixed7 = mat.node_tree.nodes['Mixed7']
            mat.node_tree.links.new(nodeTex.outputs['Color'], nodeMixRGB.inputs['Color2'])
            mat.node_tree.links.new(nodeMixRGB.outputs['Color'], nodeBright.inputs['Color'])
            mat.node_tree.links.new(nodeBright.outputs['Color'], nodeBW.inputs['Color'])
            mat.node_tree.links.new(nodeBright.outputs['Color'], nodeEmission.inputs['Color'])
            mat.node_tree.links.new(nodeBW.outputs['Val'], nodeMath.inputs['Value'])
            mat.node_tree.links.new(nodeEmission.outputs['Emission'], glowMixed7.inputs[2])
            mat.node_tree.links.new(nodeMath.outputs['Value'], glowMixed7.inputs['Fac'])
            nodeTex.location = (-50, -790)
            nodeMixRGB.location = (120, -795)
            nodeBright.location = (120, -845)
            nodeBW.location = (250, -790)
            nodeMath.location = (250, -880)
            nodeEmission.location = (415, -790)
            mat.node_tree.nodes[nodeBright.name].hide = True
            mat.node_tree.nodes[nodeMixRGB.name].hide = True
        nodeTex.image = img
        node_tree = bpy.data.materials[mat.name].node_tree
        node_tree.nodes.active = nodeTex
    if layerType == "Transparent":
        # Transparency rides on the color layer's alpha channel.
        try:
            nodeTex = mat.node_tree.nodes['Image Texture zbColor']
            nodeTex.mute = False
        except:
            bpy.ops.object.zb_paint_color()
            nodeTex = mat.node_tree.nodes['Image Texture zbColor']
        if nodeTex.outputs['Alpha'].is_linked == False:
            try:
                nodeAlpha = mat.node_tree.nodes['Transparent BSDF zbTransparent']
            except:
                node = mat.node_tree.nodes.new(type = 'ShaderNodeBsdfTransparent')
                node.label = node.name + ' zb' + layerType
                node.name = node.label
                nodeAlpha = node
            Mixed5 = mat.node_tree.nodes['Mixed5']
            Mixed6 = mat.node_tree.nodes['Mixed6']
            mat.node_tree.links.new(nodeTex.outputs['Alpha'], Mixed6.inputs['Fac'])
            mat.node_tree.links.new(Mixed5.outputs['Shader'], nodeAlpha.inputs['Color'])
            mat.node_tree.links.new(nodeAlpha.outputs['BSDF'], Mixed6.inputs['Shader'])
            mat.node_tree.links.new(Mixed5.outputs['Shader'], Mixed6.inputs[2])
            nodeAlpha.location = (1750, -130)
        nodeTex.image = img
        node_tree = bpy.data.materials[mat.name].node_tree
        node_tree.nodes.active = nodeTex
    if layerType == "Alpha_Mask":
        try:
            nodeTex = mat.node_tree.nodes['Image Texture zbAlpha_Mask']
            nodeTex.mute = False
        except:
            pass
        # Reuse existing mask nodes when present, otherwise build them.
        try:
            Mixed6 = mat.node_tree.nodes['Mixed6']
            nodeAlpha = mat.node_tree.nodes['Transparent BSDF zbAlpha_Mask']
            mat.node_tree.links.new(nodeTex.outputs['Alpha'], Mixed6.inputs['Fac'])
            mat.node_tree.links.new(nodeAlpha.outputs['BSDF'], Mixed6.inputs[2])
        except:
            node = mat.node_tree.nodes.new(type = 'ShaderNodeTexImage')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeTex = node
            node = mat.node_tree.nodes.new(type = 'ShaderNodeBsdfTransparent')
            node.label = node.name + ' zb' + layerType
            node.name = node.label
            nodeAlpha = node
            nodeTex.location = (-50, -1075)
            nodeAlpha.location = (250, -1075)
            Mixed6 = mat.node_tree.nodes['Mixed6']
            mat.node_tree.links.new(nodeTex.outputs['Alpha'], Mixed6.inputs['Fac'])
            mat.node_tree.links.new(nodeAlpha.outputs['BSDF'], Mixed6.inputs[2])
        nodeTex.image = img
        node_tree = bpy.data.materials[mat.name].node_tree
        node_tree.nodes.active = nodeTex
    # Blender Internal renders the slot directly; nodes are Cycles-only.
    if re != 'CYCLES':
        mat.use_nodes = False
    # Restore the datablock-name translation preference.
    try:
        sys = bpy.context.user_preferences.system
        sys.use_translate_new_dataname = fontState
    except:
        pass
    return mTex
class cl28(bpy.types.Operator):
    """Add a Color paint layer to the active object's material."""
    bl_idname = i_0[89]
    bl_label = i_0[90]
    bl_description = i_0[91]

    def execute(self, context):
        scene = bpy.context.scene
        engine = scene.render.engine
        # First layer defaults to opaque light gray; subsequent layers (or
        # Alchemy-painted materials) start fully transparent instead.
        fill_alpha = 1
        try:
            material = bpy.context.active_object.active_material
            if material and material.texture_paint_images:
                if (engine != 'CYCLES'
                        or 'ZB Painted' in material.name
                        or 'ZBA Painted' in material.name):
                    fill_alpha = 0
        except:
            pass
        texLayerAdder("Color", 0.9, fill_alpha, True, True)
        return {'FINISHED'}
class cl29(bpy.types.Operator):
    """Add a Bump paint layer to the active object's material."""
    bl_idname = i_0[92]
    bl_label = i_0[93]
    bl_description = i_0[94]

    def execute(self, context):
        scene = bpy.context.scene
        engine = scene.render.engine
        # Bump layers: black fill, opaque, with alpha and normal mapping.
        slot = texLayerAdder("Bump", 0.0, 1.0, True, True)
        slot.use_map_color_diffuse = False
        slot.normal_factor = 0.25
        active = bpy.context.active_object
        mat = active.active_material
        # For Alchemy-painted Cycles materials, route the Alchemy
        # displacement mix into the ZB displacement math node.
        if mat and ' ZB Painted' in mat.name and engine == 'CYCLES':
            for node in mat.node_tree.nodes:
                if node.name == 'Mix Alchemy Displacement':
                    nodeMixDis = node
                if node.name == 'Math zbDisplace':
                    nodeZBDisplace = node
            mat.node_tree.links.new(nodeMixDis.outputs['Color'],
                                    nodeZBDisplace.inputs[0])
        self.report({'INFO'}, "Use white when painting on bump layers")
        return {'FINISHED'}
class cl30(bpy.types.Operator):
    """Add a Specular paint layer to the active object's material."""
    bl_idname = i_0[95]
    bl_label = i_0[96]
    bl_description = i_0[97]

    def execute(self, context):
        # Specular layers start black and fully transparent; no normal map.
        slot = texLayerAdder("Specular", 0.0, 0.0, True, False)
        material = bpy.context.active_object.active_material
        material.specular_color = (0, 0, 0)
        material.specular_intensity = 1
        # Drive the specular color (not diffuse) from the painted image.
        slot.use_map_color_diffuse = False
        slot.use_map_color_spec = True
        return {'FINISHED'}
class cl31(bpy.types.Operator):
    # Operator: add a "Transparent" paint layer and enable Z-transparency
    # on the material so painting reveals/hides the surface.
    bl_idname =i_0[98]
    bl_label =i_0[99]
    bl_description =i_0[100]
    @classmethod
    def poll(cls, context):
        # Only available when the active material is NOT a ZB-painted one
        # (note: the check string has a leading space, ' ZB Painted').
        use = False
        ob = bpy.context.active_object
        if ob.active_material:
            if ' ZB Painted' not in ob.active_material.name:
                use = True
        return use
    def execute(self, context):
        layerType = "Transparent"
        texCol = 0.0
        texOpas = 0.0
        alphaChoice = True
        normalChoice = False
        mTex = texLayerAdder(layerType, texCol, texOpas, alphaChoice,
            normalChoice)
        ob = bpy.context.active_object
        mat = ob.active_material
        mat.use_transparency = True
        mat.transparency_method = 'Z_TRANSPARENCY'
        mat.alpha = 0
        mat.specular_intensity = 0
        mTex.use_map_alpha = True
        # Show both faces while painting transparency.
        bpy.context.space_data.show_backface_culling = False
        return {'FINISHED'}
class cl32(bpy.types.Operator):
    # Operator: add an "Alpha_Mask" paint layer. The negative alpha_factor
    # makes painted areas cut holes (erase alpha) instead of adding it.
    bl_idname =i_0[101]
    bl_label =i_0[102]
    bl_description =i_0[103]
    def execute(self, context):
        layerType = "Alpha_Mask"
        texCol = 0.0
        texOpas = 0.0
        alphaChoice = True
        normalChoice = False
        mTex = texLayerAdder(layerType, texCol, texOpas, alphaChoice,
            normalChoice)
        ob = bpy.context.active_object
        mat = ob.active_material
        mat.use_transparency = True
        mat.transparency_method = 'Z_TRANSPARENCY'
        mat.alpha = 0
        mTex.use_map_alpha = True
        mTex.diffuse_color_factor = 0   # mask has no color contribution
        mTex.alpha_factor = -1          # inverted: painting removes alpha
        bpy.context.space_data.show_backface_culling = False
        return {'FINISHED'}
class cl33(bpy.types.Operator):
    # Operator: add a "Glow" paint layer (emission) and enable approximate
    # indirect lighting on the world so the glow illuminates nearby geometry.
    bl_idname =i_0[104]
    bl_label =i_0[105]
    bl_description =i_0[106]
    def execute(self, context):
        wm = bpy.context.window_manager
        scene = bpy.context.scene
        layerType = "Glow"
        texCol = 0.0
        texOpas = 0.0
        alphaChoice = True
        normalChoice = False
        mTex = texLayerAdder(layerType, texCol, texOpas, alphaChoice,
            normalChoice)
        mTex.use_map_emit = True
        mTex.emit_factor = 0.05
        # Boost the texture's RGB response and contrast so glow reads strongly.
        mTex.texture.factor_red = 2
        mTex.texture.factor_green = 2
        mTex.texture.factor_blue = 2
        mTex.texture.contrast = 5
        world = scene.world
        ls = world.light_settings
        ls.gather_method = 'APPROXIMATE'
        ls.use_indirect_light = True
        ls.correction = 0.75
        ls.indirect_bounces = 3
        return {'FINISHED'}
class cl34(bpy.types.Operator):
    # Operator: add a bake-target layer whose type follows the scene's bake
    # type: Normal (default), Displacement, or Derivative.
    bl_idname =i_0[107]
    bl_label =i_0[108]
    bl_description =i_0[109]
    def execute(self, context):
        scene = bpy.context.scene
        bt = scene.render.bake_type
        alphaChoice = True
        layerType = "Normal"
        normalChoice = True
        texCol = 1
        texOpas = 0
        if 'DISPLACEMENT' in bt:
            layerType = "Displacement"
            normalChoice = False
        if 'DERIVATIVE' in bt:
            layerType = "Derivative"
            normalChoice = True
        texLayerAdder(layerType, texCol,
            texOpas, alphaChoice, normalChoice)
        return {'FINISHED'}
class cl35(bpy.types.Operator):
    """Prepare the active object for multires sculpting.

    Applies common generator modifiers (Mirror/Bevel/Shrinkwrap, Skin via
    fu7(), Solidify), converts any plain Subsurf into an equivalent
    Multires modifier (or adds a fresh 2-level one), bubbles Multires and
    Armature to the top of the stack, and enters Sculpt mode.
    """
    bl_idname =i_0[110]
    bl_label =i_0[111]
    bl_description =i_0[112]
    def execute(self, context):
        ob = bpy.context.active_object
        userMsg = ''
        # Apply generator modifiers first so the sculpt base mesh is final.
        # 'Raise AE' modifiers are project-managed and must stay live.
        for mod in ob.modifiers:
            if 'Mirror' in mod.name or 'Bevel' in mod.name or 'Shrinkwrap' in mod.name:
                if 'Raise AE' not in mod.name:
                    try:
                        bpy.ops.object.modifier_apply(apply_as='DATA',
                            modifier=mod.name)
                    except:
                        pass
        ob = bpy.context.active_object
        if any(mod.type == 'SKIN' for mod in ob.modifiers):
            # fu7() applies the skin modifier (and appears to create a
            # "...Bones" armature for the object -- TODO confirm).
            fu7()
            # FIX: message previously read "next to<Name>sBones" -- the
            # space after "next to" was missing (cl36 already has it).
            userMsg = ("APPLIED SKIN MODIFIER: Click 'EYE' icon next to "
                + ob.name + "sBones in the outliner to see it.")
        for mod in ob.modifiers:
            if hasattr(mod, 'object'):
                if "Bones" not in mod.object.name:
                    userMsg = "Object already has armature. None created for it."
            if mod.type == 'SOLIDIFY':
                try:
                    bpy.ops.object.modifier_apply(apply_as='DATA', modifier=mod.name)
                    ob.show_x_ray = False
                except:
                    pass
        # Replace any non-adaptive Subsurf with a Multires at the same
        # subdivision level; otherwise start from a default of 2 levels.
        adaptiveMod = fu25(ob)
        subsurf = 0
        for mod in ob.modifiers:
            if mod.type == 'SUBSURF':
                if mod != adaptiveMod:
                    subsurf = mod
                    break
        if subsurf:
            newLevels = subsurf.levels
            bpy.ops.object.modifier_remove(modifier=subsurf.name)
        else:
            newLevels = 2
        multires = ob.modifiers.new(name='Multires', type='MULTIRES')
        while newLevels > 0:
            bpy.ops.object.multires_subdivide(modifier=multires.name)
            multires.levels += 1
            newLevels -= 1
        # Bubble Multires (then Armature) to the top of the modifier stack;
        # one move_up per existing modifier is enough to reach the top.
        for mod in ob.modifiers:
            try:
                bpy.ops.object.modifier_move_up(modifier="Multires")
            except:
                pass
            try:
                bpy.ops.object.modifier_move_up(modifier="Armature")
            except:
                pass
        bpy.ops.object.shade_smooth()
        bpy.ops.object.mode_set(mode='OBJECT')
        bpy.ops.object.mode_set(mode='SCULPT')
        if userMsg:
            self.report({'INFO'}, userMsg)
        return {'FINISHED'}
def fu25(ob):
    """Return the object's trailing SUBSURF modifier when Cycles adaptive
    subdivision is enabled on it; return 0 otherwise (or on any lookup
    failure, e.g. the object has no ``cycles`` settings or no modifiers)."""
    found = 0
    try:
        if ob.cycles.use_adaptive_subdivision:
            tail = ob.modifiers[-1]
            if tail.type == 'SUBSURF':
                found = tail
    except:
        pass
    return found
class cl36(bpy.types.Operator):
    """Prepare the active object for dynamic-topology (dyntopo) sculpting.

    Applies Mirror/Skin/Solidify/Subsurf/Multires modifiers, enables
    dyntopo with constant detail, and normalizes default settings on all
    sculpt brushes.
    """
    bl_idname =i_0[113]
    bl_label =i_0[114]
    bl_description =i_0[115]
    def execute(self, context):
        scene = bpy.context.scene
        ob = bpy.context.active_object
        userMsg = ''
        if any(mod.type == 'MIRROR' for mod in ob.modifiers):
            try:
                bpy.ops.object.modifier_apply(apply_as='DATA', modifier="Mirror")
            except:
                pass
            # Mirror is baked in; keep symmetry while sculpting instead.
            scene.tool_settings.sculpt.use_symmetry_x = True
        ob = bpy.context.active_object
        if any(mod.type == 'SKIN' for mod in ob.modifiers):
            fu7()
            userMsg = ("APPLIED SKIN MODIFIER: Click 'EYE' icon next to "
                + ob.name + "sBones in the outliner to see it.")
        adaptiveMod = fu25(ob)
        for mod in ob.modifiers:
            try:
                if hasattr(mod, 'object'):
                    # BUG FIX: was `type.object.name` (the builtin `type`),
                    # which raised AttributeError inside this try and
                    # silently skipped the modifier applies below.
                    if "Bones" not in mod.object.name:
                        userMsg = "Object already has armature. None created for it."
                if mod.type == 'SOLIDIFY':
                    bpy.ops.object.modifier_apply(apply_as='DATA', modifier=mod.name)
                    ob.show_x_ray = False
                if mod.type == 'SUBSURF':
                    if mod != adaptiveMod:
                        bpy.ops.object.modifier_apply(apply_as='DATA', modifier=mod.name)
                if mod.type == 'MULTIRES':
                    bpy.ops.object.modifier_apply(apply_as='DATA', modifier=mod.name)
            except:
                pass
        bpy.ops.object.mode_set(mode='OBJECT')
        bpy.ops.object.mode_set(mode='SCULPT')
        bpy.ops.sculpt.dynamic_topology_toggle()
        scene.tool_settings.sculpt.detail_type_method = 'CONSTANT'
        scene.tool_settings.sculpt.detail_refine_method = 'SUBDIVIDE_COLLAPSE'
        scene.tool_settings.sculpt.constant_detail = 4.5
        bpy.ops.sculpt.optimize()
        bpy.context.tool_settings.sculpt.brush.auto_smooth_factor = 0.75
        scene.tool_settings.sculpt.use_smooth_shading = True
        # (A dead no-op expression `bpy.context.tool_settings.sculpt.brush`
        # was removed here.)
        # Normalize defaults on every sculpt brush; each assignment is
        # guarded because not all brushes expose all properties.
        for brush in bpy.data.brushes:
            if brush.use_paint_sculpt:
                try:
                    brush.use_accumulate = True
                except:
                    pass
                try:
                    brush.sculpt_plane = 'VIEW'
                except:
                    pass
                try:
                    brush.auto_smooth_factor = 0.1
                except:
                    pass
        if userMsg:
            self.report({'INFO'}, userMsg)
        return {'FINISHED'}
class cl37(bpy.types.Operator):
    # Operator: raise the Multires subdivision level by one, subdividing the
    # modifier when already at its maximum level. Adds a Multires modifier
    # (via the zb_multires_add operator) if the object has none.
    bl_idname =i_0[116]
    bl_label =i_0[117]
    bl_description =i_0[118]
    bl_options = {'REGISTER', 'UNDO'}
    def execute(self, context):
        ob = bpy.context.active_object
        multires = len([mod for mod in ob.modifiers if mod.type == 'MULTIRES'])
        if multires:
            val = bpy.context.object.modifiers["Multires"].levels
            bpy.context.object.modifiers["Multires"].levels += 1
            bpy.context.object.modifiers["Multires"].sculpt_levels += 1
            bpy.context.object.modifiers["Multires"].render_levels += 1
            val2 = bpy.context.object.modifiers["Multires"].sculpt_levels
            # If the increment had no effect we were at max level: create a
            # new subdivision level and step onto it.
            if val == val2:
                bpy.ops.object.multires_subdivide(modifier="Multires")
                bpy.context.object.modifiers["Multires"].levels += 1
            val2 = bpy.context.object.modifiers["Multires"].levels
            bpy.context.area.tag_redraw()
            response = "Subdivision levels: " + str(val2)
            self.report({'INFO'}, response)
        if not multires:
            bpy.ops.object.zb_multires_add()
        return {'FINISHED'}
class cl37Down(bpy.types.Operator):
    # Operator: lower the Multires subdivision level (view, sculpt and
    # render levels together) by one and report the new level.
    bl_idname =i_0[119]
    bl_label =i_0[120]
    bl_description =i_0[121]
    bl_options = {'REGISTER', 'UNDO'}
    def execute(self, context):
        ob = bpy.context.active_object
        multires = len([mod for mod in ob.modifiers if mod.type == 'MULTIRES'])
        if multires:
            val = bpy.context.object.modifiers["Multires"].levels
            bpy.context.object.modifiers["Multires"].levels -= 1
            bpy.context.object.modifiers["Multires"].sculpt_levels -= 1
            bpy.context.object.modifiers["Multires"].render_levels -= 1
            val2 = bpy.context.object.modifiers["Multires"].sculpt_levels
            bpy.context.area.tag_redraw()
            response = "Subdivision levels: " + str(val2)
            self.report({'INFO'}, response)
        return {'FINISHED'}
class cl38(bpy.types.Operator):
    # Operator: rebuild the Multires level stack. Deletes all levels above
    # the base, then re-subdivides back to the previous level count --
    # sculpted detail stored on the higher levels is discarded.
    bl_idname =i_0[122]
    bl_label =i_0[123]
    bl_description =i_0[124]
    bl_options = {'REGISTER', 'UNDO'}
    def execute(self, context):
        ob = bpy.context.active_object
        multires = len([mod for mod in ob.modifiers if mod.type == 'MULTIRES'])
        if multires:
            lastLevels = ob.modifiers["Multires"].levels
            ob.modifiers["Multires"].sculpt_levels = 0
            # With sculpt_levels at 0 this deletes every subdivision level.
            bpy.ops.object.multires_higher_levels_delete(modifier="Multires")
            for x in range(lastLevels):
                bpy.ops.object.multires_subdivide(modifier="Multires")
            bpy.context.object.modifiers["Multires"].levels = lastLevels
            bpy.context.object.modifiers["Multires"].sculpt_levels = lastLevels
            bpy.context.object.modifiers["Multires"].render_levels = lastLevels
            bpy.context.area.tag_redraw()
        return {'FINISHED'}
class cl39(bpy.types.Operator):
    # Operator: "freeze" the sculpt into real geometry. Applies Multires
    # (or optimizes dyntopo), merges duplicate vertices, and removes a
    # particle system (presumably leftover strands -- verify against
    # callers), then returns to Sculpt mode.
    bl_idname =i_0[125]
    bl_label =i_0[126]
    bl_description =i_0[127]
    bl_options = {'REGISTER', 'UNDO'}
    def execute(self, context):
        ob = bpy.context.active_object
        multires = len([mod for mod in ob.modifiers if mod.type == 'MULTIRES'])
        dynamic = bpy.context.active_object.use_dynamic_topology_sculpting
        if multires:
            bpy.ops.object.mode_set(mode='OBJECT')
            try:
                bpy.ops.object.modifier_apply(apply_as='DATA', modifier="Multires")
            except:
                pass
            bpy.ops.object.mode_set(mode='EDIT')
            bpy.ops.mesh.select_all(action='SELECT')
            bpy.ops.mesh.remove_doubles()
            bpy.ops.object.mode_set(mode='OBJECT')
            bpy.ops.object.particle_system_remove()
            bpy.ops.object.mode_set(mode='SCULPT')
        if dynamic:
            bpy.ops.sculpt.optimize()
            bpy.ops.object.mode_set(mode='EDIT')
            bpy.ops.mesh.select_all(action='SELECT')
            bpy.ops.mesh.remove_doubles()
            bpy.ops.object.mode_set(mode='OBJECT')
            bpy.ops.object.particle_system_remove()
            bpy.ops.object.mode_set(mode='SCULPT')
        self.report({'INFO'}, "Recommend retopology as next step in design")
        return {'FINISHED'}
class cl40(bpy.types.Operator):
    # Operator: bake Multires sculpt detail to a texture layer (normals by
    # default; displacement/derivative follow scene.render.bake_type).
    # Relies on file-level helpers fu18/fu21/fu1 (exact semantics defined
    # elsewhere in this file).
    bl_idname =i_0[128]
    bl_label =i_0[129]
    bl_description =i_0[130]
    bl_options = {'REGISTER', 'UNDO'}
    def execute(self, context):
        wm = bpy.context.window_manager
        scene = bpy.context.scene
        sd = bpy.context.space_data
        mode = bpy.context.mode   # remembered so fu1() can restore it
        ob = bpy.context.active_object
        bt = scene.render.bake_type
        bpy.ops.object.mode_set(mode='OBJECT')
        # Isolate the active object: baking targets the sole selection.
        for obj in bpy.context.selected_objects:
            if ob == obj:
                obj.select = True
            else:
                obj.select = False
        # Temporarily disable "fast mode" during the bake; restored at end.
        fastModeState = False
        if scene.zbFastMode:
            fastModeState = True
            scene.zbFastMode = False
            fu18()
        bpy.ops.object.mode_set(mode='OBJECT')
        # Apply every modifier except Multires/Subsurf (needed for baking).
        modsToAvoid = ['MULTIRES','SUBSURF']
        multires = 0
        for mod in ob.modifiers:
            if mod.type == 'MULTIRES':
                multires = mod
            if mod.type not in modsToAvoid:
                try:
                    bpy.ops.object.modifier_apply(modifier=mod.name,
                        apply_as='DATA')
                except:
                    pass
        if multires == 0:
            try:
                multires = ob.modifiers.new(name='Multires',type='MULTIRES')
            except:
                pass
        if multires == 0:
            print('Multires modifier could not be added to',ob.name)
            print('Aborted bake')
        else:
            # Normal bake needs a target normal-map layer on the material.
            if bt != 'DISPLACEMENT' and bt != 'DERIVATIVE':
                if not ob.active_material:
                    bpy.ops.object.zb_paint_color()
                bpy.ops.object.zb_paint_normal()
            mat = ob.active_material
            tex = mat.active_texture
            if bt != 'DISPLACEMENT' and bt != 'DERIVATIVE':
                # Push the bake layer to the top of the texture stack
                # (16 moves is enough for any stack).
                for i in range(16):
                    bpy.ops.object.zb_move_texture(tex_move_up=1)
                tex.use_normal_map = True
                mat.texture_slots[tex.name].normal_factor = 5
            if bt == 'DERIVATIVE':
                mat.texture_slots[tex.name].normal_factor = .4
                mat.texture_slots[tex.name].use_map_color_diffuse = False
            fu18()
            # Bake from roughly half the subdivision level (min 1) for speed.
            levels = multires.levels
            reduced = int(levels/2)
            if reduced < 1:
                reduced = 1
            multires.levels = reduced
            scene.render.bake_margin = 25
            if bt != 'DISPLACEMENT' and bt != 'DERIVATIVE':
                scene.render.bake_type = 'NORMALS'
            scene.render.use_bake_multires = True
            scene.render.use_bake_selected_to_active = False
            bpy.ops.object.bake_image()
            if bt != 'DISPLACEMENT' and bt != 'DERIVATIVE':
                fu21(mat)
            try:
                multires.levels = levels
            except:
                pass
            fu1(mode)
        if fastModeState:
            scene.zbFastMode = True
        return {'FINISHED'}
class cl41(bpy.types.Operator):
    # Operator: hair/strand particle-system management, dispatched on the
    # string property `option`:
    #   'NEW'                -- add and configure a hair particle system
    #   'COPY'               -- duplicate the active particle system
    #   'CHANGE_COLOR'       -- sync material color <-> Hair BSDF node color
    #   'ADD_BASIC_MATERIAL' -- create a default hair material (BI + Cycles)
    bl_idname =i_0[131]
    bl_label =i_0[132]
    bl_description =i_0[133]
    bl_options = {'REGISTER', 'UNDO'}
    option = bpy.props.StringProperty()
    def execute(self, context):
        option = self.option
        scene = bpy.context.scene
        ob = bpy.context.active_object
        if option == 'NEW':
            # Name the system after the object plus a running index.
            par = str(len(ob.particle_systems))
            parSys = ob.name + "Strands" + par
            bpy.ops.object.particle_system_add()
            activeSys = ob.particle_systems.active
            activeSys.settings.name = parSys
            set = activeSys.settings
            set.type = 'HAIR'
            set.hair_length = 0.15
            set.count = 0   # start empty; strands are added interactively
            set.adaptive_angle = 3
            set.use_strand_primitive = True
            set.use_hair_bspline = False
            set.render_step = 6
            set.draw_step = 4
            set.cycles.root_width = 0.25
            set.child_type = 'SIMPLE'
            bpy.ops.object.mode_set(mode='PARTICLE_EDIT')
            set.child_nbr = 65
            set.rendered_child_count = 65
            set.child_length = 1
            set.child_radius = 0.10
            set.roughness_2 = 0.01
            set.draw_percentage = 5
            pe = scene.tool_settings.particle_edit
            pe.show_particles = True
            pe.draw_step = 3
            pe.tool = 'ADD'
            pe.brush.size = 30
            pe.brush.count = 3
            # Toggle hair dynamics on to initialize cloth settings, configure
            # them, then turn dynamics back off (presumably so the user opts
            # in later -- verify against the UI).
            activeSys.use_hair_dynamics = True
            cloth = ob.particle_systems.active.cloth
            settings = cloth.settings
            settings.pin_stiffness = 4
            settings.mass = 0.25
            settings.bending_stiffness = 0.125
            settings.bending_damping = 0.125
            settings.internal_friction = 0
            settings.air_damping = 0.75
            settings.quality = 4
            activeSys.use_hair_dynamics = False
            pe.draw_step = 4
            # Clamp runaway brush strengths on all particle-edit tools.
            lastBrush = pe.tool
            bItems = ['COMB','SMOOTH','LENGTH','PUFF','CUT','WEIGHT']
            for item in bItems:
                pe.tool = item
                if pe.brush.strength > 5:
                    pe.brush.strength = 0.75
            pe.tool = lastBrush
        if option == 'COPY':
            renEn = scene.render.engine
            ob = bpy.context.active_object
            parSys = ob.particle_systems.active
            set = parSys.settings
            lastSetName = set.name
            lastSysName = parSys.name
            copyPar = bpy.data.particles[set.name].copy()
            copyPar.name = lastSetName + '_COPY'
            bpy.ops.object.particle_system_add()
            activeSys = ob.particle_systems.active
            activeSys.name = lastSysName + '_COPY'
            activeSys.settings = copyPar
            pe = scene.tool_settings.particle_edit
            pe.tool = 'ADD'
            # Purge orphaned particle settings left by the add/copy dance.
            for sys in bpy.data.particles:
                if sys.users == 0:
                    try:
                        bpy.data.particles.remove(sys, do_unlink = True)
                    except:
                        # Older Blender: remove() has no do_unlink argument.
                        bpy.data.particles.remove(sys)
        if option == 'CHANGE_COLOR':
            renEn = scene.render.engine
            ob = bpy.context.active_object
            parSys = ob.particle_systems.active
            set = parSys.settings
            matName = set.material_slot
            mat = bpy.data.materials[matName]
            nodes = mat.node_tree.nodes
            if nodes:
                if 'Hair BSDF' in nodes:
                    hairNode = nodes['Hair BSDF']
                    if renEn == 'CYCLES':
                        # Cycles -> BI: copy node color onto the material.
                        mCol = hairNode.inputs['Color'].default_value
                        mat.diffuse_color = mCol[0], mCol[1], mCol[2]
                        mat.use_nodes = True
                    else:
                        # BI -> Cycles: copy material color onto the node.
                        mCol = mat.diffuse_color
                        hairNode.inputs['Color'].default_value = mCol[0],mCol[1],mCol[2], 1
            # Mode round-trip forces the viewport to refresh the strands.
            bpy.ops.object.mode_set(mode='OBJECT')
            bpy.ops.object.mode_set(mode='PARTICLE_EDIT')
        if option == 'ADD_BASIC_MATERIAL':
            renEn = scene.render.engine
            ob = bpy.context.active_object
            parSys = ob.particle_systems.active
            set = parSys.settings
            baseColor = (0.344312, 0.202204, 0.0661491)   # default brown
            # Blender Internal side of the material.
            newMat = bpy.data.materials.new(name = ob.name + 'Hair')
            newMat.diffuse_color = baseColor
            newMat.strand.root_size = 0.5
            newMat.strand.tip_size = 0.25
            newMat.strand.use_tangent_shading = True
            newMat.translucency = 0.1
            newMat.use_transparency = True
            newMat.alpha = 0.595
            rm = newMat.raytrace_mirror
            rm.use = True
            rm.reflect_factor = 0.5
            rm.fresnel = 4.25
            rm.fresnel_factor = 4.5
            rm.depth = 1
            # Cycles side: minimal Hair BSDF -> Material Output node tree.
            mat = newMat
            newMat.use_nodes = True
            mat.node_tree.nodes.clear()
            nodeOutput = mat.node_tree.nodes.new(type = 'ShaderNodeOutputMaterial')
            nodeOutput.location = (200,0)
            nodeHair = mat.node_tree.nodes.new(type = 'ShaderNodeBsdfHair')
            nodeHair.inputs['Color'].default_value = baseColor[0],baseColor[1],baseColor[2], 1
            links = mat.node_tree.links
            links.new(nodeHair.outputs['BSDF'], nodeOutput.inputs['Surface'])
            if renEn == 'CYCLES':
                newMat.use_nodes = True
            else:
                newMat.use_nodes = False
            if len(ob.data.materials) == 0:
                if len(ob.material_slots) == 0:
                    bpy.ops.object.material_slot_add()
            if newMat.name not in ob.data.materials:
                ob.data.materials.append(newMat)
            set.material_slot = newMat.name
            bpy.ops.object.mode_set(mode='OBJECT')
            bpy.ops.object.mode_set(mode='PARTICLE_EDIT')
        return {'FINISHED'}
def zbAutoGPURender(scene,context):
    # Update handler for the scene.zbAutoGPURender toggle: when enabled,
    # tries to select a Cycles compute device type/device and switch the
    # scene to GPU rendering; when disabled, falls back to CPU.
    scene = bpy.context.scene
    userPref = bpy.context.user_preferences
    system = userPref.system
    props = system.bl_rna.properties
    availableTypes = props['compute_device_type'].enum_items.keys()
    if scene.zbAutoGPURender:
        if system.compute_device_type == 'NONE' or scene.cycles.device == 'CPU':
            if len(availableTypes) > 1:
                deviceType = 'NONE'
                # NOTE(review): the loop variable shadows the sentinel, so
                # the second `if` fires on the first non-'NONE' entry --
                # CUDA is only truly preferred if it comes first. Looks
                # like the intent was "prefer CUDA, else first available";
                # confirm before changing.
                for deviceType in availableTypes:
                    if 'CUDA' in deviceType:
                        system.compute_device_type = deviceType
                        break
                    if deviceType != 'NONE':
                        system.compute_device_type = deviceType
                        break
                if deviceType != 'NONE':
                    availableDevices = props['compute_device'].enum_items.keys()
                    # Split devices into multi-GPU entries and single ones.
                    multi_device = []
                    normal_device = []
                    for device in availableDevices:
                        if 'MULTI' in device:
                            multi_device.append(device)
                        else:
                            normal_device.append(device)
                    if multi_device:
                        multi = []
                        for d in multi_device:
                            if d.isdigit():
                                multi.append(int(d))
                        if multi:
                            maxMulti = max(multi)
                            for device in multi_device:
                                if str(maxMulti) in device:
                                    system.compute_device = device
                                    break
                        else:
                            system.compute_device = multi_device[0]
                    # NOTE(review): this block runs even when a multi device
                    # was just chosen, overwriting it -- confirm intended.
                    if normal_device:
                        normal = []
                        for d in normal_device:
                            if d.isdigit():
                                normal.append(int(d))
                        if normal:
                            maxNormal = max(normal)
                            for device in normal_device:
                                if str(maxNormal) in device:
                                    system.compute_device = device
                                    break
                        else:
                            system.compute_device = normal_device[0]
                    scene.cycles.device = 'GPU'
    else:
        scene.cycles.device = 'CPU'
def fu26(self,context):
    # Update handler for the scene.zbGoCycles toggle: switches the render
    # engine between Cycles and Blender Internal and (optionally) converts
    # existing paint-layer materials and lamps to match the new engine.
    # Relies on file-level helpers fu0/fu1/fu11 and the zb_paint_* operators.
    wm = bpy.context.window_manager
    scene = bpy.context.scene
    re = scene.render.engine
    mode = bpy.context.mode   # remembered so fu1() can restore it at the end
    try:
        bpy.ops.object.mode_set(mode='OBJECT')
        ob = bpy.context.active_object
        mat = ob.active_material
        aTex = mat.active_texture.name   # active texture, restored later
    except:
        pass
    if scene.zbGoCycles:
        scene.render.engine = 'CYCLES'
        if bpy.context.screen.name in {'Hacker', 'Blender', 'Shader'}:
            world = scene.world
            world.light_settings.ao_factor = 0
            try:
                bpy.data.worlds['ZB Quick Lights'].ao_factor = 1
            except:
                pass
            if bpy.context.screen.name == 'Shader':
                for area in bpy.context.screen.areas:
                    if area.type == 'VIEW_3D':
                        area.spaces.active.viewport_shade = 'MATERIAL'
        if scene.zbAutoConvertLamps:
            for lamp in bpy.data.lamps:
                lamp.use_nodes = True
        else:
            # BI helper lamps are meaningless in Cycles; hide them.
            try:
                bpy.data.objects['-Scene Light'].hide_render = True
                bpy.data.objects['-Shadows'].hide_render = True
            except:
                pass
        if scene.zbAutoConvertCycles:
            # Remember selection/active object; conversion has to make each
            # mesh the sole selected, active object in turn.
            try:
                sel = bpy.context.selected_objects
                activeOb = bpy.context.active_object
            except:
                sel = 0
                activeOb = 0
            for ob in bpy.data.objects:
                try:
                    if ob.type == 'MESH':
                        if ob.hide == False:
                            if 'sf_material_library' not in ob.name:
                                for obj in bpy.context.selected_objects:
                                    obj.select = False
                                ob.select = True
                                bpy.context.scene.objects.active = ob
                                if ob.active_material:
                                    mat = ob.active_material
                                    # Collect UV-mapped image texture slots
                                    # (the BI paint layers to convert).
                                    oldMatTs = []
                                    oldMatTextures = []
                                    for ts in mat.texture_slots:
                                        try:
                                            if ts is not None:
                                                if ts.texture_coords == 'UV':
                                                    if ts.texture is not None:
                                                        if ts.texture.image:
                                                            oldMatTs.append(ts)
                                                            oldMatTextures.append(ts.texture)
                                        except:
                                            pass
                                    if len(oldMatTs) > 0:
                                        # Does the material already have image
                                        # texture nodes usable for painting?
                                        texNodes = False
                                        try:
                                            if mat.node_tree is not None:
                                                for node in mat.node_tree.nodes:
                                                    if 'TEX_IMAGE' in node.type:
                                                        texNodes = True
                                                        break
                                                if texNodes == False:
                                                    print('Found nodes, but none that work for painting')
                                                    mat.node_tree.nodes.clear()
                                        except:
                                            pass
                                        if texNodes == False:
                                            print('Found paint layers but no Cycles support')
                                            # Rebuild each BI layer as the
                                            # matching ZB Cycles layer, keyed
                                            # off the slot's influence flags.
                                            for ts in oldMatTs:
                                                doOnce = True
                                                oldImage = bpy.data.images[ts.texture.image.name]
                                                oldTexName = ts.texture.name
                                                if ts.use_map_color_diffuse:
                                                    if ts.use_map_alpha == False:
                                                        if ts.use_map_emit == False:
                                                            doOnce = False
                                                            if 'color' not in oldImage.name.lower():
                                                                oldImage.name += 'Color'
                                                            bpy.ops.object.zb_paint_color()
                                                            mat.active_texture.image = oldImage
                                                            mat.node_tree.nodes['Image Texture zbColor'].image = oldImage
                                                if doOnce:
                                                    if ts.use_map_normal:
                                                        if ts.use_map_color_diffuse == False:
                                                            doOnce = False
                                                            if 'bump' not in oldImage.name.lower():
                                                                oldImage.name += 'Bump'
                                                            bpy.ops.object.zb_paint_bump()
                                                            mat.active_texture.image = oldImage
                                                            mat.node_tree.nodes['Image Texture zbBump'].image = oldImage
                                                if doOnce:
                                                    if ts.use_map_color_spec:
                                                        doOnce = False
                                                        if 'specular' not in oldImage.name.lower():
                                                            oldImage.name += 'Specular'
                                                        bpy.ops.object.zb_paint_specular()
                                                        mat.active_texture.image = oldImage
                                                        mat.node_tree.nodes['Image Texture zbSpecular'].image = oldImage
                                                if doOnce:
                                                    if ts.use_map_emit:
                                                        doOnce = False
                                                        if 'glow' not in oldImage.name.lower():
                                                            oldImage.name += 'Glow'
                                                        bpy.ops.object.zb_paint_glow()
                                                        mat.active_texture.image = oldImage
                                                        mat.node_tree.nodes['Image Texture zbGlow'].image = oldImage
                                                if doOnce:
                                                    if ts.use_map_alpha:
                                                        doOnce = False
                                                        if 'mask' in ts.texture.name.lower():
                                                            if 'alpha_mask' not in oldImage.name.lower():
                                                                oldImage.name += 'Alpha_Mask'
                                                            bpy.ops.object.zb_alpha_mask()
                                                            mat.node_tree.nodes['Image Texture zbAlpha_Mask'].image = oldImage
                                                        else:
                                                            if 'transparent' not in oldImage.name.lower():
                                                                oldImage.name += 'Transparent'
                                                            bpy.ops.object.zb_paint_transparent()
                                                            mat.node_tree.nodes['Image Texture zbColor'].image = oldImage
                                                        mat.active_texture.image = oldImage
                                                if doOnce == False:
                                                    # Slot was converted: detach
                                                    # the old BI texture.
                                                    ts.texture = None
                                                    mat.active_texture.name = oldTexName
                                            # Restore the originally active
                                            # texture slot by index.
                                            newSlot = 0
                                            for ts in mat.texture_slots:
                                                try:
                                                    if ts is not None:
                                                        if ts.texture == bpy.data.textures[aTex]:
                                                            break
                                                        newSlot += 1
                                                except:
                                                    pass
                                            mat.active_texture_index = newSlot
                                            fu0()
                                            bpy.ops.object.mode_set(mode='OBJECT')
                except:
                    pass
            # Restore the user's selection and active object.
            if sel:
                for obj in bpy.context.selected_objects:
                    obj.select = False
                for ob in sel:
                    ob.select = True
            if activeOb:
                activeOb.select = True
                bpy.context.scene.objects.active = activeOb
            fu1(mode)
        try:
            if scene.zbQuickLights:
                scene.world = bpy.data.worlds["ZB Quick Lights"]
        except:
            pass
        # Enable nodes on every material that has a Cycles output node.
        for mat in bpy.data.materials:
            try:
                nodes = mat.node_tree.nodes
                for node in nodes:
                    if node.type == 'OUTPUT_MATERIAL':
                        mat.use_nodes = True
                        break
            except:
                pass
        for world in bpy.data.worlds:
            world.use_nodes = True
    else:
        scene.render.engine = 'BLENDER_RENDER'
        if bpy.context.screen.name in {'Hacker', 'Blender', 'Shader'}:
            world = scene.world
            world.light_settings.ao_factor = 1
            try:
                bpy.data.worlds['ZB Quick Lights'].ao_factor = 1
            except:
                pass
            if bpy.context.screen.name == 'Shader':
                for area in bpy.context.screen.areas:
                    if area.type == 'VIEW_3D':
                        area.spaces.active.viewport_shade = 'MATERIAL'
        if scene.zbQuickLights == True:
            try:
                lastWorld = scene.zbLastWorld
                scene.world = bpy.data.worlds[lastWorld]
            except:
                pass
        # Disable nodes on materials that carry image paint layers so the
        # BI texture stack is used again.
        for mat in bpy.data.materials:
            if hasattr(mat.node_tree,'nodes'):
                if 'Material Output' in mat.node_tree.nodes:
                    for ts in mat.texture_slots:
                        if hasattr(ts,'texture'):
                            if ts.texture:
                                if hasattr(ts.texture,'image'):
                                    mat.use_nodes = False
                                    break
        try:
            scnLight = bpy.data.objects['-Scene Light']
            scnLight.hide_render = False
            shadows = bpy.data.objects['-Shadows']
            shadows.hide_render = False
        except:
            pass
        scene.update()
        # Re-apply camera focus / mist settings after the engine switch.
        try:
            if len(scene.sfCFocus) > 0:
                for ob in bpy.data.objects:
                    if ob.type == 'CAMERA':
                        cam = ob
                        break
                scene.sfCFocus = cam.data.dof_object.name
                world = scene.world
                if world.mist_settings.use_mist:
                    bpy.ops.object.sf_cam_options(func="mist")
        except:
            pass
        scene.update()
    fu1(mode)
    fu11()
class cl42(bpy.types.Operator):
    """Begin a sculpt stroke: NORMAL mode when the brush uses line strokes,
    SMOOTH mode otherwise. Failures (e.g. no active brush) are ignored."""
    bl_idname =i_0[134]
    bl_label =i_0[135]
    def execute(self, context):
        active_brush = bpy.context.tool_settings.sculpt.brush
        try:
            stroke_mode = 'NORMAL' if active_brush.stroke_method == 'LINE' else 'SMOOTH'
            bpy.ops.sculpt.brush_stroke('INVOKE_DEFAULT', mode=stroke_mode)
        except:
            pass
        return {'FINISHED'}
class cl43(bpy.types.Operator):
    # Operator: inverted sculpt stroke. For LINE strokes the brush direction
    # is flipped for the duration of the stroke and restored on mouse
    # release (via the modal handler); for other stroke methods the stroke
    # is simply invoked with mode='INVERT'.
    bl_idname =i_0[136]
    bl_label =i_0[137]
    def modal(self, context, event):
        if event.value == 'RELEASE':
            # Stroke finished: restore the brush's additive direction.
            brush = bpy.context.tool_settings.sculpt.brush
            try:
                brush.direction = 'ADD'
            except:
                # Some brushes use INFLATE/DEFLATE instead of ADD/SUBTRACT.
                try:
                    brush.direction = 'INFLATE'
                except:
                    pass
                pass
            return {'FINISHED'}
        return {'RUNNING_MODAL'}
    def invoke(self, context, event):
        brush = bpy.context.tool_settings.sculpt.brush
        if brush.stroke_method == 'LINE':
            # LINE strokes ignore mode='INVERT', so flip the brush direction
            # instead and watch for release in modal() to flip it back.
            context.window_manager.modal_handler_add(self)
            try:
                brush.direction = 'SUBTRACT'
            except:
                try:
                    brush.direction = 'DEFLATE'
                except:
                    pass
                pass
            bpy.ops.sculpt.brush_stroke('INVOKE_DEFAULT')
        else:
            bpy.ops.sculpt.brush_stroke('INVOKE_DEFAULT',
                mode='INVERT')
            return {'FINISHED'}
        return {'RUNNING_MODAL'}
class cl44(bpy.types.Operator):
    # Operator: stroke-shape buttons (SPACE/CURVE/LINE/ANCHORED). Sets the
    # active paint/sculpt/weight/vertex brush's stroke_method, with special
    # handling for gradient fills and texture (decal) brushes.
    bl_idname =i_0[138]
    bl_label =i_0[139]
    bl_description =i_0[140]
    shapeTool = bpy.props.StringProperty()
    lastBrush = bpy.props.StringProperty()
    def execute(self,context):
        aType = bpy.context.area.type
        sd = bpy.context.space_data
        paint = bpy.context.mode.startswith('PAINT_TEXTURE')
        weight = bpy.context.mode.startswith('PAINT_WEIGHT')
        vertex = bpy.context.mode.startswith('PAINT_VERTEX')
        sculpt = bpy.context.mode.startswith('SCULPT')
        if paint or aType == 'IMAGE_EDITOR':
            type = bpy.context.tool_settings.image_paint
            try:
                brush = bpy.context.tool_settings.image_paint.brush
                # Non-mask painting needs material shading to see the result.
                if brush.image_tool != 'MASK':
                    if sd.viewport_shade == 'SOLID':
                        sd.viewport_shade = 'MATERIAL'
            except:
                pass
        if sculpt:
            type = bpy.context.tool_settings.sculpt
        if weight:
            type = bpy.context.tool_settings.weight_paint
        if vertex:
            type = bpy.context.tool_settings.vertex_paint
        if paint or aType == 'IMAGE_EDITOR':
            try:
                # Gradient mode: map the shape buttons onto Draw/Fill brushes
                # with the matching gradient fill mode. ANCHORED doubles as
                # "radial gradient" here (remapped to LINE).
                if brush.use_gradient:
                    if type.brush.name == 'Fill':
                        if self.shapeTool == 'SPACE':
                            type.brush = bpy.data.brushes['Draw']
                            type.brush.use_gradient = True
                    if self.shapeTool == 'LINE':
                        type.brush = bpy.data.brushes['Fill']
                        type.brush.use_gradient = True
                        type.brush.gradient_fill_mode = 'LINEAR'
                    if self.shapeTool == 'ANCHORED':
                        self.shapeTool = 'LINE'
                        type.brush = bpy.data.brushes['Fill']
                        type.brush.use_gradient = True
                        type.brush.gradient_fill_mode = 'RADIAL'
            except:
                pass
        # Freehand strokes get extra input smoothing.
        if self.shapeTool == 'SPACE':
            type.input_samples = 3
        else:
            type.input_samples = 1
        if type.brush.texture:
            # Textured brushes: stamp instead of flow; decals drag-stamp.
            if self.shapeTool == 'SPACE':
                self.shapeTool = 'DOTS'
            if 'Decal' in type.brush.name:
                if self.shapeTool == 'DOTS':
                    self.shapeTool = 'DRAG_DOT'
        type.brush.stroke_method = self.shapeTool
        return{'FINISHED'}
class cl45(bpy.types.Operator):
    """Reset the active stencil texture: restore its transform and image
    aspect, then center it in the current 3D view area at unit scale."""
    bl_idname =i_0[141]
    bl_label =i_0[142]
    bl_description =i_0[143]
    def execute(self, context):
        bpy.ops.brush.stencil_reset_transform()
        bpy.ops.brush.stencil_fit_image_aspect()
        view_area = bpy.context.area
        center_x = view_area.width / 2
        center_y = view_area.height / 2
        try:
            if bpy.context.sculpt_object:
                active_name = bpy.context.tool_settings.sculpt.brush.name
            else:
                active_name = bpy.context.tool_settings.image_paint.brush.name
            stencil_brush = bpy.data.brushes[active_name]
            stencil_brush.stencil_pos.xy = center_x, center_y
            stencil_brush.texture_slot.scale.xyz = 1
        except:
            pass
        return {'FINISHED'}
class cl46(bpy.types.Operator):
    # Operator: copy the texture stencil's placement (position, dimension,
    # offset, scale, angle) onto the mask stencil so both line up.
    bl_idname =i_0[144]
    bl_label =i_0[145]
    bl_description = "Sync position, angle and scale of\
    texture brush and texture mask stencils"
    def execute(self,context):
        brush = bpy.context.tool_settings.image_paint.brush
        slot = brush.texture_slot
        mask = brush.mask_texture_slot
        mask.mask_map_mode = 'STENCIL'
        brush.mask_stencil_pos = brush.stencil_pos
        brush.mask_stencil_dimension = brush.stencil_dimension
        mask.offset.xyz = slot.offset.xyz
        mask.scale.xyz = slot.scale.xyz
        mask.angle = slot.angle
        return{'FINISHED'}
def zbGradientSwitch(self,context):
    # Update handler for the zbGradientSwitch toggle: switching gradient
    # painting on remembers the current brush and swaps to 'Fill';
    # switching it off restores the remembered brush (unless 'Draw' is
    # already active) and disables gradient mode.
    wm = bpy.context.window_manager
    ts = bpy.context.tool_settings
    paint = ts.image_paint
    if self.zbGradientSwitch:
        try:
            wm.zbBeforeGradBrush = paint.brush.name   # remember for restore
            paint.brush = bpy.data.brushes['Fill']
        except:
            pass
        paint.brush.use_gradient = True
    else:
        try:
            if paint.brush.name != 'Draw':
                paint.brush = bpy.data.brushes[wm.zbBeforeGradBrush]
        except:
            pass
        paint.brush.use_gradient = False
def fu27(panel, context, layout, brush, settings, projpaint=False):
    # UI builder for the paint brush panel: radius/strength/hardness rows,
    # per-tool options (soften/clone/fill/mask), stroke-shape buttons,
    # mirror toggles, color pickers/palettes and (optionally) extended
    # brush options. `panel` supplies the prop_unified_* helpers.
    capabilities = brush.image_paint_capabilities
    sd = bpy.context.space_data
    try:
        shade = sd.viewport_shade
    except:
        # NOTE(review): if this fails (e.g. image editor), `shade` stays
        # unbound and the MASK branch below can raise NameError unless
        # wm.zbViewMaskMode short-circuits -- confirm and consider a default.
        pass
    wm = bpy.context.window_manager
    scene = bpy.context.scene
    aType = bpy.context.area.type
    toolsettings = context.tool_settings
    ipaint = toolsettings.image_paint
    col = layout.column(align=True)
    # Radius row ('object.sf_tn' draws a blank spacer button).
    if capabilities.has_radius:
        row = col.row(align=True)
        row.operator('object.sf_tn', text='',icon='BLANK1')
        panel.prop_unified_size(row, context, brush, "size", slider=True, text="Radius")
        panel.prop_unified_size(row, context, brush, "use_pressure_size")
    # Strength row.
    row = col.row(align=True)
    if capabilities.has_space_attenuation:
        row.prop(brush, "use_space_attenuation", toggle=True, icon_only=True)
    else:
        row.operator('object.sf_tn', text='',icon='BLANK1')
    panel.prop_unified_strength(row, context, brush, "strength", text="Strength")
    panel.prop_unified_strength(row, context, brush, "use_pressure_strength")
    # Hardness row.
    row = col.row(align=True)
    row.operator('object.sf_tn', text='',icon='BLANK1')
    row.prop(brush, "zb_hardness", text='Hardness')
    row.operator('object.sf_tn', text='',icon='BLANK1')
    if brush.image_tool == 'SOFTEN':
        col.separator()
        col = layout.column(align=True)
        col.row(align=True).prop(brush, "direction", expand=True)
        col.prop(brush, "sharp_threshold")
        col.prop(brush, "blur_kernel_radius")
    if brush.image_tool == 'CLONE':
        col.separator()
        layout.column(align=True)
        col.prop(settings, "use_clone_layer", text="Clone from paint slot", icon='BRUSH_CLONE')
        if projpaint:
            if settings.use_clone_layer:
                ob = context.active_object
                if len(ob.material_slots) > 1:
                    col.label("Materials")
                    col.template_list("MATERIAL_UL_matslots", "",
                        ob, "material_slots",
                        ob, "active_material_index", rows=2)
                mat = ob.active_material
                if mat:
                    col.template_list("TEXTURE_UL_texpaintslots", "",
                        mat, "texture_paint_images",
                        mat, "paint_clone_slot", rows=2)
        else:
            col.prop(brush, "clone_image", text="Image")
            col.prop(brush, "clone_alpha", text="Alpha")
    if brush.image_tool == 'FILL':
        row = col.row(align=True)
        row.operator('object.sf_tn', text='',icon='BLANK1')
        row.prop(brush, "fill_threshold", text='Threshold', slider=True)
        row.operator('object.sf_tn', text='',icon='BLANK1')
    # Rebind `brush` to the current mode's active brush for the rows below.
    if bpy.context.mode == 'PAINT_VERTEX':
        brush = bpy.context.tool_settings.vertex_paint.brush
    else:
        brush = bpy.context.tool_settings.image_paint.brush
    # Stroke-shape buttons (dispatch to the zb_stroke_buttons operator).
    row = col.separator()
    row = col.row(align=True)
    row.label("Brush Stroke")
    row.operator("object.zb_stroke_buttons",
        text ="", icon='MOD_DYNAMICPAINT').shapeTool = 'SPACE'
    row.operator("object.zb_stroke_buttons", text='',
        icon='MOD_CURVE').shapeTool = 'CURVE'
    row.operator("object.zb_stroke_buttons",
        text ="", icon='IPO_LINEAR').shapeTool = 'LINE'
    if not bpy.context.mode.startswith('PAINT_VERTEX'):
        row.operator("object.zb_stroke_buttons",
            text ="", icon='PROP_ON').shapeTool = 'ANCHORED'
    if brush.stroke_method == 'LINE':
        row = col.row(align=True)
        row.label('(Hold alt to constrain line)')
    if brush.stroke_method == 'CURVE':
        row = col.separator()
        row = col.row(align=True)
        row.template_ID(brush, "paint_curve", new="paintcurve.new")
        if brush.paint_curve:
            row = col.separator()
            row = col.row(align=True)
            row.label("(Ctrl-Mouse)")
            row.operator("paintcurve.draw", text="Draw Curve")
        else:
            row = col.row(align=True)
            row.label("(Ctrl-click to make new curve)")
        row = col.row(align=True)
    # Mirror toggles (3D view only, not the image editor).
    if not bpy.context.mode.startswith('PAINT_VERTEX'):
        if aType != 'IMAGE_EDITOR':
            try:
                toolsettings = context.tool_settings
                ipaint = toolsettings.image_paint
                row = col.separator()
                row = col.row(align=True)
                row.scale_y = 0.8
                row.label("Mirror")
                row.separator()
                row.prop(ipaint, "use_symmetry_x", text="X", toggle=True)
                row.prop(ipaint, "use_symmetry_y", text="Y", toggle=True)
                row.prop(ipaint, "use_symmetry_z", text="Z", toggle=True)
            except:
                pass
    row = col.separator()
    if brush.image_tool == 'MASK':
        col.prop(brush, "weight", text="Mask Value", slider=True)
        row = col.separator()
        row = col.row(align=True)
        row.prop(ipaint, "invert_stencil", text="Invert", icon='IMAGE_ALPHA')
        if wm.zbViewMaskMode or shade == 'SOLID':
            maskIcon = 'RESTRICT_VIEW_OFF'
        else:
            maskIcon = 'RESTRICT_VIEW_ON'
        row.prop(wm,"zbViewMaskMode", text='View', icon= maskIcon)
        row = col.row(align=True)
        set = scene.tool_settings.image_paint
        row.prop(set, 'use_stencil_layer', text="", icon = 'CANCEL')
        row.operator('image.new', text = 'Reset Mask').gen_context = 'PAINT_STENCIL'
        row = col.separator()
        row = col.row(align=True)
    # Color / gradient / palette box for draw and fill tools.
    if brush.image_tool in {'DRAW', 'FILL'}:
        if bpy.context.mode.startswith('PAINT_VERTEX'):
            brush = bpy.context.tool_settings.vertex_paint.brush
        col.separator()
        box = col.box()
        if not brush.use_gradient:
            panel.prop_unified_color_picker(box, context, brush, "color", value_slider=True)
        if brush.use_gradient:
            box.template_color_ramp(brush, "gradient", expand=True)
        if settings.palette:
            sub = box.row(True)
            sub = box.row(True)
            sub.template_palette(settings, "palette", color=True)
        if brush.image_tool == 'DRAW':
            sub = box.row(True)
            sub.prop(wm, "zbHidePaintOptions", text = '', icon = 'COLLAPSEMENU')
            sub = sub.row(align=True)
            sub.scale_x = 0.05
            sub.prop(brush,'color',text='')
            sub = sub.row(align=True)
            sub.scale_x =10
            sub.prop(brush, "gradient_stroke_mode",text="")
            sub.prop(wm,"zbGradientSwitch", text ="Gradient", toggle=True)
            if brush.gradient_stroke_mode in {'SPACING_REPEAT', 'SPACING_CLAMP'}:
                sub = box.row(True)
                sub.prop(brush, "grad_spacing")
        elif brush.image_tool == 'FILL':
            sub = box.row(True)
            if bpy.context.mode.startswith('PAINT_VERTEX'):
                sub.prop(brush, "vertex_tool", text="")
            else:
                sub.prop(wm, "zbHidePaintOptions", text = '', icon = 'COLLAPSEMENU')
                sub = sub.row(align=True)
                sub.scale_x = 0.05
                sub.prop(brush,'color',text='')
                sub = sub.row(align=True)
                sub.scale_x =10
                sub.prop(brush, "gradient_fill_mode", text="")
                sub.prop(wm,"zbGradientSwitch", text ="Gradient", toggle=True)
        else:
            # Reachable when `brush` was rebound to the vertex-paint brush
            # above and its image_tool is neither DRAW nor FILL.
            if settings.palette:
                sub = box.row(True)
                sub.template_palette(settings, "palette", color=True)
            sub = box.row(True)
            sub.prop(wm, "zbHidePaintOptions", text = '', icon = 'COLLAPSEMENU')
            sub = sub.row(align=True)
            sub.scale_x = 0.05
            sub.prop(brush,'color',text='')
            sub = sub.row(align=True)
            sub.scale_x =10
            if bpy.context.mode.startswith('PAINT_VERTEX'):
                sub.prop(brush, "vertex_tool", text="")
            else:
                sub.prop(brush, "blend", text="")
            sub.prop(wm,"zbGradientSwitch", text ="Gradient", toggle=True)
    # Extended options, shown when the collapse toggle is open.
    if wm.zbHidePaintOptions:
        row = layout.row()
        row.template_ID(settings, "palette", new="palette.new")
        row = layout.row(align=True)
        panel.prop_unified_color(row, context, brush, "color", text="")
        panel.prop_unified_color(row, context, brush, "secondary_color", text="")
        row.operator("paint.brush_colors_flip", icon='FILE_REFRESH', text="")
        col = layout.column(align=True)
        try:
            t = brush.texture
            col.prop(t, 'intensity')
            col.prop(t, 'contrast')
            col.prop(t, 'saturation')
        except:
            pass
        row = layout.row()
        row.prop(settings, "input_samples", text='Stroke Samples')
        row = layout.row(align=True)
        row.label('Curve')
        row.operator("brush.curve_preset", icon='SMOOTHCURVE', text="").shape = 'SMOOTH'
        row.operator("brush.curve_preset", icon='SPHERECURVE', text="").shape = 'ROUND'
        row.operator("brush.curve_preset", icon='ROOTCURVE', text="").shape = 'ROOT'
        row.operator("brush.curve_preset", icon='SHARPCURVE', text="").shape = 'SHARP'
        row.operator("brush.curve_preset", icon='LINCURVE', text="").shape = 'LINE'
        row.operator("brush.curve_preset", icon='NOCURVE', text="").shape = 'MAX'
        row = layout.row()
        row.prop(brush, "use_accumulate")
def fu28(self):
    """Draw the shared brush-loading row plus, when relevant, the brush
    texture mapping/scale controls and stencil helpers.

    `self` is the calling Panel instance (only `self.layout` is used), so
    several panels can reuse this as a common section of their draw().
    """
    wm = bpy.context.window_manager
    scene = bpy.context.scene
    context = bpy.context
    layout = self.layout
    aType = bpy.context.area.type
    ts = bpy.context.tool_settings
    # Resolve brush/texture-slot for whichever paint mode object is active;
    # the broad try/except guards contexts where no paint brush exists.
    try:
        brush = ts.image_paint.brush
        tex_slot = brush.texture_slot
        if context.image_paint_object:
            brush = ts.image_paint.brush
            tex_slot = brush.texture_slot
        if context.vertex_paint_object:
            brush = ts.vertex_paint.brush
            tex_slot = brush.texture_slot
        if context.weight_paint_object:
            brush = ts.weight_paint.brush
            tex_slot = brush.texture_slot
        if context.sculpt_object:
            brush = ts.sculpt.brush
            tex_slot = brush.texture_slot
    except:
        pass
    # Brush loading controls.
    row = layout.row(align=True)
    row.alignment = 'LEFT'
    row.label('Load')
    row.prop_menu_enum(scene,'zbLoadBrushType', icon='COLLAPSEMENU', text='')
    row.operator('texture.zb_load_brush')
    row.operator('texture.zb_load_brushes')
    row = layout.row()
    # Show mapping/scale controls only for 'Fill' or textured brushes, and
    # not for the gradient Draw / Graphic Pen variants.
    # NOTE(review): if the try-block above failed before assigning, `brush`
    # would be unbound here — appears to rely on image_paint.brush existing.
    showScale = False
    if 'Fill' in brush.name:
        showScale = True
    try:
        if 'Draw' in wm.zbBeforeGradBrush:
            showScale = False
        if 'Graphic Pen' in wm.zbBeforeGradBrush:
            showScale = False
    except:
        pass
    if tex_slot.texture:
        showScale = True
    if showScale:
        row = layout.row(align=True)
        row.prop(tex_slot, "tex_paint_map_mode", text="")
        row.prop(brush,"zb_texscale", text="")
        if tex_slot.tex_paint_map_mode == 'STENCIL':
            # Stencil helpers plus hotkey hints.
            row = layout.row(align=True)
            row.operator("object.zb_center_stencil", text="Center")
            row.operator("object.zb_stencil_sync", text="Sync Mask")
            row = layout.row()
            row.scale_y = 0.5
            row.label("MOVE: Right Mouse")
            row = layout.row()
            row.scale_y = 0.5
            row.label("SCALE: Shift+RMouse")
            row = layout.row()
            row.scale_y = 0.5
            row.label("ROTATE: Ctrl+X")
            row = layout.row()
class BrushButtonsPanel(UnifiedPaintPanel):
    """Mixin for Image Editor panels that need an active paint brush."""
    bl_space_type =i_0[146]
    bl_region_type =i_0[147]

    @classmethod
    def poll(cls, context):
        # Visible only while the Image Editor is in paint mode with a brush.
        space = context.space_data
        paint_settings = context.tool_settings.image_paint
        return space.show_paint and paint_settings.brush
class cl47(bpy.types.Operator):
    """Switch the current area between 3D View / UV view / Image Editor
    paint, depending on the `func` option set by the calling button."""
    bl_idname =i_0[148]
    bl_label =i_0[149]
    bl_description =i_0[150]
    # Which transition to perform: 'to_view', 'to_paint' or 'to_image_editor'.
    func = bpy.props.StringProperty()

    def execute(self, context):
        func = self.func
        mode = bpy.context.mode
        ob = bpy.context.active_object
        if func == 'to_view':
            # Show UVs: image editor into VIEW mode, object into edit mode.
            bpy.context.space_data.mode = 'VIEW'
            bpy.ops.object.mode_set(mode='EDIT')
        if func == 'to_paint':
            bpy.context.area.type = 'IMAGE_EDITOR'
            # Only enter 2D paint when a material exists to paint on.
            if ob.active_material is not None:
                bpy.context.space_data.mode = 'PAINT'
        if func == 'to_image_editor':
            bpy.context.area.type = 'IMAGE_EDITOR'
            try:
                bpy.context.space_data.mode = 'VIEW'
                bpy.ops.object.mode_set(mode='EDIT')
                if mode == 'PAINT_TEXTURE':
                    # Coming from texture paint: stay in paint view and make
                    # sure the tool shelf is visible.
                    bpy.context.space_data.mode = 'PAINT'
                    bpy.context.space_data.use_realtime_update = True
                    tool_shelf = None
                    area = bpy.context.area
                    for region in area.regions:
                        if region.type == 'TOOLS':
                            tool_shelf = region
                    if tool_shelf:
                        if tool_shelf.width < 2:
                            bpy.ops.image.toolshelf()
            except:
                # Fall back to just opening the image editor.
                bpy.context.area.type = 'IMAGE_EDITOR'
        func = ''
        return {'FINISHED'}
class IMAGE_PT_paint(Panel, BrushButtonsPanel):
    """Image Editor paint panel: 'Go Back' / 'Edit UVs' shortcuts, brush
    preview, and the shared brush sections from fu28/fu27."""
    bl_label =i_0[151]
    bl_region_type =i_0[152]
    bl_category =i_0[153]

    @classmethod
    def poll(cls, context):
        return bpy.context.area.spaces.active.mode == 'PAINT'

    def draw(self, context):
        wm = bpy.context.window_manager
        layout = self.layout
        settings = context.tool_settings.image_paint
        brush = settings.brush
        col = layout.column()
        sub = col.row(align=True)
        sub.scale_y = 1.4
        sub.scale_x = 1.5
        sub.alignment = 'LEFT'
        sub.operator('object.zb_back_to_3d_view',text='Go Back',icon='FILE_TICK')
        # EAFP probe: only offer 'EDIT UVS' when an active texture exists;
        # otherwise show an informational stub button instead.
        try:
            bpy.context.active_object.active_material.active_texture
            sub.operator('object.zb_image_edit', text='EDIT UVS'
                ,icon='ASSET_MANAGER').func = 'to_view'
        except:
            sub.operator('object.sf_tn', text='EDIT UVS'
                ,icon='ASSET_MANAGER').message = "This object has no uvs to edit"
        col.separator()
        col.template_ID_preview(settings, "brush", new="brush.add", rows=2, cols=6)
        # Shared sections: brush loading row, then the common paint controls.
        fu28(self)
        fu27(self, context, layout, brush, settings)
class UVToolsPanel:
    """Mixin for panels shown while UV-editing (and not UV-sculpting)."""
    bl_space_type =i_0[154]
    bl_region_type =i_0[155]
    bl_category =i_0[156]

    @classmethod
    def poll(cls, context):
        space = context.space_data
        uv_sculpting = context.tool_settings.use_uv_sculpt
        return space.show_uvedit and not uv_sculpting
class cl48(bpy.types.Operator):
    """Return the current area to the 3D View, restoring a sensible object
    mode first when leaving 2D paint."""
    bl_label =i_0[157]
    bl_idname =i_0[158]
    bl_description =i_0[159]

    def execute(self, context):
        if bpy.context.space_data.mode == 'PAINT':
            active = bpy.context.active_object
            # Re-enter texture paint only when the object still has UVs;
            # otherwise (or with no active object) fall back to object mode.
            if active and active.data.uv_textures:
                target_mode = 'TEXTURE_PAINT'
            else:
                target_mode = 'OBJECT'
            bpy.ops.object.mode_set(mode=target_mode)
        bpy.context.area.type = 'VIEW_3D'
        return {'FINISHED'}
class IMAGE_PT_tools_zbPaintInEditor(Panel,UVToolsPanel):
    """UV editor panel offering 'Go Back' and 'Paint UVs' shortcuts."""
    bl_label =i_0[160]
    bl_region_type =i_0[161]
    bl_category =i_0[162]

    @classmethod
    def poll(cls, context):
        # Same visibility rule as UVToolsPanel: UV editing, not UV sculpting.
        space = context.space_data
        return space.show_uvedit and not context.tool_settings.use_uv_sculpt

    def draw(self, context):
        buttons = self.layout.column().row(align=True)
        buttons.scale_y = 1.4
        buttons.scale_x = 1.5
        buttons.alignment = 'LEFT'
        buttons.operator('object.zb_back_to_3d_view', text='Go Back', icon='FILE_TICK')
        paint_btn = buttons.operator('object.zb_image_edit', text='PAINT UVS',
                                     icon='TPAINT_HLT')
        paint_btn.func = 'to_paint'
class View3DPaintPanel(UnifiedPaintPanel):
    # Mixin pinning paint panels to the 3D View tool region (the space and
    # region identifiers come from the obfuscated string table i_0).
    bl_space_type =i_0[163]
    bl_region_type =i_0[164]
class cl49(Panel, View3DPaintPanel):
    """3D View brush panel: dispatches on the active object's mode and draws
    the particle-edit, sculpt, texture-paint, weight-paint or vertex-paint
    tool UI (replacement for Blender's stock brush panel)."""
    bl_category =i_0[165]
    bl_label =i_0[166]
    bl_region_type =i_0[167]

    @classmethod
    def poll(cls, context):
        # Visible whenever some paint settings are active for the context.
        return cls.paint_settings(context)

    def draw(self, context):
        scene = bpy.context.scene
        ob = bpy.context.active_object
        layout = self.layout
        toolsettings = context.tool_settings
        settings = self.paint_settings(context)
        brush = settings.brush
        wm = bpy.context.window_manager
        # Brush preview + shared loading row for every mode except particle edit.
        if not context.particle_edit_object:
            col = layout.split().column()
            col.template_ID_preview(settings, "brush",
                new="brush.add", rows=3, cols=8)
            fu28(self)
        # --- particle edit mode ---
        if context.particle_edit_object:
            tool = settings.tool
            box = layout.box()
            box.column().prop(settings, "tool", text = 'Brush')
            if tool != 'NONE':
                if tool != 'WEIGHT':
                    # X-mirror toggle; Y/Z are informational stubs only.
                    try:
                        sub1 = box.row(True)
                        sub1.scale_x = 0.7
                        sub1.scale_y = 0.8
                        sub1.label('Mirror')
                        sub1.separator()
                        sub1.prop(ob.data, "use_mirror_x", text="X", toggle=True)
                        text1 = 'Can only x-mirror while in particle mode.'
                        sub1.operator('object.sf_tn', text = ' Y').message = text1
                        sub1.operator('object.sf_tn', text = ' Z').message = text1
                    except:
                        pass
                col = box.column(True)
                col.separator()
                set = ob.particle_systems.active.settings
                col.prop(brush, "size", slider=True, text = 'Brush Radius')
                col.prop(brush, "strength", slider=True, text = 'Brush Strength')
                # Per-tool extras.
                if tool == 'ADD':
                    sub = col
                    sub.prop(brush, "count")
                    sub.separator()
                    sub.separator()
                    sub.prop(settings, "use_default_interpolate",toggle=True)
                    sub = sub.column(align=True)
                    sub.active = settings.use_default_interpolate
                    sub.prop(brush, "steps", slider=True)
                    sub.prop(settings, "default_key_count")
                elif tool == 'LENGTH':
                    col.separator()
                    col.separator()
                    col.prop(brush, "length_mode", expand = True)
                elif tool == 'PUFF':
                    col.separator()
                    col.separator()
                    sub = col.row(True)
                    sub.prop(brush, "puff_mode", expand=True)
                    col.prop(brush, "use_puff_volume", toggle=True)
            # Child-particle options (only meaningful once a tool is chosen).
            if tool != 'NONE':
                col.separator()
                if set.child_type != 'NONE':
                    col = layout.column()
                    col.separator()
                    col.label('Child Particles')
                    sub = col.column(True)
                    sub.scale_y = 1
                    sub.scale_y = 1.4
                    sub.prop(set, 'draw_percentage', text = 'Display')
                    sub = sub.column(True)
                    sub.scale_y = 1
                    sub = sub.row(True)
                    sub.scale_x = .95
                    sub.scale_y = 0.95
                    # Detail slider target selector: edit / object / render.
                    sub.operator('object.zb_particle_detail_select', text = '',
                        icon = 'PARTICLEMODE').detailSelect = 'PARTICLE_EDIT'
                    sub.operator('object.zb_particle_detail_select', text = '',
                        icon = 'OBJECT_DATAMODE').detailSelect = 'OBJECT'
                    sub.operator('object.zb_particle_detail_select', text = '',
                        icon = 'RENDER_STILL').detailSelect = 'RENDER'
                    deSel = scene.zbParticleDetailSelect
                    if deSel == 'PARTICLE_EDIT':
                        pEdit = scene.tool_settings.particle_edit
                        sub.prop(pEdit, 'draw_step', text = 'Edit Detail', slider = True)
                    if deSel == 'OBJECT':
                        sys = ob.particle_systems.active.settings
                        sub.prop(sys, 'draw_step', text = 'Object Detail', slider = True)
                    if deSel == 'RENDER':
                        sys = ob.particle_systems.active.settings
                        sub.prop(sys, 'render_step', text = 'Render Detail', slider = True)
                    sub = col.column(True)
                    sub.scale_y = 1
                    sub.prop(set, 'child_radius', text = 'Spread', slider=True)
                    sub.prop(set, 'clump_factor', text = 'Clump', slider=True)
                    sub.prop(set, 'clump_shape', text = 'Clump Shape', slider=True)
                    sub.prop(set, 'roughness_1', text = 'Roughness', slider=True)
                    sub.prop(set, 'roughness_endpoint', text = 'Ends', slider=True)
                    sub.prop(set, 'child_length', text = 'Length', slider=True)
                    sub.separator()
                    sub.separator()
                    sub.separator()
                    sub.prop(set,'kink', text = '')
                    if set.kink != 'NO':
                        if set.kink == 'SPIRAL':
                            sub.prop(set, 'kink_extra_steps', text ='Detail')
                            sub1 = sub.row(True)
                            sub1.scale_x = 0.175
                            sub1.prop(set, 'kink_axis', text ='')
                            sub1 = sub1.row(True)
                            sub1.scale_x = 2
                            sub1.prop(set, 'kink_axis_random', text ='Direction')
                        sub.separator()
                        sub.prop(set, 'kink_frequency', slider=True)
                        sub.prop(set, 'kink_amplitude', text = 'Amplitude', slider=True)
                        if set.kink != 'SPIRAL':
                            sub.prop(set, 'kink_amplitude_clump', text = 'Kink Clump', slider=True)
                        sub.separator()
                        sub.prop(set, 'kink_shape', slider=True)
                        if set.kink != 'SPIRAL':
                            sub.prop(set, 'kink_flat', text = 'Flatness', slider=True)
                        sub.prop(set, 'kink_amplitude_random',
                            text = 'Random', slider=True)
            else:
                col = box.column()
        # --- sculpt mode ---
        elif context.sculpt_object and brush:
            capabilities = brush.sculpt_capabilities
            ups = toolsettings.unified_paint_settings
            brushName = bpy.context.tool_settings.sculpt.brush.name
            if 'Decal' in brushName or 'Stencil' in brushName:
                row = layout.row()
                texture = bpy.data.brushes[brushName].texture
                row.prop(texture, "use_color_ramp", text = 'Use As Mask',
                    icon = 'MOD_MASK')
            col = layout.column(align=True)
            row = col.row(align=True)
            # Locked-size brushes expose the unprojected (scene-space) radius.
            if ((ups.use_unified_size and ups.use_locked_size) or
                    ((not ups.use_unified_size) and brush.use_locked_size)):
                self.prop_unified_size(row, context, brush, "use_locked_size", icon='LOCKED')
                self.prop_unified_size(row, context, brush, "unprojected_radius", slider=True, text="Radius")
            else:
                self.prop_unified_size(row, context, brush, "use_locked_size", icon='UNLOCKED')
                self.prop_unified_size(row, context, brush, "size", slider=True, text="Radius")
            self.prop_unified_size(row, context, brush, "use_pressure_size")
            try:
                if capabilities.has_strength_pressure:
                    row = col.row(align=True)
                    row.prop(brush, "use_space_attenuation", toggle=True, icon_only=True)
                    self.prop_unified_strength(row, context, brush, "strength", text="Strength")
                    self.prop_unified_strength(row, context, brush, "use_pressure_strength")
                if capabilities.has_auto_smooth:
                    row = col.row(align=True)
                    row.operator('object.sf_tn', text='',icon='BLANK1')
                    row.prop(brush, "auto_smooth_factor", slider=True)
                    row.prop(brush, "use_inverse_smooth_pressure", toggle=True, text="")
            except:
                pass
            row = col.row(align=True)
            row.operator('object.sf_tn', text='',icon='BLANK1')
            row.prop(brush, "zb_hardness", text='Hardness')
            row.operator('object.sf_tn', text='',icon='BLANK1')
            if capabilities.has_pinch_factor:
                row = col.row(align=True)
                row.operator('object.sf_tn', text='',icon='BLANK1')
                row.prop(brush, "crease_pinch_factor", slider=True, text="Pinch")
                row.operator('object.sf_tn', text='',icon='BLANK1')
            if capabilities.has_normal_weight:
                row = col.row(align=True)
                row.operator('object.sf_tn', text='',icon='BLANK1')
                row.prop(brush, "normal_weight", slider=True)
                row.operator('object.sf_tn', text='',icon='BLANK1')
            if brush.sculpt_tool == 'MASK':
                col.prop(brush, "mask_tool", text="")
            if capabilities.has_height:
                row = col.row(align=True)
                row.operator('object.sf_tn', text='',icon='BLANK1')
                row.prop(brush, "height", slider=True, text="Height")
                row.operator('object.sf_tn', text='',icon='BLANK1')
            if brush.name == 'Mask':
                col.separator()
                if wm.zbViewMaskMode:
                    maskIcon = 'RESTRICT_VIEW_OFF'
                else:
                    maskIcon = 'RESTRICT_VIEW_ON'
                col.prop(wm,"zbViewMaskMode", text='View', icon= maskIcon)
            # Stroke method buttons (space / curve / line / anchored).
            row = col.separator()
            row = col.row(align=True)
            row.label("Brush Stroke")
            row.operator("object.zb_stroke_buttons",
                text ="", icon='MOD_DYNAMICPAINT').shapeTool = 'SPACE'
            row.operator("object.zb_stroke_buttons", text='',
                icon='MOD_CURVE').shapeTool = 'CURVE'
            row.operator("object.zb_stroke_buttons",
                text ="", icon='IPO_LINEAR').shapeTool = 'LINE'
            if not bpy.context.mode.startswith('PAINT_VERTEX'):
                row.operator("object.zb_stroke_buttons",
                    text ="", icon='PROP_ON').shapeTool = 'ANCHORED'
            if brush.stroke_method == 'LINE':
                row = col.row(align=True)
                row.label('(Hold alt to constrain line)')
            if brush.stroke_method == 'CURVE':
                row = col.separator()
                row = col.row(align=True)
                row.template_ID(brush, "paint_curve", new="paintcurve.new")
                if brush.paint_curve:
                    row = col.separator()
                    row = col.row(align=True)
                    row.label("(Ctrl-Mouse)")
                    row.operator("paintcurve.draw", text="Draw Curve")
                else:
                    row = col.row(align=True)
                    row.label("(Ctrl-click to make new curve)")
            row = col.separator()
            row = col.separator()
            row = col.row(align=True)
            sculpt = context.tool_settings.sculpt
            row.scale_y = 0.8
            row.label("Mirror")
            row.separator()
            row.prop(sculpt, "use_symmetry_x", text="X", toggle=True)
            row.prop(sculpt, "use_symmetry_y", text="Y", toggle=True)
            row.prop(sculpt, "use_symmetry_z", text="Z", toggle=True)
            row = col.separator()
            row = col.row()
            row.prop(wm, 'zbHidePaintOptions',
                icon='COLLAPSEMENU', text='More Options',toggle=True)
            # Collapsible "More Options" section.
            if wm.zbHidePaintOptions:
                row = col.separator()
                row = col.row(align=True)
                row.label('Curve')
                row.operator("brush.curve_preset", icon='SMOOTHCURVE', text="").shape = 'SMOOTH'
                row.operator("brush.curve_preset", icon='SPHERECURVE', text="").shape = 'ROUND'
                row.operator("brush.curve_preset", icon='ROOTCURVE', text="").shape = 'ROOT'
                row.operator("brush.curve_preset", icon='SHARPCURVE', text="").shape = 'SHARP'
                row.operator("brush.curve_preset", icon='LINCURVE', text="").shape = 'LINE'
                row.operator("brush.curve_preset", icon='NOCURVE', text="").shape = 'MAX'
                if capabilities.has_sculpt_plane:
                    col.separator()
                    row = col.row()
                    row = col.row(align=True)
                    row.prop(brush, "use_original_normal", toggle=True, icon_only=True)
                    row.prop(brush, "sculpt_plane", text="")
                if capabilities.has_plane_offset:
                    row = col.row(align=True)
                    row.operator('object.sf_tn', text='',icon='BLANK1')
                    row.prop(brush, "plane_offset", slider=True)
                    row.prop(brush, "use_offset_pressure", text="")
                    row = col.row(align=True)
                    row.operator('object.sf_tn', text='',icon='BLANK1')
                    row.prop(brush, "plane_trim", slider=True, text="Distance")
                    row.operator('object.sf_tn', text='',icon='BLANK1')
                    row = col.separator()
                    row = col.row(align=True)
                    row.prop(brush, "use_plane_trim", text="Trim", icon='BLANK1')
                if capabilities.has_persistence:
                    ob = context.sculpt_object
                    # Persistent base is unavailable with a multires modifier.
                    do_persistent = True
                    for md in ob.modifiers:
                        if md.type == 'MULTIRES':
                            do_persistent = False
                            break
                    if do_persistent:
                        row = col.row(align=True)
                        row.prop(brush, "use_persistent", icon='BLANK1')
                        row.operator("sculpt.set_persistent_base", text='Set Base')
                col.separator()
                col.row().prop(brush, "direction", expand=True)
                col.separator()
                row = col.row()
                row.prop(brush, "use_frontface", text="Front Faces Only")
                if capabilities.has_accumulate:
                    col.separator()
                    col.prop(brush, "use_accumulate")
        # --- texture paint mode: delegate to the shared paint section ---
        elif context.image_paint_object and brush:
            fu27(self, context, layout, brush, settings, True)
        # --- weight paint mode ---
        elif context.weight_paint_object and brush:
            col = layout.column(align=True)
            row = col.row(align=True)
            row.operator('object.sf_tn', text='',icon='BLANK1')
            self.prop_unified_weight(row, context, brush, "weight", slider=True, text="Weight")
            row.operator('object.sf_tn', text='',icon='BLANK1')
            row = col.row(align=True)
            row.operator('object.sf_tn', text='',icon='BLANK1')
            self.prop_unified_size(row, context, brush, "size", slider=True, text="Radius")
            self.prop_unified_size(row, context, brush, "use_pressure_size")
            row = col.row(align=True)
            row.operator('object.sf_tn', text='',icon='BLANK1')
            self.prop_unified_strength(row, context, brush, "strength", text="Strength")
            self.prop_unified_strength(row, context, brush, "use_pressure_strength")
            row = col.row(align=True)
            row.operator('object.sf_tn', text='',icon='BLANK1')
            row.prop(brush, "zb_hardness", text='Hardness', slider=True)
            row.operator('object.sf_tn', text='',icon='BLANK1')
            row = layout.row()
            row = layout.row()
            row.prop(brush, "vertex_tool", text="")
            col = layout.column()
            col.prop(toolsettings, "use_auto_normalize", text="Auto Normalize")
            col.prop(toolsettings, "use_multipaint", text="Multi-Paint")
            brush = bpy.context.tool_settings.weight_paint.brush
            row = col.separator()
            row = col.row(align=True)
            row.label("Brush Stroke")
            row.operator("object.zb_stroke_buttons",
                text ="", icon='MOD_DYNAMICPAINT').shapeTool = 'SPACE'
            row.operator("object.zb_stroke_buttons", text='',
                icon='MOD_CURVE').shapeTool = 'CURVE'
            row.operator("object.zb_stroke_buttons",
                text ="", icon='IPO_LINEAR').shapeTool = 'LINE'
            if brush.stroke_method == 'LINE':
                row = col.row(align=True)
                row.label('(Hold alt to constrain line)')
            brush = bpy.context.tool_settings.weight_paint.brush
            if brush.stroke_method == 'CURVE':
                row = col.separator()
                row = col.row(align=True)
                row.template_ID(brush, "paint_curve", new="paintcurve.new")
                if brush.paint_curve:
                    row = col.separator()
                    row = col.row(align=True)
                    row.label("(Ctrl-Mouse)")
                    row.operator("paintcurve.draw", text="Draw Curve")
                else:
                    row = col.row(align=True)
                    row.label("(Ctrl-click to make new curve)")
            row = col.separator()
            row = col.row()
            row = col.row()
            row = col.row(align=True)
            row.scale_y = 0.8
            row.label('Mirror')
            row.separator()
            row.prop(scene, "zbWeightMirror", text='X', toggle=True)
            row.operator("object.sf_tn", text=' Y')
            row.operator("object.sf_tn", text=' Z')
            # Warn when the deforming armature is not in pose mode (bones
            # can't be selected from weight paint otherwise).
            if len(bpy.data.armatures) > 0:
                displayWarning = 0
                try:
                    for mod in ob.modifiers:
                        if mod.type == 'ARMATURE':
                            if mod.object:
                                if mod.object.mode != 'POSE':
                                    displayWarning = 1
                                    col = layout.column(align=True)
                                    col.separator()
                                    col = layout.column(align=True)
                                    col.scale_y = 0.65
                                    col.label('The armature bones can not')
                                    col.separator()
                                    col.label('be selected from weight')
                                    col.separator()
                                    col.label('paint mode unless the arm-')
                                    col.separator()
                                    col.label('-ature is set to pose mode.')
                                    col.separator()
                                    col = layout.column(align=True)
                                    col.scale_y = 2
                                    col.prop(wm,'zbMakeBonesSelectable', toggle=True)
                            break
                except:
                    pass
                if displayWarning == 0:
                    col = layout.column(align=True)
                    col.separator()
                    col.scale_y = 0.75
                    if hasattr(scene,'sfSelectByGroup'):
                        col.label('Ctrl-Click to select bones')
                        col.label('Shift-Ctrl-Click for multiple')
                    else:
                        if scene.zbWeightMirror:
                            col.label('Ctrl-Click to mirror select bones')
        # --- vertex paint mode: same shared paint section ---
        elif context.vertex_paint_object and brush:
            fu27(self, context, layout, brush, settings, True)
def zbMakeBonesSelectable(self,context):
    """Update callback for the `zbMakeBonesSelectable` toggle: switch the
    mesh's armature into pose mode (so bones become ctrl-clickable from
    weight paint), then return to weight paint and reset the toggle."""
    if self.zbMakeBonesSelectable:
        scene = bpy.context.scene
        ob = bpy.context.active_object
        arm = 0
        if ob:
            if ob.modifiers:
                # First armature modifier with a target object wins.
                for mod in ob.modifiers:
                    if mod.type == 'ARMATURE':
                        if mod.object:
                            arm = mod.object
                            break
        if arm:
            # Briefly make the armature active to enter pose mode...
            scene.objects.active = arm
            arm.select = True
            bpy.ops.object.mode_set(mode='POSE')
            # ...then restore the mesh as active and re-enter weight paint.
            ob.select = True
            scene.objects.active = ob
            bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        # One-shot behavior: clear the toggle after acting on it.
        self.zbMakeBonesSelectable = False
def zbSelectParticleMat(self,context):
    """Update callback for the particle-material search field: attach the
    chosen material to the object (adding a slot if necessary), point the
    active particle system at it, then clear the field."""
    ob = bpy.context.active_object
    parSys = ob.particle_systems.active
    set = parSys.settings
    mats = bpy.data.materials
    newMat = bpy.data.materials[self.zbSelectParticleMat]
    if len(ob.data.materials) < 1:
        if len(ob.material_slots) == 0:
            bpy.ops.object.material_slot_add()
    if newMat.name not in ob.data.materials:
        ob.data.materials.append(newMat)
    if set.material_slot != newMat.name:
        set.material_slot = newMat.name
    # Reset so the search box is empty on the next draw (and only a real
    # new selection re-triggers this callback).
    self.zbSelectParticleMat = ''
class zbPaint(bpy.types.Panel):
    """Main Zero Brush 3D View panel: top mode-switch strip plus per-mode
    sections (texture-paint layer stack and sliders, sculpt detail controls,
    particle-system list, object-mode brush-layer placement)."""
    bl_label =i_0[168]
    bl_space_type =i_0[169]
    bl_region_type =i_0[170]

    def draw(self, context):
        wm = context.window_manager
        scene = bpy.context.scene
        sd = bpy.context.space_data
        ob = bpy.context.active_object
        re = scene.render.engine
        mode = bpy.context.mode
        layout = self.layout
        # --- top mode-switch button strip ---
        row = layout.row(True)
        row.scale_y = .6
        row.scale_x = 1.75
        row.operator("object.zb_mode_buttons", text="", icon='OBJECT_DATA').modeButton = 1
        row.operator("object.zb_mode_buttons", text="", icon='SCULPTMODE_HLT').modeButton = 2
        row.operator("object.zb_mode_buttons", text="", icon='BRUSH_DATA').modeButton = 3
        row.operator("object.zb_mode_buttons", text="", icon='PARTICLEMODE').modeButton = 4
        sub = row.row(True)
        sub.scale_y = .6
        sub.scale_x = 1.25
        sub.operator("wm.call_menu", text="", icon='BLANK1').name = "view3D.zb_layer_options_menu"
        sub = sub.row(True)
        sub.scale_y = .6
        sub.scale_x = 4
        sub.operator("screen.screen_full_area", text="", icon='FULLSCREEN_ENTER')
        row = layout.row(True)
        # --- texture paint mode ---
        if mode == 'PAINT_TEXTURE':
            # `spacer` counts drawn sliders so the panel can be padded below;
            # `special` flags the "foreign Cycles material" warning.
            special= 0
            spacer = 0
            col = layout.column()
            row = layout.row(align=True)
            if ob.active_material:
                n = ob.active_material.name
                if 'ZB Painted' in n or 'ZBA Painted' in n:
                    row.label('Alchemy Mode (locked)',icon = 'BOOKMARKS')
                else:
                    row.label('Blending Mode')
            sub = row.row(align=True)
            sub.scale_x = 0.7
            sub.prop(scene, "zbGoCycles", icon='NODETREE', text="Cycles")
            col = layout.column(align=True)
            if ob.active_material:
                mat = ob.active_material
                ts = mat.texture_slots[mat.active_texture_index]
                if re != 'CYCLES':
                    # Blender Internal: expose the active slot's enabled
                    # influence factors; each drawn slider bumps `spacer`.
                    if ts:
                        col.prop(ts,'blend_type',text='', icon = 'POTATO')
                        if ts.use_map_diffuse:
                            spacer += 1
                            col.prop(ts,'diffuse_factor', text = "Color Brightness", slider = True)
                        if ts.use_map_color_diffuse:
                            spacer += 1
                            col.prop(ts,'diffuse_color_factor', text = "Layer Opacity", slider = True)
                        if ts.use_map_translucency:
                            spacer += 1
                            col.prop(ts,'translucency_factor', text = "Translucency", slider = True)
                        if ts.use_map_specular:
                            spacer += 1
                            col.prop(ts,'specular_factor', text = "Specular", slider = True)
                        if ts.use_map_color_spec:
                            spacer += 1
                            col.prop(ts,'specular_color_factor', text = "Shininess", slider = True)
                        if ts.use_map_hardness:
                            spacer += 1
                            col.prop(ts,'hardness_factor', text = "Hardness", slider = True)
                        if ts.use_map_alpha:
                            spacer += 1
                            col.prop(ts,'alpha_factor', text = "Transparency", slider = True)
                        if ts.use_map_normal:
                            try:
                                # Label depends on whether the texture is a
                                # true normal map or a bump map.
                                if ts.texture.use_normal_map:
                                    theText = 'Normal'
                                else:
                                    theText = 'Bumpiness'
                                spacer += 1
                                col.prop(ts,'normal_factor', text = theText, slider = True)
                            except:
                                pass
                        if ts.use_map_warp:
                            spacer += 1
                            col.prop(ts,'warp_factor', text = "Warp", slider = True)
                        if ts.use_map_displacement:
                            spacer += 1
                            col.prop(ts,'displacement_factor', text = "Displacement", slider = True)
                        if ts.use_map_ambient:
                            spacer += 1
                            col.prop(ts,'ambient_factor', text = "Ambient", slider = True)
                        if ts.use_map_emit:
                            spacer += 1
                            col.prop(ts,'emit_factor', text = "Emit", slider = True)
                        if ts.use_map_mirror:
                            spacer += 1
                            col.prop(ts,'mirror_factor', text = "Mirror", slider = True)
                        if ts.use_map_raymir:
                            spacer += 1
                            col.prop(ts,'raymir_factor', text = "Ray Mirror", slider = True)
                    else:
                        special = 1
                        col.label('Cycles materials not created')
                        col.label('with Zero Brush can not be')
                        col.label('converted to Blender Render')
                        col.label('materials. Adding new layers')
                        col.label('may cause unexpected results.')
                        col.label('')
                else:
                    # Cycles: drive the ZB-named node sockets for the layer
                    # type implied by the image name; every node lookup is
                    # wrapped in try/except since nodes may be absent.
                    if ts:
                        name = ts.texture.image.name
                        if "color" in name.lower() or "transparent" in name.lower() or "alpha" in name.lower():
                            if 'specular' not in name.lower():
                                try:
                                    node = mat.node_tree.nodes['Mix zbColor']
                                    col.prop(node, "blend_type", text="", icon='POTATO')
                                except:
                                    pass
                                try:
                                    node = mat.node_tree.nodes['Mixed1']
                                    col.prop(node.inputs['Fac'], "default_value", text="Reflectivity")
                                    spacer +=1
                                except:
                                    pass
                                try:
                                    node = mat.node_tree.nodes['Glossy BSDF zbColor']
                                    col.prop(node.inputs['Roughness'], "default_value", text="Roughness")
                                    spacer +=1
                                except:
                                    pass
                                try:
                                    node = mat.node_tree.nodes['Mixed2']
                                    col.prop(node.inputs['Fac'], "default_value", text="Light Emission")
                                    spacer +=1
                                except:
                                    pass
                                try:
                                    node = mat.node_tree.nodes['Mixed3']
                                    col.prop(node.inputs['Fac'], "default_value", text="Transparency")
                                    spacer +=1
                                except:
                                    pass
                                col.separator()
                                col.separator()
                                try:
                                    node = mat.node_tree.nodes['Math zbColor']
                                    col.prop(node.inputs[1], "default_value", text="Bumpiness")
                                    spacer +=1
                                except:
                                    pass
                                try:
                                    node = mat.node_tree.nodes['Bright/Contrast zbColor']
                                    col.prop(node.inputs['Bright'], "default_value", text="Brightness")
                                    spacer +=1
                                except:
                                    pass
                                try:
                                    node = mat.node_tree.nodes['Bright/Contrast zbColor']
                                    col.prop(node.inputs['Contrast'], "default_value", text="Contrast")
                                    spacer +=1
                                except:
                                    pass
                        if "bump" in name.lower():
                            try:
                                node = mat.node_tree.nodes['Math zbBump']
                                col.prop(node, "operation", text = "", icon="POTATO")
                            except:
                                pass
                            try:
                                node = mat.node_tree.nodes['Math zbBump']
                                col.prop(node.inputs[1],"default_value", text="Bumpiness")
                                spacer +=1
                            except:
                                pass
                            col.separator()
                            col.separator()
                            try:
                                node = mat.node_tree.nodes['Bright/Contrast zbBump']
                                col.prop(node.inputs['Bright'], "default_value", text="Brightness")
                                spacer +=1
                            except:
                                pass
                            try:
                                node = mat.node_tree.nodes['Bright/Contrast zbBump']
                                col.prop(node.inputs['Contrast'], "default_value", text="Contrast")
                                spacer +=1
                            except:
                                pass
                        if "specular" in name.lower():
                            try:
                                node = mat.node_tree.nodes['Mix zbSpecular']
                                col.prop(node, "blend_type", text="", icon='POTATO')
                            except:
                                pass
                            try:
                                node = mat.node_tree.nodes['Glossy BSDF zbSpecular']
                                col.prop(node.inputs['Roughness'], "default_value", text="Roughness")
                                spacer +=1
                            except:
                                pass
                            try:
                                node = mat.node_tree.nodes['Math zbSpecular']
                                col.prop(node.inputs[1], "default_value", text="Shininess")
                                spacer +=1
                            except:
                                pass
                            col.separator()
                            col.separator()
                            try:
                                node = mat.node_tree.nodes['Bright/Contrast zbSpecular']
                                col.prop(node.inputs['Bright'], "default_value", text="Brightness")
                                spacer +=1
                            except:
                                pass
                            try:
                                node = mat.node_tree.nodes['Bright/Contrast zbSpecular']
                                col.prop(node.inputs['Contrast'], "default_value", text="Contrast")
                                spacer +=1
                            except:
                                pass
                        if "glow" in name.lower():
                            try:
                                node = mat.node_tree.nodes['Mix zbGlow']
                                col.prop(node, "blend_type", text="", icon='POTATO')
                            except:
                                pass
                            try:
                                node = mat.node_tree.nodes['Emission zbGlow']
                                col.prop(node.inputs['Strength'], "default_value",text="Strength")
                                spacer +=1
                            except:
                                pass
                            try:
                                node = mat.node_tree.nodes['Math zbGlow']
                                col.prop(node.inputs[1], "default_value",text="Intensity")
                                spacer +=1
                            except:
                                pass
                        if "normal" in name.lower():
                            if 'bump' not in name.lower():
                                try:
                                    node = mat.node_tree.nodes['Mix zbColor']
                                    col.prop(node, "blend_type", text="", icon='POTATO')
                                except:
                                    pass
                                try:
                                    node = mat.node_tree.nodes['Normal Map zbNormal']
                                    col.prop(node.inputs[0], "default_value", text="Normal")
                                    spacer +=1
                                except:
                                    pass
                # Pad the panel according to how many sliders appeared so the
                # lower controls keep a stable position.
                if spacer == 0:
                    if re == 'CYCLES':
                        col.label('Layers not created with')
                        col.label('Zero Brush (and even some')
                        col.label('bake type layers created')
                        col.label('with Zero Brush) may not')
                        col.label('have any sliders to display.')
                        col.label('')
                        col.label('')
                        col.label('')
                        col.separator()
                        col.separator()
                    else:
                        spc = 32
                        if special == 0:
                            col.separator()
                            col.separator()
                        else:
                            col.separator()
                            col.separator()
                            spc = 8
                        for x in range(spc):
                            row = layout.row()
                if spacer == 1:
                    col.separator()
                    col.separator()
                    for x in range(24):
                        row = layout.row()
                if spacer == 2:
                    col.separator()
                    col.separator()
                    for x in range(20):
                        row = layout.row()
                if spacer == 3:
                    for x in range(16):
                        row = layout.row()
                if spacer == 4:
                    for x in range(12):
                        row = layout.row()
                # --- options strip ---
                row = layout.row(align=True)
                row = layout.row(align=True)
                row.scale_y = 1.5
                row.operator("wm.call_menu", icon = "COLLAPSEMENU",text="Options").name = "view3D.zb_layer_options_menu"
                row.prop(wm, "zbPaintThrough", text = "", icon ="TPAINT_HLT")
                if re != 'CYCLES':
                    if mat is not None:
                        row.prop(mat, "use_shadeless", text = "", icon ="TEXTURE_SHADED")
                else:
                    row.prop(scene, "zbQuickLights", text = "", icon ='LAMP_SUN')
                if sd.viewport_shade != 'RENDERED':
                    zbRendIcon = "MATERIAL"
                else:
                    zbRendIcon = "SMOOTH"
                row.operator("object.zb_render_prev", text="", icon = zbRendIcon)
                row.operator("object.zb_reset_uvs", text = "", icon ="FILE_REFRESH")
                row = layout.row()
                if sd.viewport_shade == 'RENDERED':
                    row.scale_y = 0.8
                    row.label('(Ctrl-B set preview border)')
                # --- texture layer list ---
                i = -1
                for t in mat.texture_slots:
                    i+=1
                    try:
                        if t.texture.type =='IMAGE':
                            row = layout.row(align= True)
                            if t.texture == mat.active_texture:
                                ai = 'BRUSH_DATA'
                            else:
                                ai = 'BLANK1'
                            row.operator('object.zb_set_active_layer',
                                text = "", icon = ai).tex_index =i
                            row.prop(t.texture,'name', text = "")
                            if t.use:
                                ic = 'RESTRICT_VIEW_OFF'
                            else:
                                ic = 'RESTRICT_VIEW_ON'
                            if t.texture == mat.active_texture:
                                # Reorder buttons (placeholders when there is
                                # only a single layer) plus delete.
                                if len(mat.texture_slots.items()) > 1:
                                    row.operator("object.zb_move_texture", text = "",icon = "TRIA_UP").tex_move_up = 1
                                    row.operator("object.zb_move_texture", text = "",icon = "TRIA_DOWN").tex_move_down = 1
                                else:
                                    row.operator("object.zb_mode_buttons", text = "",icon = "TRIA_UP").modeButton = 100
                                    row.operator("object.zb_mode_buttons", text = "",icon = "TRIA_DOWN").modeButton = 100
                                row.operator("object.zb_delete_texture", text = "",icon = "X").tex_kill = i
                            if re != 'CYCLES':
                                row.prop(t,'use', text = "",icon = ic)
                            else:
                                row.operator('object.sf_tn',text='',icon='BLANK1')
                    except:
                        pass
            # --- new-layer buttons and save controls ---
            row = layout.row()
            col = layout.column(align = True)
            box = col.box()
            sub = box.column(True)
            sub.operator("object.zb_paint_color", icon='TEXTURE')
            sub.operator("object.zb_paint_bump", icon='TEXTURE')
            sub.operator("object.zb_paint_specular", icon='TEXTURE')
            sub.operator("object.zb_paint_glow", icon='TEXTURE')
            sub = box.column()
            sub = box.column(True)
            sub.operator("object.zb_paint_transparent", icon='TEXTURE')
            sub.operator("object.zb_alpha_mask", icon='TEXTURE')
            sub = box.column()
            sub = box.column(True)
            sub.prop(scene, "zbImgSize")
            sub = sub.row(align=True)
            sub.scale_x = 1.2
            sub.prop(wm,'zbSaveLayerOptions',text='',icon='FILE_FOLDER')
            sub.operator("object.zb_save_layers")
            sub = box.column()
            if wm.zbSaveLayerOptions:
                sub.scale_y = 0.5
                sub.label('Save images to a folder')
                sub.label('instead of internally')
                sub = box.column()
                sub.scale_y = 1
                sub.prop(scene,'zbSaveToHardDrive',
                    text='Save To A Folder')
                sub.prop(wm, 'zbSaveImagePath',text='')
                sub = box.column(True)
                sub = sub.row(align=True)
                sub.label('as:')
                sub.prop_enum(wm,"zbSaveType",value = '.PNG')
                sub.prop_enum(wm,"zbSaveType",value = '.TIFF')
                sub.prop_enum(wm,"zbSaveType",value = '.TGA')
                sub.prop_enum(wm,"zbSaveType",value = '.JPEG')
                sub = box.column()
                sub = box.column()
                sub.scale_y = 0.3
                sub.label('New Layer Width x Height')
                sub = box.column()
                sub = sub.row(align=True)
                sub.prop(scene,'zbImgSize', text = '')
                sub.prop(scene,'zbImgSizeH',text = '')
                sub = box.column()
                sub = sub.row()
                if scene.zbSaveToHardDrive:
                    sub.scale_y = 1.3
                    sub.operator('object.zb_reload_all_images',
                        text='Reload All Images', icon='FILE_REFRESH')
                sub = box.column()
        # --- sculpt mode ---
        if mode.startswith('SCULPT'):
            col = layout.column(align=True)
            col.separator()
            ob = bpy.context.active_object
            multires = len([mod for mod in ob.modifiers if mod.type == 'MULTIRES'])
            dynamic = bpy.context.active_object.use_dynamic_topology_sculpting
            if not multires and not dynamic:
                col.operator("object.zb_multires_add")
                col.operator("object.zb_sculpt_dynamic")
            if multires:
                row = col.row(align = True)
                row.label("Detail")
                row.operator("object.zb_sub_multires", text='more')
                row.operator("object.zb_reset_detail", text='',icon ='FILE_REFRESH')
                row.operator("object.zb_multires_down", text='less')
                row = layout.row(align=True)
                row.operator("object.zb_generate_base_mesh", text="Apply")
                row.operator("object.zb_message", text="As Normal").message = 'NORMALS'
                row = layout.row()
            if dynamic:
                row = col.row(align = True)
                row.operator("object.zb_generate_base_mesh", text="Apply Detail")
                sub = col.row(align=True)
                sub.operator("sculpt.sample_detail_size", text="", icon='EYEDROPPER')
                toolsettings = context.tool_settings
                sculpt = toolsettings.sculpt
                sub.prop(sculpt, "constant_detail")
        # --- particle mode ---
        if mode.startswith('PARTICLE'):
            ob = context.active_object
            if ob:
                sys = ob.particle_systems
                if sys:
                    row = layout.row()
                    row = layout.row()
                    row.template_list("PARTICLE_UL_particle_systems", "particle_systems", ob, "particle_systems",
                        ob.particle_systems, "active_index", rows=1, maxrows = 25)
                    if sys.active:
                        col = layout.column()
                        row = col.row(True)
                        row.operator("object.zb_add_strands", text="New").option = 'NEW'
                        row.operator("object.zb_add_strands", text="Copy").option = 'COPY'
                        row.operator("object.particle_system_remove", text="Delete")
                        col = layout.column(True)
                        col.separator()
                        col.label('Layer Options')
                        col.separator()
                        col.prop(sys.active,'use_hair_dynamics', toggle = True,
                            icon = 'PHYSICS', text = 'Animate Strands')
                        col.separator()
                        col.separator()
                        sub1 = col.row(True)
                        sub1.scale_x = 0.11
                        sub1.prop_search(wm, "zbSelectParticleMat", bpy.data, "materials",
                            text = '',icon="MATERIAL_DATA")
                        sub1 = sub1.row(True)
                        sub1.scale_x = 5
                        sub1.operator('object.zb_add_strands',
                            text = 'Add Basic Material').option = 'ADD_BASIC_MATERIAL'
                        # Strand color controls, when the system's material
                        # supports them (Hair BSDF node / tangent shading).
                        parMat = sys.active.settings.material_slot
                        sub = col.row(True)
                        sub.scale_x = 0.15
                        if parMat in ob.data.materials:
                            if len(ob.material_slots) > 1:
                                parMat = bpy.data.materials[parMat]
                                chngCol = True
                                if re == 'CYCLES':
                                    if parMat.node_tree.nodes:
                                        try:
                                            nodeHair = parMat.node_tree.nodes['Hair BSDF']
                                            sub.prop(nodeHair.inputs['Color'], 'default_value',
                                                text = '')
                                        except:
                                            chngCol = False
                                else:
                                    if parMat.strand.use_tangent_shading:
                                        sub.prop(parMat, 'diffuse_color',text = '')
                                    else:
                                        chngCol = False
                                if chngCol:
                                    sub = sub.row(True)
                                    sub.scale_x = 2
                                    sub.operator('object.zb_add_strands',
                                        text = 'Apply Color').option = 'CHANGE_COLOR'
                        col.separator()
                        col.separator()
        # --- object mode: brush-layer placement controls ---
        if mode.startswith('OBJECT'):
            if wm.showBrushLayerOptions:
                # NOTE(review): `slot` is only bound if a '_zb_proxy'
                # material exists — appears to rely on the brush-layer
                # workflow having created one before this flag is set.
                for mat in bpy.data.materials:
                    if '_zb_proxy' in mat.name:
                        slot = mat.texture_slots[0]
                        break
                col = layout.column()
                sub = layout.column(align=True)
                sub.scale_y = 1.4
                sub.operator('object.zb_layer_from_brush',
                    icon='FILE_TICK',text='Finish Brush Layer').action = 'FINISH'
                sub = sub.column(align=True)
                sub.scale_y = 1
                sub.operator('object.zb_layer_from_brush',
                    icon='CANCEL', text='Cancel').action = 'CANCEL'
                row = layout.row()
                row.operator('object.zb_layer_from_brush', text='Re-Apply Brush').action = 'FLIP'
                row = layout.row()
                row.column().prop(slot, "offset", text= 'Position')
                row.column().prop(slot, "scale", text='Scale')
def fu29(self,context):
    """Update callback for Scene.zbImgSize: copy the newly chosen layer
    width into the height property so square layers stay the default."""
    current_scene = bpy.context.scene
    current_scene.zbImgSizeH = current_scene.zbImgSize
class cl50(bpy.types.Operator):
    """Operator that reports a short informational message in the UI,
    falling back to a generic notice when no message was supplied."""
    bl_idname =i_0[171]
    bl_label =i_0[172]
    bl_description =i_0[173]
    message = bpy.props.StringProperty()
    def execute(self, context):
        text = self.message
        if not text:
            text = "This option is not available in this mode"
        self.report({'INFO'}, text)
        return {'FINISHED'}
def fu30(scene):
    """save_pre handler: best-effort save of all Zero Brush paint layers.

    Installed by cl51 so layers are flushed just before the .blend file is
    written.  Failures are deliberately ignored so saving never aborts.
    """
    try:
        bpy.ops.object.zb_save_layers(save_only_active=False)
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # are no longer swallowed; operator errors are still ignored.
        pass
class cl51(bpy.types.Operator):
    """Startup operator: installs the autosave handler (when enabled) and
    disables the one-shot 'ZB Init Listener' keymap items."""
    bl_idname =i_0[174]
    bl_label =i_0[175]
    def execute(self,context):
        scene = bpy.context.scene
        wm = bpy.context.window_manager
        if scene.zbSaveWhenSave:
            bpy.app.handlers.save_pre.append(fu30)
        for keymap in wm.keyconfigs.addon.keymaps:
            for item in keymap.keymap_items:
                if 'ZB Init Listener' in item.name:
                    item.active = False
        return{'FINISHED'}
def zbWeightMirror(self,context):
    """Update callback for Scene.zbWeightMirror.

    Enabling it prepares the active mesh for symmetrical weight painting
    (recentres the origin, fixes normals, symmetry-snaps the mesh) and
    turns on X-mirror plus multi-paint; disabling it reverts the toggles.
    """
    scene = bpy.context.scene
    wm = bpy.context.window_manager
    km = wm.keyconfigs.addon.keymaps['Weight Paint']
    kmi = km.keymap_items["paint.zb_select_weight_mirror"]
    ob = bpy.context.active_object
    if ob:
        if self.zbWeightMirror:
            # Enable the mirrored-select shortcut and make the mesh symmetrical.
            kmi.active = True
            bpy.ops.object.origin_set(type='ORIGIN_CENTER_OF_MASS')
            bpy.ops.object.mode_set(mode='EDIT')
            bpy.ops.mesh.select_all(action='SELECT')
            bpy.ops.mesh.normals_make_consistent()
            bpy.ops.mesh.symmetry_snap()
            bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
            ob.data.use_mirror_x = True
            ob.data.use_mirror_topology = False
            scene.tool_settings.use_multipaint = True
            if ob.modifiers:
                # Also select the .l/.r counterpart of the active deform bone
                # so multi-paint shows both halves.  Only the first armature
                # modifier is considered (break below).
                for mod in ob.modifiers:
                    if mod.type == 'ARMATURE':
                        if mod.object:
                            arm = mod.object.data
                            activeBone = arm.bones.active
                            mirrorBone = ''
                            if activeBone:
                                if '.l' in activeBone.name:
                                    mirrorBone = activeBone.name.replace('.l','.r')
                                if '.r' in activeBone.name:
                                    mirrorBone = activeBone.name.replace('.r','.l')
                                if mirrorBone:
                                    arm.bones[mirrorBone].select = True
                            break
        else:
            # Mirror editing switched off: revert every helper toggle.
            kmi.active = False
            ob.data.use_mirror_x = False
            ob.data.use_mirror_topology = False
            scene.tool_settings.use_multipaint = False
class cl52(bpy.types.Operator):
    # Click-through select used while X-mirror weight painting (see
    # zbWeightMirror): forwards the click to the regular viewport select,
    # then additionally selects the mirrored .l/.r bone of the first
    # armature modifier so both halves stay highlighted for multi-paint.
    bl_idname =i_0[176]
    bl_label =i_0[177]
    def execute(self,context):
        scene = bpy.context.scene
        bpy.ops.view3d.select('INVOKE_DEFAULT')
        ob = bpy.context.active_object
        if ob.modifiers:
            for mod in ob.modifiers:
                if mod.type == 'ARMATURE':
                    if mod.object:
                        arm = mod.object.data
                        activeBone = arm.bones.active
                        mirrorBone = ''
                        if activeBone:
                            if '.l' in activeBone.name:
                                mirrorBone = activeBone.name.replace('.l','.r')
                            if '.r' in activeBone.name:
                                mirrorBone = activeBone.name.replace('.r','.l')
                            if mirrorBone:
                                arm.bones[mirrorBone].select = True
                        break
        if scene.tool_settings.use_multipaint:
            # NOTE(review): re-assigning the already-True flag looks like a
            # deliberate refresh nudge -- confirm before removing.
            scene.tool_settings.use_multipaint = True
        return{'FINISHED'}
def fu31(self,context):
    """Update callback for Brush.zb_hardness: rebuild the active brush's
    falloff curve so its edge softness tracks the hardness percentage."""
    area_type = bpy.context.area.type
    settings = bpy.context.tool_settings
    mode = bpy.context.mode
    if mode == 'PAINT_TEXTURE' or area_type == 'IMAGE_EDITOR':
        brush = settings.image_paint.brush
    if mode == 'SCULPT':
        brush = settings.sculpt.brush
    if mode == 'PAINT_WEIGHT':
        brush = settings.weight_paint.brush
    if mode == 'PAINT_VERTEX':
        brush = settings.vertex_paint.brush
    # Map the 0-100 hardness slider onto the usable 0.30-0.98 curve range.
    hardness = float(brush.zb_hardness/100)
    hardness = min(max(hardness, 0.30), 0.98)
    bpy.ops.brush.curve_preset(shape='LINE')
    curve = bpy.data.brushes[brush.name].curve
    curve.curves[0].points.new(hardness, hardness)
    curve.update()
def fu32(self,context):
    """Update callback for Brush.zb_texscale: rescale the brush texture.

    The 0-100 slider value is inverted (higher percentage -> larger scale
    factor removed), then applied proportionally to the per-axis baseline
    scales cached in zb_texscale_x/y/z on first use.  The mask texture's
    scale is kept in sync with the main texture.
    """
    toolSet = bpy.context.tool_settings
    # Last matching mode wins; with no paint/sculpt object active, "brush"
    # stays unbound and the NameError below surfaces the misuse (unchanged).
    if context.image_paint_object:
        brush = toolSet.image_paint.brush
    if context.vertex_paint_object:
        brush = toolSet.vertex_paint.brush
    if context.sculpt_object:
        brush = toolSet.sculpt.brush
    tex = brush.texture_slot
    percent = brush.zb_texscale
    if percent > 97:
        percent = 97  # clamp so the factor below never collapses to ~0
    percent = 100 - percent
    x = tex.scale[0]
    y = tex.scale[1]
    z = tex.scale[2]
    # First run: remember 3x the current scale as the per-axis baseline.
    if brush.zb_texscale_x == 0:
        brush.zb_texscale_x = x * 3
    if brush.zb_texscale_y == 0:
        brush.zb_texscale_y = y * 3
    if brush.zb_texscale_z == 0:
        brush.zb_texscale_z = z * 3
    x = brush.zb_texscale_x
    y = brush.zb_texscale_y
    z = brush.zb_texscale_z
    tex.scale[0] = (percent * x) / 100.0
    tex.scale[1] = (percent * y) / 100.0
    # Removed the dead "zPer = ..." intermediate binding; it was never read.
    tex.scale[2] = (percent * z) / 100.0
    brush.mask_texture_slot.scale.xyz = brush.texture_slot.scale.xyz
class cl53(bpy.types.PropertyGroup):
    """Declares every Zero Brush preference as a property on Blender's
    Scene / WindowManager / Brush RNA types.

    All assignments below run once at class-definition time; the
    PropertyGroup body is only used as a convenient namespace.
    """
    # Shorthand aliases for the RNA types the properties attach to.
    wm = bpy.types.WindowManager
    scene = bpy.types.Scene
    # Reusable footer appended to descriptions of startup-file settings.
    txtNote1 = 'Open a new file, adjust these settings, then go to "File" and '
    txtNote2 = 'press "Save Startup File" to keep your changes.'
    txtNote=txtNote1+txtNote2
    # --- Brush texture scale / falloff hardness ---------------------------
    bpy.types.Brush.zb_texscale = bpy.props.IntProperty(
        name = 'Brush Scale',
        subtype = 'PERCENTAGE', min = 0, max = 100,
        default = 50,
        description = 'Increase or decrease the overall scale of the texture being used with this brush.',
        update = fu32)
    # Per-axis baseline scales cached by fu32 on first use (0 = unset).
    bpy.types.Brush.zb_texscale_x = bpy.props.IntProperty()
    bpy.types.Brush.zb_texscale_y = bpy.props.IntProperty()
    bpy.types.Brush.zb_texscale_z = bpy.props.IntProperty()
    bpy.types.Brush.zb_hardness = bpy.props.IntProperty(
        name = 'Brush Hardness',
        subtype = 'PERCENTAGE', min = 0, max = 100,
        default = 50,
        description = 'Soften or harden the edge of the brush (controls brush curve)',
        update = fu31)
    # --- Baking options ---------------------------------------------------
    txt1 = 'Use user defined bake values from the scene instead of predefined Zero Brush '
    txt2 = 'values. This includes: "Margin", "Distance", "Bias", and "Extrusion" values. '
    txt3 = '"Selected To Active" can still be set via the ZB options menu before baking, shortened to "Sel-To-Active".'
    scene.zbUserBakeSettings = bpy.props.BoolProperty(
        name = 'User Bake Settings',
        default = False,
        description = txt1+txt2+txt3
        )
    txt1 = 'Bake the contents of the first selected object to the active object '
    txt2 = '(the last Shift-selected object). Leaving this option on with only one '
    txt3 = 'object selected may cause strange bake results.'
    scene.zbBakeSelectedToActive = bpy.props.BoolProperty(
        name = 'Selected To Active',
        default = False,
        description = txt1+txt2+txt3
        )
    txt1 = 'Checking this will cause all selected objects to bake to the same uv map '
    txt2 = 'and image. The map size and type will be generated from the current Zero Brush '
    txt3 = 'settings which can be adjusted in the options menu (adjust these settings before '
    txt4 = 'baking if needed).'
    scene.zbBakeSingle = bpy.props.BoolProperty(
        name = 'Bake Single',
        default = False,
        description = txt1+txt2+txt3+txt4
        )
    scene.zbDisablePopup = bpy.props.BoolProperty(
        name = 'Disable Popup Info',
        default=False,
        description = 'Disable popup information related to Zero Brush tools and baking. '+txtNote
        )
    # --- Material / render-engine behaviour -------------------------------
    text1 = 'Paint ZB layers onto complex materials rather than overwrite them with new Zero Brush '
    text2 = 'materials. Can not switch between BR and Cycles for a material painted with this mode. '
    scene.zbAState = bpy.props.BoolProperty(default=False,
        description = text1+text2+txtNote)
    # --- Weight painting --------------------------------------------------
    text1 = 'Pressing this will mirror the weight paint of your object along the '
    text2 = 'X axis (if mesh is properly symmetrical). It will also activate, '
    text3 = '"Multi-Paint" option so you can select the bone (and opposite mirrored '
    text4 = 'bone), so you can see the effects of the mirror painting.'
    text5 = text1+text2+text3+text4
    scene.zbWeightMirror = bpy.props.BoolProperty(
        name = 'X Axis Mirror Editing',
        description = text5,
        default = False,
        update = zbWeightMirror
        )
    wm.zbMakeBonesSelectable = bpy.props.BoolProperty(
        name = 'Make Bones Selectable',
        description = 'Press this if unable to select bones while weight painting',
        default = False,
        update = zbMakeBonesSelectable
        )
    wm.zbLastObjectMode = bpy.props.StringProperty()
    # --- Particle (hair) layers -------------------------------------------
    wm.zbSelectParticleMat = bpy.props.StringProperty(
        name = 'Select Material',
        description = 'Select a material to use for this particle layer.',
        update = zbSelectParticleMat
        )
    scene.zbParticleDetailSelect = bpy.props.StringProperty(
        default = 'PARTICLE_EDIT'
        )
    # --- Cycles / viewport conveniences -----------------------------------
    scene.zbAutoGPURender = bpy.props.BoolProperty(default = False,
        name = 'Auto GPU Render',
        update = zbAutoGPURender,
        description = 'Automatically activate the highest possible settings for your graphics\
        card (or cards) to increase Cycles rendering speed. Uncheck to return to CPU render. '+txtNote
        )
    scene.zbAutoConvertCycles = bpy.props.BoolProperty(default = True,
        description = 'Convert all objects with uv mapped image materials (even if not painted with ZB) to \
        Cycles compatible materials (if does not have already) when using Cycles. '+txtNote
        )
    scene.zbPrecisePaintOption = bpy.props.BoolProperty(default = False,
        description = 'Use W,S,A,D to rotate and arrow keys to pan in texture paint mode.\
        Use Ctrl + arrow keys to precise move stencil. MMB to exit',
        update = zbPrecisePaintOption
        )
    scene.zbAutoConvertLamps = bpy.props.BoolProperty(default = False,
        description = 'Auto convert lamps to work for both Cycles and Blender \
        Render when switching between the two engines')
    scene.zbDistractionFreeMode = bpy.props.BoolProperty(default = True,
        update = zbDistractionFreeMode,
        description="Auto shut off grid and axis when sculpting and painting. "+txtNote)
    wm.zbGradientSwitch = bpy.props.BoolProperty(
        default= False,
        description= 'Switch between gradient and color wheel modes',
        update = zbGradientSwitch
        )
    wm.zbBeforeGradBrush = bpy.props.StringProperty()
    wm.zbLampBufferSize = bpy.props.IntProperty(default=0)
    # --- Layer saving -----------------------------------------------------
    wm.zbSaveLayerOptions = bpy.props.BoolProperty(default = False)
    scene.zbSaveToHardDrive = bpy.props.BoolProperty(default = False,
        description='Save images dynamically to a folder on your\
        hard drive so you can export or edit them externaly')
    wm.zbSaveImagePath = bpy.props.StringProperty(
        name = "ZB Save Image Path",
        description = "Choose path to save images to disk",
        subtype = 'DIR_PATH',
        update = fu13)
    wm.zbNewSavePath = bpy.props.BoolProperty(
        default = True)
    wm.zbSaveType = bpy.props.EnumProperty(
        update = zbSaveType,
        items=(
        ('.PNG', ".PNG", "",'NONE', 0),
        ('.TIFF', ".TIFF", "",'NONE', 1),
        ('.JPEG', ".JPEG", "",'NONE', 2),
        ('.TGA', ".TGA", "",'NONE', 3),
        ))
    # --- UV / layer creation ----------------------------------------------
    txt1 = 'Use "Lightmap Pack" to create the object uv map instead of '
    txt2 = '"Smart UV Project" (if no seams or existing uv-map found).'
    scene.zbUseLightMap = bpy.props.BoolProperty(default = False,
        description = txt1+txt2
        )
    wm.zbLastBrushBlend = bpy.props.StringProperty(
        name = "Last Brush Blend Mode",
        default = "MIX")
    txt1 = 'The size of the next layer you add (also controls size of new bakes and normal maps). '
    txt2 = 'Changing the width value automatically sets the height value (to save time) to be the same so if you '
    txt3 = 'need different values for width and height, set the height value after changing the width.'
    scene.zbImgSize = bpy.props.FloatProperty(
        name="New Layer Size",
        description = txt1+txt2+txt3,
        default = 2048,
        update = fu29,
        min = 64,
        precision = 0,
        step = 6400)
    scene.zbImgSizeH = bpy.props.FloatProperty(
        name = 'Next new layer height',
        description = txt1+txt2+txt3,
        default = 2048,
        precision = 0,
        min = 64,
        step = 6400)
    wm.zbHidePaintOptions = bpy.props.BoolProperty(
        description = 'Expand Brush Options')
    wm.showBrushLayerOptions = bpy.props.BoolProperty(default=False)
    scene.zbAutoSaveLayers = bpy.props.BoolProperty(
        name = "Auto Save layers", description = "Autosave paint layers when exiting\
        texture paint mode (via ZB's mode selection menu). "+txtNote,
        default = True)
    scene.zbSaveWhenSave = bpy.props.BoolProperty(
        name = 'Autosave (with file)',
        description = 'Ensure that layer images are saved whenever saving the Blend file. '+txtNote,
        default = True
        )
    # --- Painting behaviour toggles ---------------------------------------
    wm.zbPaintThrough = bpy.props.BoolProperty(
        name = "Paint Through", description = "Paint or erase all the way through your object",
        update = fu23)
    scene.zbDisableShadows = bpy.props.BoolProperty(
        default = True,
        name = "Disable Shadows",
        description = "Disable shadows from lamps when sculpting or \
        texture painting (increases performance).", update = fu23)
    scene.zbFastMode = bpy.props.BoolProperty(
        name = "Fast Mode Toggle", description = "Speed up performance in the 3D Viewport by lowering detail accross the scene",
        update = fu9)
    wm.zbViewMaskMode = bpy.props.BoolProperty(
        name = "View Mask Mode", description = "view your mask brush strokes",
        update = fu6)
    wm.zbUseBrushColor = bpy.props.BoolProperty(
        name = "Use Brush Color", description = "Use your brush's current color\
        as the base color for your next new layer")
    scene.zbLoadImgSculpt = bpy.props.BoolProperty(
        name = "Texture And Sculpt Brush", description = "Load brushes for sculpt\
        and texture paint modes instead of just the mode you're using.")
    scene.zbLoadBrushType = bpy.props.EnumProperty(
        items=(
        ('default', "Load As Default", "When you go to\
        load a brush it will load as a default brush",'RADIOBUT_OFF', 0),
        ('decal', "Load As Decal", "Decal brushes allow you to\
        drag the image your brush is using any where on your object",'RADIOBUT_OFF', 1),
        ('stencil', "Load As Stencil", "This setting will cause brushes\
        to load as a stencil which you can then paint on to your object",'RADIOBUT_OFF', 2),
        ('hard', "Load As Hard", "Hard brushes are ideal for covering\
        a lot of area quickly",'RADIOBUT_OFF', 3)
        )
        )
    # --- Render-engine switching ------------------------------------------
    scene.zbGoCycles = bpy.props.BoolProperty(
        name = "Change To Cycles",
        description = "Switch between Cycles and Blender Internal Render Engine",
        update = fu26)
    scene.zbQuickLights = bpy.props.BoolProperty(
        name = "ZB Quick Lights",
        description = 'Generate an advanced light rig while in Cycles',
        update = fu24)
    scene.zbLastWorld = bpy.props.StringProperty(
        name = "ZB Last World", description = "Remember the last world child",)
# Keymaps created in register(), removed again in unregister().
addon_keymaps = []
def fu33(kmi_props, attr, value):
    """Best-effort setattr for keymap-item properties.

    Silently ignores properties that do not exist on the running Blender
    build, so keymap registration keeps working across versions.
    """
    try:
        setattr(kmi_props, attr, value)
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # still propagate; missing/readonly attributes are ignored.
        pass
def register():
    """Register all add-on classes and install the Zero Brush keymaps.

    FIX: the original appended only the LAST created keymap to
    addon_keymaps, so unregister() leaked every other keymap.  Each keymap
    is now recorded (guarded, since keymaps.new() can hand back an already
    existing keymap such as the two '3D View' requests below).

    The whole keymap setup stays best-effort: a missing addon keyconfig
    (e.g. background mode) must not prevent registration.
    """
    bpy.utils.register_module(__name__)
    try:
        wm = bpy.context.window_manager
        # --- 3D View: init listener + layer options menu ---
        km = wm.keyconfigs.addon.keymaps.new(name='3D View', space_type='VIEW_3D')
        if km not in addon_keymaps:
            addon_keymaps.append(km)
        kmi = km.keymap_items.new("screen.sf_zb_init_listener", 'MOUSEMOVE', 'ANY')
        kmi = km.keymap_items.new("wm.call_menu", 'Q','PRESS')
        fu33(kmi.properties, 'name', 'view3D.zb_layer_options_menu')
        # --- Sculpt mode: brush menu, multires, smooth/inverse strokes,
        #     radial controls for size / strength / texture angle ---
        km = wm.keyconfigs.addon.keymaps.new(name='Sculpt', space_type='EMPTY')
        if km not in addon_keymaps:
            addon_keymaps.append(km)
        kmi = km.keymap_items.new("wm.call_menu", 'SPACE','PRESS')
        fu33(kmi.properties, 'name', 'menu.zb_brush_menu')
        kmi = km.keymap_items.new("object.zb_sub_multires", 'W','PRESS')
        kmi = km.keymap_items.new('sculpt.zb_stroke_smooth', 'LEFTMOUSE', 'PRESS', ctrl=True)
        kmi = km.keymap_items.new('sculpt.zb_stroke_inverse', 'LEFTMOUSE', 'PRESS', shift=True)
        kmi = km.keymap_items.new('wm.radial_control', 'X', 'PRESS')
        fu33(kmi.properties, 'data_path_primary', 'tool_settings.sculpt.brush.size')
        fu33(kmi.properties, 'data_path_secondary', 'tool_settings.unified_paint_settings.size')
        fu33(kmi.properties, 'use_secondary', 'tool_settings.unified_paint_settings.use_unified_size')
        fu33(kmi.properties, 'rotation_path', 'tool_settings.sculpt.brush.texture_slot.angle')
        fu33(kmi.properties, 'color_path', 'tool_settings.sculpt.brush.cursor_color_add')
        fu33(kmi.properties, 'image_id', 'tool_settings.sculpt.brush')
        fu33(kmi.properties, 'secondary_tex', False)
        kmi = km.keymap_items.new('wm.radial_control', 'X', 'PRESS', shift=True)
        fu33(kmi.properties, 'data_path_primary', 'tool_settings.sculpt.brush.strength')
        fu33(kmi.properties, 'data_path_secondary', 'tool_settings.unified_paint_settings.strength')
        fu33(kmi.properties, 'use_secondary', 'tool_settings.unified_paint_settings.use_unified_strength')
        fu33(kmi.properties, 'rotation_path', 'tool_settings.sculpt.brush.texture_slot.angle')
        fu33(kmi.properties, 'color_path', 'tool_settings.sculpt.brush.cursor_color_add')
        fu33(kmi.properties, 'image_id', 'tool_settings.sculpt.brush')
        fu33(kmi.properties, 'secondary_tex', False)
        kmi = km.keymap_items.new('wm.radial_control', 'X', 'PRESS', ctrl=True)
        fu33(kmi.properties, 'data_path_primary', 'tool_settings.sculpt.brush.texture_slot.angle')
        fu33(kmi.properties, 'rotation_path', 'tool_settings.sculpt.brush.texture_slot.angle')
        fu33(kmi.properties, 'color_path', 'tool_settings.sculpt.brush.cursor_color_add')
        fu33(kmi.properties, 'image_id', 'tool_settings.sculpt.brush')
        fu33(kmi.properties, 'secondary_tex', False)
        # --- Image (texture) paint mode ---
        km = wm.keyconfigs.addon.keymaps.new(name='Image Paint', space_type='EMPTY', modal=False)
        if km not in addon_keymaps:
            addon_keymaps.append(km)
        kmi = km.keymap_items.new("wm.call_menu", 'SPACE','PRESS')
        fu33(kmi.properties, 'name', 'menu.zb_brush_menu')
        kmi = km.keymap_items.new("paint.zb_erase_paint", 'LEFTMOUSE','PRESS', shift=True)
        kmi = km.keymap_items.new('wm.radial_control', 'X', 'PRESS')
        fu33(kmi.properties, 'data_path_primary', 'tool_settings.image_paint.brush.size')
        fu33(kmi.properties, 'data_path_secondary', 'tool_settings.unified_paint_settings.size')
        fu33(kmi.properties, 'use_secondary', 'tool_settings.unified_paint_settings.use_unified_size')
        fu33(kmi.properties, 'rotation_path', 'tool_settings.image_paint.brush.mask_texture_slot.angle')
        fu33(kmi.properties, 'color_path', 'tool_settings.image_paint.brush.cursor_color_add')
        fu33(kmi.properties, 'fill_color_path', 'tool_settings.image_paint.brush.color')
        fu33(kmi.properties, 'zoom_path', 'space_data.zoom')
        fu33(kmi.properties, 'image_id', 'tool_settings.image_paint.brush')
        fu33(kmi.properties, 'secondary_tex', False)
        kmi = km.keymap_items.new('wm.radial_control', 'X', 'PRESS', shift=True)
        fu33(kmi.properties, 'data_path_primary', 'tool_settings.image_paint.brush.strength')
        fu33(kmi.properties, 'data_path_secondary', 'tool_settings.unified_paint_settings.strength')
        fu33(kmi.properties, 'use_secondary', 'tool_settings.unified_paint_settings.use_unified_strength')
        fu33(kmi.properties, 'rotation_path', 'tool_settings.image_paint.brush.mask_texture_slot.angle')
        fu33(kmi.properties, 'color_path', 'tool_settings.image_paint.brush.cursor_color_add')
        fu33(kmi.properties, 'fill_color_path', 'tool_settings.image_paint.brush.color')
        fu33(kmi.properties, 'image_id', 'tool_settings.image_paint.brush')
        fu33(kmi.properties, 'secondary_tex', False)
        kmi = km.keymap_items.new('wm.radial_control', 'X', 'PRESS', ctrl=True)
        fu33(kmi.properties, 'data_path_primary', 'tool_settings.image_paint.brush.texture_slot.angle')
        fu33(kmi.properties, 'rotation_path', 'tool_settings.image_paint.brush.texture_slot.angle')
        fu33(kmi.properties, 'color_path', 'tool_settings.image_paint.brush.cursor_color_add')
        fu33(kmi.properties, 'fill_color_path', 'tool_settings.image_paint.brush.color')
        fu33(kmi.properties, 'image_id', 'tool_settings.image_paint.brush')
        fu33(kmi.properties, 'secondary_tex', False)
        kmi = km.keymap_items.new('wm.radial_control', 'X', 'PRESS', ctrl=True, alt=True)
        fu33(kmi.properties, 'data_path_primary', 'tool_settings.image_paint.brush.mask_texture_slot.angle')
        fu33(kmi.properties, 'rotation_path', 'tool_settings.image_paint.brush.mask_texture_slot.angle')
        fu33(kmi.properties, 'color_path', 'tool_settings.image_paint.brush.cursor_color_add')
        fu33(kmi.properties, 'fill_color_path', 'tool_settings.image_paint.brush.color')
        fu33(kmi.properties, 'image_id', 'tool_settings.image_paint.brush')
        fu33(kmi.properties, 'secondary_tex', True)
        kmi = km.keymap_items.new('paint.sample_color', 'I', 'PRESS')
        kmi = km.keymap_items.new('paint.brush_colors_flip', 'C', 'PRESS')
        # --- Vertex paint mode ---
        km = wm.keyconfigs.addon.keymaps.new(name='Vertex Paint', space_type='EMPTY')
        if km not in addon_keymaps:
            addon_keymaps.append(km)
        kmi = km.keymap_items.new("wm.call_menu", 'SPACE','PRESS')
        fu33(kmi.properties, 'name', 'menu.zb_brush_menu')
        kmi = km.keymap_items.new("paint.zb_erase_paint", 'LEFTMOUSE','PRESS', shift=True)
        kmi = km.keymap_items.new("paint.zb_erase_paint", 'LEFTMOUSE','PRESS', ctrl=True)
        kmi = km.keymap_items.new('wm.radial_control', 'X', 'PRESS')
        fu33(kmi.properties, 'data_path_primary', 'tool_settings.vertex_paint.brush.size')
        fu33(kmi.properties, 'data_path_secondary', 'tool_settings.unified_paint_settings.size')
        fu33(kmi.properties, 'use_secondary', 'tool_settings.unified_paint_settings.use_unified_size')
        fu33(kmi.properties, 'rotation_path', 'tool_settings.vertex_paint.brush.texture_slot.angle')
        fu33(kmi.properties, 'color_path', 'tool_settings.vertex_paint.brush.cursor_color_add')
        fu33(kmi.properties, 'fill_color_path', 'tool_settings.vertex_paint.brush.color')
        fu33(kmi.properties, 'image_id', 'tool_settings.vertex_paint.brush')
        fu33(kmi.properties, 'secondary_tex', False)
        kmi = km.keymap_items.new('wm.radial_control', 'X', 'PRESS', shift=True)
        fu33(kmi.properties, 'data_path_primary', 'tool_settings.vertex_paint.brush.strength')
        fu33(kmi.properties, 'data_path_secondary', 'tool_settings.unified_paint_settings.strength')
        fu33(kmi.properties, 'use_secondary', 'tool_settings.unified_paint_settings.use_unified_strength')
        fu33(kmi.properties, 'rotation_path', 'tool_settings.vertex_paint.brush.texture_slot.angle')
        fu33(kmi.properties, 'color_path', 'tool_settings.vertex_paint.brush.cursor_color_add')
        fu33(kmi.properties, 'fill_color_path', 'tool_settings.vertex_paint.brush.color')
        fu33(kmi.properties, 'image_id', 'tool_settings.vertex_paint.brush')
        fu33(kmi.properties, 'secondary_tex', False)
        kmi = km.keymap_items.new('wm.radial_control', 'X', 'PRESS', ctrl=True)
        fu33(kmi.properties, 'data_path_primary', 'tool_settings.vertex_paint.brush.texture_slot.angle')
        fu33(kmi.properties, 'rotation_path', 'tool_settings.vertex_paint.brush.texture_slot.angle')
        fu33(kmi.properties, 'color_path', 'tool_settings.vertex_paint.brush.cursor_color_add')
        fu33(kmi.properties, 'fill_color_path', 'tool_settings.vertex_paint.brush.color')
        fu33(kmi.properties, 'image_id', 'tool_settings.vertex_paint.brush')
        fu33(kmi.properties, 'secondary_tex', False)
        kmi = km.keymap_items.new('paint.sample_color', 'I', 'PRESS')
        # --- Weight paint mode ---
        km = wm.keyconfigs.addon.keymaps.new(name='Weight Paint', space_type='EMPTY')
        if km not in addon_keymaps:
            addon_keymaps.append(km)
        # Mirror-select starts disabled; zbWeightMirror toggles it.
        kmi = km.keymap_items.new("paint.zb_select_weight_mirror", 'SELECTMOUSE', 'PRESS', ctrl = True)
        kmi.active = False
        kmi = km.keymap_items.new("wm.call_menu", 'SPACE','PRESS')
        fu33(kmi.properties, 'name', 'menu.zb_brush_menu')
        kmi = km.keymap_items.new("paint.zb_erase_paint", 'LEFTMOUSE','PRESS', shift=True)
        kmi = km.keymap_items.new('wm.radial_control', 'X', 'PRESS')
        fu33(kmi.properties, 'data_path_primary', 'tool_settings.weight_paint.brush.size')
        fu33(kmi.properties, 'data_path_secondary', 'tool_settings.unified_paint_settings.size')
        fu33(kmi.properties, 'use_secondary', 'tool_settings.unified_paint_settings.use_unified_size')
        fu33(kmi.properties, 'rotation_path', 'tool_settings.weight_paint.brush.texture_slot.angle')
        fu33(kmi.properties, 'color_path', 'tool_settings.weight_paint.brush.cursor_color_add')
        fu33(kmi.properties, 'image_id', 'tool_settings.weight_paint.brush')
        fu33(kmi.properties, 'secondary_tex', False)
        kmi = km.keymap_items.new('wm.radial_control', 'X', 'PRESS', shift=True)
        fu33(kmi.properties, 'data_path_primary', 'tool_settings.weight_paint.brush.strength')
        fu33(kmi.properties, 'data_path_secondary', 'tool_settings.unified_paint_settings.strength')
        fu33(kmi.properties, 'use_secondary', 'tool_settings.unified_paint_settings.use_unified_strength')
        fu33(kmi.properties, 'rotation_path', 'tool_settings.weight_paint.brush.texture_slot.angle')
        fu33(kmi.properties, 'color_path', 'tool_settings.weight_paint.brush.cursor_color_add')
        fu33(kmi.properties, 'image_id', 'tool_settings.weight_paint.brush')
        fu33(kmi.properties, 'secondary_tex', False)
        # --- Paint curve ---
        km = wm.keyconfigs.addon.keymaps.new(name='Paint Curve', space_type='EMPTY')
        if km not in addon_keymaps:
            addon_keymaps.append(km)
        kmi = km.keymap_items.new("paintcurve.add_point_slide", 'LEFTMOUSE','PRESS', ctrl=True)
        # --- Particle (hair) edit mode ---
        km = wm.keyconfigs.addon.keymaps.new(name='Particle', space_type='EMPTY')
        if km not in addon_keymaps:
            addon_keymaps.append(km)
        kmi = km.keymap_items.new("wm.call_menu", 'SPACE','PRESS')
        fu33(kmi.properties, 'name', 'menu.zb_brush_menu')
        kmi = km.keymap_items.new("wm.radial_control", 'X','PRESS')
        fu33(kmi.properties, 'data_path_primary', 'tool_settings.particle_edit.brush.size')
        kmi = km.keymap_items.new("wm.radial_control", 'X','PRESS', shift=True)
        fu33(kmi.properties, 'data_path_primary', 'tool_settings.particle_edit.brush.strength')
        # --- 3D View: render preview shortcut (same keymap as the first
        #     '3D View' request; the guard avoids a duplicate record) ---
        km = wm.keyconfigs.addon.keymaps.new(name='3D View', space_type='VIEW_3D')
        if km not in addon_keymaps:
            addon_keymaps.append(km)
        kmi = km.keymap_items.new("object.zb_render_prev", 'Z','PRESS', shift = True)
    except Exception:
        # Narrowed from a bare "except:"; keymap failures are still
        # swallowed deliberately, but Ctrl-C / SystemExit now propagate.
        pass
def unregister():
    """Unregister the add-on's classes and remove every keymap that
    register() recorded in addon_keymaps."""
    bpy.utils.unregister_module(__name__)
    window_mgr = bpy.context.window_manager
    for keymap in addon_keymaps:
        window_mgr.keyconfigs.addon.keymaps.remove(keymap)
    addon_keymaps.clear()
if __name__ == "__main__":
register() | [
"[email protected]"
]
| |
6c9555e5229edd46116b0be86d1a84edff8b8db7 | 7bb7b95ba92409ca23c1774a1ed0aec9d9ab2c51 | /tutorial2_1.py | 84b409cb34ae81b5054bfe4008ca6f9fd2279ee5 | []
| no_license | vpobleteacustica/github_tutorial2 | 065945418640be0bd6364cf4ca0800ec30a59146 | 2a38c7707b07febaccdd062c98fde2cd293670d2 | refs/heads/main | 2023-09-03T14:09:47.074066 | 2021-11-02T19:53:03 | 2021-11-02T19:53:03 | 423,952,168 | 0 | 0 | null | 2021-11-02T19:53:04 | 2021-11-02T18:15:43 | Python | UTF-8 | Python | false | false | 91 | py | def hello_world(name):
print(f'Hello world {name}')
name = 'Víctor'
hello_world(name) | [
"="
]
| = |
e321495336794031ffac05cf2c6817555903bab5 | 5410700e83210d003f1ffbdb75499062008df0d6 | /Day-2/ndarray-6.py | 965e6ad37da413d65901a3423f451ab63c239b05 | []
| no_license | lilyandcy/python3 | 81182c35ab8b61fb86f67f7796e057936adf3ab7 | 11ef4ace7aa1f875491163d036935dd76d8b89e0 | refs/heads/master | 2021-06-14T18:41:42.089534 | 2019-10-22T00:24:30 | 2019-10-22T00:24:30 | 144,527,289 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 158 | py | import numpy as np
# Demonstrates basic 1-D ndarray slicing (start:stop:step semantics).
a = np.array([0, 1, 2, 3, 4])
print(a)        # [0 1 2 3 4]
print(a[1:3])   # [1 2]     elements 1..2 (stop is exclusive)
print(a[:3])    # [0 1 2]   implicit start
print(a[1:])    # [1 2 3 4] implicit stop
print(a[1:-1])  # [1 2 3]   negative index counts from the end
print(a[::2])   # [0 2 4]   every second element
print(a[1:4:2]) # [1 3]     start, stop and step together
print(a[::-1]) | [
"[email protected]"
]
| |
b2b0207764e1be3090b747924840f7fb6775ecbc | c2242e8ce873f68efaf5c813820543a495e0d2e5 | /project/attendance_system/student/.~c9_invoke_it09KR.py | 399e07d384fbdfacabf746a0c0e08588b04df4a2 | []
| no_license | BUBTAMS/Attendance_system | 7d5471dd699ee3f976051687274d721d8b519e90 | f831b29a7083da570f7eccc9ed0bb19b10023fbb | refs/heads/master | 2022-03-25T22:10:11.989744 | 2019-12-15T10:18:41 | 2019-12-15T10:18:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 893 | py | from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.db import models
from django.forms import ModelForm
from .models import studentprofile
# Field names of studentprofile shared by the forms in this module.
attributes=('student_id','image','mobile_number','parents_number','department','semester','dob','address','roll_no','batch','Class')
class StudentProfileDataForm(forms.ModelForm):
    """ModelForm exposing the editable columns of a studentprofile row."""
    class Meta:
        model=studentprofile
        fields=('student_id','image','mobile_number','parents_number','department','semester','dob','address','roll_no','batch','Class')
email=forms.EmailField()
first_name=forms.CharField()
last_name=forms.CharField()
subject=forms.Model
class Meta:
model= studentprofile
fields=attributes + ('email','first_name','last_name')
def __init__(self,em,fn,ln,*args,**kwargs):
super() | [
"ranjaneabhishek.com"
]
| ranjaneabhishek.com |
1899ab60d5f330bf69ce6a789f5aa04f754a4d41 | 9fab87766e5e4535db9d2a4f2bb9737d1c842ca9 | /python/test.py | 700fe0a846bbfbcfc8294bf961a98ca6e3aa8a0d | [
"Apache-2.0"
]
| permissive | lavizhao/StumbleUponEvergreen | f10ac075b7b582d68385ae3d8017615d501632c0 | 85658c4efeeacbb2beae3ad74f7ad176053e4a55 | refs/heads/master | 2016-09-05T13:57:26.138467 | 2014-06-11T13:34:44 | 2014-06-11T13:34:44 | 20,257,014 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 666 | py | #coding: utf-8
from read_conf import config
import csv
import numpy as np
from sklearn.preprocessing import Imputer
dp = config("../conf/dp.conf")
f = open(dp["raw_train"])
imp = Imputer(missing_values='NaN', strategy='mean', axis=0)
reader = csv.reader(f,delimiter='\t')
a = 0
train = []
for line in reader :
if a == 0:
a += 1
continue
temp = []
line = line[5:]
line = line[:-1]
for item in line:
if item == "?":
temp.append(np.nan)
else :
temp.append(float(item))
train.append(temp)
a += 1
imp.fit(train)
train = imp.transform(train)
for i in range(10):
print train[i]
| [
"[email protected]"
]
| |
0ac18fe3f7edb520beebb06692cc97b23e5356d0 | f78ddd04ac900bfe670ae841ad8b05fbd6fd305d | /collective/powertoken/view/tests/base.py | 0115392663f1379ff88738dbe412242f5028fa2c | []
| no_license | RedTurtle/collective.powertoken.view | 9b0f0ca53ae9c09258f71b95d0ef2ec90b12e88d | 0a5bc89b78918d9319d4e73adc12cf42d42b11da | refs/heads/master | 2022-12-24T10:12:31.102480 | 2012-02-15T14:38:41 | 2012-02-15T14:38:41 | 3,111,637 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,673 | py | # -*- coding: utf-8 -*-
from Products.Five import zcml
from Products.Five import fiveconfigure
#from Testing import ZopeTestCase as ztc
from Products.PloneTestCase import PloneTestCase as ptc
from Products.PloneTestCase.layer import onsetup
@onsetup
def setup_product():
    """Set up additional products and ZCML required to test this product.

    The @onsetup decorator causes the execution of this body to be deferred
    until the setup of the Plone site testing layer.
    """
    # Load the ZCML configuration for this package and its dependencies.
    # debug_mode gives full tracebacks if the ZCML fails to parse.
    fiveconfigure.debug_mode = True
    import collective.powertoken.core
    import collective.powertoken.view
    zcml.load_config('configure.zcml', collective.powertoken.core)
    zcml.load_config('configure.zcml', collective.powertoken.view)
    fiveconfigure.debug_mode = False
    # We need to tell the testing framework that these products
    # should be available. This can't happen until after we have loaded
    # the ZCML.
    #ztc.installPackage('collective.powertoken')
    # provideAdapter(
    #     TestPowerActionProvider,
    #     (IContentish,
    #      IHTTPRequest),
    #     provides=IPowerActionProvider,
    #     name='foo'
    # )
# The order here is important: We first call the deferred function and then
# let PloneTestCase install it during Plone site setup
setup_product()
#ptc.setupPloneSite(products=['collective.powertoken.view'])
ptc.setupPloneSite()  # registers a plain Plone site fixture for the tests
class TestCase(ptc.PloneTestCase):
    """Base class for this package's unit and integration test cases.
    """
class FunctionalTestCase(ptc.FunctionalTestCase):
    """Base class for functional (doc-)tests of this package.
    """
| [
"[email protected]"
]
| |
011b38953110ae8e07b6bf0a7eb33ac9fe408b87 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/republican.py | 1a99ad212489a57bc5f62e301acf0aef831ebb41 | []
| no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 1,212 | py | ii = [('CookGHP3.py', 12), ('MarrFDI.py', 2), ('CoolWHM2.py', 1), ('ChanWS.py', 5), ('SadlMLP.py', 2), ('WilbRLW.py', 1), ('AubePRP2.py', 1), ('CookGHP.py', 32), ('MartHSI2.py', 12), ('KembFJ1.py', 4), ('WilbRLW5.py', 2), ('LeakWTI3.py', 1), ('MarrFDI3.py', 4), ('PeckJNG.py', 3), ('WilbRLW2.py', 5), ('ClarGE2.py', 9), ('CarlTFR.py', 38), ('GrimSLE.py', 1), ('RoscTTI3.py', 9), ('CookGHP2.py', 1), ('RoscTTI2.py', 4), ('MarrFDI2.py', 4), ('ClarGE.py', 4), ('GilmCRS.py', 1), ('DaltJMA.py', 2), ('DibdTRL2.py', 1), ('CrocDNL.py', 1), ('LandWPA2.py', 4), ('WadeJEB.py', 5), ('GodwWLN.py', 3), ('CoopJBT.py', 2), ('LeakWTI4.py', 3), ('LeakWTI.py', 2), ('MedwTAI2.py', 1), ('BachARE.py', 1), ('SoutRD.py', 1), ('HowiWRL2.py', 1), ('MereHHB.py', 1), ('MartHRW.py', 9), ('MackCNH.py', 17), ('FitzRNS4.py', 2), ('FitzRNS.py', 2), ('RoscTTI.py', 2), ('ThomGLG.py', 4), ('StorJCC.py', 40), ('KembFJ2.py', 4), ('MackCNH2.py', 12), ('JacoWHI2.py', 2), ('HaliTBC.py', 7), ('ClarGE3.py', 5), ('MartHRW2.py', 6), ('FitzRNS2.py', 1), ('MartHSI.py', 27), ('EvarJSP.py', 10), ('DwigTHH.py', 39), ('NortSTC.py', 1), ('TaylIF.py', 2), ('WordWYR.py', 1), ('ThomWEC.py', 18), ('WaylFEP.py', 1), ('ClarGE4.py', 2), ('AdamJOA.py', 4)] | [
"[email protected]"
]
| |
41187baa2e786d43ecffa07d0bba00d160a1cf24 | 597d8b96c8796385b365f79d7a134f828e414d46 | /pythonTest/cn/sodbvi/exercise/example076.py | 022d87cbffcbf48924dbd4c82812d553518ea222 | []
| no_license | glorysongglory/pythonTest | a938c0184c8a492edeba9237bab1c00d69b0e5af | ed571d4c240fccfb4396e2890ad922726daa10a0 | refs/heads/master | 2021-01-21T11:39:35.657552 | 2019-08-14T02:49:26 | 2019-08-14T02:49:26 | 52,493,444 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 503 | py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
'''
Created on 2016年1月12日
@author: sodbvi
'''
def peven(n):
    """Return the partial sum 1/2 + 1/4 + ... + 1/n over even integers up to n.

    Returns 0.0 when n < 2.
    """
    # The original initialized `i = 0` before the loop; the value was never
    # read, so the dead assignment is removed.
    s = 0.0
    for i in range(2, n + 1, 2):
        s += 1.0 / i
    return s
def podd(n):
    """Return the partial sum 1 + 1/3 + 1/5 + ... over odd integers up to n.

    Returns 0.0 when n < 1.
    """
    s = 0.0
    for i in range(1, n + 1, 2):
        # Use a float literal so the division is true division under
        # Python 2 as well: this file is Python 2 (raw_input / print
        # statement), where the original `1 / i` truncated to 0 for every
        # i > 1.  Matches the `1.0 / i` style used in peven().
        s += 1.0 / i
    return s
def dcall(fp, n):
    """Dispatch helper: invoke the callable *fp* with argument *n* and return the result."""
    result = fp(n)
    return result
if __name__ == '__main__':
    # Python 2 entry point: prompt for an integer and dispatch to the
    # even- or odd-harmonic partial sum via dcall().
    n = int(raw_input('input a number:\n'))
    if n % 2 == 0:
        # NOTE: `sum` shadows the builtin; harmless at script scope here.
        sum = dcall(peven,n)
    else:
        sum = dcall(podd,n)
print sum | [
"[email protected]"
]
| |
197169cda56b3bbb7963e86ddffa917b376e09b7 | 48bb4a0dbb361a67b88b7c7532deee24d70aa56a | /codekata/diftwostr.py | 7bc3823d1a60fba9ace83be0cf3bc0c788ab2075 | []
| no_license | PRAMILARASI/GUVI | 66080a80400888263d511138cb6ecd37540507c7 | 6a30a1d0a3f4a777db895f0b3adc8b0ac90fd25b | refs/heads/master | 2022-01-28T08:54:07.719735 | 2019-06-24T15:57:05 | 2019-06-24T15:57:05 | 191,355,070 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 556 | py | s=input()
r=input()
ga=[]
if (s.isalpha() or " " in s) and (r.isalpha() or " " in r):
s=list(s.split(" "))
r=list(r.split(" "))
for i in s:
if s.count(i) > r.count(i) and i not in g:
ga.append(i)
for i in r:
if r.count(i)>s.count(i) and i not in ga:
ga.append(i)
print(*ga)
else:
for i in s:
if s.count(i)>r.count(i) and i not in g:
ga.append(i)
for j in r:
if r.count(j)>s.count(j) and j not in ga:
ga.append(j)
print(*ga)
| [
"[email protected]"
]
| |
b0a7b8d41e7a3721214629851ac2f43b60da1a3a | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_decadence.py | e45ac2d2bb3034736876e778afd0265ab221b42d | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 321 | py |
#calss header
class _DECADENCE():
def __init__(self,):
self.name = "DECADENCE"
self.definitions = [u'low moral standards and behaviour: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
]
| |
594931710b7047f574166ef8b9ffc957b560e8f5 | dfb21d9a6537b86b4e62d1ee1dab562507133a72 | /python/get_data.py | 7cee60bf397736e798d3c871afb59e2b0cb517c1 | []
| no_license | kthyng/tabswebsite | a2ff8b540c59c8a66525997d4a1dd84dcd678ca9 | 3fcbd65a35a23b3aeb7143a54dfbfdf4ac8bbaf1 | refs/heads/master | 2021-06-01T15:10:10.446790 | 2021-04-06T15:37:14 | 2021-04-06T15:37:14 | 56,723,416 | 0 | 1 | null | 2016-09-14T18:15:50 | 2016-04-20T21:51:34 | JavaScript | UTF-8 | Python | false | false | 8,991 | py | '''
Get data into temporary text files and present it as table or image.
Or, if dstart/dend are not provided, read in previously-created daily file
and present as table or image.
example (since dstart and dend are optional)
run get_data.py '../tmp/tabs_F_ven_test' --dstart '2017-01-5' --dend '2017-01-5 00:00' 'data' --usemodel 'True'
run get_data.py '../tmp/tabs_F_ven_test' 'data'
run get_data.py '../tmp/ndbc_PTAT2_test' 'pic'
run get_data.py '../tmp/tabs_F_ven_test' 'data' --units 'E'
run get_data.py '../tmp/8770475' --dstart '2018-5-7' --dend '2018-5-12 00:00' 'pic' --usemodel 'False' --datum 'MLLW'
run get_data.py '../tmp/tabs_B_ven' --dstart '2018-6-1' --dend '2018-6-5 00:00' 'download' --usemodel 'True' --modelonly 'True' --s_rho '-999'
'''
import run_daily
import tools
import plot_buoy
import argparse
from os import path
import pandas as pd
from matplotlib.dates import date2num
import read
import logging
logging.basicConfig(filename=path.join('..', 'logs', 'get_data.log'),
level=logging.WARNING,
format='%(asctime)s %(message)s',
datefmt='%a %b %d %H:%M:%S %Z %Y')
bys = pd.read_csv('../includes/buoys.csv', index_col=0)
# parse the input arguments
parser = argparse.ArgumentParser()
parser.add_argument('fname', type=str, help='file name for either reading in or for saving to')
parser.add_argument('--dstart', type=str, help='dstart', default=None)
parser.add_argument('--dend', type=str, help='dend', default=None)
parser.add_argument('datatype', type=str, help='pic or data or download')
parser.add_argument('--units', type=str, help='units', default='M')
parser.add_argument('--tzname', type=str, help='time zone: "UTC" or "local" or "CST"', default='UTC')
parser.add_argument('--usemodel', type=str, help='plot model output', default='True')
parser.add_argument('--datum', type=str, help='Which tidal datum to use: "MHHW", "MHW", "MTL", "MSL", "MLW", "MLLW"', default='MSL')
parser.add_argument('--modelonly', type=str, help='Bonus option to be able to download model output. Excludes data.', default='False')
parser.add_argument('--s_rho', type=str,
                    help='Vertical layer for model output. Default gives surface of "-1". Input "-999" for full water column. There are 30 vertical layers to index.', default='-1')
args = parser.parse_args()
# unpack parsed arguments into the module-level names used below
fname = args.fname
datatype = args.datatype
units = args.units
tzname = args.tzname
usemodel = args.usemodel
dstart = args.dstart
dend = args.dend
datum = args.datum
modelonly = args.modelonly
s_rho = args.s_rho
# Map the requested time-zone name onto a zone string pandas understands.
# NOTE(review): an unrecognized tzname leaves `tz` undefined and the
# pd.Timestamp calls below then raise NameError -- confirm callers only
# pass the documented values.
if tzname.lower() in ['utc', 'gmt']:
    tz = 'UTC'
# CST or CDT as appropriate
elif tzname.lower() in ['local', 'cst/cdt', 'us/central']:
    tz = 'US/Central'
# CST only -- no transition for CDT
elif tzname.lower() in ['cst']:
    tz = 'Etc/GMT+6'
# can't figure out how to have variable from php a boolean so sending string
if usemodel == 'False':
    usemodel = False
elif usemodel == 'True':
    usemodel = True
if modelonly == 'False':
    modelonly = False
elif modelonly == 'True':
    modelonly = True
# change dstart and dend to datetime objects
if dstart is not None:
    dstart = pd.Timestamp(dstart, tz=tz)
    dend = pd.Timestamp(dend, tz=tz)
now = pd.Timestamp('now', tz=tz)
if dend is not None:
    # add a day to dend time so that desired day is included
    dend += pd.Timedelta('1 day')
# Derive the buoy name and database table from the file name:
# tabs files look like .../tabs_<buoy>_<table>..., others .../<buoy>_...
if 'tabs_' in fname: # only need table name for tabs
    table = fname.split('/')[-1].split('_')[2]
    buoy = fname.split('/')[-1].split('_')[1]
else:
    buoy = fname.split('/')[-1].split('_')[0]
    table = bys.loc[buoy,'table1']
# force the use of metric units if making a plot since both units shown anyway
if datatype == 'pic':
    units = 'M'
if datatype=='data':
    assert not modelonly, 'cannot select "data" option for datatype with modelonly as True'
## Read in data ##
# from daily file, only for showing table since images created in run_daily.py
if dstart is None:
    df = read.read(fname, dstart=None, dend=None, table=table, units=units, tz=tz, datum=datum)
    dfmodelhindcast = None
    dfmodelrecent = None
    dfmodelforecast = None
    # BUG FIX: dfmodeltides was never initialized on this branch, so the
    # 'pic' branch below raised NameError when plotting from a daily file.
    dfmodeltides = None
# Call to database if needed
else:
    ## Read data ##
    if not modelonly:
        df = read.read(buoy, dstart, dend, table=table, units=units, tz=tz, datum=datum)
        if df is not None: # won't work if data isn't available in this time period
            tools.write_file(df, fname)
    ## Read model ##
    # To use NOAA-provided model predictions
    if usemodel and bys.loc[buoy,'table1'] == 'ports' and buoy != 'cc0101':
        dfmodelhindcast = None
        dfmodelrecent = None
        dfmodelforecast = None
        dfmodeltides = read.read(buoy, dstart, dend, usemodel=True,
                                 userecent=True, tz=tz, units=units)
    # using model but not ports buoy
    elif usemodel: # and bys.loc[buoy,'table1'] in tables:
        usehindcast, userecent, useforecast = False, False, False # initialize
        dfmodelhindcast = None
        dfmodelrecent = None
        dfmodelforecast = None
        dfmodeltides = None
        # check timing relative to now to decide which model output to try using
        today = now.normalize() # midnight today
        yesterday = today - pd.Timedelta('1 day')
        thisyear = today.year
        lastyear = thisyear - 1
        # use forecast if dend after today
        if dend >= today:
            useforecast = True
        # use hindcast if want time from before lastyear
        if dstart.year <= lastyear:
            usehindcast = True
        # use recent if want time from thisyear or lastyear, but before yesterday
        if (dstart.year in [lastyear, thisyear] or dend.year in [lastyear, thisyear]) and (dstart <= yesterday):
            userecent = True
        if usehindcast:
            dfmodelhindcast = read.read(buoy, dstart, dend, table=table,
                                        usemodel='hindcast', tz=tz, units=units,
                                        s_rho=int(s_rho))
        # only look for nowcast model output if hindcast doesn't cover it
        # sometimes the two times overlap but hindcast output is better
        if usehindcast and userecent:
            if dfmodelhindcast is not None and (dfmodelhindcast.index[-1] - dend) < pd.Timedelta('1 hour'):
                dfmodelrecent = None
            else:
                dfmodelrecent = read.read(buoy, dstart, dend, table=table,
                                          usemodel='recent', tz=tz, units=units,
                                          s_rho=int(s_rho))
        if (not usehindcast) and userecent:
            dfmodelrecent = read.read(buoy, dstart, dend, table=table,
                                      usemodel='recent', tz=tz, units=units,
                                      s_rho=int(s_rho))
        if useforecast:
            dfmodelforecast = read.read(buoy, dstart, dend, table=table,
                                        usemodel='forecast', tz=tz, units=units,
                                        s_rho=int(s_rho))
        if bys.loc[buoy,'table2'] == 'tidepredict':
            dfmodeltides = read.read(buoy, dstart, dend, usemodel=True,
                                     userecent=True, tz=tz, units=units, datum=datum)
        else:
            dfmodeltides = None
    else:
        # model output not requested: leave all model frames unset
        dfmodelhindcast = None
        dfmodelrecent = None
        dfmodelforecast = None
        dfmodeltides = None
if datatype == 'data':
    # print('<br><br>')
    tools.present(df) # print data table to screen
elif datatype == 'pic':
    # does this get called from the front page or otherwise for "recent" data?
    # if not path.exists(fname + '.png'):
    print('<br><br>')
    if dend is not None:
        # plot time limits as matplotlib date numbers (naive local times)
        tlims = [date2num(pd.to_datetime(dstart.tz_localize(None)).to_pydatetime()), date2num(pd.to_datetime(dend.tz_localize(None)).to_pydatetime())]
    else:
        tlims = None
    # only plot if at least one data or model frame was actually read
    if any([dft is not None for dft in [df, dfmodelhindcast, dfmodelrecent, dfmodelforecast, dfmodeltides]]):
        fig = plot_buoy.plot(df, buoy, which=table, df1=dfmodelhindcast,
                             df2=dfmodelrecent, df3=dfmodelforecast,
                             df4=dfmodeltides, tlims=tlims)
        fig.savefig(fname + '.pdf')
        fig.savefig(fname + '.png')
elif datatype == 'download' and modelonly:
    # combine txla model output together
    dfs = [dfmodelhindcast, dfmodelrecent, dfmodelforecast]
    try:
        # BUG FIX: the original filter was `if not None`, which is always
        # True, so None entries were passed to pd.concat, it raised, and
        # the except branch silently discarded all model output.  Filter
        # the missing frames out explicitly (the comprehension variable is
        # also renamed so it no longer shadows `df`).
        df = pd.concat([frame for frame in dfs if frame is not None], axis=0, sort=False)
        # only remove duplicates if not multiple depths per time
        if df['Depth [m]'][0] == df['Depth [m]'][1]:
            df = df[~df.index.duplicated(keep='first')] # remove any duplicated indices
        # add in NOAA model output
        try:
            df = df.join(dfmodeltides, how='outer')
        except Exception: # dfmodeltides may be None/incompatible; keep df as-is
            pass
    except Exception: # no model frames at all -> fall back to tide predictions only
        df = dfmodeltides
    if df is not None:
        tools.write_file(df, fname)
| [
"[email protected]"
]
| |
2783c24639ee36365aef4ca759ece83577cf2167 | a5dacc8ea7dba732c6884a18775317dd7c59a6d6 | /examples/cull-idle/jupyterhub_config.py | 2de8f424989a3a5da8e62e5c27381aabb3c47ecf | [
"BSD-3-Clause"
]
| permissive | richmoore1962/jupyterhub | de7b995d5a4d57a471bfed9ac96de3176cfa4c5f | 5d7e0080553a2255e13980aee7249afb141b154e | refs/heads/master | 2022-12-23T11:03:30.989101 | 2017-11-03T09:31:46 | 2017-11-03T09:31:46 | 109,429,441 | 0 | 0 | NOASSERTION | 2020-09-20T20:33:13 | 2017-11-03T18:25:32 | Python | UTF-8 | Python | false | false | 194 | py | # run cull-idle as a service
# Register the idle-culler as a hub-managed service: JupyterHub launches
# `command` itself, and `admin: True` lets the service stop other users'
# idle single-user servers (--timeout is in seconds; 3600 s = 1 h).
c.JupyterHub.services = [
    {
        'name': 'cull-idle',
        'admin': True,
        'command': 'python cull_idle_servers.py --timeout=3600'.split(),
    }
]
| [
"[email protected]"
]
| |
5f21d3d4219a6206895837a13333cb428c0e6212 | dc51e4714820d991e7d0e94b3e9eac4dbc67eea7 | /project/utils/auth.py | 18f86a9f587e9c86950a9f1d5416c4b9d61403b5 | []
| no_license | ruoxiaojie/Django | 537d27abe9ebb85e0dfc69585f318a87e7514a70 | 92b88600953cd4ff743032cab3d4785437c949e0 | refs/heads/master | 2021-01-15T22:18:56.033883 | 2018-03-09T06:15:46 | 2018-03-09T06:15:46 | 99,894,862 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | #!/usr/bin/python
#Author:xiaojie
# -*- coding:utf-8 -*-
from django.shortcuts import HttpResponse,redirect
def Auth(func):
def wrapper(request,*args,**kwargs):
session_dict = request.session.get('user_info')
if session_dict:
res = func(request,*args,**kwargs)
return res
else:
return redirect('/login')
return wrapper | [
"[email protected]"
]
| |
9d58e38063f9ca3c1fa93866836158614af67dbb | 0bb49acb7bb13a09adafc2e43e339f4c956e17a6 | /OpenAssembler/Gui/AttributeEditor/attributeEditor.py | 0fa595c533f6ab79ea8d3ca02c021437b4ec8084 | []
| no_license | all-in-one-of/openassembler-7 | 94f6cdc866bceb844246de7920b7cbff9fcc69bf | 69704d1c4aa4b1b99f484c8c7884cf73d412fafe | refs/heads/master | 2021-01-04T18:08:10.264830 | 2010-07-02T10:50:16 | 2010-07-02T10:50:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,152 | py | # #####################################################################################
#
# OpenAssembler V3
# Owner: Laszlo Mates
# Email: [email protected]
# Date: 2009.06.04
#
# #####################################################################################
from PyQt4 import QtCore, QtGui
from Gui.OAS_Window.oas_attribute_string import Ui_oas_attribute_widget
class attributeEditor(Ui_oas_attribute_widget):
 def loadAttributes(self,attributeset,nodeSet):
  # Populate the attribute-editor panel with one row widget per attribute
  # of the selected node.
  # attributeset: dict describing the node (name, nodetype, ID, inputs,
  #   extras, nodesettings, widget cache) -- stored on self.inAE.
  # nodeSet: passed through unchanged to each row widget's setupUi.
  self.inAE=attributeset
  # If the editor pane of the splitter is collapsed, open it.
  if self.oas_splitter.sizes()[1]==0:
   self.oas_splitter.setSizes([700,300])
  self.oas_nodeName.setText(self.inAE["name"])
  self.oas_attribute_nodetype.setText(self.inAE["nodetype"])
  # Detach the signal while syncing the checkbox so setChecked below does
  # not fire nodeSettingE; reconnect afterwards.
  QtCore.QObject.disconnect(self.oas_attribute_cache, QtCore.SIGNAL("stateChanged(int)"),self.nodeSettingE)
  if str(self.inAE["nodesettings"]["_do_cache"])=="True":
   self.oas_attribute_cache.setChecked(True)
  else:
   self.oas_attribute_cache.setChecked(False)
  QtCore.QObject.connect(self.oas_attribute_cache, QtCore.SIGNAL("stateChanged(int)"),self.nodeSettingE)
  # Build one row widget per input, in sorted name order.
  sortedinputs=[]
  for key in self.inAE["inputs"].keys():
   sortedinputs.append(key)
  sortedinputs.sort()
  for ins in sortedinputs:
   # Mark the row "connected" when the connection collector reports a link.
   sts=self.connectionCollector.getConnectionID(self.inAE["ID"],ins)
   if sts==[]:
    status="free"
   else:
    status="connected"
   varT=self.inAE["inputs"][ins]["variable_type"]
   sablock=QtGui.QWidget(self.oas_attribute_area)
   Ui_oas_attribute_widget().setupUi(sablock,str(ins),self.inAE["inputs"][ins]["value"],status,varT,nodeSet)
   self.place_to_widgets.addWidget(sablock)
   self.inAE["widget"][str(ins)]=sablock
  # "Extras" never have connections, so their rows are always "free".
  for ins in self.inAE["extras"].keys():
   status="free"
   varT=self.inAE["extras"][ins]["variable_type"]
   sablock=QtGui.QWidget(self.oas_attribute_area)
   Ui_oas_attribute_widget().setupUi(sablock,str(ins),self.inAE["extras"][ins]["value"],status,varT,nodeSet)
   self.place_to_widgets.addWidget(sablock)
   self.inAE["widget"][str(ins)]=sablock
def cleanAttributes(self):
self.inAE={}
self.oas_attribute_nodetype.setText("empty")
self.oas_attribute_cache.setChecked(False)
self.oas_nodeName.setText("") | [
"laszlo.mates@732492aa-5b49-0410-a19c-07a6d82ec771"
]
| laszlo.mates@732492aa-5b49-0410-a19c-07a6d82ec771 |
c6cd25abaf3d9676fc733ab507b5ae2e2e31a87f | fb5b204943101746daf897f6ff6e0a12985543c3 | /models/usgs_sfbay/rules_hirakawa_aagaard_2021.py | c7d8ad87f9ebe5d20a829d14fd8b66c17f404ca5 | [
"LicenseRef-scancode-warranty-disclaimer",
"CC0-1.0",
"LicenseRef-scancode-public-domain"
]
| permissive | baagaard-usgs/geomodelgrids | 911a31ba23ca374be44873fdeb1e36a70ff25256 | 7d0db3c4ca1a83fea69ceb88f6ceec258928251a | refs/heads/main | 2023-08-03T07:52:25.727039 | 2023-07-27T21:56:19 | 2023-07-27T21:56:19 | 97,262,677 | 5 | 3 | NOASSERTION | 2023-03-23T03:34:45 | 2017-07-14T18:34:38 | C++ | UTF-8 | Python | false | false | 9,680 | py | """Rules for defining elastic properties given x, y, depth as given in
Hirakawa and Aagaard (20XX).
Hirakawa and Aagaard, B. T. (20XX), , Bulletin of the Seismological Society of
America, XXX(X), XXXX-XXXX, doi: 10.1785/XXXX.
The x and y coordinates are in the *unrotated* model coordinate system in units of m.
That is, the xy coordinates match the model CRS specified in the model configuration.
The rules were originally developed with Vp and Vs in km/s, density in
g/cm**3, and depth in km. Here all rules have been converted to SI
base units with Vp and Vs in m/s, density in kg/m**3, and depth in m.
"""
import math
from geomodelgrids.create.core import NODATA_VALUE
from rules_aagaard_etal_2010 import (
default_vs,
default_density,
default_qp,
default_qs)
MODEL_YAZIMUTH = 323.638
def compute_xy_refpoints():
    """Print coordinates of reference points in model coordinate system for subdividing fault blocks.

    These coordinates are used in the functions below.
    """
    # NOTE(review): the tuples are (latitude, longitude) pairs despite being
    # unpacked as (lon, lat) below; because pyproj's EPSG:4326 axis order is
    # latitude-first (always_xy is not set), transform(lon, lat) actually
    # receives latitude first and the printed results are correct -- only
    # the variable names are swapped.  Confirm before renaming.
    REFERENCE_POINTS = {
        "valley_sequence_sanleandro": (37.57, -121.95),
        "franciscan_napa": (38.25, -122.28),
        "cenozoic_napa": (37.96, -122.07),
        "franciscan_sonoma": (38.25, -122.46),
        "cenozoic_sonoma": (38.25, -122.46),
    }
    # pyproj is only needed for this offline utility, so import it locally.
    import pyproj
    cs_geo = pyproj.CRS("EPSG:4326")
    # Model CRS: NAD83 / Transverse Mercator centered at (35N, 123W), meters.
    cs_model = pyproj.CRS('PROJCS["unnamed",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",35],PARAMETER["central_meridian",-123],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["Meter",1]]')
    transformer = pyproj.Transformer.from_crs(cs_geo, cs_model)
    for name, (lon, lat) in REFERENCE_POINTS.items():
        x, y = transformer.transform(lon, lat)
        print(f"{name} (x0, y0) = ({x:.1f}, {y:.1f})")
def is_along_azimuth(x, y, x0, y0, azimuth):
    """Check whether a point lies on or ahead of a reference point along an azimuth.

    Args:
        x (float): Model x coordinate of the point to test.
        y (float): Model y coordinate of the point to test.
        x0 (float): X coordinate of the reference point (model coordinates).
        y0 (float): Y coordinate of the reference point (model coordinates).
        azimuth (float): Direction in degrees, clockwise from the model +y axis.

    Returns:
        bool: True when the offset vector (x-x0, y-y0) has a non-negative
        component along the azimuth direction, i.e. azimuth . (x-x0, y-y0) >= 0.
    """
    rad = azimuth / 180.0 * math.pi
    dx = x - x0
    dy = y - y0
    return dx * math.sin(rad) + dy * math.cos(rad) >= 0.0
def brocher2008_great_valley_sequence(x, y, depth):
    """Rule for elastic properties in Great Valley Sequence rocks (Brocher, 2008).

    Args:
        x (float): Model x coordinate (not used by this rule).
        y (float): Model y coordinate (not used by this rule).
        depth (float): Depth of location in m.

    Returns:
        Tuple of density (kg/m**3), Vp (m/s), Vs (m/s), Qp, and Qs.
    """
    # Piecewise-linear Vp(depth); intervals checked deepest-first.
    if depth >= 7.0e+3:
        vp = 5.54e+3 + 0.06 * (depth - 7.0e+3)
    elif depth >= 4.0e+3:
        vp = 4.64e+3 + 0.3 * (depth - 4.0e+3)
    else:
        vp = 2.75e+3 + 0.4725 * depth
    # Remaining properties follow from the shared empirical relations.
    vs = default_vs(depth, vp)
    density = default_density(depth, vp)
    qs = default_qs(depth, vs)
    qp = default_qp(depth, qs)
    return (density, vp, vs, qp, qs)
def brocher2005_older_cenozoic_sedimentary(x, y, depth):
    """Rule for elastic properties in older Cenozoic sedimentary rocks (Brocher, 2005).

    Args:
        x (float): Model x coordinate (not used by this rule).
        y (float): Model y coordinate (not used by this rule).
        depth (float): Depth of location in m.

    Returns:
        Tuple of density (kg/m**3), Vp (m/s), Vs (m/s), Qp, and Qs.
    """
    # Piecewise-linear Vp(depth); intervals checked deepest-first.
    if depth >= 7.0e+3:
        vp = 5.54e+3 + 0.06 * (depth - 7.0e+3)
    elif depth >= 4.0e+3:
        vp = 4.64e+3 + 0.3 * (depth - 4.0e+3)
    else:
        vp = 2.24e+3 + 0.6 * depth
    # Remaining properties follow from the shared empirical relations.
    vs = default_vs(depth, vp)
    density = default_density(depth, vp)
    qs = default_qs(depth, vs)
    qp = default_qp(depth, qs)
    return (density, vp, vs, qp, qs)
def franciscan_napa_sonoma(x, y, depth):
    """Rule for elastic properties in Franciscan (Napa-Sonoma variety) rocks.

    Args:
        x (float): Model x coordinate (not used by this rule).
        y (float): Model y coordinate (not used by this rule).
        depth (float): Depth of location in m.

    Returns:
        Tuple of density (kg/m**3), Vp (m/s), Vs (m/s), Qp, and Qs.
    """
    offset = 0.03e+3  # constant Vp shift applied on every depth interval
    # Fixed density used above 3 km, derived from the 3 km reference Vp.
    shallow_density = 1.74e+3 * ((5.4e+3 + offset) * 1.0e-3) ** 0.25
    if depth >= 3.0e+3:
        vp = offset + 5.4e+3 + 0.0588 * (depth - 3.0e+3)
        density = default_density(depth, vp)
    elif depth >= 1.0e+3:
        vp = offset + 4.5e+3 + 0.45 * (depth - 1.0e+3)
        density = shallow_density
    else:
        vp = offset + 2.5e+3 + 2.0 * depth
        density = shallow_density
    vs = default_vs(depth, vp)
    # Floor Qs for very slow material, as in the other rules.
    if vs < 300.0:
        qs = 13.0
    else:
        qs = default_qs(depth, vs)
    qp = default_qp(depth, qs)
    return (density, vp, vs, qp, qs)
def quaternary_livermore(x, y, depth):
    """Rule for elastic properties in shallow Livermore sediments (obtained by trial and error).

    Args:
        x (float): Model x coordinate (not used by this rule).
        y (float): Model y coordinate (not used by this rule).
        depth (float): Depth of location in m.

    Returns:
        Tuple of density (kg/m**3), Vp (m/s), Vs (m/s), Qp, and Qs.
    """
    knee = 6444.44  # depth (m) where the Vp gradient flattens
    if depth >= knee:
        vp = 5.506667e+3 + 0.06 * (depth - knee)
    else:
        vp = 1.64e+3 + 0.6 * depth
    # Remaining properties follow from the shared empirical relations.
    vs = default_vs(depth, vp)
    density = default_density(depth, vp)
    qs = default_qs(depth, vs)
    qp = default_qp(depth, qs)
    return (density, vp, vs, qp, qs)
def cenozoic_walnutcreek(x, y, depth):
    """Rule for elastic properties in the southernmost part of the Napa block.

    Above 7 km this matches 'tertiary_sedimentary_southbay' from
    aagaard_etal_2010.py; below 7 km it matches the Brocher GVS and
    older-Cenozoic-sediments rules.

    Args:
        x (float): Model x coordinate (not used by this rule).
        y (float): Model y coordinate (not used by this rule).
        depth (float): Depth of location in m.

    Returns:
        Tuple of density (kg/m**3), Vp (m/s), Vs (m/s), Qp, and Qs.
    """
    # Piecewise-linear Vp(depth); intervals checked deepest-first.
    if depth >= 7.0e+3:
        vp = 5.54e+3 + 0.06 * (depth - 7.0e+3)
    elif depth >= 4.0e+3:
        vp = 4.64e+3 + 0.3 * (depth - 4.0e+3)
    elif depth >= 750.0:
        vp = 2.70e+3 + 0.597 * (depth - 750.0)
    else:
        vp = 1.80e+3 + 1.2 * depth
    # Special near-surface Vs gradient in the top 50 m.
    if depth < 50.0:
        vs = 500.0 + 0.4 * depth
    else:
        vs = default_vs(depth, vp)
    density = default_density(depth, vp)
    qs = 13.0 if vs < 300.0 else default_qs(depth, vs)
    qp = default_qp(depth, qs)
    return (density, vp, vs, qp, qs)
def valley_sequence_sanleandro(x, y, depth):
    """Rule for elastic properties in zone 'Valley Sequence', block 'San Leandro'.

    Args:
        x (float): Model x coordinate.
        y (float): Model y coordinate.
        depth (float): Depth of location in m.

    Returns:
        Tuple of density (kg/m**3), Vp (m/s), Vs (m/s), Qp, and Qs.
    """
    # Reference point (model coords) produced by compute_xy_refpoints().
    (x0, y0) = (92724.2, 285582.4)
    # Consistency: use the module constant MODEL_YAZIMUTH (same value as the
    # previously hard-coded 323.638) so the azimuth lives in one place.
    if is_along_azimuth(x, y, x0, y0, MODEL_YAZIMUTH):
        return brocher2008_great_valley_sequence(x, y, depth)
    else:
        return brocher2005_older_cenozoic_sedimentary(x, y, depth)
def franciscan_napa(x, y, depth):
    """Rule for elastic properties in Franciscan rock, Napa block.

    Args:
        x (float): Model x coordinate.
        y (float): Model y coordinate.
        depth (float): Depth of location in m.

    Returns:
        Tuple of density (kg/m**3), Vp (m/s), Vs (m/s), Qp, and Qs.
    """
    # Reference point (model coords) produced by compute_xy_refpoints().
    (x0, y0) = (62999.3, 360755.6)
    # Consistency: use the module constant MODEL_YAZIMUTH (same value as the
    # previously hard-coded 323.638) so the azimuth lives in one place.
    if is_along_azimuth(x, y, x0, y0, MODEL_YAZIMUTH):
        return franciscan_napa_sonoma(x, y, depth)
    else:
        return brocher2008_great_valley_sequence(x, y, depth)
def cenozoic_napa(x, y, depth):
    """Rule for elastic properties in the Cenozoic zone, Napa block.

    Args:
        x (float): Model x coordinate.
        y (float): Model y coordinate.
        depth (float): Depth of location in m.

    Returns:
        Tuple of density (kg/m**3), Vp (m/s), Vs (m/s), Qp, and Qs.
    """
    # Reference point (model coords) produced by compute_xy_refpoints().
    (x0, y0) = (81696.7, 328741.7)
    # Consistency: use the module constant MODEL_YAZIMUTH (same value as the
    # previously hard-coded 323.638) so the azimuth lives in one place.
    if is_along_azimuth(x, y, x0, y0, MODEL_YAZIMUTH):
        return brocher2005_older_cenozoic_sedimentary(x, y, depth)
    else:
        return cenozoic_walnutcreek(x, y, depth)
def franciscan_sonoma(x, y, depth):
    """Rule for elastic properties of the Franciscan zone, Sonoma block.

    Args:
        x (float): Model x coordinate.
        y (float): Model y coordinate.
        depth (float): Depth of location in m.

    Returns:
        Tuple of density (kg/m**3), Vp (m/s), Vs (m/s), Qp, and Qs.
    """
    # Reference point (model coords) produced by compute_xy_refpoints().
    (x0, y0) = (47249.3, 360648.4)
    # Consistency: use the module constant MODEL_YAZIMUTH (same value as the
    # previously hard-coded 323.638) so the azimuth lives in one place.
    if is_along_azimuth(x, y, x0, y0, MODEL_YAZIMUTH):
        return franciscan_napa_sonoma(x, y, depth)
    else:
        return brocher2008_great_valley_sequence(x, y, depth)
def cenozoic_sonoma(x, y, depth):
    """Rule for elastic properties of the Cenozoic zone, Sonoma block.

    Args:
        x (float): Model x coordinate.
        y (float): Model y coordinate.
        depth (float): Depth of location in m.

    Returns:
        Tuple of density (kg/m**3), Vp (m/s), Vs (m/s), Qp, and Qs.
    """
    # Reference point (model coords) produced by compute_xy_refpoints().
    (x0, y0) = (47249.3, 360648.4)
    # Consistency: use the module constant MODEL_YAZIMUTH (same value as the
    # previously hard-coded 323.638) so the azimuth lives in one place.
    if is_along_azimuth(x, y, x0, y0, MODEL_YAZIMUTH):
        return brocher2008_great_valley_sequence(x, y, depth)
    else:
        return brocher2005_older_cenozoic_sedimentary(x, y, depth)
| [
"[email protected]"
]
| |
f46481c123eee2c7b164413ece213c6de9a666b1 | a32c2ee4e6b2b1c6f8db02320c4bd50b17940af5 | /modules/TIMCardHolderAddFriends/TIMCardHolderAddFriends.py | 086b16f65630f26fa39058f13b7086328fafb33d | []
| no_license | wszg5/studyGit | 93d670884d4cba7445c4df3a5def8085e5bf9ac0 | bebfc90bc38689990c2ddf52e5a2f7a02649ea00 | refs/heads/master | 2020-04-05T02:55:17.367722 | 2018-11-07T06:01:03 | 2018-11-07T06:01:03 | 156,494,390 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 4,566 | py | # coding:utf-8
from uiautomator import Device
from Repo import *
import os, time, datetime, random
from zservice import ZDevice
class TIMCardHolderAddFriends:
    """UI-automation plugin: loads phone numbers from the repo into the TIM
    app's card holder and starts adding the matched owners as friends.

    The public interface (action(d, z, args)) is unchanged; collectData now
    receives the device handle explicitly instead of relying on a
    module-level global that only exists when this file is run as a script.
    """
    def __init__(self):
        self.repo = Repo()  # phone-number repository backend

    def action(self, d, z, args):
        """Run the plugin on one device.

        Args:
            d: uiautomator Device handle driving the UI.
            z: ZDevice helper (part of the plugin interface; unused here).
            args: dict with 'repo_number_cate_id' (repo category id),
                'collect_count' (how many numbers to fetch) and an optional
                'time_delay' (seconds to sleep after finishing).
        """
        cate_id = args["repo_number_cate_id"]
        collect_count = int(args['collect_count'])  # how many people to scan
        count = self.repo.GetNumber(cate_id, 0, collect_count)
        # Restart TIM so the UI starts from a known state.
        d.server.adb.cmd("shell", "am force-stop com.tencent.tim").communicate()
        d.server.adb.cmd("shell",
                         "am start -n com.tencent.tim/com.tencent.mobileqq.activity.SplashActivity").wait()
        d(index=2, className='android.widget.FrameLayout').click()
        d(text='名片夹', className='android.widget.TextView').click()
        if d(text='设置我的名片', className='android.widget.TextView').exists:
            # First run: create our own business card from the first gallery
            # image before scanning.
            d(text='设置我的名片', className='android.widget.TextView').click()
            d(text='添加我的名片', className='android.widget.TextView').click()
            d(text='从相册选择', className='android.widget.TextView').click()
            time.sleep(1)
            d(index=0, className='android.widget.ImageView').click()
            d(text='确定', className='android.widget.Button').click()
            time.sleep(2)
            d(text='完成', className='android.widget.TextView').click()
            time.sleep(3)
            self.collectData(d, count)
        else:
            self.collectData(d, count)
        if (args["time_delay"]):
            time.sleep(int(args["time_delay"]))

    def collectData(self, d, count):
        """Fill phone numbers into the card three at a time and try to add
        each matched contact as a friend.

        BUG FIX: this method previously referenced a module-level global
        ``d`` that only exists under ``__main__``; the device handle is now
        passed in from action().

        Args:
            d: uiautomator Device handle.
            count: sequence of phone-number strings from the repo.
        """
        d(text='我的名片', className='android.widget.TextView').click()
        print (count)
        # // keeps integer semantics on both Python 2 and 3 (the original
        # `/` produced a float under Python 3 and broke range()).
        for i in range(0, len(count) // 3 + 1):
            for j in range(0, 3):
                if j == 0:
                    # Open the edit form and add three phone-number fields.
                    d(text='编辑', className='android.widget.TextView').click()
                    d(text='添加手机号', className='android.widget.TextView').click()
                    d(text='添加手机号', className='android.widget.TextView').click()
                if len(count) >= i * 3 + j + 1:
                    print (count[i * 3 + j])
                    d(text='填写号码', className='android.widget.EditText', index=j).set_text(count[i * 3 + j])
                else:
                    break
                if j == 2:
                    d(text='完成', className='android.widget.TextView').click()
                    for k in range(0, 3):
                        if k == 0:
                            # Scroll down to reveal the matched-contact rows.
                            screen = d.info  # renamed from `str`, which shadowed the builtin
                            height = screen["displayHeight"]
                            width = screen["displayWidth"]
                            d.swipe(width / 2, height * 5 / 6, width / 2, height / 4)
                            time.sleep(3)
                        if d(index=2, className='android.widget.LinearLayout').child(index=k, className='android.widget.RelativeLayout').child(index=2, className='android.widget.TextView').exists:
                            d(index=2, className='android.widget.LinearLayout').child(index=k, className='android.widget.RelativeLayout').child(text='加好友', className='android.widget.Button').click()
                            # TODO: the add-friend confirmation flow still
                            # needs to be implemented here (was marked
                            # "needs completing" in the original).
                        else:
                            print ('结束扫描')
                            # Clear the three number fields for the next batch.
                            d(text='编辑', className='android.widget.TextView').click()
                            for k in range(0, 3):
                                d(className='android.widget.RelativeLayout', index=4).child(className='android.widget.EditText', index=k).clear_text()
                            d(text='完成', className='android.widget.TextView').click()
                            break
def getPluginClass():
    # Entry point used by the plugin loader to obtain this module's plugin class.
    return TIMCardHolderAddFriends
if __name__ == "__main__":
    # Manual smoke test against a specific device serial.
    clazz = getPluginClass()
    o = clazz()
    d = Device("HT57FSK00089")
    # material=u'有空聊聊吗'  # (unused sample message: "free to chat?")
    z = ZDevice("HT57FSK00089")
    # Switch the device to the custom input method before driving the UI.
    d.server.adb.cmd("shell", "ime set com.zunyun.qk/.ZImeService").communicate()
    # NOTE(review): action() reads args['collect_count'], but this dict
    # supplies 'add_count' instead, so this smoke test raises KeyError --
    # confirm which key name is canonical.
    args = {"repo_number_cate_id": "43", "add_count": "9", "time_delay": "3"};  # cate_id is the repo category id; the count is how many numbers to fetch
    o.action(d, z, args)
"[email protected]"
]
| |
d03d5c855680e1b6fa3515be40d126c7e532e244 | 943dca755b940493a8452223cfe5daa2fb4908eb | /abc116/c.py | 5267dea5164cda9c07be495dd9a9df3f46b7cd66 | []
| no_license | ymsk-sky/atcoder | 5e34556582763b7095a5f3a7bae18cbe5b2696b2 | 36d7841b70b521bee853cdd6d670f8e283d83e8d | refs/heads/master | 2023-08-20T01:34:16.323870 | 2023-08-13T04:49:12 | 2023-08-13T04:49:12 | 254,348,518 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 236 | py | n=int(input())
hs=list(map(int,input().split()))
c=0
m=max(hs)
# Greedy simulation: on every pass, lower each flower that is at the
# current maximum height by 1.  A new watering operation (c += 1) is
# counted whenever a maximum-height flower is not directly preceded by
# another maximum-height flower (`b` holds the previous flower's height
# before this pass touched the current one), so contiguous runs of maxima
# share one operation.
while not m==0:
    b=0
    for i,h in enumerate(hs):
        if h==m:
            if not b==m:
                c+=1
            hs[i]-=1
        b=h
    m=max(hs)
print(c)
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.