repo_name (stringlengths 5-100) | path (stringlengths 4-299) | copies (stringclasses, 990 values) | size (stringlengths 4-7) | content (stringlengths 666-1.03M) | license (stringclasses, 15 values) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17-100) | line_max (int64, 7-1k) | alpha_frac (float64, 0.25-0.98) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---
ianclegg/requests-ntlm2 | requests_ntlm2/__init__.py | 1 | 1172 | # (c) 2015, Ian Clegg <[email protected]>
#
# ntlmlib is licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .requests_ntlm2 import HttpNtlm2Auth
from .exceptions import InvalidCredentialsError, NtlmAuthenticationError
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
__all__ = ['HttpNtlm2Auth', 'NtlmAuthenticationError', 'InvalidCredentialsError']
| apache-2.0 | 4,448,434,275,216,971,300 | 39.413793 | 81 | 0.759386 | false |
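For context, a minimal usage sketch of the `HttpNtlm2Auth` class this `__init__` exports. The constructor arguments (a domain-qualified username and a password) and the target URL are assumptions for illustration; only the package exports appear in the file above.

```python
# Hedged usage sketch for requests-ntlm2; the HttpNtlm2Auth constructor
# signature is assumed, since only the package __init__ is reproduced above.
import requests

from requests_ntlm2 import HttpNtlm2Auth, NtlmAuthenticationError

session = requests.Session()
# NTLM credentials are conventionally written as DOMAIN\username.
session.auth = HttpNtlm2Auth('DOMAIN\\user', 'password')

try:
    response = session.get('https://intranet.example.com/protected')
    response.raise_for_status()
except NtlmAuthenticationError as exc:
    print('NTLM handshake failed: {}'.format(exc))
```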
Williams224/davinci-scripts | ksteta3pi/Consideredbkg/MC_12_11102202_MagUp.py | 3 | 17903 | #-- GAUDI jobOptions generated on Fri Jul 17 16:30:35 2015
#-- Contains event types :
#-- 11102202 - 178 files - 3032774 events - 655.11 GBytes
#-- Extra information about the data processing phases:
#-- Processing Pass Step-124834
#-- StepId : 124834
#-- StepName : Reco14a for MC
#-- ApplicationName : Brunel
#-- ApplicationVersion : v43r2p7
#-- OptionFiles : $APPCONFIGOPTS/Brunel/DataType-2012.py;$APPCONFIGOPTS/Brunel/MC-WithTruth.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-124620
#-- StepId : 124620
#-- StepName : Digi13 with G4 dE/dx
#-- ApplicationName : Boole
#-- ApplicationVersion : v26r3
#-- OptionFiles : $APPCONFIGOPTS/Boole/Default.py;$APPCONFIGOPTS/Boole/DataType-2012.py;$APPCONFIGOPTS/Boole/Boole-SiG4EnergyDeposit.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-124632
#-- StepId : 124632
#-- StepName : TCK-0x409f0045 Flagged for Sim08 2012
#-- ApplicationName : Moore
#-- ApplicationVersion : v14r8p1
#-- OptionFiles : $APPCONFIGOPTS/Moore/MooreSimProductionWithL0Emulation.py;$APPCONFIGOPTS/Conditions/TCK-0x409f0045.py;$APPCONFIGOPTS/Moore/DataType-2012.py;$APPCONFIGOPTS/L0/L0TCK-0x0045.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-125999
#-- StepId : 125999
#-- StepName : Sim08d - 2012 - MU - Pythia8
#-- ApplicationName : Gauss
#-- ApplicationVersion : v45r6
#-- OptionFiles : $APPCONFIGOPTS/Gauss/Sim08-Beam4000GeV-mu100-2012-nu2.5.py;$DECFILESROOT/options/@{eventType}.py;$LBPYTHIA8ROOT/options/Pythia8.py;$APPCONFIGOPTS/Gauss/G4PL_FTFP_BERT_EmNoCuts.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : dddb-20130929-1
#-- CONDDB : sim-20130522-1-vc-mu100
#-- ExtraPackages : AppConfig.v3r182;DecFiles.v27r16
#-- Visible : Y
#-- Processing Pass Step-124630
#-- StepId : 124630
#-- StepName : Stripping20-NoPrescalingFlagged for Sim08
#-- ApplicationName : DaVinci
#-- ApplicationVersion : v32r2p1
#-- OptionFiles : $APPCONFIGOPTS/DaVinci/DV-Stripping20-Stripping-MC-NoPrescaling.py;$APPCONFIGOPTS/DaVinci/DataType-2012.py;$APPCONFIGOPTS/DaVinci/InputType-DST.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
from Gaudi.Configuration import *
from GaudiConf import IOHelper
IOHelper('ROOT').inputFiles(['LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000001_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000002_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000003_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000004_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000005_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000006_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000007_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000008_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000009_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000010_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000011_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000012_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000013_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000014_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000015_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000016_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000017_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000018_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000019_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000020_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000021_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000022_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000023_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000024_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000025_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000026_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000027_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000028_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000029_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000030_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000031_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000032_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000033_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000034_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000035_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000036_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000037_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000038_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000039_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000040_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000041_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000042_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000043_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000044_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000045_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000046_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000047_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000048_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000049_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000050_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000051_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000052_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000053_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000054_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000055_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000056_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000057_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000058_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000059_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000060_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000061_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000062_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000063_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000064_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000065_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000066_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000067_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000068_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000069_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000070_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000071_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000072_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000073_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000074_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000075_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000076_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000077_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000078_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000079_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000080_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000081_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000082_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000083_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000084_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000085_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000086_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000087_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000088_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000001_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000002_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000003_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000004_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000005_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000006_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000007_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000008_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000009_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000010_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000011_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000012_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000013_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000014_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000015_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000016_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000017_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000018_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000019_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000020_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000021_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000022_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000023_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000024_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000025_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000026_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000027_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000028_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000029_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000030_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000031_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000032_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000033_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000034_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000035_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000036_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000037_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000038_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000039_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000040_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000041_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000042_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000043_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000044_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000045_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000046_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000047_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000048_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000049_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000050_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000051_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000052_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000053_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000054_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000055_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000056_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000057_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000058_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000059_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000060_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000062_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000063_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000064_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000065_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000066_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000067_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000068_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000069_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000070_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000071_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000072_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000073_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000074_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000075_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000076_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000077_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000078_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000079_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000080_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000081_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000082_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000083_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000084_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000085_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000086_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000087_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000088_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000089_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000090_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000091_1.allstreams.dst'
], clear=True)
| mit | -5,889,822,724,552,636,000 | 69.762846 | 247 | 0.788415 | false |
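The input list above follows a regular LFN naming pattern, which suggests a compact way to express it. Below is a sketch (not part of the original options file) that rebuilds the same list programmatically; note that real productions have holes (file 61 of production 00034094 is absent above), so in practice the LFN list comes from an LHCb bookkeeping query rather than a plain numeric range.

```python
# Sketch only: rebuilding the LFN list from the naming pattern visible above.
def lfns(production, file_numbers):
    template = ('LFN:/lhcb/MC/2012/ALLSTREAMS.DST/{prod}/0000/'
                '{prod}_{num:08d}_1.allstreams.dst')
    return [template.format(prod=production, num=n) for n in file_numbers]

input_files = (lfns('00034086', range(1, 89)) +
               lfns('00034094', [n for n in range(1, 92) if n != 61]))

from GaudiConf import IOHelper
IOHelper('ROOT').inputFiles(input_files, clear=True)
```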
gymnasium/edx-platform | common/test/acceptance/pages/lms/discussion.py | 11 | 29079 | from contextlib import contextmanager
from bok_choy.javascript import wait_for_js
from bok_choy.page_object import PageObject
from bok_choy.promise import EmptyPromise
from common.test.acceptance.pages.common.utils import hover
from common.test.acceptance.pages.lms.course_page import CoursePage
from common.test.acceptance.tests.helpers import is_focused_on_element
class DiscussionPageMixin(object):
def is_ajax_finished(self):
return self.browser.execute_script("return jQuery.active") == 0
def find_visible_element(self, selector):
"""
Finds a single visible element with the specified selector.
"""
full_selector = selector
if self.root_selector:
full_selector = self.root_selector + " " + full_selector
elements = self.q(css=full_selector)
return next((element for element in elements if element.is_displayed()), None)
@property
def new_post_button(self):
"""
Returns the new post button if visible, else it returns None.
"""
return self.find_visible_element(".new-post-btn")
@property
def new_post_form(self):
"""
Returns the new post form if visible, else it returns None.
"""
return self.find_visible_element(".forum-new-post-form")
def click_new_post_button(self):
"""
Clicks the 'New Post' button.
"""
self.wait_for(
lambda: self.new_post_button,
description="Waiting for new post button"
)
self.new_post_button.click()
self.wait_for(
lambda: self.new_post_form,
description="Waiting for new post form"
)
def click_cancel_new_post(self):
"""
Clicks the 'Cancel' button from the new post form.
"""
self.click_element(".cancel")
self.wait_for(
lambda: not self.new_post_form,
"Waiting for new post form to close"
)
class DiscussionThreadPage(PageObject, DiscussionPageMixin):
url = None
def __init__(self, browser, thread_selector):
super(DiscussionThreadPage, self).__init__(browser)
self.thread_selector = thread_selector
def _find_within(self, selector):
"""
Returns a query corresponding to the given CSS selector within the scope
of this thread page
"""
return self.q(css=self.thread_selector + " " + selector)
def is_browser_on_page(self):
return self.q(css=self.thread_selector).visible
def _get_element_text(self, selector):
"""
Returns the text of the first element matching the given selector, or
None if no such element exists
"""
text_list = self._find_within(selector).text
return text_list[0] if text_list else None
def is_element_visible(self, selector):
"""
Returns true if the element matching the specified selector is visible.
Args:
selector (str): The CSS selector that matches the desired element.
Returns:
bool: True if the element is visible.
"""
query = self._find_within(selector)
return query.present and query.visible
@contextmanager
def secondary_action_menu_open(self, ancestor_selector):
"""
Given the selector for an ancestor of a secondary menu, return a context
manager that will open and close the menu
"""
self.wait_for_ajax()
self._find_within(ancestor_selector + " .action-more").click()
EmptyPromise(
lambda: self.is_element_visible(ancestor_selector + " .actions-dropdown"),
"Secondary action menu opened"
).fulfill()
yield
if self.is_element_visible(ancestor_selector + " .actions-dropdown"):
self._find_within(ancestor_selector + " .action-more").click()
EmptyPromise(
lambda: not self.is_element_visible(ancestor_selector + " .actions-dropdown"),
"Secondary action menu closed"
).fulfill()
def get_group_visibility_label(self):
"""
Returns the group visibility label shown for the thread.
"""
return self._get_element_text(".group-visibility-label")
def get_response_total_text(self):
"""Returns the response count text, or None if not present"""
self.wait_for_ajax()
return self._get_element_text(".response-count")
def get_num_displayed_responses(self):
"""Returns the number of responses actually rendered"""
return len(self._find_within(".discussion-response"))
def get_shown_responses_text(self):
"""Returns the shown response count text, or None if not present"""
return self._get_element_text(".response-display-count")
def get_load_responses_button_text(self):
"""Returns the load more responses button text, or None if not present"""
return self._get_element_text(".load-response-button")
def load_more_responses(self):
"""Clicks the load more responses button and waits for responses to load"""
self._find_within(".load-response-button").click()
EmptyPromise(
self.is_ajax_finished,
"Loading more Responses"
).fulfill()
def has_add_response_button(self):
"""Returns true if the add response button is visible, false otherwise"""
return self.is_element_visible(".add-response-btn")
def has_discussion_reply_editor(self):
"""
Returns true if the discussion reply editor is is visible
"""
return self.is_element_visible(".discussion-reply-new")
def click_add_response_button(self):
"""
Clicks the add response button and ensures that the response text
field receives focus
"""
self._find_within(".add-response-btn").first.click()
EmptyPromise(
lambda: self._find_within(".discussion-reply-new textarea:focus").present,
"Response field received focus"
).fulfill()
@wait_for_js
def is_response_editor_visible(self, response_id):
"""Returns true if the response editor is present, false otherwise"""
return self.is_element_visible(".response_{} .edit-post-body".format(response_id))
@wait_for_js
def is_discussion_body_visible(self):
return self.is_element_visible(".post-body")
def verify_mathjax_preview_available(self):
""" Checks that MathJax Preview css class is present """
self.wait_for(
lambda: len(self.q(css=".MathJax_Preview").text) > 0 and self.q(css=".MathJax_Preview").text[0] == "",
description="MathJax Preview is rendered"
)
def verify_mathjax_rendered(self):
""" Checks that MathJax css class is present """
self.wait_for(
lambda: self.is_element_visible(".MathJax_SVG"),
description="MathJax Preview is rendered"
)
def is_response_visible(self, comment_id):
"""Returns true if the response is viewable onscreen"""
self.wait_for_ajax()
return self.is_element_visible(".response_{} .response-body".format(comment_id))
def is_response_editable(self, response_id):
"""Returns true if the edit response button is present, false otherwise"""
with self.secondary_action_menu_open(".response_{} .discussion-response".format(response_id)):
return self.is_element_visible(".response_{} .discussion-response .action-edit".format(response_id))
def is_response_deletable(self, response_id):
"""
Returns true if the delete response button is present, false otherwise
"""
with self.secondary_action_menu_open(".response_{} .discussion-response".format(response_id)):
return self.is_element_visible(".response_{} .discussion-response .action-delete".format(response_id))
def get_response_body(self, response_id):
return self._get_element_text(".response_{} .response-body".format(response_id))
def start_response_edit(self, response_id):
"""Click the edit button for the response, loading the editing view"""
with self.secondary_action_menu_open(".response_{} .discussion-response".format(response_id)):
self._find_within(".response_{} .discussion-response .action-edit".format(response_id)).first.click()
EmptyPromise(
lambda: self.is_response_editor_visible(response_id),
"Response edit started"
).fulfill()
def get_link_href(self):
"""Extracts href attribute of the referenced link"""
link_href = self._find_within(".post-body p a").attrs('href')
return link_href[0] if link_href else None
def get_response_vote_count(self, response_id):
vote_count_css = '.response_{} .discussion-response .action-vote'.format(response_id)
vote_count_element = self.browser.find_element_by_css_selector(vote_count_css)
# To get the vote count, one must hover over the element first.
hover(self.browser, vote_count_element)
return self._get_element_text(".response_{} .discussion-response .action-vote .vote-count".format(response_id))
def vote_response(self, response_id):
current_count = self.get_response_vote_count(response_id)
self._find_within(".response_{} .discussion-response .action-vote".format(response_id)).first.click()
self.wait_for(
lambda: current_count != self.get_response_vote_count(response_id),
description="Vote updated for {response_id}".format(response_id=response_id)
)
def cannot_vote_response(self, response_id):
"""Assert that the voting button is not visible on this response"""
return not self.is_element_visible(".response_{} .discussion-response .action-vote".format(response_id))
def is_response_reported(self, response_id):
return self.is_element_visible(".response_{} .discussion-response .post-label-reported".format(response_id))
def report_response(self, response_id):
with self.secondary_action_menu_open(".response_{} .discussion-response".format(response_id)):
self._find_within(".response_{} .discussion-response .action-report".format(response_id)).first.click()
self.wait_for_ajax()
EmptyPromise(
lambda: self.is_response_reported(response_id),
"Response is reported"
).fulfill()
def cannot_report_response(self, response_id):
"""Assert that the reporting button is not visible on this response"""
return not self.is_element_visible(".response_{} .discussion-response .action-report".format(response_id))
def is_response_endorsed(self, response_id):
return "endorsed" in self._get_element_text(".response_{} .discussion-response .posted-details".format(response_id))
def endorse_response(self, response_id):
self._find_within(".response_{} .discussion-response .action-endorse".format(response_id)).first.click()
self.wait_for_ajax()
EmptyPromise(
lambda: self.is_response_endorsed(response_id),
"Response edit started"
).fulfill()
def set_response_editor_value(self, response_id, new_body):
"""Replace the contents of the response editor"""
self._find_within(".response_{} .discussion-response .wmd-input".format(response_id)).fill(new_body)
def verify_link_editor_error_messages_shown(self):
"""
Confirm that the error messages are displayed in the editor.
"""
def errors_visible():
"""
Returns True if both errors are visible, False otherwise.
"""
return (
self.q(css="#new-url-input-field-message.has-error").visible and
self.q(css="#new-url-desc-input-field-message.has-error").visible
)
self.wait_for(errors_visible, "Form errors should be visible.")
def add_content_via_editor_button(self, content_type, response_id, url, description, is_decorative=False):
"""Replace the contents of the response editor"""
self._find_within(
"#wmd-{}-button-edit-post-body-{}".format(
content_type,
response_id,
)
).click()
self.q(css='#new-url-input').fill(url)
self.q(css='#new-url-desc-input').fill(description)
if is_decorative:
self.q(css='#img-is-decorative').click()
self.q(css='input[value="OK"]').click()
def submit_response_edit(self, response_id, new_response_body):
"""Click the submit button on the response editor"""
def submit_response_check_func():
"""
Tries to click "Update post" and returns True if the post
was successfully updated, False otherwise.
"""
self._find_within(
".response_{} .discussion-response .post-update".format(
response_id
)
).first.click()
return (
not self.is_response_editor_visible(response_id) and
self.is_response_visible(response_id) and
self.get_response_body(response_id) == new_response_body
)
self.wait_for(submit_response_check_func, "Comment edit succeeded")
def is_show_comments_visible(self, response_id):
"""Returns true if the "show comments" link is visible for a response"""
return self.is_element_visible(".response_{} .action-show-comments".format(response_id))
def show_comments(self, response_id):
"""Click the "show comments" link for a response"""
self._find_within(".response_{} .action-show-comments".format(response_id)).first.click()
EmptyPromise(
lambda: self.is_element_visible(".response_{} .comments".format(response_id)),
"Comments shown"
).fulfill()
def is_add_comment_visible(self, response_id):
"""Returns true if the "add comment" form is visible for a response"""
return self.is_element_visible("#wmd-input-comment-body-{}".format(response_id))
def is_comment_visible(self, comment_id):
"""Returns true if the comment is viewable onscreen"""
return self.is_element_visible("#comment_{} .response-body".format(comment_id))
def get_comment_body(self, comment_id):
return self._get_element_text("#comment_{} .response-body".format(comment_id))
def is_comment_deletable(self, comment_id):
"""Returns true if the delete comment button is present, false otherwise"""
with self.secondary_action_menu_open("#comment_{}".format(comment_id)):
return self.is_element_visible("#comment_{} .action-delete".format(comment_id))
def delete_comment(self, comment_id):
with self.handle_alert():
with self.secondary_action_menu_open("#comment_{}".format(comment_id)):
self._find_within("#comment_{} .action-delete".format(comment_id)).first.click()
EmptyPromise(
lambda: not self.is_comment_visible(comment_id),
"Deleted comment was removed"
).fulfill()
def is_comment_editable(self, comment_id):
"""Returns true if the edit comment button is present, false otherwise"""
with self.secondary_action_menu_open("#comment_{}".format(comment_id)):
return self.is_element_visible("#comment_{} .action-edit".format(comment_id))
def is_comment_editor_visible(self, comment_id):
"""Returns true if the comment editor is present, false otherwise"""
return self.is_element_visible(".edit-comment-body[data-id='{}']".format(comment_id))
def _get_comment_editor_value(self, comment_id):
return self._find_within("#wmd-input-edit-comment-body-{}".format(comment_id)).text[0]
def start_comment_edit(self, comment_id):
"""Click the edit button for the comment, loading the editing view"""
old_body = self.get_comment_body(comment_id)
with self.secondary_action_menu_open("#comment_{}".format(comment_id)):
self._find_within("#comment_{} .action-edit".format(comment_id)).first.click()
EmptyPromise(
lambda: (
self.is_comment_editor_visible(comment_id) and
not self.is_comment_visible(comment_id) and
self._get_comment_editor_value(comment_id) == old_body
),
"Comment edit started"
).fulfill()
def set_comment_editor_value(self, comment_id, new_body):
"""Replace the contents of the comment editor"""
self._find_within("#comment_{} .wmd-input".format(comment_id)).fill(new_body)
def submit_comment_edit(self, comment_id, new_comment_body):
"""Click the submit button on the comment editor"""
self._find_within("#comment_{} .post-update".format(comment_id)).first.click()
self.wait_for_ajax()
EmptyPromise(
lambda: (
not self.is_comment_editor_visible(comment_id) and
self.is_comment_visible(comment_id) and
self.get_comment_body(comment_id) == new_comment_body
),
"Comment edit succeeded"
).fulfill()
def cancel_comment_edit(self, comment_id, original_body):
"""Click the cancel button on the comment editor"""
self._find_within("#comment_{} .post-cancel".format(comment_id)).first.click()
EmptyPromise(
lambda: (
not self.is_comment_editor_visible(comment_id) and
self.is_comment_visible(comment_id) and
self.get_comment_body(comment_id) == original_body
),
"Comment edit was canceled"
).fulfill()
class DiscussionSortPreferencePage(CoursePage):
"""
Page that contain the discussion board with sorting options
"""
def __init__(self, browser, course_id):
super(DiscussionSortPreferencePage, self).__init__(browser, course_id)
self.url_path = "discussion/forum"
def is_browser_on_page(self):
"""
Return true if the browser is on the right page else false.
"""
return self.q(css="body.discussion .forum-nav-sort-control").present
def show_all_discussions(self):
""" Show the list of all discussions. """
self.q(css=".all-topics").click()
def get_selected_sort_preference(self):
"""
Return the text of option that is selected for sorting.
"""
# Using this workaround (execute script) to make this test work with Chrome browser
selected_value = self.browser.execute_script(
'var selected_value = $(".forum-nav-sort-control").val(); return selected_value')
return selected_value
def change_sort_preference(self, sort_by):
"""
Change the option of sorting by clicking on new option.
"""
self.q(css=".forum-nav-sort-control option[value='{0}']".format(sort_by)).click()
# Click initiates an ajax call, waiting for it to complete
self.wait_for_ajax()
def refresh_page(self):
"""
Reload the page.
"""
self.browser.refresh()
class DiscussionTabSingleThreadPage(CoursePage):
def __init__(self, browser, course_id, discussion_id, thread_id):
super(DiscussionTabSingleThreadPage, self).__init__(browser, course_id)
self.thread_page = DiscussionThreadPage(
browser,
"body.discussion .discussion-article[data-id='{thread_id}']".format(thread_id=thread_id)
)
self.url_path = "discussion/forum/{discussion_id}/threads/{thread_id}".format(
discussion_id=discussion_id, thread_id=thread_id
)
def is_browser_on_page(self):
return self.thread_page.is_browser_on_page()
def __getattr__(self, name):
return getattr(self.thread_page, name)
def show_all_discussions(self):
""" Show the list of all discussions. """
self.q(css=".all-topics").click()
def close_open_thread(self):
with self.thread_page.secondary_action_menu_open(".thread-main-wrapper"):
self._find_within(".thread-main-wrapper .action-close").first.click()
def _thread_is_rendered_successfully(self, thread_id):
return self.q(css=".discussion-article[data-id='{}']".format(thread_id)).visible
def click_and_open_thread(self, thread_id):
"""
Click specific thread on the list.
"""
thread_selector = "li[data-id='{}']".format(thread_id)
self.show_all_discussions()
self.q(css=thread_selector).first.click()
EmptyPromise(
lambda: self._thread_is_rendered_successfully(thread_id),
"Thread has been rendered"
).fulfill()
def check_threads_rendered_successfully(self, thread_count):
"""
Count the number of threads available on page.
"""
return len(self.q(css=".forum-nav-thread").results) == thread_count
class InlineDiscussionPage(PageObject, DiscussionPageMixin):
"""
Acceptance tests for inline discussions.
"""
url = None
def __init__(self, browser, discussion_id):
super(InlineDiscussionPage, self).__init__(browser)
self.root_selector = (
".discussion-module[data-discussion-id='{discussion_id}'] ".format(
discussion_id=discussion_id
)
)
def _find_within(self, selector):
"""
Returns a query corresponding to the given CSS selector within the scope
of this discussion page
"""
return self.q(css=self.root_selector + " " + selector)
def is_browser_on_page(self):
self.wait_for_ajax()
return self.q(css=self.root_selector).present
def is_discussion_expanded(self):
return self._find_within(".discussion").present
def expand_discussion(self):
"""Click the link to expand the discussion"""
self._find_within(".discussion-show").first.click()
EmptyPromise(
self.is_discussion_expanded,
"Discussion expanded"
).fulfill()
def get_num_displayed_threads(self):
return len(self._find_within(".forum-nav-thread"))
def element_exists(self, selector):
return self.q(css=self.root_selector + " " + selector).present
def click_element(self, selector):
self.wait_for_element_presence(
"{discussion} {selector}".format(discussion=self.root_selector, selector=selector),
"{selector} is visible".format(selector=selector)
)
self._find_within(selector).click()
def is_new_post_button_visible(self):
"""
Check if new post button present and visible
"""
return self._is_element_visible('.new-post-btn')
@wait_for_js
def _is_element_visible(self, selector):
query = self._find_within(selector)
return query.present and query.visible
def show_thread(self, thread_id):
"""
Clicks the link for the specified thread to show the detailed view.
"""
self.wait_for_element_presence('.forum-nav-thread-link', 'Thread list has loaded')
thread_selector = ".forum-nav-thread[data-id='{thread_id}'] .forum-nav-thread-link".format(thread_id=thread_id)
self._find_within(thread_selector).first.click()
self.thread_page = InlineDiscussionThreadPage(self.browser, thread_id) # pylint: disable=attribute-defined-outside-init
self.thread_page.wait_for_page()
class InlineDiscussionThreadPage(DiscussionThreadPage):
"""
Page object to manipulate an individual thread view in an inline discussion.
"""
def __init__(self, browser, thread_id):
super(InlineDiscussionThreadPage, self).__init__(
browser,
".discussion-module .discussion-article[data-id='{thread_id}']".format(thread_id=thread_id)
)
def is_thread_anonymous(self):
return not self.q(css=".posted-details > .username").present
@wait_for_js
def check_if_selector_is_focused(self, selector):
"""
Check if selector is focused
"""
return is_focused_on_element(self.browser, selector)
class DiscussionUserProfilePage(CoursePage):
TEXT_NEXT = u'Next >'
TEXT_PREV = u'< Previous'
PAGING_SELECTOR = ".discussion-pagination[data-page-number]"
def __init__(self, browser, course_id, user_id, username, page=1):
super(DiscussionUserProfilePage, self).__init__(browser, course_id)
self.url_path = "discussion/forum/dummy/users/{}?page={}".format(user_id, page)
self.username = username
def is_browser_on_page(self):
return (
self.q(css='.discussion-user-threads[data-course-id="{}"]'.format(self.course_id)).present
and
self.q(css='.user-name').present
and
self.q(css='.user-name').text[0] == self.username
)
@wait_for_js
def is_window_on_top(self):
return self.browser.execute_script("return $('html, body').offset().top") == 0
def get_shown_thread_ids(self):
elems = self.q(css="li.forum-nav-thread")
return [elem.get_attribute("data-id") for elem in elems]
def click_on_sidebar_username(self):
self.wait_for_page()
self.q(css='.user-name').first.click()
def get_user_roles(self):
"""Get user roles"""
return self.q(css='.user-roles').text[0]
class DiscussionTabHomePage(CoursePage, DiscussionPageMixin):
ALERT_SELECTOR = ".discussion-body .forum-nav .search-alert"
def __init__(self, browser, course_id):
super(DiscussionTabHomePage, self).__init__(browser, course_id)
self.url_path = "discussion/forum/"
self.root_selector = None
def is_browser_on_page(self):
return self.q(css=".discussion-body section.home-header").present
def perform_search(self, text="dummy"):
self.q(css=".search-input").fill(text + chr(10))
EmptyPromise(
self.is_ajax_finished,
"waiting for server to return result"
).fulfill()
def is_element_visible(self, selector):
"""
Returns true if the element matching the specified selector is visible.
"""
query = self.q(css=selector)
return query.present and query.visible
def is_checkbox_selected(self, selector):
"""
Returns true or false depending upon the matching checkbox is checked.
"""
return self.q(css=selector).selected
def refresh_and_wait_for_load(self):
"""
Refresh the page and wait for all resources to load.
"""
self.browser.refresh()
self.wait_for_page()
def get_search_alert_messages(self):
return self.q(css=self.ALERT_SELECTOR + " .message").text
def get_search_alert_links(self):
return self.q(css=self.ALERT_SELECTOR + " .link-jump")
def dismiss_alert_message(self, text):
"""
dismiss any search alert message containing the specified text.
"""
def _match_messages(text):
return self.q(css=".search-alert").filter(lambda elem: text in elem.text)
for alert_id in _match_messages(text).attrs("id"):
self.q(css="{}#{} .dismiss".format(self.ALERT_SELECTOR, alert_id)).click()
EmptyPromise(
lambda: _match_messages(text).results == [],
"waiting for dismissed alerts to disappear"
).fulfill()
def click_element(self, selector):
"""
Clicks the element specified by selector
"""
element = self.q(css=selector)
return element.click()
def set_new_post_editor_value(self, new_body):
"""
Set the Discussions new post editor (wmd) with the content in new_body
"""
self.q(css=".wmd-input").fill(new_body)
def get_new_post_preview_value(self, selector=".wmd-preview > *"):
"""
Get the rendered preview of the contents of the Discussions new post editor
Waits for content to appear, as the preview is triggered on debounced/delayed onchange
"""
self.wait_for_element_visibility(selector, "WMD preview pane has contents", timeout=10)
return self.q(css=".wmd-preview").html[0]
def get_new_post_preview_text(self):
"""
Get the rendered preview of the contents of the Discussions new post editor
Waits for content to appear, as the preview is triggered on debounced/delayed onchange
"""
self.wait_for_element_visibility(".wmd-preview > div", "WMD preview pane has contents", timeout=10)
return self.q(css=".wmd-preview").text[0]
| agpl-3.0 | 8,772,388,915,769,356,000 | 38.563265 | 128 | 0.623646 | false |
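For orientation, a hedged sketch of how the page objects above are driven from a bok_choy acceptance test. The course and thread identifiers are placeholders, and the `browser` fixture is assumed to be supplied by the test harness.

```python
# Sketch: exercising DiscussionTabSingleThreadPage from a test.
# The identifiers below are invented for illustration.
from common.test.acceptance.pages.lms.discussion import (
    DiscussionTabSingleThreadPage,
)


def check_reply_editor_opens(browser):
    page = DiscussionTabSingleThreadPage(
        browser, 'course-v1:edX+Demo+2019', 'dummy_discussion_id',
        'dummy_thread_id',
    )
    page.visit()                      # provided by bok_choy's PageObject
    page.click_add_response_button()  # waits for the reply textarea focus
    assert page.has_discussion_reply_editor()
```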
utds3lab/pemu | scripts/tracetool/backend/dtrace.py | 94 | 2484 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
DTrace/SystemTap backend.
"""
__author__ = "Lluís Vilanova <[email protected]>"
__copyright__ = "Copyright 2012, Lluís Vilanova <[email protected]>"
__license__ = "GPL version 2 or (at your option) any later version"
__maintainer__ = "Stefan Hajnoczi"
__email__ = "[email protected]"
from tracetool import out
PUBLIC = True
PROBEPREFIX = None
def _probeprefix():
if PROBEPREFIX is None:
raise ValueError("you must set PROBEPREFIX")
return PROBEPREFIX
BINARY = None
def _binary():
if BINARY is None:
raise ValueError("you must set BINARY")
return BINARY
def c(events):
pass
def h(events):
out('#include "trace/generated-tracers-dtrace.h"',
'')
for e in events:
out('static inline void trace_%(name)s(%(args)s) {',
' QEMU_%(uppername)s(%(argnames)s);',
'}',
name = e.name,
args = e.args,
uppername = e.name.upper(),
argnames = ", ".join(e.args.names()),
)
def d(events):
out('provider qemu {')
for e in events:
args = str(e.args)
# DTrace provider syntax expects foo() for empty
# params, not foo(void)
if args == 'void':
args = ''
# Define prototype for probe arguments
out('',
'probe %(name)s(%(args)s);',
name = e.name,
args = args,
)
out('',
'};')
# Technically 'self' is not used by systemtap yet, but
# they recommended we keep it in the reserved list anyway
RESERVED_WORDS = (
'break', 'catch', 'continue', 'delete', 'else', 'for',
'foreach', 'function', 'global', 'if', 'in', 'limit',
'long', 'next', 'probe', 'return', 'self', 'string',
'try', 'while'
)
def stap(events):
for e in events:
# Define prototype for probe arguments
out('probe %(probeprefix)s.%(name)s = process("%(binary)s").mark("%(name)s")',
'{',
probeprefix = _probeprefix(),
name = e.name,
binary = _binary(),
)
i = 1
if len(e.args) > 0:
for name in e.args.names():
# Append underscore to reserved keywords
if name in RESERVED_WORDS:
name += '_'
out(' %s = $arg%d;' % (name, i))
i += 1
out('}')
out()
| gpl-2.0 | -6,506,933,299,458,567,000 | 21.770642 | 86 | 0.514102 | false |
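To make the generators above concrete, here is a sketch of what they would emit for one hypothetical event, `qemu_foo(int x, void *p)`. The event name and the module-level configuration are invented for illustration; `PROBEPREFIX` and `BINARY` must be set before `stap()` runs, as the guard functions above enforce.

```python
# Sketch: expected output of the dtrace backend for one invented event.
import tracetool.backend.dtrace as dtrace

dtrace.PROBEPREFIX = 'qemu'            # required by stap()
dtrace.BINARY = 'qemu-system-x86_64'   # required by stap()

# d([event]) would emit, roughly:
#   provider qemu {
#       probe qemu_foo(int x, void *p);
#   };
#
# stap([event]) would emit, roughly:
#   probe qemu.qemu_foo = process("qemu-system-x86_64").mark("qemu_foo")
#   {
#       x = $arg1;
#       p = $arg2;
#   }
```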
cbertinato/pandas | pandas/tests/io/test_packers.py | 1 | 33090 | import datetime
from distutils.version import LooseVersion
import glob
from io import BytesIO
import os
from warnings import catch_warnings
import numpy as np
import pytest
from pandas._libs.tslib import iNaT
from pandas.errors import PerformanceWarning
import pandas
from pandas import (
Categorical, DataFrame, Index, Interval, MultiIndex, NaT, Period, Series,
Timestamp, bdate_range, date_range, period_range)
import pandas.util.testing as tm
from pandas.util.testing import (
assert_categorical_equal, assert_frame_equal, assert_index_equal,
assert_series_equal, ensure_clean)
from pandas.io.packers import read_msgpack, to_msgpack
nan = np.nan
try:
import blosc # NOQA
except ImportError:
_BLOSC_INSTALLED = False
else:
_BLOSC_INSTALLED = True
try:
import zlib # NOQA
except ImportError:
_ZLIB_INSTALLED = False
else:
_ZLIB_INSTALLED = True
@pytest.fixture(scope='module')
def current_packers_data():
# our current version packers data
from pandas.tests.io.generate_legacy_storage_files import (
create_msgpack_data)
return create_msgpack_data()
@pytest.fixture(scope='module')
def all_packers_data():
# our all of our current version packers data
from pandas.tests.io.generate_legacy_storage_files import (
create_data)
return create_data()
def check_arbitrary(a, b):
if isinstance(a, (list, tuple)) and isinstance(b, (list, tuple)):
assert(len(a) == len(b))
for a_, b_ in zip(a, b):
check_arbitrary(a_, b_)
elif isinstance(a, DataFrame):
assert_frame_equal(a, b)
elif isinstance(a, Series):
assert_series_equal(a, b)
elif isinstance(a, Index):
assert_index_equal(a, b)
elif isinstance(a, Categorical):
# Temp,
# Categorical.categories is changed from str to bytes in PY3
# maybe the same as GH 13591
if b.categories.inferred_type == 'string':
pass
else:
tm.assert_categorical_equal(a, b)
elif a is NaT:
assert b is NaT
elif isinstance(a, Timestamp):
assert a == b
assert a.freq == b.freq
else:
assert(a == b)
@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning")
class TestPackers:
def setup_method(self, method):
self.path = '__%s__.msg' % tm.rands(10)
def teardown_method(self, method):
pass
def encode_decode(self, x, compress=None, **kwargs):
with ensure_clean(self.path) as p:
to_msgpack(p, x, compress=compress, **kwargs)
return read_msgpack(p, **kwargs)
@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning")
class TestAPI(TestPackers):
def test_string_io(self):
df = DataFrame(np.random.randn(10, 2))
s = df.to_msgpack(None)
result = read_msgpack(s)
tm.assert_frame_equal(result, df)
s = df.to_msgpack()
result = read_msgpack(s)
tm.assert_frame_equal(result, df)
s = df.to_msgpack()
result = read_msgpack(BytesIO(s))
tm.assert_frame_equal(result, df)
s = to_msgpack(None, df)
result = read_msgpack(s)
tm.assert_frame_equal(result, df)
with ensure_clean(self.path) as p:
s = df.to_msgpack()
with open(p, 'wb') as fh:
fh.write(s)
result = read_msgpack(p)
tm.assert_frame_equal(result, df)
def test_path_pathlib(self):
df = tm.makeDataFrame()
result = tm.round_trip_pathlib(df.to_msgpack, read_msgpack)
tm.assert_frame_equal(df, result)
def test_path_localpath(self):
df = tm.makeDataFrame()
result = tm.round_trip_localpath(df.to_msgpack, read_msgpack)
tm.assert_frame_equal(df, result)
def test_iterator_with_string_io(self):
dfs = [DataFrame(np.random.randn(10, 2)) for i in range(5)]
s = to_msgpack(None, *dfs)
for i, result in enumerate(read_msgpack(s, iterator=True)):
tm.assert_frame_equal(result, dfs[i])
def test_invalid_arg(self):
# GH10369
class A:
def __init__(self):
self.read = 0
msg = "Invalid file path or buffer object type: <class '{}'>"
with pytest.raises(ValueError, match=msg.format('NoneType')):
read_msgpack(path_or_buf=None)
with pytest.raises(ValueError, match=msg.format('dict')):
read_msgpack(path_or_buf={})
with pytest.raises(ValueError, match=msg.format(r'.*\.A')):
read_msgpack(path_or_buf=A())
class TestNumpy(TestPackers):
def test_numpy_scalar_float(self):
x = np.float32(np.random.rand())
x_rec = self.encode_decode(x)
tm.assert_almost_equal(x, x_rec)
def test_numpy_scalar_complex(self):
x = np.complex64(np.random.rand() + 1j * np.random.rand())
x_rec = self.encode_decode(x)
assert np.allclose(x, x_rec)
def test_scalar_float(self):
x = np.random.rand()
x_rec = self.encode_decode(x)
tm.assert_almost_equal(x, x_rec)
def test_scalar_bool(self):
x = np.bool_(1)
x_rec = self.encode_decode(x)
tm.assert_almost_equal(x, x_rec)
x = np.bool_(0)
x_rec = self.encode_decode(x)
tm.assert_almost_equal(x, x_rec)
def test_scalar_complex(self):
x = np.random.rand() + 1j * np.random.rand()
x_rec = self.encode_decode(x)
assert np.allclose(x, x_rec)
def test_list_numpy_float(self):
x = [np.float32(np.random.rand()) for i in range(5)]
x_rec = self.encode_decode(x)
# current msgpack cannot distinguish list/tuple
tm.assert_almost_equal(tuple(x), x_rec)
x_rec = self.encode_decode(tuple(x))
tm.assert_almost_equal(tuple(x), x_rec)
def test_list_numpy_float_complex(self):
if not hasattr(np, 'complex128'):
pytest.skip('numpy can not handle complex128')
x = [np.float32(np.random.rand()) for i in range(5)] + \
[np.complex128(np.random.rand() + 1j * np.random.rand())
for i in range(5)]
x_rec = self.encode_decode(x)
assert np.allclose(x, x_rec)
def test_list_float(self):
x = [np.random.rand() for i in range(5)]
x_rec = self.encode_decode(x)
# current msgpack cannot distinguish list/tuple
tm.assert_almost_equal(tuple(x), x_rec)
x_rec = self.encode_decode(tuple(x))
tm.assert_almost_equal(tuple(x), x_rec)
def test_list_float_complex(self):
x = [np.random.rand() for i in range(5)] + \
[(np.random.rand() + 1j * np.random.rand()) for i in range(5)]
x_rec = self.encode_decode(x)
assert np.allclose(x, x_rec)
def test_dict_float(self):
x = {'foo': 1.0, 'bar': 2.0}
x_rec = self.encode_decode(x)
tm.assert_almost_equal(x, x_rec)
def test_dict_complex(self):
x = {'foo': 1.0 + 1.0j, 'bar': 2.0 + 2.0j}
x_rec = self.encode_decode(x)
tm.assert_dict_equal(x, x_rec)
for key in x:
tm.assert_class_equal(x[key], x_rec[key], obj="complex value")
def test_dict_numpy_float(self):
x = {'foo': np.float32(1.0), 'bar': np.float32(2.0)}
x_rec = self.encode_decode(x)
tm.assert_almost_equal(x, x_rec)
def test_dict_numpy_complex(self):
x = {'foo': np.complex128(1.0 + 1.0j),
'bar': np.complex128(2.0 + 2.0j)}
x_rec = self.encode_decode(x)
tm.assert_dict_equal(x, x_rec)
for key in x:
tm.assert_class_equal(x[key], x_rec[key], obj="numpy complex128")
def test_numpy_array_float(self):
# run multiple times
for n in range(10):
x = np.random.rand(10)
for dtype in ['float32', 'float64']:
x = x.astype(dtype)
x_rec = self.encode_decode(x)
tm.assert_almost_equal(x, x_rec)
def test_numpy_array_complex(self):
x = (np.random.rand(5) + 1j * np.random.rand(5)).astype(np.complex128)
x_rec = self.encode_decode(x)
assert (all(map(lambda x, y: x == y, x, x_rec)) and
x.dtype == x_rec.dtype)
def test_list_mixed(self):
x = [1.0, np.float32(3.5), np.complex128(4.25), 'foo', np.bool_(1)]
x_rec = self.encode_decode(x)
# current msgpack cannot distinguish list/tuple
tm.assert_almost_equal(tuple(x), x_rec)
x_rec = self.encode_decode(tuple(x))
tm.assert_almost_equal(tuple(x), x_rec)
class TestBasic(TestPackers):
def test_timestamp(self):
for i in [Timestamp(
'20130101'), Timestamp('20130101', tz='US/Eastern'),
Timestamp('201301010501')]:
i_rec = self.encode_decode(i)
assert i == i_rec
def test_nat(self):
nat_rec = self.encode_decode(NaT)
assert NaT is nat_rec
def test_datetimes(self):
for i in [datetime.datetime(2013, 1, 1),
datetime.datetime(2013, 1, 1, 5, 1),
datetime.date(2013, 1, 1),
np.datetime64(datetime.datetime(2013, 1, 5, 2, 15))]:
i_rec = self.encode_decode(i)
assert i == i_rec
def test_timedeltas(self):
for i in [datetime.timedelta(days=1),
datetime.timedelta(days=1, seconds=10),
np.timedelta64(1000000)]:
i_rec = self.encode_decode(i)
assert i == i_rec
def test_periods(self):
# 13463
for i in [Period('2010-09', 'M'), Period('2014-Q1', 'Q')]:
i_rec = self.encode_decode(i)
assert i == i_rec
def test_intervals(self):
# 19967
for i in [Interval(0, 1), Interval(0, 1, 'left'),
Interval(10, 25., 'right')]:
i_rec = self.encode_decode(i)
assert i == i_rec
class TestIndex(TestPackers):
def setup_method(self, method):
super().setup_method(method)
self.d = {
'string': tm.makeStringIndex(100),
'date': tm.makeDateIndex(100),
'int': tm.makeIntIndex(100),
'rng': tm.makeRangeIndex(100),
'float': tm.makeFloatIndex(100),
'empty': Index([]),
'tuple': Index(zip(['foo', 'bar', 'baz'], [1, 2, 3])),
'period': Index(period_range('2012-1-1', freq='M', periods=3)),
'date2': Index(date_range('2013-01-1', periods=10)),
'bdate': Index(bdate_range('2013-01-02', periods=10)),
'cat': tm.makeCategoricalIndex(100),
'interval': tm.makeIntervalIndex(100),
'timedelta': tm.makeTimedeltaIndex(100, 'H')
}
self.mi = {
'reg': MultiIndex.from_tuples([('bar', 'one'), ('baz', 'two'),
('foo', 'two'),
('qux', 'one'), ('qux', 'two')],
names=['first', 'second']),
}
def test_basic_index(self):
for s, i in self.d.items():
i_rec = self.encode_decode(i)
tm.assert_index_equal(i, i_rec)
# datetime with no freq (GH5506)
i = Index([Timestamp('20130101'), Timestamp('20130103')])
i_rec = self.encode_decode(i)
tm.assert_index_equal(i, i_rec)
# datetime with timezone
i = Index([Timestamp('20130101 9:00:00'), Timestamp(
'20130103 11:00:00')]).tz_localize('US/Eastern')
i_rec = self.encode_decode(i)
tm.assert_index_equal(i, i_rec)
def test_multi_index(self):
for s, i in self.mi.items():
i_rec = self.encode_decode(i)
tm.assert_index_equal(i, i_rec)
def test_unicode(self):
i = tm.makeUnicodeIndex(100)
i_rec = self.encode_decode(i)
tm.assert_index_equal(i, i_rec)
def categorical_index(self):
# GH15487
df = DataFrame(np.random.randn(10, 2))
df = df.astype({0: 'category'}).set_index(0)
result = self.encode_decode(df)
tm.assert_frame_equal(result, df)
class TestSeries(TestPackers):
def setup_method(self, method):
super().setup_method(method)
self.d = {}
s = tm.makeStringSeries()
s.name = 'string'
self.d['string'] = s
s = tm.makeObjectSeries()
s.name = 'object'
self.d['object'] = s
s = Series(iNaT, dtype='M8[ns]', index=range(5))
self.d['date'] = s
data = {
'A': [0., 1., 2., 3., np.nan],
'B': [0, 1, 0, 1, 0],
'C': ['foo1', 'foo2', 'foo3', 'foo4', 'foo5'],
'D': date_range('1/1/2009', periods=5),
'E': [0., 1, Timestamp('20100101'), 'foo', 2.],
'F': [Timestamp('20130102', tz='US/Eastern')] * 2 +
[Timestamp('20130603', tz='CET')] * 3,
'G': [Timestamp('20130102', tz='US/Eastern')] * 5,
'H': Categorical([1, 2, 3, 4, 5]),
'I': Categorical([1, 2, 3, 4, 5], ordered=True),
'J': (np.bool_(1), 2, 3, 4, 5),
}
self.d['float'] = Series(data['A'])
self.d['int'] = Series(data['B'])
self.d['mixed'] = Series(data['E'])
self.d['dt_tz_mixed'] = Series(data['F'])
self.d['dt_tz'] = Series(data['G'])
self.d['cat_ordered'] = Series(data['H'])
self.d['cat_unordered'] = Series(data['I'])
self.d['numpy_bool_mixed'] = Series(data['J'])
def test_basic(self):
# run multiple times here
for n in range(10):
for s, i in self.d.items():
i_rec = self.encode_decode(i)
assert_series_equal(i, i_rec)
class TestCategorical(TestPackers):
def setup_method(self, method):
super().setup_method(method)
self.d = {}
self.d['plain_str'] = Categorical(['a', 'b', 'c', 'd', 'e'])
self.d['plain_str_ordered'] = Categorical(['a', 'b', 'c', 'd', 'e'],
ordered=True)
self.d['plain_int'] = Categorical([5, 6, 7, 8])
self.d['plain_int_ordered'] = Categorical([5, 6, 7, 8], ordered=True)
def test_basic(self):
# run multiple times here
for n in range(10):
for s, i in self.d.items():
i_rec = self.encode_decode(i)
assert_categorical_equal(i, i_rec)
@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning")
class TestNDFrame(TestPackers):
def setup_method(self, method):
super().setup_method(method)
data = {
'A': [0., 1., 2., 3., np.nan],
'B': [0, 1, 0, 1, 0],
'C': ['foo1', 'foo2', 'foo3', 'foo4', 'foo5'],
'D': date_range('1/1/2009', periods=5),
'E': [0., 1, Timestamp('20100101'), 'foo', 2.],
'F': [Timestamp('20130102', tz='US/Eastern')] * 5,
'G': [Timestamp('20130603', tz='CET')] * 5,
'H': Categorical(['a', 'b', 'c', 'd', 'e']),
'I': Categorical(['a', 'b', 'c', 'd', 'e'], ordered=True),
}
self.frame = {
'float': DataFrame(dict(A=data['A'], B=Series(data['A']) + 1)),
'int': DataFrame(dict(A=data['B'], B=Series(data['B']) + 1)),
'mixed': DataFrame(data)}
def test_basic_frame(self):
for s, i in self.frame.items():
i_rec = self.encode_decode(i)
assert_frame_equal(i, i_rec)
def test_multi(self):
i_rec = self.encode_decode(self.frame)
for k in self.frame.keys():
assert_frame_equal(self.frame[k], i_rec[k])
packed_items = tuple([self.frame['float'], self.frame['float'].A,
self.frame['float'].B, None])
l_rec = self.encode_decode(packed_items)
check_arbitrary(packed_items, l_rec)
# this is an oddity in that packed lists will be returned as tuples
packed_items = [self.frame['float'], self.frame['float'].A,
self.frame['float'].B, None]
l_rec = self.encode_decode(packed_items)
assert isinstance(l_rec, tuple)
check_arbitrary(packed_items, l_rec)
def test_iterator(self):
packed_items = [self.frame['float'], self.frame['float'].A,
self.frame['float'].B, None]
with ensure_clean(self.path) as path:
to_msgpack(path, *packed_items)
for i, packed in enumerate(read_msgpack(path, iterator=True)):
check_arbitrary(packed, packed_items[i])
def tests_datetimeindex_freq_issue(self):
# GH 5947
# inferring freq on the datetimeindex
df = DataFrame([1, 2, 3], index=date_range('1/1/2013', '1/3/2013'))
result = self.encode_decode(df)
assert_frame_equal(result, df)
df = DataFrame([1, 2], index=date_range('1/1/2013', '1/2/2013'))
result = self.encode_decode(df)
assert_frame_equal(result, df)
def test_dataframe_duplicate_column_names(self):
# GH 9618
expected_1 = DataFrame(columns=['a', 'a'])
expected_2 = DataFrame(columns=[1] * 100)
expected_2.loc[0] = np.random.randn(100)
expected_3 = DataFrame(columns=[1, 1])
expected_3.loc[0] = ['abc', np.nan]
result_1 = self.encode_decode(expected_1)
result_2 = self.encode_decode(expected_2)
result_3 = self.encode_decode(expected_3)
assert_frame_equal(result_1, expected_1)
assert_frame_equal(result_2, expected_2)
assert_frame_equal(result_3, expected_3)
@pytest.mark.filterwarnings("ignore:Sparse:FutureWarning")
@pytest.mark.filterwarnings("ignore:Series.to_sparse:FutureWarning")
@pytest.mark.filterwarnings("ignore:DataFrame.to_sparse:FutureWarning")
class TestSparse(TestPackers):
def _check_roundtrip(self, obj, comparator, **kwargs):
        # currently these are not implemented
# i_rec = self.encode_decode(obj)
# comparator(obj, i_rec, **kwargs)
msg = r"msgpack sparse (series|frame) is not implemented"
with pytest.raises(NotImplementedError, match=msg):
self.encode_decode(obj)
def test_sparse_series(self):
s = tm.makeStringSeries()
s[3:5] = np.nan
ss = s.to_sparse()
self._check_roundtrip(ss, tm.assert_series_equal,
check_series_type=True)
ss2 = s.to_sparse(kind='integer')
self._check_roundtrip(ss2, tm.assert_series_equal,
check_series_type=True)
ss3 = s.to_sparse(fill_value=0)
self._check_roundtrip(ss3, tm.assert_series_equal,
check_series_type=True)
def test_sparse_frame(self):
s = tm.makeDataFrame()
s.loc[3:5, 1:3] = np.nan
s.loc[8:10, -2] = np.nan
ss = s.to_sparse()
self._check_roundtrip(ss, tm.assert_frame_equal,
check_frame_type=True)
ss2 = s.to_sparse(kind='integer')
self._check_roundtrip(ss2, tm.assert_frame_equal,
check_frame_type=True)
ss3 = s.to_sparse(fill_value=0)
self._check_roundtrip(ss3, tm.assert_frame_equal,
check_frame_type=True)
class TestCompression(TestPackers):
"""See https://github.com/pandas-dev/pandas/pull/9783
"""
def setup_method(self, method):
try:
from sqlalchemy import create_engine
self._create_sql_engine = create_engine
except ImportError:
self._SQLALCHEMY_INSTALLED = False
else:
self._SQLALCHEMY_INSTALLED = True
super().setup_method(method)
data = {
'A': np.arange(1000, dtype=np.float64),
'B': np.arange(1000, dtype=np.int32),
'C': list(100 * 'abcdefghij'),
'D': date_range(datetime.datetime(2015, 4, 1), periods=1000),
'E': [datetime.timedelta(days=x) for x in range(1000)],
}
self.frame = {
'float': DataFrame({k: data[k] for k in ['A', 'A']}),
'int': DataFrame({k: data[k] for k in ['B', 'B']}),
'mixed': DataFrame(data),
}
def test_plain(self):
i_rec = self.encode_decode(self.frame)
for k in self.frame.keys():
assert_frame_equal(self.frame[k], i_rec[k])
def _test_compression(self, compress):
i_rec = self.encode_decode(self.frame, compress=compress)
for k in self.frame.keys():
value = i_rec[k]
expected = self.frame[k]
assert_frame_equal(value, expected)
# make sure that we can write to the new frames
for block in value._data.blocks:
assert block.values.flags.writeable
def test_compression_zlib(self):
if not _ZLIB_INSTALLED:
pytest.skip('no zlib')
self._test_compression('zlib')
def test_compression_blosc(self):
if not _BLOSC_INSTALLED:
pytest.skip('no blosc')
self._test_compression('blosc')
def _test_compression_warns_when_decompress_caches(
self, monkeypatch, compress):
not_garbage = []
control = [] # copied data
compress_module = globals()[compress]
real_decompress = compress_module.decompress
def decompress(ob):
"""mock decompress function that delegates to the real
decompress but caches the result and a copy of the result.
"""
res = real_decompress(ob)
not_garbage.append(res) # hold a reference to this bytes object
control.append(bytearray(res)) # copy the data here to check later
return res
# types mapped to values to add in place.
rhs = {
np.dtype('float64'): 1.0,
np.dtype('int32'): 1,
np.dtype('object'): 'a',
np.dtype('datetime64[ns]'): np.timedelta64(1, 'ns'),
np.dtype('timedelta64[ns]'): np.timedelta64(1, 'ns'),
}
with monkeypatch.context() as m, \
tm.assert_produces_warning(PerformanceWarning) as ws:
m.setattr(compress_module, 'decompress', decompress)
i_rec = self.encode_decode(self.frame, compress=compress)
for k in self.frame.keys():
value = i_rec[k]
expected = self.frame[k]
assert_frame_equal(value, expected)
# make sure that we can write to the new frames even though
# we needed to copy the data
for block in value._data.blocks:
assert block.values.flags.writeable
# mutate the data in some way
block.values[0] += rhs[block.dtype]
for w in ws:
# check the messages from our warnings
assert str(w.message) == ('copying data after decompressing; '
'this may mean that decompress is '
'caching its result')
for buf, control_buf in zip(not_garbage, control):
# make sure none of our mutations above affected the
# original buffers
assert buf == control_buf
def test_compression_warns_when_decompress_caches_zlib(self, monkeypatch):
if not _ZLIB_INSTALLED:
pytest.skip('no zlib')
self._test_compression_warns_when_decompress_caches(
monkeypatch, 'zlib')
def test_compression_warns_when_decompress_caches_blosc(self, monkeypatch):
if not _BLOSC_INSTALLED:
pytest.skip('no blosc')
self._test_compression_warns_when_decompress_caches(
monkeypatch, 'blosc')
def _test_small_strings_no_warn(self, compress):
empty = np.array([], dtype='uint8')
with tm.assert_produces_warning(None):
empty_unpacked = self.encode_decode(empty, compress=compress)
tm.assert_numpy_array_equal(empty_unpacked, empty)
assert empty_unpacked.flags.writeable
char = np.array([ord(b'a')], dtype='uint8')
with tm.assert_produces_warning(None):
char_unpacked = self.encode_decode(char, compress=compress)
tm.assert_numpy_array_equal(char_unpacked, char)
assert char_unpacked.flags.writeable
# if this test fails I am sorry because the interpreter is now in a
# bad state where b'a' points to 98 == ord(b'b').
char_unpacked[0] = ord(b'b')
        # we compare the ord of bytes b'a' with unicode 'a' because they should
        # always be the same (unless we were able to mutate the shared
        # character singleton, in which case ord(b'a') == ord(b'b')).
assert ord(b'a') == ord('a')
tm.assert_numpy_array_equal(
char_unpacked,
np.array([ord(b'b')], dtype='uint8'),
)
def test_small_strings_no_warn_zlib(self):
if not _ZLIB_INSTALLED:
pytest.skip('no zlib')
self._test_small_strings_no_warn('zlib')
def test_small_strings_no_warn_blosc(self):
if not _BLOSC_INSTALLED:
pytest.skip('no blosc')
self._test_small_strings_no_warn('blosc')
def test_readonly_axis_blosc(self):
# GH11880
if not _BLOSC_INSTALLED:
pytest.skip('no blosc')
df1 = DataFrame({'A': list('abcd')})
df2 = DataFrame(df1, index=[1., 2., 3., 4.])
assert 1 in self.encode_decode(df1['A'], compress='blosc')
assert 1. in self.encode_decode(df2['A'], compress='blosc')
def test_readonly_axis_zlib(self):
# GH11880
df1 = DataFrame({'A': list('abcd')})
df2 = DataFrame(df1, index=[1., 2., 3., 4.])
assert 1 in self.encode_decode(df1['A'], compress='zlib')
assert 1. in self.encode_decode(df2['A'], compress='zlib')
def test_readonly_axis_blosc_to_sql(self):
# GH11880
if not _BLOSC_INSTALLED:
pytest.skip('no blosc')
if not self._SQLALCHEMY_INSTALLED:
pytest.skip('no sqlalchemy')
expected = DataFrame({'A': list('abcd')})
df = self.encode_decode(expected, compress='blosc')
eng = self._create_sql_engine("sqlite:///:memory:")
df.to_sql('test', eng, if_exists='append')
result = pandas.read_sql_table('test', eng, index_col='index')
result.index.names = [None]
assert_frame_equal(expected, result)
def test_readonly_axis_zlib_to_sql(self):
# GH11880
if not _ZLIB_INSTALLED:
pytest.skip('no zlib')
if not self._SQLALCHEMY_INSTALLED:
pytest.skip('no sqlalchemy')
expected = DataFrame({'A': list('abcd')})
df = self.encode_decode(expected, compress='zlib')
eng = self._create_sql_engine("sqlite:///:memory:")
df.to_sql('test', eng, if_exists='append')
result = pandas.read_sql_table('test', eng, index_col='index')
result.index.names = [None]
assert_frame_equal(expected, result)
class TestEncoding(TestPackers):
def setup_method(self, method):
super().setup_method(method)
data = {
'A': ['\u2019'] * 1000,
'B': np.arange(1000, dtype=np.int32),
'C': list(100 * 'abcdefghij'),
'D': date_range(datetime.datetime(2015, 4, 1), periods=1000),
'E': [datetime.timedelta(days=x) for x in range(1000)],
'G': [400] * 1000
}
self.frame = {
'float': DataFrame({k: data[k] for k in ['A', 'A']}),
'int': DataFrame({k: data[k] for k in ['B', 'B']}),
'mixed': DataFrame(data),
}
self.utf_encodings = ['utf8', 'utf16', 'utf32']
def test_utf(self):
# GH10581
for encoding in self.utf_encodings:
for frame in self.frame.values():
result = self.encode_decode(frame, encoding=encoding)
assert_frame_equal(result, frame)
def test_default_encoding(self):
for frame in self.frame.values():
result = frame.to_msgpack()
expected = frame.to_msgpack(encoding='utf8')
assert result == expected
result = self.encode_decode(frame)
assert_frame_equal(result, frame)
files = glob.glob(os.path.join(os.path.dirname(__file__), "data",
"legacy_msgpack", "*", "*.msgpack"))
@pytest.fixture(params=files)
def legacy_packer(request, datapath):
return datapath(request.param)
@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning")
@pytest.mark.filterwarnings("ignore:Sparse:FutureWarning")
class TestMsgpack:
"""
How to add msgpack tests:
1. Install pandas version intended to output the msgpack.
2. Execute "generate_legacy_storage_files.py" to create the msgpack.
$ python generate_legacy_storage_files.py <output_dir> msgpack
    3. Move the created msgpack file to "data/legacy_msgpack/<version>" directory.
"""
minimum_structure = {'series': ['float', 'int', 'mixed',
'ts', 'mi', 'dup'],
'frame': ['float', 'int', 'mixed', 'mi'],
'panel': ['float'],
'index': ['int', 'date', 'period'],
'mi': ['reg2']}
def check_min_structure(self, data, version):
for typ, v in self.minimum_structure.items():
if typ == "panel":
# FIXME: kludge; get this key out of the legacy file
continue
assert typ in data, '"{0}" not found in unpacked data'.format(typ)
for kind in v:
msg = '"{0}" not found in data["{1}"]'.format(kind, typ)
assert kind in data[typ], msg
def compare(self, current_data, all_data, vf, version):
# GH12277 encoding default used to be latin-1, now utf-8
if LooseVersion(version) < LooseVersion('0.18.0'):
data = read_msgpack(vf, encoding='latin-1')
else:
data = read_msgpack(vf)
if "panel" in data:
# FIXME: kludge; get the key out of the stored file
del data["panel"]
self.check_min_structure(data, version)
for typ, dv in data.items():
assert typ in all_data, ('unpacked data contains '
'extra key "{0}"'
.format(typ))
for dt, result in dv.items():
assert dt in current_data[typ], ('data["{0}"] contains extra '
'key "{1}"'.format(typ, dt))
try:
expected = current_data[typ][dt]
except KeyError:
continue
# use a specific comparator
# if available
comp_method = "compare_{typ}_{dt}".format(typ=typ, dt=dt)
comparator = getattr(self, comp_method, None)
if comparator is not None:
comparator(result, expected, typ, version)
else:
check_arbitrary(result, expected)
return data
    def compare_series_dt_tz(self, result, expected, typ, version):
        # 8260
        # dtype is object < 0.17.0
        if LooseVersion(version) < LooseVersion('0.17.0'):
            expected = expected.astype(object)
        tm.assert_series_equal(result, expected)
    def compare_frame_dt_mixed_tzs(self, result, expected, typ, version):
        # 8260
        # dtype is object < 0.17.0
        if LooseVersion(version) < LooseVersion('0.17.0'):
            expected = expected.astype(object)
        tm.assert_frame_equal(result, expected)
def test_msgpacks_legacy(self, current_packers_data, all_packers_data,
legacy_packer, datapath):
version = os.path.basename(os.path.dirname(legacy_packer))
# GH12142 0.17 files packed in P2 can't be read in P3
if (version.startswith('0.17.') and
legacy_packer.split('.')[-4][-1] == '2'):
msg = "Files packed in Py2 can't be read in Py3 ({})"
pytest.skip(msg.format(version))
try:
with catch_warnings(record=True):
self.compare(current_packers_data, all_packers_data,
legacy_packer, version)
except ImportError:
# blosc not installed
pass
def test_msgpack_period_freq(self):
# https://github.com/pandas-dev/pandas/issues/24135
s = Series(np.random.rand(5), index=date_range('20130101', periods=5))
r = read_msgpack(s.to_msgpack())
repr(r)
| bsd-3-clause | 211,160,120,418,721,380 | 33.794953 | 79 | 0.552705 | false |
maleficarium/youtube-dl | youtube_dl/extractor/cbsnews.py | 3 | 3855 | # encoding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from .cbs import CBSBaseIE
from ..utils import (
parse_duration,
)
class CBSNewsIE(CBSBaseIE):
IE_DESC = 'CBS News'
_VALID_URL = r'https?://(?:www\.)?cbsnews\.com/(?:news|videos)/(?P<id>[\da-z_-]+)'
_TESTS = [
{
'url': 'http://www.cbsnews.com/news/tesla-and-spacex-elon-musks-industrial-empire/',
'info_dict': {
'id': 'tesla-and-spacex-elon-musks-industrial-empire',
'ext': 'flv',
'title': 'Tesla and SpaceX: Elon Musk\'s industrial empire',
'thumbnail': 'http://beta.img.cbsnews.com/i/2014/03/30/60147937-2f53-4565-ad64-1bdd6eb64679/60-0330-pelley-640x360.jpg',
'duration': 791,
},
'params': {
# rtmp download
'skip_download': True,
},
},
{
'url': 'http://www.cbsnews.com/videos/fort-hood-shooting-army-downplays-mental-illness-as-cause-of-attack/',
'info_dict': {
'id': 'SNJBOYzXiWBOvaLsdzwH8fmtP1SCd91Y',
'ext': 'mp4',
'title': 'Fort Hood shooting: Army downplays mental illness as cause of attack',
'description': 'md5:4a6983e480542d8b333a947bfc64ddc7',
'upload_date': '19700101',
'uploader': 'CBSI-NEW',
'thumbnail': 're:^https?://.*\.jpg$',
'duration': 205,
'subtitles': {
'en': [{
'ext': 'ttml',
}],
},
},
'params': {
# m3u8 download
'skip_download': True,
},
},
]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video_info = self._parse_json(self._html_search_regex(
r'(?:<ul class="media-list items" id="media-related-items"><li data-video-info|<div id="cbsNewsVideoPlayer" data-video-player-options)=\'({.+?})\'',
webpage, 'video JSON info'), video_id)
item = video_info['item'] if 'item' in video_info else video_info
guid = item['mpxRefId']
return self._extract_video_info('byGuid=%s' % guid, guid)
class CBSNewsLiveVideoIE(InfoExtractor):
IE_DESC = 'CBS News Live Videos'
_VALID_URL = r'https?://(?:www\.)?cbsnews\.com/live/video/(?P<id>[\da-z_-]+)'
_TEST = {
'url': 'http://www.cbsnews.com/live/video/clinton-sanders-prepare-to-face-off-in-nh/',
'info_dict': {
'id': 'clinton-sanders-prepare-to-face-off-in-nh',
'ext': 'flv',
'title': 'Clinton, Sanders Prepare To Face Off In NH',
'duration': 334,
},
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video_info = self._parse_json(self._html_search_regex(
r'data-story-obj=\'({.+?})\'', webpage, 'video JSON info'), video_id)['story']
hdcore_sign = 'hdcore=3.3.1'
f4m_formats = self._extract_f4m_formats(video_info['url'] + '&' + hdcore_sign, video_id)
if f4m_formats:
for entry in f4m_formats:
                # URLs without the extra param induce a 404 error
entry.update({'extra_param_to_segment_url': hdcore_sign})
self._sort_formats(f4m_formats)
return {
'id': video_id,
'title': video_info['headline'],
'thumbnail': video_info.get('thumbnail_url_hd') or video_info.get('thumbnail_url_sd'),
'duration': parse_duration(video_info.get('segmentDur')),
'formats': f4m_formats,
}
| unlicense | 1,002,380,293,783,139,500 | 36.067308 | 160 | 0.524514 | false |
wengyian/project-manager-system | node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py | 1283 | 65086 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module contains classes that help to emulate xcodebuild behavior on top of
other build systems, such as make and ninja.
"""
import copy
import gyp.common
import os
import os.path
import re
import shlex
import subprocess
import sys
import tempfile
from gyp.common import GypError
# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when
# "xcodebuild" is called too quickly (it has been found to return incorrect
# version number).
XCODE_VERSION_CACHE = None
# Populated lazily by GetXcodeArchsDefault, to an |XcodeArchsDefault| instance
# corresponding to the installed version of Xcode.
XCODE_ARCHS_DEFAULT_CACHE = None
def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
"""Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable,
and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT)."""
mapping = {'$(ARCHS_STANDARD)': archs}
if archs_including_64_bit:
mapping['$(ARCHS_STANDARD_INCLUDING_64_BIT)'] = archs_including_64_bit
return mapping
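# For example (illustrative values):
#   XcodeArchsVariableMapping(['i386'], ['i386', 'x86_64'])
# returns {'$(ARCHS_STANDARD)': ['i386'],
#          '$(ARCHS_STANDARD_INCLUDING_64_BIT)': ['i386', 'x86_64']}.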
class XcodeArchsDefault(object):
"""A class to resolve ARCHS variable from xcode_settings, resolving Xcode
macros and implementing filtering by VALID_ARCHS. The expansion of macros
depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
on the version of Xcode.
"""
# Match variable like $(ARCHS_STANDARD).
variable_pattern = re.compile(r'\$\([a-zA-Z_][a-zA-Z0-9_]*\)$')
def __init__(self, default, mac, iphonesimulator, iphoneos):
self._default = (default,)
self._archs = {'mac': mac, 'ios': iphoneos, 'iossim': iphonesimulator}
def _VariableMapping(self, sdkroot):
"""Returns the dictionary of variable mapping depending on the SDKROOT."""
sdkroot = sdkroot.lower()
if 'iphoneos' in sdkroot:
return self._archs['ios']
elif 'iphonesimulator' in sdkroot:
return self._archs['iossim']
else:
return self._archs['mac']
def _ExpandArchs(self, archs, sdkroot):
"""Expands variables references in ARCHS, and remove duplicates."""
variable_mapping = self._VariableMapping(sdkroot)
expanded_archs = []
for arch in archs:
if self.variable_pattern.match(arch):
variable = arch
try:
variable_expansion = variable_mapping[variable]
for arch in variable_expansion:
if arch not in expanded_archs:
expanded_archs.append(arch)
        except KeyError:
          print 'Warning: Ignoring unsupported variable "%s".' % variable
elif arch not in expanded_archs:
expanded_archs.append(arch)
return expanded_archs
def ActiveArchs(self, archs, valid_archs, sdkroot):
"""Expands variables references in ARCHS, and filter by VALID_ARCHS if it
is defined (if not set, Xcode accept any value in ARCHS, otherwise, only
values present in VALID_ARCHS are kept)."""
expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or '')
if valid_archs:
filtered_archs = []
for arch in expanded_archs:
if arch in valid_archs:
filtered_archs.append(arch)
expanded_archs = filtered_archs
return expanded_archs
def GetXcodeArchsDefault():
"""Returns the |XcodeArchsDefault| object to use to expand ARCHS for the
installed version of Xcode. The default values used by Xcode for ARCHS
and the expansion of the variables depends on the version of Xcode used.
For all version anterior to Xcode 5.0 or posterior to Xcode 5.1 included
uses $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses
$(ARCHS_STANDARD_INCLUDING_64_BIT). This variable was added to Xcode 5.0
and deprecated with Xcode 5.1.
For "macosx" SDKROOT, all version starting with Xcode 5.0 includes 64-bit
architecture as part of $(ARCHS_STANDARD) and default to only building it.
For "iphoneos" and "iphonesimulator" SDKROOT, 64-bit architectures are part
of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1, they
are also part of $(ARCHS_STANDARD).
  All those rules are coded in the construction of the |XcodeArchsDefault|
  object to use depending on the version of Xcode detected. The object is
  cached for performance reasons."""
global XCODE_ARCHS_DEFAULT_CACHE
if XCODE_ARCHS_DEFAULT_CACHE:
return XCODE_ARCHS_DEFAULT_CACHE
xcode_version, _ = XcodeVersion()
if xcode_version < '0500':
XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
'$(ARCHS_STANDARD)',
XcodeArchsVariableMapping(['i386']),
XcodeArchsVariableMapping(['i386']),
XcodeArchsVariableMapping(['armv7']))
elif xcode_version < '0510':
XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
'$(ARCHS_STANDARD_INCLUDING_64_BIT)',
XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
XcodeArchsVariableMapping(['i386'], ['i386', 'x86_64']),
XcodeArchsVariableMapping(
['armv7', 'armv7s'],
['armv7', 'armv7s', 'arm64']))
else:
XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
'$(ARCHS_STANDARD)',
XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
XcodeArchsVariableMapping(['i386', 'x86_64'], ['i386', 'x86_64']),
XcodeArchsVariableMapping(
['armv7', 'armv7s', 'arm64'],
['armv7', 'armv7s', 'arm64']))
return XCODE_ARCHS_DEFAULT_CACHE
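# Illustrative sketch (hypothetical setup): with Xcode 5.1 or newer installed,
# an "iphoneos" SDKROOT and ARCHS left unset,
#   GetXcodeArchsDefault().ActiveArchs(None, None, 'iphoneos')
# expands $(ARCHS_STANDARD) to ['armv7', 'armv7s', 'arm64'].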
class XcodeSettings(object):
"""A class that understands the gyp 'xcode_settings' object."""
# Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
# at class-level for efficiency.
_sdk_path_cache = {}
_sdk_root_cache = {}
# Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
# cached at class-level for efficiency.
_plist_cache = {}
# Populated lazily by GetIOSPostbuilds. Shared by all XcodeSettings, so
# cached at class-level for efficiency.
_codesigning_key_cache = {}
def __init__(self, spec):
self.spec = spec
self.isIOS = False
# Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
# This means self.xcode_settings[config] always contains all settings
# for that config -- the per-target settings as well. Settings that are
# the same for all configs are implicitly per-target settings.
self.xcode_settings = {}
configs = spec['configurations']
for configname, config in configs.iteritems():
self.xcode_settings[configname] = config.get('xcode_settings', {})
self._ConvertConditionalKeys(configname)
if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
None):
self.isIOS = True
# This is only non-None temporarily during the execution of some methods.
self.configname = None
# Used by _AdjustLibrary to match .a and .dylib entries in libraries.
self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$')
def _ConvertConditionalKeys(self, configname):
"""Converts or warns on conditional keys. Xcode supports conditional keys,
such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. This is a partial implementation
with some keys converted while the rest force a warning."""
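    # For example (hypothetical setting), a config entry
    #   {'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer'}
    # becomes {'CODE_SIGN_IDENTITY': 'iPhone Developer'} for configurations
    # whose name ends in "iphoneos"; the conditional key itself is deleted.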
settings = self.xcode_settings[configname]
conditional_keys = [key for key in settings if key.endswith(']')]
for key in conditional_keys:
# If you need more, speak up at http://crbug.com/122592
if key.endswith("[sdk=iphoneos*]"):
if configname.endswith("iphoneos"):
new_key = key.split("[")[0]
settings[new_key] = settings[key]
else:
print 'Warning: Conditional keys not implemented, ignoring:', \
' '.join(conditional_keys)
del settings[key]
def _Settings(self):
assert self.configname
return self.xcode_settings[self.configname]
def _Test(self, test_key, cond_key, default):
return self._Settings().get(test_key, default) == cond_key
def _Appendf(self, lst, test_key, format_str, default=None):
if test_key in self._Settings():
lst.append(format_str % str(self._Settings()[test_key]))
elif default:
lst.append(format_str % str(default))
def _WarnUnimplemented(self, test_key):
if test_key in self._Settings():
print 'Warning: Ignoring not yet implemented key "%s".' % test_key
def IsBinaryOutputFormat(self, configname):
default = "binary" if self.isIOS else "xml"
    fmt = self.xcode_settings[configname].get('INFOPLIST_OUTPUT_FORMAT',
                                              default)
    return fmt == "binary"
def _IsBundle(self):
return int(self.spec.get('mac_bundle', 0)) != 0
def _IsIosAppExtension(self):
return int(self.spec.get('ios_app_extension', 0)) != 0
def _IsIosWatchKitExtension(self):
return int(self.spec.get('ios_watchkit_extension', 0)) != 0
def _IsIosWatchApp(self):
return int(self.spec.get('ios_watch_app', 0)) != 0
def GetFrameworkVersion(self):
"""Returns the framework version of the current target. Only valid for
bundles."""
assert self._IsBundle()
return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A')
def GetWrapperExtension(self):
"""Returns the bundle extension (.app, .framework, .plugin, etc). Only
valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('loadable_module', 'shared_library'):
default_wrapper_extension = {
'loadable_module': 'bundle',
'shared_library': 'framework',
}[self.spec['type']]
wrapper_extension = self.GetPerTargetSetting(
'WRAPPER_EXTENSION', default=default_wrapper_extension)
return '.' + self.spec.get('product_extension', wrapper_extension)
elif self.spec['type'] == 'executable':
if self._IsIosAppExtension() or self._IsIosWatchKitExtension():
return '.' + self.spec.get('product_extension', 'appex')
else:
return '.' + self.spec.get('product_extension', 'app')
else:
assert False, "Don't know extension for '%s', target '%s'" % (
self.spec['type'], self.spec['target_name'])
def GetProductName(self):
"""Returns PRODUCT_NAME."""
return self.spec.get('product_name', self.spec['target_name'])
def GetFullProductName(self):
"""Returns FULL_PRODUCT_NAME."""
if self._IsBundle():
return self.GetWrapperName()
else:
return self._GetStandaloneBinaryPath()
def GetWrapperName(self):
"""Returns the directory name of the bundle represented by this target.
Only valid for bundles."""
assert self._IsBundle()
return self.GetProductName() + self.GetWrapperExtension()
def GetBundleContentsFolderPath(self):
"""Returns the qualified path to the bundle's contents folder. E.g.
Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
if self.isIOS:
return self.GetWrapperName()
assert self._IsBundle()
if self.spec['type'] == 'shared_library':
return os.path.join(
self.GetWrapperName(), 'Versions', self.GetFrameworkVersion())
else:
# loadable_modules have a 'Contents' folder like executables.
return os.path.join(self.GetWrapperName(), 'Contents')
def GetBundleResourceFolder(self):
"""Returns the qualified path to the bundle's resource folder. E.g.
Chromium.app/Contents/Resources. Only valid for bundles."""
assert self._IsBundle()
if self.isIOS:
return self.GetBundleContentsFolderPath()
return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')
def GetBundlePlistPath(self):
"""Returns the qualified path to the bundle's plist file. E.g.
Chromium.app/Contents/Info.plist. Only valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('executable', 'loadable_module'):
return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist')
else:
return os.path.join(self.GetBundleContentsFolderPath(),
'Resources', 'Info.plist')
def GetProductType(self):
"""Returns the PRODUCT_TYPE of this target."""
if self._IsIosAppExtension():
assert self._IsBundle(), ('ios_app_extension flag requires mac_bundle '
'(target %s)' % self.spec['target_name'])
return 'com.apple.product-type.app-extension'
if self._IsIosWatchKitExtension():
assert self._IsBundle(), ('ios_watchkit_extension flag requires '
'mac_bundle (target %s)' % self.spec['target_name'])
return 'com.apple.product-type.watchkit-extension'
if self._IsIosWatchApp():
assert self._IsBundle(), ('ios_watch_app flag requires mac_bundle '
'(target %s)' % self.spec['target_name'])
return 'com.apple.product-type.application.watchapp'
if self._IsBundle():
return {
'executable': 'com.apple.product-type.application',
'loadable_module': 'com.apple.product-type.bundle',
'shared_library': 'com.apple.product-type.framework',
}[self.spec['type']]
else:
return {
'executable': 'com.apple.product-type.tool',
'loadable_module': 'com.apple.product-type.library.dynamic',
'shared_library': 'com.apple.product-type.library.dynamic',
'static_library': 'com.apple.product-type.library.static',
}[self.spec['type']]
def GetMachOType(self):
"""Returns the MACH_O_TYPE of this target."""
# Weird, but matches Xcode.
if not self._IsBundle() and self.spec['type'] == 'executable':
return ''
return {
'executable': 'mh_execute',
'static_library': 'staticlib',
'shared_library': 'mh_dylib',
'loadable_module': 'mh_bundle',
}[self.spec['type']]
def _GetBundleBinaryPath(self):
"""Returns the name of the bundle binary of by this target.
E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
assert self._IsBundle()
    if self.spec['type'] == 'shared_library' or self.isIOS:
path = self.GetBundleContentsFolderPath()
elif self.spec['type'] in ('executable', 'loadable_module'):
path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
return os.path.join(path, self.GetExecutableName())
def _GetStandaloneExecutableSuffix(self):
if 'product_extension' in self.spec:
return '.' + self.spec['product_extension']
return {
'executable': '',
'static_library': '.a',
'shared_library': '.dylib',
'loadable_module': '.so',
}[self.spec['type']]
def _GetStandaloneExecutablePrefix(self):
return self.spec.get('product_prefix', {
'executable': '',
'static_library': 'lib',
'shared_library': 'lib',
# Non-bundled loadable_modules are called foo.so for some reason
# (that is, .so and no prefix) with the xcode build -- match that.
'loadable_module': '',
}[self.spec['type']])
def _GetStandaloneBinaryPath(self):
"""Returns the name of the non-bundle binary represented by this target.
E.g. hello_world. Only valid for non-bundles."""
assert not self._IsBundle()
assert self.spec['type'] in (
'executable', 'shared_library', 'static_library', 'loadable_module'), (
'Unexpected type %s' % self.spec['type'])
    target = self.spec['target_name']
    if self.spec['type'] in (
        'static_library', 'loadable_module', 'shared_library'):
      if target[:3] == 'lib':
        target = target[3:]
target_prefix = self._GetStandaloneExecutablePrefix()
target = self.spec.get('product_name', target)
target_ext = self._GetStandaloneExecutableSuffix()
return target_prefix + target + target_ext
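    # e.g. (illustrative): a shared_library target named 'libfoo' yields
    # 'libfoo.dylib', while an executable target 'foo' yields just 'foo'.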
def GetExecutableName(self):
"""Returns the executable name of the bundle represented by this target.
E.g. Chromium."""
if self._IsBundle():
return self.spec.get('product_name', self.spec['target_name'])
else:
return self._GetStandaloneBinaryPath()
def GetExecutablePath(self):
"""Returns the directory name of the bundle represented by this target. E.g.
Chromium.app/Contents/MacOS/Chromium."""
if self._IsBundle():
return self._GetBundleBinaryPath()
else:
return self._GetStandaloneBinaryPath()
def GetActiveArchs(self, configname):
"""Returns the architectures this target should be built for."""
config_settings = self.xcode_settings[configname]
xcode_archs_default = GetXcodeArchsDefault()
return xcode_archs_default.ActiveArchs(
config_settings.get('ARCHS'),
config_settings.get('VALID_ARCHS'),
config_settings.get('SDKROOT'))
def _GetSdkVersionInfoItem(self, sdk, infoitem):
# xcodebuild requires Xcode and can't run on Command Line Tools-only
# systems from 10.7 onward.
# Since the CLT has no SDK paths anyway, returning None is the
# most sensible route and should still do the right thing.
try:
return GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
    except (GypError, OSError):
      # xcodebuild may be missing or fail; fall through and return None.
      pass
def _SdkRoot(self, configname):
if configname is None:
configname = self.configname
return self.GetPerConfigSetting('SDKROOT', configname, default='')
def _SdkPath(self, configname=None):
sdk_root = self._SdkRoot(configname)
if sdk_root.startswith('/'):
return sdk_root
return self._XcodeSdkPath(sdk_root)
def _XcodeSdkPath(self, sdk_root):
if sdk_root not in XcodeSettings._sdk_path_cache:
sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path')
XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
if sdk_root:
XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
return XcodeSettings._sdk_path_cache[sdk_root]
def _AppendPlatformVersionMinFlags(self, lst):
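    # e.g. (illustrative values): MACOSX_DEPLOYMENT_TARGET=10.9 maps to
    # '-mmacosx-version-min=10.9'; IPHONEOS_DEPLOYMENT_TARGET maps to the
    # device or simulator -version-min flag depending on the SDK in use.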
self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings():
# TODO: Implement this better?
sdk_path_basename = os.path.basename(self._SdkPath())
if sdk_path_basename.lower().startswith('iphonesimulator'):
self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
'-mios-simulator-version-min=%s')
else:
self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
'-miphoneos-version-min=%s')
def GetCflags(self, configname, arch=None):
"""Returns flags that need to be added to .c, .cc, .m, and .mm
compilations."""
    # These functions (and the similar ones below) do not offer complete
# emulation of all xcode_settings keys. They're implemented on demand.
self.configname = configname
cflags = []
sdk_root = self._SdkPath()
if 'SDKROOT' in self._Settings() and sdk_root:
cflags.append('-isysroot %s' % sdk_root)
if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
cflags.append('-Wconstant-conversion')
if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'):
cflags.append('-funsigned-char')
if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'):
cflags.append('-fasm-blocks')
if 'GCC_DYNAMIC_NO_PIC' in self._Settings():
if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES':
cflags.append('-mdynamic-no-pic')
else:
pass
# TODO: In this case, it depends on the target. xcode passes
# mdynamic-no-pic by default for executable and possibly static lib
# according to mento
if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'):
cflags.append('-mpascal-strings')
self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s')
if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'):
dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf')
if dbg_format == 'dwarf':
cflags.append('-gdwarf-2')
elif dbg_format == 'stabs':
raise NotImplementedError('stabs debug format is not supported yet.')
elif dbg_format == 'dwarf-with-dsym':
cflags.append('-gdwarf-2')
else:
raise NotImplementedError('Unknown debug format %s' % dbg_format)
if self._Settings().get('GCC_STRICT_ALIASING') == 'YES':
cflags.append('-fstrict-aliasing')
elif self._Settings().get('GCC_STRICT_ALIASING') == 'NO':
cflags.append('-fno-strict-aliasing')
if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'):
cflags.append('-fvisibility=hidden')
if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'):
cflags.append('-Werror')
if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'):
cflags.append('-Wnewline-eof')
# In Xcode, this is only activated when GCC_COMPILER_VERSION is clang or
# llvm-gcc. It also requires a fairly recent libtool, and
# if the system clang isn't used, DYLD_LIBRARY_PATH needs to contain the
# path to the libLTO.dylib that matches the used clang.
if self._Test('LLVM_LTO', 'YES', default='NO'):
cflags.append('-flto')
self._AppendPlatformVersionMinFlags(cflags)
# TODO:
if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'):
self._WarnUnimplemented('COPY_PHASE_STRIP')
self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS')
self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS')
# TODO: This is exported correctly, but assigning to it is not supported.
self._WarnUnimplemented('MACH_O_TYPE')
self._WarnUnimplemented('PRODUCT_TYPE')
if arch is not None:
archs = [arch]
else:
assert self.configname
archs = self.GetActiveArchs(self.configname)
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
archs = ['i386']
cflags.append('-arch ' + archs[0])
if archs[0] in ('i386', 'x86_64'):
if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse3')
if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES',
default='NO'):
cflags.append('-mssse3') # Note 3rd 's'.
if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse4.1')
if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse4.2')
cflags += self._Settings().get('WARNING_CFLAGS', [])
if sdk_root:
framework_root = sdk_root
else:
framework_root = ''
config = self.spec['configurations'][self.configname]
framework_dirs = config.get('mac_framework_dirs', [])
for directory in framework_dirs:
cflags.append('-F' + directory.replace('$(SDKROOT)', framework_root))
self.configname = None
return cflags
def GetCflagsC(self, configname):
"""Returns flags that need to be added to .c, and .m compilations."""
self.configname = configname
cflags_c = []
if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi':
cflags_c.append('-ansi')
else:
self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
cflags_c += self._Settings().get('OTHER_CFLAGS', [])
self.configname = None
return cflags_c
def GetCflagsCC(self, configname):
"""Returns flags that need to be added to .cc, and .mm compilations."""
self.configname = configname
cflags_cc = []
clang_cxx_language_standard = self._Settings().get(
'CLANG_CXX_LANGUAGE_STANDARD')
    # Note: Don't map c++0x to c++11 so that c++0x can be used with older
# clangs that don't understand c++11 yet (like Xcode 4.2's).
if clang_cxx_language_standard:
cflags_cc.append('-std=%s' % clang_cxx_language_standard)
self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
cflags_cc.append('-fno-rtti')
if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
cflags_cc.append('-fno-exceptions')
if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'):
cflags_cc.append('-fvisibility-inlines-hidden')
if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
cflags_cc.append('-fno-threadsafe-statics')
# Note: This flag is a no-op for clang, it only has an effect for gcc.
if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'):
cflags_cc.append('-Wno-invalid-offsetof')
other_ccflags = []
for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']):
# TODO: More general variable expansion. Missing in many other places too.
if flag in ('$inherited', '$(inherited)', '${inherited}'):
flag = '$OTHER_CFLAGS'
if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'):
other_ccflags += self._Settings().get('OTHER_CFLAGS', [])
else:
other_ccflags.append(flag)
cflags_cc += other_ccflags
self.configname = None
return cflags_cc
def _AddObjectiveCGarbageCollectionFlags(self, flags):
gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported')
if gc_policy == 'supported':
flags.append('-fobjc-gc')
elif gc_policy == 'required':
flags.append('-fobjc-gc-only')
def _AddObjectiveCARCFlags(self, flags):
if self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO'):
flags.append('-fobjc-arc')
def _AddObjectiveCMissingPropertySynthesisFlags(self, flags):
if self._Test('CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS',
'YES', default='NO'):
flags.append('-Wobjc-missing-property-synthesis')
def GetCflagsObjC(self, configname):
"""Returns flags that need to be added to .m compilations."""
self.configname = configname
cflags_objc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
self._AddObjectiveCARCFlags(cflags_objc)
self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc)
self.configname = None
return cflags_objc
def GetCflagsObjCC(self, configname):
"""Returns flags that need to be added to .mm compilations."""
self.configname = configname
cflags_objcc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
self._AddObjectiveCARCFlags(cflags_objcc)
self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc)
if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
cflags_objcc.append('-fobjc-call-cxx-cdtors')
self.configname = None
return cflags_objcc
def GetInstallNameBase(self):
"""Return DYLIB_INSTALL_NAME_BASE for this target."""
# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
if (self.spec['type'] != 'shared_library' and
(self.spec['type'] != 'loadable_module' or self._IsBundle())):
return None
install_base = self.GetPerTargetSetting(
'DYLIB_INSTALL_NAME_BASE',
default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib')
return install_base
def _StandardizePath(self, path):
"""Do :standardizepath processing for path."""
# I'm not quite sure what :standardizepath does. Just call normpath(),
# but don't let @executable_path/../foo collapse to foo.
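    # e.g. (illustrative): '@executable_path/Foo/../Bar' becomes
    # '@executable_path/Bar', while '@executable_path/../foo' is left alone.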
if '/' in path:
prefix, rest = '', path
if path.startswith('@'):
prefix, rest = path.split('/', 1)
rest = os.path.normpath(rest) # :standardizepath
path = os.path.join(prefix, rest)
return path
def GetInstallName(self):
"""Return LD_DYLIB_INSTALL_NAME for this target."""
# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
if (self.spec['type'] != 'shared_library' and
(self.spec['type'] != 'loadable_module' or self._IsBundle())):
return None
default_install_name = \
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)'
install_name = self.GetPerTargetSetting(
'LD_DYLIB_INSTALL_NAME', default=default_install_name)
# Hardcode support for the variables used in chromium for now, to
# unblock people using the make build.
if '$' in install_name:
assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
'$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), (
'Variables in LD_DYLIB_INSTALL_NAME are not generally supported '
'yet in target \'%s\' (got \'%s\')' %
(self.spec['target_name'], install_name))
install_name = install_name.replace(
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)',
self._StandardizePath(self.GetInstallNameBase()))
if self._IsBundle():
# These are only valid for bundles, hence the |if|.
install_name = install_name.replace(
'$(WRAPPER_NAME)', self.GetWrapperName())
install_name = install_name.replace(
'$(PRODUCT_NAME)', self.GetProductName())
else:
assert '$(WRAPPER_NAME)' not in install_name
assert '$(PRODUCT_NAME)' not in install_name
install_name = install_name.replace(
'$(EXECUTABLE_PATH)', self.GetExecutablePath())
return install_name
def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
"""Checks if ldflag contains a filename and if so remaps it from
gyp-directory-relative to build-directory-relative."""
# This list is expanded on demand.
# They get matched as:
# -exported_symbols_list file
# -Wl,exported_symbols_list file
# -Wl,exported_symbols_list,file
LINKER_FILE = r'(\S+)'
WORD = r'\S+'
linker_flags = [
['-exported_symbols_list', LINKER_FILE], # Needed for NaCl.
['-unexported_symbols_list', LINKER_FILE],
['-reexported_symbols_list', LINKER_FILE],
['-sectcreate', WORD, WORD, LINKER_FILE], # Needed for remoting.
]
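    # e.g. (illustrative): for '-Wl,-exported_symbols_list,foo/sym.list', the
    # 'foo/sym.list' group is remapped from gyp-relative to build-relative.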
for flag_pattern in linker_flags:
regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern))
m = regex.match(ldflag)
if m:
ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \
ldflag[m.end(1):]
# Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS,
# TODO(thakis): Update ffmpeg.gyp):
if ldflag.startswith('-L'):
ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):])
return ldflag
def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
"""Returns flags that need to be passed to the linker.
Args:
configname: The name of the configuration to get ld flags for.
      product_dir: The directory where products such as static and dynamic
          libraries are placed. This is added to the library search path.
      gyp_to_build_path: A function that converts paths relative to the
          current gyp file to paths relative to the build directory.
"""
self.configname = configname
ldflags = []
# The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS
# can contain entries that depend on this. Explicitly absolutify these.
for ldflag in self._Settings().get('OTHER_LDFLAGS', []):
ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path))
if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'):
ldflags.append('-Wl,-dead_strip')
if self._Test('PREBINDING', 'YES', default='NO'):
ldflags.append('-Wl,-prebind')
self._Appendf(
ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s')
self._Appendf(
ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s')
self._AppendPlatformVersionMinFlags(ldflags)
if 'SDKROOT' in self._Settings() and self._SdkPath():
ldflags.append('-isysroot ' + self._SdkPath())
for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []):
ldflags.append('-L' + gyp_to_build_path(library_path))
if 'ORDER_FILE' in self._Settings():
ldflags.append('-Wl,-order_file ' +
'-Wl,' + gyp_to_build_path(
self._Settings()['ORDER_FILE']))
if arch is not None:
archs = [arch]
else:
assert self.configname
archs = self.GetActiveArchs(self.configname)
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
archs = ['i386']
ldflags.append('-arch ' + archs[0])
# Xcode adds the product directory by default.
ldflags.append('-L' + product_dir)
install_name = self.GetInstallName()
if install_name and self.spec['type'] != 'loadable_module':
ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))
for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
ldflags.append('-Wl,-rpath,' + rpath)
sdk_root = self._SdkPath()
if not sdk_root:
sdk_root = ''
config = self.spec['configurations'][self.configname]
framework_dirs = config.get('mac_framework_dirs', [])
for directory in framework_dirs:
ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension()
if sdk_root and is_extension:
# Adds the link flags for extensions. These flags are common for all
# extensions and provide loader and main function.
# These flags reflect the compilation options used by xcode to compile
# extensions.
ldflags.append('-lpkstart')
      if XcodeVersion()[0] < '0900':
ldflags.append(sdk_root +
'/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit')
ldflags.append('-fapplication-extension')
ldflags.append('-Xlinker -rpath '
'-Xlinker @executable_path/../../Frameworks')
self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
self.configname = None
return ldflags
def GetLibtoolflags(self, configname):
"""Returns flags that need to be passed to the static linker.
Args:
      configname: The name of the configuration to get libtool flags for.
"""
self.configname = configname
libtoolflags = []
for libtoolflag in self._Settings().get('OTHER_LDFLAGS', []):
libtoolflags.append(libtoolflag)
# TODO(thakis): ARCHS?
self.configname = None
return libtoolflags
def GetPerTargetSettings(self):
"""Gets a list of all the per-target settings. This will only fetch keys
whose values are the same across all configurations."""
first_pass = True
result = {}
for configname in sorted(self.xcode_settings.keys()):
if first_pass:
result = dict(self.xcode_settings[configname])
first_pass = False
else:
for key, value in self.xcode_settings[configname].iteritems():
if key not in result:
continue
elif result[key] != value:
del result[key]
return result
def GetPerConfigSetting(self, setting, configname, default=None):
if configname in self.xcode_settings:
return self.xcode_settings[configname].get(setting, default)
else:
return self.GetPerTargetSetting(setting, default)
def GetPerTargetSetting(self, setting, default=None):
"""Tries to get xcode_settings.setting from spec. Assumes that the setting
has the same value in all configurations and throws otherwise."""
is_first_pass = True
result = None
for configname in sorted(self.xcode_settings.keys()):
if is_first_pass:
result = self.xcode_settings[configname].get(setting, None)
is_first_pass = False
else:
assert result == self.xcode_settings[configname].get(setting, None), (
"Expected per-target setting for '%s', got per-config setting "
"(target %s)" % (setting, self.spec['target_name']))
if result is None:
return default
return result
def _GetStripPostbuilds(self, configname, output_binary, quiet):
"""Returns a list of shell commands that contain the shell commands
neccessary to strip this target's binary. These should be run as postbuilds
before the actual postbuilds run."""
self.configname = configname
result = []
if (self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and
self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
default_strip_style = 'debugging'
if self.spec['type'] == 'loadable_module' and self._IsBundle():
default_strip_style = 'non-global'
elif self.spec['type'] == 'executable':
default_strip_style = 'all'
strip_style = self._Settings().get('STRIP_STYLE', default_strip_style)
strip_flags = {
'all': '',
'non-global': '-x',
'debugging': '-S',
}[strip_style]
explicit_strip_flags = self._Settings().get('STRIPFLAGS', '')
if explicit_strip_flags:
strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags)
if not quiet:
result.append('echo STRIP\\(%s\\)' % self.spec['target_name'])
result.append('strip %s %s' % (strip_flags, output_binary))
self.configname = None
return result
def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
"""Returns a list of shell commands that contain the shell commands
neccessary to massage this target's debug information. These should be run
as postbuilds before the actual postbuilds run."""
self.configname = configname
# For static libraries, no dSYMs are created.
result = []
if (self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and
self._Test(
'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and
self.spec['type'] != 'static_library'):
if not quiet:
result.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name'])
result.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM'))
self.configname = None
return result
def _GetTargetPostbuilds(self, configname, output, output_binary,
quiet=False):
"""Returns a list of shell commands that contain the shell commands
to run as postbuilds for this target, before the actual postbuilds."""
# dSYMs need to build before stripping happens.
return (
self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) +
self._GetStripPostbuilds(configname, output_binary, quiet))
def _GetIOSPostbuilds(self, configname, output_binary):
"""Return a shell command to codesign the iOS output binary so it can
be deployed to a device. This should be run as the very last step of the
build."""
if not (self.isIOS and self.spec['type'] == 'executable'):
return []
settings = self.xcode_settings[configname]
key = self._GetIOSCodeSignIdentityKey(settings)
if not key:
return []
# Warn for any unimplemented signing xcode keys.
unimpl = ['OTHER_CODE_SIGN_FLAGS']
unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
if unimpl:
print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
', '.join(sorted(unimpl)))
return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % (
os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''),
settings.get('CODE_SIGN_ENTITLEMENTS', ''),
settings.get('PROVISIONING_PROFILE', ''))
]
def _GetIOSCodeSignIdentityKey(self, settings):
identity = settings.get('CODE_SIGN_IDENTITY')
if not identity:
return None
if identity not in XcodeSettings._codesigning_key_cache:
output = subprocess.check_output(
['security', 'find-identity', '-p', 'codesigning', '-v'])
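      # Each matching line of output looks roughly like (hypothetical):
      #   1) 0123456789ABCDEF0123456789ABCDEF01234567 "iPhone Developer: ..."
      # so line.split()[1] below picks out the certificate fingerprint.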
for line in output.splitlines():
if identity in line:
fingerprint = line.split()[1]
cache = XcodeSettings._codesigning_key_cache
assert identity not in cache or fingerprint == cache[identity], (
"Multiple codesigning fingerprints for identity: %s" % identity)
XcodeSettings._codesigning_key_cache[identity] = fingerprint
return XcodeSettings._codesigning_key_cache.get(identity, '')
def AddImplicitPostbuilds(self, configname, output, output_binary,
postbuilds=[], quiet=False):
"""Returns a list of shell commands that should run before and after
|postbuilds|."""
assert output_binary is not None
pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
post = self._GetIOSPostbuilds(configname, output_binary)
return pre + postbuilds + post
def _AdjustLibrary(self, library, config_name=None):
if library.endswith('.framework'):
l = '-framework ' + os.path.splitext(os.path.basename(library))[0]
else:
m = self.library_re.match(library)
if m:
l = '-l' + m.group(1)
else:
l = library
sdk_root = self._SdkPath(config_name)
if not sdk_root:
sdk_root = ''
    # Xcode 7 started shipping with ".tbd" (text based stubs) files instead of
    # ".dylib" without providing real support for them. What it does, for
    # "/usr/lib" libraries, is "-L/usr/lib -lname", which is dependent on the
    # library order and causes collisions when building Chrome.
    #
    # Instead substitute ".dylib" with ".tbd" in the generated project when the
    # following conditions are both true:
    # - library is referenced in the gyp file as "$(SDKROOT)/**/*.dylib",
    # - the ".dylib" file does not exist but a ".tbd" file does.
library = l.replace('$(SDKROOT)', sdk_root)
if l.startswith('$(SDKROOT)'):
basename, ext = os.path.splitext(library)
if ext == '.dylib' and not os.path.exists(library):
tbd_library = basename + '.tbd'
if os.path.exists(tbd_library):
library = tbd_library
return library
def AdjustLibraries(self, libraries, config_name=None):
"""Transforms entries like 'Cocoa.framework' in libraries into entries like
'-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
"""
libraries = [self._AdjustLibrary(library, config_name)
for library in libraries]
return libraries
def _BuildMachineOSBuild(self):
return GetStdout(['sw_vers', '-buildVersion'])
def _XcodeIOSDeviceFamily(self, configname):
family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
return [int(x) for x in family.split(',')]
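  # e.g. (illustrative): TARGETED_DEVICE_FAMILY='1,2' (iPhone and iPad) makes
  # the helper above yield [1, 2], used for the UIDeviceFamily plist key.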
def GetExtraPlistItems(self, configname=None):
"""Returns a dictionary with extra items to insert into Info.plist."""
if configname not in XcodeSettings._plist_cache:
cache = {}
cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
xcode, xcode_build = XcodeVersion()
cache['DTXcode'] = xcode
cache['DTXcodeBuild'] = xcode_build
sdk_root = self._SdkRoot(configname)
if not sdk_root:
sdk_root = self._DefaultSdkRoot()
cache['DTSDKName'] = sdk_root
if xcode >= '0430':
cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
sdk_root, 'ProductBuildVersion')
else:
cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
if self.isIOS:
cache['DTPlatformName'] = cache['DTSDKName']
if configname.endswith("iphoneos"):
cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem(
sdk_root, 'ProductVersion')
cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
else:
cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
XcodeSettings._plist_cache[configname] = cache
# Include extra plist items that are per-target, not per global
# XcodeSettings.
items = dict(XcodeSettings._plist_cache[configname])
if self.isIOS:
items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
return items
def _DefaultSdkRoot(self):
"""Returns the default SDKROOT to use.
Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
project, then the environment variable was empty. Starting with this
version, Xcode uses the name of the newest SDK installed.
"""
xcode_version, xcode_build = XcodeVersion()
if xcode_version < '0500':
return ''
default_sdk_path = self._XcodeSdkPath('')
default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
if default_sdk_root:
return default_sdk_root
try:
all_sdks = GetStdout(['xcodebuild', '-showsdks'])
  except Exception:
# If xcodebuild fails, there will be no valid SDKs
return ''
for line in all_sdks.splitlines():
items = line.split()
if len(items) >= 3 and items[-2] == '-sdk':
sdk_root = items[-1]
sdk_path = self._XcodeSdkPath(sdk_root)
if sdk_path == default_sdk_path:
return sdk_root
return ''
class MacPrefixHeader(object):
"""A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
This feature consists of several pieces:
* If GCC_PREFIX_HEADER is present, all compilations in that project get an
additional |-include path_to_prefix_header| cflag.
* If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
instead compiled, and all other compilations in the project get an
additional |-include path_to_compiled_header| instead.
+ Compiled prefix headers have the extension gch. There is one gch file for
every language used in the project (c, cc, m, mm), since gch files for
different languages aren't compatible.
+ gch files themselves are built with the target's normal cflags, but they
obviously don't get the |-include| flag. Instead, they need a -x flag that
describes their language.
+ All o files in the target need to depend on the gch file, to make sure
it's built before any o file is built.
This class helps with some of these tasks, but it needs help from the build
system for writing dependencies to the gch files, for writing build commands
for the gch files, and for figuring out the location of the gch files.
"""
def __init__(self, xcode_settings,
gyp_path_to_build_path, gyp_path_to_build_output):
"""If xcode_settings is None, all methods on this class are no-ops.
Args:
gyp_path_to_build_path: A function that takes a gyp-relative path,
and returns a path relative to the build directory.
gyp_path_to_build_output: A function that takes a gyp-relative path and
a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
to where the output of precompiling that path for that language
should be placed (without the trailing '.gch').
"""
# This doesn't support per-configuration prefix headers. Good enough
# for now.
self.header = None
self.compile_headers = False
if xcode_settings:
self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER')
self.compile_headers = xcode_settings.GetPerTargetSetting(
'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO'
self.compiled_headers = {}
if self.header:
if self.compile_headers:
for lang in ['c', 'cc', 'm', 'mm']:
self.compiled_headers[lang] = gyp_path_to_build_output(
self.header, lang)
self.header = gyp_path_to_build_path(self.header)
def _CompiledHeader(self, lang, arch):
assert self.compile_headers
h = self.compiled_headers[lang]
if arch:
h += '.' + arch
return h
def GetInclude(self, lang, arch=None):
"""Gets the cflags to include the prefix header for language |lang|."""
if self.compile_headers and lang in self.compiled_headers:
return '-include %s' % self._CompiledHeader(lang, arch)
elif self.header:
return '-include %s' % self.header
else:
return ''
def _Gch(self, lang, arch):
"""Returns the actual file name of the prefix header for language |lang|."""
assert self.compile_headers
return self._CompiledHeader(lang, arch) + '.gch'
def GetObjDependencies(self, sources, objs, arch=None):
"""Given a list of source files and the corresponding object files, returns
a list of (source, object, gch) tuples, where |gch| is the build-directory
    relative path to the gch file each object file depends on. |sources[i]|
    has to be the source file belonging to |objs[i]|."""
if not self.header or not self.compile_headers:
return []
result = []
for source, obj in zip(sources, objs):
ext = os.path.splitext(source)[1]
lang = {
'.c': 'c',
'.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc',
'.m': 'm',
'.mm': 'mm',
}.get(ext, None)
if lang:
result.append((source, obj, self._Gch(lang, arch)))
return result
def GetPchBuildCommands(self, arch=None):
"""Returns [(path_to_gch, language_flag, language, header)].
|path_to_gch| and |header| are relative to the build directory.
"""
if not self.header or not self.compile_headers:
return []
return [
(self._Gch('c', arch), '-x c-header', 'c', self.header),
(self._Gch('cc', arch), '-x c++-header', 'cc', self.header),
(self._Gch('m', arch), '-x objective-c-header', 'm', self.header),
(self._Gch('mm', arch), '-x objective-c++-header', 'mm', self.header),
]
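# Illustrative usage sketch (added for clarity; not part of the original
# module). The two path-mapping callables are hypothetical stand-ins for
# what a generator would supply:
#   pch = MacPrefixHeader(
#       xcode_settings,
#       lambda path: os.path.join('out', path),
#       lambda path, lang: os.path.join('out', 'pch', lang, path))
#   cflags_cc = pch.GetInclude('cc')   # e.g. '-include out/pch/cc/prefix.h'
#   for gch, lang_flag, lang, header in pch.GetPchBuildCommands():
#     pass  # emit a rule compiling |header| with |lang_flag| into |gch|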
def XcodeVersion():
"""Returns a tuple of version and build version of installed Xcode."""
# `xcodebuild -version` output looks like
# Xcode 4.6.3
# Build version 4H1503
# or like
# Xcode 3.2.6
# Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
# BuildVersion: 10M2518
# Convert that to '0463', '4H1503'.
global XCODE_VERSION_CACHE
if XCODE_VERSION_CACHE:
return XCODE_VERSION_CACHE
try:
version_list = GetStdout(['xcodebuild', '-version']).splitlines()
# In some circumstances xcodebuild exits 0 but doesn't return
# the right results; for example, a user on 10.7 or 10.8 with
# a bogus path set via xcode-select
# In that case this may be a CLT-only install so fall back to
# checking that version.
if len(version_list) < 2:
raise GypError("xcodebuild returned unexpected results")
  except Exception:
version = CLTVersion()
if version:
version = re.match(r'(\d\.\d\.?\d*)', version).groups()[0]
else:
raise GypError("No Xcode or CLT version detected!")
# The CLT has no build information, so we return an empty string.
version_list = [version, '']
version = version_list[0]
build = version_list[-1]
# Be careful to convert "4.2" to "0420":
version = version.split()[-1].replace('.', '')
version = (version + '0' * (3 - len(version))).zfill(4)
if build:
build = build.split()[-1]
XCODE_VERSION_CACHE = (version, build)
return XCODE_VERSION_CACHE
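# A minimal standalone replica of the padding step above (added for
# clarity; not in the original gyp source):
def _PadXcodeVersion(version_field):
  """Sketch: maps '4.6.3' -> '0463' and '4.2' -> '0420', like the code above."""
  v = version_field.split()[-1].replace('.', '')
  return (v + '0' * (3 - len(v))).zfill(4)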
# This function ported from the logic in Homebrew's CLT version check
def CLTVersion():
"""Returns the version of command-line tools from pkgutil."""
# pkgutil output looks like
# package-id: com.apple.pkg.CLTools_Executables
# version: 5.0.1.0.1.1382131676
# volume: /
# location: /
# install-time: 1382544035
# groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group
STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
regex = re.compile('version: (?P<version>.+)')
for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
try:
output = GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
return re.search(regex, output).groupdict()['version']
    except Exception:
continue
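# Worked example of the extraction above (added; not in the original
# source). Against the sample pkgutil output quoted in the docstring,
#   re.search(regex, "package-id: com.apple.pkg.CLTools_Executables\n"
#                    "version: 5.0.1.0.1.1382131676\n"
#                    "volume: /\n").groupdict()['version']
# evaluates to '5.0.1.0.1.1382131676'.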
def GetStdout(cmdlist):
"""Returns the content of standard output returned by invoking |cmdlist|.
  Raises |GypError| if the command returns with a non-zero return code."""
job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
out = job.communicate()[0]
if job.returncode != 0:
sys.stderr.write(out + '\n')
raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
return out.rstrip('\n')
def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
"""Merges the global xcode_settings dictionary into each configuration of the
target represented by spec. For keys that are both in the global and the local
  xcode_settings dict, the local key gets precedence.
"""
# The xcode generator special-cases global xcode_settings and does something
# that amounts to merging in the global xcode_settings into each local
# xcode_settings dict.
global_xcode_settings = global_dict.get('xcode_settings', {})
for config in spec['configurations'].values():
if 'xcode_settings' in config:
new_settings = global_xcode_settings.copy()
new_settings.update(config['xcode_settings'])
config['xcode_settings'] = new_settings
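# Example of the precedence rule above (illustrative values):
#   global xcode_settings: {'SDKROOT': 'macosx', 'ARCHS': 'x86_64'}
#   local xcode_settings:  {'ARCHS': 'i386'}
#   merged result:         {'SDKROOT': 'macosx', 'ARCHS': 'i386'}  # local wins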
def IsMacBundle(flavor, spec):
"""Returns if |spec| should be treated as a bundle.
Bundles are directories with a certain subdirectory structure, instead of
just a single file. Bundle rules do not produce a binary but also package
resources into that directory."""
is_mac_bundle = (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac')
if is_mac_bundle:
assert spec['type'] != 'none', (
'mac_bundle targets cannot have type none (target "%s")' %
spec['target_name'])
return is_mac_bundle
def GetMacBundleResources(product_dir, xcode_settings, resources):
"""Yields (output, resource) pairs for every resource in |resources|.
Only call this for mac bundle targets.
Args:
product_dir: Path to the directory containing the output bundle,
relative to the build directory.
xcode_settings: The XcodeSettings of the current target.
resources: A list of bundle resources, relative to the build directory.
"""
dest = os.path.join(product_dir,
xcode_settings.GetBundleResourceFolder())
for res in resources:
output = dest
# The make generator doesn't support it, so forbid it everywhere
    # to keep the generators more interchangeable.
assert ' ' not in res, (
"Spaces in resource filenames not supported (%s)" % res)
# Split into (path,file).
res_parts = os.path.split(res)
# Now split the path into (prefix,maybe.lproj).
lproj_parts = os.path.split(res_parts[0])
# If the resource lives in a .lproj bundle, add that to the destination.
if lproj_parts[1].endswith('.lproj'):
output = os.path.join(output, lproj_parts[1])
output = os.path.join(output, res_parts[1])
# Compiled XIB files are referred to by .nib.
if output.endswith('.xib'):
output = os.path.splitext(output)[0] + '.nib'
# Compiled storyboard files are referred to by .storyboardc.
if output.endswith('.storyboard'):
output = os.path.splitext(output)[0] + '.storyboardc'
yield output, res
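# Example of the mapping above (added for clarity; illustrative paths):
#   'res/en.lproj/Main.xib'  -> <dest>/en.lproj/Main.nib
#   'res/Intro.storyboard'   -> <dest>/Intro.storyboardc
#   'res/icon.png'           -> <dest>/icon.png
# where <dest> is |product_dir| joined with the bundle's resource folder.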
def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
"""Returns (info_plist, dest_plist, defines, extra_env), where:
* |info_plist| is the source plist path, relative to the
build directory,
* |dest_plist| is the destination plist path, relative to the
build directory,
* |defines| is a list of preprocessor defines (empty if the plist
      shouldn't be preprocessed),
* |extra_env| is a dict of env variables that should be exported when
invoking |mac_tool copy-info-plist|.
Only call this for mac bundle targets.
Args:
product_dir: Path to the directory containing the output bundle,
relative to the build directory.
xcode_settings: The XcodeSettings of the current target.
    gyp_path_to_build_path: A function that converts paths relative to the
        current gyp file to paths relative to the build directory.
"""
info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE')
if not info_plist:
return None, None, [], {}
# The make generator doesn't support it, so forbid it everywhere
  # to keep the generators more interchangeable.
assert ' ' not in info_plist, (
"Spaces in Info.plist filenames not supported (%s)" % info_plist)
info_plist = gyp_path_to_build_path(info_plist)
# If explicitly set to preprocess the plist, invoke the C preprocessor and
# specify any defines as -D flags.
if xcode_settings.GetPerTargetSetting(
'INFOPLIST_PREPROCESS', default='NO') == 'YES':
# Create an intermediate file based on the path.
defines = shlex.split(xcode_settings.GetPerTargetSetting(
'INFOPLIST_PREPROCESSOR_DEFINITIONS', default=''))
else:
defines = []
dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath())
extra_env = xcode_settings.GetPerTargetSettings()
return info_plist, dest_plist, defines, extra_env
def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
additional_settings=None):
"""Return the environment variables that Xcode would set. See
http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
for a full list.
Args:
xcode_settings: An XcodeSettings object. If this is None, this function
returns an empty dict.
built_products_dir: Absolute path to the built products dir.
srcroot: Absolute path to the source root.
configuration: The build configuration name.
additional_settings: An optional dict with more values to add to the
result.
"""
if not xcode_settings: return {}
# This function is considered a friend of XcodeSettings, so let it reach into
# its implementation details.
spec = xcode_settings.spec
  # These are filled in on an as-needed basis.
env = {
'BUILT_FRAMEWORKS_DIR' : built_products_dir,
'BUILT_PRODUCTS_DIR' : built_products_dir,
'CONFIGURATION' : configuration,
'PRODUCT_NAME' : xcode_settings.GetProductName(),
# See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME
'SRCROOT' : srcroot,
'SOURCE_ROOT': '${SRCROOT}',
# This is not true for static libraries, but currently the env is only
# written for bundles:
'TARGET_BUILD_DIR' : built_products_dir,
'TEMP_DIR' : '${TMPDIR}',
}
if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
env['SDKROOT'] = xcode_settings._SdkPath(configuration)
else:
env['SDKROOT'] = ''
if spec['type'] in (
'executable', 'static_library', 'shared_library', 'loadable_module'):
env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()
env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath()
env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName()
mach_o_type = xcode_settings.GetMachOType()
if mach_o_type:
env['MACH_O_TYPE'] = mach_o_type
env['PRODUCT_TYPE'] = xcode_settings.GetProductType()
if xcode_settings._IsBundle():
env['CONTENTS_FOLDER_PATH'] = \
xcode_settings.GetBundleContentsFolderPath()
env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \
xcode_settings.GetBundleResourceFolder()
env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath()
env['WRAPPER_NAME'] = xcode_settings.GetWrapperName()
install_name = xcode_settings.GetInstallName()
if install_name:
env['LD_DYLIB_INSTALL_NAME'] = install_name
install_name_base = xcode_settings.GetInstallNameBase()
if install_name_base:
env['DYLIB_INSTALL_NAME_BASE'] = install_name_base
  if XcodeVersion()[0] >= '0500' and not env.get('SDKROOT'):
sdk_root = xcode_settings._SdkRoot(configuration)
if not sdk_root:
sdk_root = xcode_settings._XcodeSdkPath('')
if sdk_root is None:
sdk_root = ''
env['SDKROOT'] = sdk_root
if not additional_settings:
additional_settings = {}
else:
# Flatten lists to strings.
for k in additional_settings:
if not isinstance(additional_settings[k], str):
additional_settings[k] = ' '.join(additional_settings[k])
additional_settings.update(env)
for k in additional_settings:
additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k])
return additional_settings
def _NormalizeEnvVarReferences(str):
"""Takes a string containing variable references in the form ${FOO}, $(FOO),
or $FOO, and returns a string with all variable references in the form ${FOO}.
"""
# $FOO -> ${FOO}
str = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', str)
# $(FOO) -> ${FOO}
matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', str)
for match in matches:
to_replace, variable = match
assert '$(' not in match, '$($(FOO)) variables not supported: ' + match
str = str.replace(to_replace, '${' + variable + '}')
return str
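# Doctest-style examples of the normalization above (added for clarity):
#   _NormalizeEnvVarReferences('$FOO/bar')     == '${FOO}/bar'
#   _NormalizeEnvVarReferences('$(FOO)/$BAR')  == '${FOO}/${BAR}'
#   _NormalizeEnvVarReferences('${FOO} as-is') == '${FOO} as-is'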
def ExpandEnvVars(string, expansions):
"""Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
expansions list. If the variable expands to something that references
another variable, this variable is expanded as well if it's in env --
until no variables present in env are left."""
for k, v in reversed(expansions):
string = string.replace('${' + k + '}', v)
string = string.replace('$(' + k + ')', v)
string = string.replace('$' + k, v)
return string
def _TopologicallySortedEnvVarKeys(env):
"""Takes a dict |env| whose values are strings that can refer to other keys,
for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
env such that key2 is after key1 in L if env[key2] refers to env[key1].
Throws an Exception in case of dependency cycles.
"""
# Since environment variables can refer to other variables, the evaluation
# order is important. Below is the logic to compute the dependency graph
# and sort it.
regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')
def GetEdges(node):
    # Use a definition of edges such that user_of_variable -> used_variable.
# This happens to be easier in this case, since a variable's
# definition contains all variables it references in a single string.
# We can then reverse the result of the topological sort at the end.
# Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
matches = set([v for v in regex.findall(env[node]) if v in env])
for dependee in matches:
assert '${' not in dependee, 'Nested variables not supported: ' + dependee
return matches
try:
# Topologically sort, and then reverse, because we used an edge definition
# that's inverted from the expected result of this function (see comment
# above).
order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
order.reverse()
return order
except gyp.common.CycleError, e:
raise GypError(
'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
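# Worked example of the ordering above (added for clarity; values are
# illustrative):
#   env = {'DST': '${ROOT}/out', 'ROOT': '/tmp'}
#   _TopologicallySortedEnvVarKeys(env) == ['ROOT', 'DST']
# 'DST' refers to 'ROOT', so 'ROOT' is listed (and hence evaluated) first.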
def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot,
configuration, additional_settings=None):
env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
additional_settings)
return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)]
def GetSpecPostbuildCommands(spec, quiet=False):
"""Returns the list of postbuilds explicitly defined on |spec|, in a form
executable by a shell."""
postbuilds = []
for postbuild in spec.get('postbuilds', []):
if not quiet:
postbuilds.append('echo POSTBUILD\\(%s\\) %s' % (
spec['target_name'], postbuild['postbuild_name']))
postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
return postbuilds
def _HasIOSTarget(targets):
"""Returns true if any target contains the iOS specific key
IPHONEOS_DEPLOYMENT_TARGET."""
for target_dict in targets.values():
for config in target_dict['configurations'].values():
if config.get('xcode_settings', {}).get('IPHONEOS_DEPLOYMENT_TARGET'):
return True
return False
def _AddIOSDeviceConfigurations(targets):
"""Clone all targets and append -iphoneos to the name. Configure these targets
to build for iOS devices and use correct architectures for those builds."""
for target_dict in targets.itervalues():
toolset = target_dict['toolset']
configs = target_dict['configurations']
for config_name, config_dict in dict(configs).iteritems():
iphoneos_config_dict = copy.deepcopy(config_dict)
configs[config_name + '-iphoneos'] = iphoneos_config_dict
configs[config_name + '-iphonesimulator'] = config_dict
if toolset == 'target':
iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
return targets
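# Example of the cloning above (illustrative): a target with a single
# 'Debug' configuration ends up with three entries:
#   'Debug'                 - unchanged
#   'Debug-iphonesimulator' - the very same configuration dict as 'Debug'
#   'Debug-iphoneos'        - a deep copy whose SDKROOT is forced to
#                             'iphoneos' (for 'target' toolsets)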
def CloneConfigurationForDeviceAndEmulator(target_dicts):
"""If |target_dicts| contains any iOS targets, automatically create -iphoneos
targets for iOS device builds."""
if _HasIOSTarget(target_dicts):
return _AddIOSDeviceConfigurations(target_dicts)
return target_dicts
| mit | 4,676,288,699,215,644,000 | 38.954573 | 191 | 0.6628 | false |
yiheng/BigDL | spark/dl/src/test/resources/tf/models/rnn.py | 9 | 1652 | #
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tensorflow as tf
import numpy as np
from sys import argv
from tensorflow.contrib import rnn
from util import run_model
def main():
"""
Run this command to generate the pb file
1. mkdir model
2. python rnn.py
"""
tf.set_random_seed(1)
n_steps = 2
n_input = 10
n_hidden = 20
n_output = 5
xs = tf.Variable(tf.random_uniform([4, n_steps, n_input]), name='input', dtype=tf.float32)
xs = tf.identity(xs, "input_node")
weight = tf.Variable(tf.random_uniform([n_hidden, n_output]), name="weight", dtype=tf.float32)
bias = tf.Variable(tf.random_uniform([n_output]), name="bias", dtype=tf.float32)
x = tf.unstack(xs, n_steps, 1)
cell = rnn.BasicRNNCell(n_hidden)
output, states = rnn.static_rnn(cell, x, dtype=tf.float32)
final = tf.nn.bias_add(tf.matmul(output[-1], weight), bias, name='output')
net_outputs = map(lambda x: tf.get_default_graph().get_tensor_by_name(x), argv[2].split(','))
run_model(net_outputs, argv[1], 'rnn', argv[3] == 'True')
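# Hypothetical invocation (added for clarity; the argument meanings are
# inferred from main() above, not documented by the original script):
#   python rnn.py /tmp/model output:0 True
# argv[1]: directory/path handed to run_model, argv[2]: comma-separated
# output tensor names, argv[3]: 'True' to also generate the backward graph.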
if __name__ == "__main__":
main()
| apache-2.0 | 4,298,372,140,444,098,000 | 35.711111 | 98 | 0.679782 | false |
poo12138/gem5-stable | src/arch/x86/isa/insts/general_purpose/control_transfer/__init__.py | 91 | 2404 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
categories = ["call",
"conditional_jump",
"interrupts_and_exceptions",
"jump",
"loop",
"xreturn"]
microcode = ""
for category in categories:
exec "import %s as cat" % category
microcode += cat.microcode
| bsd-3-clause | 8,952,808,968,545,798,000 | 49.083333 | 72 | 0.764143 | false |
xzturn/tensorflow | tensorflow/lite/micro/examples/magic_wand/train/data_prepare.py | 19 | 5911 | # Lint as: python3
# coding=utf-8
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Prepare data for further process.
Read data from "/slope", "/ring", "/wing", "/negative" and save them
in "/data/complete_data" in python dict format.
It will generate a new file with the following structure:
├── data
│ └── complete_data
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import json
import os
import random
LABEL_NAME = "gesture"
DATA_NAME = "accel_ms2_xyz"
folders = ["wing", "ring", "slope"]
names = [
"hyw", "shiyun", "tangsy", "dengyl", "zhangxy", "pengxl", "liucx",
"jiangyh", "xunkai"
]
def prepare_original_data(folder, name, data, file_to_read): # pylint: disable=redefined-outer-name
"""Read collected data from files."""
if folder != "negative":
with open(file_to_read, "r") as f:
lines = csv.reader(f)
data_new = {}
data_new[LABEL_NAME] = folder
data_new[DATA_NAME] = []
data_new["name"] = name
for idx, line in enumerate(lines): # pylint: disable=unused-variable,redefined-outer-name
if len(line) == 3:
if line[2] == "-" and data_new[DATA_NAME]:
data.append(data_new)
data_new = {}
data_new[LABEL_NAME] = folder
data_new[DATA_NAME] = []
data_new["name"] = name
elif line[2] != "-":
data_new[DATA_NAME].append([float(i) for i in line[0:3]])
data.append(data_new)
else:
with open(file_to_read, "r") as f:
lines = csv.reader(f)
data_new = {}
data_new[LABEL_NAME] = folder
data_new[DATA_NAME] = []
data_new["name"] = name
for idx, line in enumerate(lines):
if len(line) == 3 and line[2] != "-":
if len(data_new[DATA_NAME]) == 120:
data.append(data_new)
data_new = {}
data_new[LABEL_NAME] = folder
data_new[DATA_NAME] = []
data_new["name"] = name
else:
data_new[DATA_NAME].append([float(i) for i in line[0:3]])
data.append(data_new)
def generate_negative_data(data): # pylint: disable=redefined-outer-name
"""Generate negative data labeled as 'negative6~8'."""
  # Large movement -> roughly a straight line
for i in range(100):
if i > 80:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative8"}
elif i > 60:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative7"}
else:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative6"}
start_x = (random.random() - 0.5) * 2000
start_y = (random.random() - 0.5) * 2000
start_z = (random.random() - 0.5) * 2000
x_increase = (random.random() - 0.5) * 10
y_increase = (random.random() - 0.5) * 10
z_increase = (random.random() - 0.5) * 10
for j in range(128):
dic[DATA_NAME].append([
start_x + j * x_increase + (random.random() - 0.5) * 6,
start_y + j * y_increase + (random.random() - 0.5) * 6,
start_z + j * z_increase + (random.random() - 0.5) * 6
])
data.append(dic)
# Random
for i in range(100):
if i > 80:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative8"}
elif i > 60:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative7"}
else:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative6"}
for j in range(128):
dic[DATA_NAME].append([(random.random() - 0.5) * 1000,
(random.random() - 0.5) * 1000,
(random.random() - 0.5) * 1000])
data.append(dic)
# Stay still
for i in range(100):
if i > 80:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative8"}
elif i > 60:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative7"}
else:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative6"}
start_x = (random.random() - 0.5) * 2000
start_y = (random.random() - 0.5) * 2000
start_z = (random.random() - 0.5) * 2000
for j in range(128):
dic[DATA_NAME].append([
start_x + (random.random() - 0.5) * 40,
start_y + (random.random() - 0.5) * 40,
start_z + (random.random() - 0.5) * 40
])
data.append(dic)
# Write data to file
def write_data(data_to_write, path):
with open(path, "w") as f:
for idx, item in enumerate(data_to_write): # pylint: disable=unused-variable,redefined-outer-name
dic = json.dumps(item, ensure_ascii=False)
f.write(dic)
f.write("\n")
if __name__ == "__main__":
data = [] # pylint: disable=redefined-outer-name
for idx1, folder in enumerate(folders):
for idx2, name in enumerate(names):
prepare_original_data(folder, name, data,
"./%s/output_%s_%s.txt" % (folder, folder, name))
for idx in range(5):
prepare_original_data("negative", "negative%d" % (idx + 1), data,
"./negative/output_negative_%d.txt" % (idx + 1))
generate_negative_data(data)
print("data_length: " + str(len(data)))
if not os.path.exists("./data"):
os.makedirs("./data")
write_data(data, "./data/complete_data")
| apache-2.0 | 7,116,094,634,051,204,000 | 34.945122 | 102 | 0.571841 | false |
edx/lettuce | tests/integration/lib/Django-1.3/tests/regressiontests/test_client_regress/views.py | 50 | 4115 | from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseRedirect
from django.core.exceptions import SuspiciousOperation
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.utils.encoding import smart_str
from django.core.serializers.json import DjangoJSONEncoder
from django.test.client import CONTENT_TYPE_RE
from django.template import RequestContext
def no_template_view(request):
"A simple view that expects a GET request, and returns a rendered template"
return HttpResponse("No template used. Sample content: twice once twice. Content ends.")
def staff_only_view(request):
"A view that can only be visited by staff. Non staff members get an exception"
if request.user.is_staff:
return HttpResponse('')
else:
raise SuspiciousOperation()
def get_view(request):
"A simple login protected view"
return HttpResponse("Hello world")
get_view = login_required(get_view)
def request_data(request, template='base.html', data='sausage'):
"A simple view that returns the request data in the context"
return render_to_response(template, {
'get-foo':request.GET.get('foo',None),
'get-bar':request.GET.get('bar',None),
'post-foo':request.POST.get('foo',None),
'post-bar':request.POST.get('bar',None),
'request-foo':request.REQUEST.get('foo',None),
'request-bar':request.REQUEST.get('bar',None),
'data': data,
})
def view_with_argument(request, name):
"""A view that takes a string argument
The purpose of this view is to check that if a space is provided in
the argument, the test framework unescapes the %20 before passing
the value to the view.
"""
if name == 'Arthur Dent':
return HttpResponse('Hi, Arthur')
else:
return HttpResponse('Howdy, %s' % name)
def login_protected_redirect_view(request):
"A view that redirects all requests to the GET view"
return HttpResponseRedirect('/test_client_regress/get_view/')
login_protected_redirect_view = login_required(login_protected_redirect_view)
def set_session_view(request):
"A view that sets a session variable"
request.session['session_var'] = 'YES'
return HttpResponse('set_session')
def check_session_view(request):
"A view that reads a session variable"
return HttpResponse(request.session.get('session_var', 'NO'))
def request_methods_view(request):
"A view that responds with the request method"
return HttpResponse('request method: %s' % request.method)
def return_unicode(request):
return render_to_response('unicode.html')
def return_json_file(request):
"A view that parses and returns a JSON string as a file."
match = CONTENT_TYPE_RE.match(request.META['CONTENT_TYPE'])
if match:
charset = match.group(1)
else:
charset = settings.DEFAULT_CHARSET
# This just checks that the uploaded data is JSON
obj_dict = simplejson.loads(request.raw_post_data.decode(charset))
obj_json = simplejson.dumps(obj_dict, encoding=charset,
cls=DjangoJSONEncoder,
ensure_ascii=False)
response = HttpResponse(smart_str(obj_json, encoding=charset), status=200,
mimetype='application/json; charset=' + charset)
response['Content-Disposition'] = 'attachment; filename=testfile.json'
return response
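# Example of the charset handling above (illustrative): posting with
# CONTENT_TYPE 'application/json; charset=utf-16' makes CONTENT_TYPE_RE
# capture 'utf-16', which is used both to decode the request body and to
# encode the JSON attachment in the response.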
def check_headers(request):
"A view that responds with value of the X-ARG-CHECK header"
return HttpResponse('HTTP_X_ARG_CHECK: %s' % request.META.get('HTTP_X_ARG_CHECK', 'Undefined'))
def raw_post_data(request):
"A view that is requested with GET and accesses request.raw_post_data. Refs #14753."
return HttpResponse(request.raw_post_data)
def request_context_view(request):
# Special attribute that won't be present on a plain HttpRequest
request.special_path = request.path
return render_to_response('request_context.html', context_instance=RequestContext(request, {}))
| gpl-3.0 | 7,834,027,810,325,758,000 | 39.343137 | 99 | 0.705954 | false |
jjhelmus/scipy | scipy/stats/tests/test_discrete_basic.py | 38 | 8770 | from __future__ import division, print_function, absolute_import
import numpy.testing as npt
import numpy as np
from scipy._lib.six import xrange
from scipy import stats
from common_tests import (check_normalization, check_moment, check_mean_expect,
check_var_expect, check_skew_expect,
check_kurt_expect, check_entropy,
check_private_entropy, check_edge_support,
check_named_args, check_random_state_property,
check_pickling, check_rvs_broadcast)
from scipy.stats._distr_params import distdiscrete
knf = npt.dec.knownfailureif
vals = ([1, 2, 3, 4], [0.1, 0.2, 0.3, 0.4])
distdiscrete += [[stats.rv_discrete(values=vals), ()]]
def test_discrete_basic():
for distname, arg in distdiscrete:
try:
distfn = getattr(stats, distname)
except TypeError:
distfn = distname
distname = 'sample distribution'
np.random.seed(9765456)
rvs = distfn.rvs(size=2000, *arg)
supp = np.unique(rvs)
m, v = distfn.stats(*arg)
yield check_cdf_ppf, distfn, arg, supp, distname + ' cdf_ppf'
yield check_pmf_cdf, distfn, arg, distname
yield check_oth, distfn, arg, supp, distname + ' oth'
yield check_edge_support, distfn, arg
alpha = 0.01
yield (check_discrete_chisquare, distfn, arg, rvs, alpha,
distname + ' chisquare')
seen = set()
for distname, arg in distdiscrete:
if distname in seen:
continue
seen.add(distname)
try:
distfn = getattr(stats, distname)
except TypeError:
distfn = distname
distname = 'sample distribution'
locscale_defaults = (0,)
meths = [distfn.pmf, distfn.logpmf, distfn.cdf, distfn.logcdf,
distfn.logsf]
# make sure arguments are within support
spec_k = {'randint': 11, 'hypergeom': 4, 'bernoulli': 0, }
k = spec_k.get(distname, 1)
yield check_named_args, distfn, k, arg, locscale_defaults, meths
if distname != 'sample distribution':
yield check_scale_docstring, distfn
yield check_random_state_property, distfn, arg
yield check_pickling, distfn, arg
# Entropy
yield check_entropy, distfn, arg, distname
if distfn.__class__._entropy != stats.rv_discrete._entropy:
yield check_private_entropy, distfn, arg, stats.rv_discrete
def test_moments():
for distname, arg in distdiscrete:
try:
distfn = getattr(stats, distname)
except TypeError:
distfn = distname
distname = 'sample distribution'
m, v, s, k = distfn.stats(*arg, moments='mvsk')
yield check_normalization, distfn, arg, distname
# compare `stats` and `moment` methods
yield check_moment, distfn, arg, m, v, distname
yield check_mean_expect, distfn, arg, m, distname
yield check_var_expect, distfn, arg, m, v, distname
yield check_skew_expect, distfn, arg, m, v, s, distname
cond = distname in ['zipf']
msg = distname + ' fails kurtosis'
yield knf(cond, msg)(check_kurt_expect), distfn, arg, m, v, k, distname
# frozen distr moments
yield check_moment_frozen, distfn, arg, m, 1
yield check_moment_frozen, distfn, arg, v+m*m, 2
def test_rvs_broadcast():
for dist, shape_args in distdiscrete:
# If shape_only is True, it means the _rvs method of the
# distribution uses more than one random number to generate a random
# variate. That means the result of using rvs with broadcasting or
# with a nontrivial size will not necessarily be the same as using the
# numpy.vectorize'd version of rvs(), so we can only compare the shapes
# of the results, not the values.
# Whether or not a distribution is in the following list is an
# implementation detail of the distribution, not a requirement. If
# the implementation the rvs() method of a distribution changes, this
# test might also have to be changed.
shape_only = dist in ['skellam']
try:
distfunc = getattr(stats, dist)
except TypeError:
distfunc = dist
dist = 'rv_discrete(values=(%r, %r))' % (dist.xk, dist.pk)
loc = np.zeros(2)
nargs = distfunc.numargs
allargs = []
bshape = []
# Generate shape parameter arguments...
for k in range(nargs):
shp = (k + 3,) + (1,)*(k + 1)
param_val = shape_args[k]
allargs.append(param_val*np.ones(shp, dtype=np.array(param_val).dtype))
bshape.insert(0, shp[0])
allargs.append(loc)
bshape.append(loc.size)
# bshape holds the expected shape when loc, scale, and the shape
# parameters are all broadcast together.
yield check_rvs_broadcast, distfunc, dist, allargs, bshape, shape_only, [np.int_]
def check_cdf_ppf(distfn, arg, supp, msg):
# cdf is a step function, and ppf(q) = min{k : cdf(k) >= q, k integer}
npt.assert_array_equal(distfn.ppf(distfn.cdf(supp, *arg), *arg),
supp, msg + '-roundtrip')
npt.assert_array_equal(distfn.ppf(distfn.cdf(supp, *arg) - 1e-8, *arg),
supp, msg + '-roundtrip')
if not hasattr(distfn, 'xk'):
supp1 = supp[supp < distfn.b]
npt.assert_array_equal(distfn.ppf(distfn.cdf(supp1, *arg) + 1e-8, *arg),
supp1 + distfn.inc, msg + ' ppf-cdf-next')
# -1e-8 could cause an error if pmf < 1e-8
def check_pmf_cdf(distfn, arg, distname):
if hasattr(distfn, 'xk'):
index = distfn.xk
else:
startind = int(distfn.ppf(0.01, *arg) - 1)
index = list(range(startind, startind + 10))
cdfs = distfn.cdf(index, *arg)
pmfs_cum = distfn.pmf(index, *arg).cumsum()
atol, rtol = 1e-10, 1e-10
if distname == 'skellam': # ncx2 accuracy
atol, rtol = 1e-5, 1e-5
npt.assert_allclose(cdfs - cdfs[0], pmfs_cum - pmfs_cum[0],
atol=atol, rtol=rtol)
def check_moment_frozen(distfn, arg, m, k):
npt.assert_allclose(distfn(*arg).moment(k), m,
atol=1e-10, rtol=1e-10)
def check_oth(distfn, arg, supp, msg):
# checking other methods of distfn
npt.assert_allclose(distfn.sf(supp, *arg), 1. - distfn.cdf(supp, *arg),
atol=1e-10, rtol=1e-10)
q = np.linspace(0.01, 0.99, 20)
npt.assert_allclose(distfn.isf(q, *arg), distfn.ppf(1. - q, *arg),
atol=1e-10, rtol=1e-10)
median_sf = distfn.isf(0.5, *arg)
npt.assert_(distfn.sf(median_sf - 1, *arg) > 0.5)
npt.assert_(distfn.cdf(median_sf + 1, *arg) > 0.5)
def check_discrete_chisquare(distfn, arg, rvs, alpha, msg):
"""Perform chisquare test for random sample of a discrete distribution
Parameters
----------
distname : string
name of distribution function
arg : sequence
parameters of distribution
alpha : float
significance level, threshold for p-value
Returns
-------
result : bool
0 if test passes, 1 if test fails
"""
wsupp = 0.05
# construct intervals with minimum mass `wsupp`.
# intervals are left-half-open as in a cdf difference
lo = int(max(distfn.a, -1000))
distsupport = xrange(lo, int(min(distfn.b, 1000)) + 1)
last = 0
distsupp = [lo]
distmass = []
for ii in distsupport:
current = distfn.cdf(ii, *arg)
if current - last >= wsupp - 1e-14:
distsupp.append(ii)
distmass.append(current - last)
last = current
if current > (1 - wsupp):
break
if distsupp[-1] < distfn.b:
distsupp.append(distfn.b)
distmass.append(1 - last)
distsupp = np.array(distsupp)
distmass = np.array(distmass)
# convert intervals to right-half-open as required by histogram
histsupp = distsupp + 1e-8
histsupp[0] = distfn.a
# find sample frequencies and perform chisquare test
freq, hsupp = np.histogram(rvs, histsupp)
chis, pval = stats.chisquare(np.array(freq), len(rvs)*distmass)
npt.assert_(pval > alpha,
'chisquare - test for %s at arg = %s with pval = %s' %
(msg, str(arg), str(pval)))
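# Hypothetical standalone usage of the helper above (mirrors how the test
# generator calls it; not executed here to avoid import-time side effects):
#   rvs = stats.poisson.rvs(2.0, size=2000)
#   check_discrete_chisquare(stats.poisson, (2.0,), rvs, 0.01, 'poisson')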
def check_scale_docstring(distfn):
if distfn.__doc__ is not None:
# Docstrings can be stripped if interpreter is run with -OO
npt.assert_('scale' not in distfn.__doc__)
if __name__ == "__main__":
npt.run_module_suite()
| bsd-3-clause | 3,297,111,217,996,479,000 | 35.390041 | 89 | 0.589966 | false |
4shadoww/hakkuframework | core/lib/whois/__init__.py | 3 | 2563 | from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import division
from future import standard_library
standard_library.install_aliases()
from builtins import *
import re
import sys
import os
import subprocess
import socket
from .parser import WhoisEntry
from .whois import NICClient
def whois(url, command=False):
# clean domain to expose netloc
ip_match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", url)
if ip_match:
domain = url
try:
result = socket.gethostbyaddr(url)
        except socket.herror:
            pass  # reverse lookup failed; keep using the raw IP as the domain
else:
domain = result[0]
else:
domain = extract_domain(url)
if command:
# try native whois command
r = subprocess.Popen(['whois', domain], stdout=subprocess.PIPE)
text = r.stdout.read()
else:
# try builtin client
nic_client = NICClient()
text = nic_client.whois_lookup(None, domain, 0)
return WhoisEntry.load(domain, text)
def extract_domain(url):
"""Extract the domain from the given URL
>>> extract_domain('http://www.google.com.au/tos.html')
'google.com.au'
>>> extract_domain('www.webscraping.com')
'webscraping.com'
>>> extract_domain('198.252.206.140')
'stackoverflow.com'
>>> extract_domain('102.112.2O7.net')
'2o7.net'
>>> extract_domain('1-0-1-1-1-0-1-1-1-1-1-1-1-.0-0-0-0-0-0-0-0-0-0-0-0-0-10-0-0-0-0-0-0-0-0-0-0-0-0-0.info')
'0-0-0-0-0-0-0-0-0-0-0-0-0-10-0-0-0-0-0-0-0-0-0-0-0-0-0.info'
"""
if re.match(r'\d+\.\d+\.\d+\.\d+', url):
# this is an IP address
return socket.gethostbyaddr(url)[0]
tlds_path = os.path.join(os.getcwd(), os.path.dirname(__file__), 'data', 'tlds.txt')
with open(tlds_path) as tlds_fil:
suffixes = [line.lower().encode('utf-8')
for line in (x.strip() for x in tlds_fil)
if not line.startswith('#')]
if not isinstance(url, str):
url = url.decode('utf-8')
url = re.sub('^.*://', '', url)
url = url.split('/')[0].lower().encode('idna')
domain = []
for section in url.split(b'.'):
if section in suffixes:
domain.append(section)
else:
domain = [section]
return b'.'.join(domain).decode('idna')
if __name__ == '__main__':
try:
url = sys.argv[1]
except IndexError:
print('Usage: %s url' % sys.argv[0])
else:
print(whois(url))
| mit | -1,801,736,246,761,140,700 | 28.802326 | 112 | 0.577058 | false |
Funky7Monkey/WargamingAPI.py | WargamingAPI/enums.py | 1 | 1356 | """
The MIT License (MIT)
Copyright (c) 2017 Funky7Monkey
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from enum import Enum
class Region(Enum):
EU = 1
NA = 2
RU = 3
ASIA = 4
def domain(self):
if self.name == 'NA':
return 'com'
else:
return self.name.lower()
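# Examples (added for clarity): Region.NA.domain() == 'com', while
# Region.EU.domain() == 'eu' and Region.ASIA.domain() == 'asia'.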
class Platform(Enum):
XBOX = 1
PS4 = 2
| mit | 2,903,654,695,266,961,400 | 30.534884 | 75 | 0.735988 | false |
kenshay/ImageScripter | ProgramData/SystemFiles/Python/Lib/site-packages/OpenGL/GL/APPLE/ycbcr_422.py | 9 | 3045 | '''OpenGL extension APPLE.ycbcr_422
This module customises the behaviour of the
OpenGL.raw.GL.APPLE.ycbcr_422 to provide a more
Python-friendly API
Overview (from the spec)
This extension provides a method for GL to read, store and optionally
process textures that are defined in Y'CbCr 422 video formats. This
extension supports the two common Y'CbCr 422 video formats (known by
QuickTime FourCC as '2vuy' and 'yuvs'). These formats represent one of the
most common 16 bit Y'CbCr formats in both standard and reverse byte
ordering. From a client stand point these can be assumed to be decoded
immediately (even though the implementation is free to optimize the data
storage and keep it in the native format) and otherwise function as any
other texture format. The texture command <internalformat> parameter
should normally be specified as RGB, since Y'CbCr is just a form of RGB
data. This extension can be supported with either hardware or software
decoding and it is up to the specific implementation to determine which is
used.
A new <format> is added, YCBCR_422_APPLE. Additionally, to handle the
difference in pixel size and byte ordering for 422 video, the pixel storage
operations treat YCBCR_422_APPLE as a 2 component format using
the UNSIGNED_SHORT_8_8_APPLE or UNSIGNED_SHORT_8_8_REV_APPLE <type>.
The '2vuy' or k2vuyPixelFormat pixel format is an 8-bit 4:2:2 Component
Y'CbCr format. Each 16 bit pixel is represented by an unsigned eight bit
luminance component and two unsigned eight bit chroma components. Each pair
of pixels shares a common set of chroma values. The components are ordered
in memory: Cb, Y0, Cr, Y1. The luminance components have a range of [16,
235], while the chroma value has a range of [16, 240]. This is consistent
with the CCIR601 spec. This format is fairly prevalent on both Mac and Win32
platforms. The equivalent Microsoft FourCC is 'UYVY'. This format is
supported with the UNSIGNED_SHORT_8_8_REV_APPLE type for pixel storage
operations.
The 'yuvs' or kYUVSPixelFormat is an 8-bit 4:2:2 Component Y'CbCr format.
Identical to the k2vuyPixelFormat except each 16 bit word has been byte
swapped. This results in a component ordering of; Y0, Cb, Y1, Cr. This is
most prevalent yuv 4:2:2 format on both Mac and Win32 platforms. The
equivalent Microsoft fourCC is 'YUY2'. This format is supported with the
UNSIGNED_SHORT_8_8_APPLE type for pixel storage operations.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/APPLE/ycbcr_422.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.APPLE.ycbcr_422 import *
from OpenGL.raw.GL.APPLE.ycbcr_422 import _EXTENSION_NAME
def glInitYcbcr422APPLE():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
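# Illustrative upload sketch (added; not part of the autogenerated wrapper).
# A '2vuy' frame could be handed to glTexImage2D roughly as follows; the
# width/height/data names are hypothetical:
#   from OpenGL.GL import glTexImage2D, GL_TEXTURE_2D, GL_RGB
#   glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0,
#                GL_YCBCR_422_APPLE, GL_UNSIGNED_SHORT_8_8_REV_APPLE, data)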
### END AUTOGENERATED SECTION | gpl-3.0 | 6,442,371,417,993,140,000 | 48.129032 | 77 | 0.781281 | false |
abhattad4/Digi-Menu | digimenu2/tests/migrations/models.py | 386 | 1780 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps.registry import Apps
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class CustomModelBase(models.base.ModelBase):
pass
class ModelWithCustomBase(six.with_metaclass(CustomModelBase, models.Model)):
pass
@python_2_unicode_compatible
class UnicodeModel(models.Model):
title = models.CharField('ÚÑÍ¢ÓÐÉ', max_length=20, default='“Ðjáñgó”')
class Meta:
# Disable auto loading of this model as we load it on our own
apps = Apps()
verbose_name = 'úñí©óðé µóðéø'
verbose_name_plural = 'úñí©óðé µóðéøß'
def __str__(self):
return self.title
class Unserializable(object):
"""
An object that migration doesn't know how to serialize.
"""
pass
class UnserializableModel(models.Model):
title = models.CharField(max_length=20, default=Unserializable())
class Meta:
# Disable auto loading of this model as we load it on our own
apps = Apps()
class UnmigratedModel(models.Model):
"""
A model that is in a migration-less app (which this app is
if its migrations directory has not been repointed)
"""
pass
class EmptyManager(models.Manager):
use_in_migrations = True
class FoodQuerySet(models.query.QuerySet):
pass
class BaseFoodManager(models.Manager):
def __init__(self, a, b, c=1, d=2):
super(BaseFoodManager, self).__init__()
self.args = (a, b, c, d)
class FoodManager(BaseFoodManager.from_queryset(FoodQuerySet)):
use_in_migrations = True
class NoMigrationFoodManager(BaseFoodManager.from_queryset(FoodQuerySet)):
pass
| bsd-3-clause | 2,094,950,055,346,228,500 | 22.513514 | 77 | 0.689655 | false |
jmagnusson/flask-admin | examples/geo_alchemy/app.py | 39 | 2044 | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
import flask_admin as admin
from geoalchemy2.types import Geometry
from flask_admin.contrib.geoa import ModelView
# Create application
app = Flask(__name__)
app.config.from_pyfile('config.py')
db = SQLAlchemy(app)
class Point(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
point = db.Column(Geometry("POINT"))
class MultiPoint(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
point = db.Column(Geometry("MULTIPOINT"))
class Polygon(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
point = db.Column(Geometry("POLYGON"))
class MultiPolygon(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
point = db.Column(Geometry("MULTIPOLYGON"))
class LineString(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
point = db.Column(Geometry("LINESTRING"))
class MultiLineString(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
point = db.Column(Geometry("MULTILINESTRING"))
# Flask views
@app.route('/')
def index():
return '<a href="/admin/">Click me to get to Admin!</a>'
# Create admin
admin = admin.Admin(app, name='Example: GeoAlchemy', template_mode='bootstrap3')
# Add views
admin.add_view(ModelView(Point, db.session, category='Points'))
admin.add_view(ModelView(MultiPoint, db.session, category='Points'))
admin.add_view(ModelView(Polygon, db.session, category='Polygons'))
admin.add_view(ModelView(MultiPolygon, db.session, category='Polygons'))
admin.add_view(ModelView(LineString, db.session, category='Lines'))
admin.add_view(ModelView(MultiLineString, db.session, category='Lines'))
if __name__ == '__main__':
db.create_all()
# Start app
app.run(debug=True)
| bsd-3-clause | -169,720,292,398,492,030 | 27.388889 | 80 | 0.704501 | false |
offlinehacker/pyfilesystem | fs/osfs/watch_win32.py | 10 | 16983 | """
fs.osfs.watch_win32
===================
Change watcher support for OSFS, using ReadDirectoryChangesW on win32.
"""
import os
import sys
import errno
import threading
import Queue
import stat
import struct
import ctypes
import ctypes.wintypes
import traceback
import weakref
try:
LPVOID = ctypes.wintypes.LPVOID
except AttributeError:
# LPVOID wasn't defined in Py2.5, guess it was introduced in Py2.6
LPVOID = ctypes.c_void_p
from fs.errors import *
from fs.path import *
from fs.watch import *
INVALID_HANDLE_VALUE = 0xFFFFFFFF
FILE_NOTIFY_CHANGE_FILE_NAME = 0x01
FILE_NOTIFY_CHANGE_DIR_NAME = 0x02
FILE_NOTIFY_CHANGE_ATTRIBUTES = 0x04
FILE_NOTIFY_CHANGE_SIZE = 0x08
FILE_NOTIFY_CHANGE_LAST_WRITE = 0x010
FILE_NOTIFY_CHANGE_LAST_ACCESS = 0x020
FILE_NOTIFY_CHANGE_CREATION = 0x040
FILE_NOTIFY_CHANGE_SECURITY = 0x0100
FILE_LIST_DIRECTORY = 0x01
FILE_SHARE_READ = 0x01
FILE_SHARE_WRITE = 0x02
OPEN_EXISTING = 3
FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
FILE_FLAG_OVERLAPPED = 0x40000000
THREAD_TERMINATE = 0x0001
FILE_ACTION_ADDED = 1
FILE_ACTION_REMOVED = 2
FILE_ACTION_MODIFIED = 3
FILE_ACTION_RENAMED_OLD_NAME = 4
FILE_ACTION_RENAMED_NEW_NAME = 5
FILE_ACTION_OVERFLOW = 0xFFFF
WAIT_ABANDONED = 0x00000080
WAIT_IO_COMPLETION = 0x000000C0
WAIT_OBJECT_0 = 0x00000000
WAIT_TIMEOUT = 0x00000102
def _errcheck_bool(value,func,args):
if not value:
raise ctypes.WinError()
return args
def _errcheck_handle(value,func,args):
if not value:
raise ctypes.WinError()
if value == INVALID_HANDLE_VALUE:
raise ctypes.WinError()
return args
def _errcheck_dword(value,func,args):
if value == 0xFFFFFFFF:
raise ctypes.WinError()
return args
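# Note (added for clarity): ctypes calls each errcheck hook above as
# errcheck(result, func, args) after every foreign-function call; returning
# |args| unchanged keeps normal processing, while raising ctypes.WinError()
# surfaces the OS error code from GetLastError().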
class OVERLAPPED(ctypes.Structure):
_fields_ = [('Internal', LPVOID),
('InternalHigh', LPVOID),
('Offset', ctypes.wintypes.DWORD),
('OffsetHigh', ctypes.wintypes.DWORD),
('Pointer', LPVOID),
('hEvent', ctypes.wintypes.HANDLE),
]
try:
ReadDirectoryChangesW = ctypes.windll.kernel32.ReadDirectoryChangesW
except AttributeError:
raise ImportError("ReadDirectoryChangesW is not available")
ReadDirectoryChangesW.restype = ctypes.wintypes.BOOL
ReadDirectoryChangesW.errcheck = _errcheck_bool
ReadDirectoryChangesW.argtypes = (
ctypes.wintypes.HANDLE, # hDirectory
LPVOID, # lpBuffer
ctypes.wintypes.DWORD, # nBufferLength
ctypes.wintypes.BOOL, # bWatchSubtree
ctypes.wintypes.DWORD, # dwNotifyFilter
ctypes.POINTER(ctypes.wintypes.DWORD), # lpBytesReturned
ctypes.POINTER(OVERLAPPED), # lpOverlapped
LPVOID #FileIOCompletionRoutine # lpCompletionRoutine
)
CreateFileW = ctypes.windll.kernel32.CreateFileW
CreateFileW.restype = ctypes.wintypes.HANDLE
CreateFileW.errcheck = _errcheck_handle
CreateFileW.argtypes = (
ctypes.wintypes.LPCWSTR, # lpFileName
ctypes.wintypes.DWORD, # dwDesiredAccess
ctypes.wintypes.DWORD, # dwShareMode
LPVOID, # lpSecurityAttributes
ctypes.wintypes.DWORD, # dwCreationDisposition
ctypes.wintypes.DWORD, # dwFlagsAndAttributes
ctypes.wintypes.HANDLE # hTemplateFile
)
CloseHandle = ctypes.windll.kernel32.CloseHandle
CloseHandle.restype = ctypes.wintypes.BOOL
CloseHandle.argtypes = (
ctypes.wintypes.HANDLE, # hObject
)
CreateEvent = ctypes.windll.kernel32.CreateEventW
CreateEvent.restype = ctypes.wintypes.HANDLE
CreateEvent.errcheck = _errcheck_handle
CreateEvent.argtypes = (
LPVOID, # lpEventAttributes
ctypes.wintypes.BOOL, # bManualReset
ctypes.wintypes.BOOL, # bInitialState
ctypes.wintypes.LPCWSTR, #lpName
)
SetEvent = ctypes.windll.kernel32.SetEvent
SetEvent.restype = ctypes.wintypes.BOOL
SetEvent.errcheck = _errcheck_bool
SetEvent.argtypes = (
ctypes.wintypes.HANDLE, # hEvent
)
WaitForSingleObjectEx = ctypes.windll.kernel32.WaitForSingleObjectEx
WaitForSingleObjectEx.restype = ctypes.wintypes.DWORD
WaitForSingleObjectEx.errcheck = _errcheck_dword
WaitForSingleObjectEx.argtypes = (
ctypes.wintypes.HANDLE, # hObject
ctypes.wintypes.DWORD, # dwMilliseconds
ctypes.wintypes.BOOL, # bAlertable
)
CreateIoCompletionPort = ctypes.windll.kernel32.CreateIoCompletionPort
CreateIoCompletionPort.restype = ctypes.wintypes.HANDLE
CreateIoCompletionPort.errcheck = _errcheck_handle
CreateIoCompletionPort.argtypes = (
ctypes.wintypes.HANDLE, # FileHandle
ctypes.wintypes.HANDLE, # ExistingCompletionPort
LPVOID, # CompletionKey
ctypes.wintypes.DWORD, # NumberOfConcurrentThreads
)
GetQueuedCompletionStatus = ctypes.windll.kernel32.GetQueuedCompletionStatus
GetQueuedCompletionStatus.restype = ctypes.wintypes.BOOL
GetQueuedCompletionStatus.errcheck = _errcheck_bool
GetQueuedCompletionStatus.argtypes = (
ctypes.wintypes.HANDLE, # CompletionPort
LPVOID, # lpNumberOfBytesTransferred
LPVOID, # lpCompletionKey
ctypes.POINTER(OVERLAPPED), # lpOverlapped
ctypes.wintypes.DWORD, # dwMilliseconds
)
PostQueuedCompletionStatus = ctypes.windll.kernel32.PostQueuedCompletionStatus
PostQueuedCompletionStatus.restype = ctypes.wintypes.BOOL
PostQueuedCompletionStatus.errcheck = _errcheck_bool
PostQueuedCompletionStatus.argtypes = (
ctypes.wintypes.HANDLE, # CompletionPort
ctypes.wintypes.DWORD, # lpNumberOfBytesTransferred
ctypes.wintypes.DWORD, # lpCompletionKey
ctypes.POINTER(OVERLAPPED), # lpOverlapped
)
class WatchedDirectory(object):
def __init__(self,callback,path,flags,recursive=True):
self.path = path
self.flags = flags
self.callback = callback
self.recursive = recursive
self.handle = None
self.error = None
self.handle = CreateFileW(path,
FILE_LIST_DIRECTORY,
FILE_SHARE_READ | FILE_SHARE_WRITE,
None,
OPEN_EXISTING,
FILE_FLAG_BACKUP_SEMANTICS|FILE_FLAG_OVERLAPPED,
None)
self.result = ctypes.create_string_buffer(1024)
        self.overlapped = OVERLAPPED()
self.ready = threading.Event()
def __del__(self):
self.close()
def close(self):
if self.handle is not None:
CloseHandle(self.handle)
self.handle = None
def post(self):
overlapped = self.overlapped
overlapped.Internal = 0
overlapped.InternalHigh = 0
overlapped.Offset = 0
overlapped.OffsetHigh = 0
overlapped.Pointer = 0
overlapped.hEvent = 0
try:
ReadDirectoryChangesW(self.handle,
ctypes.byref(self.result),len(self.result),
self.recursive,self.flags,None,
overlapped,None)
        except WindowsError as e:
self.error = e
self.close()
def complete(self,nbytes):
if nbytes == 0:
self.callback(None,0)
else:
res = self.result.raw[:nbytes]
for (name,action) in self._extract_change_info(res):
if self.callback:
self.callback(os.path.join(self.path,name),action)
def _extract_change_info(self,buffer):
"""Extract the information out of a FILE_NOTIFY_INFORMATION structure."""
pos = 0
while pos < len(buffer):
jump, action, namelen = struct.unpack("iii",buffer[pos:pos+12])
# TODO: this may return a shortname or a longname, with no way
# to tell which. Normalise them somehow?
name = buffer[pos+12:pos+12+namelen].decode("utf16")
yield (name,action)
if not jump:
break
pos += jump
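
# Layout note (added for clarity; not part of the original module): the buffer
# filled by ReadDirectoryChangesW is a chain of FILE_NOTIFY_INFORMATION
# records:
#
#     DWORD NextEntryOffset   # byte offset to the next record, 0 if last
#     DWORD Action            # a FILE_ACTION_* constant
#     DWORD FileNameLength    # length of FileName in *bytes*
#     WCHAR FileName[]        # UTF-16 name, relative to the watched directory
#
# which is why _extract_change_info unpacks "iii" (12 bytes) per record and
# then decodes the following FileNameLength bytes as utf16.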
class WatchThread(threading.Thread):
"""Thread for watching filesystem changes."""
def __init__(self):
super(WatchThread,self).__init__()
self.closed = False
self.watched_directories = {}
self.ready = threading.Event()
self._iocp = None
self._new_watches = Queue.Queue()
def close(self):
if not self.closed:
self.closed = True
if self._iocp:
PostQueuedCompletionStatus(self._iocp,0,1,None)
def add_watcher(self,callback,path,events,recursive):
if os.path.isfile(path):
path = os.path.dirname(path)
watched_dirs = []
for w in self._get_watched_dirs(callback,path,events,recursive):
self.attach_watched_directory(w)
watched_dirs.append(w)
return watched_dirs
def del_watcher(self,w):
w = self.watched_directories.pop(hash(w))
w.callback = None
w.close()
def _get_watched_dirs(self,callback,path,events,recursive):
do_access = False
do_change = False
flags = 0
for evt in events:
if issubclass(ACCESSED,evt):
do_access = True
if issubclass(MODIFIED,evt):
do_change = True
flags |= FILE_NOTIFY_CHANGE_ATTRIBUTES
flags |= FILE_NOTIFY_CHANGE_CREATION
flags |= FILE_NOTIFY_CHANGE_SECURITY
if issubclass(CREATED,evt):
flags |= FILE_NOTIFY_CHANGE_FILE_NAME
flags |= FILE_NOTIFY_CHANGE_DIR_NAME
if issubclass(REMOVED,evt):
flags |= FILE_NOTIFY_CHANGE_FILE_NAME
flags |= FILE_NOTIFY_CHANGE_DIR_NAME
if issubclass(MOVED_SRC,evt):
flags |= FILE_NOTIFY_CHANGE_FILE_NAME
flags |= FILE_NOTIFY_CHANGE_DIR_NAME
if issubclass(MOVED_DST,evt):
flags |= FILE_NOTIFY_CHANGE_FILE_NAME
flags |= FILE_NOTIFY_CHANGE_DIR_NAME
if do_access:
# Separately capture FILE_NOTIFY_CHANGE_LAST_ACCESS events
# so we can reliably generate ACCESSED events.
def on_access_event(path,action):
if action == FILE_ACTION_OVERFLOW:
callback(OVERFLOW,path)
else:
callback(ACCESSED,path)
yield WatchedDirectory(on_access_event,path,
FILE_NOTIFY_CHANGE_LAST_ACCESS,recursive)
if do_change:
# Separately capture FILE_NOTIFY_CHANGE_LAST_WRITE events
# so we can generate MODIFIED(data_changed=True) events.
cflags = FILE_NOTIFY_CHANGE_LAST_WRITE | FILE_NOTIFY_CHANGE_SIZE
def on_change_event(path,action):
if action == FILE_ACTION_OVERFLOW:
callback(OVERFLOW,path)
else:
callback(MODIFIED,path,True)
yield WatchedDirectory(on_change_event,path,cflags,recursive)
if flags:
# All other events we can route through a common handler.
old_name = [None]
def on_misc_event(path,action):
if action == FILE_ACTION_OVERFLOW:
callback(OVERFLOW,path)
elif action == FILE_ACTION_ADDED:
callback(CREATED,path)
elif action == FILE_ACTION_REMOVED:
callback(REMOVED,path)
elif action == FILE_ACTION_MODIFIED:
callback(MODIFIED,path)
elif action == FILE_ACTION_RENAMED_OLD_NAME:
old_name[0] = path
elif action == FILE_ACTION_RENAMED_NEW_NAME:
callback(MOVED_DST,path,old_name[0])
callback(MOVED_SRC,old_name[0],path)
old_name[0] = None
yield WatchedDirectory(on_misc_event,path,flags,recursive)
def run(self):
try:
self._iocp = CreateIoCompletionPort(INVALID_HANDLE_VALUE,None,0,1)
self.ready.set()
nbytes = ctypes.wintypes.DWORD()
iocpkey = ctypes.wintypes.DWORD()
overlapped = OVERLAPPED()
while not self.closed:
try:
GetQueuedCompletionStatus(self._iocp,
ctypes.byref(nbytes),
ctypes.byref(iocpkey),
ctypes.byref(overlapped),
-1)
except WindowsError:
traceback.print_exc()
else:
if iocpkey.value > 1:
try:
w = self.watched_directories[iocpkey.value]
except KeyError:
pass
else:
w.complete(nbytes.value)
w.post()
elif not self.closed:
try:
while True:
w = self._new_watches.get_nowait()
if w.handle is not None:
CreateIoCompletionPort(w.handle,
self._iocp,
hash(w),0)
w.post()
w.ready.set()
except Queue.Empty:
pass
finally:
self.ready.set()
for w in self.watched_directories.itervalues():
w.close()
if self._iocp:
CloseHandle(self._iocp)
def attach_watched_directory(self,w):
self.watched_directories[hash(w)] = w
self._new_watches.put(w)
PostQueuedCompletionStatus(self._iocp,0,1,None)
w.ready.wait()
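
# Usage sketch (illustrative only; the event classes and the callback
# signature are assumed from the surrounding package, this is not part of the
# original module):
#
#     wt = WatchThread()
#     wt.start()
#     wt.ready.wait()
#     def on_event(event_class, path, *args, **kwds):
#         print event_class, path
#     watches = wt.add_watcher(on_event, u"C:\\some\\dir",
#                              (CREATED, MODIFIED, REMOVED), recursive=True)
#     # ... later ...
#     for w in watches:
#         wt.del_watcher(w)
#     wt.close()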
class OSFSWatchMixin(WatchableFSMixin):
"""Mixin providing change-watcher support via pyinotify."""
__watch_lock = threading.Lock()
__watch_thread = None
def close(self):
super(OSFSWatchMixin,self).close()
self.__shutdown_watch_thread(force=True)
self.notify_watchers(CLOSED)
@convert_os_errors
def add_watcher(self,callback,path="/",events=None,recursive=True):
w = super(OSFSWatchMixin,self).add_watcher(callback,path,events,recursive)
syspath = self.getsyspath(path)
wt = self.__get_watch_thread()
# Careful not to create a reference cycle here.
weak_self = weakref.ref(self)
def handle_event(event_class,path,*args,**kwds):
selfref = weak_self()
if selfref is None:
return
try:
path = selfref.unsyspath(path)
except ValueError:
pass
else:
if event_class in (MOVED_SRC,MOVED_DST) and args and args[0]:
args = (selfref.unsyspath(args[0]),) + args[1:]
event = event_class(selfref,path,*args,**kwds)
w.handle_event(event)
w._watch_objs = wt.add_watcher(handle_event,syspath,w.events,w.recursive)
for wd in w._watch_objs:
if wd.error is not None:
self.del_watcher(w)
raise wd.error
return w
@convert_os_errors
def del_watcher(self,watcher_or_callback):
wt = self.__get_watch_thread()
if isinstance(watcher_or_callback,Watcher):
watchers = [watcher_or_callback]
else:
watchers = self._find_watchers(watcher_or_callback)
for watcher in watchers:
for wobj in watcher._watch_objs:
wt.del_watcher(wobj)
super(OSFSWatchMixin,self).del_watcher(watcher)
if not wt.watched_directories:
self.__shutdown_watch_thread()
def __get_watch_thread(self):
"""Get the shared watch thread, initializing if necessary."""
if self.__watch_thread is None:
self.__watch_lock.acquire()
try:
if self.__watch_thread is None:
wt = WatchThread()
wt.start()
wt.ready.wait()
OSFSWatchMixin.__watch_thread = wt
finally:
self.__watch_lock.release()
return self.__watch_thread
def __shutdown_watch_thread(self,force=False):
"""Stop the shared watch manager, if there are no watches left."""
self.__watch_lock.acquire()
try:
if OSFSWatchMixin.__watch_thread is None:
return
if not force and OSFSWatchMixin.__watch_thread.watched_directories:
return
try:
OSFSWatchMixin.__watch_thread.close()
except EnvironmentError:
pass
else:
OSFSWatchMixin.__watch_thread.join()
OSFSWatchMixin.__watch_thread = None
finally:
self.__watch_lock.release()
| bsd-3-clause | -8,146,639,791,469,691,000 | 33.944444 | 82 | 0.590885 | false |
40223209/test | static/Brython3.1.3-20150514-095342/Lib/unittest/case.py | 743 | 48873 | """Test case implementation"""
import sys
import functools
import difflib
import pprint
import re
import warnings
import collections
from . import result
from .util import (strclass, safe_repr, _count_diff_all_purpose,
_count_diff_hashable)
__unittest = True
DIFF_OMITTED = ('\nDiff is %s characters long. '
'Set self.maxDiff to None to see it.')
class SkipTest(Exception):
"""
Raise this exception in a test to skip it.
Usually you can use TestCase.skipTest() or one of the skipping decorators
instead of raising this directly.
"""
class _ExpectedFailure(Exception):
"""
Raise this when a test is expected to fail.
This is an implementation detail.
"""
def __init__(self, exc_info):
super(_ExpectedFailure, self).__init__()
self.exc_info = exc_info
class _UnexpectedSuccess(Exception):
"""
The test was supposed to fail, but it didn't!
"""
class _Outcome(object):
def __init__(self):
self.success = True
self.skipped = None
self.unexpectedSuccess = None
self.expectedFailure = None
self.errors = []
self.failures = []
def _id(obj):
return obj
def skip(reason):
"""
Unconditionally skip a test.
"""
def decorator(test_item):
if not isinstance(test_item, type):
@functools.wraps(test_item)
def skip_wrapper(*args, **kwargs):
raise SkipTest(reason)
test_item = skip_wrapper
test_item.__unittest_skip__ = True
test_item.__unittest_skip_why__ = reason
return test_item
return decorator
def skipIf(condition, reason):
"""
Skip a test if the condition is true.
"""
if condition:
return skip(reason)
return _id
def skipUnless(condition, reason):
"""
Skip a test unless the condition is true.
"""
if not condition:
return skip(reason)
return _id
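
# Example usage of the skip decorators (illustrative; not part of the
# original module, and assumes ``import os, sys`` in the test file):
#
#     class MyTests(TestCase):
#         @skip("demonstrating skipping")
#         def test_nothing(self):
#             self.fail("shouldn't happen")
#
#         @skipIf(sys.platform.startswith("win"), "requires a POSIX platform")
#         def test_posix_only(self):
#             ...
#
#         @skipUnless(hasattr(os, "symlink"), "requires os.symlink")
#         def test_symlinks(self):
#             ...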
def expectedFailure(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
func(*args, **kwargs)
except Exception:
raise _ExpectedFailure(sys.exc_info())
raise _UnexpectedSuccess
return wrapper
class _AssertRaisesBaseContext(object):
def __init__(self, expected, test_case, callable_obj=None,
expected_regex=None):
self.expected = expected
self.test_case = test_case
if callable_obj is not None:
try:
self.obj_name = callable_obj.__name__
except AttributeError:
self.obj_name = str(callable_obj)
else:
self.obj_name = None
if isinstance(expected_regex, (bytes, str)):
expected_regex = re.compile(expected_regex)
self.expected_regex = expected_regex
self.msg = None
def _raiseFailure(self, standardMsg):
msg = self.test_case._formatMessage(self.msg, standardMsg)
raise self.test_case.failureException(msg)
def handle(self, name, callable_obj, args, kwargs):
"""
If callable_obj is None, assertRaises/Warns is being used as a
context manager, so check for a 'msg' kwarg and return self.
If callable_obj is not None, call it passing args and kwargs.
"""
if callable_obj is None:
self.msg = kwargs.pop('msg', None)
return self
with self:
callable_obj(*args, **kwargs)
class _AssertRaisesContext(_AssertRaisesBaseContext):
"""A context manager used to implement TestCase.assertRaises* methods."""
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if exc_type is None:
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
if self.obj_name:
self._raiseFailure("{} not raised by {}".format(exc_name,
self.obj_name))
else:
self._raiseFailure("{} not raised".format(exc_name))
if not issubclass(exc_type, self.expected):
# let unexpected exceptions pass through
return False
# store exception, without traceback, for later retrieval
self.exception = exc_value.with_traceback(None)
if self.expected_regex is None:
return True
expected_regex = self.expected_regex
if not expected_regex.search(str(exc_value)):
self._raiseFailure('"{}" does not match "{}"'.format(
expected_regex.pattern, str(exc_value)))
return True
class _AssertWarnsContext(_AssertRaisesBaseContext):
"""A context manager used to implement TestCase.assertWarns* methods."""
def __enter__(self):
# The __warningregistry__'s need to be in a pristine state for tests
# to work properly.
for v in sys.modules.values():
if getattr(v, '__warningregistry__', None):
v.__warningregistry__ = {}
self.warnings_manager = warnings.catch_warnings(record=True)
self.warnings = self.warnings_manager.__enter__()
warnings.simplefilter("always", self.expected)
return self
def __exit__(self, exc_type, exc_value, tb):
self.warnings_manager.__exit__(exc_type, exc_value, tb)
if exc_type is not None:
# let unexpected exceptions pass through
return
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
first_matching = None
for m in self.warnings:
w = m.message
if not isinstance(w, self.expected):
continue
if first_matching is None:
first_matching = w
if (self.expected_regex is not None and
not self.expected_regex.search(str(w))):
continue
# store warning for later retrieval
self.warning = w
self.filename = m.filename
self.lineno = m.lineno
return
# Now we simply try to choose a helpful failure message
if first_matching is not None:
self._raiseFailure('"{}" does not match "{}"'.format(
self.expected_regex.pattern, str(first_matching)))
if self.obj_name:
self._raiseFailure("{} not triggered by {}".format(exc_name,
self.obj_name))
else:
self._raiseFailure("{} not triggered".format(exc_name))
class TestCase(object):
"""A class whose instances are single test cases.
By default, the test code itself should be placed in a method named
'runTest'.
If the fixture may be used for many test cases, create as
many test methods as are needed. When instantiating such a TestCase
subclass, specify in the constructor arguments the name of the test method
that the instance is to execute.
Test authors should subclass TestCase for their own tests. Construction
and deconstruction of the test's environment ('fixture') can be
implemented by overriding the 'setUp' and 'tearDown' methods respectively.
If it is necessary to override the __init__ method, the base class
__init__ method must always be called. It is important that subclasses
should not change the signature of their __init__ method, since instances
of the classes are instantiated automatically by parts of the framework
in order to be run.
When subclassing TestCase, you can set these attributes:
* failureException: determines which exception will be raised when
the instance's assertion methods fail; test methods raising this
exception will be deemed to have 'failed' rather than 'errored'.
* longMessage: determines whether long messages (including repr of
objects used in assert methods) will be printed on failure in *addition*
to any explicit message passed.
* maxDiff: sets the maximum length of a diff in failure messages
by assert methods using difflib. It is looked up as an instance
attribute so can be configured by individual tests if required.
"""
failureException = AssertionError
longMessage = True
maxDiff = 80*8
# If a string is longer than _diffThreshold, use normal comparison instead
# of difflib. See #11763.
_diffThreshold = 2**16
# Attribute used by TestSuite for classSetUp
_classSetupFailed = False
def __init__(self, methodName='runTest'):
"""Create an instance of the class that will use the named test
method when executed. Raises a ValueError if the instance does
not have a method with the specified name.
"""
self._testMethodName = methodName
self._outcomeForDoCleanups = None
self._testMethodDoc = 'No test'
try:
testMethod = getattr(self, methodName)
except AttributeError:
if methodName != 'runTest':
# we allow instantiation with no explicit method name
# but not an *incorrect* or missing method name
raise ValueError("no such test method in %s: %s" %
(self.__class__, methodName))
else:
self._testMethodDoc = testMethod.__doc__
self._cleanups = []
# Map types to custom assertEqual functions that will compare
# instances of said type in more detail to generate a more useful
# error message.
self._type_equality_funcs = {}
self.addTypeEqualityFunc(dict, 'assertDictEqual')
self.addTypeEqualityFunc(list, 'assertListEqual')
self.addTypeEqualityFunc(tuple, 'assertTupleEqual')
self.addTypeEqualityFunc(set, 'assertSetEqual')
self.addTypeEqualityFunc(frozenset, 'assertSetEqual')
self.addTypeEqualityFunc(str, 'assertMultiLineEqual')
def addTypeEqualityFunc(self, typeobj, function):
"""Add a type specific assertEqual style function to compare a type.
This method is for use by TestCase subclasses that need to register
their own type equality functions to provide nicer error messages.
Args:
typeobj: The data type to call this function on when both values
are of the same type in assertEqual().
function: The callable taking two arguments and an optional
msg= argument that raises self.failureException with a
useful error message when the two arguments are not equal.
"""
self._type_equality_funcs[typeobj] = function
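    # Illustrative example (not part of the original module): registering a
    # custom comparison for a hypothetical Point class so assertEqual()
    # produces a domain-specific failure message:
    #
    #     def assertPointEqual(self, a, b, msg=None):
    #         if (a.x, a.y) != (b.x, b.y):
    #             self.fail(self._formatMessage(
    #                 msg, 'Points differ: %r != %r' % (a, b)))
    #
    #     class PointTests(TestCase):
    #         def setUp(self):
    #             self.addTypeEqualityFunc(Point, self.assertPointEqual)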
def addCleanup(self, function, *args, **kwargs):
"""Add a function, with arguments, to be called when the test is
completed. Functions added are called on a LIFO basis and are
called after tearDown on test failure or success.
Cleanup items are called even if setUp fails (unlike tearDown)."""
self._cleanups.append((function, args, kwargs))
def setUp(self):
"Hook method for setting up the test fixture before exercising it."
pass
def tearDown(self):
"Hook method for deconstructing the test fixture after testing it."
pass
@classmethod
def setUpClass(cls):
"Hook method for setting up class fixture before running tests in the class."
@classmethod
def tearDownClass(cls):
"Hook method for deconstructing the class fixture after running all tests in the class."
def countTestCases(self):
return 1
def defaultTestResult(self):
return result.TestResult()
def shortDescription(self):
"""Returns a one-line description of the test, or None if no
description has been provided.
The default implementation of this method returns the first line of
the specified test method's docstring.
"""
doc = self._testMethodDoc
return doc and doc.split("\n")[0].strip() or None
def id(self):
return "%s.%s" % (strclass(self.__class__), self._testMethodName)
def __eq__(self, other):
if type(self) is not type(other):
return NotImplemented
return self._testMethodName == other._testMethodName
def __hash__(self):
return hash((type(self), self._testMethodName))
def __str__(self):
return "%s (%s)" % (self._testMethodName, strclass(self.__class__))
def __repr__(self):
return "<%s testMethod=%s>" % \
(strclass(self.__class__), self._testMethodName)
def _addSkip(self, result, reason):
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None:
addSkip(self, reason)
else:
warnings.warn("TestResult has no addSkip method, skips not reported",
RuntimeWarning, 2)
result.addSuccess(self)
def _executeTestPart(self, function, outcome, isTest=False):
try:
function()
except KeyboardInterrupt:
raise
except SkipTest as e:
outcome.success = False
outcome.skipped = str(e)
except _UnexpectedSuccess:
exc_info = sys.exc_info()
outcome.success = False
if isTest:
outcome.unexpectedSuccess = exc_info
else:
outcome.errors.append(exc_info)
except _ExpectedFailure:
outcome.success = False
exc_info = sys.exc_info()
if isTest:
outcome.expectedFailure = exc_info
else:
outcome.errors.append(exc_info)
        except self.failureException:
            outcome.success = False
            outcome.failures.append(sys.exc_info())
except:
outcome.success = False
outcome.errors.append(sys.exc_info())
def run(self, result=None):
orig_result = result
if result is None:
result = self.defaultTestResult()
startTestRun = getattr(result, 'startTestRun', None)
if startTestRun is not None:
startTestRun()
result.startTest(self)
testMethod = getattr(self, self._testMethodName)
if (getattr(self.__class__, "__unittest_skip__", False) or
getattr(testMethod, "__unittest_skip__", False)):
# If the class or method was skipped.
try:
skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
or getattr(testMethod, '__unittest_skip_why__', ''))
self._addSkip(result, skip_why)
finally:
result.stopTest(self)
return
try:
outcome = _Outcome()
self._outcomeForDoCleanups = outcome
self._executeTestPart(self.setUp, outcome)
if outcome.success:
self._executeTestPart(testMethod, outcome, isTest=True)
self._executeTestPart(self.tearDown, outcome)
self.doCleanups()
if outcome.success:
result.addSuccess(self)
else:
if outcome.skipped is not None:
self._addSkip(result, outcome.skipped)
for exc_info in outcome.errors:
result.addError(self, exc_info)
for exc_info in outcome.failures:
result.addFailure(self, exc_info)
if outcome.unexpectedSuccess is not None:
addUnexpectedSuccess = getattr(result, 'addUnexpectedSuccess', None)
if addUnexpectedSuccess is not None:
addUnexpectedSuccess(self)
else:
warnings.warn("TestResult has no addUnexpectedSuccess method, reporting as failures",
RuntimeWarning)
result.addFailure(self, outcome.unexpectedSuccess)
if outcome.expectedFailure is not None:
addExpectedFailure = getattr(result, 'addExpectedFailure', None)
if addExpectedFailure is not None:
addExpectedFailure(self, outcome.expectedFailure)
else:
warnings.warn("TestResult has no addExpectedFailure method, reporting as passes",
RuntimeWarning)
result.addSuccess(self)
return result
finally:
result.stopTest(self)
if orig_result is None:
stopTestRun = getattr(result, 'stopTestRun', None)
if stopTestRun is not None:
stopTestRun()
def doCleanups(self):
"""Execute all cleanup functions. Normally called for you after
tearDown."""
outcome = self._outcomeForDoCleanups or _Outcome()
while self._cleanups:
function, args, kwargs = self._cleanups.pop()
part = lambda: function(*args, **kwargs)
self._executeTestPart(part, outcome)
# return this for backwards compatibility
        # even though we no longer use it internally
return outcome.success
def __call__(self, *args, **kwds):
return self.run(*args, **kwds)
def debug(self):
"""Run the test without collecting errors in a TestResult"""
self.setUp()
getattr(self, self._testMethodName)()
self.tearDown()
while self._cleanups:
function, args, kwargs = self._cleanups.pop(-1)
function(*args, **kwargs)
def skipTest(self, reason):
"""Skip this test."""
raise SkipTest(reason)
def fail(self, msg=None):
"""Fail immediately, with the given message."""
raise self.failureException(msg)
def assertFalse(self, expr, msg=None):
"""Check that the expression is false."""
if expr:
msg = self._formatMessage(msg, "%s is not false" % safe_repr(expr))
raise self.failureException(msg)
def assertTrue(self, expr, msg=None):
"""Check that the expression is true."""
if not expr:
msg = self._formatMessage(msg, "%s is not true" % safe_repr(expr))
raise self.failureException(msg)
def _formatMessage(self, msg, standardMsg):
"""Honour the longMessage attribute when generating failure messages.
If longMessage is False this means:
* Use only an explicit message if it is provided
* Otherwise use the standard message for the assert
If longMessage is True:
* Use the standard message
* If an explicit message is provided, plus ' : ' and the explicit message
"""
if not self.longMessage:
return msg or standardMsg
if msg is None:
return standardMsg
try:
# don't switch to '{}' formatting in Python 2.X
# it changes the way unicode input is handled
return '%s : %s' % (standardMsg, msg)
except UnicodeDecodeError:
return '%s : %s' % (safe_repr(standardMsg), safe_repr(msg))
def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
"""Fail unless an exception of class excClass is raised
by callableObj when invoked with arguments args and keyword
arguments kwargs. If a different type of exception is
raised, it will not be caught, and the test case will be
deemed to have suffered an error, exactly as for an
unexpected exception.
If called with callableObj omitted or None, will return a
context object used like this::
with self.assertRaises(SomeException):
do_something()
An optional keyword argument 'msg' can be provided when assertRaises
is used as a context object.
The context manager keeps a reference to the exception as
the 'exception' attribute. This allows you to inspect the
exception after the assertion::
with self.assertRaises(SomeException) as cm:
do_something()
the_exception = cm.exception
self.assertEqual(the_exception.error_code, 3)
"""
context = _AssertRaisesContext(excClass, self, callableObj)
return context.handle('assertRaises', callableObj, args, kwargs)
def assertWarns(self, expected_warning, callable_obj=None, *args, **kwargs):
"""Fail unless a warning of class warnClass is triggered
by callable_obj when invoked with arguments args and keyword
arguments kwargs. If a different type of warning is
triggered, it will not be handled: depending on the other
warning filtering rules in effect, it might be silenced, printed
out, or raised as an exception.
If called with callable_obj omitted or None, will return a
context object used like this::
with self.assertWarns(SomeWarning):
do_something()
An optional keyword argument 'msg' can be provided when assertWarns
is used as a context object.
The context manager keeps a reference to the first matching
warning as the 'warning' attribute; similarly, the 'filename'
and 'lineno' attributes give you information about the line
of Python code from which the warning was triggered.
This allows you to inspect the warning after the assertion::
with self.assertWarns(SomeWarning) as cm:
do_something()
the_warning = cm.warning
self.assertEqual(the_warning.some_attribute, 147)
"""
context = _AssertWarnsContext(expected_warning, self, callable_obj)
return context.handle('assertWarns', callable_obj, args, kwargs)
def _getAssertEqualityFunc(self, first, second):
"""Get a detailed comparison function for the types of the two args.
Returns: A callable accepting (first, second, msg=None) that will
raise a failure exception if first != second with a useful human
readable error message for those types.
"""
#
# NOTE(gregory.p.smith): I considered isinstance(first, type(second))
# and vice versa. I opted for the conservative approach in case
# subclasses are not intended to be compared in detail to their super
# class instances using a type equality func. This means testing
# subtypes won't automagically use the detailed comparison. Callers
# should use their type specific assertSpamEqual method to compare
# subclasses if the detailed comparison is desired and appropriate.
# See the discussion in http://bugs.python.org/issue2578.
#
if type(first) is type(second):
asserter = self._type_equality_funcs.get(type(first))
if asserter is not None:
if isinstance(asserter, str):
asserter = getattr(self, asserter)
return asserter
return self._baseAssertEqual
def _baseAssertEqual(self, first, second, msg=None):
"""The default assertEqual implementation, not type specific."""
if not first == second:
standardMsg = '%s != %s' % (safe_repr(first), safe_repr(second))
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertEqual(self, first, second, msg=None):
"""Fail if the two objects are unequal as determined by the '=='
operator.
"""
assertion_func = self._getAssertEqualityFunc(first, second)
assertion_func(first, second, msg=msg)
def assertNotEqual(self, first, second, msg=None):
"""Fail if the two objects are equal as determined by the '!='
operator.
"""
if not first != second:
msg = self._formatMessage(msg, '%s == %s' % (safe_repr(first),
safe_repr(second)))
raise self.failureException(msg)
def assertAlmostEqual(self, first, second, places=None, msg=None,
delta=None):
"""Fail if the two objects are unequal as determined by their
difference rounded to the given number of decimal places
        (default 7) and comparing to zero, or by comparing that the
        difference between the two objects is more than the given delta.
Note that decimal places (from zero) are usually not the same
        as significant digits (measured from the most significant digit).
If the two objects compare equal then they will automatically
compare almost equal.
"""
if first == second:
# shortcut
return
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if abs(first - second) <= delta:
return
standardMsg = '%s != %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if round(abs(second-first), places) == 0:
return
standardMsg = '%s != %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertNotAlmostEqual(self, first, second, places=None, msg=None,
delta=None):
"""Fail if the two objects are equal as determined by their
difference rounded to the given number of decimal places
        (default 7) and comparing to zero, or by comparing that the
        difference between the two objects is less than the given delta.
Note that decimal places (from zero) are usually not the same
        as significant digits (measured from the most significant digit).
Objects that are equal automatically fail.
"""
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if not (first == second) and abs(first - second) > delta:
return
standardMsg = '%s == %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if not (first == second) and round(abs(second-first), places) != 0:
return
standardMsg = '%s == %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
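    # Illustrative examples of the places/delta semantics above (not part of
    # the original module); with the default places=7 a difference of 1e-8
    # rounds to zero:
    #
    #     self.assertAlmostEqual(1.00000001, 1.0)        # passes (places=7)
    #     self.assertAlmostEqual(1.001, 1.0, places=2)   # passes
    #     self.assertAlmostEqual(1.1, 1.0, delta=0.2)    # passes
    #     self.assertNotAlmostEqual(1.1, 1.0)            # passes (places=7)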
def assertSequenceEqual(self, seq1, seq2, msg=None, seq_type=None):
"""An equality assertion for ordered sequences (like lists and tuples).
For the purposes of this function, a valid ordered sequence type is one
which can be indexed, has a length, and has an equality operator.
Args:
seq1: The first sequence to compare.
seq2: The second sequence to compare.
seq_type: The expected datatype of the sequences, or None if no
datatype should be enforced.
msg: Optional message to use on failure instead of a list of
differences.
"""
if seq_type is not None:
seq_type_name = seq_type.__name__
if not isinstance(seq1, seq_type):
raise self.failureException('First sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq1)))
if not isinstance(seq2, seq_type):
raise self.failureException('Second sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq2)))
else:
seq_type_name = "sequence"
differing = None
try:
len1 = len(seq1)
except (TypeError, NotImplementedError):
differing = 'First %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
try:
len2 = len(seq2)
except (TypeError, NotImplementedError):
differing = 'Second %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
if seq1 == seq2:
return
seq1_repr = safe_repr(seq1)
seq2_repr = safe_repr(seq2)
if len(seq1_repr) > 30:
seq1_repr = seq1_repr[:30] + '...'
if len(seq2_repr) > 30:
seq2_repr = seq2_repr[:30] + '...'
elements = (seq_type_name.capitalize(), seq1_repr, seq2_repr)
differing = '%ss differ: %s != %s\n' % elements
for i in range(min(len1, len2)):
try:
item1 = seq1[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of first %s\n' %
(i, seq_type_name))
break
try:
item2 = seq2[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of second %s\n' %
(i, seq_type_name))
break
if item1 != item2:
differing += ('\nFirst differing element %d:\n%s\n%s\n' %
(i, item1, item2))
break
else:
if (len1 == len2 and seq_type is None and
type(seq1) != type(seq2)):
# The sequences are the same, but have differing types.
return
if len1 > len2:
differing += ('\nFirst %s contains %d additional '
'elements.\n' % (seq_type_name, len1 - len2))
try:
differing += ('First extra element %d:\n%s\n' %
(len2, seq1[len2]))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of first %s\n' % (len2, seq_type_name))
elif len1 < len2:
differing += ('\nSecond %s contains %d additional '
'elements.\n' % (seq_type_name, len2 - len1))
try:
differing += ('First extra element %d:\n%s\n' %
(len1, seq2[len1]))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of second %s\n' % (len1, seq_type_name))
standardMsg = differing
diffMsg = '\n' + '\n'.join(
difflib.ndiff(pprint.pformat(seq1).splitlines(),
pprint.pformat(seq2).splitlines()))
standardMsg = self._truncateMessage(standardMsg, diffMsg)
msg = self._formatMessage(msg, standardMsg)
self.fail(msg)
def _truncateMessage(self, message, diff):
max_diff = self.maxDiff
if max_diff is None or len(diff) <= max_diff:
return message + diff
return message + (DIFF_OMITTED % len(diff))
def assertListEqual(self, list1, list2, msg=None):
"""A list-specific equality assertion.
Args:
list1: The first list to compare.
list2: The second list to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(list1, list2, msg, seq_type=list)
def assertTupleEqual(self, tuple1, tuple2, msg=None):
"""A tuple-specific equality assertion.
Args:
tuple1: The first tuple to compare.
tuple2: The second tuple to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(tuple1, tuple2, msg, seq_type=tuple)
def assertSetEqual(self, set1, set2, msg=None):
"""A set-specific equality assertion.
Args:
set1: The first set to compare.
set2: The second set to compare.
msg: Optional message to use on failure instead of a list of
differences.
assertSetEqual uses ducktyping to support different types of sets, and
is optimized for sets specifically (parameters must support a
difference method).
"""
try:
difference1 = set1.difference(set2)
except TypeError as e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError as e:
self.fail('first argument does not support set difference: %s' % e)
try:
difference2 = set2.difference(set1)
except TypeError as e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError as e:
self.fail('second argument does not support set difference: %s' % e)
if not (difference1 or difference2):
return
lines = []
if difference1:
lines.append('Items in the first set but not the second:')
for item in difference1:
lines.append(repr(item))
if difference2:
lines.append('Items in the second set but not the first:')
for item in difference2:
lines.append(repr(item))
standardMsg = '\n'.join(lines)
self.fail(self._formatMessage(msg, standardMsg))
def assertIn(self, member, container, msg=None):
"""Just like self.assertTrue(a in b), but with a nicer default message."""
if member not in container:
standardMsg = '%s not found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIn(self, member, container, msg=None):
"""Just like self.assertTrue(a not in b), but with a nicer default message."""
if member in container:
standardMsg = '%s unexpectedly found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertIs(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is b), but with a nicer default message."""
if expr1 is not expr2:
standardMsg = '%s is not %s' % (safe_repr(expr1),
safe_repr(expr2))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNot(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is not b), but with a nicer default message."""
if expr1 is expr2:
standardMsg = 'unexpectedly identical: %s' % (safe_repr(expr1),)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictEqual(self, d1, d2, msg=None):
self.assertIsInstance(d1, dict, 'First argument is not a dictionary')
self.assertIsInstance(d2, dict, 'Second argument is not a dictionary')
if d1 != d2:
standardMsg = '%s != %s' % (safe_repr(d1, True), safe_repr(d2, True))
diff = ('\n' + '\n'.join(difflib.ndiff(
pprint.pformat(d1).splitlines(),
pprint.pformat(d2).splitlines())))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictContainsSubset(self, subset, dictionary, msg=None):
"""Checks whether dictionary is a superset of subset."""
warnings.warn('assertDictContainsSubset is deprecated',
DeprecationWarning)
missing = []
mismatched = []
for key, value in subset.items():
if key not in dictionary:
missing.append(key)
elif value != dictionary[key]:
mismatched.append('%s, expected: %s, actual: %s' %
(safe_repr(key), safe_repr(value),
safe_repr(dictionary[key])))
if not (missing or mismatched):
return
standardMsg = ''
if missing:
standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
missing)
if mismatched:
if standardMsg:
standardMsg += '; '
standardMsg += 'Mismatched values: %s' % ','.join(mismatched)
self.fail(self._formatMessage(msg, standardMsg))
def assertCountEqual(self, first, second, msg=None):
"""An unordered sequence comparison asserting that the same elements,
regardless of order. If the same element occurs more than once,
it verifies that the elements occur the same number of times.
self.assertEqual(Counter(list(first)),
Counter(list(second)))
Example:
- [0, 1, 1] and [1, 0, 1] compare equal.
- [0, 0, 1] and [0, 1] compare unequal.
"""
first_seq, second_seq = list(first), list(second)
try:
first = collections.Counter(first_seq)
second = collections.Counter(second_seq)
except TypeError:
# Handle case with unhashable elements
differences = _count_diff_all_purpose(first_seq, second_seq)
else:
if first == second:
return
differences = _count_diff_hashable(first_seq, second_seq)
if differences:
standardMsg = 'Element counts were not equal:\n'
lines = ['First has %d, Second has %d: %r' % diff for diff in differences]
diffMsg = '\n'.join(lines)
standardMsg = self._truncateMessage(standardMsg, diffMsg)
msg = self._formatMessage(msg, standardMsg)
self.fail(msg)
def assertMultiLineEqual(self, first, second, msg=None):
"""Assert that two multi-line strings are equal."""
self.assertIsInstance(first, str, 'First argument is not a string')
self.assertIsInstance(second, str, 'Second argument is not a string')
if first != second:
# don't use difflib if the strings are too long
if (len(first) > self._diffThreshold or
len(second) > self._diffThreshold):
self._baseAssertEqual(first, second, msg)
firstlines = first.splitlines(keepends=True)
secondlines = second.splitlines(keepends=True)
if len(firstlines) == 1 and first.strip('\r\n') == first:
firstlines = [first + '\n']
secondlines = [second + '\n']
standardMsg = '%s != %s' % (safe_repr(first, True),
safe_repr(second, True))
diff = '\n' + ''.join(difflib.ndiff(firstlines, secondlines))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertLess(self, a, b, msg=None):
"""Just like self.assertTrue(a < b), but with a nicer default message."""
if not a < b:
standardMsg = '%s not less than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertLessEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a <= b), but with a nicer default message."""
if not a <= b:
standardMsg = '%s not less than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreater(self, a, b, msg=None):
"""Just like self.assertTrue(a > b), but with a nicer default message."""
if not a > b:
standardMsg = '%s not greater than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreaterEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a >= b), but with a nicer default message."""
if not a >= b:
standardMsg = '%s not greater than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNone(self, obj, msg=None):
"""Same as self.assertTrue(obj is None), with a nicer default message."""
if obj is not None:
standardMsg = '%s is not None' % (safe_repr(obj),)
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNotNone(self, obj, msg=None):
"""Included for symmetry with assertIsNone."""
if obj is None:
standardMsg = 'unexpectedly None'
self.fail(self._formatMessage(msg, standardMsg))
def assertIsInstance(self, obj, cls, msg=None):
"""Same as self.assertTrue(isinstance(obj, cls)), with a nicer
default message."""
if not isinstance(obj, cls):
standardMsg = '%s is not an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIsInstance(self, obj, cls, msg=None):
"""Included for symmetry with assertIsInstance."""
if isinstance(obj, cls):
standardMsg = '%s is an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertRaisesRegex(self, expected_exception, expected_regex,
callable_obj=None, *args, **kwargs):
"""Asserts that the message in a raised exception matches a regex.
Args:
expected_exception: Exception class expected to be raised.
expected_regex: Regex (re pattern object or string) expected
to be found in error message.
callable_obj: Function to be called.
msg: Optional message used in case of failure. Can only be used
when assertRaisesRegex is used as a context manager.
args: Extra args.
kwargs: Extra kwargs.
"""
context = _AssertRaisesContext(expected_exception, self, callable_obj,
expected_regex)
return context.handle('assertRaisesRegex', callable_obj, args, kwargs)
def assertWarnsRegex(self, expected_warning, expected_regex,
callable_obj=None, *args, **kwargs):
"""Asserts that the message in a triggered warning matches a regexp.
Basic functioning is similar to assertWarns() with the addition
that only warnings whose messages also match the regular expression
are considered successful matches.
Args:
expected_warning: Warning class expected to be triggered.
expected_regex: Regex (re pattern object or string) expected
to be found in error message.
callable_obj: Function to be called.
msg: Optional message used in case of failure. Can only be used
when assertWarnsRegex is used as a context manager.
args: Extra args.
kwargs: Extra kwargs.
"""
context = _AssertWarnsContext(expected_warning, self, callable_obj,
expected_regex)
return context.handle('assertWarnsRegex', callable_obj, args, kwargs)
def assertRegex(self, text, expected_regex, msg=None):
"""Fail the test unless the text matches the regular expression."""
if isinstance(expected_regex, (str, bytes)):
assert expected_regex, "expected_regex must not be empty."
expected_regex = re.compile(expected_regex)
if not expected_regex.search(text):
msg = msg or "Regex didn't match"
msg = '%s: %r not found in %r' % (msg, expected_regex.pattern, text)
raise self.failureException(msg)
def assertNotRegex(self, text, unexpected_regex, msg=None):
"""Fail the test if the text matches the regular expression."""
if isinstance(unexpected_regex, (str, bytes)):
unexpected_regex = re.compile(unexpected_regex)
match = unexpected_regex.search(text)
if match:
msg = msg or "Regex matched"
msg = '%s: %r matches %r in %r' % (msg,
text[match.start():match.end()],
unexpected_regex.pattern,
text)
raise self.failureException(msg)
def _deprecate(original_func):
def deprecated_func(*args, **kwargs):
warnings.warn(
'Please use {0} instead.'.format(original_func.__name__),
DeprecationWarning, 2)
return original_func(*args, **kwargs)
return deprecated_func
# see #9424
failUnlessEqual = assertEquals = _deprecate(assertEqual)
failIfEqual = assertNotEquals = _deprecate(assertNotEqual)
failUnlessAlmostEqual = assertAlmostEquals = _deprecate(assertAlmostEqual)
failIfAlmostEqual = assertNotAlmostEquals = _deprecate(assertNotAlmostEqual)
failUnless = assert_ = _deprecate(assertTrue)
failUnlessRaises = _deprecate(assertRaises)
failIf = _deprecate(assertFalse)
assertRaisesRegexp = _deprecate(assertRaisesRegex)
assertRegexpMatches = _deprecate(assertRegex)
class FunctionTestCase(TestCase):
"""A test case that wraps a test function.
This is useful for slipping pre-existing test functions into the
unittest framework. Optionally, set-up and tidy-up functions can be
supplied. As with TestCase, the tidy-up ('tearDown') function will
always be called if the set-up ('setUp') function ran successfully.
"""
def __init__(self, testFunc, setUp=None, tearDown=None, description=None):
super(FunctionTestCase, self).__init__()
self._setUpFunc = setUp
self._tearDownFunc = tearDown
self._testFunc = testFunc
self._description = description
def setUp(self):
if self._setUpFunc is not None:
self._setUpFunc()
def tearDown(self):
if self._tearDownFunc is not None:
self._tearDownFunc()
def runTest(self):
self._testFunc()
def id(self):
return self._testFunc.__name__
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return self._setUpFunc == other._setUpFunc and \
self._tearDownFunc == other._tearDownFunc and \
self._testFunc == other._testFunc and \
self._description == other._description
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((type(self), self._setUpFunc, self._tearDownFunc,
self._testFunc, self._description))
def __str__(self):
return "%s (%s)" % (strclass(self.__class__),
self._testFunc.__name__)
def __repr__(self):
return "<%s tec=%s>" % (strclass(self.__class__),
self._testFunc)
def shortDescription(self):
if self._description is not None:
return self._description
doc = self._testFunc.__doc__
return doc and doc.split("\n")[0].strip() or None
| agpl-3.0 | 5,558,783,259,164,020,000 | 39.257825 | 109 | 0.575164 | false |
franky88/emperioanimesta | env/Lib/site-packages/django/contrib/admin/options.py | 27 | 83212 | from __future__ import unicode_literals
import copy
import json
import operator
from collections import OrderedDict
from functools import partial, reduce, update_wrapper
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.admin import helpers, widgets
from django.contrib.admin.checks import (
BaseModelAdminChecks, InlineModelAdminChecks, ModelAdminChecks,
)
from django.contrib.admin.exceptions import DisallowedModelAdminToField
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.utils import (
NestedObjects, flatten_fieldsets, get_deleted_objects,
lookup_needs_distinct, model_format_dict, quote, unquote,
)
from django.contrib.auth import get_permission_codename
from django.core.exceptions import (
FieldDoesNotExist, FieldError, PermissionDenied, ValidationError,
)
from django.core.paginator import Paginator
from django.db import models, router, transaction
from django.db.models.constants import LOOKUP_SEP
from django.db.models.fields import BLANK_CHOICE_DASH
from django.forms.formsets import DELETION_FIELD_NAME, all_valid
from django.forms.models import (
BaseInlineFormSet, inlineformset_factory, modelform_defines_fields,
modelform_factory, modelformset_factory,
)
from django.forms.widgets import CheckboxSelectMultiple, SelectMultiple
from django.http import Http404, HttpResponseRedirect
from django.http.response import HttpResponseBase
from django.template.response import SimpleTemplateResponse, TemplateResponse
from django.urls import reverse
from django.utils import six
from django.utils.decorators import method_decorator
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.html import escape, format_html
from django.utils.http import urlencode, urlquote
from django.utils.safestring import mark_safe
from django.utils.text import capfirst, get_text_list
from django.utils.translation import (
override as translation_override, string_concat, ugettext as _, ungettext,
)
from django.views.decorators.csrf import csrf_protect
from django.views.generic import RedirectView
IS_POPUP_VAR = '_popup'
TO_FIELD_VAR = '_to_field'
HORIZONTAL, VERTICAL = 1, 2
def get_content_type_for_model(obj):
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level.
from django.contrib.contenttypes.models import ContentType
return ContentType.objects.get_for_model(obj, for_concrete_model=False)
def get_ul_class(radio_style):
return 'radiolist' if radio_style == VERTICAL else 'radiolist inline'
class IncorrectLookupParameters(Exception):
pass
# Defaults for formfield_overrides. ModelAdmin subclasses can change this
# by adding to ModelAdmin.formfield_overrides.
FORMFIELD_FOR_DBFIELD_DEFAULTS = {
models.DateTimeField: {
'form_class': forms.SplitDateTimeField,
'widget': widgets.AdminSplitDateTime
},
models.DateField: {'widget': widgets.AdminDateWidget},
models.TimeField: {'widget': widgets.AdminTimeWidget},
models.TextField: {'widget': widgets.AdminTextareaWidget},
models.URLField: {'widget': widgets.AdminURLFieldWidget},
models.IntegerField: {'widget': widgets.AdminIntegerFieldWidget},
models.BigIntegerField: {'widget': widgets.AdminBigIntegerFieldWidget},
models.CharField: {'widget': widgets.AdminTextInputWidget},
models.ImageField: {'widget': widgets.AdminFileWidget},
models.FileField: {'widget': widgets.AdminFileWidget},
models.EmailField: {'widget': widgets.AdminEmailInputWidget},
}
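
# Illustrative example (not part of the original module): a ModelAdmin
# subclass extends these defaults via its own ``formfield_overrides``; the
# per-class dicts are merged with FORMFIELD_FOR_DBFIELD_DEFAULTS in
# BaseModelAdmin.__init__ below.  RichTextEditorWidget is a hypothetical
# third-party widget:
#
#     class ArticleAdmin(admin.ModelAdmin):
#         formfield_overrides = {
#             models.TextField: {'widget': RichTextEditorWidget},
#         }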
csrf_protect_m = method_decorator(csrf_protect)
class BaseModelAdmin(six.with_metaclass(forms.MediaDefiningClass)):
"""Functionality common to both ModelAdmin and InlineAdmin."""
raw_id_fields = ()
fields = None
exclude = None
fieldsets = None
form = forms.ModelForm
filter_vertical = ()
filter_horizontal = ()
radio_fields = {}
prepopulated_fields = {}
formfield_overrides = {}
readonly_fields = ()
ordering = None
view_on_site = True
show_full_result_count = True
checks_class = BaseModelAdminChecks
def check(self, **kwargs):
return self.checks_class().check(self, **kwargs)
def __init__(self):
# Merge FORMFIELD_FOR_DBFIELD_DEFAULTS with the formfield_overrides
# rather than simply overwriting.
overrides = copy.deepcopy(FORMFIELD_FOR_DBFIELD_DEFAULTS)
for k, v in self.formfield_overrides.items():
overrides.setdefault(k, {}).update(v)
self.formfield_overrides = overrides
def formfield_for_dbfield(self, db_field, request, **kwargs):
"""
Hook for specifying the form Field instance for a given database Field
instance.
If kwargs are given, they're passed to the form Field's constructor.
"""
# If the field specifies choices, we don't need to look for special
# admin widgets - we just need to use a select widget of some kind.
if db_field.choices:
return self.formfield_for_choice_field(db_field, request, **kwargs)
# ForeignKey or ManyToManyFields
if isinstance(db_field, models.ManyToManyField) or isinstance(db_field, models.ForeignKey):
# Combine the field kwargs with any options for formfield_overrides.
# Make sure the passed in **kwargs override anything in
# formfield_overrides because **kwargs is more specific, and should
# always win.
if db_field.__class__ in self.formfield_overrides:
kwargs = dict(self.formfield_overrides[db_field.__class__], **kwargs)
# Get the correct formfield.
if isinstance(db_field, models.ForeignKey):
formfield = self.formfield_for_foreignkey(db_field, request, **kwargs)
elif isinstance(db_field, models.ManyToManyField):
formfield = self.formfield_for_manytomany(db_field, request, **kwargs)
# For non-raw_id fields, wrap the widget with a wrapper that adds
# extra HTML -- the "add other" interface -- to the end of the
# rendered output. formfield can be None if it came from a
# OneToOneField with parent_link=True or a M2M intermediary.
if formfield and db_field.name not in self.raw_id_fields:
related_modeladmin = self.admin_site._registry.get(db_field.remote_field.model)
wrapper_kwargs = {}
if related_modeladmin:
wrapper_kwargs.update(
can_add_related=related_modeladmin.has_add_permission(request),
can_change_related=related_modeladmin.has_change_permission(request),
can_delete_related=related_modeladmin.has_delete_permission(request),
)
formfield.widget = widgets.RelatedFieldWidgetWrapper(
formfield.widget, db_field.remote_field, self.admin_site, **wrapper_kwargs
)
return formfield
# If we've got overrides for the formfield defined, use 'em. **kwargs
# passed to formfield_for_dbfield override the defaults.
for klass in db_field.__class__.mro():
if klass in self.formfield_overrides:
kwargs = dict(copy.deepcopy(self.formfield_overrides[klass]), **kwargs)
return db_field.formfield(**kwargs)
# For any other type of field, just call its formfield() method.
return db_field.formfield(**kwargs)
def formfield_for_choice_field(self, db_field, request, **kwargs):
"""
Get a form Field for a database Field that has declared choices.
"""
# If the field is named as a radio_field, use a RadioSelect
if db_field.name in self.radio_fields:
# Avoid stomping on custom widget/choices arguments.
if 'widget' not in kwargs:
kwargs['widget'] = widgets.AdminRadioSelect(attrs={
'class': get_ul_class(self.radio_fields[db_field.name]),
})
if 'choices' not in kwargs:
kwargs['choices'] = db_field.get_choices(
include_blank=db_field.blank,
blank_choice=[('', _('None'))]
)
return db_field.formfield(**kwargs)
def get_field_queryset(self, db, db_field, request):
"""
If the ModelAdmin specifies ordering, the queryset should respect that
ordering. Otherwise don't specify the queryset, let the field decide
(returns None in that case).
"""
related_admin = self.admin_site._registry.get(db_field.remote_field.model)
if related_admin is not None:
ordering = related_admin.get_ordering(request)
if ordering is not None and ordering != ():
return db_field.remote_field.model._default_manager.using(db).order_by(*ordering)
return None
def formfield_for_foreignkey(self, db_field, request, **kwargs):
"""
Get a form Field for a ForeignKey.
"""
db = kwargs.get('using')
if db_field.name in self.raw_id_fields:
kwargs['widget'] = widgets.ForeignKeyRawIdWidget(db_field.remote_field, self.admin_site, using=db)
elif db_field.name in self.radio_fields:
kwargs['widget'] = widgets.AdminRadioSelect(attrs={
'class': get_ul_class(self.radio_fields[db_field.name]),
})
kwargs['empty_label'] = _('None') if db_field.blank else None
if 'queryset' not in kwargs:
queryset = self.get_field_queryset(db, db_field, request)
if queryset is not None:
kwargs['queryset'] = queryset
return db_field.formfield(**kwargs)
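    # Illustrative configuration (not part of the original module) showing the
    # attributes this method consults:
    #
    #     class BookAdmin(admin.ModelAdmin):
    #         # render the FK as a ForeignKeyRawIdWidget:
    #         raw_id_fields = ['publisher']
    #         # render another FK as an AdminRadioSelect:
    #         radio_fields = {'genre': admin.VERTICAL}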
def formfield_for_manytomany(self, db_field, request, **kwargs):
"""
Get a form Field for a ManyToManyField.
"""
# If it uses an intermediary model that isn't auto created, don't show
# a field in admin.
if not db_field.remote_field.through._meta.auto_created:
return None
db = kwargs.get('using')
if db_field.name in self.raw_id_fields:
kwargs['widget'] = widgets.ManyToManyRawIdWidget(db_field.remote_field, self.admin_site, using=db)
elif db_field.name in (list(self.filter_vertical) + list(self.filter_horizontal)):
kwargs['widget'] = widgets.FilteredSelectMultiple(
db_field.verbose_name,
db_field.name in self.filter_vertical
)
if 'queryset' not in kwargs:
queryset = self.get_field_queryset(db, db_field, request)
if queryset is not None:
kwargs['queryset'] = queryset
form_field = db_field.formfield(**kwargs)
if isinstance(form_field.widget, SelectMultiple) and not isinstance(form_field.widget, CheckboxSelectMultiple):
msg = _('Hold down "Control", or "Command" on a Mac, to select more than one.')
help_text = form_field.help_text
form_field.help_text = string_concat(help_text, ' ', msg) if help_text else msg
return form_field
def get_view_on_site_url(self, obj=None):
if obj is None or not self.view_on_site:
return None
if callable(self.view_on_site):
return self.view_on_site(obj)
elif self.view_on_site and hasattr(obj, 'get_absolute_url'):
# use the ContentType lookup if view_on_site is True
return reverse('admin:view_on_site', kwargs={
'content_type_id': get_content_type_for_model(obj).pk,
'object_id': obj.pk
})
def get_empty_value_display(self):
"""
Return the empty_value_display set on ModelAdmin or AdminSite.
"""
try:
return mark_safe(self.empty_value_display)
except AttributeError:
return mark_safe(self.admin_site.empty_value_display)
def get_fields(self, request, obj=None):
"""
Hook for specifying fields.
"""
return self.fields
def get_fieldsets(self, request, obj=None):
"""
Hook for specifying fieldsets.
"""
if self.fieldsets:
return self.fieldsets
return [(None, {'fields': self.get_fields(request, obj)})]
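    # Illustrative example (not part of the original module) of the structure
    # that ``fieldsets``/get_fieldsets() uses -- a list of
    # (name, field_options) two-tuples:
    #
    #     fieldsets = [
    #         (None, {'fields': ['name', 'slug']}),
    #         ('Advanced options', {
    #             'classes': ['collapse'],
    #             'fields': ['registration_required', 'template_name'],
    #         }),
    #     ]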
def get_ordering(self, request):
"""
Hook for specifying field ordering.
"""
return self.ordering or () # otherwise we might try to *None, which is bad ;)
def get_readonly_fields(self, request, obj=None):
"""
Hook for specifying custom readonly fields.
"""
return self.readonly_fields
def get_prepopulated_fields(self, request, obj=None):
"""
Hook for specifying custom prepopulated fields.
"""
return self.prepopulated_fields
def get_queryset(self, request):
"""
Returns a QuerySet of all model instances that can be edited by the
admin site. This is used by changelist_view.
"""
qs = self.model._default_manager.get_queryset()
# TODO: this should be handled by some parameter to the ChangeList.
ordering = self.get_ordering(request)
if ordering:
qs = qs.order_by(*ordering)
return qs
def lookup_allowed(self, lookup, value):
from django.contrib.admin.filters import SimpleListFilter
model = self.model
# Check FKey lookups that are allowed, so that popups produced by
# ForeignKeyRawIdWidget, on the basis of ForeignKey.limit_choices_to,
# are allowed to work.
for l in model._meta.related_fkey_lookups:
# As ``limit_choices_to`` can be a callable, invoke it here.
if callable(l):
l = l()
for k, v in widgets.url_params_from_lookup_dict(l).items():
if k == lookup and v == value:
return True
relation_parts = []
prev_field = None
for part in lookup.split(LOOKUP_SEP):
try:
field = model._meta.get_field(part)
except FieldDoesNotExist:
# Lookups on non-existent fields are ok, since they're ignored
# later.
break
            # It is allowed to filter on values that would be found from local
            # model anyway. For example, if you filter on employee__department__id,
            # then the id value would be found already from employee__department_id.
if not prev_field or (prev_field.concrete and
field not in prev_field.get_path_info()[-1].target_fields):
relation_parts.append(part)
if not getattr(field, 'get_path_info', None):
# This is not a relational field, so further parts
# must be transforms.
break
prev_field = field
model = field.get_path_info()[-1].to_opts.model
if len(relation_parts) <= 1:
# Either a local field filter, or no fields at all.
return True
clean_lookup = LOOKUP_SEP.join(relation_parts)
valid_lookups = [self.date_hierarchy]
for filter_item in self.list_filter:
if isinstance(filter_item, type) and issubclass(filter_item, SimpleListFilter):
valid_lookups.append(filter_item.parameter_name)
elif isinstance(filter_item, (list, tuple)):
valid_lookups.append(filter_item[0])
else:
valid_lookups.append(filter_item)
return clean_lookup in valid_lookups
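    # Worked example of the rule above (models hypothetical): a filter on
    # employee__department__id resolves to the local employee__department_id
    # column, so only one relation part remains and it is allowed without
    # being listed in list_filter.
    #
    #   class EmployeeAdmin(ModelAdmin):
    #       list_filter = ('department__name',)
    #   # lookup_allowed('department__name', 'Sales') -> True (listed)
    #   # lookup_allowed('department__id', 3)         -> True (local FK value)
    #   # lookup_allowed('department__budget__year', 2015) -> False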
def to_field_allowed(self, request, to_field):
"""
Returns True if the model associated with this admin should be
allowed to be referenced by the specified field.
"""
opts = self.model._meta
try:
field = opts.get_field(to_field)
except FieldDoesNotExist:
return False
# Always allow referencing the primary key since it's already possible
# to get this information from the change view URL.
if field.primary_key:
return True
# Allow reverse relationships to models defining m2m fields if they
# target the specified field.
for many_to_many in opts.many_to_many:
if many_to_many.m2m_target_field_name() == to_field:
return True
# Make sure at least one of the models registered for this site
# references this field through a FK or a M2M relationship.
registered_models = set()
for model, admin in self.admin_site._registry.items():
registered_models.add(model)
for inline in admin.inlines:
registered_models.add(inline.model)
related_objects = (
f for f in opts.get_fields(include_hidden=True)
if (f.auto_created and not f.concrete)
)
for related_object in related_objects:
related_model = related_object.related_model
remote_field = related_object.field.remote_field
if (any(issubclass(model, related_model) for model in registered_models) and
hasattr(remote_field, 'get_related_field') and
remote_field.get_related_field() == field):
return True
return False
def has_add_permission(self, request):
"""
Returns True if the given request has permission to add an object.
Can be overridden by the user in subclasses.
"""
opts = self.opts
codename = get_permission_codename('add', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_change_permission(self, request, obj=None):
"""
        Returns True if the given request has permission to change the given
        Django model instance; the default implementation doesn't examine the
        `obj` parameter.
Can be overridden by the user in subclasses. In such case it should
return True if the given request has permission to change the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to change *any* object of the given type.
"""
opts = self.opts
codename = get_permission_codename('change', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_delete_permission(self, request, obj=None):
"""
        Returns True if the given request has permission to delete the given
        Django model instance; the default implementation doesn't examine the
        `obj` parameter.
Can be overridden by the user in subclasses. In such case it should
return True if the given request has permission to delete the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to delete *any* object of the given type.
"""
opts = self.opts
codename = get_permission_codename('delete', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_module_permission(self, request):
"""
Returns True if the given request has any permission in the given
app label.
Can be overridden by the user in subclasses. In such case it should
return True if the given request has permission to view the module on
the admin index page and access the module's index page. Overriding it
does not restrict access to the add, change or delete views. Use
`ModelAdmin.has_(add|change|delete)_permission` for that.
"""
return request.user.has_module_perms(self.opts.app_label)
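    # Common override pattern for the permission hooks above (hypothetical
    # subclass): make a model effectively read-only in the admin.
    #
    #   class ReadOnlyAdmin(ModelAdmin):
    #       def has_add_permission(self, request):
    #           return False
    #       def has_delete_permission(self, request, obj=None):
    #           return False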
@python_2_unicode_compatible
class ModelAdmin(BaseModelAdmin):
"Encapsulates all admin options and functionality for a given model."
list_display = ('__str__',)
list_display_links = ()
list_filter = ()
list_select_related = False
list_per_page = 100
list_max_show_all = 200
list_editable = ()
search_fields = ()
date_hierarchy = None
save_as = False
save_as_continue = True
save_on_top = False
paginator = Paginator
preserve_filters = True
inlines = []
# Custom templates (designed to be over-ridden in subclasses)
add_form_template = None
change_form_template = None
change_list_template = None
delete_confirmation_template = None
delete_selected_confirmation_template = None
object_history_template = None
# Actions
actions = []
action_form = helpers.ActionForm
actions_on_top = True
actions_on_bottom = False
actions_selection_counter = True
checks_class = ModelAdminChecks
def __init__(self, model, admin_site):
self.model = model
self.opts = model._meta
self.admin_site = admin_site
super(ModelAdmin, self).__init__()
def __str__(self):
return "%s.%s" % (self.model._meta.app_label, self.__class__.__name__)
def get_inline_instances(self, request, obj=None):
inline_instances = []
for inline_class in self.inlines:
inline = inline_class(self.model, self.admin_site)
if request:
if not (inline.has_add_permission(request) or
inline.has_change_permission(request, obj) or
inline.has_delete_permission(request, obj)):
continue
if not inline.has_add_permission(request):
inline.max_num = 0
inline_instances.append(inline)
return inline_instances
def get_urls(self):
from django.conf.urls import url
def wrap(view):
def wrapper(*args, **kwargs):
return self.admin_site.admin_view(view)(*args, **kwargs)
wrapper.model_admin = self
return update_wrapper(wrapper, view)
info = self.model._meta.app_label, self.model._meta.model_name
urlpatterns = [
url(r'^$', wrap(self.changelist_view), name='%s_%s_changelist' % info),
url(r'^add/$', wrap(self.add_view), name='%s_%s_add' % info),
url(r'^(.+)/history/$', wrap(self.history_view), name='%s_%s_history' % info),
url(r'^(.+)/delete/$', wrap(self.delete_view), name='%s_%s_delete' % info),
url(r'^(.+)/change/$', wrap(self.change_view), name='%s_%s_change' % info),
# For backwards compatibility (was the change url before 1.9)
url(r'^(.+)/$', wrap(RedirectView.as_view(
pattern_name='%s:%s_%s_change' % ((self.admin_site.name,) + info)
))),
]
return urlpatterns
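    # Hedged sketch of extending get_urls() in a subclass; the extra view
    # and URL name are hypothetical:
    #
    #   class MyModelAdmin(ModelAdmin):
    #       def get_urls(self):
    #           from django.conf.urls import url
    #           extra = [url(r'^stats/$',
    #                        self.admin_site.admin_view(self.stats_view),
    #                        name='myapp_mymodel_stats')]
    #           return extra + super(MyModelAdmin, self).get_urls()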
def urls(self):
return self.get_urls()
urls = property(urls)
@property
def media(self):
extra = '' if settings.DEBUG else '.min'
js = [
'core.js',
'vendor/jquery/jquery%s.js' % extra,
'jquery.init.js',
'admin/RelatedObjectLookups.js',
'actions%s.js' % extra,
'urlify.js',
'prepopulate%s.js' % extra,
'vendor/xregexp/xregexp%s.js' % extra,
]
return forms.Media(js=['admin/js/%s' % url for url in js])
def get_model_perms(self, request):
"""
Returns a dict of all perms for this model. This dict has the keys
``add``, ``change``, and ``delete`` mapping to the True/False for each
of those actions.
"""
return {
'add': self.has_add_permission(request),
'change': self.has_change_permission(request),
'delete': self.has_delete_permission(request),
}
def get_fields(self, request, obj=None):
if self.fields:
return self.fields
form = self.get_form(request, obj, fields=None)
return list(form.base_fields) + list(self.get_readonly_fields(request, obj))
def get_form(self, request, obj=None, **kwargs):
"""
Returns a Form class for use in the admin add view. This is used by
add_view and change_view.
"""
if 'fields' in kwargs:
fields = kwargs.pop('fields')
else:
fields = flatten_fieldsets(self.get_fieldsets(request, obj))
if self.exclude is None:
exclude = []
else:
exclude = list(self.exclude)
readonly_fields = self.get_readonly_fields(request, obj)
exclude.extend(readonly_fields)
if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
# Take the custom ModelForm's Meta.exclude into account only if the
# ModelAdmin doesn't define its own.
exclude.extend(self.form._meta.exclude)
# if exclude is an empty list we pass None to be consistent with the
# default on modelform_factory
exclude = exclude or None
# Remove declared form fields which are in readonly_fields.
new_attrs = OrderedDict(
(f, None) for f in readonly_fields
if f in self.form.declared_fields
)
form = type(self.form.__name__, (self.form,), new_attrs)
defaults = {
"form": form,
"fields": fields,
"exclude": exclude,
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
defaults['fields'] = forms.ALL_FIELDS
try:
return modelform_factory(self.model, **defaults)
except FieldError as e:
raise FieldError(
'%s. Check fields/fieldsets/exclude attributes of class %s.'
% (e, self.__class__.__name__)
)
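    # Typical get_form() override, sketched (field name hypothetical):
    # adjust the kwargs and let the factory machinery above do the rest.
    #
    #   class MyModelAdmin(ModelAdmin):
    #       def get_form(self, request, obj=None, **kwargs):
    #           if not request.user.is_superuser:
    #               kwargs['exclude'] = ['internal_notes']
    #           return super(MyModelAdmin, self).get_form(request, obj, **kwargs)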
def get_changelist(self, request, **kwargs):
"""
Returns the ChangeList class for use on the changelist page.
"""
from django.contrib.admin.views.main import ChangeList
return ChangeList
def get_object(self, request, object_id, from_field=None):
"""
        Returns an instance matching the field and value provided; the primary
        key is used if no field is provided. Returns ``None`` if no match is
        found or the object_id fails validation.
"""
queryset = self.get_queryset(request)
model = queryset.model
field = model._meta.pk if from_field is None else model._meta.get_field(from_field)
try:
object_id = field.to_python(object_id)
return queryset.get(**{field.name: object_id})
except (model.DoesNotExist, ValidationError, ValueError):
return None
def get_changelist_form(self, request, **kwargs):
"""
Returns a Form class for use in the Formset on the changelist page.
"""
defaults = {
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
if defaults.get('fields') is None and not modelform_defines_fields(defaults.get('form')):
defaults['fields'] = forms.ALL_FIELDS
return modelform_factory(self.model, **defaults)
def get_changelist_formset(self, request, **kwargs):
"""
Returns a FormSet class for use on the changelist page if list_editable
is used.
"""
defaults = {
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
return modelformset_factory(
self.model, self.get_changelist_form(request), extra=0,
fields=self.list_editable, **defaults
)
def get_formsets_with_inlines(self, request, obj=None):
"""
Yields formsets and the corresponding inlines.
"""
for inline in self.get_inline_instances(request, obj):
yield inline.get_formset(request, obj), inline
def get_paginator(self, request, queryset, per_page, orphans=0, allow_empty_first_page=True):
return self.paginator(queryset, per_page, orphans, allow_empty_first_page)
def log_addition(self, request, object, message):
"""
Log that an object has been successfully added.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, ADDITION
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=get_content_type_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=ADDITION,
change_message=message,
)
def log_change(self, request, object, message):
"""
Log that an object has been successfully changed.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, CHANGE
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=get_content_type_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=CHANGE,
change_message=message,
)
def log_deletion(self, request, object, object_repr):
"""
Log that an object will be deleted. Note that this method must be
called before the deletion.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, DELETION
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=get_content_type_for_model(object).pk,
object_id=object.pk,
object_repr=object_repr,
action_flag=DELETION,
)
def action_checkbox(self, obj):
"""
A list_display column containing a checkbox widget.
"""
return helpers.checkbox.render(helpers.ACTION_CHECKBOX_NAME, force_text(obj.pk))
action_checkbox.short_description = mark_safe('<input type="checkbox" id="action-toggle" />')
def get_actions(self, request):
"""
Return a dictionary mapping the names of all actions for this
ModelAdmin to a tuple of (callable, name, description) for each action.
"""
# If self.actions is explicitly set to None that means that we don't
# want *any* actions enabled on this page.
if self.actions is None or IS_POPUP_VAR in request.GET:
return OrderedDict()
actions = []
# Gather actions from the admin site first
for (name, func) in self.admin_site.actions:
description = getattr(func, 'short_description', name.replace('_', ' '))
actions.append((func, name, description))
# Then gather them from the model admin and all parent classes,
# starting with self and working back up.
for klass in self.__class__.mro()[::-1]:
class_actions = getattr(klass, 'actions', [])
# Avoid trying to iterate over None
if not class_actions:
continue
actions.extend(self.get_action(action) for action in class_actions)
# get_action might have returned None, so filter any of those out.
actions = filter(None, actions)
# Convert the actions into an OrderedDict keyed by name.
actions = OrderedDict(
(name, (func, name, desc))
for func, name, desc in actions
)
return actions
def get_action_choices(self, request, default_choices=BLANK_CHOICE_DASH):
"""
Return a list of choices for use in a form object. Each choice is a
tuple (name, description).
"""
choices = [] + default_choices
for func, name, description in six.itervalues(self.get_actions(request)):
choice = (name, description % model_format_dict(self.opts))
choices.append(choice)
return choices
def get_action(self, action):
"""
Return a given action from a parameter, which can either be a callable,
or the name of a method on the ModelAdmin. Return is a tuple of
(callable, name, description).
"""
# If the action is a callable, just use it.
if callable(action):
func = action
action = action.__name__
# Next, look for a method. Grab it off self.__class__ to get an unbound
# method instead of a bound one; this ensures that the calling
# conventions are the same for functions and methods.
elif hasattr(self.__class__, action):
func = getattr(self.__class__, action)
# Finally, look for a named method on the admin site
else:
try:
func = self.admin_site.get_action(action)
except KeyError:
return None
if hasattr(func, 'short_description'):
description = func.short_description
else:
description = capfirst(action.replace('_', ' '))
return func, action, description
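    # Example of an action resolvable by get_action() above; the function
    # below is illustrative, not part of this module:
    #
    #   def make_published(modeladmin, request, queryset):
    #       queryset.update(status='p')
    #   make_published.short_description = "Mark selected as published"
    #
    #   class ArticleAdmin(ModelAdmin):
    #       actions = [make_published]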
def get_list_display(self, request):
"""
Return a sequence containing the fields to be displayed on the
changelist.
"""
return self.list_display
def get_list_display_links(self, request, list_display):
"""
Return a sequence containing the fields to be displayed as links
on the changelist. The list_display parameter is the list of fields
returned by get_list_display().
"""
if self.list_display_links or self.list_display_links is None or not list_display:
return self.list_display_links
else:
# Use only the first item in list_display as link
return list(list_display)[:1]
def get_list_filter(self, request):
"""
Returns a sequence containing the fields to be displayed as filters in
the right sidebar of the changelist page.
"""
return self.list_filter
def get_list_select_related(self, request):
"""
Returns a list of fields to add to the select_related() part of the
changelist items query.
"""
return self.list_select_related
def get_search_fields(self, request):
"""
Returns a sequence containing the fields to be searched whenever
somebody submits a search query.
"""
return self.search_fields
def get_search_results(self, request, queryset, search_term):
"""
Returns a tuple containing a queryset to implement the search,
and a boolean indicating if the results may contain duplicates.
"""
# Apply keyword searches.
def construct_search(field_name):
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
use_distinct = False
search_fields = self.get_search_fields(request)
if search_fields and search_term:
orm_lookups = [construct_search(str(search_field))
for search_field in search_fields]
for bit in search_term.split():
or_queries = [models.Q(**{orm_lookup: bit})
for orm_lookup in orm_lookups]
queryset = queryset.filter(reduce(operator.or_, or_queries))
if not use_distinct:
for search_spec in orm_lookups:
if lookup_needs_distinct(self.opts, search_spec):
use_distinct = True
break
return queryset, use_distinct
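    # The construct_search() prefixes above map as follows (hypothetical
    # search_fields shown):
    #
    #   search_fields = ('^name',   # name__istartswith
    #                    '=email',  # email__iexact
    #                    '@body',   # body__search (full-text)
    #                    'city')    # city__icontains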
def get_preserved_filters(self, request):
"""
Returns the preserved filters querystring.
"""
match = request.resolver_match
if self.preserve_filters and match:
opts = self.model._meta
current_url = '%s:%s' % (match.app_name, match.url_name)
changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name)
if current_url == changelist_url:
preserved_filters = request.GET.urlencode()
else:
preserved_filters = request.GET.get('_changelist_filters')
if preserved_filters:
return urlencode({'_changelist_filters': preserved_filters})
return ''
def construct_change_message(self, request, form, formsets, add=False):
"""
Construct a JSON structure describing changes from a changed object.
Translations are deactivated so that strings are stored untranslated.
Translation happens later on LogEntry access.
"""
change_message = []
if add:
change_message.append({'added': {}})
elif form.changed_data:
change_message.append({'changed': {'fields': form.changed_data}})
if formsets:
with translation_override(None):
for formset in formsets:
for added_object in formset.new_objects:
change_message.append({
'added': {
'name': force_text(added_object._meta.verbose_name),
'object': force_text(added_object),
}
})
for changed_object, changed_fields in formset.changed_objects:
change_message.append({
'changed': {
'name': force_text(changed_object._meta.verbose_name),
'object': force_text(changed_object),
'fields': changed_fields,
}
})
for deleted_object in formset.deleted_objects:
change_message.append({
'deleted': {
'name': force_text(deleted_object._meta.verbose_name),
'object': force_text(deleted_object),
}
})
return change_message
def message_user(self, request, message, level=messages.INFO, extra_tags='',
fail_silently=False):
"""
Send a message to the user. The default implementation
posts a message using the django.contrib.messages backend.
Exposes almost the same API as messages.add_message(), but accepts the
positional arguments in a different order to maintain backwards
compatibility. For convenience, it accepts the `level` argument as
a string rather than the usual level number.
"""
if not isinstance(level, int):
# attempt to get the level if passed a string
try:
level = getattr(messages.constants, level.upper())
except AttributeError:
levels = messages.constants.DEFAULT_TAGS.values()
levels_repr = ', '.join('`%s`' % l for l in levels)
raise ValueError(
'Bad message level string: `%s`. Possible values are: %s'
% (level, levels_repr)
)
messages.add_message(request, level, message, extra_tags=extra_tags, fail_silently=fail_silently)
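    # Usage sketch: the string level is resolved through
    # getattr(messages.constants, level.upper()) as implemented above.
    #
    #   self.message_user(request, "Index rebuilt.", level='warning')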
    def save_form(self, request, form, change):
        """
        Given a ModelForm, return an unsaved instance. ``change`` is True if
        the object is being changed, and False if it's being added.
        """
        return form.save(commit=False)
    def save_model(self, request, obj, form, change):
        """
        Given a model instance, save it to the database.
        """
        obj.save()
    def delete_model(self, request, obj):
        """
        Given a model instance, delete it from the database.
        """
        obj.delete()
    def save_formset(self, request, form, formset, change):
        """
        Given an inline formset, save it to the database.
        """
        formset.save()
def save_related(self, request, form, formsets, change):
"""
Given the ``HttpRequest``, the parent ``ModelForm`` instance, the
list of inline formsets and a boolean value based on whether the
parent is being added or changed, save the related objects to the
database. Note that at this point save_form() and save_model() have
already been called.
"""
form.save_m2m()
for formset in formsets:
self.save_formset(request, form, formset, change=change)
def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
opts = self.model._meta
app_label = opts.app_label
preserved_filters = self.get_preserved_filters(request)
form_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, form_url)
view_on_site_url = self.get_view_on_site_url(obj)
context.update({
'add': add,
'change': change,
'has_add_permission': self.has_add_permission(request),
'has_change_permission': self.has_change_permission(request, obj),
'has_delete_permission': self.has_delete_permission(request, obj),
'has_file_field': True, # FIXME - this should check if form or formsets have a FileField,
'has_absolute_url': view_on_site_url is not None,
'absolute_url': view_on_site_url,
'form_url': form_url,
'opts': opts,
'content_type_id': get_content_type_for_model(self.model).pk,
'save_as': self.save_as,
'save_on_top': self.save_on_top,
'to_field_var': TO_FIELD_VAR,
'is_popup_var': IS_POPUP_VAR,
'app_label': app_label,
})
if add and self.add_form_template is not None:
form_template = self.add_form_template
else:
form_template = self.change_form_template
request.current_app = self.admin_site.name
return TemplateResponse(request, form_template or [
"admin/%s/%s/change_form.html" % (app_label, opts.model_name),
"admin/%s/change_form.html" % app_label,
"admin/change_form.html"
], context)
def response_add(self, request, obj, post_url_continue=None):
"""
Determines the HttpResponse for the add_view stage.
"""
opts = obj._meta
pk_value = obj._get_pk_val()
preserved_filters = self.get_preserved_filters(request)
obj_url = reverse(
'admin:%s_%s_change' % (opts.app_label, opts.model_name),
args=(quote(pk_value),),
current_app=self.admin_site.name,
)
# Add a link to the object's change form if the user can edit the obj.
if self.has_change_permission(request, obj):
obj_repr = format_html('<a href="{}">{}</a>', urlquote(obj_url), obj)
else:
obj_repr = force_text(obj)
msg_dict = {
'name': force_text(opts.verbose_name),
'obj': obj_repr,
}
# Here, we distinguish between different save types by checking for
# the presence of keys in request.POST.
if IS_POPUP_VAR in request.POST:
to_field = request.POST.get(TO_FIELD_VAR)
if to_field:
attr = str(to_field)
else:
attr = obj._meta.pk.attname
value = obj.serializable_value(attr)
popup_response_data = json.dumps({
'value': six.text_type(value),
'obj': six.text_type(obj),
})
return SimpleTemplateResponse('admin/popup_response.html', {
'popup_response_data': popup_response_data,
})
elif "_continue" in request.POST or (
# Redirecting after "Save as new".
"_saveasnew" in request.POST and self.save_as_continue and
self.has_change_permission(request, obj)
):
msg = format_html(
_('The {name} "{obj}" was added successfully. You may edit it again below.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
if post_url_continue is None:
post_url_continue = obj_url
post_url_continue = add_preserved_filters(
{'preserved_filters': preserved_filters, 'opts': opts},
post_url_continue
)
return HttpResponseRedirect(post_url_continue)
elif "_addanother" in request.POST:
msg = format_html(
_('The {name} "{obj}" was added successfully. You may add another {name} below.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
redirect_url = request.path
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
else:
msg = format_html(
_('The {name} "{obj}" was added successfully.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
return self.response_post_save_add(request, obj)
def response_change(self, request, obj):
"""
Determines the HttpResponse for the change_view stage.
"""
if IS_POPUP_VAR in request.POST:
to_field = request.POST.get(TO_FIELD_VAR)
attr = str(to_field) if to_field else obj._meta.pk.attname
# Retrieve the `object_id` from the resolved pattern arguments.
value = request.resolver_match.args[0]
new_value = obj.serializable_value(attr)
popup_response_data = json.dumps({
'action': 'change',
'value': six.text_type(value),
'obj': six.text_type(obj),
'new_value': six.text_type(new_value),
})
return SimpleTemplateResponse('admin/popup_response.html', {
'popup_response_data': popup_response_data,
})
opts = self.model._meta
pk_value = obj._get_pk_val()
preserved_filters = self.get_preserved_filters(request)
msg_dict = {
'name': force_text(opts.verbose_name),
'obj': format_html('<a href="{}">{}</a>', urlquote(request.path), obj),
}
if "_continue" in request.POST:
msg = format_html(
_('The {name} "{obj}" was changed successfully. You may edit it again below.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
redirect_url = request.path
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
elif "_saveasnew" in request.POST:
msg = format_html(
_('The {name} "{obj}" was added successfully. You may edit it again below.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
redirect_url = reverse('admin:%s_%s_change' %
(opts.app_label, opts.model_name),
args=(pk_value,),
current_app=self.admin_site.name)
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
elif "_addanother" in request.POST:
msg = format_html(
_('The {name} "{obj}" was changed successfully. You may add another {name} below.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
redirect_url = reverse('admin:%s_%s_add' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
else:
msg = format_html(
_('The {name} "{obj}" was changed successfully.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
return self.response_post_save_change(request, obj)
def response_post_save_add(self, request, obj):
"""
Figure out where to redirect after the 'Save' button has been pressed
when adding a new object.
"""
opts = self.model._meta
if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def response_post_save_change(self, request, obj):
"""
Figure out where to redirect after the 'Save' button has been pressed
when editing an existing object.
"""
opts = self.model._meta
if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def response_action(self, request, queryset):
"""
Handle an admin action. This is called if a request is POSTed to the
changelist; it returns an HttpResponse if the action was handled, and
None otherwise.
"""
# There can be multiple action forms on the page (at the top
# and bottom of the change list, for example). Get the action
# whose button was pushed.
try:
action_index = int(request.POST.get('index', 0))
except ValueError:
action_index = 0
# Construct the action form.
data = request.POST.copy()
data.pop(helpers.ACTION_CHECKBOX_NAME, None)
data.pop("index", None)
# Use the action whose button was pushed
try:
data.update({'action': data.getlist('action')[action_index]})
except IndexError:
            # If we didn't get an action from the chosen form, the POST data
            # is invalid; leaving 'action' out of the data will simply fail
            # the validation check below, so nothing needs to happen here.
pass
action_form = self.action_form(data, auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
# If the form's valid we can handle the action.
if action_form.is_valid():
action = action_form.cleaned_data['action']
select_across = action_form.cleaned_data['select_across']
func = self.get_actions(request)[action][0]
            # Get the list of selected PKs. If nothing's selected, we can't
            # perform an action on them, so bail out, unless the user asked
            # to run the action across all objects (select_across).
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
if not selected and not select_across:
# Reminder that something needs to be selected or nothing will happen
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg, messages.WARNING)
return None
if not select_across:
# Perform the action only on the selected objects
queryset = queryset.filter(pk__in=selected)
response = func(self, request, queryset)
# Actions may return an HttpResponse-like object, which will be
# used as the response from the POST. If not, we'll be a good
# little HTTP citizen and redirect back to the changelist page.
if isinstance(response, HttpResponseBase):
return response
else:
return HttpResponseRedirect(request.get_full_path())
else:
msg = _("No action selected.")
self.message_user(request, msg, messages.WARNING)
return None
def response_delete(self, request, obj_display, obj_id):
"""
Determines the HttpResponse for the delete_view stage.
"""
opts = self.model._meta
if IS_POPUP_VAR in request.POST:
popup_response_data = json.dumps({
'action': 'delete',
'value': str(obj_id),
})
return SimpleTemplateResponse('admin/popup_response.html', {
'popup_response_data': popup_response_data,
})
self.message_user(
request,
_('The %(name)s "%(obj)s" was deleted successfully.') % {
'name': force_text(opts.verbose_name),
'obj': force_text(obj_display),
},
messages.SUCCESS,
)
if self.has_change_permission(request, None):
post_url = reverse(
'admin:%s_%s_changelist' % (opts.app_label, opts.model_name),
current_app=self.admin_site.name,
)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters(
{'preserved_filters': preserved_filters, 'opts': opts}, post_url
)
else:
post_url = reverse('admin:index', current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def render_delete_form(self, request, context):
opts = self.model._meta
app_label = opts.app_label
request.current_app = self.admin_site.name
context.update(
to_field_var=TO_FIELD_VAR,
is_popup_var=IS_POPUP_VAR,
media=self.media,
)
return TemplateResponse(
request,
self.delete_confirmation_template or [
"admin/{}/{}/delete_confirmation.html".format(app_label, opts.model_name),
"admin/{}/delete_confirmation.html".format(app_label),
"admin/delete_confirmation.html",
],
context,
)
def get_inline_formsets(self, request, formsets, inline_instances, obj=None):
inline_admin_formsets = []
for inline, formset in zip(inline_instances, formsets):
fieldsets = list(inline.get_fieldsets(request, obj))
readonly = list(inline.get_readonly_fields(request, obj))
prepopulated = dict(inline.get_prepopulated_fields(request, obj))
inline_admin_formset = helpers.InlineAdminFormSet(
inline, formset, fieldsets, prepopulated, readonly,
model_admin=self,
)
inline_admin_formsets.append(inline_admin_formset)
return inline_admin_formsets
def get_changeform_initial_data(self, request):
"""
Get the initial form data.
Unless overridden, this populates from the GET params.
"""
initial = dict(request.GET.items())
for k in initial:
try:
f = self.model._meta.get_field(k)
except FieldDoesNotExist:
continue
# We have to special-case M2Ms as a list of comma-separated PKs.
if isinstance(f, models.ManyToManyField):
initial[k] = initial[k].split(",")
return initial
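    # Example of the GET-parameter prepopulation handled above (URL and
    # fields hypothetical): /admin/app/book/add/?title=Dune&authors=1,2
    # yields {'title': 'Dune', 'authors': ['1', '2']}; the ManyToMany
    # value is split on commas.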
@csrf_protect_m
@transaction.atomic
def changeform_view(self, request, object_id=None, form_url='', extra_context=None):
to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR))
if to_field and not self.to_field_allowed(request, to_field):
raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field)
model = self.model
opts = model._meta
if request.method == 'POST' and '_saveasnew' in request.POST:
object_id = None
add = object_id is None
if add:
if not self.has_add_permission(request):
raise PermissionDenied
obj = None
else:
obj = self.get_object(request, unquote(object_id), to_field)
if not self.has_change_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {
'name': force_text(opts.verbose_name), 'key': escape(object_id)})
ModelForm = self.get_form(request, obj)
if request.method == 'POST':
form = ModelForm(request.POST, request.FILES, instance=obj)
if form.is_valid():
form_validated = True
new_object = self.save_form(request, form, change=not add)
else:
form_validated = False
new_object = form.instance
formsets, inline_instances = self._create_formsets(request, new_object, change=not add)
if all_valid(formsets) and form_validated:
self.save_model(request, new_object, form, not add)
self.save_related(request, form, formsets, not add)
change_message = self.construct_change_message(request, form, formsets, add)
if add:
self.log_addition(request, new_object, change_message)
return self.response_add(request, new_object)
else:
self.log_change(request, new_object, change_message)
return self.response_change(request, new_object)
else:
form_validated = False
else:
if add:
initial = self.get_changeform_initial_data(request)
form = ModelForm(initial=initial)
formsets, inline_instances = self._create_formsets(request, form.instance, change=False)
else:
form = ModelForm(instance=obj)
formsets, inline_instances = self._create_formsets(request, obj, change=True)
adminForm = helpers.AdminForm(
form,
list(self.get_fieldsets(request, obj)),
self.get_prepopulated_fields(request, obj),
self.get_readonly_fields(request, obj),
model_admin=self)
media = self.media + adminForm.media
inline_formsets = self.get_inline_formsets(request, formsets, inline_instances, obj)
for inline_formset in inline_formsets:
media = media + inline_formset.media
context = dict(
self.admin_site.each_context(request),
title=(_('Add %s') if add else _('Change %s')) % force_text(opts.verbose_name),
adminform=adminForm,
object_id=object_id,
original=obj,
is_popup=(IS_POPUP_VAR in request.POST or
IS_POPUP_VAR in request.GET),
to_field=to_field,
media=media,
inline_admin_formsets=inline_formsets,
errors=helpers.AdminErrorList(form, formsets),
preserved_filters=self.get_preserved_filters(request),
)
# Hide the "Save" and "Save and continue" buttons if "Save as New" was
# previously chosen to prevent the interface from getting confusing.
if request.method == 'POST' and not form_validated and "_saveasnew" in request.POST:
context['show_save'] = False
context['show_save_and_continue'] = False
# Use the change template instead of the add template.
add = False
context.update(extra_context or {})
return self.render_change_form(request, context, add=add, change=not add, obj=obj, form_url=form_url)
def add_view(self, request, form_url='', extra_context=None):
return self.changeform_view(request, None, form_url, extra_context)
def change_view(self, request, object_id, form_url='', extra_context=None):
return self.changeform_view(request, object_id, form_url, extra_context)
@csrf_protect_m
def changelist_view(self, request, extra_context=None):
"""
The 'change list' admin view for this model.
"""
from django.contrib.admin.views.main import ERROR_FLAG
opts = self.model._meta
app_label = opts.app_label
if not self.has_change_permission(request, None):
raise PermissionDenied
list_display = self.get_list_display(request)
list_display_links = self.get_list_display_links(request, list_display)
list_filter = self.get_list_filter(request)
search_fields = self.get_search_fields(request)
list_select_related = self.get_list_select_related(request)
# Check actions to see if any are available on this changelist
actions = self.get_actions(request)
if actions:
# Add the action checkboxes if there are any actions available.
list_display = ['action_checkbox'] + list(list_display)
ChangeList = self.get_changelist(request)
try:
cl = ChangeList(
request, self.model, list_display,
list_display_links, list_filter, self.date_hierarchy,
search_fields, list_select_related, self.list_per_page,
self.list_max_show_all, self.list_editable, self,
)
except IncorrectLookupParameters:
# Wacky lookup parameters were given, so redirect to the main
# changelist page, without parameters, and pass an 'invalid=1'
# parameter via the query string. If wacky parameters were given
# and the 'invalid=1' parameter was already in the query string,
# something is screwed up with the database, so display an error
# page.
if ERROR_FLAG in request.GET.keys():
return SimpleTemplateResponse('admin/invalid_setup.html', {
'title': _('Database error'),
})
return HttpResponseRedirect(request.path + '?' + ERROR_FLAG + '=1')
# If the request was POSTed, this might be a bulk action or a bulk
# edit. Try to look up an action or confirmation first, but if this
# isn't an action the POST will fall through to the bulk edit check,
# below.
action_failed = False
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
# Actions with no confirmation
if (actions and request.method == 'POST' and
'index' in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, queryset=cl.get_queryset(request))
if response:
return response
else:
action_failed = True
else:
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg, messages.WARNING)
action_failed = True
# Actions with confirmation
if (actions and request.method == 'POST' and
helpers.ACTION_CHECKBOX_NAME in request.POST and
'index' not in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, queryset=cl.get_queryset(request))
if response:
return response
else:
action_failed = True
# If we're allowing changelist editing, we need to construct a formset
# for the changelist given all the fields to be edited. Then we'll
# use the formset to validate/process POSTed data.
formset = cl.formset = None
# Handle POSTed bulk-edit data.
if (request.method == "POST" and cl.list_editable and
'_save' in request.POST and not action_failed):
FormSet = self.get_changelist_formset(request)
formset = cl.formset = FormSet(request.POST, request.FILES, queryset=self.get_queryset(request))
if formset.is_valid():
changecount = 0
for form in formset.forms:
if form.has_changed():
obj = self.save_form(request, form, change=True)
self.save_model(request, obj, form, change=True)
self.save_related(request, form, formsets=[], change=True)
change_msg = self.construct_change_message(request, form, None)
self.log_change(request, obj, change_msg)
changecount += 1
if changecount:
if changecount == 1:
name = force_text(opts.verbose_name)
else:
name = force_text(opts.verbose_name_plural)
msg = ungettext(
"%(count)s %(name)s was changed successfully.",
"%(count)s %(name)s were changed successfully.",
changecount
) % {
'count': changecount,
'name': name,
'obj': force_text(obj),
}
self.message_user(request, msg, messages.SUCCESS)
return HttpResponseRedirect(request.get_full_path())
# Handle GET -- construct a formset for display.
elif cl.list_editable:
FormSet = self.get_changelist_formset(request)
formset = cl.formset = FormSet(queryset=cl.result_list)
# Build the list of media to be used by the formset.
if formset:
media = self.media + formset.media
else:
media = self.media
# Build the action form and populate it with available actions.
if actions:
action_form = self.action_form(auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
else:
action_form = None
selection_note_all = ungettext(
'%(total_count)s selected',
'All %(total_count)s selected',
cl.result_count
)
context = dict(
self.admin_site.each_context(request),
module_name=force_text(opts.verbose_name_plural),
selection_note=_('0 of %(cnt)s selected') % {'cnt': len(cl.result_list)},
selection_note_all=selection_note_all % {'total_count': cl.result_count},
title=cl.title,
is_popup=cl.is_popup,
to_field=cl.to_field,
cl=cl,
media=media,
has_add_permission=self.has_add_permission(request),
opts=cl.opts,
action_form=action_form,
actions_on_top=self.actions_on_top,
actions_on_bottom=self.actions_on_bottom,
actions_selection_counter=self.actions_selection_counter,
preserved_filters=self.get_preserved_filters(request),
)
context.update(extra_context or {})
request.current_app = self.admin_site.name
return TemplateResponse(request, self.change_list_template or [
'admin/%s/%s/change_list.html' % (app_label, opts.model_name),
'admin/%s/change_list.html' % app_label,
'admin/change_list.html'
], context)
@csrf_protect_m
@transaction.atomic
def delete_view(self, request, object_id, extra_context=None):
"The 'delete' admin view for this model."
opts = self.model._meta
app_label = opts.app_label
to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR))
if to_field and not self.to_field_allowed(request, to_field):
raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field)
obj = self.get_object(request, unquote(object_id), to_field)
if not self.has_delete_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(
_('%(name)s object with primary key %(key)r does not exist.') %
{'name': force_text(opts.verbose_name), 'key': escape(object_id)}
)
using = router.db_for_write(self.model)
# Populate deleted_objects, a data structure of all related objects that
# will also be deleted.
(deleted_objects, model_count, perms_needed, protected) = get_deleted_objects(
[obj], opts, request.user, self.admin_site, using)
if request.POST and not protected: # The user has confirmed the deletion.
if perms_needed:
raise PermissionDenied
obj_display = force_text(obj)
attr = str(to_field) if to_field else opts.pk.attname
obj_id = obj.serializable_value(attr)
self.log_deletion(request, obj, obj_display)
self.delete_model(request, obj)
return self.response_delete(request, obj_display, obj_id)
object_name = force_text(opts.verbose_name)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": object_name}
else:
title = _("Are you sure?")
context = dict(
self.admin_site.each_context(request),
title=title,
object_name=object_name,
object=obj,
deleted_objects=deleted_objects,
model_count=dict(model_count).items(),
perms_lacking=perms_needed,
protected=protected,
opts=opts,
app_label=app_label,
preserved_filters=self.get_preserved_filters(request),
is_popup=(IS_POPUP_VAR in request.POST or
IS_POPUP_VAR in request.GET),
to_field=to_field,
)
context.update(extra_context or {})
return self.render_delete_form(request, context)
def history_view(self, request, object_id, extra_context=None):
"The 'history' admin view for this model."
from django.contrib.admin.models import LogEntry
# First check if the user can see this history.
model = self.model
obj = self.get_object(request, unquote(object_id))
if obj is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {
'name': force_text(model._meta.verbose_name),
'key': escape(object_id),
})
if not self.has_change_permission(request, obj):
raise PermissionDenied
# Then get the history for this object.
opts = model._meta
app_label = opts.app_label
action_list = LogEntry.objects.filter(
object_id=unquote(object_id),
content_type=get_content_type_for_model(model)
).select_related().order_by('action_time')
context = dict(
self.admin_site.each_context(request),
title=_('Change history: %s') % force_text(obj),
action_list=action_list,
module_name=capfirst(force_text(opts.verbose_name_plural)),
object=obj,
opts=opts,
preserved_filters=self.get_preserved_filters(request),
)
context.update(extra_context or {})
request.current_app = self.admin_site.name
return TemplateResponse(request, self.object_history_template or [
"admin/%s/%s/object_history.html" % (app_label, opts.model_name),
"admin/%s/object_history.html" % app_label,
"admin/object_history.html"
], context)
def _create_formsets(self, request, obj, change):
"Helper function to generate formsets for add/change_view."
formsets = []
inline_instances = []
prefixes = {}
get_formsets_args = [request]
if change:
get_formsets_args.append(obj)
for FormSet, inline in self.get_formsets_with_inlines(*get_formsets_args):
prefix = FormSet.get_default_prefix()
prefixes[prefix] = prefixes.get(prefix, 0) + 1
if prefixes[prefix] != 1 or not prefix:
prefix = "%s-%s" % (prefix, prefixes[prefix])
formset_params = {
'instance': obj,
'prefix': prefix,
'queryset': inline.get_queryset(request),
}
if request.method == 'POST':
formset_params.update({
'data': request.POST,
'files': request.FILES,
'save_as_new': '_saveasnew' in request.POST
})
formsets.append(FormSet(**formset_params))
inline_instances.append(inline)
return formsets, inline_instances
class InlineModelAdmin(BaseModelAdmin):
"""
Options for inline editing of ``model`` instances.
Provide ``fk_name`` to specify the attribute name of the ``ForeignKey``
from ``model`` to its parent. This is required if ``model`` has more than
one ``ForeignKey`` to its parent.
"""
model = None
fk_name = None
formset = BaseInlineFormSet
extra = 3
min_num = None
max_num = None
template = None
verbose_name = None
verbose_name_plural = None
can_delete = True
show_change_link = False
checks_class = InlineModelAdminChecks
classes = None
def __init__(self, parent_model, admin_site):
self.admin_site = admin_site
self.parent_model = parent_model
self.opts = self.model._meta
self.has_registered_model = admin_site.is_registered(self.model)
super(InlineModelAdmin, self).__init__()
if self.verbose_name is None:
self.verbose_name = self.model._meta.verbose_name
if self.verbose_name_plural is None:
self.verbose_name_plural = self.model._meta.verbose_name_plural
@property
def media(self):
extra = '' if settings.DEBUG else '.min'
js = ['vendor/jquery/jquery%s.js' % extra, 'jquery.init.js',
'inlines%s.js' % extra]
if self.filter_vertical or self.filter_horizontal:
js.extend(['SelectBox.js', 'SelectFilter2.js'])
if self.classes and 'collapse' in self.classes:
js.append('collapse%s.js' % extra)
return forms.Media(js=['admin/js/%s' % url for url in js])
def get_extra(self, request, obj=None, **kwargs):
"""Hook for customizing the number of extra inline forms."""
return self.extra
def get_min_num(self, request, obj=None, **kwargs):
"""Hook for customizing the min number of inline forms."""
return self.min_num
def get_max_num(self, request, obj=None, **kwargs):
"""Hook for customizing the max number of extra inline forms."""
return self.max_num
def get_formset(self, request, obj=None, **kwargs):
"""Returns a BaseInlineFormSet class for use in admin add/change views."""
if 'fields' in kwargs:
fields = kwargs.pop('fields')
else:
fields = flatten_fieldsets(self.get_fieldsets(request, obj))
if self.exclude is None:
exclude = []
else:
exclude = list(self.exclude)
exclude.extend(self.get_readonly_fields(request, obj))
if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
# Take the custom ModelForm's Meta.exclude into account only if the
# InlineModelAdmin doesn't define its own.
exclude.extend(self.form._meta.exclude)
# If exclude is an empty list we use None, since that's the actual
# default.
exclude = exclude or None
can_delete = self.can_delete and self.has_delete_permission(request, obj)
defaults = {
"form": self.form,
"formset": self.formset,
"fk_name": self.fk_name,
"fields": fields,
"exclude": exclude,
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
"extra": self.get_extra(request, obj, **kwargs),
"min_num": self.get_min_num(request, obj, **kwargs),
"max_num": self.get_max_num(request, obj, **kwargs),
"can_delete": can_delete,
}
defaults.update(kwargs)
base_model_form = defaults['form']
class DeleteProtectedModelForm(base_model_form):
def hand_clean_DELETE(self):
"""
We don't validate the 'DELETE' field itself because on
templates it's not rendered using the field information, but
just using a generic "deletion_field" of the InlineModelAdmin.
"""
if self.cleaned_data.get(DELETION_FIELD_NAME, False):
using = router.db_for_write(self._meta.model)
collector = NestedObjects(using=using)
if self.instance.pk is None:
return
collector.collect([self.instance])
if collector.protected:
objs = []
for p in collector.protected:
objs.append(
# Translators: Model verbose name and instance representation,
# suitable to be an item in a list.
_('%(class_name)s %(instance)s') % {
'class_name': p._meta.verbose_name,
'instance': p}
)
params = {'class_name': self._meta.model._meta.verbose_name,
'instance': self.instance,
'related_objects': get_text_list(objs, _('and'))}
msg = _("Deleting %(class_name)s %(instance)s would require "
"deleting the following protected related objects: "
"%(related_objects)s")
raise ValidationError(msg, code='deleting_protected', params=params)
def is_valid(self):
result = super(DeleteProtectedModelForm, self).is_valid()
self.hand_clean_DELETE()
return result
defaults['form'] = DeleteProtectedModelForm
if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
defaults['fields'] = forms.ALL_FIELDS
return inlineformset_factory(self.parent_model, self.model, **defaults)
def get_fields(self, request, obj=None):
if self.fields:
return self.fields
form = self.get_formset(request, obj, fields=None).form
return list(form.base_fields) + list(self.get_readonly_fields(request, obj))
def get_queryset(self, request):
queryset = super(InlineModelAdmin, self).get_queryset(request)
if not self.has_change_permission(request):
queryset = queryset.none()
return queryset
def has_add_permission(self, request):
if self.opts.auto_created:
# We're checking the rights to an auto-created intermediate model,
# which doesn't have its own individual permissions. The user needs
# to have the change permission for the related model in order to
# be able to do anything with the intermediate model.
return self.has_change_permission(request)
return super(InlineModelAdmin, self).has_add_permission(request)
def has_change_permission(self, request, obj=None):
opts = self.opts
if opts.auto_created:
# The model was auto-created as intermediary for a
# ManyToMany-relationship, find the target model
for field in opts.fields:
if field.remote_field and field.remote_field.model != self.parent_model:
opts = field.remote_field.model._meta
break
codename = get_permission_codename('change', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_delete_permission(self, request, obj=None):
if self.opts.auto_created:
# We're checking the rights to an auto-created intermediate model,
# which doesn't have its own individual permissions. The user needs
# to have the change permission for the related model in order to
# be able to do anything with the intermediate model.
return self.has_change_permission(request, obj)
return super(InlineModelAdmin, self).has_delete_permission(request, obj)
class StackedInline(InlineModelAdmin):
template = 'admin/edit_inline/stacked.html'
class TabularInline(InlineModelAdmin):
template = 'admin/edit_inline/tabular.html'
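# A short usage sketch tying the inline machinery above together; the
# models (Author, Book) and admin class are hypothetical:
#
#   class BookInline(TabularInline):
#       model = Book
#       extra = 1
#
#   class AuthorAdmin(ModelAdmin):
#       inlines = [BookInline]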
| gpl-3.0 | 4,290,083,572,786,878,000 | 40.564436 | 119 | 0.587475 | false |
kamyu104/LeetCode | Python/design-phone-directory.py | 2 | 1688 | # init: Time: O(n), Space: O(n)
# get: Time: O(1), Space: O(1)
# check: Time: O(1), Space: O(1)
# release: Time: O(1), Space: O(1)
class PhoneDirectory(object):
def __init__(self, maxNumbers):
"""
Initialize your data structure here
@param maxNumbers - The maximum numbers that can be stored in the phone directory.
:type maxNumbers: int
"""
self.__curr = 0
self.__numbers = range(maxNumbers)
self.__used = [False] * maxNumbers
def get(self):
"""
Provide a number which is not assigned to anyone.
@return - Return an available number. Return -1 if none is available.
:rtype: int
"""
if self.__curr == len(self.__numbers):
return -1
number = self.__numbers[self.__curr]
self.__curr += 1
self.__used[number] = True
return number
def check(self, number):
"""
Check if a number is available or not.
:type number: int
:rtype: bool
"""
return 0 <= number < len(self.__numbers) and \
not self.__used[number]
def release(self, number):
"""
Recycle or release a number.
:type number: int
:rtype: void
"""
if not 0 <= number < len(self.__numbers) or \
not self.__used[number]:
return
self.__used[number] = False
self.__curr -= 1
self.__numbers[self.__curr] = number
# Your PhoneDirectory object will be instantiated and called as such:
# obj = PhoneDirectory(maxNumbers)
# param_1 = obj.get()
# param_2 = obj.check(number)
# obj.release(number)
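# Worked trace of the slot-recycling scheme above, with maxNumbers=2
# (illustrative, not part of the original solution):
#   d = PhoneDirectory(2)
#   d.get()       # -> 0 (__curr moves to 1)
#   d.get()       # -> 1 (__curr moves to 2)
#   d.get()       # -> -1 (pool exhausted)
#   d.release(0)  # __curr back to 1, __numbers[1] = 0
#   d.check(0)    # -> True
#   d.get()       # -> 0 again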
| mit | -3,735,760,812,938,851,300 | 26.672131 | 90 | 0.540877 | false |
qrealka/skia-hc | third_party/externals/shaderc2/utils/remove-file-by-suffix.py | 16 | 1028 | #!/usr/bin/env python
# Copyright 2015 The Shaderc Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Removes all files with a certain suffix in a given path recursively.
# Arguments: path suffix
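# Example invocation (path and suffix are illustrative):
#   python remove-file-by-suffix.py ./build .tmp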
import os
import sys
def main():
    # Guard against missing arguments instead of crashing with IndexError.
    if len(sys.argv) != 3:
        sys.exit('usage: %s <path> <suffix>' % sys.argv[0])
    path = sys.argv[1]
    suffix = sys.argv[2]
    for root, _, filenames in os.walk(path):
        for filename in filenames:
            if filename.endswith(suffix):
                os.remove(os.path.join(root, filename))
if __name__ == '__main__':
main()
| apache-2.0 | -8,016,136,986,149,278,000 | 28.371429 | 74 | 0.701362 | false |
trevor/calendarserver | txdav/carddav/datastore/query/test/test_filter.py | 1 | 3060 | ##
# Copyright (c) 2011-2014 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from twext.enterprise.dal.syntax import SQLFragment
from twisted.trial.unittest import TestCase
from twistedcaldav import carddavxml
from txdav.carddav.datastore.query.filter import Filter, FilterBase
from txdav.common.datastore.sql_tables import schema
from txdav.carddav.datastore.query.builder import buildExpression
from txdav.common.datastore.query.generator import SQLQueryGenerator
from txdav.carddav.datastore.index_file import sqladdressbookquery
class TestQueryFilter(TestCase):
_objectSchema = schema.ADDRESSBOOK_OBJECT
_queryFields = {
"UID": _objectSchema.UID
}
def test_query(self):
"""
Basic query test - single term.
Only UID can be queried via sql.
"""
filter = carddavxml.Filter(
*[carddavxml.PropertyFilter(
carddavxml.TextMatch.fromString("Example"),
**{"name":"UID"}
)]
)
filter = Filter(filter)
expression = buildExpression(filter, self._queryFields)
sql = SQLQueryGenerator(expression, self, 1234)
select, args = sql.generate()
self.assertEqual(select.toSQL(), SQLFragment("select distinct RESOURCE_NAME, VCARD_UID from ADDRESSBOOK_OBJECT where ADDRESSBOOK_HOME_RESOURCE_ID = ? and VCARD_UID like (? || (? || ?))", [1234, "%", "Example", "%"]))
self.assertEqual(args, {})
def test_sqllite_query(self):
"""
Basic query test - single term.
Only UID can be queried via sql.
"""
filter = carddavxml.Filter(
*[carddavxml.PropertyFilter(
carddavxml.TextMatch.fromString("Example"),
**{"name":"UID"}
)]
)
filter = Filter(filter)
sql, args = sqladdressbookquery(filter, 1234)
self.assertEqual(sql, " from RESOURCE where RESOURCE.UID GLOB :1")
self.assertEqual(args, ["*Example*"])
class TestQueryFilterSerialize(TestCase):
def test_query(self):
"""
Basic query test - no time range
"""
filter = carddavxml.Filter(
*[carddavxml.PropertyFilter(
carddavxml.TextMatch.fromString("Example"),
**{"name":"UID"}
)]
)
filter = Filter(filter)
j = filter.serialize()
self.assertEqual(j["type"], "Filter")
f = FilterBase.deserialize(j)
self.assertTrue(isinstance(f, Filter))
| apache-2.0 | 5,518,206,831,358,400,000 | 30.875 | 224 | 0.642157 | false |
Alexoner/web-crawlers | scripts/12306/12306.py | 1 | 63121 | #!/usr/bin/env python2
# -*- encoding: utf-8 -*-
# standard library
import argparse
import ConfigParser
import datetime
import sys
import random
import re
import time
import urllib
import smtplib  # used by TicketOrder.sendMailNotification()
from email.mime.text import MIMEText  # used by TicketOrder.sendMailNotification()
# third party library
import requests
# customized libarary
from utils import hasKeys,dumpObj
# set default encoding to UTF-8
reload(sys)
sys.setdefaultencoding('utf-8')
# global variables
RET_OK = 0
RET_ERR = -1
MAX_TRIES = 2
MAX_DAYS = 60
stations = []
cards = {
'1': u'二代身份证',
'2': u'一代身份证',
'C': u'港澳通行证',
'G': u'台湾通行证',
'B': u'护照'
}
Tickets = {
'1': u'成人票',
'2': u'儿童票',
'3': u'学生票',
'4': u'残军票'
}
seatMaps = [
('1', u'硬座'), # 硬座/无座
('3', u'硬卧'),
('4', u'软卧'),
('7', u'一等软座'),
('8', u'二等软座'),
('9', u'商务座'),
('M', u'一等座'),
('O', u'二等座'),
('B', u'混编硬座'),
('P', u'特等座')
]
# global functions
def printDelimiter():
print('-' * 64)
def getTime():
# convert a time tuple to a string according to a format
# specification
return time.strftime('%Y-%m-%d %X', time.localtime()) # 2014-01-01 12:00:00
def date2UTC(d):
# Convert '2014-01-01' to 'Wed Jan 01 00:00:00 UTC+0800 2014'
# parse a string to a time tuple according to a format specification
t = time.strptime(d, '%Y-%m-%d')
asc = time.asctime(t) # 'Wed Jan 01 00:00:00 2014'
# 'Wed Jan 01 00:00:00 UTC+0800 2014'
return (asc[0:-4] + 'UTC+0800 ' + asc[-4:])
def getCardType(key):
return cards[key] if key in cards else u'未知证件类型'
def getTicketType(key):
return Tickets[key] if key in Tickets else u'未知票种'
def getSeatType(key):
d = dict(seatMaps)
return d[key] if key in d else u'未知席别'
def selectSeatType():
    key = '1'  # default: hard seat
while True:
print(u'请选择席别编码(即左边第一个英文字母):')
for xb in seatMaps:
print(u'%s: %s' % (xb[0], xb[1]))
key = raw_input().upper()
d = dict(seatMaps)
if key in d:
return key
else:
print(u'无效的席别类型!')
def checkDate(date):
m = re.match(r'^\d{4}-\d{2}-\d{2}$', date) # 2014-01-01
if m:
today = datetime.datetime.now()
fmt = '%Y-%m-%d'
today = datetime.datetime.strptime(today.strftime(fmt), fmt)
train_date = datetime.datetime.strptime(m.group(0), fmt)
delta = train_date - today
if delta.days < 0:
print(u'乘车日期%s无效, 只能预订%s以后的车票' % (
train_date.strftime(fmt),
today.strftime(fmt)))
return False
else:
return True
else:
return False
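# e.g. checkDate('2099-01-01') -> True; any past date -> False (with a
# message); checkDate('2014/01/01') -> False because the regex only accepts
# the YYYY-MM-DD form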
def selectDate():
fmt = '%Y-%m-%d'
week_days = [u'星期一', u'星期二', u'星期三', u'星期四', u'星期五', u'星期六', u'星期天']
now = datetime.datetime.now()
available_date = [(now + datetime.timedelta(days=i)) for i in xrange(MAX_DAYS)]
for i in xrange(0, MAX_DAYS, 2):
print(u'第%2d天: %s(%s)' % (
i + 1, available_date[i].strftime(fmt), week_days[available_date[i].weekday()])),
if i + 1 < MAX_DAYS:
print(u'\t\t第%2d天: %s(%s)' % (
i + 2, available_date[i + 1].strftime(fmt), week_days[available_date[i + 1].weekday()]))
else:
print('')
while True:
print(u'请选择乘车日期(1~%d)' % (MAX_DAYS))
index = raw_input()
if not index.isdigit():
print(u'只能输入数字序号, 请重新选择乘车日期(1~%d)' % (MAX_DAYS))
continue
index = int(index)
if index < 1 or index > MAX_DAYS:
print(u'输入的序号无效, 请重新选择乘车日期(1~%d)' % (MAX_DAYS))
continue
index -= 1
train_date = available_date[index].strftime(fmt)
return train_date
def getStationByName(name):
matched_stations = []
for station in stations:
if (
station['name'] == name
or station['abbr'].find(name.lower()) != -1
or station['pinyin'].find(name.lower()) != -1
or station['pyabbr'].find(name.lower()) != -1):
matched_stations.append(station)
count = len(matched_stations)
if count <= 0:
return None
elif count == 1:
return matched_stations[0]
else:
for i in xrange(0, count):
print(u'%d:\t%s' % (i + 1, matched_stations[i]['name']))
print(u'请选择站点(1~%d)' % (count))
index = raw_input()
if not index.isdigit():
print(u'只能输入数字序号(1~%d)' % (count))
return None
index = int(index)
if index < 1 or index > count:
print(u'输入的序号无效(1~%d)' % (count))
return None
else:
return matched_stations[index - 1]
def inputStation():
while True:
print(u'支持中文, 拼音和拼音缩写(如: 北京,beijing,bj)')
name = raw_input().decode('gb2312', 'ignore')
station = getStationByName(name)
if station:
return station
else:
print(u'站点错误, 没有站点"%s", 请重新输入.' % (name))
def selectTrain(trains):
trains_num = len(trains)
index = 0
    while True:  # loop until a valid, bookable train is chosen
index = raw_input()
if not index.isdigit():
print(u'只能输入数字序号,请重新选择车次(1~%d)' % (trains_num))
continue
index = int(index)
if index < 1 or index > trains_num:
print(u'输入的序号无效,请重新选择车次(1~%d)' % (trains_num))
continue
if trains[index - 1]['queryLeftNewDTO']['canWebBuy'] != 'Y':
print(u'您选择的车次%s没票啦,请重新选择车次' % (
trains[index - 1]['queryLeftNewDTO']['station_train_code']))
continue
else:
break
return index
class TicketOrder(object):
'''docstring for TicketOrder'''
def __init__(
self,
username='',
password='',
train_date='',
from_city_name='',
to_city_name=''):
super(TicketOrder, self).__init__()
        self.username = username  # account name
        self.password = password  # password
        self.train_date = train_date  # travel date [2014-01-01]
        today = datetime.datetime.now()
        self.back_train_date = today.strftime('%Y-%m-%d')  # return date [2014-01-01]
        self.tour_flag = 'dc'  # one-way 'dc' / round trip 'wf'
        self.purpose_code = 'ADULT'  # adult ticket
        self.from_city_name = from_city_name  # what goes in the query page's 'origin' box
        self.to_city_name = to_city_name  # what goes in the query page's 'destination' box
        self.from_station_telecode = ''  # departure station telecode
        self.to_station_telecode = ''  # destination station telecode
        self.passengers = []  # passenger list, at most 5
        self.normal_passengers = []  # saved contact list
        self.trains = []  # train list, refreshed after each ticket query
        self.current_train_index = 0  # index of the currently selected train
        self.captcha = ''  # image captcha text
        self.orderId = ''  # order sequence number
        self.canWebBuy = False  # at least one train is bookable
self.notify = {
'mail_enable': 0,
'mail_username': '',
'mail_password': '',
'mail_server': '',
'mail_to': [],
'dates': [],
'trains': [],
'xb': [],
'focus': {}
}
def initSession(self):
self.session = requests.Session()
self.session.headers = {
'Accept': 'application/x-ms-application, image/jpeg, application/xaml+xml, image/gif, image/pjpeg, application/x-ms-xbap, */*',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'zh-CN',
'User-Agent': 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C)',
'Referer': 'https://kyfw.12306.cn/otn/index/init',
'Host': 'kyfw.12306.cn',
'Connection': 'Keep-Alive'
}
def updateHeaders(self, url):
d = {
'https://kyfw.12306.cn/otn/resources/js/framework/station_name.js': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/'
},
'https://kyfw.12306.cn/otn/login/init': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/'
},
'https://kyfw.12306.cn/otn/passcodeNew/getPassCodeNew?module=login&rand=sjrand&': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/login/init'
},
'https://kyfw.12306.cn/otn/passcodeNew/getPassCodeNew?module=passenger&rand=randp&': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc'
},
'https://kyfw.12306.cn/otn/passcodeNew/checkRandCodeAnsyn': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/login/init',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/login/loginAysnSuggest': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/login/init',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/login/userLogin': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/login/init'
},
'https://kyfw.12306.cn/otn/index/init': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/login/init'
},
'https://kyfw.12306.cn/otn/leftTicket/init': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/index/init',
'Content-Type': 'application/x-www-form-urlencoded'
},
'https://kyfw.12306.cn/otn/leftTicket/log?': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/leftTicket/init',
'x-requested-with': 'XMLHttpRequest',
'Cache-Control': 'no-cache',
'If-Modified-Since': '0'
},
'https://kyfw.12306.cn/otn/leftTicket/query?': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/leftTicket/init',
'x-requested-with': 'XMLHttpRequest',
'Cache-Control': 'no-cache',
'If-Modified-Since': '0'
},
'https://kyfw.12306.cn/otn/leftTicket/queryT?': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/leftTicket/init',
'x-requested-with': 'XMLHttpRequest',
'Cache-Control': 'no-cache',
'If-Modified-Since': '0'
},
'https://kyfw.12306.cn/otn/login/checkUser': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/leftTicket/init',
'Cache-Control': 'no-cache',
'If-Modified-Since': '0',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/leftTicket/submitOrderRequest': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/confirmPassenger/initDc': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/leftTicket/init',
'Content-Type': 'application/x-www-form-urlencoded',
'Cache-Control': 'no-cache'
},
'https://kyfw.12306.cn/otn/confirmPassenger/getPassengerDTOs': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/confirmPassenger/checkOrderInfo': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/confirmPassenger/getQueueCount': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/confirmPassenger/confirmSingleForQueue': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/confirmPassenger/queryOrderWaitTime?': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc',
'x-requested-with': 'XMLHttpRequest'
},
'https://kyfw.12306.cn/otn/confirmPassenger/resultOrderForDcQueue': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn//payOrder/init?': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc',
'Cache-Control': 'no-cache',
'Content-Type': 'application/x-www-form-urlencoded'
},
'https://kyfw.12306.cn/otn/queryOrder/initNoComplete': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn//payOrder/init?random=1417862054369'
},
'https://kyfw.12306.cn/otn/queryOrder/queryTicketOrderNoComplete': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/queryOrder/initNoComplete',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
}
}
l = [
'https://kyfw.12306.cn/otn/passcodeNew/getPassCodeNew?module=login&rand=sjrand&',
'https://kyfw.12306.cn/otn/passcodeNew/getPassCodeNew?module=passenger&rand=randp&',
'https://kyfw.12306.cn/otn/leftTicket/log?',
'https://kyfw.12306.cn/otn/leftTicket/query?',
'https://kyfw.12306.cn/otn/leftTicket/queryT?',
'https://kyfw.12306.cn/otn/confirmPassenger/queryOrderWaitTime?',
'https://kyfw.12306.cn/otn//payOrder/init?'
]
for s in l:
if url.find(s) == 0:
url = s
if not url in d:
print(u'未知 url: %s' % url)
return RET_ERR
self.session.headers.update({'Referer': d[url]['Referer']})
keys = [
'Referer',
'Cache-Control',
'x-requested-with',
'Content-Type'
]
for key in keys:
if key in d[url]:
self.session.headers.update({key: d[url][key]})
else:
self.session.headers.update({key: None})
def get(self, url):
self.updateHeaders(url)
tries = 0
while tries < MAX_TRIES:
tries += 1
try:
r = self.session.get(url, verify=False, timeout=16)
except requests.exceptions.ConnectionError as e:
print('ConnectionError(%s): e=%s' % (url, e))
continue
except requests.exceptions.Timeout as e:
print('Timeout(%s): e=%s' % (url, e))
continue
except requests.exceptions.TooManyRedirects as e:
print('TooManyRedirects(%s): e=%s' % (url, e))
continue
except requests.exceptions.HTTPError as e:
print('HTTPError(%s): e=%s' % (url, e))
continue
except requests.exceptions.RequestException as e:
print('RequestException(%s): e=%s' % (url, e))
continue
except:
print('Unknown exception(%s)' % (url))
continue
if r.status_code != 200:
print('Request %s failed %d times, status_code=%d' % (
url,
tries,
r.status_code))
else:
return r
else:
return None
def post(self, url, payload):
self.updateHeaders(url)
if url == 'https://kyfw.12306.cn/otn/passcodeNew/checkRandCodeAnsyn':
if payload.find('REPEAT_SUBMIT_TOKEN') != -1:
self.session.headers.update({'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc'})
else:
self.session.headers.update({'Referer': 'https://kyfw.12306.cn/otn/login/init'})
tries = 0
while tries < MAX_TRIES:
tries += 1
try:
r = self.session.post(url, data=payload, verify=False, timeout=16)
except requests.exceptions.ConnectionError as e:
print('ConnectionError(%s): e=%s' % (url, e))
continue
except requests.exceptions.Timeout as e:
print('Timeout(%s): e=%s' % (url, e))
continue
except requests.exceptions.TooManyRedirects as e:
print('TooManyRedirects(%s): e=%s' % (url, e))
continue
except requests.exceptions.HTTPError as e:
print('HTTPError(%s): e=%s' % (url, e))
continue
except requests.exceptions.RequestException as e:
print('RequestException(%s): e=%s' % (url, e))
continue
except:
print('Unknown exception(%s)' % (url))
continue
if r.status_code != 200:
print('Request %s failed %d times, status_code=%d' % (
url,
tries,
r.status_code))
else:
return r
else:
return None
def getCaptcha(self, url):
self.updateHeaders(url)
r = self.session.get(url, verify=False, stream=True, timeout=16)
with open('captcha.png', 'wb') as fd:
for chunk in r.iter_content():
fd.write(chunk)
print(u'请输入4位图片验证码(回车刷新验证码):')
captcha = raw_input()
if len(captcha) == 4:
return captcha
elif len(captcha) != 0:
print(u'%s是无效的图片验证码, 必须是4位' % (captcha))
return None
else:
            return 1  # refresh requested
def initStation(self):
url = 'https://kyfw.12306.cn/otn/resources/js/framework/station_name.js'
r = self.get(url)
if not r:
print(u'站点数据库初始化失败, 请求异常')
return None
data = r.text
# tuples are split by '@'
station_list = data.split('@')
if len(station_list) < 1:
print(u'站点数据库初始化失败, 数据异常')
return None
station_list = station_list[1:]
for station in station_list:
# items in each tuple are separated by '|'
items = station.split('|') # bji|北京|BJP|beijing|bj|2
if len(items) < 5:
print(u'忽略无效站点: %s' % (items))
continue
stations.append({'abbr': items[0],
'name': items[1],
'telecode': items[2],
'pinyin': items[3],
'pyabbr': items[4]})
return stations
def readConfig(self, config_file='config.ini'):
cp = ConfigParser.ConfigParser()
try:
cp.readfp(open(config_file, 'r'))
except IOError as e:
print(u'打开配置文件"%s"失败啦, 请先创建或者拷贝一份配置文件config.ini' % (config_file))
raw_input('Press any key to continue')
sys.exit()
self.username = cp.get('login', 'username')
self.password = cp.get('login', 'password')
self.train_date = cp.get('train', 'date')
self.from_city_name = cp.get('train', 'from')
self.to_city_name = cp.get('train', 'to')
self.notify['mail_enable'] = int(cp.get('notify', 'mail_enable'))
self.notify['mail_username'] = cp.get('notify', 'mail_username')
self.notify['mail_password'] = cp.get('notify', 'mail_password')
self.notify['mail_server'] = cp.get('notify', 'mail_server')
self.notify['mail_to'] = cp.get('notify', 'mail_to').split(',')
self.notify['dates'] = cp.get('notify', 'dates').split(',')
self.notify['trains'] = cp.get('notify', 'trains').split(',')
self.notify['xb'] = cp.get('notify', 'xb').split(',')
for t in self.notify['trains']:
self.notify['focus'][t] = self.notify['xb']
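        # e.g. with trains = G101,D3 and xb = yw,wz in config.ini this yields
        # focus = {'G101': ['yw', 'wz'], 'D3': ['yw', 'wz']}; the special value
        # trains = all watches every bookable train (see printTrains below)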
        # validate the departure station
station = getStationByName(self.from_city_name)
if not station:
print(u'出发站错误, 请重新输入')
station = inputStation()
self.from_city_name = station['name']
self.from_station_telecode = station['telecode']
        # validate the destination station
station = getStationByName(self.to_city_name)
if not station:
print(u'目的站错误,请重新输入')
station = inputStation()
self.to_city_name = station['name']
self.to_station_telecode = station['telecode']
        # validate the travel date
if not checkDate(self.train_date):
print(u'乘车日期无效, 请重新选择')
self.train_date = selectDate()
        # parse the passenger sections
self.passengers = []
index = 1
passenger_sections = ['passenger%d' % (i) for i in xrange(1, 6)]
sections = cp.sections()
for section in passenger_sections:
if section in sections:
passenger = {}
passenger['index'] = index
                passenger['name'] = cp.get(section, 'name')  # required
passenger['cardtype'] = cp.get(
section,
'cardtype') if cp.has_option(
section,
                    'cardtype') else '1'  # ID type: optional, defaults to '1' (2nd-generation ID card)
                passenger['id'] = cp.get(section, 'id')  # required
passenger['phone'] = cp.get(
section,
'phone') if cp.has_option(
section,
                    'phone') else '13800138000'  # mobile number
passenger['seattype'] = cp.get(
section,
'seattype') if cp.has_option(
section,
                    'seattype') else '1'  # seat class: optional, defaults to '1' (hard seat)
passenger['tickettype'] = cp.get(
section,
'tickettype') if cp.has_option(
section,
                    'tickettype') else '1'  # ticket type: optional, defaults to '1' (adult)
self.passengers.append(passenger)
index += 1
def printConfig(self):
printDelimiter()
print(u'订票信息:\n%s\t%s\t%s--->%s' % (
self.username,
self.train_date,
self.from_city_name,
self.to_city_name))
printDelimiter()
th = [u'序号', u'姓名', u'证件类型', u'证件号码', u'席别', u'票种']
print(u'%s\t%s\t%s\t%s\t%s\t%s' % (
th[0].ljust(2), th[1].ljust(4), th[2].ljust(5),
th[3].ljust(12), th[4].ljust(2), th[5].ljust(3)))
for p in self.passengers:
print(u'%s\t%s\t%s\t%s\t%s\t%s' % (
p['index'],
p['name'].decode('utf-8', 'ignore').ljust(4),
getCardType(p['cardtype']).ljust(5),
p['id'].ljust(20),
getSeatType(p['seattype']).ljust(2),
getTicketType(p['tickettype']).ljust(3)))
def checkRandCodeAnsyn(self, module):
d = {
            'login': {  # captcha on the login page
'rand': 'sjrand',
'referer': 'https://kyfw.12306.cn/otn/login/init'
},
            'passenger': {  # captcha on the order-confirmation page
'rand': 'randp',
'referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc'
}
}
if not module in d:
print(u'无效的 module: %s' % (module))
return RET_ERR
tries = 0
while tries < MAX_TRIES:
tries += 1
url = 'https://kyfw.12306.cn/otn/passcodeNew/getPassCodeNew?module=%s&rand=%s&' % (module, d[module]['rand'])
if tries > 1:
url = '%s%1.16f' % (url, random.random())
print(u'正在等待验证码...')
self.captcha = self.getCaptcha(url)
if not self.captcha:
continue
            if self.captcha == 1:  # a refresh does not count as an attempt
tries -= 1
continue
url = 'https://kyfw.12306.cn/otn/passcodeNew/checkRandCodeAnsyn'
parameters = [
('randCode', self.captcha),
('rand', d[module]['rand'])
]
if module == 'login':
parameters.append(('randCode_validate', ''))
else:
parameters.append(('_json_att', ''))
parameters.append(('REPEAT_SUBMIT_TOKEN', self.repeatSubmitToken))
payload = urllib.urlencode(parameters)
print(u'正在校验验证码...')
r = self.post(url, payload)
if not r:
print(u'校验验证码异常')
continue
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"data":{"result":"1","msg":"randCodeRight"},"messages":[],"validateMessages":{}}
obj = r.json()
if (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['result', 'msg'])
and (obj['data']['result'] == '1')):
print(u'校验验证码成功')
return RET_OK
else:
print(u'校验验证码失败')
dumpObj(obj)
continue
else:
return RET_ERR
def login(self):
url = 'https://kyfw.12306.cn/otn/login/init'
r = self.get(url)
if not r:
print(u'登录失败, 请求异常')
return RET_ERR
if self.session.cookies:
cookies = requests.utils.dict_from_cookiejar(self.session.cookies)
if cookies['JSESSIONID']:
self.jsessionid = cookies['JSESSIONID']
if self.checkRandCodeAnsyn('login') == RET_ERR:
return RET_ERR
print(u'正在登录...')
url = 'https://kyfw.12306.cn/otn/login/loginAysnSuggest'
parameters = [
('loginUserDTO.user_name', self.username),
('userDTO.password', self.password),
('randCode', self.captcha),
('randCode_validate', ''),
#('ODg3NzQ0', 'OTIyNmFhNmQwNmI5ZmQ2OA%3D%3D'),
            ('NDU4MjQ0', 'YzM1ZjE5NTBiZmE4YzBiNw=='),  # the trailing comma was missing here, which made the next line a (failing) call on this tuple
('myversion', 'undefined')
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'登录失败, 请求异常')
return RET_ERR
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"data":{"loginCheck":"Y"},"messages":[],"validateMessages":{}}
obj = r.json()
if (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['loginCheck'])
and (obj['data']['loginCheck'] == 'Y')):
print(u'登陆成功^_^')
url = 'https://kyfw.12306.cn/otn/login/userLogin'
parameters = [
('_json_att', ''),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
return RET_OK
else:
print(u'登陆失败啦!重新登陆...')
dumpObj(obj)
return RET_ERR
def getPassengerDTOs(self):
url = 'https://kyfw.12306.cn/otn/confirmPassenger/getPassengerDTOs'
parameters = [
('', ''),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'获取乘客信息异常')
return RET_ERR
obj = r.json()
if (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['normal_passengers'])
and obj['data']['normal_passengers']):
self.normal_passengers = obj['data']['normal_passengers']
return RET_OK
else:
print(u'获取乘客信息失败')
if hasKeys(obj, ['messages']):
dumpObj(obj['messages'])
if hasKeys(obj, ['data']) and hasKeys(obj['data'], ['exMsg']):
dumpObj(obj['data']['exMsg'])
return RET_ERR
def selectPassengers(self, prompt):
if prompt == 1:
print(u'是否重新选择乘客?(如需选择请输入y或者yes, 默认使用配置文件提供的乘客信息)')
act = raw_input()
act = act.lower()
if act != 'y' and act != 'yes':
self.printConfig()
return RET_OK
if not (self.normal_passengers and len(self.normal_passengers)):
tries = 0
while tries < MAX_TRIES:
tries += 1
if self.getPassengerDTOs() == RET_OK:
break
else:
print(u'获取乘客信息失败次数太多, 使用配置文件提供的乘客信息')
return RET_ERR
num = len(self.normal_passengers)
for i in xrange(0, num):
p = self.normal_passengers[i]
print(u'%d.%s \t' % (i + 1, p['passenger_name'])),
if (i + 1) % 5 == 0:
print('')
while True:
print(u'\n请选择乘车人(最多选择5个, 以逗号隔开, 如:1,2,3,4,5, 直接回车不选择, 使用配置文件中的乘客信息)')
buf = raw_input()
if not buf:
return RET_ERR
pattern = re.compile(r'^[0-9,]*\d$') # 只能输入数字和逗号, 并且必须以数字结束
if pattern.match(buf):
break
else:
print(u'输入格式错误, 只能输入数字和逗号, 并且必须以数字结束, 如:1,2,3,4,5')
ids = buf.split(',')
if not (ids and 1 <= len(ids) <= 5):
return RET_ERR
seattype = selectSeatType()
ids = [int(id) for id in ids]
del self.passengers[:]
for id in ids:
if id < 1 or id > num:
print(u'不存在的联系人, 忽略')
else:
passenger = {}
id = id - 1
passenger['index'] = len(self.passengers) + 1
passenger['name'] = self.normal_passengers[id]['passenger_name']
passenger['cardtype'] = self.normal_passengers[id]['passenger_id_type_code']
passenger['id'] = self.normal_passengers[id]['passenger_id_no']
passenger['phone'] = self.normal_passengers[id]['mobile_no']
passenger['seattype'] = seattype
passenger['tickettype'] = self.normal_passengers[id]['passenger_type']
self.passengers.append(passenger)
self.printConfig()
return RET_OK
def queryTickets(self):
self.canWebBuy = False
url = 'https://kyfw.12306.cn/otn/leftTicket/init'
parameters = [
('_json_att', ''),
('leftTicketDTO.from_station_name', self.from_city_name),
('leftTicketDTO.to_station_name', self.to_city_name),
('leftTicketDTO.from_station', self.from_station_telecode),
('leftTicketDTO.to_station', self.to_station_telecode),
('leftTicketDTO.train_date', self.train_date),
('back_train_date', self.back_train_date),
('purpose_codes', self.purpose_code),
('pre_step_flag', 'index')
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'查询车票异常')
url = 'https://kyfw.12306.cn/otn/leftTicket/log?'
parameters = [
('leftTicketDTO.train_date', self.train_date),
('leftTicketDTO.from_station', self.from_station_telecode),
('leftTicketDTO.to_station', self.to_station_telecode),
('purpose_codes', self.purpose_code),
]
url += urllib.urlencode(parameters)
r = self.get(url)
if not r:
print(u'查询车票异常')
url = 'https://kyfw.12306.cn/otn/leftTicket/queryT?'
parameters = [
('leftTicketDTO.train_date', self.train_date),
('leftTicketDTO.from_station', self.from_station_telecode),
('leftTicketDTO.to_station', self.to_station_telecode),
('purpose_codes', self.purpose_code),
]
url += urllib.urlencode(parameters)
r = self.get(url)
if not r:
print(u'查询车票异常')
return RET_ERR
obj = r.json()
if (hasKeys(obj, ['status', 'httpstatus', 'data']) and len(obj['data'])):
self.trains = obj['data']
return RET_OK
else:
print(u'查询车票失败')
if hasKeys(obj, ['messages']):
dumpObj(obj['messages'])
return RET_ERR
def sendMailNotification(self):
print(u'正在发送邮件提醒...')
me = u'订票提醒<%s>' % (self.notify['mail_username'])
msg = MIMEText(
self.notify['mail_content'],
_subtype='plain',
_charset='gb2312')
msg['Subject'] = u'余票信息'
msg['From'] = me
msg['To'] = ';'.join(self.notify['mail_to'])
try:
server = smtplib.SMTP()
server.connect(self.notify['mail_server'])
server.login(
self.notify['mail_username'],
self.notify['mail_password'])
server.sendmail(me, self.notify['mail_to'], msg.as_string())
server.close()
print(u'发送邮件提醒成功')
return True
except Exception as e:
print(u'发送邮件提醒失败, %s' % str(e))
return False
def printTrains(self):
printDelimiter()
print(u'余票查询结果如下:')
print(u"%s\t%s--->%s\n'有':票源充足 '无':票已售完 '*':未到起售时间 '--':无此席别" % (
self.train_date,
self.from_city_name,
self.to_city_name))
printDelimiter()
print(u'序号/车次\t乘车站\t目的站\t一等\t二等\t软卧\t硬卧\t硬座\t无座')
seatTypeCode = {
'swz': '商务座',
'tz': '特等座',
'zy': '一等座',
'ze': '二等座',
'gr': '高级软卧',
'rw': '软卧',
'yw': '硬卧',
'rz': '软座',
'yz': '硬座',
'wz': '无座',
'qt': '其它',
}
        # TODO remaining counts and prices can also be fetched from
        # https://kyfw.12306.cn/otn/leftTicket/queryTicketPrice?train_no=770000K77505&from_station_no=09&to_station_no=13&seat_types=1431&train_date=2014-01-01
        # yp_info=4022300000301440004610078033421007800536 decodes as 10-char chunks:
        # 4022300000 -> soft sleeper, 0 left
        # 3014400046 -> hard sleeper, 46 left
        # 1007803342 -> no-seat, 342 left
        # 1007800536 -> hard seat, 536 left
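        # A standalone sketch of that decoding (hypothetical helper; the inline
        # loop below does the same thing):
        #
        #   def decode_yp_info(yp_info):
        #       for i in xrange(0, len(yp_info), 10):
        #           chunk = yp_info[i:i + 10]
        #           # (seat-type code, price, tickets left)
        #           yield chunk[0], int(chunk[1:5]), int(chunk[-3:])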
index = 1
self.notify['mail_content'] = ''
for train in self.trains:
t = train['queryLeftNewDTO']
status = '售完' if t['canWebBuy'] == 'N' else '预定'
i = 0
            ypInfo = {
                'wz': {  # no seat
                    'price': 0,
                    'left': 0
                },
                'yz': {  # hard seat
                    'price': 0,
                    'left': 0
                },
                'yw': {  # hard sleeper
                    'price': 0,
                    'left': 0
                },
                'rw': {  # soft sleeper
                    'price': 0,
                    'left': 0
                },
            }
            # parse prices and remaining-ticket counts
while i < (len(t['yp_info']) / 10):
tmp = t['yp_info'][i * 10:(i + 1) * 10]
price = int(tmp[1:5])
left = int(tmp[-3:])
if tmp[0] == '1':
if tmp[6] == '3':
ypInfo['wz']['price'] = price
ypInfo['wz']['left'] = left
else:
ypInfo['yz']['price'] = price
ypInfo['yz']['left'] = left
elif tmp[0] == '3':
ypInfo['yw']['price'] = price
ypInfo['yw']['left'] = left
elif tmp[0] == '4':
ypInfo['rw']['price'] = price
ypInfo['rw']['left'] = left
i = i + 1
yz_price = u'硬座%s' % (
ypInfo['yz']['price']) if ypInfo['yz']['price'] else ''
yw_price = u'硬卧%s' % (
ypInfo['yw']['price']) if ypInfo['yw']['price'] else ''
print(u'(%d) %s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s' % (
index,
t['station_train_code'],
                t['from_station_name'][0:3],  # keep at most 3 Chinese characters
                t['to_station_name'][0:3],  # keep at most 3 Chinese characters
t['zy_num'],
t['ze_num'],
ypInfo['rw']['left'] if ypInfo['rw']['left'] else t['rw_num'],
ypInfo['yw']['left'] if ypInfo['yw']['left'] else t['yw_num'],
#t['rz_num'],
ypInfo['yz']['left'] if ypInfo['yz']['left'] else t['yz_num'],
ypInfo['wz']['left'] if ypInfo['wz']['left'] else t['wz_num'],
#yz_price,
#yw_price
))
if t['canWebBuy'] == 'Y':
self.canWebBuy = True
index += 1
if self.notify['mail_enable'] == 1 and t['canWebBuy'] == 'Y':
msg = ''
prefix = u'[%s]车次%s[%s/%s->%s/%s, 历时%s]现在有票啦\n' % (
t['start_train_date'],
t['station_train_code'],
t['from_station_name'],
t['start_time'],
t['to_station_name'],
t['arrive_time'],
t['lishi'])
            if 'all' in self.notify['focus']:  # watching any train
                if self.notify['focus']['all'][0] == 'all':  # any seat class
                    msg = prefix
                else:  # specific seat classes
for seat in self.notify['focus']['all']:
if seat in ypInfo and ypInfo[seat]['left']:
msg += u'座位类型:%s, 剩余车票数量:%s, 票价:%s \n' % (
seat if seat not in seatTypeCode else seatTypeCode[seat],
ypInfo[seat]['left'],
ypInfo[seat]['price'])
if msg:
msg = prefix + msg + u'\n'
            elif t['station_train_code'] in self.notify['focus']:  # watching this specific train
                # any seat class
                if self.notify['focus'][t['station_train_code']][0] == 'all':
                    msg = prefix
                else:  # specific seat classes
for seat in self.notify['focus'][t['station_train_code']]:
if seat in ypInfo and ypInfo[seat]['left']:
msg += u'座位类型:%s, 剩余车票数量:%s, 票价:%s \n' % (
seat if seat not in seatTypeCode else seatTypeCode[seat],
ypInfo[seat]['left'],
ypInfo[seat]['price'])
if msg:
msg = prefix + msg + u'\n'
self.notify['mail_content'] += msg
printDelimiter()
if self.notify['mail_enable'] == 1:
if self.notify['mail_content']:
self.sendMailNotification()
return RET_OK
else:
length = len(self.notify['dates'])
if length > 1:
self.train_date = self.notify['dates'][
random.randint(
0,
length -
1)]
return RET_ERR
else:
return RET_OK
    # return value: -1 -> query again / 0 -> quit / 1..len -> selected train number
def selectAction(self):
ret = -1
self.current_train_index = 0
trains_num = len(self.trains)
print(u'您可以选择:')
if self.canWebBuy:
print(u'1~%d.选择车次开始订票' % (trains_num))
print(u'p.更换乘车人')
print(u's.更改席别')
print(u'd.更改乘车日期')
print(u'f.更改出发站')
print(u't.更改目的站')
print(u'a.同时更改乘车日期,出发站和目的站')
print(u'u.查询未完成订单')
print(u'c.查看订票信息')
print(u'r.刷票模式')
print(u'n.普通模式')
print(u'q.退出')
print(u'刷新车票请直接回车')
printDelimiter()
select = raw_input()
select = select.lower()
if select.isdigit():
if not self.canWebBuy:
print(u'没有可预订的车次, 请刷新车票或者更改查询条件')
return -1
index = int(select)
if index < 1 or index > trains_num:
print(u'输入的序号无效,请重新选择车次(1~%d)' % (trains_num))
index = selectTrain(self.trains)
if self.trains[index - 1]['queryLeftNewDTO']['canWebBuy'] != 'Y':
print(u'您选择的车次%s没票啦,请重新选择车次' % (self.trains[index - 1]['queryLeftNewDTO']['station_train_code']))
index = selectTrain(self.trains)
ret = index
self.current_train_index = index - 1
elif select == 'p':
self.selectPassengers(0)
elif select == 's':
seattype = selectSeatType()
for p in self.passengers:
p['seattype'] = seattype
self.printConfig()
elif select == 'd':
self.train_date = selectDate()
elif select == 'f':
print(u'请输入出发站:')
station = inputStation()
self.from_city_name = station['name']
self.from_station_telecode = station['telecode']
elif select == 't':
print(u'请输入目的站:')
station = inputStation()
self.to_city_name = station['name']
self.to_station_telecode = station['telecode']
elif select == 'a':
self.train_date = selectDate()
print(u'请输入出发站:')
station = inputStation()
self.from_city_name = station['name']
self.from_station_telecode = station['telecode']
print(u'请输入目的站:')
station = inputStation()
self.to_city_name = station['name']
self.to_station_telecode = station['telecode']
elif select == 'u':
ret = self.queryTicketOrderNotComplete()
ret = self.selectAction()
elif select == 'c':
ret = self.printConfig()
ret = self.selectAction()
elif select == 'r':
self.notify['mail_enable'] = 1
ret = -1
elif select == 'n':
self.notify['mail_enable'] = 0
ret = -1
elif select == 'q':
ret = 0
return ret
def initOrder(self):
url = 'https://kyfw.12306.cn/otn/login/checkUser'
parameters = [
('_json_att', ''),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'初始化订单异常')
print(u'准备下单喽')
url = 'https://kyfw.12306.cn/otn/leftTicket/submitOrderRequest'
parameters = [
#('ODA4NzIx', 'MTU0MTczYmQ2N2I3MjJkOA%3D%3D'),
('myversion', 'undefined'),
('secretStr', self.trains[self.current_train_index]['secretStr']),
('train_date', self.train_date),
('back_train_date', self.back_train_date),
('tour_flag', self.tour_flag),
('purpose_codes', self.purpose_code),
('query_from_station_name', self.from_city_name),
('query_to_station_name', self.to_city_name),
('undefined', '')
]
        # TODO note: this POST body must NOT be urlencoded here (oddly,
        # urllib.urlencode(parameters) breaks the request)
        payload = '&'.join('%s=%s' % (name, value) for name, value in parameters)
r = self.post(url, payload)
if not r:
print(u'下单异常')
return RET_ERR
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"messages":[],"validateMessages":{}}
obj = r.json()
if not (hasKeys(obj, ['status', 'httpstatus'])
and obj['status']):
print(u'下单失败啦')
dumpObj(obj)
return RET_ERR
print(u'订单初始化...')
self.session.close() # TODO
url = 'https://kyfw.12306.cn/otn/confirmPassenger/initDc'
parameters = [
('_json_att', ''),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'订单初始化异常')
return RET_ERR
data = r.text
s = data.find('globalRepeatSubmitToken') # TODO
e = data.find('global_lang')
if s == -1 or e == -1:
print(u'找不到 globalRepeatSubmitToken')
return RET_ERR
buf = data[s:e]
s = buf.find("'")
e = buf.find("';")
if s == -1 or e == -1:
print(u'很遗憾, 找不到 globalRepeatSubmitToken')
return RET_ERR
self.repeatSubmitToken = buf[s + 1:e]
s = data.find('key_check_isChange')
e = data.find('leftDetails')
if s == -1 or e == -1:
print(u'找不到 key_check_isChange')
return RET_ERR
self.keyCheckIsChange = data[s + len('key_check_isChange') + 3:e - 3]
return RET_OK
def checkOrderInfo(self):
if self.checkRandCodeAnsyn('passenger') == RET_ERR:
return RET_ERR
passengerTicketStr = ''
oldPassengerStr = ''
        passenger_seat_detail = '0'  # TODO berth preference: 0 -> random, 1 -> lower, 2 -> middle, 3 -> upper
for p in self.passengers:
if p['index'] != 1:
passengerTicketStr += 'N_'
oldPassengerStr += '1_'
passengerTicketStr += '%s,%s,%s,%s,%s,%s,%s,' % (
p['seattype'],
passenger_seat_detail,
p['tickettype'],
p['name'],
p['cardtype'],
p['id'],
p['phone'])
oldPassengerStr += '%s,%s,%s,' % (
p['name'],
p['cardtype'],
p['id'])
passengerTicketStr += 'N'
oldPassengerStr += '1_'
self.passengerTicketStr = passengerTicketStr
self.oldPassengerStr = oldPassengerStr
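        # For a single adult hard-seat passenger the strings built above end up as
        #   passengerTicketStr = '1,0,1,<name>,1,<id>,<phone>,N'
        #   oldPassengerStr    = '<name>,1,<id>,1_'
        # (format derived from the loop above, not from any official API spec)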
print(u'检查订单...')
url = 'https://kyfw.12306.cn/otn/confirmPassenger/checkOrderInfo'
parameters = [
('cancel_flag', '2'), # TODO
('bed_level_order_num', '000000000000000000000000000000'), # TODO
('passengerTicketStr', self.passengerTicketStr),
('oldPassengerStr', self.oldPassengerStr),
('tour_flag', self.tour_flag),
('randCode', self.captcha),
#('NzA4MTc1', 'NmYyYzZkYWY2OWZkNzg2YQ%3D%3D'), # TODO
('_json_att', ''),
('REPEAT_SUBMIT_TOKEN', self.repeatSubmitToken),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'检查订单异常')
return RET_ERR
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"data":{"submitStatus":true},"messages":[],"validateMessages":{}}
obj = r.json()
if (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['submitStatus'])
and obj['status']
and obj['data']['submitStatus']):
print(u'检查订单成功')
return RET_OK
else:
print(u'检查订单失败')
dumpObj(obj)
return RET_ERR
def getQueueCount(self):
print(u'查询排队情况...')
url = 'https://kyfw.12306.cn/otn/confirmPassenger/getQueueCount'
t = self.trains[self.current_train_index]['queryLeftNewDTO']
parameters = [
('train_date', date2UTC(self.train_date)),
('train_no', t['train_no']),
('stationTrainCode', t['station_train_code']),
('seatType', '1'), # TODO
('fromStationTelecode', t['from_station_telecode']),
('toStationTelecode', t['to_station_telecode']),
('leftTicket', t['yp_info']),
('purpose_codes', '00'), # TODO
('_json_att', ''),
('REPEAT_SUBMIT_TOKEN', self.repeatSubmitToken)
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'查询排队情况异常')
return RET_ERR
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"data":{"count":"0","ticket":"100985109710098535003021350212","op_2":"false","countT":"0","op_1":"false"},"messages":[],"validateMessages":{}}
obj = r.json()
if not (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['op_1', 'op_2'])
and obj['status']):
print(u'查询排队情况失败')
dumpObj(obj)
return RET_ERR
if obj['data']['op_1'] != 'false':
print(u'已有人先于您提交相同的购票需求, 到处理您的需求时可能已无票, 建议根据当前余票确定是否排队.')
if obj['data']['op_2'] != 'false':
print(u'目前排队人数已经超过余票张数,请您选择其他席别或车次,特此提醒。')
if 'ticket' in obj['data']:
print(u'排队详情:%s' % (obj['data']['ticket'])) # TODO
return RET_OK
def confirmSingleForQueue(self):
print(u'提交订单排队...')
url = 'https://kyfw.12306.cn/otn/confirmPassenger/confirmSingleForQueue'
t = self.trains[self.current_train_index]['queryLeftNewDTO']
parameters = [
('passengerTicketStr', self.passengerTicketStr),
('oldPassengerStr', self.oldPassengerStr),
('randCode', self.captcha),
('purpose_codes', '00'), # TODO
('key_check_isChange', self.keyCheckIsChange),
('leftTicketStr', t['yp_info']),
('train_location', t['location_code']),
('_json_att', ''),
('REPEAT_SUBMIT_TOKEN', self.repeatSubmitToken),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'提交订单排队异常')
return RET_ERR
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"data":{"submitStatus":true},"messages":[],"validateMessages":{}}
obj = r.json()
if (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['submitStatus'])
and obj['status'] and obj['data']['submitStatus']):
print(u'订单排队中...')
return RET_OK
else:
print(u'提交订单排队失败')
dumpObj(obj)
return RET_ERR
def queryOrderWaitTime(self):
print(u'等待订单流水号...')
url = 'https://kyfw.12306.cn/otn/confirmPassenger/queryOrderWaitTime?random=%13d&tourFlag=dc&_json_att=&REPEAT_SUBMIT_TOKEN=%s' % (
random.randint(1000000000000, 1999999999999), self.repeatSubmitToken)
r = self.get(url)
if not r:
print(u'等待订单流水号异常')
return RET_ERR
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"data":{"queryOrderWaitTimeStatus":true,"count":0,"waitTime":4,"requestId":5944637152210732219,"waitCount":2,"tourFlag":"dc","orderId":null},"messages":[],"validateMessages":{}}
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"data":{"queryOrderWaitTimeStatus":true,"count":0,"waitTime":-1,"requestId":5944637152210732219,"waitCount":0,"tourFlag":"dc","orderId":"E739900792"},"messages":[],"validateMessages":{}}
obj = r.json()
if not (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['orderId'])
and obj['status']
and obj['data']['orderId']):
print(u'等待订单流水号失败')
dumpObj(obj)
return RET_ERR
self.orderId = obj['data']['orderId']
if (self.orderId and self.orderId != 'null'):
print(u'订单流水号为:')
print(self.orderId)
return RET_OK
else:
print(u'等待订单流水号失败')
return RET_ERR
def payOrder(self):
print(u'等待订票结果...')
url = 'https://kyfw.12306.cn/otn/confirmPassenger/resultOrderForDcQueue'
parameters = [
('orderSequence_no', self.orderId),
('_json_att', ''),
('REPEAT_SUBMIT_TOKEN', self.repeatSubmitToken),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'等待订票结果异常')
return RET_ERR
# {'validateMessagesShowId':'_validatorMessage','status':true,'httpstatus':200,'data':{'submitStatus':true},'messages':[],'validateMessages':{}}
# {'validateMessagesShowId':'_validatorMessage','status':true,'httpstatus':200,'data':{'errMsg':'获取订单信息失败,请查看未完成订单,继续支付!','submitStatus':false},'messages':[],'validateMessages':{}}
obj = r.json()
if not (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['submitStatus'])
and obj['status']
and obj['data']['submitStatus']):
print(u'等待订票结果失败')
dumpObj(obj)
return RET_ERR
url = 'https://kyfw.12306.cn/otn//payOrder/init?random=%13d' % (
random.randint(1000000000000, 1999999999999))
parameters = [
('_json_att', ''),
('REPEAT_SUBMIT_TOKEN', self.repeatSubmitToken),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'请求异常')
return RET_ERR
data = r.text
if data.find(u'席位已锁定') != -1:
print(u'订票成功^_^请在45分钟内完成网上支付,否则系统将自动取消')
return RET_OK
else:
return RET_ERR
def queryTicketOrderNotComplete(self):
print(u'正在查询未完成订单...')
url = 'https://kyfw.12306.cn/otn/queryOrder/queryTicketOrderNoComplete'
parameters = [
('_json_att', ''),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'查询未完成订单异常')
return RET_ERR
obj = r.json()
if not (hasKeys(obj, ['status', 'httpstatus', 'data']) and obj['status']):
print(u'查询未完成订单失败')
dumpObj(obj)
return RET_ERR
if (hasKeys(obj['data'], ['orderDBList']) and len(obj['data']['orderDBList'])):
print(u'查询到有未完成订单,请先处理')
return RET_OK
if (
hasKeys(obj['data'], ['orderCacheDTO'])
and obj['data']['orderCacheDTO']
and hasKeys(obj['data']['orderCacheDTO'], ['status'])):
if obj['data']['orderCacheDTO']['status'] == 0:
print(u'查询到cache有未完成订单,请先处理')
return RET_OK
else:
if (hasKeys(obj['data']['orderCacheDTO'], ['message'])):
dumpObj(obj['data']['orderCacheDTO']['message'])
return RET_ERR
def main():
print(getTime())
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config', help='Specify config file')
parser.add_argument('-u', '--username', help='Specify username to login')
parser.add_argument('-p', '--password', help='Specify password to login')
parser.add_argument('-d', '--date', help='Specify train date, 2014-01-01')
parser.add_argument('-m', '--mail', help='Send email notification')
args = parser.parse_args()
order = TicketOrder()
order.initSession()
order.initStation()
    if args.config:
        order.readConfig(args.config)  # use the config file given on the command line
    else:
        order.readConfig()  # use the default config file config.ini
    if args.username:
        order.username = args.username  # command-line account overrides the config file
    if args.password:
        order.password = args.password  # command-line password overrides the config file
    if args.date:
        if checkDate(args.date):
            order.train_date = args.date  # command-line travel date overrides the config file
        else:
            print(u'乘车日期无效, 请重新选择')
            order.train_date = selectDate()
    if args.mail:
        # automatically send a mail notification when tickets become available
        order.notify['mail_enable'] = 1 if args.mail == '1' else 0
tries = 0
while tries < MAX_TRIES:
tries += 1
if order.login() == RET_OK:
break
else:
#print(u'失败次数太多,自动退出程序')
#sys.exit()
print(u'登陆失败,但是你仍然可以进行查票操作!')
order.selectPassengers(1)
while True:
time.sleep(1)
        # query tickets
if order.queryTickets() != RET_OK:
continue
        # print the query results
if order.printTrains() != RET_OK:
continue
        # pick one of the menu actions
action = order.selectAction()
if action == -1:
continue
elif action == 0:
break
        # initialize the order
if order.initOrder() != RET_OK:
continue
        # verify the order info
if order.checkOrderInfo() != RET_OK:
continue
        # query the queue and remaining tickets
# if order.getQueueCount() != RET_OK:
# continue
        # submit the order into the queue
tries = 0
while tries < 2:
tries += 1
if order.confirmSingleForQueue() == RET_OK:
break
        # fetch the orderId
tries = 0
while tries < 2:
tries += 1
if order.queryOrderWaitTime() == RET_OK:
break
        # finalize the order
if order.payOrder() == RET_OK:
break
        # visit the not-completed-orders page to confirm the booking succeeded
if order.queryTicketOrderNotComplete() == RET_OK:
print(u'订票成功^_^请在45分钟内完成网上支付,否则系统将自动取消')
break
print(getTime())
raw_input('Press any key to continue')
if __name__ == '__main__':
main()
# EOF
| gpl-2.0 | 7,141,282,508,724,116,000 | 36.999359 | 273 | 0.492877 | false |
msleal/amspy | amspy/examples/create_streaming_endpoint.py | 2 | 2523 | """
Copyright (c) 2016, Marcelo Leal
Description: Simple Azure Media Services Python library
License: MIT (see LICENSE.txt file for details)
"""
import os
import sys  # needed for sys.exit() in the config check below
import json
import amspy
import time
import logging
import datetime
###########################################################################################
##### DISCLAIMER ##### ##### DISCLAIMER ##### ##### DISCLAIMER ##### ##### DISCLAIMER #####
###########################################################################################
# ALL CODE IN THIS DIRECTOY (INCLUDING THIS FILE) ARE EXAMPLE CODES THAT WILL ACT ON YOUR
# AMS ACCOUNT. IT ASSUMES THAT THE AMS ACCOUNT IS CLEAN (e.g.: BRAND NEW), WITH NO DATA OR
# PRODUCTION CODE ON IT. DO NOT, AGAIN: DO NOT RUN ANY EXAMPLE CODE AGAINST PRODUCTION AMS
# ACCOUNT! IF YOU RUN ANY EXAMPLE CODE AGAINST YOUR PRODUCTION AMS ACCOUNT, YOU CAN LOSE
# DATA, AND/OR PUT YOUR AMS SERVICES IN A DEGRADED OR UNAVAILABLE STATE. BE WARNED!
###########################################################################################
##### DISCLAIMER ##### ##### DISCLAIMER ##### ##### DISCLAIMER ##### ##### DISCLAIMER #####
###########################################################################################
# Load Azure app defaults
try:
with open('config.json') as configFile:
configData = json.load(configFile)
except FileNotFoundError:
print("ERROR: Expecting config.json in current folder")
sys.exit()
account_name = configData['accountName']
account_key = configData['accountKey']
# Get the access token...
response = amspy.get_access_token(account_name, account_key)
resjson = response.json()
access_token = resjson["access_token"]
#Initialization...
print ("\n-----------------------= AMS Py =----------------------");
print ("Simple Python Library for Azure Media Services REST API");
print ("-------------------------------------------------------\n");
### create a streaming endpoint
print ("\n001 >>> Creating a Streaming Endpoint")
response = amspy.create_streaming_endpoint(access_token, "NewStreamingEndpoint", description="HLS Streaming Endpoint")
if (response.status_code == 202):
resjson = response.json()
streamingendpoint_id = str(resjson['d']['Id'])
print("POST Status.............................: " + str(response.status_code))
print("Streaming Endpoint Id..........................: " + streamingendpoint_id)
else:
print("POST Status.............................: " + str(response.status_code) + " - Streaming Endpoint Creation ERROR." + str(response.content))
| mit | 5,360,250,413,264,371,000 | 42.5 | 146 | 0.557273 | false |
oudalab/phyllo | phyllo/extractors/hydatiusDB.py | 1 | 5254 | import sqlite3
import urllib
import re
from urllib.request import urlopen
from bs4 import BeautifulSoup
from phyllo.phyllo_logger import logger
def getBooks(soup):
siteURL = 'http://www.thelatinlibrary.com'
textsURL = []
# get links to books in the collection
for a in soup.find_all('a', href=True):
link = a['href']
textsURL.append("{}/{}".format(siteURL, a['href']))
    # drop navigation links that are not texts; guard each remove so a missing
    # link cannot raise ValueError
    for nav in ("http://www.thelatinlibrary.com/index.html",
                "http://www.thelatinlibrary.com/classics.html",
                "http://www.thelatinlibrary.com/christian.html"):
        while nav in textsURL:
            textsURL.remove(nav)
logger.info("\n".join(textsURL))
return textsURL
def main():
# The collection URL below.
collURL = 'http://www.thelatinlibrary.com/hydatius.html'
collOpen = urllib.request.urlopen(collURL)
collSOUP = BeautifulSoup(collOpen, 'html5lib')
author = collSOUP.title.string.strip()
colltitle = collSOUP.title.string.strip()
date = collSOUP.span.string.replace('(', '').replace(')', '').replace(u"\u2013", '-').strip()
textsURL = getBooks(collSOUP)
with sqlite3.connect('texts.db') as db:
c = db.cursor()
c.execute(
'CREATE TABLE IF NOT EXISTS texts (id INTEGER PRIMARY KEY, title TEXT, book TEXT,'
' language TEXT, author TEXT, date TEXT, chapter TEXT, verse TEXT, passage TEXT,'
' link TEXT, documentType TEXT)')
c.execute("DELETE FROM texts WHERE author = 'Hydatius'")
for url in textsURL:
openurl = urllib.request.urlopen(url)
textsoup = BeautifulSoup(openurl, 'html5lib')
title = textsoup.title.string.split(":")[1].strip()
print(title)
chapter = -1
verse = 0
            # this just numbers paragraphs as verses; chapters are not split
            # because the chapter boundaries in the source were unclear
if title.startswith("Chronicon"):
getp = textsoup.find_all('p')
for p in getp:
try:
if p['class'][0].lower() in ['border', 'pagehead', 'shortborder', 'smallborder', 'margin',
'internal_navigation']: # these are not part of the main text
continue
except:
pass
verses = []
                text = p.get_text().strip()
                verses.append(text)
for v in verses:
if v.startswith('Christian'):
continue
if v is None or v == '' or v.isspace():
continue
# verse number assignment.
verse += 1
c.execute("INSERT INTO texts VALUES (?,?,?,?,?,?,?, ?, ?, ?, ?)",
(None, colltitle, title, 'Latin', author, date, chapter,
verse, v.strip(), url, 'prose'))
            # keep internal numbering as part of the text and number each line;
            # it is unclear from the source whether this section is poetry or prose
else:
chapter = "-1"
verse = 0
getp = textsoup.find_all('p')
for p in getp:
try:
if p['class'][0].lower() in ['border', 'pagehead', 'shortborder', 'smallborder', 'margin',
'internal_navigation']: # these are not part of the main text
continue
except:
pass
txtstr = p.get_text().strip()
if txtstr.startswith("Christian"):
continue
brtags = p.findAll('br')
verses = []
try:
firstline = brtags[0].previous_sibling.previous_sibling.strip()
except:
firstline = brtags[0].previous_sibling.strip()
verses.append(firstline)
for br in brtags:
try:
text = br.next_sibling.next_sibling.strip()
except:
text = br.next_sibling.strip()
if text is None or text == '' or text.isspace():
continue
verses.append(text)
for v in verses:
if v.startswith('Christian'):
continue
if v is None or v == '' or v.isspace():
continue
# verse number assignment.
verse += 1
c.execute("INSERT INTO texts VALUES (?,?,?,?,?,?,?, ?, ?, ?, ?)",
(None, colltitle, title, 'Latin', author, date, chapter,
verse, v.strip(), url, 'prose'))
if __name__ == '__main__':
main()
| apache-2.0 | -9,127,012,218,428,254,000 | 39.10687 | 115 | 0.467644 | false |
nmercier/linux-cross-gcc | linux/lib/python2.7/compiler/transformer.py | 176 | 53108 | """Parse tree transformation module.
Transforms Python source code into an abstract syntax tree (AST)
defined in the ast module.
The simplest ways to invoke this module are via parse and parseFile.
parse(buf) -> AST
parseFile(path) -> AST
"""
# Original version written by Greg Stein ([email protected])
# and Bill Tutt ([email protected])
# February 1997.
#
# Modifications and improvements for Python 2.0 by Jeremy Hylton and
# Mark Hammond
#
# Some fixes to try to have correct line number on almost all nodes
# (except Module, Discard and Stmt) added by Sylvain Thenault
#
# Portions of this file are:
# Copyright (C) 1997-1998 Greg Stein. All Rights Reserved.
#
# This module is provided under a BSD-ish license. See
# http://www.opensource.org/licenses/bsd-license.html
# and replace OWNER, ORGANIZATION, and YEAR as appropriate.
from compiler.ast import *
import parser
import symbol
import token
class WalkerError(StandardError):
pass
from compiler.consts import CO_VARARGS, CO_VARKEYWORDS
from compiler.consts import OP_ASSIGN, OP_DELETE, OP_APPLY
def parseFile(path):
f = open(path, "U")
# XXX The parser API tolerates files without a trailing newline,
# but not strings without a trailing newline. Always add an extra
# newline to the file contents, since we're going through the string
# version of the API.
src = f.read() + "\n"
f.close()
return parse(src)
def parse(buf, mode="exec"):
if mode == "exec" or mode == "single":
return Transformer().parsesuite(buf)
elif mode == "eval":
return Transformer().parseexpr(buf)
else:
raise ValueError("compile() arg 3 must be"
" 'exec' or 'eval' or 'single'")
def asList(nodes):
l = []
for item in nodes:
if hasattr(item, "asList"):
l.append(item.asList())
else:
if type(item) is type( (None, None) ):
l.append(tuple(asList(item)))
elif type(item) is type( [] ):
l.append(asList(item))
else:
l.append(item)
return l
def extractLineNo(ast):
if not isinstance(ast[1], tuple):
# get a terminal node
return ast[2]
for child in ast[1:]:
if isinstance(child, tuple):
lineno = extractLineNo(child)
if lineno is not None:
return lineno
def Node(*args):
kind = args[0]
if kind in nodes:
try:
return nodes[kind](*args[1:])
except TypeError:
print nodes[kind], len(args), args
raise
else:
raise WalkerError, "Can't find appropriate Node type: %s" % str(args)
#return apply(ast.Node, args)
class Transformer:
"""Utility object for transforming Python parse trees.
Exposes the following methods:
tree = transform(ast_tree)
tree = parsesuite(text)
tree = parseexpr(text)
tree = parsefile(fileob | filename)
"""
def __init__(self):
self._dispatch = {}
for value, name in symbol.sym_name.items():
if hasattr(self, name):
self._dispatch[value] = getattr(self, name)
self._dispatch[token.NEWLINE] = self.com_NEWLINE
self._atom_dispatch = {token.LPAR: self.atom_lpar,
token.LSQB: self.atom_lsqb,
token.LBRACE: self.atom_lbrace,
token.BACKQUOTE: self.atom_backquote,
token.NUMBER: self.atom_number,
token.STRING: self.atom_string,
token.NAME: self.atom_name,
}
self.encoding = None
def transform(self, tree):
"""Transform an AST into a modified parse tree."""
if not (isinstance(tree, tuple) or isinstance(tree, list)):
tree = parser.st2tuple(tree, line_info=1)
return self.compile_node(tree)
def parsesuite(self, text):
"""Return a modified parse tree for the given suite text."""
return self.transform(parser.suite(text))
def parseexpr(self, text):
"""Return a modified parse tree for the given expression text."""
return self.transform(parser.expr(text))
def parsefile(self, file):
"""Return a modified parse tree for the contents of the given file."""
if type(file) == type(''):
file = open(file)
return self.parsesuite(file.read())
# --------------------------------------------------------------
#
# PRIVATE METHODS
#
def compile_node(self, node):
### emit a line-number node?
n = node[0]
if n == symbol.encoding_decl:
self.encoding = node[2]
node = node[1]
n = node[0]
if n == symbol.single_input:
return self.single_input(node[1:])
if n == symbol.file_input:
return self.file_input(node[1:])
if n == symbol.eval_input:
return self.eval_input(node[1:])
if n == symbol.lambdef:
return self.lambdef(node[1:])
if n == symbol.funcdef:
return self.funcdef(node[1:])
if n == symbol.classdef:
return self.classdef(node[1:])
raise WalkerError, ('unexpected node type', n)
def single_input(self, node):
### do we want to do anything about being "interactive" ?
# NEWLINE | simple_stmt | compound_stmt NEWLINE
n = node[0][0]
if n != token.NEWLINE:
return self.com_stmt(node[0])
return Pass()
def file_input(self, nodelist):
doc = self.get_docstring(nodelist, symbol.file_input)
if doc is not None:
i = 1
else:
i = 0
stmts = []
for node in nodelist[i:]:
if node[0] != token.ENDMARKER and node[0] != token.NEWLINE:
self.com_append_stmt(stmts, node)
return Module(doc, Stmt(stmts))
def eval_input(self, nodelist):
# from the built-in function input()
### is this sufficient?
return Expression(self.com_node(nodelist[0]))
def decorator_name(self, nodelist):
listlen = len(nodelist)
assert listlen >= 1 and listlen % 2 == 1
item = self.atom_name(nodelist)
i = 1
while i < listlen:
assert nodelist[i][0] == token.DOT
assert nodelist[i + 1][0] == token.NAME
item = Getattr(item, nodelist[i + 1][1])
i += 2
return item
def decorator(self, nodelist):
# '@' dotted_name [ '(' [arglist] ')' ]
assert len(nodelist) in (3, 5, 6)
assert nodelist[0][0] == token.AT
assert nodelist[-1][0] == token.NEWLINE
assert nodelist[1][0] == symbol.dotted_name
funcname = self.decorator_name(nodelist[1][1:])
if len(nodelist) > 3:
assert nodelist[2][0] == token.LPAR
expr = self.com_call_function(funcname, nodelist[3])
else:
expr = funcname
return expr
def decorators(self, nodelist):
# decorators: decorator ([NEWLINE] decorator)* NEWLINE
items = []
for dec_nodelist in nodelist:
assert dec_nodelist[0] == symbol.decorator
items.append(self.decorator(dec_nodelist[1:]))
return Decorators(items)
def decorated(self, nodelist):
assert nodelist[0][0] == symbol.decorators
if nodelist[1][0] == symbol.funcdef:
n = [nodelist[0]] + list(nodelist[1][1:])
return self.funcdef(n)
elif nodelist[1][0] == symbol.classdef:
decorators = self.decorators(nodelist[0][1:])
cls = self.classdef(nodelist[1][1:])
cls.decorators = decorators
return cls
raise WalkerError()
def funcdef(self, nodelist):
# -6 -5 -4 -3 -2 -1
# funcdef: [decorators] 'def' NAME parameters ':' suite
# parameters: '(' [varargslist] ')'
if len(nodelist) == 6:
assert nodelist[0][0] == symbol.decorators
decorators = self.decorators(nodelist[0][1:])
else:
assert len(nodelist) == 5
decorators = None
lineno = nodelist[-4][2]
name = nodelist[-4][1]
args = nodelist[-3][2]
if args[0] == symbol.varargslist:
names, defaults, flags = self.com_arglist(args[1:])
else:
names = defaults = ()
flags = 0
doc = self.get_docstring(nodelist[-1])
# code for function
code = self.com_node(nodelist[-1])
if doc is not None:
assert isinstance(code, Stmt)
assert isinstance(code.nodes[0], Discard)
del code.nodes[0]
return Function(decorators, name, names, defaults, flags, doc, code,
lineno=lineno)
def lambdef(self, nodelist):
# lambdef: 'lambda' [varargslist] ':' test
if nodelist[2][0] == symbol.varargslist:
names, defaults, flags = self.com_arglist(nodelist[2][1:])
else:
names = defaults = ()
flags = 0
# code for lambda
code = self.com_node(nodelist[-1])
return Lambda(names, defaults, flags, code, lineno=nodelist[1][2])
old_lambdef = lambdef
def classdef(self, nodelist):
# classdef: 'class' NAME ['(' [testlist] ')'] ':' suite
name = nodelist[1][1]
doc = self.get_docstring(nodelist[-1])
if nodelist[2][0] == token.COLON:
bases = []
elif nodelist[3][0] == token.RPAR:
bases = []
else:
bases = self.com_bases(nodelist[3])
# code for class
code = self.com_node(nodelist[-1])
if doc is not None:
assert isinstance(code, Stmt)
assert isinstance(code.nodes[0], Discard)
del code.nodes[0]
return Class(name, bases, doc, code, lineno=nodelist[1][2])
def stmt(self, nodelist):
return self.com_stmt(nodelist[0])
small_stmt = stmt
flow_stmt = stmt
compound_stmt = stmt
def simple_stmt(self, nodelist):
# small_stmt (';' small_stmt)* [';'] NEWLINE
stmts = []
for i in range(0, len(nodelist), 2):
self.com_append_stmt(stmts, nodelist[i])
return Stmt(stmts)
def parameters(self, nodelist):
raise WalkerError
def varargslist(self, nodelist):
raise WalkerError
def fpdef(self, nodelist):
raise WalkerError
def fplist(self, nodelist):
raise WalkerError
def dotted_name(self, nodelist):
raise WalkerError
def comp_op(self, nodelist):
raise WalkerError
def trailer(self, nodelist):
raise WalkerError
def sliceop(self, nodelist):
raise WalkerError
def argument(self, nodelist):
raise WalkerError
# --------------------------------------------------------------
#
# STATEMENT NODES (invoked by com_node())
#
def expr_stmt(self, nodelist):
# augassign testlist | testlist ('=' testlist)*
en = nodelist[-1]
exprNode = self.lookup_node(en)(en[1:])
if len(nodelist) == 1:
return Discard(exprNode, lineno=exprNode.lineno)
if nodelist[1][0] == token.EQUAL:
nodesl = []
for i in range(0, len(nodelist) - 2, 2):
nodesl.append(self.com_assign(nodelist[i], OP_ASSIGN))
return Assign(nodesl, exprNode, lineno=nodelist[1][2])
else:
lval = self.com_augassign(nodelist[0])
op = self.com_augassign_op(nodelist[1])
return AugAssign(lval, op[1], exprNode, lineno=op[2])
raise WalkerError, "can't get here"
def print_stmt(self, nodelist):
# print ([ test (',' test)* [','] ] | '>>' test [ (',' test)+ [','] ])
items = []
if len(nodelist) == 1:
start = 1
dest = None
elif nodelist[1][0] == token.RIGHTSHIFT:
assert len(nodelist) == 3 \
or nodelist[3][0] == token.COMMA
dest = self.com_node(nodelist[2])
start = 4
else:
dest = None
start = 1
for i in range(start, len(nodelist), 2):
items.append(self.com_node(nodelist[i]))
if nodelist[-1][0] == token.COMMA:
return Print(items, dest, lineno=nodelist[0][2])
return Printnl(items, dest, lineno=nodelist[0][2])
def del_stmt(self, nodelist):
return self.com_assign(nodelist[1], OP_DELETE)
def pass_stmt(self, nodelist):
return Pass(lineno=nodelist[0][2])
def break_stmt(self, nodelist):
return Break(lineno=nodelist[0][2])
def continue_stmt(self, nodelist):
return Continue(lineno=nodelist[0][2])
def return_stmt(self, nodelist):
# return: [testlist]
if len(nodelist) < 2:
return Return(Const(None), lineno=nodelist[0][2])
return Return(self.com_node(nodelist[1]), lineno=nodelist[0][2])
def yield_stmt(self, nodelist):
expr = self.com_node(nodelist[0])
return Discard(expr, lineno=expr.lineno)
def yield_expr(self, nodelist):
if len(nodelist) > 1:
value = self.com_node(nodelist[1])
else:
value = Const(None)
return Yield(value, lineno=nodelist[0][2])
def raise_stmt(self, nodelist):
# raise: [test [',' test [',' test]]]
if len(nodelist) > 5:
expr3 = self.com_node(nodelist[5])
else:
expr3 = None
if len(nodelist) > 3:
expr2 = self.com_node(nodelist[3])
else:
expr2 = None
if len(nodelist) > 1:
expr1 = self.com_node(nodelist[1])
else:
expr1 = None
return Raise(expr1, expr2, expr3, lineno=nodelist[0][2])
def import_stmt(self, nodelist):
# import_stmt: import_name | import_from
assert len(nodelist) == 1
return self.com_node(nodelist[0])
def import_name(self, nodelist):
# import_name: 'import' dotted_as_names
return Import(self.com_dotted_as_names(nodelist[1]),
lineno=nodelist[0][2])
def import_from(self, nodelist):
# import_from: 'from' ('.'* dotted_name | '.') 'import' ('*' |
# '(' import_as_names ')' | import_as_names)
assert nodelist[0][1] == 'from'
idx = 1
while nodelist[idx][1] == '.':
idx += 1
level = idx - 1
if nodelist[idx][0] == symbol.dotted_name:
fromname = self.com_dotted_name(nodelist[idx])
idx += 1
else:
fromname = ""
assert nodelist[idx][1] == 'import'
if nodelist[idx + 1][0] == token.STAR:
return From(fromname, [('*', None)], level,
lineno=nodelist[0][2])
else:
node = nodelist[idx + 1 + (nodelist[idx + 1][0] == token.LPAR)]
return From(fromname, self.com_import_as_names(node), level,
lineno=nodelist[0][2])
def global_stmt(self, nodelist):
# global: NAME (',' NAME)*
names = []
for i in range(1, len(nodelist), 2):
names.append(nodelist[i][1])
return Global(names, lineno=nodelist[0][2])
def exec_stmt(self, nodelist):
# exec_stmt: 'exec' expr ['in' expr [',' expr]]
expr1 = self.com_node(nodelist[1])
if len(nodelist) >= 4:
expr2 = self.com_node(nodelist[3])
if len(nodelist) >= 6:
expr3 = self.com_node(nodelist[5])
else:
expr3 = None
else:
expr2 = expr3 = None
return Exec(expr1, expr2, expr3, lineno=nodelist[0][2])
def assert_stmt(self, nodelist):
# 'assert': test, [',' test]
expr1 = self.com_node(nodelist[1])
if (len(nodelist) == 4):
expr2 = self.com_node(nodelist[3])
else:
expr2 = None
return Assert(expr1, expr2, lineno=nodelist[0][2])
def if_stmt(self, nodelist):
# if: test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
tests = []
for i in range(0, len(nodelist) - 3, 4):
testNode = self.com_node(nodelist[i + 1])
suiteNode = self.com_node(nodelist[i + 3])
tests.append((testNode, suiteNode))
if len(nodelist) % 4 == 3:
elseNode = self.com_node(nodelist[-1])
## elseNode.lineno = nodelist[-1][1][2]
else:
elseNode = None
return If(tests, elseNode, lineno=nodelist[0][2])
def while_stmt(self, nodelist):
# 'while' test ':' suite ['else' ':' suite]
testNode = self.com_node(nodelist[1])
bodyNode = self.com_node(nodelist[3])
if len(nodelist) > 4:
elseNode = self.com_node(nodelist[6])
else:
elseNode = None
return While(testNode, bodyNode, elseNode, lineno=nodelist[0][2])
def for_stmt(self, nodelist):
# 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite]
assignNode = self.com_assign(nodelist[1], OP_ASSIGN)
listNode = self.com_node(nodelist[3])
bodyNode = self.com_node(nodelist[5])
if len(nodelist) > 8:
elseNode = self.com_node(nodelist[8])
else:
elseNode = None
return For(assignNode, listNode, bodyNode, elseNode,
lineno=nodelist[0][2])
def try_stmt(self, nodelist):
return self.com_try_except_finally(nodelist)
def with_stmt(self, nodelist):
return self.com_with(nodelist)
def with_var(self, nodelist):
return self.com_with_var(nodelist)
def suite(self, nodelist):
# simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
if len(nodelist) == 1:
return self.com_stmt(nodelist[0])
stmts = []
for node in nodelist:
if node[0] == symbol.stmt:
self.com_append_stmt(stmts, node)
return Stmt(stmts)
# --------------------------------------------------------------
#
# EXPRESSION NODES (invoked by com_node())
#
def testlist(self, nodelist):
# testlist: expr (',' expr)* [',']
# testlist_safe: test [(',' test)+ [',']]
# exprlist: expr (',' expr)* [',']
return self.com_binary(Tuple, nodelist)
testlist_safe = testlist # XXX
testlist1 = testlist
exprlist = testlist
def testlist_comp(self, nodelist):
# test ( comp_for | (',' test)* [','] )
assert nodelist[0][0] == symbol.test
if len(nodelist) == 2 and nodelist[1][0] == symbol.comp_for:
test = self.com_node(nodelist[0])
return self.com_generator_expression(test, nodelist[1])
return self.testlist(nodelist)
def test(self, nodelist):
# or_test ['if' or_test 'else' test] | lambdef
if len(nodelist) == 1 and nodelist[0][0] == symbol.lambdef:
return self.lambdef(nodelist[0])
then = self.com_node(nodelist[0])
if len(nodelist) > 1:
assert len(nodelist) == 5
assert nodelist[1][1] == 'if'
assert nodelist[3][1] == 'else'
test = self.com_node(nodelist[2])
else_ = self.com_node(nodelist[4])
return IfExp(test, then, else_, lineno=nodelist[1][2])
return then
def or_test(self, nodelist):
# and_test ('or' and_test)* | lambdef
if len(nodelist) == 1 and nodelist[0][0] == symbol.lambdef:
return self.lambdef(nodelist[0])
return self.com_binary(Or, nodelist)
old_test = or_test
def and_test(self, nodelist):
# not_test ('and' not_test)*
return self.com_binary(And, nodelist)
def not_test(self, nodelist):
# 'not' not_test | comparison
result = self.com_node(nodelist[-1])
if len(nodelist) == 2:
return Not(result, lineno=nodelist[0][2])
return result
def comparison(self, nodelist):
# comparison: expr (comp_op expr)*
node = self.com_node(nodelist[0])
if len(nodelist) == 1:
return node
results = []
for i in range(2, len(nodelist), 2):
nl = nodelist[i-1]
# comp_op: '<' | '>' | '=' | '>=' | '<=' | '<>' | '!=' | '=='
# | 'in' | 'not' 'in' | 'is' | 'is' 'not'
n = nl[1]
if n[0] == token.NAME:
type = n[1]
if len(nl) == 3:
if type == 'not':
type = 'not in'
else:
type = 'is not'
else:
type = _cmp_types[n[0]]
lineno = nl[1][2]
results.append((type, self.com_node(nodelist[i])))
# we need a special "compare" node so that we can distinguish
# 3 < x < 5 from (3 < x) < 5
# the two have very different semantics and results (note that the
# latter form is always true)
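        # For illustration (old compiler AST): '3 < x < 5' parses to
        #   Compare(Const(3), [('<', Name('x')), ('<', Const(5))])
        # whereas '(3 < x) < 5' yields a Compare whose left operand is
        # itself a Compare node.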
return Compare(node, results, lineno=lineno)
def expr(self, nodelist):
# xor_expr ('|' xor_expr)*
return self.com_binary(Bitor, nodelist)
def xor_expr(self, nodelist):
        # and_expr ('^' and_expr)*
return self.com_binary(Bitxor, nodelist)
def and_expr(self, nodelist):
        # shift_expr ('&' shift_expr)*
return self.com_binary(Bitand, nodelist)
def shift_expr(self, nodelist):
        # arith_expr (('<<'|'>>') arith_expr)*
node = self.com_node(nodelist[0])
for i in range(2, len(nodelist), 2):
right = self.com_node(nodelist[i])
if nodelist[i-1][0] == token.LEFTSHIFT:
node = LeftShift([node, right], lineno=nodelist[1][2])
elif nodelist[i-1][0] == token.RIGHTSHIFT:
node = RightShift([node, right], lineno=nodelist[1][2])
else:
raise ValueError, "unexpected token: %s" % nodelist[i-1][0]
return node
def arith_expr(self, nodelist):
node = self.com_node(nodelist[0])
for i in range(2, len(nodelist), 2):
right = self.com_node(nodelist[i])
if nodelist[i-1][0] == token.PLUS:
node = Add([node, right], lineno=nodelist[1][2])
elif nodelist[i-1][0] == token.MINUS:
node = Sub([node, right], lineno=nodelist[1][2])
else:
raise ValueError, "unexpected token: %s" % nodelist[i-1][0]
return node
def term(self, nodelist):
node = self.com_node(nodelist[0])
for i in range(2, len(nodelist), 2):
right = self.com_node(nodelist[i])
t = nodelist[i-1][0]
if t == token.STAR:
node = Mul([node, right])
elif t == token.SLASH:
node = Div([node, right])
elif t == token.PERCENT:
node = Mod([node, right])
elif t == token.DOUBLESLASH:
node = FloorDiv([node, right])
else:
raise ValueError, "unexpected token: %s" % t
node.lineno = nodelist[1][2]
return node
def factor(self, nodelist):
elt = nodelist[0]
t = elt[0]
node = self.lookup_node(nodelist[-1])(nodelist[-1][1:])
# need to handle (unary op)constant here...
if t == token.PLUS:
return UnaryAdd(node, lineno=elt[2])
elif t == token.MINUS:
return UnarySub(node, lineno=elt[2])
elif t == token.TILDE:
node = Invert(node, lineno=elt[2])
return node
def power(self, nodelist):
# power: atom trailer* ('**' factor)*
node = self.com_node(nodelist[0])
for i in range(1, len(nodelist)):
elt = nodelist[i]
if elt[0] == token.DOUBLESTAR:
return Power([node, self.com_node(nodelist[i+1])],
lineno=elt[2])
node = self.com_apply_trailer(node, elt)
return node
def atom(self, nodelist):
return self._atom_dispatch[nodelist[0][0]](nodelist)
def atom_lpar(self, nodelist):
if nodelist[1][0] == token.RPAR:
return Tuple((), lineno=nodelist[0][2])
return self.com_node(nodelist[1])
def atom_lsqb(self, nodelist):
if nodelist[1][0] == token.RSQB:
return List((), lineno=nodelist[0][2])
return self.com_list_constructor(nodelist[1])
def atom_lbrace(self, nodelist):
if nodelist[1][0] == token.RBRACE:
return Dict((), lineno=nodelist[0][2])
return self.com_dictorsetmaker(nodelist[1])
def atom_backquote(self, nodelist):
return Backquote(self.com_node(nodelist[1]))
def atom_number(self, nodelist):
### need to verify this matches compile.c
k = eval(nodelist[0][1])
return Const(k, lineno=nodelist[0][2])
def decode_literal(self, lit):
if self.encoding:
# this is particularly fragile & a bit of a
# hack... changes in compile.c:parsestr and
# tokenizer.c must be reflected here.
if self.encoding not in ['utf-8', 'iso-8859-1']:
lit = unicode(lit, 'utf-8').encode(self.encoding)
return eval("# coding: %s\n%s" % (self.encoding, lit))
else:
return eval(lit)
def atom_string(self, nodelist):
k = ''
for node in nodelist:
k += self.decode_literal(node[1])
return Const(k, lineno=nodelist[0][2])
def atom_name(self, nodelist):
return Name(nodelist[0][1], lineno=nodelist[0][2])
# --------------------------------------------------------------
#
# INTERNAL PARSING UTILITIES
#
# The use of com_node() introduces a lot of extra stack frames,
# enough to cause a stack overflow compiling test.test_parser with
# the standard interpreter recursionlimit. The com_node() is a
# convenience function that hides the dispatch details, but comes
# at a very high cost. It is more efficient to dispatch directly
# in the callers. In these cases, use lookup_node() and call the
# dispatched node directly.
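    # For example, where a caller could write
    #     self.com_node(node)
    # it can dispatch directly with
    #     self.lookup_node(node)(node[1:])
    # as expr_stmt() and factor() above already do.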
def lookup_node(self, node):
return self._dispatch[node[0]]
def com_node(self, node):
# Note: compile.c has handling in com_node for del_stmt, pass_stmt,
# break_stmt, stmt, small_stmt, flow_stmt, simple_stmt,
# and compound_stmt.
# We'll just dispatch them.
return self._dispatch[node[0]](node[1:])
def com_NEWLINE(self, *args):
# A ';' at the end of a line can make a NEWLINE token appear
        # here. Render it harmless. (genc discards ('discard',
        # ('const', xxxx)) nodes.)
return Discard(Const(None))
def com_arglist(self, nodelist):
# varargslist:
# (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] | '**' NAME)
# | fpdef ['=' test] (',' fpdef ['=' test])* [',']
# fpdef: NAME | '(' fplist ')'
# fplist: fpdef (',' fpdef)* [',']
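        # For illustration: 'def f(a, b=1, *args, **kw)' yields
        # names=['a', 'b', 'args', 'kw'], defaults=[Const(1)] and
        # flags=CO_VARARGS | CO_VARKEYWORDS.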
names = []
defaults = []
flags = 0
i = 0
while i < len(nodelist):
node = nodelist[i]
if node[0] == token.STAR or node[0] == token.DOUBLESTAR:
if node[0] == token.STAR:
node = nodelist[i+1]
if node[0] == token.NAME:
names.append(node[1])
flags = flags | CO_VARARGS
i = i + 3
if i < len(nodelist):
# should be DOUBLESTAR
t = nodelist[i][0]
if t == token.DOUBLESTAR:
node = nodelist[i+1]
else:
raise ValueError, "unexpected token: %s" % t
names.append(node[1])
flags = flags | CO_VARKEYWORDS
break
# fpdef: NAME | '(' fplist ')'
names.append(self.com_fpdef(node))
i = i + 1
if i < len(nodelist) and nodelist[i][0] == token.EQUAL:
defaults.append(self.com_node(nodelist[i + 1]))
i = i + 2
elif len(defaults):
# we have already seen an argument with default, but here
# came one without
raise SyntaxError, "non-default argument follows default argument"
# skip the comma
i = i + 1
return names, defaults, flags
def com_fpdef(self, node):
# fpdef: NAME | '(' fplist ')'
if node[1][0] == token.LPAR:
return self.com_fplist(node[2])
return node[1][1]
def com_fplist(self, node):
# fplist: fpdef (',' fpdef)* [',']
if len(node) == 2:
return self.com_fpdef(node[1])
list = []
for i in range(1, len(node), 2):
list.append(self.com_fpdef(node[i]))
return tuple(list)
def com_dotted_name(self, node):
# String together the dotted names and return the string
name = ""
for n in node:
if type(n) == type(()) and n[0] == 1:
name = name + n[1] + '.'
return name[:-1]
def com_dotted_as_name(self, node):
assert node[0] == symbol.dotted_as_name
node = node[1:]
dot = self.com_dotted_name(node[0][1:])
if len(node) == 1:
return dot, None
assert node[1][1] == 'as'
assert node[2][0] == token.NAME
return dot, node[2][1]
def com_dotted_as_names(self, node):
assert node[0] == symbol.dotted_as_names
node = node[1:]
names = [self.com_dotted_as_name(node[0])]
for i in range(2, len(node), 2):
names.append(self.com_dotted_as_name(node[i]))
return names
def com_import_as_name(self, node):
assert node[0] == symbol.import_as_name
node = node[1:]
assert node[0][0] == token.NAME
if len(node) == 1:
return node[0][1], None
assert node[1][1] == 'as', node
assert node[2][0] == token.NAME
return node[0][1], node[2][1]
def com_import_as_names(self, node):
assert node[0] == symbol.import_as_names
node = node[1:]
names = [self.com_import_as_name(node[0])]
for i in range(2, len(node), 2):
names.append(self.com_import_as_name(node[i]))
return names
def com_bases(self, node):
bases = []
for i in range(1, len(node), 2):
bases.append(self.com_node(node[i]))
return bases
def com_try_except_finally(self, nodelist):
# ('try' ':' suite
# ((except_clause ':' suite)+ ['else' ':' suite] ['finally' ':' suite]
# | 'finally' ':' suite))
if nodelist[3][0] == token.NAME:
# first clause is a finally clause: only try-finally
return TryFinally(self.com_node(nodelist[2]),
self.com_node(nodelist[5]),
lineno=nodelist[0][2])
#tryexcept: [TryNode, [except_clauses], elseNode)]
clauses = []
elseNode = None
finallyNode = None
for i in range(3, len(nodelist), 3):
node = nodelist[i]
if node[0] == symbol.except_clause:
# except_clause: 'except' [expr [(',' | 'as') expr]] */
if len(node) > 2:
expr1 = self.com_node(node[2])
if len(node) > 4:
expr2 = self.com_assign(node[4], OP_ASSIGN)
else:
expr2 = None
else:
expr1 = expr2 = None
clauses.append((expr1, expr2, self.com_node(nodelist[i+2])))
if node[0] == token.NAME:
if node[1] == 'else':
elseNode = self.com_node(nodelist[i+2])
elif node[1] == 'finally':
finallyNode = self.com_node(nodelist[i+2])
try_except = TryExcept(self.com_node(nodelist[2]), clauses, elseNode,
lineno=nodelist[0][2])
if finallyNode:
return TryFinally(try_except, finallyNode, lineno=nodelist[0][2])
else:
return try_except
def com_with(self, nodelist):
# with_stmt: 'with' with_item (',' with_item)* ':' suite
body = self.com_node(nodelist[-1])
for i in range(len(nodelist) - 3, 0, -2):
ret = self.com_with_item(nodelist[i], body, nodelist[0][2])
if i == 1:
return ret
body = ret
def com_with_item(self, nodelist, body, lineno):
# with_item: test ['as' expr]
if len(nodelist) == 4:
var = self.com_assign(nodelist[3], OP_ASSIGN)
else:
var = None
expr = self.com_node(nodelist[1])
return With(expr, var, body, lineno=lineno)
def com_augassign_op(self, node):
assert node[0] == symbol.augassign
return node[1]
def com_augassign(self, node):
"""Return node suitable for lvalue of augmented assignment
Names, slices, and attributes are the only allowable nodes.
"""
l = self.com_node(node)
if l.__class__ in (Name, Slice, Subscript, Getattr):
return l
raise SyntaxError, "can't assign to %s" % l.__class__.__name__
def com_assign(self, node, assigning):
# return a node suitable for use as an "lvalue"
# loop to avoid trivial recursion
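        # For example, the target of 'x, y = ...' becomes
        # AssTuple([AssName('x', OP_ASSIGN), AssName('y', OP_ASSIGN)]) and
        # 'obj.attr = ...' becomes AssAttr(Name('obj'), 'attr', OP_ASSIGN).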
while 1:
t = node[0]
if t in (symbol.exprlist, symbol.testlist, symbol.testlist_safe, symbol.testlist_comp):
if len(node) > 2:
return self.com_assign_tuple(node, assigning)
node = node[1]
elif t in _assign_types:
if len(node) > 2:
raise SyntaxError, "can't assign to operator"
node = node[1]
elif t == symbol.power:
if node[1][0] != symbol.atom:
raise SyntaxError, "can't assign to operator"
if len(node) > 2:
primary = self.com_node(node[1])
for i in range(2, len(node)-1):
ch = node[i]
if ch[0] == token.DOUBLESTAR:
raise SyntaxError, "can't assign to operator"
primary = self.com_apply_trailer(primary, ch)
return self.com_assign_trailer(primary, node[-1],
assigning)
node = node[1]
elif t == symbol.atom:
t = node[1][0]
if t == token.LPAR:
node = node[2]
if node[0] == token.RPAR:
raise SyntaxError, "can't assign to ()"
elif t == token.LSQB:
node = node[2]
if node[0] == token.RSQB:
raise SyntaxError, "can't assign to []"
return self.com_assign_list(node, assigning)
elif t == token.NAME:
return self.com_assign_name(node[1], assigning)
else:
raise SyntaxError, "can't assign to literal"
else:
raise SyntaxError, "bad assignment (%s)" % t
def com_assign_tuple(self, node, assigning):
assigns = []
for i in range(1, len(node), 2):
assigns.append(self.com_assign(node[i], assigning))
return AssTuple(assigns, lineno=extractLineNo(node))
def com_assign_list(self, node, assigning):
assigns = []
for i in range(1, len(node), 2):
if i + 1 < len(node):
if node[i + 1][0] == symbol.list_for:
raise SyntaxError, "can't assign to list comprehension"
assert node[i + 1][0] == token.COMMA, node[i + 1]
assigns.append(self.com_assign(node[i], assigning))
return AssList(assigns, lineno=extractLineNo(node))
def com_assign_name(self, node, assigning):
return AssName(node[1], assigning, lineno=node[2])
def com_assign_trailer(self, primary, node, assigning):
t = node[1][0]
if t == token.DOT:
return self.com_assign_attr(primary, node[2], assigning)
if t == token.LSQB:
return self.com_subscriptlist(primary, node[2], assigning)
if t == token.LPAR:
raise SyntaxError, "can't assign to function call"
raise SyntaxError, "unknown trailer type: %s" % t
def com_assign_attr(self, primary, node, assigning):
return AssAttr(primary, node[1], assigning, lineno=node[-1])
def com_binary(self, constructor, nodelist):
"Compile 'NODE (OP NODE)*' into (type, [ node1, ..., nodeN ])."
l = len(nodelist)
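        # e.g. 'a | b | c' becomes Bitor([Name('a'), Name('b'), Name('c')]).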
if l == 1:
n = nodelist[0]
return self.lookup_node(n)(n[1:])
items = []
for i in range(0, l, 2):
n = nodelist[i]
items.append(self.lookup_node(n)(n[1:]))
return constructor(items, lineno=extractLineNo(nodelist))
def com_stmt(self, node):
result = self.lookup_node(node)(node[1:])
assert result is not None
if isinstance(result, Stmt):
return result
return Stmt([result])
def com_append_stmt(self, stmts, node):
result = self.lookup_node(node)(node[1:])
assert result is not None
if isinstance(result, Stmt):
stmts.extend(result.nodes)
else:
stmts.append(result)
def com_list_constructor(self, nodelist):
# listmaker: test ( list_for | (',' test)* [','] )
values = []
for i in range(1, len(nodelist)):
if nodelist[i][0] == symbol.list_for:
assert len(nodelist[i:]) == 1
return self.com_list_comprehension(values[0],
nodelist[i])
elif nodelist[i][0] == token.COMMA:
continue
values.append(self.com_node(nodelist[i]))
return List(values, lineno=values[0].lineno)
def com_list_comprehension(self, expr, node):
return self.com_comprehension(expr, None, node, 'list')
def com_comprehension(self, expr1, expr2, node, type):
# list_iter: list_for | list_if
# list_for: 'for' exprlist 'in' testlist [list_iter]
# list_if: 'if' test [list_iter]
# XXX should raise SyntaxError for assignment
# XXX(avassalotti) Set and dict comprehensions should have generator
# semantics. In other words, they shouldn't leak
# variables outside of the comprehension's scope.
lineno = node[1][2]
fors = []
while node:
t = node[1][1]
if t == 'for':
assignNode = self.com_assign(node[2], OP_ASSIGN)
compNode = self.com_node(node[4])
newfor = ListCompFor(assignNode, compNode, [])
newfor.lineno = node[1][2]
fors.append(newfor)
if len(node) == 5:
node = None
elif type == 'list':
node = self.com_list_iter(node[5])
else:
node = self.com_comp_iter(node[5])
elif t == 'if':
test = self.com_node(node[2])
newif = ListCompIf(test, lineno=node[1][2])
newfor.ifs.append(newif)
if len(node) == 3:
node = None
elif type == 'list':
node = self.com_list_iter(node[3])
else:
node = self.com_comp_iter(node[3])
else:
raise SyntaxError, \
("unexpected comprehension element: %s %d"
% (node, lineno))
if type == 'list':
return ListComp(expr1, fors, lineno=lineno)
elif type == 'set':
return SetComp(expr1, fors, lineno=lineno)
elif type == 'dict':
return DictComp(expr1, expr2, fors, lineno=lineno)
else:
raise ValueError("unexpected comprehension type: " + repr(type))
def com_list_iter(self, node):
assert node[0] == symbol.list_iter
return node[1]
def com_comp_iter(self, node):
assert node[0] == symbol.comp_iter
return node[1]
def com_generator_expression(self, expr, node):
# comp_iter: comp_for | comp_if
# comp_for: 'for' exprlist 'in' test [comp_iter]
# comp_if: 'if' test [comp_iter]
lineno = node[1][2]
fors = []
while node:
t = node[1][1]
if t == 'for':
assignNode = self.com_assign(node[2], OP_ASSIGN)
genNode = self.com_node(node[4])
newfor = GenExprFor(assignNode, genNode, [],
lineno=node[1][2])
fors.append(newfor)
                if len(node) == 5:
node = None
else:
node = self.com_comp_iter(node[5])
elif t == 'if':
test = self.com_node(node[2])
newif = GenExprIf(test, lineno=node[1][2])
newfor.ifs.append(newif)
if len(node) == 3:
node = None
else:
node = self.com_comp_iter(node[3])
else:
raise SyntaxError, \
("unexpected generator expression element: %s %d"
% (node, lineno))
fors[0].is_outmost = True
return GenExpr(GenExprInner(expr, fors), lineno=lineno)
def com_dictorsetmaker(self, nodelist):
# dictorsetmaker: ( (test ':' test (comp_for | (',' test ':' test)* [','])) |
# (test (comp_for | (',' test)* [','])) )
assert nodelist[0] == symbol.dictorsetmaker
nodelist = nodelist[1:]
if len(nodelist) == 1 or nodelist[1][0] == token.COMMA:
# set literal
items = []
for i in range(0, len(nodelist), 2):
items.append(self.com_node(nodelist[i]))
return Set(items, lineno=items[0].lineno)
elif nodelist[1][0] == symbol.comp_for:
# set comprehension
expr = self.com_node(nodelist[0])
return self.com_comprehension(expr, None, nodelist[1], 'set')
elif len(nodelist) > 3 and nodelist[3][0] == symbol.comp_for:
# dict comprehension
assert nodelist[1][0] == token.COLON
key = self.com_node(nodelist[0])
value = self.com_node(nodelist[2])
return self.com_comprehension(key, value, nodelist[3], 'dict')
else:
# dict literal
items = []
for i in range(0, len(nodelist), 4):
items.append((self.com_node(nodelist[i]),
self.com_node(nodelist[i+2])))
return Dict(items, lineno=items[0][0].lineno)
def com_apply_trailer(self, primaryNode, nodelist):
t = nodelist[1][0]
if t == token.LPAR:
return self.com_call_function(primaryNode, nodelist[2])
if t == token.DOT:
return self.com_select_member(primaryNode, nodelist[2])
if t == token.LSQB:
return self.com_subscriptlist(primaryNode, nodelist[2], OP_APPLY)
raise SyntaxError, 'unknown node type: %s' % t
def com_select_member(self, primaryNode, nodelist):
if nodelist[0] != token.NAME:
raise SyntaxError, "member must be a name"
return Getattr(primaryNode, nodelist[1], lineno=nodelist[2])
def com_call_function(self, primaryNode, nodelist):
if nodelist[0] == token.RPAR:
return CallFunc(primaryNode, [], lineno=extractLineNo(nodelist))
args = []
kw = 0
star_node = dstar_node = None
len_nodelist = len(nodelist)
i = 1
while i < len_nodelist:
node = nodelist[i]
if node[0]==token.STAR:
if star_node is not None:
                    raise SyntaxError, 'already have the varargs identifier'
star_node = self.com_node(nodelist[i+1])
i = i + 3
continue
elif node[0]==token.DOUBLESTAR:
if dstar_node is not None:
                    raise SyntaxError, 'already have the kwargs identifier'
dstar_node = self.com_node(nodelist[i+1])
i = i + 3
continue
# positional or named parameters
kw, result = self.com_argument(node, kw, star_node)
if len_nodelist != 2 and isinstance(result, GenExpr) \
and len(node) == 3 and node[2][0] == symbol.comp_for:
# allow f(x for x in y), but reject f(x for x in y, 1)
# should use f((x for x in y), 1) instead of f(x for x in y, 1)
raise SyntaxError, 'generator expression needs parenthesis'
args.append(result)
i = i + 2
return CallFunc(primaryNode, args, star_node, dstar_node,
lineno=extractLineNo(nodelist))
def com_argument(self, nodelist, kw, star_node):
if len(nodelist) == 3 and nodelist[2][0] == symbol.comp_for:
test = self.com_node(nodelist[1])
return 0, self.com_generator_expression(test, nodelist[2])
if len(nodelist) == 2:
if kw:
raise SyntaxError, "non-keyword arg after keyword arg"
if star_node:
raise SyntaxError, "only named arguments may follow *expression"
return 0, self.com_node(nodelist[1])
result = self.com_node(nodelist[3])
n = nodelist[1]
while len(n) == 2 and n[0] != token.NAME:
n = n[1]
if n[0] != token.NAME:
raise SyntaxError, "keyword can't be an expression (%s)"%n[0]
node = Keyword(n[1], result, lineno=n[2])
return 1, node
def com_subscriptlist(self, primary, nodelist, assigning):
# slicing: simple_slicing | extended_slicing
# simple_slicing: primary "[" short_slice "]"
# extended_slicing: primary "[" slice_list "]"
# slice_list: slice_item ("," slice_item)* [","]
# backwards compat slice for '[i:j]'
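        # For example 'x[i:j]' takes the com_slice() path below and builds
        # a Slice node, while 'x[i:j:k]' or 'x[i, j]' builds a Subscript
        # from com_subscript() items.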
if len(nodelist) == 2:
sub = nodelist[1]
if (sub[1][0] == token.COLON or \
(len(sub) > 2 and sub[2][0] == token.COLON)) and \
sub[-1][0] != symbol.sliceop:
return self.com_slice(primary, sub, assigning)
subscripts = []
for i in range(1, len(nodelist), 2):
subscripts.append(self.com_subscript(nodelist[i]))
return Subscript(primary, assigning, subscripts,
lineno=extractLineNo(nodelist))
def com_subscript(self, node):
# slice_item: expression | proper_slice | ellipsis
ch = node[1]
t = ch[0]
if t == token.DOT and node[2][0] == token.DOT:
return Ellipsis()
if t == token.COLON or len(node) > 2:
return self.com_sliceobj(node)
return self.com_node(ch)
def com_sliceobj(self, node):
# proper_slice: short_slice | long_slice
# short_slice: [lower_bound] ":" [upper_bound]
# long_slice: short_slice ":" [stride]
# lower_bound: expression
# upper_bound: expression
# stride: expression
#
# Note: a stride may be further slicing...
items = []
if node[1][0] == token.COLON:
items.append(Const(None))
i = 2
else:
items.append(self.com_node(node[1]))
# i == 2 is a COLON
i = 3
if i < len(node) and node[i][0] == symbol.test:
items.append(self.com_node(node[i]))
i = i + 1
else:
items.append(Const(None))
# a short_slice has been built. look for long_slice now by looking
# for strides...
for j in range(i, len(node)):
ch = node[j]
if len(ch) == 2:
items.append(Const(None))
else:
items.append(self.com_node(ch[2]))
return Sliceobj(items, lineno=extractLineNo(node))
def com_slice(self, primary, node, assigning):
# short_slice: [lower_bound] ":" [upper_bound]
lower = upper = None
if len(node) == 3:
if node[1][0] == token.COLON:
upper = self.com_node(node[2])
else:
lower = self.com_node(node[1])
elif len(node) == 4:
lower = self.com_node(node[1])
upper = self.com_node(node[3])
return Slice(primary, assigning, lower, upper,
lineno=extractLineNo(node))
def get_docstring(self, node, n=None):
if n is None:
n = node[0]
node = node[1:]
if n == symbol.suite:
if len(node) == 1:
return self.get_docstring(node[0])
for sub in node:
if sub[0] == symbol.stmt:
return self.get_docstring(sub)
return None
if n == symbol.file_input:
for sub in node:
if sub[0] == symbol.stmt:
return self.get_docstring(sub)
return None
if n == symbol.atom:
if node[0][0] == token.STRING:
s = ''
for t in node:
s = s + eval(t[1])
return s
return None
if n == symbol.stmt or n == symbol.simple_stmt \
or n == symbol.small_stmt:
return self.get_docstring(node[0])
if n in _doc_nodes and len(node) == 1:
return self.get_docstring(node[0])
return None
_doc_nodes = [
symbol.expr_stmt,
symbol.testlist,
symbol.testlist_safe,
symbol.test,
symbol.or_test,
symbol.and_test,
symbol.not_test,
symbol.comparison,
symbol.expr,
symbol.xor_expr,
symbol.and_expr,
symbol.shift_expr,
symbol.arith_expr,
symbol.term,
symbol.factor,
symbol.power,
]
# comp_op: '<' | '>' | '=' | '>=' | '<=' | '<>' | '!=' | '=='
# | 'in' | 'not' 'in' | 'is' | 'is' 'not'
_cmp_types = {
token.LESS : '<',
token.GREATER : '>',
token.EQEQUAL : '==',
token.EQUAL : '==',
token.LESSEQUAL : '<=',
token.GREATEREQUAL : '>=',
token.NOTEQUAL : '!=',
}
_legal_node_types = [
symbol.funcdef,
symbol.classdef,
symbol.stmt,
symbol.small_stmt,
symbol.flow_stmt,
symbol.simple_stmt,
symbol.compound_stmt,
symbol.expr_stmt,
symbol.print_stmt,
symbol.del_stmt,
symbol.pass_stmt,
symbol.break_stmt,
symbol.continue_stmt,
symbol.return_stmt,
symbol.raise_stmt,
symbol.import_stmt,
symbol.global_stmt,
symbol.exec_stmt,
symbol.assert_stmt,
symbol.if_stmt,
symbol.while_stmt,
symbol.for_stmt,
symbol.try_stmt,
symbol.with_stmt,
symbol.suite,
symbol.testlist,
symbol.testlist_safe,
symbol.test,
symbol.and_test,
symbol.not_test,
symbol.comparison,
symbol.exprlist,
symbol.expr,
symbol.xor_expr,
symbol.and_expr,
symbol.shift_expr,
symbol.arith_expr,
symbol.term,
symbol.factor,
symbol.power,
symbol.atom,
]
if hasattr(symbol, 'yield_stmt'):
_legal_node_types.append(symbol.yield_stmt)
if hasattr(symbol, 'yield_expr'):
_legal_node_types.append(symbol.yield_expr)
_assign_types = [
symbol.test,
symbol.or_test,
symbol.and_test,
symbol.not_test,
symbol.comparison,
symbol.expr,
symbol.xor_expr,
symbol.and_expr,
symbol.shift_expr,
symbol.arith_expr,
symbol.term,
symbol.factor,
]
_names = {}
for k, v in symbol.sym_name.items():
_names[k] = v
for k, v in token.tok_name.items():
_names[k] = v
def debug_tree(tree):
l = []
for elt in tree:
if isinstance(elt, int):
l.append(_names.get(elt, elt))
elif isinstance(elt, str):
l.append(elt)
else:
l.append(debug_tree(elt))
return l
| bsd-3-clause | -6,453,651,066,989,307,000 | 33.598046 | 99 | 0.522087 | false |
buuck/root | interpreter/cling/tools/Jupyter/kernel/clingkernel.py | 9 | 11312 | #!/usr/bin/env python
#------------------------------------------------------------------------------
# CLING - the C++ LLVM-based InterpreterG :)
# author: Min RK
# Copyright (c) Min RK
#
# This file is dual-licensed: you can choose to license it under the University
# of Illinois Open Source License or the GNU Lesser General Public License. See
# LICENSE.TXT for details.
#------------------------------------------------------------------------------
"""
Cling Kernel for Jupyter
Talks to Cling via ctypes
"""
from __future__ import print_function
__version__ = '0.0.2'
import ctypes
from contextlib import contextmanager
from fcntl import fcntl, F_GETFL, F_SETFL
import os
import shutil
import select
import struct
import sys
import threading
from traitlets import Unicode, Float, Dict, List, CaselessStrEnum
from ipykernel.kernelbase import Kernel
from ipykernel.kernelapp import kernel_aliases,kernel_flags, IPKernelApp
from ipykernel.ipkernel import IPythonKernel
from ipykernel.zmqshell import ZMQInteractiveShell
from IPython.core.profiledir import ProfileDir
from jupyter_client.session import Session
class my_void_p(ctypes.c_void_p):
pass
libc = ctypes.CDLL(None)
try:
c_stdout_p = ctypes.c_void_p.in_dll(libc, 'stdout')
c_stderr_p = ctypes.c_void_p.in_dll(libc, 'stderr')
except ValueError:
    # libc's stdout has a funny name on OS X
c_stdout_p = ctypes.c_void_p.in_dll(libc, '__stdoutp')
c_stderr_p = ctypes.c_void_p.in_dll(libc, '__stderrp')
class ClingKernel(Kernel):
"""Cling Kernel for Jupyter"""
implementation = 'cling_kernel'
implementation_version = __version__
language_version = 'X'
banner = Unicode()
def _banner_default(self):
return 'cling-%s' % self.language_version
# codemirror_mode='clike' *should* work but doesn't, using the mimetype instead
language_info = {'name': 'c++',
'codemirror_mode': 'text/x-c++src',
                     'mimetype': 'text/x-c++src',
'file_extension': '.c++'}
flush_interval = Float(0.25, config=True)
std = CaselessStrEnum(default_value='c++11',
                          values=['c++11', 'c++14', 'c++17'],
                          help="C++ standard to use, either c++17, c++14 or c++11").tag(config=True)
def __init__(self, **kwargs):
super(ClingKernel, self).__init__(**kwargs)
try:
whichCling = os.readlink(shutil.which('cling'))
except OSError as e:
#If cling is not a symlink try a regular file
#readlink returns POSIX error EINVAL (22) if the
#argument is not a symlink
if e.args[0] == 22:
whichCling = shutil.which('cling')
else:
raise e
except AttributeError:
from distutils.spawn import find_executable
whichCling = find_executable('cling')
if whichCling:
clingInstDir = os.path.dirname(os.path.dirname(whichCling))
llvmResourceDir = clingInstDir
else:
raise RuntimeError('Cannot find cling in $PATH. No cling, no fun.')
for ext in ['so', 'dylib', 'dll']:
libFilename = clingInstDir + "/lib/libclingJupyter." + ext
if os.access(libFilename, os.R_OK):
self.libclingJupyter = ctypes.CDLL(clingInstDir + "/lib/libclingJupyter." + ext,
mode = ctypes.RTLD_GLOBAL)
break
if not getattr(self, 'libclingJupyter', None):
raise RuntimeError('Cannot find ' + clingInstDir + '/lib/libclingJupyter.{so,dylib,dll}')
self.libclingJupyter.cling_create.restype = my_void_p
self.libclingJupyter.cling_eval.restype = my_void_p
#build -std=c++11 or -std=c++14 option
stdopt = ("-std=" + self.std).encode('utf-8')
self.log.info("Using {}".format(stdopt.decode('utf-8')))
#from IPython.utils import io
#io.rprint("DBG: Using {}".format(stdopt.decode('utf-8')))
strarr = ctypes.c_char_p*5
argv = strarr(b"clingJupyter",stdopt, b"-I" + clingInstDir.encode('utf-8') + b"/include/",b"",b"")
llvmResourceDirCP = ctypes.c_char_p(llvmResourceDir.encode('utf8'))
self.output_pipe, pipe_in = os.pipe()
self.interp = self.libclingJupyter.cling_create(5, argv, llvmResourceDirCP, pipe_in)
self.libclingJupyter.cling_complete_start.restype = my_void_p
self.libclingJupyter.cling_complete_next.restype = my_void_p #c_char_p
self.output_thread = threading.Thread(target=self.publish_pipe_output)
self.output_thread.daemon = True
self.output_thread.start()
def _recv_dict(self, pipe):
"""Receive a serialized dict on a pipe
Returns the dictionary.
"""
# Wire format:
# // Pipe sees (all numbers are longs, except for the first):
# // - num bytes in a long (sent as a single unsigned char!)
# // - num elements of the MIME dictionary; Jupyter selects one to display.
# // For each MIME dictionary element:
# // - length of MIME type key
# // - MIME type key
# // - size of MIME data buffer (including the terminating 0 for
# // 0-terminated strings)
# // - MIME data buffer
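        # Worked example (illustrative, assuming 8-byte longs): a payload
        # of {'text/plain': 'hi'} arrives as
        #   b'\x08' + pack('Q', 1)             # one dictionary element
        #   + pack('Q', 10) + b'text/plain'    # key length, key
        #   + pack('Q', 3) + b'hi\x00'         # buffer size incl. NUL, data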
data = {}
b1 = os.read(pipe, 1)
sizeof_long = struct.unpack('B', b1)[0]
if sizeof_long == 8:
fmt = 'Q'
else:
fmt = 'L'
buf = os.read(pipe, sizeof_long)
num_elements = struct.unpack(fmt, buf)[0]
for i in range(num_elements):
buf = os.read(pipe, sizeof_long)
len_key = struct.unpack(fmt, buf)[0]
key = os.read(pipe, len_key).decode('utf8')
buf = os.read(pipe, sizeof_long)
len_value = struct.unpack(fmt, buf)[0]
value = os.read(pipe, len_value).decode('utf8')
data[key] = value
return data
def publish_pipe_output(self):
"""Watch output_pipe for display-data messages
and publish them on IOPub when they arrive
"""
while True:
select.select([self.output_pipe], [], [])
data = self._recv_dict(self.output_pipe)
self.session.send(self.iopub_socket, 'display_data',
content={
'data': data,
'metadata': {},
},
parent=self._parent_header,
)
@contextmanager
    def forward_stream(self, name):
        """Capture output on ``name`` ('stdout' or 'stderr') and forward
        it as stream messages"""
# create pipe for stdout
if name == 'stdout':
c_flush_p = c_stdout_p
elif name == 'stderr':
c_flush_p = c_stderr_p
else:
raise ValueError("Name must be stdout or stderr, not %r" % name)
real_fd = getattr(sys, '__%s__' % name).fileno()
save_fd = os.dup(real_fd)
pipe_out, pipe_in = os.pipe()
os.dup2(pipe_in, real_fd)
os.close(pipe_in)
# make pipe_out non-blocking
flags = fcntl(pipe_out, F_GETFL)
fcntl(pipe_out, F_SETFL, flags|os.O_NONBLOCK)
def forwarder(pipe):
"""Forward bytes on a pipe to stream messages"""
while True:
r, w, x = select.select([pipe], [], [], self.flush_interval)
if not r:
# nothing to read, flush libc's stdout and check again
libc.fflush(c_flush_p)
continue
data = os.read(pipe, 1024)
if not data:
# pipe closed, we are done
break
# send output
self.session.send(self.iopub_socket, 'stream', {
'name': name,
'text': data.decode('utf8', 'replace'),
}, parent=self._parent_header)
t = threading.Thread(target=forwarder, args=(pipe_out,))
t.start()
try:
yield
finally:
# flush the pipe
libc.fflush(c_flush_p)
os.close(real_fd)
t.join()
# and restore original stdout
os.close(pipe_out)
os.dup2(save_fd, real_fd)
os.close(save_fd)
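    # forward_stream() is used as a context manager around cell execution;
    # see do_execute() below:
    #   with self.forward_stream('stdout'), self.forward_stream('stderr'):
    #       stringResult = self.run_cell(code, silent)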
def run_cell(self, code, silent=False):
return self.libclingJupyter.cling_eval(self.interp, ctypes.c_char_p(code.encode('utf8')))
def do_execute(self, code, silent, store_history=True,
user_expressions=None, allow_stdin=False):
if not code.strip():
return {
'status': 'ok',
'execution_count': self.execution_count,
'payload': [],
'user_expressions': {},
}
status = 'ok'
with self.forward_stream('stdout'), self.forward_stream('stderr'):
stringResult = self.run_cell(code, silent)
if not stringResult:
status = 'error'
else:
self.session.send(
self.iopub_socket,
'execute_result',
content={
'data': {
'text/plain': ctypes.cast(stringResult, ctypes.c_char_p).value.decode('utf8', 'replace'),
},
'metadata': {},
'execution_count': self.execution_count,
},
parent=self._parent_header
)
self.libclingJupyter.cling_eval_free(stringResult)
reply = {
'status': status,
'execution_count': self.execution_count,
}
if status == 'error':
err = {
'ename': 'ename',
'evalue': 'evalue',
'traceback': [],
}
self.send_response(self.iopub_socket, 'error', err)
reply.update(err)
elif status == 'ok':
reply.update({
'THIS DOES NOT WORK: payload': [{
'source': 'set_next_input',
'replace': True,
'text':'//THIS IS MAGIC\n' + code
}],
'user_expressions': {},
})
else:
raise ValueError("Invalid status: %r" % status)
return reply
def do_complete(self, code, cursor_pos):
"""Provide completions here"""
# if cursor_pos = cursor_start = cursor_end,
# matches should be a list of strings to be appended after the cursor
return {'matches' : [],
'cursor_end' : cursor_pos,
'cursor_start' : cursor_pos,
'metadata' : {},
'status' : 'ok'}
cling_flags = kernel_flags
class ClingKernelApp(IPKernelApp):
name='cling-kernel'
cling_aliases = kernel_aliases.copy()
cling_aliases['std']='ClingKernel.std'
aliases = Dict(cling_aliases)
flags = Dict(cling_flags)
classes = List([ ClingKernel, IPythonKernel, ZMQInteractiveShell, ProfileDir, Session ])
kernel_class = ClingKernel
def main():
"""launch a cling kernel"""
ClingKernelApp.launch_instance()
if __name__ == '__main__':
main()
| lgpl-2.1 | 4,679,301,531,515,320,000 | 34.239875 | 113 | 0.541284 | false |
cjmayo/xbmc | tools/EventClients/Clients/Kodi Send/kodi-send.py | 10 | 2592 | #!/usr/bin/python
#
# XBMC Media Center
# XBMC Send
# Copyright (c) 2009 team-xbmc
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import sys, os
import getopt
from socket import *
try:
from kodi.xbmcclient import *
except:
sys.path.append(os.path.join(os.path.realpath(os.path.dirname(__file__)), '../../lib/python'))
from xbmcclient import *
def usage():
print("kodi-send [OPTION] --action=ACTION")
print('Example')
print('\tkodi-send --host=192.168.0.1 --port=9777 --action="Quit"')
print("Options")
print("\t-?, --help\t\t\tWill bring up this message")
print("\t--host=HOST\t\t\tChoose what HOST to connect to (default=localhost)")
print("\t--port=PORT\t\t\tChoose what PORT to connect to (default=9777)")
print('\t--action=ACTION\t\t\tSends an action to XBMC, this option can be added multiple times to create a macro')
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], "?pa:v", ["help", "host=", "port=", "action="])
except getopt.GetoptError as err:
# print help information and exit:
print(str(err)) # will print something like "option -a not recognized"
usage()
sys.exit(2)
ip = "localhost"
port = 9777
actions = []
verbose = False
for o, a in opts:
if o in ("-?", "--help"):
usage()
sys.exit()
elif o == "--host":
ip = a
elif o == "--port":
port = int(a)
elif o in ("-a", "--action"):
actions.append(a)
else:
assert False, "unhandled option"
addr = (ip, port)
sock = socket(AF_INET,SOCK_DGRAM)
    if len(actions) == 0:
usage()
sys.exit(0)
for action in actions:
print('Sending action: %s' % action)
packet = PacketACTION(actionmessage=action, actiontype=ACTION_BUTTON)
packet.send(sock, addr)
if __name__=="__main__":
main()
| gpl-2.0 | 9,141,084,477,955,502,000 | 31.4 | 118 | 0.623457 | false |
SanPen/GridCal | src/GridCal/Gui/TowerBuilder/LineBuilderDialogue.py | 1 | 9842 | # This file is part of GridCal.
#
# GridCal is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GridCal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GridCal. If not, see <http://www.gnu.org/licenses/>.
import sys
from PySide2.QtWidgets import *
from GridCal.Gui.TowerBuilder.gui import *
from GridCal.Engine.Devices import *
from GridCal.Gui.TowerBuilder.tower_model import *
from GridCal.Gui.GuiFunctions import PandasModel
from GridCal.Gui.GeneralDialogues import LogsDialogue
class TowerBuilderGUI(QtWidgets.QDialog):
def __init__(self, parent=None, tower=None, wires_catalogue=list()):
"""
Constructor
Args:
parent:
"""
QtWidgets.QDialog.__init__(self, parent)
self.ui = Ui_Dialog()
self.ui.setupUi(self)
self.setWindowTitle('Line builder')
        # 8:2 stretch ratio between the two splitter panes
self.ui.main_splitter.setStretchFactor(0, 8)
self.ui.main_splitter.setStretchFactor(1, 2)
# create wire collection from the catalogue
self.wires_table = WiresTable(self)
for wire in wires_catalogue:
self.wires_table.add(wire)
# was there a tower passed? else create one
if tower is None:
self.tower_driver = TowerModel(self, edit_callback=self.plot)
else:
self.tower_driver = TowerModel(self, edit_callback=self.plot, tower=tower)
self.ui.name_lineEdit.setText(self.tower_driver.tower.name)
self.ui.rho_doubleSpinBox.setValue(self.tower_driver.tower.earth_resistivity)
# set models
self.ui.wires_tableView.setModel(self.wires_table)
self.ui.tower_tableView.setModel(self.tower_driver)
# button clicks
# self.ui.add_wire_pushButton.clicked.connect(self.add_wire_to_collection)
# self.ui.delete_wire_pushButton.clicked.connect(self.delete_wire_from_collection)
self.ui.add_to_tower_pushButton.clicked.connect(self.add_wire_to_tower)
self.ui.delete_from_tower_pushButton.clicked.connect(self.delete_wire_from_tower)
self.ui.compute_pushButton.clicked.connect(self.compute)
self.ui.name_lineEdit.textChanged.connect(self.name_changed)
def msg(self, text, title="Warning"):
"""
Message box
:param text: Text to display
:param title: Name of the window
"""
msg = QMessageBox()
msg.setIcon(QMessageBox.Information)
msg.setText(text)
# msg.setInformativeText("This is additional information")
msg.setWindowTitle(title)
# msg.setDetailedText("The details are as follows:")
msg.setStandardButtons(QMessageBox.Ok)
        msg.exec_()
def name_changed(self):
"""
Change name
:return:
"""
self.tower_driver.tower.tower_name = self.ui.name_lineEdit.text()
def add_wire_to_collection(self):
"""
Add new wire to collection
:return:
"""
name = 'Wire_' + str(len(self.wires_table.wires) + 1)
wire = Wire(name=name, gmr=0.01, r=0.01, x=0)
self.wires_table.add(wire)
def delete_wire_from_collection(self):
"""
Delete wire from the collection
:return:
"""
idx = self.ui.wires_tableView.currentIndex()
sel_idx = idx.row()
if sel_idx > -1:
# delete all the wires from the tower too
self.tower_driver.delete_by_name(self.wires_table.wires[sel_idx])
# delete from the catalogue
self.wires_table.delete(sel_idx)
self.plot()
else:
self.msg('Select a wire in the wires catalogue')
def add_wire_to_tower(self):
"""
Add wire to tower
:return:
"""
idx = self.ui.wires_tableView.currentIndex()
sel_idx = idx.row()
if sel_idx > -1:
selected_wire = self.wires_table.wires[sel_idx].copy()
self.tower_driver.add(WireInTower(selected_wire))
else:
self.msg('Select a wire in the wires catalogue')
def delete_wire_from_tower(self):
"""
Delete wire from the tower
:return:
"""
idx = self.ui.tower_tableView.currentIndex()
sel_idx = idx.row()
if sel_idx > -1:
self.tower_driver.delete(sel_idx)
self.plot()
else:
self.msg('Select a wire from the tower')
def compute(self):
"""
:return:
"""
self.tower_driver.tower.frequency = self.ui.frequency_doubleSpinBox.value()
self.tower_driver.tower.earth_resistivity = self.ui.rho_doubleSpinBox.value()
        # check the wires configuration
logs = Logger()
all_ok = self.tower_driver.tower.check(logs)
if not all_ok:
logger_diag = LogsDialogue(name='Tower computation', logs=logs)
logger_diag.exec_()
else:
try:
# compute the matrices
self.tower_driver.tower.compute()
# Impedances in Ohm/km
cols = ['Phase' + str(i) for i in self.tower_driver.tower.z_phases_abcn]
z_df = pd.DataFrame(data=self.tower_driver.tower.z_abcn, columns=cols, index=cols)
self.ui.z_tableView_abcn.setModel(PandasModel(z_df))
cols = ['Phase' + str(i) for i in self.tower_driver.tower.z_phases_abc]
z_df = pd.DataFrame(data=self.tower_driver.tower.z_abc, columns=cols, index=cols)
self.ui.z_tableView_abc.setModel(PandasModel(z_df))
cols = ['Sequence ' + str(i) for i in range(3)]
z_df = pd.DataFrame(data=self.tower_driver.tower.z_seq, columns=cols, index=cols)
self.ui.z_tableView_seq.setModel(PandasModel(z_df))
# Admittances in uS/km
cols = ['Phase' + str(i) for i in self.tower_driver.tower.y_phases_abcn]
z_df = pd.DataFrame(data=self.tower_driver.tower.y_abcn * 1e6, columns=cols, index=cols)
self.ui.y_tableView_abcn.setModel(PandasModel(z_df))
cols = ['Phase' + str(i) for i in self.tower_driver.tower.y_phases_abc]
z_df = pd.DataFrame(data=self.tower_driver.tower.y_abc * 1e6, columns=cols, index=cols)
self.ui.y_tableView_abc.setModel(PandasModel(z_df))
cols = ['Sequence ' + str(i) for i in range(3)]
z_df = pd.DataFrame(data=self.tower_driver.tower.y_seq * 1e6, columns=cols, index=cols)
self.ui.y_tableView_seq.setModel(PandasModel(z_df))
# plot
self.plot()
            except Exception:
exc_type, exc_value, exc_traceback = sys.exc_info()
self.msg(str(exc_traceback) + '\n' + str(exc_value), 'Tower calculation')
def plot(self):
"""
PLot the tower distribution
"""
self.ui.plotwidget.clear()
fig = self.ui.plotwidget.get_figure()
fig.set_facecolor('white')
ax = self.ui.plotwidget.get_axis()
self.tower_driver.tower.plot(ax=ax)
self.ui.plotwidget.redraw()
def example_1(self):
name = '4/0 6/1 ACSR'
r = 0.367851632 # ohm / km
x = 0 # ohm / km
gmr = 0.002481072 # m
wire = Wire(name=name, gmr=gmr, r=r, x=x)
self.tower_driver.add(WireInTower(wire=wire, xpos=0, ypos=8.8392, phase=0))
self.tower_driver.add(WireInTower(wire=wire, xpos=0.762, ypos=8.8392, phase=1))
self.tower_driver.add(WireInTower(wire=wire, xpos=2.1336, ypos=8.8392, phase=2))
self.tower_driver.add(WireInTower(wire=wire, xpos=1.2192, ypos=7.62, phase=3))
self.wires_table.add(wire)
def example_2(self):
name = '4/0 6/1 ACSR'
r = 0.367851632 # ohm / km
x = 0 # ohm / km
gmr = 0.002481072 # m
incx = 0.1
incy = 0.1
wire = Wire(name=name, gmr=gmr, r=r, x=x)
self.tower_driver.add(WireInTower(wire, xpos=0, ypos=8.8392, phase=1))
self.tower_driver.add(WireInTower(wire, xpos=0.762, ypos=8.8392, phase=2))
self.tower_driver.add(WireInTower(wire, xpos=2.1336, ypos=8.8392, phase=3))
self.tower_driver.add(WireInTower(wire, xpos=1.2192, ypos=7.62, phase=0))
self.tower_driver.add(WireInTower(wire, xpos=incx + 0, ypos=8.8392, phase=1))
self.tower_driver.add(WireInTower(wire, xpos=incx + 0.762, ypos=8.8392, phase=2))
self.tower_driver.add(WireInTower(wire, xpos=incx + 2.1336, ypos=8.8392, phase=3))
# self.tower.add(Wire(name, xpos=incx+1.2192, ypos=7.62, gmr=gmr, r=r, x=x, phase=0))
self.tower_driver.add(WireInTower(wire, xpos=incx / 2 + 0, ypos=incy + 8.8392, phase=1))
self.tower_driver.add(WireInTower(wire, xpos=incx / 2 + 0.762, ypos=incy + 8.8392, phase=2))
self.tower_driver.add(WireInTower(wire, xpos=incx / 2 + 2.1336, ypos=incy + 8.8392, phase=3))
# self.tower.add(Wire(name, xpos=incx/2 + 1.2192, ypos=incy+7.62, gmr=gmr, r=r, x=x, phase=0))
self.wires_table.add(wire)
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
window = TowerBuilderGUI()
window.example_2()
window.compute()
window.resize(1.81 * 700.0, 700.0) # golden ratio
window.show()
sys.exit(app.exec_())
| gpl-3.0 | 7,292,113,586,083,879,000 | 35.051282 | 104 | 0.602723 | false |
rbalda/neural_ocr | env/lib/python2.7/site-packages/django/contrib/admin/sites.py | 120 | 21146 | from functools import update_wrapper
from django.apps import apps
from django.conf import settings
from django.contrib.admin import ModelAdmin, actions
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.core.urlresolvers import NoReverseMatch, reverse
from django.db.models.base import ModelBase
from django.http import Http404, HttpResponseRedirect
from django.template.engine import Engine
from django.template.response import TemplateResponse
from django.utils import six
from django.utils.text import capfirst
from django.utils.translation import ugettext as _, ugettext_lazy
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
system_check_errors = []
class AlreadyRegistered(Exception):
pass
class NotRegistered(Exception):
pass
class AdminSite(object):
"""
An AdminSite object encapsulates an instance of the Django admin application, ready
to be hooked in to your URLconf. Models are registered with the AdminSite using the
register() method, and the get_urls() method can then be used to access Django view
functions that present a full admin interface for the collection of registered
models.
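
    Typical wiring (sketch; ``Author``/``AuthorAdmin`` are illustrative)::

        site = AdminSite(name='myadmin')
        site.register(Author, AuthorAdmin)
        urlpatterns = [url(r'^myadmin/', include(site.urls))]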
"""
# Text to put at the end of each page's <title>.
site_title = ugettext_lazy('Django site admin')
# Text to put in each page's <h1>.
site_header = ugettext_lazy('Django administration')
# Text to put at the top of the admin index page.
index_title = ugettext_lazy('Site administration')
# URL for the "View site" link at the top of each admin page.
site_url = '/'
_empty_value_display = '-'
login_form = None
index_template = None
app_index_template = None
login_template = None
logout_template = None
password_change_template = None
password_change_done_template = None
def __init__(self, name='admin'):
self._registry = {} # model_class class -> admin_class instance
self.name = name
self._actions = {'delete_selected': actions.delete_selected}
self._global_actions = self._actions.copy()
def register(self, model_or_iterable, admin_class=None, **options):
"""
Registers the given model(s) with the given admin class.
The model(s) should be Model classes, not instances.
If an admin class isn't given, it will use ModelAdmin (the default
admin options). If keyword arguments are given -- e.g., list_display --
they'll be applied as options to the admin class.
If a model is already registered, this will raise AlreadyRegistered.
If a model is abstract, this will raise ImproperlyConfigured.
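
        Example (sketch; ``Author`` is illustrative)::

            site.register(Author)                          # default ModelAdmin
            site.register(Author, list_display=['name'])   # options build a
                                                           # ModelAdmin subclass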
"""
if not admin_class:
admin_class = ModelAdmin
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model._meta.abstract:
raise ImproperlyConfigured('The model %s is abstract, so it '
'cannot be registered with admin.' % model.__name__)
if model in self._registry:
raise AlreadyRegistered('The model %s is already registered' % model.__name__)
# Ignore the registration if the model has been
# swapped out.
if not model._meta.swapped:
# If we got **options then dynamically construct a subclass of
# admin_class with those **options.
if options:
# For reasons I don't quite understand, without a __module__
# the created class appears to "live" in the wrong place,
# which causes issues later on.
options['__module__'] = __name__
admin_class = type("%sAdmin" % model.__name__, (admin_class,), options)
# Instantiate the admin class to save in the registry
admin_obj = admin_class(model, self)
if admin_class is not ModelAdmin and settings.DEBUG:
system_check_errors.extend(admin_obj.check())
self._registry[model] = admin_obj
def unregister(self, model_or_iterable):
"""
Unregisters the given model(s).
If a model isn't already registered, this will raise NotRegistered.
"""
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model not in self._registry:
raise NotRegistered('The model %s is not registered' % model.__name__)
del self._registry[model]
def is_registered(self, model):
"""
Check if a model class is registered with this `AdminSite`.
"""
return model in self._registry
def add_action(self, action, name=None):
"""
Register an action to be available globally.
"""
name = name or action.__name__
self._actions[name] = action
self._global_actions[name] = action
def disable_action(self, name):
"""
Disable a globally-registered action. Raises KeyError for invalid names.
"""
del self._actions[name]
def get_action(self, name):
"""
Explicitly get a registered global action whether it's enabled or
not. Raises KeyError for invalid names.
"""
return self._global_actions[name]
@property
def actions(self):
"""
Get all the enabled actions as an iterable of (name, func).
"""
return six.iteritems(self._actions)
@property
def empty_value_display(self):
return self._empty_value_display
@empty_value_display.setter
def empty_value_display(self, empty_value_display):
self._empty_value_display = empty_value_display
def has_permission(self, request):
"""
Returns True if the given HttpRequest has permission to view
*at least one* page in the admin site.
"""
return request.user.is_active and request.user.is_staff
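    # Illustrative sketch (not part of Django itself): a subclass could
    # tighten this check, e.g. to restrict the whole admin to superusers.
    # ``SuperuserAdminSite`` is a hypothetical name used for illustration.
    #
    #     class SuperuserAdminSite(AdminSite):
    #         def has_permission(self, request):
    #             return request.user.is_active and request.user.is_superuser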
def check_dependencies(self):
"""
Check that all things needed to run the admin have been correctly installed.
The default implementation checks that admin and contenttypes apps are
installed, as well as the auth context processor.
"""
if not apps.is_installed('django.contrib.admin'):
raise ImproperlyConfigured(
"Put 'django.contrib.admin' in your INSTALLED_APPS "
"setting in order to use the admin application.")
if not apps.is_installed('django.contrib.contenttypes'):
raise ImproperlyConfigured(
"Put 'django.contrib.contenttypes' in your INSTALLED_APPS "
"setting in order to use the admin application.")
try:
default_template_engine = Engine.get_default()
except Exception:
# Skip this non-critical check:
# 1. if the user has a non-trivial TEMPLATES setting and Django
# can't find a default template engine
# 2. if anything goes wrong while loading template engines, in
# order to avoid raising an exception from a confusing location
# Catching ImproperlyConfigured suffices for 1. but 2. requires
# catching all exceptions.
pass
else:
if ('django.contrib.auth.context_processors.auth'
not in default_template_engine.context_processors):
raise ImproperlyConfigured(
"Enable 'django.contrib.auth.context_processors.auth' "
"in your TEMPLATES setting in order to use the admin "
"application.")
def admin_view(self, view, cacheable=False):
"""
Decorator to create an admin view attached to this ``AdminSite``. This
wraps the view and provides permission checking by calling
``self.has_permission``.
You'll want to use this from within ``AdminSite.get_urls()``:
class MyAdminSite(AdminSite):
def get_urls(self):
from django.conf.urls import url
urls = super(MyAdminSite, self).get_urls()
urls += [
url(r'^my_view/$', self.admin_view(some_view))
]
return urls
By default, admin_views are marked non-cacheable using the
``never_cache`` decorator. If the view can be safely cached, set
cacheable=True.
"""
def inner(request, *args, **kwargs):
if not self.has_permission(request):
if request.path == reverse('admin:logout', current_app=self.name):
index_path = reverse('admin:index', current_app=self.name)
return HttpResponseRedirect(index_path)
# Inner import to prevent django.contrib.admin (app) from
# importing django.contrib.auth.models.User (unrelated model).
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(
request.get_full_path(),
reverse('admin:login', current_app=self.name)
)
return view(request, *args, **kwargs)
if not cacheable:
inner = never_cache(inner)
# We add csrf_protect here so this function can be used as a utility
# function for any view, without having to repeat 'csrf_protect'.
if not getattr(view, 'csrf_exempt', False):
inner = csrf_protect(inner)
return update_wrapper(inner, view)
def get_urls(self):
from django.conf.urls import url, include
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level,
# and django.contrib.contenttypes.views imports ContentType.
from django.contrib.contenttypes import views as contenttype_views
if settings.DEBUG:
self.check_dependencies()
def wrap(view, cacheable=False):
def wrapper(*args, **kwargs):
return self.admin_view(view, cacheable)(*args, **kwargs)
wrapper.admin_site = self
return update_wrapper(wrapper, view)
# Admin-site-wide views.
urlpatterns = [
url(r'^$', wrap(self.index), name='index'),
url(r'^login/$', self.login, name='login'),
url(r'^logout/$', wrap(self.logout), name='logout'),
url(r'^password_change/$', wrap(self.password_change, cacheable=True), name='password_change'),
url(r'^password_change/done/$', wrap(self.password_change_done, cacheable=True),
name='password_change_done'),
url(r'^jsi18n/$', wrap(self.i18n_javascript, cacheable=True), name='jsi18n'),
url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$', wrap(contenttype_views.shortcut),
name='view_on_site'),
]
# Add in each model's views, and create a list of valid URLS for the
# app_index
valid_app_labels = []
for model, model_admin in self._registry.items():
urlpatterns += [
url(r'^%s/%s/' % (model._meta.app_label, model._meta.model_name), include(model_admin.urls)),
]
if model._meta.app_label not in valid_app_labels:
valid_app_labels.append(model._meta.app_label)
# If there were ModelAdmins registered, we should have a list of app
        # labels for which we need to allow access to the app_index view.
if valid_app_labels:
regex = r'^(?P<app_label>' + '|'.join(valid_app_labels) + ')/$'
urlpatterns += [
url(regex, wrap(self.app_index), name='app_list'),
]
return urlpatterns
@property
def urls(self):
return self.get_urls(), 'admin', self.name
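    # Typical hook-up in a project's URLconf (illustrative sketch; the
    # surrounding ``urls.py`` module is an assumption about project layout):
    #
    #     from django.conf.urls import include, url
    #     from django.contrib import admin
    #
    #     urlpatterns = [
    #         url(r'^admin/', include(admin.site.urls)),
    #     ]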
def each_context(self, request):
"""
Returns a dictionary of variables to put in the template context for
*every* page in the admin site.
"""
return {
'site_title': self.site_title,
'site_header': self.site_header,
'site_url': self.site_url,
'has_permission': self.has_permission(request),
'available_apps': self.get_app_list(request),
}
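    # Illustrative sketch: a subclass can extend this to expose extra
    # variables to every admin template (``motd`` below is a hypothetical
    # example, not a Django feature):
    #
    #     class MyAdminSite(AdminSite):
    #         def each_context(self, request):
    #             context = super(MyAdminSite, self).each_context(request)
    #             context['motd'] = 'Deploys are frozen on Fridays.'
    #             return context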
def password_change(self, request, extra_context=None):
"""
Handles the "change password" task -- both form display and validation.
"""
from django.contrib.admin.forms import AdminPasswordChangeForm
from django.contrib.auth.views import password_change
url = reverse('admin:password_change_done', current_app=self.name)
defaults = {
'password_change_form': AdminPasswordChangeForm,
'post_change_redirect': url,
'extra_context': dict(self.each_context(request), **(extra_context or {})),
}
if self.password_change_template is not None:
defaults['template_name'] = self.password_change_template
request.current_app = self.name
return password_change(request, **defaults)
def password_change_done(self, request, extra_context=None):
"""
Displays the "success" page after a password change.
"""
from django.contrib.auth.views import password_change_done
defaults = {
'extra_context': dict(self.each_context(request), **(extra_context or {})),
}
if self.password_change_done_template is not None:
defaults['template_name'] = self.password_change_done_template
request.current_app = self.name
return password_change_done(request, **defaults)
def i18n_javascript(self, request):
"""
Displays the i18n JavaScript that the Django admin requires.
This takes into account the USE_I18N setting. If it's set to False, the
generated JavaScript will be leaner and faster.
"""
if settings.USE_I18N:
from django.views.i18n import javascript_catalog
else:
from django.views.i18n import null_javascript_catalog as javascript_catalog
return javascript_catalog(request, packages=['django.conf', 'django.contrib.admin'])
@never_cache
def logout(self, request, extra_context=None):
"""
Logs out the user for the given HttpRequest.
This should *not* assume the user is already logged in.
"""
from django.contrib.auth.views import logout
defaults = {
'extra_context': dict(self.each_context(request), **(extra_context or {})),
}
if self.logout_template is not None:
defaults['template_name'] = self.logout_template
request.current_app = self.name
return logout(request, **defaults)
@never_cache
def login(self, request, extra_context=None):
"""
Displays the login form for the given HttpRequest.
"""
if request.method == 'GET' and self.has_permission(request):
# Already logged-in, redirect to admin index
index_path = reverse('admin:index', current_app=self.name)
return HttpResponseRedirect(index_path)
from django.contrib.auth.views import login
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level,
# and django.contrib.admin.forms eventually imports User.
from django.contrib.admin.forms import AdminAuthenticationForm
context = dict(self.each_context(request),
title=_('Log in'),
app_path=request.get_full_path(),
)
if (REDIRECT_FIELD_NAME not in request.GET and
REDIRECT_FIELD_NAME not in request.POST):
context[REDIRECT_FIELD_NAME] = reverse('admin:index', current_app=self.name)
context.update(extra_context or {})
defaults = {
'extra_context': context,
'authentication_form': self.login_form or AdminAuthenticationForm,
'template_name': self.login_template or 'admin/login.html',
}
request.current_app = self.name
return login(request, **defaults)
def _build_app_dict(self, request, label=None):
"""
Builds the app dictionary. Takes an optional label parameters to filter
models of a specific app.
"""
app_dict = {}
if label:
models = {
m: m_a for m, m_a in self._registry.items()
if m._meta.app_label == label
}
else:
models = self._registry
for model, model_admin in models.items():
app_label = model._meta.app_label
has_module_perms = model_admin.has_module_permission(request)
if not has_module_perms:
if label:
raise PermissionDenied
continue
perms = model_admin.get_model_perms(request)
# Check whether user has any perm for this module.
# If so, add the module to the model_list.
if True not in perms.values():
continue
info = (app_label, model._meta.model_name)
model_dict = {
'name': capfirst(model._meta.verbose_name_plural),
'object_name': model._meta.object_name,
'perms': perms,
}
if perms.get('change'):
try:
model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name)
except NoReverseMatch:
pass
if perms.get('add'):
try:
model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name)
except NoReverseMatch:
pass
if app_label in app_dict:
app_dict[app_label]['models'].append(model_dict)
else:
app_dict[app_label] = {
'name': apps.get_app_config(app_label).verbose_name,
'app_label': app_label,
'app_url': reverse(
'admin:app_list',
kwargs={'app_label': app_label},
current_app=self.name,
),
'has_module_perms': has_module_perms,
'models': [model_dict],
}
if label:
return app_dict.get(label)
return app_dict
def get_app_list(self, request):
"""
Returns a sorted list of all the installed apps that have been
registered in this site.
"""
app_dict = self._build_app_dict(request)
# Sort the apps alphabetically.
app_list = sorted(app_dict.values(), key=lambda x: x['name'].lower())
# Sort the models alphabetically within each app.
for app in app_list:
app['models'].sort(key=lambda x: x['name'])
return app_list
@never_cache
def index(self, request, extra_context=None):
"""
Displays the main admin index page, which lists all of the installed
apps that have been registered in this site.
"""
app_list = self.get_app_list(request)
context = dict(
self.each_context(request),
title=self.index_title,
app_list=app_list,
)
context.update(extra_context or {})
request.current_app = self.name
return TemplateResponse(request, self.index_template or
'admin/index.html', context)
def app_index(self, request, app_label, extra_context=None):
app_dict = self._build_app_dict(request, app_label)
if not app_dict:
raise Http404('The requested admin page does not exist.')
# Sort the models alphabetically within each app.
app_dict['models'].sort(key=lambda x: x['name'])
app_name = apps.get_app_config(app_label).verbose_name
context = dict(self.each_context(request),
title=_('%(app)s administration') % {'app': app_name},
app_list=[app_dict],
app_label=app_label,
)
context.update(extra_context or {})
request.current_app = self.name
return TemplateResponse(request, self.app_index_template or [
'admin/%s/app_index.html' % app_label,
'admin/app_index.html'
], context)
# This global object represents the default admin site, for the common case.
# You can instantiate AdminSite in your own code to create a custom admin site.
site = AdminSite()
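# Illustrative usage sketch (``myapp.models.Book`` and the options below are
# assumptions, not part of Django): passing **options to register() builds a
# dynamic ``BookAdmin`` subclass of ModelAdmin, as implemented above.
#
#     from django.contrib import admin
#     from myapp.models import Book
#
#     admin.site.register(Book, list_display=('title', 'author'))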
| mit | -193,874,056,442,659,900 | 38.525234 | 109 | 0.594864 | false |
yawd/django-sphinxdoc | sphinxdoc/management/commands/updatedoc.py | 1 | 4830 | # encoding: utf-8
"""
Management command for updating the documentation of one or more projects.
"""
import json
import optparse
import os
import os.path
import subprocess
from django.core.management import call_command
from django.core.management.base import BaseCommand, CommandError
from sphinxdoc.models import Project, Document
BUILDDIR = '_build'
EXTENSION = '.fjson'
SPECIAL_TITLES = {
'genindex': 'General Index',
'py-modindex': 'Module Index',
'np-modindex': 'Module Index',
'search': 'Search',
}
class Command(BaseCommand):
"""
    Update (and optionally build) the *Sphinx* documentation for one or more
projects.
You need to pass the slug of at least one project. If you pass the optional
parameter ``-b``, the command ``sphinx-build`` will be run for each project
before their files are read. If your project(s) are located in a different
*virtualenv* than your django site, you can provide a path to its
interpreter with ``--virtualenv path/to/env/bin/``
"""
args = '[-b [--virtualenv <path/to/bin/>]] <project_slug project_slug ...>'
help = ('Updates the documentation and the search index for the specified '
'projects.')
option_list = BaseCommand.option_list + (
optparse.make_option('-b', '--build',
action='store_true',
dest='build',
default=False,
help='Run "sphinx-build" for each project before updating it.'),
optparse.make_option('--virtualenv',
dest='virtualenv',
default='',
help='Use this virtualenv to build project docs.',
)
)
def handle(self, *args, **options):
"""
        Updates (and optionally builds) the documentation for all projects in
``args``.
"""
build = options['build']
virtualenv = options['virtualenv']
for slug in args:
try:
project = Project.objects.get(slug=slug)
except Project.DoesNotExist:
raise CommandError('Project "%s" does not exist' % slug)
if build:
                print 'Running "sphinx-build" for "%s" ...' % slug
self.build(project, virtualenv)
print 'Deleting old entries from database ...'
self.delete_documents(project)
print 'Importing JSON files for "%s" ...' % slug
self.import_files(project)
print 'Updating search index for "%s" ...' % slug
self.update_haystack()
print 'Done'
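    # Example invocations (illustrative; the project slug ``my-project`` and
    # the virtualenv path are assumptions):
    #
    #     python manage.py updatedoc my-project
    #     python manage.py updatedoc -b --virtualenv ~/envs/docs/bin my-project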
def build(self, project, virtualenv=''):
"""
Runs ``sphinx-build`` for ``project``. You can also specify a path to
the bin-directory of a ``virtualenv``, if your project requires it.
"""
cmd = 'sphinx-build'
if virtualenv:
cmd = os.path.expanduser(os.path.join(virtualenv, cmd))
cmd = [
cmd,
'-b',
'json',
'-d',
os.path.join(project.path, BUILDDIR, 'doctrees'),
project.path,
os.path.join(project.path, BUILDDIR, 'json'),
]
print 'Executing %s' % ' '.join(cmd)
subprocess.call(cmd)
def delete_documents(self, project):
"""Deletes all documents for ``project``."""
Document.objects.filter(project=project).delete()
def import_files(self, project):
"""
Creates a :class:`~sphinxdoc.models.Document` instance for each JSON
file of ``project``.
"""
path = os.path.join(project.path, BUILDDIR, 'json')
for dirpath, dirnames, filenames in os.walk(path):
for name in filter(lambda x: x.endswith(EXTENSION), filenames):
# Full path to the json file
filepath = os.path.join(dirpath, name)
# Get path relative to the build dir w/o file extension
relpath = os.path.relpath(filepath, path)[:-len(EXTENSION)]
# Some files have no title or body attribute
doc = json.load(open(filepath, 'rb'))
if 'title' not in doc:
page_name = os.path.basename(relpath)
doc['title'] = SPECIAL_TITLES[page_name]
if 'body' not in doc:
doc['body'] = ''
# Finally create the Document
d = Document(
project=project,
path=relpath,
content=json.dumps(doc),
title=doc['title'],
body=doc['body'],
)
d.full_clean()
d.save()
def update_haystack(self):
"""Updates Haystack’s search index."""
call_command('rebuild_index', interactive=False)
| bsd-3-clause | -8,060,614,587,684,997,000 | 32.068493 | 79 | 0.558616 | false |
rasata/ansible | lib/ansible/plugins/connections/jail.py | 131 | 7291 | # Based on local.py (c) 2012, Michael DeHaan <[email protected]>
# and chroot.py (c) 2013, Maykel Moya <[email protected]>
# (c) 2013, Michael Scherer <[email protected]>
# (c) 2015, Toshio Kuratomi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import distutils.spawn
import traceback
import os
import shlex
import subprocess
from ansible import errors
from ansible.utils.unicode import to_bytes
from ansible.callbacks import vvv
import ansible.constants as C
BUFSIZE = 65536
class Connection(object):
''' Local BSD Jail based connections '''
def _search_executable(self, executable):
cmd = distutils.spawn.find_executable(executable)
if not cmd:
            raise errors.AnsibleError("%s command not found in PATH" % executable)
return cmd
def list_jails(self):
p = subprocess.Popen([self.jls_cmd, '-q', 'name'],
cwd=self.runner.basedir,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
return stdout.split()
def get_jail_path(self):
p = subprocess.Popen([self.jls_cmd, '-j', self.jail, '-q', 'path'],
cwd=self.runner.basedir,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
# remove \n
return stdout[:-1]
def __init__(self, runner, host, port, *args, **kwargs):
self.jail = host
self.runner = runner
self.host = host
self.has_pipelining = False
self.become_methods_supported=C.BECOME_METHODS
if os.geteuid() != 0:
raise errors.AnsibleError("jail connection requires running as root")
self.jls_cmd = self._search_executable('jls')
self.jexec_cmd = self._search_executable('jexec')
        if self.jail not in self.list_jails():
raise errors.AnsibleError("incorrect jail name %s" % self.jail)
self.host = host
# port is unused, since this is local
self.port = port
def connect(self, port=None):
''' connect to the jail; nothing to do here '''
vvv("THIS IS A LOCAL JAIL DIR", host=self.jail)
return self
    # Build the command list that runs ``cmd`` inside the jail via jexec.
def _generate_cmd(self, executable, cmd):
if executable:
local_cmd = [self.jexec_cmd, self.jail, executable, '-c', cmd]
else:
# Prev to python2.7.3, shlex couldn't handle unicode type strings
cmd = to_bytes(cmd)
cmd = shlex.split(cmd)
local_cmd = [self.jexec_cmd, self.jail]
local_cmd += cmd
return local_cmd
def _buffered_exec_command(self, cmd, tmp_path, become_user=None, sudoable=False, executable='/bin/sh', in_data=None, stdin=subprocess.PIPE):
        ''' run a command on the jail. This is only needed for implementing
        put_file()/fetch_file() so that we don't have to read the whole file
        into memory.
        Compared to exec_command() it loses some niceties, like being able to
        return the process's exit code immediately.
'''
if sudoable and self.runner.become and self.runner.become_method not in self.become_methods_supported:
raise errors.AnsibleError("Internal Error: this module does not support running commands via %s" % self.runner.become_method)
if in_data:
raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
        # We enter the jail as root, so privilege escalation is ignored here
        # (this may need fixing if we ever have to become a specific user,
        # e.g. a postgres admin).
local_cmd = self._generate_cmd(executable, cmd)
vvv("EXEC %s" % (local_cmd), host=self.jail)
p = subprocess.Popen(local_cmd, shell=False,
cwd=self.runner.basedir,
stdin=stdin,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return p
def exec_command(self, cmd, tmp_path, become_user=None, sudoable=False, executable='/bin/sh', in_data=None):
''' run a command on the jail '''
p = self._buffered_exec_command(cmd, tmp_path, become_user, sudoable, executable, in_data)
stdout, stderr = p.communicate()
return (p.returncode, '', stdout, stderr)
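    # Illustrative usage (not part of this module): with this plugin available,
    # a jail named ``testjail`` on the controlling host could be targeted as
    #
    #     ansible -c jail -i 'testjail,' testjail -m ping
    #
    # ``testjail`` is a hypothetical jail name; as enforced in __init__ above,
    # the controlling process must run as root.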
def put_file(self, in_path, out_path):
''' transfer a file from local to jail '''
vvv("PUT %s TO %s" % (in_path, out_path), host=self.jail)
try:
with open(in_path, 'rb') as in_file:
try:
p = self._buffered_exec_command('dd of=%s bs=%s' % (out_path, BUFSIZE), None, stdin=in_file)
except OSError:
raise errors.AnsibleError("jail connection requires dd command in the jail")
try:
stdout, stderr = p.communicate()
except:
traceback.print_exc()
raise errors.AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
if p.returncode != 0:
raise errors.AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
except IOError:
raise errors.AnsibleError("file or module does not exist at: %s" % in_path)
def fetch_file(self, in_path, out_path):
''' fetch a file from jail to local '''
vvv("FETCH %s TO %s" % (in_path, out_path), host=self.jail)
try:
p = self._buffered_exec_command('dd if=%s bs=%s' % (in_path, BUFSIZE), None)
except OSError:
raise errors.AnsibleError("jail connection requires dd command in the jail")
with open(out_path, 'wb+') as out_file:
try:
chunk = p.stdout.read(BUFSIZE)
while chunk:
out_file.write(chunk)
chunk = p.stdout.read(BUFSIZE)
except:
traceback.print_exc()
raise errors.AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
stdout, stderr = p.communicate()
if p.returncode != 0:
raise errors.AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
def close(self):
''' terminate the connection; nothing to do here '''
pass
| gpl-3.0 | -925,818,814,883,096,300 | 38.625 | 152 | 0.602112 | false |
Phuehvk/gyp | test/mac/gyptest-depend-on-bundle.py | 34 | 1270 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that a dependency on a bundle causes the whole bundle to be built.
"""
import TestGyp
import sys
if sys.platform == 'darwin':
print "This test is currently disabled: https://crbug.com/483696."
sys.exit(0)
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
test.run_gyp('test.gyp', chdir='depend-on-bundle')
test.build('test.gyp', 'dependent_on_bundle', chdir='depend-on-bundle')
# Binary itself.
test.built_file_must_exist('dependent_on_bundle', chdir='depend-on-bundle')
# Bundle dependency.
test.built_file_must_exist(
'my_bundle.framework/Versions/A/my_bundle',
chdir='depend-on-bundle')
test.built_file_must_exist( # package_framework
'my_bundle.framework/my_bundle',
chdir='depend-on-bundle')
test.built_file_must_exist( # plist
'my_bundle.framework/Versions/A/Resources/Info.plist',
chdir='depend-on-bundle')
test.built_file_must_exist(
'my_bundle.framework/Versions/A/Resources/English.lproj/' # Resources
'InfoPlist.strings',
chdir='depend-on-bundle')
test.pass_test()
| bsd-3-clause | -4,666,447,960,846,709,000 | 28.534884 | 77 | 0.691339 | false |
krintoxi/NoobSec-Toolkit | NoobSecToolkit - MAC OSX/tools/inject/thirdparty/colorama/initialise.py | 49 | 1222 | import atexit
import sys
from .ansitowin32 import AnsiToWin32
orig_stdout = sys.stdout
orig_stderr = sys.stderr
wrapped_stdout = sys.stdout
wrapped_stderr = sys.stderr
atexit_done = False
def reset_all():
AnsiToWin32(orig_stdout).reset_all()
def init(autoreset=False, convert=None, strip=None, wrap=True):
if not wrap and any([autoreset, convert, strip]):
raise ValueError('wrap=False conflicts with any other arg=True')
global wrapped_stdout, wrapped_stderr
sys.stdout = wrapped_stdout = \
wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
sys.stderr = wrapped_stderr = \
wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
global atexit_done
if not atexit_done:
atexit.register(reset_all)
atexit_done = True
def deinit():
sys.stdout = orig_stdout
sys.stderr = orig_stderr
def reinit():
sys.stdout = wrapped_stdout
    sys.stderr = wrapped_stderr
def wrap_stream(stream, convert, strip, autoreset, wrap):
if wrap:
wrapper = AnsiToWin32(stream,
convert=convert, strip=strip, autoreset=autoreset)
if wrapper.should_wrap():
stream = wrapper.stream
return stream
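# Illustrative usage (Python 2 to match this module; ``Fore`` ships with the
# colorama package itself):
#
#     from colorama import init, deinit, Fore
#     init(autoreset=True)
#     print Fore.RED + 'error text'   # red via ANSI, converted on Windows
#     deinit()                        # restore the original stdout/stderr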
| gpl-2.0 | 5,269,144,528,427,634,000 | 21.218182 | 72 | 0.675941 | false |
vipmunot/Data-Analysis-using-Python | Exploratory Data Visualization/Multiple plots-216.py | 1 | 2691 | ## 1. Recap ##
import pandas as pd
import matplotlib.pyplot as plt
unrate = pd.read_csv('unrate.csv')
unrate['DATE'] = pd.to_datetime(unrate['DATE'])
plt.plot(unrate['DATE'].head(12),unrate['VALUE'].head(12))
plt.xticks(rotation=90)
plt.xlabel('Month')
plt.ylabel('Unemployment Rate')
plt.title('Monthly Unemployment Trends, 1948')
## 2. Matplotlib Classes ##
import matplotlib.pyplot as plt
fig = plt.figure()
ax1 = fig.add_subplot(2,1,1)
ax2 = fig.add_subplot(2,1,2)
plt.show()
## 4. Adding Data ##
fig = plt.figure()
ax1 = fig.add_subplot(2,1,1)
ax2 = fig.add_subplot(2,1,2)
ax1.plot(unrate['DATE'].head(12),unrate['VALUE'].head(12))
ax2.plot(unrate['DATE'].iloc[12:24],unrate['VALUE'].iloc[12:24])
plt.show()
## 5. Formatting And Spacing ##
fig = plt.figure(figsize=(12,6))
ax1 = fig.add_subplot(2,1,1)
ax2 = fig.add_subplot(2,1,2)
ax1.plot(unrate[0:12]['DATE'], unrate[0:12]['VALUE'])
ax1.set_title('Monthly Unemployment Rate, 1948')
ax2.plot(unrate[12:24]['DATE'], unrate[12:24]['VALUE'])
ax2.set_title('Monthly Unemployment Rate, 1949')
plt.show()
## 6. Comparing Across More Years ##
fig = plt.figure(figsize=(12,12))
x = [0,12,24,36,48]
y = [12,24,36,48,60]
for i in range(5):
ax = fig.add_subplot(5,1,(i+1))
ax.plot(unrate[x[i]:y[i]]['DATE'],unrate[x[i]:y[i]]['VALUE'])
plt.show()
## 7. Overlaying Line Charts ##
unrate['MONTH'] = unrate['DATE'].dt.month
fig = plt.figure(figsize=(6,3))
plt.plot(unrate[0:12]['MONTH'], unrate[0:12]['VALUE'],c='red')
plt.plot(unrate[12:24]['MONTH'], unrate[12:24]['VALUE'],c='blue')
plt.show()
## 8. Adding More Lines ##
fig = plt.figure(figsize=(10,6))
x = [0,12,24,36,48]
y = [12,24,36,48,60]
color = ['red','blue','green','orange','black']
for i in range(5):
plt.plot(unrate[x[i]:y[i]]['MONTH'],unrate[x[i]:y[i]]['VALUE'],c = color[i])
plt.show()
## 9. Adding A Legend ##
fig = plt.figure(figsize=(10,6))
colors = ['red', 'blue', 'green', 'orange', 'black']
for i in range(5):
start_index = i*12
end_index = (i+1)*12
label = str(1948 + i)
subset = unrate[start_index:end_index]
plt.plot(subset['MONTH'], subset['VALUE'], c=colors[i],label=label)
plt.legend(loc='upper left')
plt.show()
## 10. Final Tweaks ##
fig = plt.figure(figsize=(10,6))
colors = ['red', 'blue', 'green', 'orange', 'black']
for i in range(5):
start_index = i*12
end_index = (i+1)*12
subset = unrate[start_index:end_index]
label = str(1948 + i)
plt.plot(subset['MONTH'], subset['VALUE'], c=colors[i], label=label)
plt.legend(loc='upper left')
plt.title("Monthly Unemployment Trends, 1948-1952")
plt.xlabel('Month, Integer')
plt.ylabel('Unemployment Rate, Percent')
plt.show() | mit | -3,005,192,965,135,424,000 | 26.191919 | 80 | 0.635823 | false |
openstack/glance | glance/tests/unit/test_notifier.py | 1 | 34473 | # Copyright 2011 OpenStack Foundation
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from unittest import mock
import glance_store
from oslo_config import cfg
import oslo_messaging
import webob
import glance.async_
from glance.common import exception
from glance.common import timeutils
import glance.context
from glance import notifier
import glance.tests.unit.utils as unit_test_utils
from glance.tests import utils
DATETIME = datetime.datetime(2012, 5, 16, 15, 27, 36, 325355)
UUID1 = 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d'
USER1 = '54492ba0-f4df-4e4e-be62-27f4d76b29cf'
TENANT1 = '6838eb7b-6ded-434a-882c-b344c77fe8df'
TENANT2 = '2c014f32-55eb-467d-8fcb-4bd706012f81'
class ImageStub(glance.domain.Image):
def get_data(self, offset=0, chunk_size=None):
return ['01234', '56789']
def set_data(self, data, size, backend=None, set_active=True):
for chunk in data:
pass
class ImageRepoStub(object):
def remove(self, *args, **kwargs):
return 'image_from_get'
def save(self, *args, **kwargs):
return 'image_from_save'
def add(self, *args, **kwargs):
return 'image_from_add'
def get(self, *args, **kwargs):
return 'image_from_get'
def list(self, *args, **kwargs):
return ['images_from_list']
class ImageMemberRepoStub(object):
def remove(self, *args, **kwargs):
return 'image_member_from_remove'
def save(self, *args, **kwargs):
return 'image_member_from_save'
def add(self, *args, **kwargs):
return 'image_member_from_add'
def get(self, *args, **kwargs):
return 'image_member_from_get'
def list(self, *args, **kwargs):
return ['image_members_from_list']
class TaskStub(glance.domain.TaskStub):
def run(self, executor):
pass
class Task(glance.domain.Task):
def succeed(self, result):
pass
def fail(self, message):
pass
class TaskRepoStub(object):
def remove(self, *args, **kwargs):
return 'task_from_remove'
def save(self, *args, **kwargs):
return 'task_from_save'
def add(self, *args, **kwargs):
return 'task_from_add'
def get_task(self, *args, **kwargs):
return 'task_from_get'
def list(self, *args, **kwargs):
return ['tasks_from_list']
class TestNotifier(utils.BaseTestCase):
@mock.patch.object(oslo_messaging, 'Notifier')
@mock.patch.object(oslo_messaging, 'get_notification_transport')
def _test_load_strategy(self,
mock_get_transport, mock_notifier,
url, driver):
nfier = notifier.Notifier()
mock_get_transport.assert_called_with(cfg.CONF)
self.assertIsNotNone(nfier._transport)
mock_notifier.assert_called_with(nfier._transport,
publisher_id='image.localhost')
self.assertIsNotNone(nfier._notifier)
def test_notifier_load(self):
self._test_load_strategy(url=None, driver=None)
@mock.patch.object(oslo_messaging, 'set_transport_defaults')
def test_set_defaults(self, mock_set_trans_defaults):
notifier.set_defaults(control_exchange='foo')
mock_set_trans_defaults.assert_called_with('foo')
notifier.set_defaults()
mock_set_trans_defaults.assert_called_with('glance')
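# Illustrative glance-api.conf snippet exercised by the *_disabled tests below
# (a sketch; the exact option values are assumptions):
#
#     [DEFAULT]
#     disabled_notifications = image.create,image.update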
class TestImageNotifications(utils.BaseTestCase):
"""Test Image Notifications work"""
def setUp(self):
super(TestImageNotifications, self).setUp()
self.image = ImageStub(
image_id=UUID1, name='image-1', status='active', size=1024,
created_at=DATETIME, updated_at=DATETIME, owner=TENANT1,
visibility='public', container_format='ami', virtual_size=2048,
tags=['one', 'two'], disk_format='ami', min_ram=128,
min_disk=10, checksum='ca425b88f047ce8ec45ee90e813ada91',
locations=['http://127.0.0.1'])
self.context = glance.context.RequestContext(tenant=TENANT2,
user=USER1)
self.image_repo_stub = ImageRepoStub()
self.notifier = unit_test_utils.FakeNotifier()
self.image_repo_proxy = glance.notifier.ImageRepoProxy(
self.image_repo_stub, self.context, self.notifier)
self.image_proxy = glance.notifier.ImageProxy(
self.image, self.context, self.notifier)
def test_image_save_notification(self):
self.image_repo_proxy.save(self.image_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.update', output_log['event_type'])
self.assertEqual(self.image.image_id, output_log['payload']['id'])
if 'location' in output_log['payload']:
self.fail('Notification contained location field.')
def test_image_save_notification_disabled(self):
self.config(disabled_notifications=["image.update"])
self.image_repo_proxy.save(self.image_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_add_notification(self):
self.image_repo_proxy.add(self.image_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.create', output_log['event_type'])
self.assertEqual(self.image.image_id, output_log['payload']['id'])
if 'location' in output_log['payload']:
self.fail('Notification contained location field.')
def test_image_add_notification_disabled(self):
self.config(disabled_notifications=["image.create"])
self.image_repo_proxy.add(self.image_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_delete_notification(self):
self.image_repo_proxy.remove(self.image_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.delete', output_log['event_type'])
self.assertEqual(self.image.image_id, output_log['payload']['id'])
self.assertTrue(output_log['payload']['deleted'])
if 'location' in output_log['payload']:
self.fail('Notification contained location field.')
def test_image_delete_notification_disabled(self):
self.config(disabled_notifications=['image.delete'])
self.image_repo_proxy.remove(self.image_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_get(self):
image = self.image_repo_proxy.get(UUID1)
self.assertIsInstance(image, glance.notifier.ImageProxy)
self.assertEqual('image_from_get', image.repo)
def test_image_list(self):
images = self.image_repo_proxy.list()
self.assertIsInstance(images[0], glance.notifier.ImageProxy)
self.assertEqual('images_from_list', images[0].repo)
def test_image_get_data_should_call_next_image_get_data(self):
with mock.patch.object(self.image, 'get_data') as get_data_mock:
self.image_proxy.get_data()
self.assertTrue(get_data_mock.called)
def test_image_get_data_notification(self):
self.image_proxy.size = 10
data = ''.join(self.image_proxy.get_data())
self.assertEqual('0123456789', data)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.send', output_log['event_type'])
self.assertEqual(self.image.image_id,
output_log['payload']['image_id'])
self.assertEqual(TENANT2, output_log['payload']['receiver_tenant_id'])
self.assertEqual(USER1, output_log['payload']['receiver_user_id'])
self.assertEqual(10, output_log['payload']['bytes_sent'])
self.assertEqual(TENANT1, output_log['payload']['owner_id'])
def test_image_get_data_notification_disabled(self):
self.config(disabled_notifications=['image.send'])
self.image_proxy.size = 10
data = ''.join(self.image_proxy.get_data())
self.assertEqual('0123456789', data)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_get_data_size_mismatch(self):
self.image_proxy.size = 11
list(self.image_proxy.get_data())
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.send', output_log['event_type'])
self.assertEqual(self.image.image_id,
output_log['payload']['image_id'])
def test_image_set_data_prepare_notification(self):
insurance = {'called': False}
def data_iterator():
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.prepare', output_log['event_type'])
self.assertEqual(self.image.image_id, output_log['payload']['id'])
self.assertEqual(['store1', 'store2'], output_log['payload'][
'os_glance_importing_to_stores'])
self.assertEqual([],
output_log['payload']['os_glance_failed_import'])
yield 'abcd'
yield 'efgh'
insurance['called'] = True
self.image_proxy.extra_properties[
'os_glance_importing_to_stores'] = 'store1,store2'
self.image_proxy.extra_properties['os_glance_failed_import'] = ''
self.image_proxy.set_data(data_iterator(), 8)
self.assertTrue(insurance['called'])
def test_image_set_data_prepare_notification_disabled(self):
insurance = {'called': False}
def data_iterator():
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
yield 'abcd'
yield 'efgh'
insurance['called'] = True
self.config(disabled_notifications=['image.prepare'])
self.image_proxy.set_data(data_iterator(), 8)
self.assertTrue(insurance['called'])
def test_image_set_data_upload_and_activate_notification(self):
image = ImageStub(image_id=UUID1, name='image-1', status='queued',
created_at=DATETIME, updated_at=DATETIME,
owner=TENANT1, visibility='public')
context = glance.context.RequestContext(tenant=TENANT2, user=USER1)
fake_notifier = unit_test_utils.FakeNotifier()
image_proxy = glance.notifier.ImageProxy(image, context, fake_notifier)
def data_iterator():
fake_notifier.log = []
yield 'abcde'
yield 'fghij'
image_proxy.extra_properties[
'os_glance_importing_to_stores'] = 'store2'
image_proxy.extra_properties[
'os_glance_importing_to_stores'] = 'store1,store2'
image_proxy.extra_properties['os_glance_failed_import'] = ''
image_proxy.set_data(data_iterator(), 10)
output_logs = fake_notifier.get_logs()
self.assertEqual(2, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertEqual(self.image.image_id, output_log['payload']['id'])
self.assertEqual(['store2'], output_log['payload'][
'os_glance_importing_to_stores'])
self.assertEqual([],
output_log['payload']['os_glance_failed_import'])
output_log = output_logs[1]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.activate', output_log['event_type'])
self.assertEqual(self.image.image_id, output_log['payload']['id'])
def test_image_set_data_upload_and_not_activate_notification(self):
insurance = {'called': False}
def data_iterator():
self.notifier.log = []
yield 'abcde'
yield 'fghij'
self.image_proxy.extra_properties[
'os_glance_importing_to_stores'] = 'store2'
insurance['called'] = True
self.image_proxy.set_data(data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertEqual(self.image.image_id, output_log['payload']['id'])
self.assertTrue(insurance['called'])
def test_image_set_data_upload_and_activate_notification_disabled(self):
insurance = {'called': False}
image = ImageStub(image_id=UUID1, name='image-1', status='queued',
created_at=DATETIME, updated_at=DATETIME,
owner=TENANT1, visibility='public')
context = glance.context.RequestContext(tenant=TENANT2, user=USER1)
fake_notifier = unit_test_utils.FakeNotifier()
image_proxy = glance.notifier.ImageProxy(image, context, fake_notifier)
def data_iterator():
fake_notifier.log = []
yield 'abcde'
yield 'fghij'
insurance['called'] = True
self.config(disabled_notifications=['image.activate', 'image.upload'])
image_proxy.set_data(data_iterator(), 10)
self.assertTrue(insurance['called'])
output_logs = fake_notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_set_data_storage_full(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise glance_store.StorageFull(message='Modern Major General')
self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('Modern Major General', output_log['payload'])
def test_image_set_data_value_error(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise ValueError('value wrong')
self.assertRaises(webob.exc.HTTPBadRequest,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('value wrong', output_log['payload'])
def test_image_set_data_duplicate(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise exception.Duplicate('Cant have duplicates')
self.assertRaises(webob.exc.HTTPConflict,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('Cant have duplicates', output_log['payload'])
def test_image_set_data_storage_write_denied(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise glance_store.StorageWriteDenied(message='The Very Model')
self.assertRaises(webob.exc.HTTPServiceUnavailable,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('The Very Model', output_log['payload'])
def test_image_set_data_forbidden(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise exception.Forbidden('Not allowed')
self.assertRaises(webob.exc.HTTPForbidden,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('Not allowed', output_log['payload'])
def test_image_set_data_not_found(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise exception.NotFound('Not found')
self.assertRaises(webob.exc.HTTPNotFound,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('Not found', output_log['payload'])
def test_image_set_data_HTTP_error(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise webob.exc.HTTPError('Http issue')
self.assertRaises(webob.exc.HTTPError,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('Http issue', output_log['payload'])
def test_image_set_data_error(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise exception.GlanceException('Failed')
self.assertRaises(exception.GlanceException,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('Failed', output_log['payload'])
class TestImageMemberNotifications(utils.BaseTestCase):
"""Test Image Member Notifications work"""
def setUp(self):
super(TestImageMemberNotifications, self).setUp()
self.context = glance.context.RequestContext(tenant=TENANT2,
user=USER1)
self.notifier = unit_test_utils.FakeNotifier()
self.image = ImageStub(
image_id=UUID1, name='image-1', status='active', size=1024,
created_at=DATETIME, updated_at=DATETIME, owner=TENANT1,
visibility='public', container_format='ami',
tags=['one', 'two'], disk_format='ami', min_ram=128,
min_disk=10, checksum='ca425b88f047ce8ec45ee90e813ada91',
locations=['http://127.0.0.1'])
self.image_member = glance.domain.ImageMembership(
id=1, image_id=UUID1, member_id=TENANT1, created_at=DATETIME,
updated_at=DATETIME, status='accepted')
self.image_member_repo_stub = ImageMemberRepoStub()
self.image_member_repo_proxy = glance.notifier.ImageMemberRepoProxy(
self.image_member_repo_stub, self.image,
self.context, self.notifier)
self.image_member_proxy = glance.notifier.ImageMemberProxy(
self.image_member, self.context, self.notifier)
def _assert_image_member_with_notifier(self, output_log, deleted=False):
self.assertEqual(self.image_member.member_id,
output_log['payload']['member_id'])
self.assertEqual(self.image_member.image_id,
output_log['payload']['image_id'])
self.assertEqual(self.image_member.status,
output_log['payload']['status'])
self.assertEqual(timeutils.isotime(self.image_member.created_at),
output_log['payload']['created_at'])
self.assertEqual(timeutils.isotime(self.image_member.updated_at),
output_log['payload']['updated_at'])
if deleted:
self.assertTrue(output_log['payload']['deleted'])
self.assertIsNotNone(output_log['payload']['deleted_at'])
else:
self.assertFalse(output_log['payload']['deleted'])
self.assertIsNone(output_log['payload']['deleted_at'])
def test_image_member_add_notification(self):
self.image_member_repo_proxy.add(self.image_member_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.member.create', output_log['event_type'])
self._assert_image_member_with_notifier(output_log)
def test_image_member_add_notification_disabled(self):
self.config(disabled_notifications=['image.member.create'])
self.image_member_repo_proxy.add(self.image_member_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_member_save_notification(self):
self.image_member_repo_proxy.save(self.image_member_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.member.update', output_log['event_type'])
self._assert_image_member_with_notifier(output_log)
def test_image_member_save_notification_disabled(self):
self.config(disabled_notifications=['image.member.update'])
self.image_member_repo_proxy.save(self.image_member_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_member_delete_notification(self):
self.image_member_repo_proxy.remove(self.image_member_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.member.delete', output_log['event_type'])
self._assert_image_member_with_notifier(output_log, deleted=True)
def test_image_member_delete_notification_disabled(self):
self.config(disabled_notifications=['image.member.delete'])
self.image_member_repo_proxy.remove(self.image_member_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_member_get(self):
image_member = self.image_member_repo_proxy.get(TENANT1)
self.assertIsInstance(image_member, glance.notifier.ImageMemberProxy)
self.assertEqual('image_member_from_get', image_member.repo)
def test_image_member_list(self):
image_members = self.image_member_repo_proxy.list()
self.assertIsInstance(image_members[0],
glance.notifier.ImageMemberProxy)
self.assertEqual('image_members_from_list', image_members[0].repo)
class TestTaskNotifications(utils.BaseTestCase):
"""Test Task Notifications work"""
def setUp(self):
super(TestTaskNotifications, self).setUp()
task_input = {"loc": "fake"}
self.task_stub = TaskStub(
task_id='aaa',
task_type='import',
status='pending',
owner=TENANT2,
expires_at=None,
created_at=DATETIME,
updated_at=DATETIME,
image_id='fake_image_id',
user_id='fake_user',
request_id='fake_request_id',
)
self.task = Task(
task_id='aaa',
task_type='import',
status='pending',
owner=TENANT2,
expires_at=None,
created_at=DATETIME,
updated_at=DATETIME,
task_input=task_input,
result='res',
message='blah',
image_id='fake_image_id',
user_id='fake_user',
request_id='fake_request_id',
)
self.context = glance.context.RequestContext(
tenant=TENANT2,
user=USER1
)
self.task_repo_stub = TaskRepoStub()
self.notifier = unit_test_utils.FakeNotifier()
self.task_repo_proxy = glance.notifier.TaskRepoProxy(
self.task_repo_stub,
self.context,
self.notifier
)
self.task_proxy = glance.notifier.TaskProxy(
self.task,
self.context,
self.notifier
)
self.task_stub_proxy = glance.notifier.TaskStubProxy(
self.task_stub,
self.context,
self.notifier
)
self.patcher = mock.patch.object(timeutils, 'utcnow')
mock_utcnow = self.patcher.start()
mock_utcnow.return_value = datetime.datetime.utcnow()
def tearDown(self):
super(TestTaskNotifications, self).tearDown()
self.patcher.stop()
def test_task_create_notification(self):
self.task_repo_proxy.add(self.task_stub_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('task.create', output_log['event_type'])
self.assertEqual(self.task.task_id, output_log['payload']['id'])
self.assertEqual(
timeutils.isotime(self.task.updated_at),
output_log['payload']['updated_at']
)
self.assertEqual(
timeutils.isotime(self.task.created_at),
output_log['payload']['created_at']
)
if 'location' in output_log['payload']:
self.fail('Notification contained location field.')
# Verify newly added fields 'image_id', 'user_id' and
# 'request_id' are not part of notification yet
self.assertTrue('image_id' not in output_log['payload'])
self.assertTrue('user_id' not in output_log['payload'])
self.assertTrue('request_id' not in output_log['payload'])
def test_task_create_notification_disabled(self):
self.config(disabled_notifications=['task.create'])
self.task_repo_proxy.add(self.task_stub_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_task_delete_notification(self):
now = timeutils.isotime()
self.task_repo_proxy.remove(self.task_stub_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('task.delete', output_log['event_type'])
self.assertEqual(self.task.task_id, output_log['payload']['id'])
self.assertEqual(
timeutils.isotime(self.task.updated_at),
output_log['payload']['updated_at']
)
self.assertEqual(
timeutils.isotime(self.task.created_at),
output_log['payload']['created_at']
)
self.assertEqual(
now,
output_log['payload']['deleted_at']
)
if 'location' in output_log['payload']:
self.fail('Notification contained location field.')
# Verify newly added fields 'image_id', 'user_id' and
# 'request_id' are not part of notification yet
self.assertTrue('image_id' not in output_log['payload'])
self.assertTrue('user_id' not in output_log['payload'])
self.assertTrue('request_id' not in output_log['payload'])
def test_task_delete_notification_disabled(self):
self.config(disabled_notifications=['task.delete'])
self.task_repo_proxy.remove(self.task_stub_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_task_run_notification(self):
with mock.patch('glance.async_.TaskExecutor') as mock_executor:
executor = mock_executor.return_value
executor._run.return_value = mock.Mock()
self.task_proxy.run(executor=mock_executor)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('task.run', output_log['event_type'])
self.assertEqual(self.task.task_id, output_log['payload']['id'])
self.assertFalse(
self.task.image_id in output_log['payload']
)
self.assertFalse(
self.task.user_id in output_log['payload']
)
self.assertFalse(
self.task.request_id in output_log['payload']
)
def test_task_run_notification_disabled(self):
self.config(disabled_notifications=['task.run'])
with mock.patch('glance.async_.TaskExecutor') as mock_executor:
executor = mock_executor.return_value
executor._run.return_value = mock.Mock()
self.task_proxy.run(executor=mock_executor)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_task_processing_notification(self):
self.task_proxy.begin_processing()
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('task.processing', output_log['event_type'])
self.assertEqual(self.task.task_id, output_log['payload']['id'])
# Verify newly added fields 'image_id', 'user_id' and
# 'request_id' are not part of notification yet
self.assertTrue('image_id' not in output_log['payload'])
self.assertTrue('user_id' not in output_log['payload'])
self.assertTrue('request_id' not in output_log['payload'])
def test_task_processing_notification_disabled(self):
self.config(disabled_notifications=['task.processing'])
self.task_proxy.begin_processing()
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_task_success_notification(self):
self.task_proxy.begin_processing()
self.task_proxy.succeed(result=None)
output_logs = self.notifier.get_logs()
self.assertEqual(2, len(output_logs))
output_log = output_logs[1]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('task.success', output_log['event_type'])
self.assertEqual(self.task.task_id, output_log['payload']['id'])
# Verify newly added fields 'image_id', 'user_id' and
# 'request_id' are not part of notification yet
self.assertTrue('image_id' not in output_log['payload'])
self.assertTrue('user_id' not in output_log['payload'])
self.assertTrue('request_id' not in output_log['payload'])
def test_task_success_notification_disabled(self):
self.config(disabled_notifications=['task.processing', 'task.success'])
self.task_proxy.begin_processing()
self.task_proxy.succeed(result=None)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_task_failure_notification(self):
self.task_proxy.fail(message=None)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('task.failure', output_log['event_type'])
self.assertEqual(self.task.task_id, output_log['payload']['id'])
# Verify newly added fields 'image_id', 'user_id' and
# 'request_id' are not part of notification yet
self.assertTrue('image_id' not in output_log['payload'])
self.assertTrue('user_id' not in output_log['payload'])
self.assertTrue('request_id' not in output_log['payload'])
def test_task_failure_notification_disabled(self):
self.config(disabled_notifications=['task.failure'])
self.task_proxy.fail(message=None)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
| apache-2.0 | -3,377,431,714,844,462,600 | 40.039286 | 79 | 0.623096 | false |
ramadhane/odoo | addons/decimal_precision/__openerp__.py | 261 | 1628 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Decimal Precision Configuration',
'description': """
Configure the price accuracy you need for different kinds of usage: accounting, sales, purchases.
=================================================================================================
The decimal precision is configured per company.
""",
'author': 'OpenERP SA',
'version': '0.1',
'depends': ['base'],
'category' : 'Hidden/Dependency',
'data': [
'decimal_precision_view.xml',
'security/ir.model.access.csv',
],
'demo': [],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 8,182,150,570,526,899,000 | 36 | 97 | 0.574324 | false |
serverdensity/sd-agent-core-plugins | gunicorn/check.py | 1 | 4768 | # (C) Datadog, Inc. 2010-2017
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
"""
Collects metrics from the gunicorn web server.
http://gunicorn.org/
"""
# stdlib
import time
# 3rd party
import psutil
# project
from checks import AgentCheck
class GUnicornCheck(AgentCheck):
# Config
PROC_NAME = 'proc_name'
# Number of seconds to sleep between cpu time checks.
CPU_SLEEP_SECS = 0.1
# Worker state tags.
IDLE_TAGS = ["state:idle"]
WORKING_TAGS = ["state:working"]
SVC_NAME = "gunicorn.is_running"
def get_library_versions(self):
return {"psutil": psutil.__version__}
def check(self, instance):
""" Collect metrics for the given gunicorn instance. """
self.log.debug("Running instance: %s", instance)
# Validate the config.
if not instance or self.PROC_NAME not in instance:
raise GUnicornCheckError("instance must specify: %s" % self.PROC_NAME)
        # Load the gunicorn master process.
proc_name = instance.get(self.PROC_NAME)
master_proc = self._get_master_proc_by_name(proc_name)
# Fetch the worker procs and count their states.
worker_procs = master_proc.children()
working, idle = self._count_workers(worker_procs)
# if no workers are running, alert CRITICAL, otherwise OK
msg = "%s working and %s idle workers for %s" % (working, idle, proc_name)
status = AgentCheck.CRITICAL if working == 0 and idle == 0 else AgentCheck.OK
tags = ['app:' + proc_name]
self.service_check(self.SVC_NAME, status, tags=tags, message=msg)
# Submit the data.
self.log.debug("instance %s procs - working:%s idle:%s" % (proc_name, working, idle))
self.gauge("gunicorn.workers", working, tags + self.WORKING_TAGS)
self.gauge("gunicorn.workers", idle, tags + self.IDLE_TAGS)
def _count_workers(self, worker_procs):
working = 0
idle = 0
if not worker_procs:
return working, idle
# Count how much sleep time is used by the workers.
cpu_time_by_pid = {}
for proc in worker_procs:
# cpu time is the sum of user + system time.
try:
cpu_time_by_pid[proc.pid] = sum(proc.cpu_times())
except psutil.NoSuchProcess:
                self.warning('Process %s disappeared while scanning' % proc.pid)
continue
# Let them do a little bit more work.
time.sleep(self.CPU_SLEEP_SECS)
# Processes which have used more CPU are considered active (this is a very
# naive check, but gunicorn exposes no stats API)
for proc in worker_procs:
if proc.pid not in cpu_time_by_pid:
# The process is not running anymore, we didn't collect initial cpu times
continue
try:
cpu_time = sum(proc.cpu_times())
except Exception:
# couldn't collect cpu time. assume it's dead.
self.log.debug("Couldn't collect cpu time for %s" % proc)
continue
if cpu_time == cpu_time_by_pid[proc.pid]:
idle += 1
else:
working += 1
return working, idle
def _get_master_proc_by_name(self, name):
""" Return a psutil process for the master gunicorn process with the given name. """
master_name = GUnicornCheck._get_master_proc_name(name)
master_procs = [p for p in psutil.process_iter() if p.cmdline() and p.cmdline()[0] == master_name]
if len(master_procs) == 0:
# process not found, it's dead.
self.service_check(self.SVC_NAME, AgentCheck.CRITICAL, tags=['app:' + name],
message="No gunicorn process with name %s found" % name)
raise GUnicornCheckError("Found no master process with name: %s" % master_name)
elif len(master_procs) > 1:
raise GUnicornCheckError("Found more than one master process with name: %s" % master_name)
else:
return master_procs[0]
@staticmethod
def _get_master_proc_name(name):
""" Return the name of the master gunicorn process for the given proc name. """
# Here's an example of a process list for a gunicorn box with name web1
# root 22976 0.1 0.1 60364 13424 ? Ss 19:30 0:00 gunicorn: master [web1]
# web 22984 20.7 2.3 521924 176136 ? Sl 19:30 1:58 gunicorn: worker [web1]
# web 22985 26.4 6.1 795288 449596 ? Sl 19:30 2:32 gunicorn: worker [web1]
return "gunicorn: master [%s]" % name
class GUnicornCheckError(Exception):
pass
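# ---------------------------------------------------------------------------
# Illustration only -- not part of the original plugin. The activity
# heuristic used by _count_workers can be reproduced standalone: sample
# cpu_times() twice and treat any process whose total changed as "working".
# The name filter and sleep below are arbitrary choices for this sketch, and
# error handling (e.g. psutil.AccessDenied) is omitted for brevity.
if __name__ == '__main__':
    candidates = [p for p in psutil.process_iter()
                  if 'gunicorn' in ' '.join(p.cmdline() or [])]
    before = dict((p.pid, sum(p.cpu_times())) for p in candidates)
    time.sleep(0.1)
    working = sum(1 for p in candidates
                  if sum(p.cpu_times()) != before.get(p.pid))
    print('working: %d idle: %d' % (working, len(candidates) - working))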
| bsd-3-clause | -7,727,209,935,171,708,000 | 36.25 | 106 | 0.599832 | false |
stahnma/rpmdistro-gitoverlay | rdgo/utils.py | 2 | 2141 | #!/usr/bin/env python
#
# Copyright (C) 2015 Colin Walters <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import sys
import stat
import shutil
import errno
import subprocess
import os
from gi.repository import GLib, Gio
def fatal(msg):
print >>sys.stderr, msg
sys.exit(1)
def log(msg):
"Print to standard output and flush it"
sys.stdout.write(msg)
sys.stdout.write('\n')
sys.stdout.flush()
def run_sync(args, **kwargs):
"""Wraps subprocess.check_call(), logging the command line too."""
if isinstance(args, str) or isinstance(args, unicode):
argstr = args
else:
argstr = subprocess.list2cmdline(args)
log("Running: {0}".format(argstr))
subprocess.check_call(args, **kwargs)
def rmrf(path):
try:
stbuf = os.lstat(path)
except OSError as e:
return
if stat.S_ISDIR(stbuf.st_mode):
shutil.rmtree(path)
else:
try:
os.unlink(path)
except OSError as e:
if e.errno != errno.ENOENT:
raise
def hardlink_or_copy(src, dest):
try:
os.link(src, dest)
except OSError as e:
if e.errno != errno.EXDEV:
raise
shutil.copy(src,dest)
def ensuredir(path, with_parents=False):
    # Note: os.makedirs always creates parent directories, so the
    # with_parents flag currently has no effect.
try:
os.makedirs(path)
except OSError as e:
if e.errno != errno.EEXIST:
raise
def ensure_clean_dir(path):
rmrf(path)
ensuredir(path)
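# Usage sketch (hypothetical, not part of the module): the paths and the
# command below are placeholders chosen only to show how the helpers chain
# together on a POSIX system.
if __name__ == '__main__':
    ensure_clean_dir('/tmp/rdgo-demo')
    run_sync(['touch', '/tmp/rdgo-demo/marker'])
    hardlink_or_copy('/tmp/rdgo-demo/marker', '/tmp/rdgo-demo/marker.copy')
    rmrf('/tmp/rdgo-demo')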
| lgpl-2.1 | -4,058,728,493,583,607,300 | 25.7625 | 70 | 0.660906 | false |
theochem/horton | horton/meanfield/test/test_scf_cdiis.py | 4 | 2038 | # -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2017 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
import numpy as np
from nose.tools import assert_raises
from horton import * # pylint: disable=wildcard-import,unused-wildcard-import
from horton.meanfield.test.common import check_hf_cs_hf, check_lih_os_hf, \
check_water_cs_hfs, check_n2_cs_hfs, check_h3_os_hfs, check_h3_os_pbe, \
check_co_cs_pbe, check_vanadium_sc_hf, check_water_cs_m05, \
check_methyl_os_tpss
def test_hf_cs_hf():
check_hf_cs_hf(CDIISSCFSolver(threshold=1e-7))
def test_lih_os_hf():
check_lih_os_hf(CDIISSCFSolver(threshold=1e-7))
def test_water_cs_hfs():
check_water_cs_hfs(CDIISSCFSolver(threshold=1e-6))
def test_n2_cs_hfs():
check_n2_cs_hfs(CDIISSCFSolver(threshold=1e-6))
def test_h3_os_hfs():
check_h3_os_hfs(CDIISSCFSolver(threshold=1e-6))
def test_co_cs_pbe():
check_co_cs_pbe(CDIISSCFSolver(threshold=1e-5))
def test_h3_os_pbe():
check_h3_os_pbe(CDIISSCFSolver(threshold=1e-6))
def test_vanadium_sc_hf():
with assert_raises(NoSCFConvergence):
check_vanadium_sc_hf(CDIISSCFSolver(threshold=1e-10, maxiter=10))
def test_water_cs_m05():
check_water_cs_m05(CDIISSCFSolver(threshold=1e-6))
def test_methyl_os_tpss():
check_methyl_os_tpss(CDIISSCFSolver(threshold=1e-5))
| gpl-3.0 | -6,417,097,978,299,825,000 | 27.305556 | 78 | 0.723258 | false |
MQQiang/kbengine | kbe/res/scripts/common/Lib/xml/sax/xmlreader.py | 824 | 12612 | """An XML Reader is the SAX 2 name for an XML parser. XML Parsers
should be based on this code. """
from . import handler
from ._exceptions import SAXNotSupportedException, SAXNotRecognizedException
# ===== XMLREADER =====
class XMLReader:
"""Interface for reading an XML document using callbacks.
XMLReader is the interface that an XML parser's SAX2 driver must
implement. This interface allows an application to set and query
features and properties in the parser, to register event handlers
for document processing, and to initiate a document parse.
All SAX interfaces are assumed to be synchronous: the parse
methods must not return until parsing is complete, and readers
must wait for an event-handler callback to return before reporting
the next event."""
def __init__(self):
self._cont_handler = handler.ContentHandler()
self._dtd_handler = handler.DTDHandler()
self._ent_handler = handler.EntityResolver()
self._err_handler = handler.ErrorHandler()
def parse(self, source):
"Parse an XML document from a system identifier or an InputSource."
raise NotImplementedError("This method must be implemented!")
def getContentHandler(self):
"Returns the current ContentHandler."
return self._cont_handler
def setContentHandler(self, handler):
"Registers a new object to receive document content events."
self._cont_handler = handler
def getDTDHandler(self):
"Returns the current DTD handler."
return self._dtd_handler
def setDTDHandler(self, handler):
"Register an object to receive basic DTD-related events."
self._dtd_handler = handler
def getEntityResolver(self):
"Returns the current EntityResolver."
return self._ent_handler
def setEntityResolver(self, resolver):
"Register an object to resolve external entities."
self._ent_handler = resolver
def getErrorHandler(self):
"Returns the current ErrorHandler."
return self._err_handler
def setErrorHandler(self, handler):
"Register an object to receive error-message events."
self._err_handler = handler
def setLocale(self, locale):
"""Allow an application to set the locale for errors and warnings.
SAX parsers are not required to provide localization for errors
and warnings; if they cannot support the requested locale,
however, they must raise a SAX exception. Applications may
request a locale change in the middle of a parse."""
raise SAXNotSupportedException("Locale support not implemented")
def getFeature(self, name):
"Looks up and returns the state of a SAX2 feature."
raise SAXNotRecognizedException("Feature '%s' not recognized" % name)
def setFeature(self, name, state):
"Sets the state of a SAX2 feature."
raise SAXNotRecognizedException("Feature '%s' not recognized" % name)
def getProperty(self, name):
"Looks up and returns the value of a SAX2 property."
raise SAXNotRecognizedException("Property '%s' not recognized" % name)
def setProperty(self, name, value):
"Sets the value of a SAX2 property."
raise SAXNotRecognizedException("Property '%s' not recognized" % name)
class IncrementalParser(XMLReader):
"""This interface adds three extra methods to the XMLReader
interface that allow XML parsers to support incremental
parsing. Support for this interface is optional, since not all
underlying XML parsers support this functionality.
When the parser is instantiated it is ready to begin accepting
data from the feed method immediately. After parsing has been
finished with a call to close the reset method must be called to
make the parser ready to accept new data, either from feed or
using the parse method.
Note that these methods must _not_ be called during parsing, that
is, after parse has been called and before it returns.
By default, the class also implements the parse method of the XMLReader
interface using the feed, close and reset methods of the
IncrementalParser interface as a convenience to SAX 2.0 driver
writers."""
def __init__(self, bufsize=2**16):
self._bufsize = bufsize
XMLReader.__init__(self)
def parse(self, source):
from . import saxutils
source = saxutils.prepare_input_source(source)
self.prepareParser(source)
file = source.getByteStream()
buffer = file.read(self._bufsize)
while buffer:
self.feed(buffer)
buffer = file.read(self._bufsize)
self.close()
def feed(self, data):
"""This method gives the raw XML data in the data parameter to
the parser and makes it parse the data, emitting the
corresponding events. It is allowed for XML constructs to be
split across several calls to feed.
feed may raise SAXException."""
raise NotImplementedError("This method must be implemented!")
def prepareParser(self, source):
"""This method is called by the parse implementation to allow
the SAX 2.0 driver to prepare itself for parsing."""
raise NotImplementedError("prepareParser must be overridden!")
def close(self):
"""This method is called when the entire XML document has been
passed to the parser through the feed method, to notify the
parser that there are no more data. This allows the parser to
do the final checks on the document and empty the internal
data buffer.
The parser will not be ready to parse another document until
the reset method has been called.
close may raise SAXException."""
raise NotImplementedError("This method must be implemented!")
def reset(self):
"""This method is called after close has been called to reset
the parser so that it is ready to parse new documents. The
results of calling parse or feed after close without calling
reset are undefined."""
raise NotImplementedError("This method must be implemented!")
# ===== LOCATOR =====
class Locator:
"""Interface for associating a SAX event with a document
location. A locator object will return valid results only during
calls to DocumentHandler methods; at any other time, the
results are unpredictable."""
def getColumnNumber(self):
"Return the column number where the current event ends."
return -1
def getLineNumber(self):
"Return the line number where the current event ends."
return -1
def getPublicId(self):
"Return the public identifier for the current event."
return None
def getSystemId(self):
"Return the system identifier for the current event."
return None
# ===== INPUTSOURCE =====
class InputSource:
"""Encapsulation of the information needed by the XMLReader to
read entities.
This class may include information about the public identifier,
system identifier, byte stream (possibly with character encoding
information) and/or the character stream of an entity.
Applications will create objects of this class for use in the
XMLReader.parse method and for returning from
EntityResolver.resolveEntity.
An InputSource belongs to the application, the XMLReader is not
allowed to modify InputSource objects passed to it from the
application, although it may make copies and modify those."""
def __init__(self, system_id = None):
self.__system_id = system_id
self.__public_id = None
self.__encoding = None
self.__bytefile = None
self.__charfile = None
def setPublicId(self, public_id):
"Sets the public identifier of this InputSource."
self.__public_id = public_id
def getPublicId(self):
"Returns the public identifier of this InputSource."
return self.__public_id
def setSystemId(self, system_id):
"Sets the system identifier of this InputSource."
self.__system_id = system_id
def getSystemId(self):
"Returns the system identifier of this InputSource."
return self.__system_id
def setEncoding(self, encoding):
"""Sets the character encoding of this InputSource.
The encoding must be a string acceptable for an XML encoding
declaration (see section 4.3.3 of the XML recommendation).
The encoding attribute of the InputSource is ignored if the
InputSource also contains a character stream."""
self.__encoding = encoding
def getEncoding(self):
"Get the character encoding of this InputSource."
return self.__encoding
def setByteStream(self, bytefile):
"""Set the byte stream (a Python file-like object which does
not perform byte-to-character conversion) for this input
source.
The SAX parser will ignore this if there is also a character
stream specified, but it will use a byte stream in preference
to opening a URI connection itself.
If the application knows the character encoding of the byte
stream, it should set it with the setEncoding method."""
self.__bytefile = bytefile
def getByteStream(self):
"""Get the byte stream for this input source.
The getEncoding method will return the character encoding for
this byte stream, or None if unknown."""
return self.__bytefile
def setCharacterStream(self, charfile):
"""Set the character stream for this input source. (The stream
must be a Python 2.0 Unicode-wrapped file-like that performs
conversion to Unicode strings.)
If there is a character stream specified, the SAX parser will
ignore any byte stream and will not attempt to open a URI
connection to the system identifier."""
self.__charfile = charfile
def getCharacterStream(self):
"Get the character stream for this input source."
return self.__charfile
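# Brief illustration (not part of the SAX interface itself): populating an
# InputSource the way a caller would before handing it to XMLReader.parse.
# The system identifier below is a placeholder.
def _input_source_example():
    source = InputSource("http://example.org/doc.xml")
    source.setEncoding("utf-8")
    assert source.getSystemId() == "http://example.org/doc.xml"
    assert source.getEncoding() == "utf-8"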
# ===== ATTRIBUTESIMPL =====
class AttributesImpl:
def __init__(self, attrs):
"""Non-NS-aware implementation.
attrs should be of the form {name : value}."""
self._attrs = attrs
def getLength(self):
return len(self._attrs)
def getType(self, name):
return "CDATA"
def getValue(self, name):
return self._attrs[name]
def getValueByQName(self, name):
return self._attrs[name]
def getNameByQName(self, name):
if name not in self._attrs:
raise KeyError(name)
return name
def getQNameByName(self, name):
if name not in self._attrs:
raise KeyError(name)
return name
def getNames(self):
return list(self._attrs.keys())
def getQNames(self):
return list(self._attrs.keys())
def __len__(self):
return len(self._attrs)
def __getitem__(self, name):
return self._attrs[name]
def keys(self):
return list(self._attrs.keys())
def __contains__(self, name):
return name in self._attrs
def get(self, name, alternative=None):
return self._attrs.get(name, alternative)
def copy(self):
return self.__class__(self._attrs)
def items(self):
return list(self._attrs.items())
def values(self):
return list(self._attrs.values())
# ===== ATTRIBUTESNSIMPL =====
class AttributesNSImpl(AttributesImpl):
def __init__(self, attrs, qnames):
"""NS-aware implementation.
attrs should be of the form {(ns_uri, lname): value, ...}.
qnames of the form {(ns_uri, lname): qname, ...}."""
self._attrs = attrs
self._qnames = qnames
def getValueByQName(self, name):
for (nsname, qname) in self._qnames.items():
if qname == name:
return self._attrs[nsname]
raise KeyError(name)
def getNameByQName(self, name):
for (nsname, qname) in self._qnames.items():
if qname == name:
return nsname
raise KeyError(name)
def getQNameByName(self, name):
return self._qnames[name]
def getQNames(self):
return list(self._qnames.values())
def copy(self):
return self.__class__(self._attrs, self._qnames)
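# Short illustration (not part of the SAX interface itself): how a driver
# typically builds the two attribute implementations. The namespace URI and
# names are invented for the example.
def _attributes_example():
    plain = AttributesImpl({"id": "n1"})
    assert plain.getValue("id") == "n1"
    ns = AttributesNSImpl({("http://example.org/ns", "id"): "n1"},
                          {("http://example.org/ns", "id"): "ex:id"})
    assert ns.getQNameByName(("http://example.org/ns", "id")) == "ex:id"
    assert ns.getValueByQName("ex:id") == "n1"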
def _test():
XMLReader()
IncrementalParser()
Locator()
if __name__ == "__main__":
_test()
| lgpl-3.0 | -7,126,149,537,136,257,000 | 32.365079 | 78 | 0.662147 | false |
techdragon/django | tests/project_template/test_settings.py | 23 | 1644 | import os
import shutil
import unittest
from django import conf
from django.test import TestCase
from django.utils import six
from django.utils._os import upath
@unittest.skipIf(
six.PY2,
'Python 2 cannot import the project template because '
'django/conf/project_template doesn\'t have an __init__.py file.'
)
class TestStartProjectSettings(TestCase):
def setUp(self):
# Ensure settings.py exists
project_dir = os.path.join(
os.path.dirname(upath(conf.__file__)),
'project_template',
'project_name',
)
template_settings_py = os.path.join(project_dir, 'settings.py-tpl')
test_settings_py = os.path.join(project_dir, 'settings.py')
shutil.copyfile(template_settings_py, test_settings_py)
self.addCleanup(os.remove, test_settings_py)
def test_middleware_headers(self):
"""
Ensure headers sent by the default MIDDLEWARE don't inadvertently
change. For example, we never want "Vary: Cookie" to appear in the list
since it prevents the caching of responses.
"""
from django.conf.project_template.project_name.settings import MIDDLEWARE
with self.settings(
MIDDLEWARE=MIDDLEWARE,
ROOT_URLCONF='project_template.urls',
):
response = self.client.get('/empty/')
headers = sorted(response.serialize_headers().split(b'\r\n'))
self.assertEqual(headers, [
b'Content-Length: 0',
b'Content-Type: text/html; charset=utf-8',
b'X-Frame-Options: SAMEORIGIN',
])
| bsd-3-clause | -8,128,524,887,018,648,000 | 33.978723 | 81 | 0.627737 | false |
t-tran/libcloud | libcloud/test/loadbalancer/test_dimensiondata_v2_3.py | 11 | 26689 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from libcloud.utils.py3 import httplib
from libcloud.common.types import InvalidCredsError
from libcloud.common.dimensiondata import DimensionDataVIPNode, DimensionDataPool
from libcloud.common.dimensiondata import DimensionDataPoolMember
from libcloud.loadbalancer.base import LoadBalancer, Member, Algorithm
from libcloud.loadbalancer.drivers.dimensiondata \
import DimensionDataLBDriver as DimensionData
from libcloud.loadbalancer.types import State
from libcloud.test import MockHttp, unittest
from libcloud.test.file_fixtures import LoadBalancerFileFixtures
from libcloud.test.secrets import DIMENSIONDATA_PARAMS
class DimensionData_v2_3_Tests(unittest.TestCase):
def setUp(self):
DimensionData.connectionCls.active_api_version = '2.3'
DimensionData.connectionCls.conn_class = DimensionDataMockHttp
DimensionDataMockHttp.type = None
self.driver = DimensionData(*DIMENSIONDATA_PARAMS)
def test_invalid_region(self):
with self.assertRaises(ValueError):
self.driver = DimensionData(*DIMENSIONDATA_PARAMS, region='blah')
def test_invalid_creds(self):
DimensionDataMockHttp.type = 'UNAUTHORIZED'
with self.assertRaises(InvalidCredsError):
self.driver.list_balancers()
def test_create_balancer(self):
self.driver.ex_set_current_network_domain('1234')
members = []
members.append(Member(
id=None,
ip='1.2.3.4',
port=80))
balancer = self.driver.create_balancer(
name='test',
port=80,
protocol='http',
algorithm=Algorithm.ROUND_ROBIN,
members=members,
ex_listener_ip_address='5.6.7.8')
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '165.180.12.22')
self.assertEqual(balancer.port, 80)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
self.assertEqual(balancer.extra['listener_ip_address'], '5.6.7.8')
def test_create_balancer_with_defaults(self):
self.driver.ex_set_current_network_domain('1234')
balancer = self.driver.create_balancer(
name='test',
port=None,
protocol=None,
algorithm=None,
members=None)
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '165.180.12.22')
self.assertEqual(balancer.port, None)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
def test_create_balancer_no_members(self):
self.driver.ex_set_current_network_domain('1234')
members = None
balancer = self.driver.create_balancer(
name='test',
port=80,
protocol='http',
algorithm=Algorithm.ROUND_ROBIN,
members=members)
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '165.180.12.22')
self.assertEqual(balancer.port, 80)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
def test_create_balancer_empty_members(self):
self.driver.ex_set_current_network_domain('1234')
members = []
balancer = self.driver.create_balancer(
name='test',
port=80,
protocol='http',
algorithm=Algorithm.ROUND_ROBIN,
members=members)
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '165.180.12.22')
self.assertEqual(balancer.port, 80)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
def test_list_balancers(self):
bal = self.driver.list_balancers()
self.assertEqual(bal[0].name, 'myProduction.Virtual.Listener')
self.assertEqual(bal[0].id, '6115469d-a8bb-445b-bb23-d23b5283f2b9')
self.assertEqual(bal[0].port, '8899')
self.assertEqual(bal[0].ip, '165.180.12.22')
self.assertEqual(bal[0].state, State.RUNNING)
def test_balancer_list_members(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
members = self.driver.balancer_list_members(balancer)
self.assertEqual(2, len(members))
self.assertEqual(members[0].ip, '10.0.3.13')
self.assertEqual(members[0].id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(members[0].port, 9889)
def test_balancer_attach_member(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
member = Member(
id=None,
ip='112.12.2.2',
port=80,
balancer=balancer,
extra=None)
member = self.driver.balancer_attach_member(balancer, member)
self.assertEqual(member.id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
def test_balancer_attach_member_without_port(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
member = Member(
id=None,
ip='112.12.2.2',
port=None,
balancer=balancer,
extra=None)
member = self.driver.balancer_attach_member(balancer, member)
self.assertEqual(member.id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(member.port, None)
def test_balancer_detach_member(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
member = Member(
id='3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0',
ip='112.12.2.2',
port=80,
balancer=balancer,
extra=None)
result = self.driver.balancer_detach_member(balancer, member)
self.assertEqual(result, True)
def test_destroy_balancer(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
response = self.driver.destroy_balancer(balancer)
self.assertEqual(response, True)
def test_set_get_network_domain_id(self):
self.driver.ex_set_current_network_domain('1234')
nwd = self.driver.ex_get_current_network_domain()
self.assertEqual(nwd, '1234')
def test_ex_create_pool_member(self):
pool = DimensionDataPool(
id='4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None
)
node = DimensionDataVIPNode(
id='2344',
name='test',
status=State.RUNNING,
ip='123.23.3.2'
)
member = self.driver.ex_create_pool_member(
pool=pool,
node=node,
port=80
)
self.assertEqual(member.id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(member.name, '10.0.3.13')
self.assertEqual(member.ip, '123.23.3.2')
def test_ex_create_node(self):
node = self.driver.ex_create_node(
network_domain_id='12345',
name='test',
ip='123.12.32.2',
ex_description='',
connection_limit=25000,
connection_rate_limit=2000)
self.assertEqual(node.name, 'myProductionNode.1')
self.assertEqual(node.id, '9e6b496d-5261-4542-91aa-b50c7f569c54')
    def test_ex_create_pool(self):
pool = self.driver.ex_create_pool(
network_domain_id='1234',
name='test',
balancer_method='ROUND_ROBIN',
ex_description='test',
service_down_action='NONE',
slow_ramp_time=30)
self.assertEqual(pool.id, '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(pool.name, 'test')
self.assertEqual(pool.status, State.RUNNING)
def test_ex_create_virtual_listener(self):
listener = self.driver.ex_create_virtual_listener(
network_domain_id='12345',
name='test',
ex_description='test',
port=80,
pool=DimensionDataPool(
id='1234',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None
))
self.assertEqual(listener.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(listener.name, 'test')
def test_ex_create_virtual_listener_unusual_port(self):
listener = self.driver.ex_create_virtual_listener(
network_domain_id='12345',
name='test',
ex_description='test',
port=8900,
pool=DimensionDataPool(
id='1234',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None
))
self.assertEqual(listener.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(listener.name, 'test')
def test_ex_create_virtual_listener_without_port(self):
listener = self.driver.ex_create_virtual_listener(
network_domain_id='12345',
name='test',
ex_description='test',
pool=DimensionDataPool(
id='1234',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None
))
self.assertEqual(listener.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(listener.name, 'test')
def test_ex_create_virtual_listener_without_pool(self):
listener = self.driver.ex_create_virtual_listener(
network_domain_id='12345',
name='test',
ex_description='test')
self.assertEqual(listener.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(listener.name, 'test')
def test_get_balancer(self):
bal = self.driver.get_balancer('6115469d-a8bb-445b-bb23-d23b5283f2b9')
self.assertEqual(bal.name, 'myProduction.Virtual.Listener')
self.assertEqual(bal.id, '6115469d-a8bb-445b-bb23-d23b5283f2b9')
self.assertEqual(bal.port, '8899')
self.assertEqual(bal.ip, '165.180.12.22')
self.assertEqual(bal.state, State.RUNNING)
def test_list_protocols(self):
protocols = self.driver.list_protocols()
self.assertNotEqual(0, len(protocols))
def test_ex_get_nodes(self):
nodes = self.driver.ex_get_nodes()
self.assertEqual(2, len(nodes))
self.assertEqual(nodes[0].name, 'ProductionNode.1')
self.assertEqual(nodes[0].id, '34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertEqual(nodes[0].ip, '10.10.10.101')
def test_ex_get_node(self):
node = self.driver.ex_get_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertEqual(node.name, 'ProductionNode.2')
self.assertEqual(node.id, '34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertEqual(node.ip, '10.10.10.101')
def test_ex_update_node(self):
node = self.driver.ex_get_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
node.connection_limit = '100'
result = self.driver.ex_update_node(node)
self.assertEqual(result.connection_limit, '100')
def test_ex_destroy_node(self):
result = self.driver.ex_destroy_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertTrue(result)
def test_ex_set_node_state(self):
node = self.driver.ex_get_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
result = self.driver.ex_set_node_state(node, False)
self.assertEqual(result.connection_limit, '10000')
def test_ex_get_pools(self):
pools = self.driver.ex_get_pools()
self.assertNotEqual(0, len(pools))
self.assertEqual(pools[0].name, 'myDevelopmentPool.1')
self.assertEqual(pools[0].id, '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
def test_ex_get_pool(self):
pool = self.driver.ex_get_pool('4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
self.assertEqual(pool.name, 'myDevelopmentPool.1')
self.assertEqual(pool.id, '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
def test_ex_update_pool(self):
pool = self.driver.ex_get_pool('4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
pool.slow_ramp_time = '120'
result = self.driver.ex_update_pool(pool)
self.assertTrue(result)
def test_ex_destroy_pool(self):
response = self.driver.ex_destroy_pool(
pool=DimensionDataPool(
id='4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None))
self.assertTrue(response)
def test_get_pool_members(self):
members = self.driver.ex_get_pool_members('4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
self.assertEqual(2, len(members))
self.assertEqual(members[0].id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(members[0].name, '10.0.3.13')
self.assertEqual(members[0].status, 'NORMAL')
self.assertEqual(members[0].ip, '10.0.3.13')
self.assertEqual(members[0].port, 9889)
self.assertEqual(members[0].node_id, '3c207269-e75e-11e4-811f-005056806999')
def test_get_pool_member(self):
member = self.driver.ex_get_pool_member('3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(member.id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(member.name, '10.0.3.13')
self.assertEqual(member.status, 'NORMAL')
self.assertEqual(member.ip, '10.0.3.13')
self.assertEqual(member.port, 9889)
def test_set_pool_member_state(self):
member = self.driver.ex_get_pool_member('3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
result = self.driver.ex_set_pool_member_state(member, True)
self.assertTrue(result)
def test_ex_destroy_pool_member(self):
response = self.driver.ex_destroy_pool_member(
member=DimensionDataPoolMember(
id='',
name='test',
status=State.RUNNING,
ip='1.2.3.4',
port=80,
node_id='3c207269-e75e-11e4-811f-005056806999'),
destroy_node=False)
self.assertTrue(response)
def test_ex_destroy_pool_member_with_node(self):
response = self.driver.ex_destroy_pool_member(
member=DimensionDataPoolMember(
id='',
name='test',
status=State.RUNNING,
ip='1.2.3.4',
port=80,
node_id='34de6ed6-46a4-4dae-a753-2f8d3840c6f9'),
destroy_node=True)
self.assertTrue(response)
def test_ex_get_default_health_monitors(self):
monitors = self.driver.ex_get_default_health_monitors(
'4d360b1f-bc2c-4ab7-9884-1f03ba2768f7'
)
self.assertEqual(len(monitors), 6)
self.assertEqual(monitors[0].id, '01683574-d487-11e4-811f-005056806999')
self.assertEqual(monitors[0].name, 'CCDEFAULT.Http')
self.assertFalse(monitors[0].node_compatible)
self.assertTrue(monitors[0].pool_compatible)
def test_ex_get_default_persistence_profiles(self):
profiles = self.driver.ex_get_default_persistence_profiles(
'4d360b1f-bc2c-4ab7-9884-1f03ba2768f7'
)
self.assertEqual(len(profiles), 4)
self.assertEqual(profiles[0].id, 'a34ca024-f3db-11e4-b010-005056806999')
self.assertEqual(profiles[0].name, 'CCDEFAULT.Cookie')
self.assertEqual(profiles[0].fallback_compatible, False)
self.assertEqual(len(profiles[0].compatible_listeners), 1)
self.assertEqual(profiles[0].compatible_listeners[0].type, 'PERFORMANCE_LAYER_4')
def test_ex_get_default_irules(self):
irules = self.driver.ex_get_default_irules(
'4d360b1f-bc2c-4ab7-9884-1f03ba2768f7'
)
self.assertEqual(len(irules), 4)
self.assertEqual(irules[0].id, '2b20cb2c-ffdc-11e4-b010-005056806999')
self.assertEqual(irules[0].name, 'CCDEFAULT.HttpsRedirect')
self.assertEqual(len(irules[0].compatible_listeners), 1)
self.assertEqual(irules[0].compatible_listeners[0].type, 'PERFORMANCE_LAYER_4')
class DimensionDataMockHttp(MockHttp):
fixtures = LoadBalancerFileFixtures('dimensiondata')
def _oec_0_9_myaccount_UNAUTHORIZED(self, method, url, body, headers):
return (httplib.UNAUTHORIZED, "", {}, httplib.responses[httplib.UNAUTHORIZED])
def _oec_0_9_myaccount(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_myaccount_INPROGRESS(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_virtualListener(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_virtualListener.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_virtualListener_6115469d_a8bb_445b_bb23_d23b5283f2b9(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_virtualListener_6115469d_a8bb_445b_bb23_d23b5283f2b9.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_pool(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_pool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_pool_4d360b1f_bc2c_4ab7_9884_1f03ba2768f7(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_pool_4d360b1f_bc2c_4ab7_9884_1f03ba2768f7.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_poolMember(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_poolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_poolMember_3dd806a2_c2c8_4c0c_9a4f_5219ea9266c0(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_poolMember_3dd806a2_c2c8_4c0c_9a4f_5219ea9266c0.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createPool(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_createPool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createNode(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_createNode.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_addPoolMember(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_addPoolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createVirtualListener(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_createVirtualListener.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_removePoolMember(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_removePoolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deleteVirtualListener(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_deleteVirtualListener.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deletePool(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_deletePool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deleteNode(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_deleteNode.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_node(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_node.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_node_34de6ed6_46a4_4dae_a753_2f8d3840c6f9(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_node_34de6ed6_46a4_4dae_a753_2f8d3840c6f9.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editNode(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_editNode.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editPool(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_editPool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editPoolMember(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_editPoolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultHealthMonitor(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_defaultHealthMonitor.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultPersistenceProfile(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_defaultPersistenceProfile.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultIrule(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_defaultIrule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
if __name__ == '__main__':
sys.exit(unittest.main())
| apache-2.0 | 1,228,496,386,367,163,100 | 42.046774 | 159 | 0.63232 | false |
NLeSC/cptm | cptm/utils/topics.py | 1 | 1882 | import pandas as pd
def get_top_topic_words(topics, opinions, t, top=10):
"""Return dataframe containing top topics and opinions.
Parameters
t : str - index of topic number
top : int - the number of words to store in the dataframe
Returns Pandas DataFrame
The DataFrame contains top topic words, weights of topic words and for
each perspective opinion words and weigths of opinion words.
"""
t = str(t)
topic = topics[t].copy()
topic.sort(ascending=False)
topic = topic[0:top]
df_t = pd.DataFrame(topic)
df_t.reset_index(level=0, inplace=True)
df_t.columns = ['topic', 'weights_topic']
dfs = [df_t]
for p, o in opinions.iteritems():
opinion = o[t].copy()
opinion.sort(ascending=False)
opinion = opinion[0:top]
df_o = pd.DataFrame(opinion)
df_o.reset_index(level=0, inplace=True)
df_o.columns = ['{}'.format(p),
'weights_{}'.format(p)]
dfs.append(df_o)
return pd.concat(dfs, axis=1)
def topic_str(df, single_line=False, weights=False, opinions=True):
if opinions:
opinion_labels = [l for l in df.columns if not l.startswith('weights')]
else:
opinion_labels = [l for l in df.columns if l.startswith('topic')]
if not single_line:
if not weights:
return str(df[opinion_labels])
else:
return str(df)
else:
lines = []
if not weights:
for l in opinion_labels:
lines.append(u'{}:\t'.format(l)+' '.join(df[l]))
else:
for l in opinion_labels:
zipped = zip(df[l], df['weights_{}'.format(l)])
line = [u'{}*{:.4f}'.format(wo, we) for wo, we in zipped]
lines.append(' '.join([u'{}:\t'.format(l)]+line))
return u'\n'.join(lines)
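if __name__ == '__main__':
    # Tiny synthetic example (made-up numbers, assuming the same old-pandas
    # API as the module itself): 'topics' and each opinion frame are indexed
    # by word, with one string-keyed column per topic number.
    topics = pd.DataFrame({'0': [0.4, 0.3, 0.2]},
                          index=['tax', 'budget', 'health'])
    opinions = {'gov': pd.DataFrame({'0': [0.5, 0.1, 0.05]},
                                    index=['support', 'agree', 'oppose'])}
    df = get_top_topic_words(topics, opinions, 0, top=2)
    print topic_str(df, weights=True)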
| apache-2.0 | 7,845,588,280,792,093,000 | 32.017544 | 79 | 0.567481 | false |
Oxygem/canaryd | canaryd_packages/requests/packages/chardet/latin1prober.py | 1778 | 5232 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe
from .compat import wrap_ord
FREQ_CAT_NUM = 4
UDF = 0 # undefined
OTH = 1 # other
ASC = 2 # ascii capital letter
ASS = 3 # ascii small letter
ACV = 4 # accent capital vowel
ACO = 5 # accent capital other
ASV = 6 # accent small vowel
ASO = 7 # accent small other
CLASS_NUM = 8 # total classes
Latin1_CharToClass = (
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F
OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47
ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F
ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57
ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F
OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67
ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F
ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77
ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F
OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87
OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F
UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97
OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF
ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7
ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF
ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7
ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF
ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7
ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF
ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7
ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF
)
# 0 : illegal
# 1 : very unlikely
# 2 : normal
# 3 : very likely
Latin1ClassModel = (
# UDF OTH ASC ASS ACV ACO ASV ASO
0, 0, 0, 0, 0, 0, 0, 0, # UDF
0, 3, 3, 3, 3, 3, 3, 3, # OTH
0, 3, 3, 3, 3, 3, 3, 3, # ASC
0, 3, 3, 3, 1, 1, 3, 3, # ASS
0, 3, 3, 3, 1, 2, 1, 2, # ACV
0, 3, 3, 3, 3, 3, 3, 3, # ACO
0, 3, 1, 3, 1, 1, 1, 3, # ASV
0, 3, 1, 3, 1, 1, 3, 3, # ASO
)
class Latin1Prober(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self.reset()
def reset(self):
self._mLastCharClass = OTH
self._mFreqCounter = [0] * FREQ_CAT_NUM
CharSetProber.reset(self)
def get_charset_name(self):
return "windows-1252"
def feed(self, aBuf):
aBuf = self.filter_with_english_letters(aBuf)
for c in aBuf:
charClass = Latin1_CharToClass[wrap_ord(c)]
freq = Latin1ClassModel[(self._mLastCharClass * CLASS_NUM)
+ charClass]
if freq == 0:
self._mState = eNotMe
break
self._mFreqCounter[freq] += 1
self._mLastCharClass = charClass
return self.get_state()
def get_confidence(self):
if self.get_state() == eNotMe:
return 0.01
total = sum(self._mFreqCounter)
if total < 0.01:
confidence = 0.0
else:
confidence = ((self._mFreqCounter[3] - self._mFreqCounter[1] * 20.0)
/ total)
if confidence < 0.0:
confidence = 0.0
# lower the confidence of latin1 so that other more accurate
# detector can take priority.
confidence = confidence * 0.73
return confidence
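if __name__ == '__main__':
    # Minimal illustration (not part of chardet): feed a few Latin-1 encoded
    # bytes and read back a confidence score. The sample text is arbitrary.
    prober = Latin1Prober()
    prober.feed(b'caf\xe9 au lait, d\xe9j\xe0 vu')
    print('%s %.2f' % (prober.get_charset_name(), prober.get_confidence()))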
| mit | -8,520,321,343,722,364,000 | 36.640288 | 80 | 0.559251 | false |
tund/kaggle-galaxy-zoo | avg_result.py | 1 | 2774 | # Copyright (c) 2014, Tu Dinh Nguyen ([email protected])
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
average several results
"""
import sys
import os
import csv
import cPickle as pickle
import numpy as np
import gzip
from numpy import isnan
RESULT_FILE = ["./RUN/avg_res/0.07889.csv",
"./RUN/avg_res/0.07895.csv",
"./RUN/avg_res/0.07911.csv",
"./RUN/avg_res/0.07939.csv"]
OUTPUT_FILE = "./RUN/avg_res/final_submission.csv"
def refine_result(res):
# all values > 1 should be = 1
res[res > 1] = 1
    # all values < 0 should be = 0
res[res < 0] = 0
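def _refine_demo():
    # Illustration only: clipping keeps each prediction inside [0, 1] in
    # place, without renormalising rows. The sample array is made up.
    x = np.array([[1.2, -0.1, 0.5]], dtype=np.float32)
    refine_result(x)
    assert x.min() >= 0.0 and x.max() <= 1.0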
def main():
res_total = np.zeros((79975, 37))
for i in xrange(len(RESULT_FILE)):
result = np.genfromtxt(RESULT_FILE[i], dtype=np.float32, delimiter=',', skip_header=1)
result = result[:, 1:]
res_total += result
res_total /= len(RESULT_FILE)
first_col = np.genfromtxt("./raw_data/kaggle_submission.csv",
dtype=np.int32, delimiter=',', skip_header=1, usecols=0)
first_col = first_col.reshape(len(first_col), 1)
r = csv.reader(open("./raw_data/kaggle_submission.csv", 'rb'), delimiter=",")
h = r.next()
refine_result(res_total)
with open(OUTPUT_FILE, 'wb') as f_out:
w = csv.writer(f_out, delimiter=",")
w.writerow(h)
for i in range(res_total.shape[0]):
w.writerow(np.hstack([first_col[i, 0], res_total[i, :]]).astype(np.single))
if __name__ == '__main__':
main()
| bsd-3-clause | -6,406,368,317,995,723,000 | 34.113924 | 94 | 0.679524 | false |
chirilo/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/config/urls.py | 117 | 3095 | # Copyright (c) 2010, Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import re
def view_source_url(local_path):
return "http://trac.webkit.org/browser/trunk/%s" % local_path
def view_revision_url(revision_number):
return "http://trac.webkit.org/changeset/%s" % revision_number
contribution_guidelines = "http://webkit.org/coding/contributing.html"
bug_server_domain = "webkit.org"
bug_server_host = "bugs." + bug_server_domain
_bug_server_regex = "https?://%s/" % re.sub('\.', '\\.', bug_server_host)
bug_server_url = "https://%s/" % bug_server_host
bug_url_long = _bug_server_regex + r"show_bug\.cgi\?id=(?P<bug_id>\d+)(&ctype=xml|&excludefield=attachmentdata)*"
bug_url_short = r"https?\://%s/b/(?P<bug_id>\d+)" % bug_server_domain
attachment_url = _bug_server_regex + r"attachment\.cgi\?id=(?P<attachment_id>\d+)(&action=(?P<action>\w+))?"
direct_attachment_url = r"https?://bug-(?P<bug_id>\d+)-attachments.%s/attachment\.cgi\?id=(?P<attachment_id>\d+)" % bug_server_domain
buildbot_url = "http://build.webkit.org"
def parse_bug_id(string):
if not string:
return None
match = re.search(bug_url_short, string)
if match:
return int(match.group('bug_id'))
match = re.search(bug_url_long, string)
if match:
return int(match.group('bug_id'))
return None
def parse_attachment_id(string):
if not string:
return None
match = re.search(attachment_url, string)
if match:
return int(match.group('attachment_id'))
match = re.search(direct_attachment_url, string)
if match:
return int(match.group('attachment_id'))
return None
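if __name__ == '__main__':
    # Hypothetical smoke checks (not part of webkitpy); the IDs are made up.
    assert parse_bug_id("https://bugs.webkit.org/show_bug.cgi?id=12345") == 12345
    assert parse_bug_id("http://webkit.org/b/12345") == 12345
    assert parse_attachment_id(
        "https://bugs.webkit.org/attachment.cgi?id=67890&action=review") == 67890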
| bsd-3-clause | 3,027,697,509,184,938,000 | 40.266667 | 133 | 0.717286 | false |
arxcruz/tempest-tool | tempestmail/tests/test_bugs.py | 1 | 2571 | import mock
import unittest
import tempestmail.bugs as bugs
class TestBugs(unittest.TestCase):
def setUp(self):
self.bz_url = 'https://bugzilla.redhat.com/show_bug.cgi?id=1386421'
self.lp_url = 'https://bugs.launchpad.net/tripleo/+bug/1634824'
self.error_url = 'https://www.google.com'
super(TestBugs, self).setUp()
def test_bug_factory_launchpad(self):
connector = bugs.bug_factory(self.lp_url)
self.assertIsInstance(connector, bugs.Launchpad)
def test_bug_factory_bugzilla(self):
connector = bugs.bug_factory(self.bz_url)
self.assertIsInstance(connector, bugs.Bugzilla)
def test_bug_factory_error(self):
with self.assertRaises(ValueError):
bugs.bug_factory(self.error_url)
@mock.patch('tempestmail.utils.get_html')
def test_bug_status_bugzilla(self, get_html_mock):
title = ('<title>Bug 1386421 – Tempest fail: tempest.api.'
'compute.servers.test_server_actions.ServerActionsTestJSON'
'</title>')
returned_title = (u'Bug 1386421 \u2013 Tempest fail: tempest.api.'
'compute.servers.test_server_actions.'
'ServerActionsTestJSON')
expected_return = (returned_title, None)
get_html_mock.return_value.content.decode.return_value = title
connector = bugs.connect_to_bug_system(self.bz_url)
name = connector.bug_status()
self.assertEquals(name, expected_return)
get_html_mock.return_value = None
connector = bugs.connect_to_bug_system(self.bz_url)
name = connector.bug_status()
self.assertEquals(name, ('', None))
get_html_mock.assert_called()
@mock.patch('tempestmail.utils.get_html')
def test_bug_status_launchpad(self, get_html_mock):
title = ('<title>Bug #1633713 "puppet-ntp is breaking ci" : Bugs : '
'tripleo</title>')
returned_title = ('Bug #1633713 "puppet-ntp is breaking ci" : Bugs : '
'tripleo')
expected_return = (returned_title, None)
get_html_mock.return_value.content.decode.return_value = title
connector = bugs.connect_to_bug_system(self.lp_url)
name = connector.bug_status()
self.assertEquals(name, expected_return)
get_html_mock.return_value = None
connector = bugs.connect_to_bug_system(self.lp_url)
name = connector.bug_status()
self.assertEquals(name, ('', None))
get_html_mock.assert_called()
| gpl-3.0 | 2,011,308,115,772,769,500 | 36.808824 | 78 | 0.627771 | false |
victorbriz/sumatrapdf | scripts/trans_langs.py | 16 | 5002 | # List of languages we support, their iso codes and id as understood
# by Windows SDK (LANG_* and SUBLANG_*_*).
# See http://msdn.microsoft.com/en-us/library/dd318693.aspx for the full list.
g_langs = [
('af', 'Afrikaans', '_LANGID(LANG_AFRIKAANS)'),
('am', 'Armenian (Հայերեն)', '_LANGID(LANG_ARMENIAN)'),
('ar', 'Arabic (الْعَرَبيّة)', '_LANGID(LANG_ARABIC)', 'RTL'),
('az', 'Azerbaijani (Azərbaycanca)', '_LANGID(LANG_AZERI)'),
('bg', 'Bulgarian (Български)', '_LANGID(LANG_BULGARIAN)'),
('bn', 'Bengali (বাংলা)', '_LANGID(LANG_BENGALI)'),
('br', 'Portuguese - Brazil (Português)', 'MAKELANGID(LANG_PORTUGUESE, SUBLANG_PORTUGUESE_BRAZILIAN)'),
('bs', 'Bosnian (Bosanski)', 'MAKELANGID(LANG_BOSNIAN, SUBLANG_BOSNIAN_BOSNIA_HERZEGOVINA_LATIN)'),
('by', 'Belarusian (Беларуская)', '_LANGID(LANG_BELARUSIAN)'),
('ca', 'Catalan (Català)', '_LANGID(LANG_CATALAN)'),
('ca-xv', 'Catalan-Valencian (Català-Valencià)', '(LANGID)-1'),
('cn', 'Chinese Simplified (简体中文)', 'MAKELANGID(LANG_CHINESE, SUBLANG_CHINESE_SIMPLIFIED)'),
('cy', 'Welsh (Cymraeg)', '_LANGID(LANG_WELSH)'),
('cz', 'Czech (Čeština)', '_LANGID(LANG_CZECH)'),
('de', 'German (Deutsch)', '_LANGID(LANG_GERMAN)'),
('dk', 'Danish (Dansk)', '_LANGID(LANG_DANISH)'),
('el', 'Greek (Ελληνικά)', '_LANGID(LANG_GREEK)'),
('en', 'English', '_LANGID(LANG_ENGLISH)'),
('es', 'Spanish (Español)', '_LANGID(LANG_SPANISH)'),
('et', 'Estonian (Eesti)', '_LANGID(LANG_ESTONIAN)'),
('eu', 'Basque (Euskara)', '_LANGID(LANG_BASQUE)'),
('fa', 'Persian (فارسی)', '_LANGID(LANG_FARSI)', 'RTL'),
('fi', 'Finnish (Suomi)', '_LANGID(LANG_FINNISH)'),
('fr', 'French (Français)', '_LANGID(LANG_FRENCH)'),
('fy-nl', 'Frisian (Frysk)', '_LANGID(LANG_FRISIAN)'),
('ga', 'Irish (Gaeilge)', '_LANGID(LANG_IRISH)'),
('gl', 'Galician (Galego)', '_LANGID(LANG_GALICIAN)'),
('he', 'Hebrew (עברית)', '_LANGID(LANG_HEBREW)', 'RTL'),
('hi', 'Hindi (हिंदी)', '_LANGID(LANG_HINDI)'),
('hr', 'Croatian (Hrvatski)', '_LANGID(LANG_CROATIAN)'),
('hu', 'Hungarian (Magyar)', '_LANGID(LANG_HUNGARIAN)'),
('id', 'Indonesian (Bahasa Indonesia)', '_LANGID(LANG_INDONESIAN)'),
('it', 'Italian (Italiano)', '_LANGID(LANG_ITALIAN)'),
('ja', 'Japanese (日本語)', '_LANGID(LANG_JAPANESE)'),
('jv', 'Javanese (ꦧꦱꦗꦮ)', '(LANGID)-1'),
('ka', 'Georgian (ქართული)', '_LANGID(LANG_GEORGIAN)'),
('kr', 'Korean (한국어)', '_LANGID(LANG_KOREAN)'),
('ku', 'Kurdish (كوردی)', 'MAKELANGID(LANG_CENTRAL_KURDISH, SUBLANG_CENTRAL_KURDISH_CENTRAL_KURDISH_IRAQ)', 'RTL'),
('kw', 'Cornish (Kernewek)', '(LANGID)-1'),
('lt', 'Lithuanian (Lietuvių)', '_LANGID(LANG_LITHUANIAN)'),
('lv', 'Latvian (latviešu valoda)', '_LANGID(LANG_LATVIAN)'),
('mk', 'Macedonian (македонски)', '_LANGID(LANG_MACEDONIAN)'),
('ml', 'Malayalam (മലയാളം)', '_LANGID(LANG_MALAYALAM)'),
('mm', 'Burmese (ဗမာ စာ)', '(LANGID)-1'),
('my', 'Malaysian (Bahasa Melayu)', '_LANGID(LANG_MALAY)'),
('ne', 'Nepali (नेपाली)', '_LANGID(LANG_NEPALI)'),
('nl', 'Dutch (Nederlands)', '_LANGID(LANG_DUTCH)'),
('nn', 'Norwegian Neo-Norwegian (Norsk nynorsk)', 'MAKELANGID(LANG_NORWEGIAN, SUBLANG_NORWEGIAN_NYNORSK)'),
('no', 'Norwegian (Norsk)', 'MAKELANGID(LANG_NORWEGIAN, SUBLANG_NORWEGIAN_BOKMAL)'),
('pa', 'Punjabi (ਪੰਜਾਬੀ)', '_LANGID(LANG_PUNJABI)'),
('pl', 'Polish (Polski)', '_LANGID(LANG_POLISH)'),
('pt', 'Portuguese - Portugal (Português)', '_LANGID(LANG_PORTUGUESE)'),
('ro', 'Romanian (Română)', '_LANGID(LANG_ROMANIAN)'),
('ru', 'Russian (Русский)', '_LANGID(LANG_RUSSIAN)'),
('si', 'Sinhala (සිංහල)', '_LANGID(LANG_SINHALESE)'),
('sk', 'Slovak (Slovenčina)', '_LANGID(LANG_SLOVAK)'),
('sl', 'Slovenian (Slovenščina)', '_LANGID(LANG_SLOVENIAN)'),
('sn', 'Shona (Shona)', '(LANGID)-1'),
('sp-rs', 'Serbian (Latin)', 'MAKELANGID(LANG_SERBIAN, SUBLANG_SERBIAN_LATIN)'),
('sq', 'Albanian (Shqip)', '_LANGID(LANG_ALBANIAN)'),
('sr-rs', 'Serbian (Cyrillic)', 'MAKELANGID(LANG_SERBIAN, SUBLANG_SERBIAN_CYRILLIC)'),
('sv', 'Swedish (Svenska)', '_LANGID(LANG_SWEDISH)'),
('ta', 'Tamil (தமிழ்)', '_LANGID(LANG_TAMIL)'),
('th', 'Thai (ภาษาไทย)', '_LANGID(LANG_THAI)'),
('tl', 'Tagalog (Tagalog)', '_LANGID(LANG_FILIPINO)'),
('tr', 'Turkish (Türkçe)', '_LANGID(LANG_TURKISH)'),
('tw', 'Chinese Traditional (繁體中文)', 'MAKELANGID(LANG_CHINESE, SUBLANG_CHINESE_TRADITIONAL)'),
('uk', 'Ukrainian (Українська)', '_LANGID(LANG_UKRAINIAN)'),
('uz', 'Uzbek (O\'zbek)', '_LANGID(LANG_UZBEK)'),
('vn', 'Vietnamese (Việt Nam)', '_LANGID(LANG_VIETNAMESE)'),
]
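# Hedged helper sketch (not part of the original script): index the table by
# ISO code. Each tuple holds (code, display name, LANGID expression) plus an
# optional trailing 'RTL' flag.
g_lang_by_code = dict((entry[0], entry) for entry in g_langs)
# e.g. g_lang_by_code['he'][3] == 'RTL'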
| gpl-3.0 | -2,316,670,806,617,547,000 | 61.226667 | 119 | 0.584353 | false |
greenlion/mysql-server | storage/ndb/mcc/tst/unittest2/__init__.py | 155 | 2406 | """
unittest2
unittest2 is a backport of the new features added to the unittest testing
framework in Python 2.7. It is tested to run on Python 2.4 - 2.6.
To use unittest2 instead of unittest simply replace ``import unittest`` with
``import unittest2``.
Copyright (c) 1999-2003 Steve Purcell
Copyright (c) 2003-2010 Python Software Foundation
This module is free software, and you may redistribute it and/or modify
it under the same terms as Python itself, so long as this copyright message
and disclaimer are retained in their original form.
IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
"""
__all__ = ['TestResult', 'TestCase', 'TestSuite',
'TextTestRunner', 'TestLoader', 'FunctionTestCase', 'main',
'defaultTestLoader', 'SkipTest', 'skip', 'skipIf', 'skipUnless',
'expectedFailure', 'TextTestResult', '__version__', 'collector']
__version__ = '0.5.1'
# Expose obsolete functions for backwards compatibility
__all__.extend(['getTestCaseNames', 'makeSuite', 'findTestCases'])
from unittest2.collector import collector
from unittest2.result import TestResult
from unittest2.case import (
TestCase, FunctionTestCase, SkipTest, skip, skipIf,
skipUnless, expectedFailure
)
from unittest2.suite import BaseTestSuite, TestSuite
from unittest2.loader import (
TestLoader, defaultTestLoader, makeSuite, getTestCaseNames,
findTestCases
)
from unittest2.main import TestProgram, main, main_
from unittest2.runner import TextTestRunner, TextTestResult
try:
from unittest2.signals import (
installHandler, registerResult, removeResult, removeHandler
)
except ImportError:
# Compatibility with platforms that don't have the signal module
pass
else:
__all__.extend(['installHandler', 'registerResult', 'removeResult',
'removeHandler'])
# deprecated
_TextTestResult = TextTestResult
__unittest = True
| gpl-2.0 | 5,880,972,309,938,316,000 | 34.397059 | 76 | 0.752286 | false |
AlperSaltabas/OR_Tools_Google_API | examples/python/volsay.py | 34 | 1857 | # Copyright 2011 Hakan Kjellerstrand [email protected]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Volsay problem in Google or-tools.
From the OPL model volsay.mod
This model was created by Hakan Kjellerstrand ([email protected])
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
from ortools.linear_solver import pywraplp
def main(unused_argv):
# Create the solver.
# using GLPK
solver = pywraplp.Solver('CoinsGridGLPK',
pywraplp.Solver.GLPK_LINEAR_PROGRAMMING)
# Using CLP
# solver = pywraplp.Solver('CoinsGridCLP',
# pywraplp.Solver.CLP_LINEAR_PROGRAMMING)
# data
# declare variables
Gas = solver.NumVar(0, 100000, 'Gas')
Chloride = solver.NumVar(0, 100000, 'Cloride')
#
# constraints
#
solver.Add(Gas + Chloride <= 50)
solver.Add(3 * Gas + 4 * Chloride <= 180)
# objective
objective = solver.Maximize(40 * Gas + 50 * Chloride)
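  # Hand-checked optimum for this tiny LP (vertex of the two constraints):
  # Gas = 20, Chloride = 30, objective = 40*20 + 50*30 = 2300.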
print 'NumConstraints:', solver.NumConstraints()
#
# solution and search
#
solver.Solve()
print
print 'objective = ', solver.Objective().Value()
print 'Gas = ', Gas.SolutionValue(), 'ReducedCost =', Gas.ReducedCost()
print 'Chloride:', Chloride.SolutionValue(), 'ReducedCost =', Chloride.ReducedCost()
if __name__ == '__main__':
main('Volsay')
| apache-2.0 | -7,302,601,027,543,097,000 | 25.913043 | 86 | 0.689284 | false |
tibotic/simple-pokemongo-bot | pokemongo_bot/health_record/bot_event.py | 17 | 2639 | # -*- coding: utf-8 -*-
from time import sleep
import logging
from raven import Client
import raven
import os
import uuid
import requests
import time
class BotEvent(object):
def __init__(self, config):
self.config = config
self.logger = logging.getLogger(__name__)
# UniversalAnalytics can be reviewed here:
# https://github.com/analytics-pros/universal-analytics-python
if self.config.health_record:
self.logger.info('Health check is enabled. For more information:')
self.logger.info('https://github.com/PokemonGoF/PokemonGo-Bot/tree/dev#analytics')
self.client = Client(
dsn='https://8abac56480f34b998813d831de262514:[email protected]/90254',
name='PokemonGof-Bot',
processors = (
'raven.processors.SanitizePasswordsProcessor',
'raven.processors.RemoveStackLocalsProcessor'
),
install_logging_hook = False,
hook_libraries = (),
enable_breadcrumbs = False,
logging = False,
context = {}
)
self.client_id = uuid.uuid4()
self.heartbeat_wait = 30 # seconds
self.last_heartbeat = time.time()
def capture_error(self):
if self.config.health_record:
self.client.captureException()
def login_success(self):
if self.config.health_record:
self.last_heartbeat = time.time()
self.track_url('/loggedin')
def login_failed(self):
if self.config.health_record:
self.track_url('/login')
def login_retry(self):
if self.config.health_record:
self.track_url('/relogin')
def logout(self):
if self.config.health_record:
self.track_url('/logout')
def heartbeat(self):
if self.config.health_record:
current_time = time.time()
if current_time - self.heartbeat_wait > self.last_heartbeat:
self.last_heartbeat = current_time
self.track_url('/heartbeat')
def track_url(self, path):
data = {
'v': '1',
'tid': 'UA-81469507-1',
'aip': '1', # Anonymize IPs
'cid': self.client_id,
't': 'pageview',
'dp': path
}
try:
response = requests.post(
'http://www.google-analytics.com/collect', data=data)
response.raise_for_status()
except requests.exceptions.HTTPError:
pass
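# Hedged usage sketch (assumes a config object exposing `health_record`):
#   event = BotEvent(config)
#   event.login_success()  # tracks '/loggedin' when health records are enabled
#   event.heartbeat()      # rate-limited to one ping per heartbeat_wait seconds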
| mit | 3,042,109,113,533,339,000 | 31.182927 | 120 | 0.562713 | false |
andreasots/qrpth.eu | prism.py | 1 | 2192 | from server import app
import datetime
import psycopg2
import flask
import pytz
import re
TIMEZONE = pytz.timezone("America/Vancouver")
CONN = "postgres:///lrrbot"
def convert_timezone(row):
return (row[0], row[1], TIMEZONE.normalize(row[2].astimezone(TIMEZONE)))
@app.route("/prism/")
def prism():
with psycopg2.connect(CONN) as conn:
with conn.cursor() as cur:
cur.execute("SELECT MIN(time) FROM log")
start = next(iter(cur))[0]
start = TIMEZONE.localize(datetime.datetime(start.year, start.month, start.day))
stop = datetime.datetime.now(TIMEZONE)
stop = TIMEZONE.localize(datetime.datetime(stop.year, stop.month, stop.day, 23, 59, 59)) + datetime.timedelta(seconds=1)
days = []
while start < stop:
days += [(start, start+datetime.timedelta(days=1))]
start += datetime.timedelta(days=1)
return flask.render_template("prism.html", page="prism", days=days)
@app.route("/prism/log")
def logs():
start = datetime.datetime.fromtimestamp(float(flask.request.args["start"]), pytz.utc)
stop = datetime.datetime.fromtimestamp(float(flask.request.args["stop"]), pytz.utc)
with psycopg2.connect(CONN) as conn:
with conn.cursor() as cur:
cur.execute("SELECT id, messagehtml, time FROM log WHERE target = '#loadingreadyrun' AND time BETWEEN %s AND %s ORDER BY TIME", (start, stop))
return flask.render_template("prism-log.html", page="prism", messages=map(convert_timezone, cur))
@app.route("/prism/search")
def search():
query = flask.request.args["q"]
with psycopg2.connect(CONN) as conn:
with conn.cursor() as cur:
cur.execute("""
SELECT id, messagehtml, time, TS_RANK_CD(TO_TSVECTOR('english', message), query) AS rank
FROM log, PLAINTO_TSQUERY('english', %s) query
WHERE TO_TSVECTOR('english', message) @@ query
ORDER BY rank DESC
""", (query, ))
return flask.render_template("prism-search.html", page="prism", messages=map(convert_timezone, cur), query=query)
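# Example request (hypothetical query): GET /prism/search?q=loading+time
# ranks matching #loadingreadyrun log lines via PostgreSQL full-text search.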
| gpl-2.0 | -586,031,914,001,621,900 | 41.153846 | 154 | 0.625456 | false |
Franky333/crazyflie-clients-python | src/cfclient/headless.py | 1 | 6707 | # -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Headless client for the Crazyflie.
"""
import logging
import os
import signal
import sys
import cfclient.utils
import cflib.crtp
from cfclient.utils.input import JoystickReader
from cflib.crazyflie import Crazyflie
if os.name == 'posix':
print('Disabling standard output for libraries!')
stdout = os.dup(1)
os.dup2(os.open('/dev/null', os.O_WRONLY), 1)
sys.stdout = os.fdopen(stdout, 'w')
# set SDL to use the dummy NULL video driver,
# so it doesn't need a windowing system.
os.environ["SDL_VIDEODRIVER"] = "dummy"
class HeadlessClient():
"""Crazyflie headless client"""
def __init__(self):
"""Initialize the headless client and libraries"""
cflib.crtp.init_drivers()
self._jr = JoystickReader(do_device_discovery=False)
self._cf = Crazyflie(ro_cache=None,
rw_cache=cfclient.config_path + "/cache")
signal.signal(signal.SIGINT, signal.SIG_DFL)
self._devs = []
for d in self._jr.available_devices():
self._devs.append(d.name)
def setup_controller(self, input_config, input_device=0, xmode=False):
"""Set up the device reader"""
# Set up the joystick reader
self._jr.device_error.add_callback(self._input_dev_error)
print("Client side X-mode: %s" % xmode)
if (xmode):
self._cf.commander.set_client_xmode(xmode)
devs = self._jr.available_devices() # noqa, is this a bug?
print("Will use [%s] for input" % self._devs[input_device])
self._jr.start_input(self._devs[input_device])
self._jr.set_input_map(self._devs[input_device], input_config)
def controller_connected(self):
""" Return True if a controller is connected"""
return True if (len(self._jr.available_devices()) > 0) else False
def list_controllers(self):
"""List the available controllers and input mapping"""
print("\nAvailable controllers:")
for i, dev in enumerate(self._devs):
print(" - Controller #{}: {}".format(i, dev))
print("\nAvailable input mapping:")
for map in os.listdir(cfclient.config_path + '/input'):
print(" - " + map.split(".json")[0])
def connect_crazyflie(self, link_uri):
"""Connect to a Crazyflie on the given link uri"""
self._cf.connection_failed.add_callback(self._connection_failed)
# 2014-11-25 chad: Add a callback for when we have a good connection.
self._cf.connected.add_callback(self._connected)
self._cf.param.add_update_callback(
group="imu_sensors", name="HMC5883L", cb=(
lambda name, found: self._jr.set_alt_hold_available(
eval(found))))
self._jr.assisted_control_updated.add_callback(
lambda enabled: self._cf.param.set_value("flightmode.althold",
enabled))
self._cf.open_link(link_uri)
self._jr.input_updated.add_callback(self._cf.commander.send_setpoint)
def _connected(self, link):
"""Callback for a successful Crazyflie connection."""
print("Connected to {}".format(link))
def _connection_failed(self, link, message):
"""Callback for a failed Crazyflie connection"""
print("Connection failed on {}: {}".format(link, message))
sys.exit(-1)
def _input_dev_error(self, message):
"""Callback for an input device error"""
print("Error when reading device: {}".format(message))
sys.exit(-1)
def main():
"""Main Crazyflie headless application"""
import argparse
parser = argparse.ArgumentParser(prog="cfheadless")
parser.add_argument("-u", "--uri", action="store", dest="uri", type=str,
default="radio://0/10/250K",
help="URI to use for connection to the Crazyradio"
" dongle, defaults to radio://0/10/250K")
parser.add_argument("-i", "--input", action="store", dest="input",
type=str, default="PS3_Mode_1",
help="Input mapping to use for the controller,"
"defaults to PS3_Mode_1")
parser.add_argument("-d", "--debug", action="store_true", dest="debug",
help="Enable debug output")
parser.add_argument("-c", "--controller", action="store", type=int,
dest="controller", default=0,
help="Use controller with specified id,"
" id defaults to 0")
parser.add_argument("--controllers", action="store_true",
dest="list_controllers",
help="Only display available controllers and exit")
parser.add_argument("-x", "--x-mode", action="store_true",
dest="xmode",
help="Enable client-side X-mode")
(args, unused) = parser.parse_known_args()
if args.debug:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
headless = HeadlessClient()
if (args.list_controllers):
headless.list_controllers()
else:
if headless.controller_connected():
headless.setup_controller(input_config=args.input,
input_device=args.controller,
xmode=args.xmode)
headless.connect_crazyflie(link_uri=args.uri)
else:
print("No input-device connected, exiting!")
if __name__ == "__main__":
main()
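# Example invocation (these values are the documented defaults, shown explicitly):
#   cfheadless -u radio://0/10/250K -i PS3_Mode_1 -c 0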
| gpl-2.0 | 3,634,465,478,972,241,400 | 37.768786 | 77 | 0.580438 | false |
NaturalGIS/naturalgis_qgis | python/plugins/processing/modeler/ModelerScene.py | 33 | 1934 | # -*- coding: utf-8 -*-
"""
***************************************************************************
ModelerScene.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
from qgis.gui import QgsModelGraphicsScene
from processing.modeler.ModelerGraphicItem import (
ModelerInputGraphicItem,
ModelerOutputGraphicItem,
ModelerChildAlgorithmGraphicItem
)
class ModelerScene(QgsModelGraphicsScene):
"""
IMPORTANT! This is intentionally a MINIMAL class, only containing code which HAS TO BE HERE
because it contains Python code for compatibility with deprecated methods ONLY.
Don't add anything here -- edit the c++ base class instead!
"""
def __init__(self, parent=None):
super().__init__(parent)
def createParameterGraphicItem(self, model, param):
return ModelerInputGraphicItem(param.clone(), model)
def createChildAlgGraphicItem(self, model, child):
return ModelerChildAlgorithmGraphicItem(child.clone(), model)
def createOutputGraphicItem(self, model, output):
return ModelerOutputGraphicItem(output.clone(), model)
| gpl-2.0 | -6,975,243,522,019,736,000 | 37.68 | 95 | 0.532058 | false |
rahul67/hue | desktop/core/ext-py/boto-2.38.0/boto/sdb/queryresultset.py | 153 | 3674 | from boto.compat import six
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
def query_lister(domain, query='', max_items=None, attr_names=None):
more_results = True
num_results = 0
next_token = None
while more_results:
rs = domain.connection.query_with_attributes(domain, query, attr_names,
next_token=next_token)
for item in rs:
if max_items:
if num_results == max_items:
raise StopIteration
yield item
num_results += 1
next_token = rs.next_token
more_results = next_token is not None
class QueryResultSet(object):
def __init__(self, domain=None, query='', max_items=None, attr_names=None):
self.max_items = max_items
self.domain = domain
self.query = query
self.attr_names = attr_names
def __iter__(self):
return query_lister(self.domain, self.query, self.max_items, self.attr_names)
def select_lister(domain, query='', max_items=None):
more_results = True
num_results = 0
next_token = None
while more_results:
rs = domain.connection.select(domain, query, next_token=next_token)
for item in rs:
if max_items:
if num_results == max_items:
raise StopIteration
yield item
num_results += 1
next_token = rs.next_token
more_results = next_token is not None
class SelectResultSet(object):
def __init__(self, domain=None, query='', max_items=None,
next_token=None, consistent_read=False):
self.domain = domain
self.query = query
self.consistent_read = consistent_read
self.max_items = max_items
self.next_token = next_token
def __iter__(self):
more_results = True
num_results = 0
while more_results:
rs = self.domain.connection.select(self.domain, self.query,
next_token=self.next_token,
consistent_read=self.consistent_read)
for item in rs:
if self.max_items and num_results >= self.max_items:
raise StopIteration
yield item
num_results += 1
self.next_token = rs.next_token
if self.max_items and num_results >= self.max_items:
raise StopIteration
more_results = self.next_token is not None
def next(self):
return next(self.__iter__())
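# Pagination note: SimpleDB SELECT returns results in pages; each response
# carries rs.next_token, and the iterators above keep requesting pages until
# next_token comes back as None (or max_items is reached).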
| apache-2.0 | 8,177,225,511,681,540,000 | 38.505376 | 85 | 0.615406 | false |
imtapps/django-imt-fork | tests/regressiontests/expressions_regress/tests.py | 46 | 15966 | """
Spanning tests for all the operations that F() expressions can perform.
"""
from __future__ import absolute_import
import datetime
from django.db import connection
from django.db.models import F
from django.test import TestCase, Approximate, skipUnlessDBFeature
from .models import Number, Experiment
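# Background note: F('field') compiles to a column reference in SQL, so a
# single UPDATE or filter can compare and combine two fields of the same row
# without round-tripping their values through Python.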
class ExpressionsRegressTests(TestCase):
def setUp(self):
Number(integer=-1).save()
Number(integer=42).save()
Number(integer=1337).save()
self.assertEqual(Number.objects.update(float=F('integer')), 3)
def test_fill_with_value_from_same_object(self):
"""
We can fill a value in all objects with an other value of the
same object.
"""
self.assertQuerysetEqual(
Number.objects.all(),
[
'<Number: -1, -1.000>',
'<Number: 42, 42.000>',
'<Number: 1337, 1337.000>'
]
)
def test_increment_value(self):
"""
We can increment a value of all objects in a query set.
"""
self.assertEqual(
Number.objects.filter(integer__gt=0)
.update(integer=F('integer') + 1),
2)
self.assertQuerysetEqual(
Number.objects.all(),
[
'<Number: -1, -1.000>',
'<Number: 43, 42.000>',
'<Number: 1338, 1337.000>'
]
)
def test_filter_not_equals_other_field(self):
"""
We can filter for objects, where a value is not equals the value
of an other field.
"""
self.assertEqual(
Number.objects.filter(integer__gt=0)
.update(integer=F('integer') + 1),
2)
self.assertQuerysetEqual(
Number.objects.exclude(float=F('integer')),
[
'<Number: 43, 42.000>',
'<Number: 1338, 1337.000>'
]
)
def test_complex_expressions(self):
"""
Complex expressions of different connection types are possible.
"""
n = Number.objects.create(integer=10, float=123.45)
self.assertEqual(Number.objects.filter(pk=n.pk)
.update(float=F('integer') + F('float') * 2),
1)
self.assertEqual(Number.objects.get(pk=n.pk).integer, 10)
self.assertEqual(Number.objects.get(pk=n.pk).float, Approximate(256.900, places=3))
class ExpressionOperatorTests(TestCase):
def setUp(self):
self.n = Number.objects.create(integer=42, float=15.5)
def test_lefthand_addition(self):
# LH Addition of floats and integers
Number.objects.filter(pk=self.n.pk).update(
integer=F('integer') + 15,
float=F('float') + 42.7
)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3))
def test_lefthand_subtraction(self):
# LH Subtraction of floats and integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer') - 15,
float=F('float') - 42.7)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(-27.200, places=3))
def test_lefthand_multiplication(self):
# Multiplication of floats and integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer') * 15,
float=F('float') * 42.7)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3))
def test_lefthand_division(self):
# LH Division of floats and integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer') / 2,
float=F('float') / 42.7)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 21)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(0.363, places=3))
def test_lefthand_modulo(self):
# LH Modulo arithmetic on integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer') % 20)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 2)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
def test_lefthand_bitwise_and(self):
# LH Bitwise ands on integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer').bitand(56))
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 40)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
@skipUnlessDBFeature('supports_bitwise_or')
def test_lefthand_bitwise_or(self):
# LH Bitwise or on integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer').bitor(48))
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 58)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
def test_right_hand_addition(self):
# Right hand operators
Number.objects.filter(pk=self.n.pk).update(integer=15 + F('integer'),
float=42.7 + F('float'))
# RH Addition of floats and integers
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3))
def test_right_hand_subtraction(self):
Number.objects.filter(pk=self.n.pk).update(integer=15 - F('integer'),
float=42.7 - F('float'))
# RH Subtraction of floats and integers
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, -27)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(27.200, places=3))
def test_right_hand_multiplication(self):
# RH Multiplication of floats and integers
Number.objects.filter(pk=self.n.pk).update(integer=15 * F('integer'),
float=42.7 * F('float'))
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3))
def test_right_hand_division(self):
# RH Division of floats and integers
Number.objects.filter(pk=self.n.pk).update(integer=640 / F('integer'),
float=42.7 / F('float'))
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 15)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(2.755, places=3))
def test_right_hand_modulo(self):
# RH Modulo arithmetic on integers
Number.objects.filter(pk=self.n.pk).update(integer=69 % F('integer'))
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
class FTimeDeltaTests(TestCase):
def setUp(self):
sday = datetime.date(2010, 6, 25)
stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000)
midnight = datetime.time(0)
delta0 = datetime.timedelta(0)
delta1 = datetime.timedelta(microseconds=253000)
delta2 = datetime.timedelta(seconds=44)
delta3 = datetime.timedelta(hours=21, minutes=8)
delta4 = datetime.timedelta(days=10)
# Test data is set so that deltas and delays will be
# strictly increasing.
self.deltas = []
self.delays = []
self.days_long = []
# e0: started same day as assigned, zero duration
end = stime+delta0
e0 = Experiment.objects.create(name='e0', assigned=sday, start=stime,
end=end, completed=end.date())
self.deltas.append(delta0)
self.delays.append(e0.start-
datetime.datetime.combine(e0.assigned, midnight))
self.days_long.append(e0.completed-e0.assigned)
# e1: started one day after assigned, tiny duration, data
# set so that end time has no fractional seconds, which
# tests an edge case on sqlite. This Experiment is only
# included in the test data when the DB supports microsecond
# precision.
if connection.features.supports_microsecond_precision:
delay = datetime.timedelta(1)
end = stime + delay + delta1
e1 = Experiment.objects.create(name='e1', assigned=sday,
start=stime+delay, end=end, completed=end.date())
self.deltas.append(delta1)
self.delays.append(e1.start-
datetime.datetime.combine(e1.assigned, midnight))
self.days_long.append(e1.completed-e1.assigned)
# e2: started three days after assigned, small duration
end = stime+delta2
e2 = Experiment.objects.create(name='e2',
assigned=sday-datetime.timedelta(3), start=stime, end=end,
completed=end.date())
self.deltas.append(delta2)
self.delays.append(e2.start-
datetime.datetime.combine(e2.assigned, midnight))
self.days_long.append(e2.completed-e2.assigned)
# e3: started four days after assigned, medium duration
delay = datetime.timedelta(4)
end = stime + delay + delta3
e3 = Experiment.objects.create(name='e3',
assigned=sday, start=stime+delay, end=end, completed=end.date())
self.deltas.append(delta3)
self.delays.append(e3.start-
datetime.datetime.combine(e3.assigned, midnight))
self.days_long.append(e3.completed-e3.assigned)
# e4: started 10 days after assignment, long duration
end = stime + delta4
e4 = Experiment.objects.create(name='e4',
assigned=sday-datetime.timedelta(10), start=stime, end=end,
completed=end.date())
self.deltas.append(delta4)
self.delays.append(e4.start-
datetime.datetime.combine(e4.assigned, midnight))
self.days_long.append(e4.completed-e4.assigned)
self.expnames = [e.name for e in Experiment.objects.all()]
def test_delta_add(self):
for i in range(len(self.deltas)):
delta = self.deltas[i]
test_set = [e.name for e in
Experiment.objects.filter(end__lt=F('start')+delta)]
self.assertEqual(test_set, self.expnames[:i])
test_set = [e.name for e in
Experiment.objects.filter(end__lte=F('start')+delta)]
self.assertEqual(test_set, self.expnames[:i+1])
def test_delta_subtract(self):
for i in range(len(self.deltas)):
delta = self.deltas[i]
test_set = [e.name for e in
Experiment.objects.filter(start__gt=F('end')-delta)]
self.assertEqual(test_set, self.expnames[:i])
test_set = [e.name for e in
Experiment.objects.filter(start__gte=F('end')-delta)]
self.assertEqual(test_set, self.expnames[:i+1])
def test_exclude(self):
for i in range(len(self.deltas)):
delta = self.deltas[i]
test_set = [e.name for e in
Experiment.objects.exclude(end__lt=F('start')+delta)]
self.assertEqual(test_set, self.expnames[i:])
test_set = [e.name for e in
Experiment.objects.exclude(end__lte=F('start')+delta)]
self.assertEqual(test_set, self.expnames[i+1:])
def test_date_comparison(self):
for i in range(len(self.days_long)):
days = self.days_long[i]
test_set = [e.name for e in
Experiment.objects.filter(completed__lt=F('assigned')+days)]
self.assertEqual(test_set, self.expnames[:i])
test_set = [e.name for e in
Experiment.objects.filter(completed__lte=F('assigned')+days)]
self.assertEqual(test_set, self.expnames[:i+1])
@skipUnlessDBFeature("supports_mixed_date_datetime_comparisons")
def test_mixed_comparisons1(self):
for i in range(len(self.delays)):
delay = self.delays[i]
if not connection.features.supports_microsecond_precision:
delay = datetime.timedelta(delay.days, delay.seconds)
test_set = [e.name for e in
Experiment.objects.filter(assigned__gt=F('start')-delay)]
self.assertEqual(test_set, self.expnames[:i])
test_set = [e.name for e in
Experiment.objects.filter(assigned__gte=F('start')-delay)]
self.assertEqual(test_set, self.expnames[:i+1])
def test_mixed_comparisons2(self):
delays = [datetime.timedelta(delay.days) for delay in self.delays]
for i in range(len(delays)):
delay = delays[i]
test_set = [e.name for e in
Experiment.objects.filter(start__lt=F('assigned')+delay)]
self.assertEqual(test_set, self.expnames[:i])
test_set = [e.name for e in
Experiment.objects.filter(start__lte=F('assigned')+delay+
datetime.timedelta(1))]
self.assertEqual(test_set, self.expnames[:i+1])
def test_delta_update(self):
for i in range(len(self.deltas)):
delta = self.deltas[i]
exps = Experiment.objects.all()
expected_durations = [e.duration() for e in exps]
expected_starts = [e.start+delta for e in exps]
expected_ends = [e.end+delta for e in exps]
Experiment.objects.update(start=F('start')+delta, end=F('end')+delta)
exps = Experiment.objects.all()
new_starts = [e.start for e in exps]
new_ends = [e.end for e in exps]
new_durations = [e.duration() for e in exps]
self.assertEqual(expected_starts, new_starts)
self.assertEqual(expected_ends, new_ends)
self.assertEqual(expected_durations, new_durations)
def test_delta_invalid_op_mult(self):
raised = False
try:
r = repr(Experiment.objects.filter(end__lt=F('start')*self.deltas[0]))
except TypeError:
raised = True
self.assertTrue(raised, "TypeError not raised on attempt to multiply datetime by timedelta.")
def test_delta_invalid_op_div(self):
raised = False
try:
r = repr(Experiment.objects.filter(end__lt=F('start')/self.deltas[0]))
except TypeError:
raised = True
self.assertTrue(raised, "TypeError not raised on attempt to divide datetime by timedelta.")
def test_delta_invalid_op_mod(self):
raised = False
try:
r = repr(Experiment.objects.filter(end__lt=F('start')%self.deltas[0]))
except TypeError:
raised = True
self.assertTrue(raised, "TypeError not raised on attempt to modulo divide datetime by timedelta.")
def test_delta_invalid_op_and(self):
raised = False
try:
r = repr(Experiment.objects.filter(end__lt=F('start').bitand(self.deltas[0])))
except TypeError:
raised = True
self.assertTrue(raised, "TypeError not raised on attempt to binary and a datetime with a timedelta.")
def test_delta_invalid_op_or(self):
raised = False
try:
r = repr(Experiment.objects.filter(end__lt=F('start').bitor(self.deltas[0])))
except TypeError:
raised = True
self.assertTrue(raised, "TypeError not raised on attempt to binary or a datetime with a timedelta.")
| bsd-3-clause | 8,147,960,345,964,412,000 | 40.47013 | 109 | 0.595703 | false |
factorlibre/OCB | addons/note/__openerp__.py | 260 | 2182 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Notes',
'version': '1.0',
'category': 'Tools',
'description': """
This module allows users to create their own notes inside OpenERP
=================================================================
Use notes to write meeting minutes, organize ideas, organize personal todo
lists, etc. Each user manages his own personal Notes. Notes are available to
their authors only, but they can share notes with other users so that several
people can work on the same note in real time. It's very efficient to share
meeting minutes.
Notes can be found in the 'Home' menu.
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com/page/notes',
'summary': 'Sticky notes, Collaborative, Memos',
'sequence': 9,
'depends': [
'mail',
],
'data': [
'security/note_security.xml',
'security/ir.rule.xml',
'security/ir.model.access.csv',
'note_data.xml',
'note_view.xml',
'views/note.xml',
],
'demo': [
'note_demo.xml',
],
'test': [
],
'installable': True,
'application': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -6,735,806,817,518,069,000 | 33.634921 | 78 | 0.591659 | false |
emilroz/openmicroscopy | components/tools/OmeroPy/src/path.py | 1 | 34018 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" path.py - An object representing a path to a file or directory.
Example:
from path import path
d = path('/home/guido/bin')
for f in d.files('*.py'):
f.chmod(0755)
This module requires Python 2.2 or later.
URL: http://www.jorendorff.com/articles/python/path
Author: Jason Orendorff <jason.orendorff\x40gmail\x2ecom> (and others - see the url!)
Date: 9 Mar 2007
+------------------------------------------------------------------------------------+
Added by Colin Blackburn 17 Jun 2009
The above url is dead and development of path.py has ceased. This copy of path.py
is now maintained by the OMERO developers.
Fo reference, the original package is now available from:
http://pypi.python.org/pypi/path.py/2.2
Notes:
* The functionality of the method read_md5() has been removed from this copy.
This is due to the changes in the hash libraries between Python 2.4 and 2.5.
* Added parpath (2009/09/21)
* Improved listdir to handle unreadable directories. See #9156. (2013/03/06)
* Merged changes from https://github.com/jaraco/path.py/commit/bee13d0de4d44599d397b603e1c83dce1ce9818d (2014/04/28)
"""
# TODO
# - Tree-walking functions don't avoid symlink loops. Matt Harrison
# sent me a patch for this.
# - Bug in write_text(). It doesn't support Universal newline mode.
# - Better error message in listdir() when self isn't a
# directory. (On Windows, the error message really sucks.)
# - Make sure everything has a good docstring.
# - Add methods for regex find and replace.
# - guess_content_type() method?
# - Perhaps support arguments to touch().
from __future__ import generators
import sys, warnings, os, fnmatch, glob, shutil, codecs
__version__ = '2.2'
__all__ = ['path']
# Platform-specific support for path.owner
if os.name == 'nt':
try:
import win32security
except ImportError:
win32security = None
else:
try:
import pwd
except ImportError:
pwd = None
# Pre-2.3 support. Are unicode filenames supported?
_base = str
_getcwd = os.getcwd
try:
if os.path.supports_unicode_filenames:
_base = unicode
_getcwd = os.getcwdu
except AttributeError:
pass
# Pre-2.3 workaround for booleans
try:
True, False
except NameError:
True, False = 1, 0
# Pre-2.3 workaround for basestring.
try:
basestring
except NameError:
basestring = (str, unicode)
# Universal newline support
_textmode = 'r'
if hasattr(file, 'newlines'):
_textmode = 'U'
class TreeWalkWarning(Warning):
pass
class path(_base):
""" Represents a filesystem path.
For documentation on individual methods, consult their
counterparts in os.path.
"""
# --- Special Python methods.
def __repr__(self):
return 'path(%s)' % _base.__repr__(self)
# Adding a path and a string yields a path.
def __add__(self, more):
try:
resultStr = _base.__add__(self, more)
        except TypeError:  # Python bug
resultStr = NotImplemented
if resultStr is NotImplemented:
return resultStr
return self.__class__(resultStr)
def __radd__(self, other):
if isinstance(other, basestring):
return self.__class__(other.__add__(self))
else:
return NotImplemented
# The / operator joins paths.
def __div__(self, rel):
""" fp.__div__(rel) == fp / rel == fp.joinpath(rel)
Join two path components, adding a separator character if
needed.
"""
return self.__class__(os.path.join(self, rel))
# Make the / operator work even when true division is enabled.
__truediv__ = __div__
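    # Example: path('/usr') / 'local' / 'bin' == path('/usr/local/bin')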
def getcwd(cls):
""" Return the current working directory as a path object. """
return cls(_getcwd())
getcwd = classmethod(getcwd)
# --- Operations on path strings.
isabs = os.path.isabs
def abspath(self): return self.__class__(os.path.abspath(self))
def normcase(self): return self.__class__(os.path.normcase(self))
def normpath(self): return self.__class__(os.path.normpath(self))
def realpath(self): return self.__class__(os.path.realpath(self))
def expanduser(self): return self.__class__(os.path.expanduser(self))
def expandvars(self): return self.__class__(os.path.expandvars(self))
def dirname(self): return self.__class__(os.path.dirname(self))
basename = os.path.basename
def expand(self):
""" Clean up a filename by calling expandvars(),
expanduser(), and normpath() on it.
This is commonly everything needed to clean up a filename
read from a configuration file, for example.
"""
return self.expandvars().expanduser().normpath()
def _get_namebase(self):
base, ext = os.path.splitext(self.name)
return base
def _get_ext(self):
f, ext = os.path.splitext(_base(self))
return ext
def _get_drive(self):
drive, r = os.path.splitdrive(self)
return self.__class__(drive)
parent = property(
dirname, None, None,
""" This path's parent directory, as a new path object.
For example, path('/usr/local/lib/libpython.so').parent == path('/usr/local/lib')
""")
name = property(
basename, None, None,
""" The name of this file or directory without the full path.
For example, path('/usr/local/lib/libpython.so').name == 'libpython.so'
""")
namebase = property(
_get_namebase, None, None,
""" The same as path.name, but with one file extension stripped off.
For example, path('/home/guido/python.tar.gz').name == 'python.tar.gz',
but path('/home/guido/python.tar.gz').namebase == 'python.tar'
""")
ext = property(
_get_ext, None, None,
""" The file extension, for example '.py'. """)
drive = property(
_get_drive, None, None,
""" The drive specifier, for example 'C:'.
This is always empty on systems that don't use drive specifiers.
""")
def splitpath(self):
""" p.splitpath() -> Return (p.parent, p.name). """
parent, child = os.path.split(self)
return self.__class__(parent), child
def splitdrive(self):
""" p.splitdrive() -> Return (p.drive, <the rest of p>).
Split the drive specifier from this path. If there is
no drive specifier, p.drive is empty, so the return value
is simply (path(''), p). This is always the case on Unix.
"""
drive, rel = os.path.splitdrive(self)
return self.__class__(drive), rel
def splitext(self):
""" p.splitext() -> Return (p.stripext(), p.ext).
Split the filename extension from this path and return
the two parts. Either part may be empty.
The extension is everything from '.' to the end of the
last path segment. This has the property that if
(a, b) == p.splitext(), then a + b == p.
"""
filename, ext = os.path.splitext(self)
return self.__class__(filename), ext
def stripext(self):
""" p.stripext() -> Remove one file extension from the path.
For example, path('/home/guido/python.tar.gz').stripext()
returns path('/home/guido/python.tar').
"""
return self.splitext()[0]
if hasattr(os.path, 'splitunc'):
def splitunc(self):
unc, rest = os.path.splitunc(self)
return self.__class__(unc), rest
def _get_uncshare(self):
unc, r = os.path.splitunc(self)
return self.__class__(unc)
uncshare = property(
_get_uncshare, None, None,
""" The UNC mount point for this path.
This is empty for paths on local drives. """)
def joinpath(self, *args):
""" Join two or more path components, adding a separator
character (os.sep) if needed. Returns a new path
object.
"""
return self.__class__(os.path.join(self, *args))
def splitall(self):
r""" Return a list of the path components in this path.
The first item in the list will be a path. Its value will be
either os.curdir, os.pardir, empty, or the root directory of
this path (for example, '/' or 'C:\\'). The other items in
the list will be strings.
path.path.joinpath(*result) will yield the original path.
"""
parts = []
loc = self
while loc != os.curdir and loc != os.pardir:
prev = loc
loc, child = prev.splitpath()
if loc == prev:
break
parts.append(child)
parts.append(loc)
parts.reverse()
return parts
def relpath(self):
""" Return this path as a relative path,
based from the current working directory.
"""
cwd = self.__class__(os.getcwd())
return cwd.relpathto(self)
def parpath(self, dest):
"""
        Return the list of parent-directory components needed to move up
        the directory hierarchy from the current path to the dest path.
        An empty list is returned if dest is not a parent of (or equal
        to) this path. This method can be used to test if one path
        object is contained within another:
p1 = path.path("/tmp")
p2 = path.path("/tmp/foo")
if p2.parpath(p1):
print "p2 is contained in p1"
This method does not follow symlinks.
"""
dest = self.__class__(dest)
rel = self.relpathto(dest)
dots = rel.split(os.sep)
for dot in dots:
if os.pardir != str(dot) and os.curdir != str(dot):
return []
return dots
def relpathto(self, dest):
""" Return a relative path from self to dest.
If there is no relative path from self to dest, for example if
they reside on different drives in Windows, then this returns
dest.abspath().
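        Example (illustrative): path('/home/u').relpathto('/home/u/a/b')
        returns path('a/b').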
"""
origin = self.abspath()
dest = self.__class__(dest).abspath()
orig_list = origin.normcase().splitall()
# Don't normcase dest! We want to preserve the case.
dest_list = dest.splitall()
if orig_list[0] != os.path.normcase(dest_list[0]):
# Can't get here from there.
return dest
# Find the location where the two paths start to differ.
i = 0
for start_seg, dest_seg in zip(orig_list, dest_list):
if start_seg != os.path.normcase(dest_seg):
break
i += 1
# Now i is the point where the two paths diverge.
# Need a certain number of "os.pardir"s to work up
# from the origin to the point of divergence.
segments = [os.pardir] * (len(orig_list) - i)
# Need to add the diverging part of dest_list.
segments += dest_list[i:]
if len(segments) == 0:
# If they happen to be identical, use os.curdir.
relpath = os.curdir
else:
relpath = os.path.join(*segments)
return self.__class__(relpath)
# --- Listing, searching, walking, and matching
def listdir(self, pattern=None):
""" D.listdir() -> List of items in this directory.
Use D.files() or D.dirs() instead if you want a listing
of just files or just subdirectories.
The elements of the list are path objects.
With the optional 'pattern' argument, this only lists
items whose names match the given pattern.
try/except added to handle unreadable directories. See #9156.
"""
try:
names = os.listdir(self)
except OSError:
# ignore unreadable directories
names = []
if pattern is not None:
names = fnmatch.filter(names, pattern)
return [self / child for child in names]
def dirs(self, pattern=None):
""" D.dirs() -> List of this directory's subdirectories.
The elements of the list are path objects.
This does not walk recursively into subdirectories
(but see path.walkdirs).
With the optional 'pattern' argument, this only lists
directories whose names match the given pattern. For
example, d.dirs('build-*').
"""
return [p for p in self.listdir(pattern) if p.isdir()]
def files(self, pattern=None):
""" D.files() -> List of the files in this directory.
The elements of the list are path objects.
This does not walk into subdirectories (see path.walkfiles).
With the optional 'pattern' argument, this only lists files
whose names match the given pattern. For example,
d.files('*.pyc').
"""
return [p for p in self.listdir(pattern) if p.isfile()]
def walk(self, pattern=None, errors='strict'):
""" D.walk() -> iterator over files and subdirs, recursively.
The iterator yields path objects naming each child item of
this directory and its descendants. This requires that
D.isdir().
This performs a depth-first traversal of the directory tree.
Each directory is returned just before all its children.
The errors= keyword argument controls behavior when an
error occurs. The default is 'strict', which causes an
exception. The other allowed values are 'warn', which
reports the error via warnings.warn(), and 'ignore'.
"""
if errors not in ('strict', 'warn', 'ignore'):
raise ValueError("invalid errors parameter")
try:
childList = self.listdir()
except Exception:
if errors == 'ignore':
return
elif errors == 'warn':
warnings.warn(
"Unable to list directory '%s': %s"
% (self, sys.exc_info()[1]),
TreeWalkWarning)
return
else:
raise
for child in childList:
if pattern is None or child.fnmatch(pattern):
yield child
try:
isdir = child.isdir()
except Exception:
if errors == 'ignore':
isdir = False
elif errors == 'warn':
warnings.warn(
"Unable to access '%s': %s"
% (child, sys.exc_info()[1]),
TreeWalkWarning)
isdir = False
else:
raise
if isdir:
for item in child.walk(pattern, errors):
yield item
def walkdirs(self, pattern=None, errors='strict'):
""" D.walkdirs() -> iterator over subdirs, recursively.
With the optional 'pattern' argument, this yields only
directories whose names match the given pattern. For
example, mydir.walkdirs('*test') yields only directories
with names ending in 'test'.
The errors= keyword argument controls behavior when an
error occurs. The default is 'strict', which causes an
exception. The other allowed values are 'warn', which
reports the error via warnings.warn(), and 'ignore'.
"""
if errors not in ('strict', 'warn', 'ignore'):
raise ValueError("invalid errors parameter")
try:
dirs = self.dirs()
except Exception:
if errors == 'ignore':
return
elif errors == 'warn':
warnings.warn(
"Unable to list directory '%s': %s"
% (self, sys.exc_info()[1]),
TreeWalkWarning)
return
else:
raise
for child in dirs:
if pattern is None or child.fnmatch(pattern):
yield child
for subsubdir in child.walkdirs(pattern, errors):
yield subsubdir
def walkfiles(self, pattern=None, errors='strict'):
""" D.walkfiles() -> iterator over files in D, recursively.
The optional argument, pattern, limits the results to files
with names that match the pattern. For example,
mydir.walkfiles('*.tmp') yields only files with the .tmp
extension.
"""
if errors not in ('strict', 'warn', 'ignore'):
raise ValueError("invalid errors parameter")
try:
childList = self.listdir()
except Exception:
if errors == 'ignore':
return
elif errors == 'warn':
warnings.warn(
"Unable to list directory '%s': %s"
% (self, sys.exc_info()[1]),
TreeWalkWarning)
return
else:
raise
for child in childList:
try:
isfile = child.isfile()
isdir = not isfile and child.isdir()
            except Exception:
if errors == 'ignore':
continue
elif errors == 'warn':
warnings.warn(
"Unable to access '%s': %s"
% (self, sys.exc_info()[1]),
TreeWalkWarning)
continue
else:
raise
if isfile:
if pattern is None or child.fnmatch(pattern):
yield child
elif isdir:
for f in child.walkfiles(pattern, errors):
yield f
def fnmatch(self, pattern):
""" Return True if self.name matches the given pattern.
pattern - A filename pattern with wildcards,
for example '*.py'.
"""
return fnmatch.fnmatch(self.name, pattern)
def glob(self, pattern):
""" Return a list of path objects that match the pattern.
pattern - a path relative to this directory, with wildcards.
For example, path('/users').glob('*/bin/*') returns a list
of all the files users have in their bin directories.
"""
cls = self.__class__
return [cls(s) for s in glob.glob(_base(self / pattern))]
# --- Reading or writing an entire file at once.
def open(self, mode='r'):
""" Open this file. Return a file object. """
return file(self, mode)
def bytes(self):
""" Open this file, read all bytes, return them as a string. """
f = self.open('rb')
try:
return f.read()
finally:
f.close()
def write_bytes(self, bytes, append=False):
""" Open this file and write the given bytes to it.
Default behavior is to overwrite any existing file.
Call p.write_bytes(bytes, append=True) to append instead.
"""
if append:
mode = 'ab'
else:
mode = 'wb'
f = self.open(mode)
try:
f.write(bytes)
finally:
f.close()
def text(self, encoding=None, errors='strict'):
r""" Open this file, read it in, return the content as a string.
This uses 'U' mode in Python 2.3 and later, so '\r\n' and '\r'
are automatically translated to '\n'.
Optional arguments:
encoding - The Unicode encoding (or character set) of
the file. If present, the content of the file is
decoded and returned as a unicode object; otherwise
it is returned as an 8-bit str.
errors - How to handle Unicode errors; see help(str.decode)
for the options. Default is 'strict'.
"""
if encoding is None:
# 8-bit
f = self.open(_textmode)
try:
return f.read()
finally:
f.close()
else:
# Unicode
f = codecs.open(self, 'r', encoding, errors)
# (Note - Can't use 'U' mode here, since codecs.open
# doesn't support 'U' mode, even in Python 2.3.)
try:
t = f.read()
finally:
f.close()
return (t.replace(u'\r\n', u'\n')
.replace(u'\r\x85', u'\n')
.replace(u'\r', u'\n')
.replace(u'\x85', u'\n')
.replace(u'\u2028', u'\n'))
def write_text(self, text, encoding=None, errors='strict', linesep=os.linesep, append=False):
r""" Write the given text to this file.
The default behavior is to overwrite any existing file;
to append instead, use the 'append=True' keyword argument.
There are two differences between path.write_text() and
path.write_bytes(): newline handling and Unicode handling.
See below.
Parameters:
- text - str/unicode - The text to be written.
- encoding - str - The Unicode encoding that will be used.
This is ignored if 'text' isn't a Unicode string.
- errors - str - How to handle Unicode encoding errors.
Default is 'strict'. See help(unicode.encode) for the
options. This is ignored if 'text' isn't a Unicode
string.
- linesep - keyword argument - str/unicode - The sequence of
characters to be used to mark end-of-line. The default is
os.linesep. You can also specify None; this means to
leave all newlines as they are in 'text'.
- append - keyword argument - bool - Specifies what to do if
the file already exists (True: append to the end of it;
False: overwrite it.) The default is False.
--- Newline handling.
write_text() converts all standard end-of-line sequences
('\n', '\r', and '\r\n') to your platform's default end-of-line
sequence (see os.linesep; on Windows, for example, the
end-of-line marker is '\r\n').
If you don't like your platform's default, you can override it
using the 'linesep=' keyword argument. If you specifically want
write_text() to preserve the newlines as-is, use 'linesep=None'.
This applies to Unicode text the same as to 8-bit text, except
there are three additional standard Unicode end-of-line sequences:
u'\x85', u'\r\x85', and u'\u2028'.
(This is slightly different from when you open a file for
writing with fopen(filename, "w") in C or file(filename, 'w')
in Python.)
--- Unicode
If 'text' isn't Unicode, then apart from newline handling, the
bytes are written verbatim to the file. The 'encoding' and
'errors' arguments are not used and must be omitted.
If 'text' is Unicode, it is first converted to bytes using the
specified 'encoding' (or the default encoding if 'encoding'
isn't specified). The 'errors' argument applies only to this
conversion.
"""
if isinstance(text, unicode):
if linesep is not None:
# Convert all standard end-of-line sequences to
# ordinary newline characters.
text = (text.replace(u'\r\n', u'\n')
.replace(u'\r\x85', u'\n')
.replace(u'\r', u'\n')
.replace(u'\x85', u'\n')
.replace(u'\u2028', u'\n'))
text = text.replace(u'\n', linesep)
if encoding is None:
encoding = sys.getdefaultencoding()
bytes = text.encode(encoding, errors)
else:
# It is an error to specify an encoding if 'text' is
# an 8-bit string.
assert encoding is None
if linesep is not None:
text = (text.replace('\r\n', '\n')
.replace('\r', '\n'))
bytes = text.replace('\n', linesep)
self.write_bytes(bytes, append)
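    # Minimal sketch of the newline/encoding pipeline (assumes a writable
    # working directory; 'out.txt' is a hypothetical target):
    #   p = path('out.txt')
    #   p.write_text(u'a\r\nb\u2028c', encoding='utf-8')
    #   # -> both endings collapse to '\n', are rewritten as os.linesep,
    #   #    and only then is the text encoded and handed to write_bytes().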
def lines(self, encoding=None, errors='strict', retain=True):
r""" Open this file, read all lines, return them in a list.
Optional arguments:
encoding - The Unicode encoding (or character set) of
the file. The default is None, meaning the content
of the file is read as 8-bit characters and returned
as a list of (non-Unicode) str objects.
errors - How to handle Unicode errors; see help(str.decode)
            for the options. Default is 'strict'.
retain - If true, retain newline characters; but all newline
character combinations ('\r', '\n', '\r\n') are
translated to '\n'. If false, newline characters are
stripped off. Default is True.
This uses 'U' mode in Python 2.3 and later.
"""
if encoding is None and retain:
f = self.open(_textmode)
try:
return f.readlines()
finally:
f.close()
else:
return self.text(encoding, errors).splitlines(retain)
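    # Sketch: lines(retain=True) keeps one normalized '\n' per element, while
    # lines(retain=False) hands back the elements with line endings stripped.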
def write_lines(self, lines, encoding=None, errors='strict',
linesep=os.linesep, append=False):
r""" Write the given lines of text to this file.
By default this overwrites any existing file at this path.
This puts a platform-specific newline sequence on every line.
See 'linesep' below.
lines - A list of strings.
encoding - A Unicode encoding to use. This applies only if
'lines' contains any Unicode strings.
errors - How to handle errors in Unicode encoding. This
also applies only to Unicode strings.
linesep - The desired line-ending. This line-ending is
applied to every line. If a line already has any
standard line ending ('\r', '\n', '\r\n', u'\x85',
u'\r\x85', u'\u2028'), that will be stripped off and
this will be used instead. The default is os.linesep,
which is platform-dependent ('\r\n' on Windows, '\n' on
Unix, etc.) Specify None to write the lines as-is,
like file.writelines().
Use the keyword argument append=True to append lines to the
file. The default is to overwrite the file. Warning:
When you use this with Unicode data, if the encoding of the
existing data in the file is different from the encoding
you specify with the encoding= parameter, the result is
mixed-encoding data, which can really confuse someone trying
to read the file later.
"""
if append:
mode = 'ab'
else:
mode = 'wb'
f = self.open(mode)
try:
for line in lines:
isUnicode = isinstance(line, unicode)
if linesep is not None:
# Strip off any existing line-end and add the
# specified linesep string.
if isUnicode:
if line[-2:] in (u'\r\n', u'\x0d\x85'):
line = line[:-2]
elif line[-1:] in (u'\r', u'\n',
u'\x85', u'\u2028'):
line = line[:-1]
else:
if line[-2:] == '\r\n':
line = line[:-2]
elif line[-1:] in ('\r', '\n'):
line = line[:-1]
line += linesep
if isUnicode:
if encoding is None:
encoding = sys.getdefaultencoding()
line = line.encode(encoding, errors)
f.write(line)
finally:
f.close()
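    # Round-trip sketch (illustrative; 'log.txt' is a hypothetical file):
    #   p = path('log.txt')
    #   p.write_lines(['one\r\n', 'two'], linesep='\n')
    #   assert p.bytes() == 'one\ntwo\n'  # old endings stripped, '\n' appended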
def read_md5(self):
""" Calculate the md5 hash for this file.
This reads through the entire file.
"""
raise Exception("read_md5 has been removed from this installation of path.py")
# --- Methods for querying the filesystem.
# N.B. On some platforms, the os.path functions may be implemented in C
# (e.g. isdir on Windows, Python 3.2.2), and compiled functions don't get
# bound. Playing it safe and wrapping them all in method calls.
def isabs(self): return os.path.isabs(self)
def exists(self): return os.path.exists(self)
def isdir(self): return os.path.isdir(self)
def isfile(self): return os.path.isfile(self)
def islink(self): return os.path.islink(self)
def ismount(self): return os.path.ismount(self)
if hasattr(os.path, 'samefile'):
        def samefile(self, other): return os.path.samefile(self, other)
def getatime(self): return os.path.getatime(self)
atime = property(
getatime, None, None,
""" Last access time of the file. """)
def getmtime(self): return os.path.getmtime(self)
mtime = property(
getmtime, None, None,
""" Last-modified time of the file. """)
if hasattr(os.path, 'getctime'):
def getctime(self): return os.path.getctime(self)
ctime = property(
getctime, None, None,
""" Creation time of the file. """)
def getsize(self): return os.path.getsize(self)
size = property(
getsize, None, None,
""" Size of the file, in bytes. """)
if hasattr(os, 'access'):
def access(self, mode):
""" Return true if current user has access to this path.
mode - One of the constants os.F_OK, os.R_OK, os.W_OK, os.X_OK
"""
return os.access(self, mode)
def stat(self):
""" Perform a stat() system call on this path. """
return os.stat(self)
def lstat(self):
""" Like path.stat(), but do not follow symbolic links. """
return os.lstat(self)
def get_owner(self):
r""" Return the name of the owner of this file or directory.
This follows symbolic links.
On Windows, this returns a name of the form ur'DOMAIN\User Name'.
On Windows, a group can own a file or directory.
"""
if os.name == 'nt':
if win32security is None:
raise Exception("path.owner requires win32all to be installed")
desc = win32security.GetFileSecurity(
self, win32security.OWNER_SECURITY_INFORMATION)
sid = desc.GetSecurityDescriptorOwner()
account, domain, typecode = win32security.LookupAccountSid(None, sid)
return domain + u'\\' + account
else:
if pwd is None:
raise NotImplementedError("path.owner is not implemented on this platform.")
st = self.stat()
return pwd.getpwuid(st.st_uid).pw_name
owner = property(
get_owner, None, None,
""" Name of the owner of this file or directory. """)
if hasattr(os, 'statvfs'):
def statvfs(self):
""" Perform a statvfs() system call on this path. """
return os.statvfs(self)
if hasattr(os, 'pathconf'):
def pathconf(self, name):
return os.pathconf(self, name)
# --- Modifying operations on files and directories
def utime(self, times):
""" Set the access and modified times of this file. """
os.utime(self, times)
def chmod(self, mode):
os.chmod(self, mode)
if hasattr(os, 'chown'):
def chown(self, uid, gid):
os.chown(self, uid, gid)
def rename(self, new):
os.rename(self, new)
def renames(self, new):
os.renames(self, new)
# --- Create/delete operations on directories
def mkdir(self, mode=0777):
os.mkdir(self, mode)
def makedirs(self, mode=0777):
os.makedirs(self, mode)
def rmdir(self):
os.rmdir(self)
def removedirs(self):
os.removedirs(self)
# --- Modifying operations on files
def touch(self):
""" Set the access/modified times of this file to the current time.
Create the file if it does not exist.
"""
fd = os.open(self, os.O_WRONLY | os.O_CREAT, 0666)
os.close(fd)
os.utime(self, None)
def remove(self):
os.remove(self)
def unlink(self):
os.unlink(self)
# --- Links
if hasattr(os, 'link'):
def link(self, newpath):
""" Create a hard link at 'newpath', pointing to this file. """
os.link(self, newpath)
if hasattr(os, 'symlink'):
def symlink(self, newlink):
""" Create a symbolic link at 'newlink', pointing here. """
os.symlink(self, newlink)
if hasattr(os, 'readlink'):
def readlink(self):
""" Return the path to which this symbolic link points.
The result may be an absolute or a relative path.
"""
return self.__class__(os.readlink(self))
def readlinkabs(self):
""" Return the path to which this symbolic link points.
The result is always an absolute path.
"""
p = self.readlink()
if p.isabs():
return p
else:
return (self.parent / p).abspath()
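    # Sketch (POSIX only): for a link /tmp/dir/link whose target is '../data',
    # readlink() returns path('../data') while readlinkabs() resolves it
    # against the link's parent and returns path('/tmp/data').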
# --- High-level functions from shutil
copyfile = shutil.copyfile
copymode = shutil.copymode
copystat = shutil.copystat
copy = shutil.copy
copy2 = shutil.copy2
copytree = shutil.copytree
if hasattr(shutil, 'move'):
move = shutil.move
rmtree = shutil.rmtree
# --- Special stuff from os
if hasattr(os, 'chroot'):
def chroot(self):
os.chroot(self)
if hasattr(os, 'startfile'):
def startfile(self):
os.startfile(self)
| gpl-2.0 | 1,163,964,049,289,666,300 | 32.515271 | 117 | 0.558146 | false |
cloud9UG/odoo | doc/conf.py | 184 | 8222 | # -*- coding: utf-8 -*-
import sys, os
import sphinx
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
DIR = os.path.dirname(__file__)
sys.path.append(
os.path.abspath(
os.path.join(DIR, '_extensions')))
# autodoc
sys.path.append(os.path.abspath(os.path.join(DIR, '..')))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.2'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.ifconfig',
'sphinx.ext.todo',
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.linkcode',
'github_link',
'odoo',
'html_domain',
'exercise_admonition',
'patchqueue'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'odoo'
copyright = u'Odoo S.A.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '8.0'
# The full version, including alpha/beta/rc tags.
release = '8.0'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'odoo'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'odoo'
odoo_cover_default = 'banners/installing_odoo.jpg'
odoo_cover_external = {
'https://odoo.com/documentation/functional/accounting.html' : 'banners/m_accounting.jpg',
'https://odoo.com/documentation/functional/double-entry.html' : 'banners/m_1.jpg',
'https://odoo.com/documentation/functional/valuation.html' : 'banners/m_2.jpg',
}
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_extensions']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_add_permalinks = u''
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# FIXME: no sidebar on index?
html_sidebars = {
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
latex_elements = {
'papersize': r'a4paper',
'preamble': u'''\\setcounter{tocdepth}{2}
''',
}
# default must be set otherwise ifconfig blows up
todo_include_todos = False
intersphinx_mapping = {
'python': ('https://docs.python.org/2/', None),
'werkzeug': ('http://werkzeug.pocoo.org/docs/', None),
'sqlalchemy': ('http://docs.sqlalchemy.org/en/rel_0_9/', None),
'django': ('https://django.readthedocs.org/en/latest/', None),
}
github_user = 'odoo'
github_project = 'odoo'
# monkeypatch PHP lexer to not require <?php
from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer
lexers['php'] = PhpLexer(startinline=True)
def setup(app):
app.connect('html-page-context', canonicalize)
app.add_config_value('canonical_root', None, 'env')
app.add_config_value('canonical_branch', 'master', 'env')
app.connect('html-page-context', versionize)
app.add_config_value('versions', '', 'env')
app.connect('html-page-context', analytics)
app.add_config_value('google_analytics_key', '', 'env')
def canonicalize(app, pagename, templatename, context, doctree):
""" Adds a 'canonical' URL for the current document in the rendering
context. Requires the ``canonical_root`` setting being set. The canonical
branch is ``master`` but can be overridden using ``canonical_branch``.
"""
if not app.config.canonical_root:
return
context['canonical'] = _build_url(
app.config.canonical_root, app.config.canonical_branch, pagename)
def versionize(app, pagename, templatename, context, doctree):
""" Adds a version switcher below the menu, requires ``canonical_root``
and ``versions`` (an ordered, space-separated lists of all possible
versions).
"""
if not (app.config.canonical_root and app.config.versions):
return
context['versions'] = [
(vs, _build_url(app.config.canonical_root, vs, pagename))
for vs in app.config.versions.split(',')
if vs != app.config.version
]
def analytics(app, pagename, templatename, context, doctree):
if not app.config.google_analytics_key:
return
context['google_analytics_key'] = app.config.google_analytics_key
def _build_url(root, branch, pagename):
return "{canonical_url}{canonical_branch}/{canonical_page}".format(
canonical_url=root,
canonical_branch=branch,
canonical_page=(pagename + '.html').replace('index.html', '')
.replace('index/', ''),
)
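# Illustrative result (hypothetical root): with canonical_root set to
# 'https://www.odoo.com/documentation/' and the default 'master' branch,
# _build_url(root, 'master', 'reference/orm') yields
# 'https://www.odoo.com/documentation/master/reference/orm.html', while any
# 'index' page collapses to the bare directory URL.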
| agpl-3.0 | 8,444,170,709,892,893,000 | 32.287449 | 95 | 0.693262 | false |
dwfreed/mitmproxy | test/mitmproxy/console/test_pathedit.py | 4 | 2245 | import os
from os.path import normpath
from mitmproxy.tools.console import pathedit
from mitmproxy.test import tutils
from unittest.mock import patch
class TestPathCompleter:
def test_lookup_construction(self):
c = pathedit._PathCompleter()
cd = os.path.normpath(tutils.test_data.path("mitmproxy/completion"))
ca = os.path.join(cd, "a")
assert c.complete(ca).endswith(normpath("/completion/aaa"))
assert c.complete(ca).endswith(normpath("/completion/aab"))
c.reset()
ca = os.path.join(cd, "aaa")
assert c.complete(ca).endswith(normpath("/completion/aaa"))
assert c.complete(ca).endswith(normpath("/completion/aaa"))
c.reset()
assert c.complete(cd).endswith(normpath("/completion/aaa"))
def test_completion(self):
c = pathedit._PathCompleter(True)
c.reset()
c.lookup = [
("a", "x/a"),
("aa", "x/aa"),
]
assert c.complete("a") == "a"
assert c.final == "x/a"
assert c.complete("a") == "aa"
assert c.complete("a") == "a"
c = pathedit._PathCompleter(True)
r = c.complete("l")
assert c.final.endswith(r)
c.reset()
assert c.complete("/nonexistent") == "/nonexistent"
assert c.final == "/nonexistent"
c.reset()
assert c.complete("~") != "~"
c.reset()
s = "thisisatotallynonexistantpathforsure"
assert c.complete(s) == s
assert c.final == s
class TestPathEdit:
def test_keypress(self):
pe = pathedit.PathEdit("", "")
with patch('urwid.widget.Edit.get_edit_text') as get_text, \
patch('urwid.widget.Edit.set_edit_text') as set_text:
cd = os.path.normpath(tutils.test_data.path("mitmproxy/completion"))
get_text.return_value = os.path.join(cd, "a")
# Pressing tab should set completed path
pe.keypress((1,), "tab")
set_text_called_with = set_text.call_args[0][0]
assert set_text_called_with.endswith(normpath("/completion/aaa"))
# Pressing any other key should reset
pe.keypress((1,), "a")
assert pe.lookup is None
| mit | -6,447,890,245,607,143,000 | 30.180556 | 80 | 0.577283 | false |
veltzer/demos-python | src/examples/short/parsing/math_expression.py | 1 | 1056 | #!/usr/bin/env python
import pprint
import sys
import simpleparse.dispatchprocessor
import simpleparse.parser
declaration = r'''# note use of raw string when embedding in python code...
full := ws,expr,ws
number := [0-9eE+.-]+
expr := number,'+',number/number,'-',number
ws := [ \t\v]*
'''
class MyProcessorClass(simpleparse.dispatchprocessor.DispatchProcessor):
# def __init__(self):
# print('cons')
    def number(self, tup, buf):
        '''Process the given production and its children'''
        print('in number')
    def expr(self, tup, buf):
        '''Process the given production and its children'''
        print('in expr')
def __call__(self, value, data):
print('value is ' + str(value))
print('data is ' + str(data))
return value
# return super(self.__class__,self).__call__(self,value,data)
class MyParser(simpleparse.parser.Parser):
def buildProcessor(self):
return MyProcessorClass()
parser = MyParser(declaration, 'full')
pprint.pprint(parser.parse(sys.argv[1]))
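# Invocation sketch (hypothetical input): `python math_expression.py "1+2"`.
# parse() is expected to return the usual simpleparse triple
# (success, result_trees, chars_consumed), which pprint then dumps.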
| gpl-3.0 | -2,012,229,150,798,002,200 | 24.756098 | 75 | 0.617424 | false |
sfepy/sfepy | examples/linear_elasticity/dispersion_analysis.py | 2 | 35004 | #!/usr/bin/env python
"""
Dispersion analysis of a heterogeneous finite scale periodic cell.
The periodic cell mesh has to contain two subdomains Y1 (with the cell ids 1),
Y2 (with the cell ids 2), so that different material properties can be defined
in each of the subdomains (see ``--pars`` option). The command line parameters
can be given in any consistent unit set, for example the basic SI units. The
``--unit-multipliers`` option can be used to rescale the input units to ones
more suitable to the simulation, for example to prevent having different
matrix blocks with large differences of matrix entries magnitudes. The results
are then in the rescaled units.
Usage Examples
--------------
Default material parameters, a square periodic cell with a spherical inclusion,
logs also standard pressure dilatation and shear waves, no eigenvectors::
python examples/linear_elasticity/dispersion_analysis.py meshes/2d/special/circle_in_square.mesh --log-std-waves --eigs-only
As above, with custom eigenvalue solver parameters, and different number of
eigenvalues, mesh size and units used in the calculation::
python examples/linear_elasticity/dispersion_analysis.py meshes/2d/special/circle_in_square.mesh --solver-conf="kind='eig.scipy', method='eigsh', tol=1e-10, maxiter=1000, which='LM', sigma=0" --log-std-waves -n 5 --range=0,640,101 --mode=omega --unit-multipliers=1e-6,1e-2,1e-3 --mesh-size=1e-2 --eigs-only
Default material parameters, a square periodic cell with a square inclusion,
and a very small mesh to allow comparing the omega and kappa modes (full matrix
solver required!)::
python examples/linear_elasticity/dispersion_analysis.py meshes/2d/square_2m.mesh --solver-conf="kind='eig.scipy', method='eigh'" --log-std-waves -n 10 --range=0,640,101 --mesh-size=1e-2 --mode=omega --eigs-only --no-legends --unit-multipliers=1e-6,1e-2,1e-3 -o output/omega
python examples/linear_elasticity/dispersion_analysis.py meshes/2d/square_2m.mesh --solver-conf="kind='eig.qevp', method='companion', mode='inverted', solver={kind='eig.scipy', method='eig'}" --log-std-waves -n 500 --range=0,4000000,1001 --mesh-size=1e-2 --mode=kappa --eigs-only --no-legends --unit-multipliers=1e-6,1e-2,1e-3 -o output/kappa
View/compare the resulting logs::
python script/plot_logs.py output/omega/frequencies.txt --no-legends -g 1 -o mode-omega.png
python script/plot_logs.py output/kappa/wave-numbers.txt --no-legends -o mode-kappa.png
python script/plot_logs.py output/kappa/wave-numbers.txt --no-legends --swap-axes -o mode-kappa-t.png
In contrast to the heterogeneous square periodic cell, a homogeneous
square periodic cell (the region Y2 is empty)::
python examples/linear_elasticity/dispersion_analysis.py meshes/2d/square_1m.mesh --solver-conf="kind='eig.scipy', method='eigh'" --log-std-waves -n 10 --range=0,640,101 --mesh-size=1e-2 --mode=omega --eigs-only --no-legends --unit-multipliers=1e-6,1e-2,1e-3 -o output/omega-h
python script/plot_logs.py output/omega-h/frequencies.txt --no-legends -g 1 -o mode-omega-h.png
Use the Brillouin stepper::
python examples/linear_elasticity/dispersion_analysis.py meshes/2d/special/circle_in_square.mesh --log-std-waves -n=60 --eigs-only --no-legends --stepper=brillouin
python script/plot_logs.py output/frequencies.txt -g 0 --rc="'font.size':14, 'lines.linewidth' : 3, 'lines.markersize' : 4" -o brillouin-stepper-kappas.png
python script/plot_logs.py output/frequencies.txt -g 1 --no-legends --rc="'font.size':14, 'lines.linewidth' : 3, 'lines.markersize' : 4" -o brillouin-stepper-omegas.png
Additional arguments can be passed to the problem configuration's
:func:`define()` function using the ``--define-kwargs`` option. In this file,
only the mesh vertex separation parameter `mesh_eps` can be used::
python examples/linear_elasticity/dispersion_analysis.py meshes/2d/special/circle_in_square.mesh --log-std-waves --eigs-only --define-kwargs="mesh_eps=1e-10" --save-regions
"""
from __future__ import absolute_import
import os
import sys
sys.path.append('.')
import gc
from copy import copy
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import numpy as nm
import matplotlib.pyplot as plt
from sfepy.base.base import import_file, output, Struct
from sfepy.base.conf import dict_from_string, ProblemConf
from sfepy.base.ioutils import ensure_path, remove_files_patterns, save_options
from sfepy.base.log import Log
from sfepy.discrete.fem import MeshIO
from sfepy.mechanics.matcoefs import stiffness_from_youngpoisson as stiffness
import sfepy.mechanics.matcoefs as mc
from sfepy.mechanics.units import apply_unit_multipliers, apply_units_to_pars
import sfepy.discrete.fem.periodic as per
from sfepy.discrete.fem.meshio import convert_complex_output
from sfepy.homogenization.utils import define_box_regions
from sfepy.discrete import Problem
from sfepy.mechanics.tensors import get_von_mises_stress
from sfepy.solvers import Solver
from sfepy.solvers.ts import get_print_info, TimeStepper
from sfepy.linalg.utils import output_array_stats, max_diff_csr
pars_kinds = {
'young1' : 'stress',
'poisson1' : 'one',
'density1' : 'density',
'young2' : 'stress',
'poisson2' : 'one',
'density2' : 'density',
}
def define(filename_mesh, pars, approx_order, refinement_level, solver_conf,
plane='strain', post_process=False, mesh_eps=1e-8):
io = MeshIO.any_from_filename(filename_mesh)
bbox = io.read_bounding_box()
dim = bbox.shape[1]
options = {
'absolute_mesh_path' : True,
'refinement_level' : refinement_level,
'allow_empty_regions' : True,
'post_process_hook' : 'compute_von_mises' if post_process else None,
}
fields = {
'displacement': ('complex', dim, 'Omega', approx_order),
}
materials = {
'm' : ({
'D' : {'Y1' : stiffness(dim,
young=pars.young1,
poisson=pars.poisson1,
plane=plane),
'Y2' : stiffness(dim,
young=pars.young2,
poisson=pars.poisson2,
plane=plane)},
'density' : {'Y1' : pars.density1, 'Y2' : pars.density2},
},),
'wave' : 'get_wdir',
}
variables = {
'u' : ('unknown field', 'displacement', 0),
'v' : ('test field', 'displacement', 'u'),
}
regions = {
'Omega' : 'all',
'Y1': 'cells of group 1',
'Y2': 'cells of group 2',
}
regions.update(define_box_regions(dim,
bbox[0], bbox[1], mesh_eps))
ebcs = {
}
if dim == 3:
epbcs = {
'periodic_x' : (['Left', 'Right'], {'u.all' : 'u.all'},
'match_x_plane'),
'periodic_y' : (['Near', 'Far'], {'u.all' : 'u.all'},
'match_y_plane'),
'periodic_z' : (['Top', 'Bottom'], {'u.all' : 'u.all'},
'match_z_plane'),
}
else:
epbcs = {
'periodic_x' : (['Left', 'Right'], {'u.all' : 'u.all'},
'match_y_line'),
'periodic_y' : (['Bottom', 'Top'], {'u.all' : 'u.all'},
'match_x_line'),
}
per.set_accuracy(mesh_eps)
functions = {
'match_x_plane' : (per.match_x_plane,),
'match_y_plane' : (per.match_y_plane,),
'match_z_plane' : (per.match_z_plane,),
'match_x_line' : (per.match_x_line,),
'match_y_line' : (per.match_y_line,),
'get_wdir' : (get_wdir,),
}
integrals = {
'i' : 2 * approx_order,
}
equations = {
'K' : 'dw_lin_elastic.i.Omega(m.D, v, u)',
'S' : 'dw_elastic_wave.i.Omega(m.D, wave.vec, v, u)',
'R' : """1j * dw_elastic_wave_cauchy.i.Omega(m.D, wave.vec, u, v)
- 1j * dw_elastic_wave_cauchy.i.Omega(m.D, wave.vec, v, u)""",
'M' : 'dw_dot.i.Omega(m.density, v, u)',
}
solver_0 = solver_conf.copy()
solver_0['name'] = 'eig'
return locals()
def get_wdir(ts, coors, mode=None,
equations=None, term=None, problem=None, wdir=None, **kwargs):
if mode == 'special':
return {'vec' : wdir}
def set_wave_dir(pb, wdir):
materials = pb.get_materials()
wave_mat = materials['wave']
wave_mat.set_extra_args(wdir=wdir)
def save_materials(output_dir, pb, options):
stiffness = pb.evaluate('ev_integrate_mat.2.Omega(m.D, u)',
mode='el_avg', copy_materials=False, verbose=False)
young, poisson = mc.youngpoisson_from_stiffness(stiffness,
plane=options.plane)
density = pb.evaluate('ev_integrate_mat.2.Omega(m.density, u)',
mode='el_avg', copy_materials=False, verbose=False)
out = {}
out['young'] = Struct(name='young', mode='cell',
data=young[..., None, None])
out['poisson'] = Struct(name='poisson', mode='cell',
data=poisson[..., None, None])
out['density'] = Struct(name='density', mode='cell', data=density)
materials_filename = os.path.join(output_dir, 'materials.vtk')
pb.save_state(materials_filename, out=out)
def get_std_wave_fun(pb, options):
stiffness = pb.evaluate('ev_integrate_mat.2.Omega(m.D, u)',
mode='el_avg', copy_materials=False, verbose=False)
young, poisson = mc.youngpoisson_from_stiffness(stiffness,
plane=options.plane)
density = pb.evaluate('ev_integrate_mat.2.Omega(m.density, u)',
mode='el_avg', copy_materials=False, verbose=False)
lam, mu = mc.lame_from_youngpoisson(young, poisson,
plane=options.plane)
alam = nm.average(lam)
amu = nm.average(mu)
adensity = nm.average(density)
cp = nm.sqrt((alam + 2.0 * amu) / adensity)
cs = nm.sqrt(amu / adensity)
output('average p-wave speed:', cp)
output('average shear wave speed:', cs)
log_names = [r'$\omega_p$', r'$\omega_s$']
log_plot_kwargs = [{'ls' : '--', 'color' : 'k'},
{'ls' : '--', 'color' : 'gray'}]
if options.mode == 'omega':
fun = lambda wmag, wdir: (cp * wmag, cs * wmag)
else:
fun = lambda wmag, wdir: (wmag / cp, wmag / cs)
return fun, log_names, log_plot_kwargs
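# For reference (standard isotropic elastodynamics, matching the code above):
# c_p = sqrt((lam + 2 * mu) / rho) and c_s = sqrt(mu / rho), here evaluated
# with element-averaged Lame parameters and density.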
def get_stepper(rng, pb, options):
if options.stepper == 'linear':
stepper = TimeStepper(rng[0], rng[1], dt=None, n_step=rng[2])
return stepper
bbox = pb.domain.mesh.get_bounding_box()
bzone = 2.0 * nm.pi / (bbox[1] - bbox[0])
num = rng[2] // 3
class BrillouinStepper(Struct):
"""
        Step over the first Brillouin zone in the xy plane.
"""
def __init__(self, t0, t1, dt=None, n_step=None, step=None, **kwargs):
Struct.__init__(self, t0=t0, t1=t1, dt=dt, n_step=n_step, step=step)
self.n_digit, self.format, self.suffix = get_print_info(self.n_step)
def __iter__(self):
ts = TimeStepper(0, bzone[0], dt=None, n_step=num)
for ii, val in ts:
yield ii, val, nm.array([1.0, 0.0])
if ii == (num-2): break
ts = TimeStepper(0, bzone[1], dt=None, n_step=num)
for ii, k1 in ts:
wdir = nm.array([bzone[0], k1])
val = nm.linalg.norm(wdir)
wdir = wdir / val
yield num + ii, val, wdir
if ii == (num-2): break
wdir = nm.array([bzone[0], bzone[1]])
val = nm.linalg.norm(wdir)
wdir = wdir / val
ts = TimeStepper(0, 1, dt=None, n_step=num)
for ii, _ in ts:
yield 2 * num + ii, val * (1.0 - float(ii)/(num-1)), wdir
stepper = BrillouinStepper(0, 1, n_step=rng[2])
return stepper
def compute_von_mises(out, pb, state, extend=False, wmag=None, wdir=None):
"""
Calculate the von Mises stress.
"""
stress = pb.evaluate('ev_cauchy_stress.i.Omega(m.D, u)', mode='el_avg')
vms = get_von_mises_stress(stress.squeeze())
vms.shape = (vms.shape[0], 1, 1, 1)
out['von_mises_stress'] = Struct(name='output_data', mode='cell',
data=vms)
return out
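# Reference sketch (standard von Mises invariant, not copied from sfepy's
# source):
# vm = sqrt(0.5 * ((s11 - s22)**2 + (s22 - s33)**2 + (s33 - s11)**2)
#           + 3.0 * (s12**2 + s13**2 + s23**2))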
def save_eigenvectors(filename, svecs, wmag, wdir, pb):
if svecs is None: return
variables = pb.get_variables()
# Make full eigenvectors (add DOFs fixed by boundary conditions).
vecs = nm.empty((variables.di.ptr[-1], svecs.shape[1]),
dtype=svecs.dtype)
for ii in range(svecs.shape[1]):
vecs[:, ii] = variables.make_full_vec(svecs[:, ii])
# Save the eigenvectors.
out = {}
state = pb.create_state()
pp_name = pb.conf.options.get('post_process_hook')
pp = getattr(pb.conf.funmod, pp_name if pp_name is not None else '',
lambda out, *args, **kwargs: out)
for ii in range(svecs.shape[1]):
state.set_full(vecs[:, ii])
aux = state.create_output_dict()
aux2 = {}
pp(aux2, pb, state, wmag=wmag, wdir=wdir)
aux.update(convert_complex_output(aux2))
out.update({key + '%03d' % ii : aux[key] for key in aux})
pb.save_state(filename, out=out)
def assemble_matrices(define, mod, pars, set_wave_dir, options, wdir=None):
"""
Assemble the blocks of dispersion eigenvalue problem matrices.
"""
define_dict = define(filename_mesh=options.mesh_filename,
pars=pars,
approx_order=options.order,
refinement_level=options.refine,
solver_conf=options.solver_conf,
plane=options.plane,
post_process=options.post_process,
**options.define_kwargs)
conf = ProblemConf.from_dict(define_dict, mod)
pb = Problem.from_conf(conf)
pb.dispersion_options = options
pb.set_output_dir(options.output_dir)
dim = pb.domain.shape.dim
# Set the normalized wave vector direction to the material(s).
if wdir is None:
wdir = nm.asarray(options.wave_dir[:dim], dtype=nm.float64)
wdir = wdir / nm.linalg.norm(wdir)
set_wave_dir(pb, wdir)
bbox = pb.domain.mesh.get_bounding_box()
size = (bbox[1] - bbox[0]).max()
scaling0 = apply_unit_multipliers([1.0], ['length'],
options.unit_multipliers)[0]
scaling = scaling0
if options.mesh_size is not None:
scaling *= options.mesh_size / size
output('scaling factor of periodic cell mesh coordinates:', scaling)
output('new mesh size with applied unit multipliers:', scaling * size)
pb.domain.mesh.coors[:] *= scaling
pb.set_mesh_coors(pb.domain.mesh.coors, update_fields=True)
bzone = 2.0 * nm.pi / (scaling * size)
    output('first Brillouin zone size:', bzone * scaling0)
    output('first Brillouin zone size with applied unit multipliers:', bzone)
pb.time_update()
pb.update_materials()
# Assemble the matrices.
mtxs = {}
for key, eq in pb.equations.iteritems():
mtxs[key] = mtx = pb.mtx_a.copy()
mtx = eq.evaluate(mode='weak', dw_mode='matrix', asm_obj=mtx)
mtx.eliminate_zeros()
output_array_stats(mtx.data, 'nonzeros in %s' % key)
output('symmetry checks:')
output('%s - %s^T:' % (key, key), max_diff_csr(mtx, mtx.T))
output('%s - %s^H:' % (key, key), max_diff_csr(mtx, mtx.H))
return pb, wdir, bzone, mtxs
def setup_n_eigs(options, pb, mtxs):
"""
Setup the numbers of eigenvalues based on options and numbers of DOFs.
"""
solver_n_eigs = n_eigs = options.n_eigs
n_dof = mtxs['K'].shape[0]
if options.mode == 'omega':
if options.n_eigs > n_dof:
n_eigs = n_dof
solver_n_eigs = None
else:
if options.n_eigs > 2 * n_dof:
n_eigs = 2 * n_dof
solver_n_eigs = None
return solver_n_eigs, n_eigs
def build_evp_matrices(mtxs, val, mode, pb):
"""
Build the matrices of the dispersion eigenvalue problem.
"""
if mode == 'omega':
mtx_a = mtxs['K'] + val**2 * mtxs['S'] + val * mtxs['R']
output('A - A^H:', max_diff_csr(mtx_a, mtx_a.H))
evp_mtxs = (mtx_a, mtxs['M'])
else:
evp_mtxs = (mtxs['S'], mtxs['R'], mtxs['K'] - val**2 * mtxs['M'])
return evp_mtxs
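# Sketch of the two eigenproblems assembled above: in 'omega' mode the solver
# gets the generalized pair (K + kappa**2 * S + kappa * R, M) whose
# eigenvalues are omega**2; in 'kappa' mode it gets the quadratic triple
# (S, R, K - omega**2 * M) whose eigenvalues are the wave numbers kappa.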
def process_evp_results(eigs, svecs, val, wdir, bzone, pb, mtxs, options,
std_wave_fun=None):
"""
Transform eigenvalues to either omegas or kappas, depending on `mode`.
Transform eigenvectors, if available, depending on `mode`.
Return also the values to log.
"""
if options.mode == 'omega':
omegas = nm.sqrt(eigs)
output('eigs, omegas:')
for ii, om in enumerate(omegas):
output('{:>3}. {: .10e}, {:.10e}'.format(ii, eigs[ii], om))
if options.stepper == 'linear':
out = tuple(eigs) + tuple(omegas)
else:
out = tuple(val * wdir) + tuple(omegas)
if std_wave_fun is not None:
out = out + std_wave_fun(val, wdir)
return omegas, svecs, out
else:
kappas = eigs.copy()
rks = kappas.copy()
        # Mask modes far from the first Brillouin zone.
max_kappa = 1.2 * bzone
kappas[kappas.real > max_kappa] = nm.nan
# Mask non-physical modes.
kappas[kappas.real < 0] = nm.nan
kappas[nm.abs(kappas.imag) > 1e-10] = nm.nan
out = tuple(kappas.real)
output('raw kappas, masked real part:',)
for ii, kr in enumerate(kappas.real):
output('{:>3}. {: 23.5e}, {:.10e}'.format(ii, rks[ii], kr))
if svecs is not None:
n_dof = mtxs['K'].shape[0]
# Select only vectors corresponding to physical modes.
ii = nm.isfinite(kappas.real)
svecs = svecs[:n_dof, ii]
if std_wave_fun is not None:
out = out + tuple(ii if ii <= max_kappa else nm.nan
for ii in std_wave_fun(val, wdir))
return kappas, svecs, out
helps = {
'pars' :
'material parameters in Y1, Y2 subdomains in basic units.'
' The default parameters are:'
' young1, poisson1, density1, young2, poisson2, density2'
' [default: %(default)s]',
'conf' :
'if given, an alternative problem description file with apply_units() and'
' define() functions [default: %(default)s]',
'define_kwargs' : 'additional keyword arguments passed to define()',
'mesh_size' :
'desired mesh size (max. of bounding box dimensions) in basic units'
' - the input periodic cell mesh is rescaled to this size'
' [default: %(default)s]',
'unit_multipliers' :
'basic unit multipliers (time, length, mass) [default: %(default)s]',
'plane' :
'for 2D problems, plane strain or stress hypothesis selection'
' [default: %(default)s]',
'wave_dir' : 'the wave vector direction (will be normalized)'
' [default: %(default)s]',
'mode' : 'solution mode: omega = solve a generalized EVP for omega,'
' kappa = solve a quadratic generalized EVP for kappa'
' [default: %(default)s]',
'stepper' : 'the range stepper. For "brillouin", only the number'
' of items from --range is used'
' [default: %(default)s]',
'range' : 'the wave vector magnitude / frequency range'
' (like numpy.linspace) depending on the mode option'
' [default: %(default)s]',
'order' : 'displacement field approximation order [default: %(default)s]',
'refine' : 'number of uniform mesh refinements [default: %(default)s]',
'n_eigs' : 'the number of eigenvalues to compute [default: %(default)s]',
'eigs_only' : 'compute only eigenvalues, not eigenvectors',
'post_process' : 'post-process eigenvectors',
'solver_conf' : 'eigenvalue problem solver configuration options'
' [default: %(default)s]',
'save_regions' : 'save defined regions into'
' <output_directory>/regions.vtk',
'save_materials' : 'save material parameters into'
' <output_directory>/materials.vtk',
'log_std_waves' : 'log also standard pressure dilatation and shear waves',
'no_legends' :
'do not show legends in the log plots',
'no_show' :
'do not show the log figure',
'silent' : 'do not print messages to screen',
'clear' :
'clear old solution files from output directory',
'output_dir' :
'output directory [default: %(default)s]',
'mesh_filename' :
'input periodic cell mesh file name [default: %(default)s]',
}
def main():
# Aluminium and epoxy.
default_pars = '70e9,0.35,2.799e3,3.8e9,0.27,1.142e3'
default_solver_conf = ("kind='eig.scipy',method='eigsh',tol=1.0e-5,"
"maxiter=1000,which='LM',sigma=0.0")
parser = ArgumentParser(description=__doc__,
formatter_class=RawDescriptionHelpFormatter)
parser.add_argument('--pars', metavar='name1=value1,name2=value2,...'
' or value1,value2,...',
action='store', dest='pars',
default=default_pars, help=helps['pars'])
parser.add_argument('--conf', metavar='filename',
action='store', dest='conf',
default=None, help=helps['conf'])
parser.add_argument('--define-kwargs', metavar='dict-like',
action='store', dest='define_kwargs',
default=None, help=helps['define_kwargs'])
parser.add_argument('--mesh-size', type=float, metavar='float',
action='store', dest='mesh_size',
default=None, help=helps['mesh_size'])
parser.add_argument('--unit-multipliers',
metavar='c_time,c_length,c_mass',
action='store', dest='unit_multipliers',
default='1.0,1.0,1.0', help=helps['unit_multipliers'])
parser.add_argument('--plane', action='store', dest='plane',
choices=['strain', 'stress'],
default='strain', help=helps['plane'])
parser.add_argument('--wave-dir', metavar='float,float[,float]',
action='store', dest='wave_dir',
default='1.0,0.0,0.0', help=helps['wave_dir'])
parser.add_argument('--mode', action='store', dest='mode',
choices=['omega', 'kappa'],
default='omega', help=helps['mode'])
parser.add_argument('--stepper', action='store', dest='stepper',
choices=['linear', 'brillouin'],
default='linear', help=helps['stepper'])
parser.add_argument('--range', metavar='start,stop,count',
action='store', dest='range',
default='0,6.4,33', help=helps['range'])
parser.add_argument('--order', metavar='int', type=int,
action='store', dest='order',
default=1, help=helps['order'])
parser.add_argument('--refine', metavar='int', type=int,
action='store', dest='refine',
default=0, help=helps['refine'])
parser.add_argument('-n', '--n-eigs', metavar='int', type=int,
action='store', dest='n_eigs',
default=6, help=helps['n_eigs'])
group = parser.add_mutually_exclusive_group()
group.add_argument('--eigs-only',
action='store_true', dest='eigs_only',
default=False, help=helps['eigs_only'])
group.add_argument('--post-process',
action='store_true', dest='post_process',
default=False, help=helps['post_process'])
parser.add_argument('--solver-conf', metavar='dict-like',
action='store', dest='solver_conf',
default=default_solver_conf, help=helps['solver_conf'])
parser.add_argument('--save-regions',
action='store_true', dest='save_regions',
default=False, help=helps['save_regions'])
parser.add_argument('--save-materials',
action='store_true', dest='save_materials',
default=False, help=helps['save_materials'])
parser.add_argument('--log-std-waves',
action='store_true', dest='log_std_waves',
default=False, help=helps['log_std_waves'])
parser.add_argument('--no-legends',
action='store_false', dest='show_legends',
default=True, help=helps['no_legends'])
parser.add_argument('--no-show',
action='store_false', dest='show',
default=True, help=helps['no_show'])
parser.add_argument('--silent',
action='store_true', dest='silent',
default=False, help=helps['silent'])
parser.add_argument('-c', '--clear',
action='store_true', dest='clear',
default=False, help=helps['clear'])
parser.add_argument('-o', '--output-dir', metavar='path',
action='store', dest='output_dir',
default='output', help=helps['output_dir'])
parser.add_argument('mesh_filename', default='',
help=helps['mesh_filename'])
options = parser.parse_args()
output_dir = options.output_dir
    output.set_output(filename=os.path.join(output_dir, 'output_log.txt'),
                      combined=not options.silent)
if options.conf is not None:
mod = import_file(options.conf)
else:
mod = sys.modules[__name__]
pars_kinds = mod.pars_kinds
define = mod.define
set_wave_dir = mod.set_wave_dir
setup_n_eigs = mod.setup_n_eigs
build_evp_matrices = mod.build_evp_matrices
save_materials = mod.save_materials
get_std_wave_fun = mod.get_std_wave_fun
get_stepper = mod.get_stepper
process_evp_results = mod.process_evp_results
save_eigenvectors = mod.save_eigenvectors
try:
options.pars = dict_from_string(options.pars)
except:
aux = [float(ii) for ii in options.pars.split(',')]
options.pars = {key : aux[ii]
for ii, key in enumerate(pars_kinds.keys())}
options.unit_multipliers = [float(ii)
for ii in options.unit_multipliers.split(',')]
options.wave_dir = [float(ii)
for ii in options.wave_dir.split(',')]
aux = options.range.split(',')
options.range = [float(aux[0]), float(aux[1]), int(aux[2])]
options.solver_conf = dict_from_string(options.solver_conf)
options.define_kwargs = dict_from_string(options.define_kwargs)
if options.clear:
remove_files_patterns(output_dir,
['*.h5', '*.vtk', '*.txt'],
ignores=['output_log.txt'],
verbose=True)
filename = os.path.join(output_dir, 'options.txt')
ensure_path(filename)
save_options(filename, [('options', vars(options))],
quote_command_line=True)
pars = apply_units_to_pars(options.pars, pars_kinds,
options.unit_multipliers)
output('material parameter names and kinds:')
output(pars_kinds)
output('material parameters with applied unit multipliers:')
output(pars)
pars = Struct(**pars)
if options.mode == 'omega':
rng = copy(options.range)
rng[:2] = apply_unit_multipliers(options.range[:2],
['wave_number', 'wave_number'],
options.unit_multipliers)
output('wave number range with applied unit multipliers:', rng)
else:
if options.stepper == 'brillouin':
raise ValueError('Cannot use "brillouin" stepper in kappa mode!')
rng = copy(options.range)
rng[:2] = apply_unit_multipliers(options.range[:2],
['frequency', 'frequency'],
options.unit_multipliers)
output('frequency range with applied unit multipliers:', rng)
pb, wdir, bzone, mtxs = assemble_matrices(define, mod, pars, set_wave_dir,
options)
dim = pb.domain.shape.dim
if dim != 2:
options.plane = 'strain'
if options.save_regions:
pb.save_regions_as_groups(os.path.join(output_dir, 'regions'))
if options.save_materials:
save_materials(output_dir, pb, options)
conf = pb.solver_confs['eig']
eig_solver = Solver.any_from_conf(conf)
n_eigs, options.n_eigs = setup_n_eigs(options, pb, mtxs)
get_color = lambda ii: plt.cm.viridis((float(ii)
/ (max(options.n_eigs, 2) - 1)))
plot_kwargs = [{'color' : get_color(ii), 'ls' : '', 'marker' : 'o'}
for ii in range(options.n_eigs)]
    get_color_dim = lambda ii: plt.cm.viridis((float(ii) / (max(dim, 2) - 1)))
plot_kwargs_dim = [{'color' : get_color_dim(ii), 'ls' : '', 'marker' : 'o'}
for ii in range(dim)]
log_names = []
log_plot_kwargs = []
if options.log_std_waves:
std_wave_fun, log_names, log_plot_kwargs = get_std_wave_fun(
pb, options)
else:
std_wave_fun = None
stepper = get_stepper(rng, pb, options)
if options.mode == 'omega':
eigenshapes_filename = os.path.join(output_dir,
'frequency-eigenshapes-%s.vtk'
% stepper.suffix)
if options.stepper == 'linear':
log = Log([[r'$\lambda_{%d}$' % ii for ii in range(options.n_eigs)],
[r'$\omega_{%d}$'
% ii for ii in range(options.n_eigs)] + log_names],
plot_kwargs=[plot_kwargs, plot_kwargs + log_plot_kwargs],
formats=[['{:.12e}'] * options.n_eigs,
['{:.12e}'] * (options.n_eigs + len(log_names))],
yscales=['linear', 'linear'],
xlabels=[r'$\kappa$', r'$\kappa$'],
ylabels=[r'eigenvalues $\lambda_i$',
r'frequencies $\omega_i$'],
show_legends=options.show_legends,
is_plot=options.show,
log_filename=os.path.join(output_dir, 'frequencies.txt'),
aggregate=1000, sleep=0.1)
else:
log = Log([[r'$\kappa_{%d}$'% ii for ii in range(dim)],
[r'$\omega_{%d}$'
% ii for ii in range(options.n_eigs)] + log_names],
plot_kwargs=[plot_kwargs_dim,
plot_kwargs + log_plot_kwargs],
formats=[['{:.12e}'] * dim,
['{:.12e}'] * (options.n_eigs + len(log_names))],
yscales=['linear', 'linear'],
xlabels=[r'', r''],
ylabels=[r'wave vector $\kappa$',
r'frequencies $\omega_i$'],
show_legends=options.show_legends,
is_plot=options.show,
log_filename=os.path.join(output_dir, 'frequencies.txt'),
aggregate=1000, sleep=0.1)
for aux in stepper:
if options.stepper == 'linear':
iv, wmag = aux
else:
iv, wmag, wdir = aux
output('step %d: wave vector %s' % (iv, wmag * wdir))
if options.stepper == 'brillouin':
pb, _, bzone, mtxs = assemble_matrices(
define, mod, pars, set_wave_dir, options, wdir=wdir)
evp_mtxs = build_evp_matrices(mtxs, wmag, options.mode, pb)
if options.eigs_only:
eigs = eig_solver(*evp_mtxs, n_eigs=n_eigs,
eigenvectors=False)
svecs = None
else:
eigs, svecs = eig_solver(*evp_mtxs, n_eigs=n_eigs,
eigenvectors=True)
omegas, svecs, out = process_evp_results(
eigs, svecs, wmag, wdir, bzone, pb, mtxs, options,
std_wave_fun=std_wave_fun
)
if options.stepper == 'linear':
log(*out, x=[wmag, wmag])
else:
log(*out, x=[iv, iv])
save_eigenvectors(eigenshapes_filename % iv, svecs, wmag, wdir, pb)
gc.collect()
log(save_figure=os.path.join(output_dir, 'frequencies.png'))
log(finished=True)
else:
eigenshapes_filename = os.path.join(output_dir,
'wave-number-eigenshapes-%s.vtk'
% stepper.suffix)
log = Log([[r'$\kappa_{%d}$' % ii for ii in range(options.n_eigs)]
+ log_names],
plot_kwargs=[plot_kwargs + log_plot_kwargs],
formats=[['{:.12e}'] * (options.n_eigs + len(log_names))],
yscales=['linear'],
xlabels=[r'$\omega$'],
ylabels=[r'wave numbers $\kappa_i$'],
show_legends=options.show_legends,
is_plot=options.show,
log_filename=os.path.join(output_dir, 'wave-numbers.txt'),
aggregate=1000, sleep=0.1)
for io, omega in stepper:
output('step %d: frequency %s' % (io, omega))
evp_mtxs = build_evp_matrices(mtxs, omega, options.mode, pb)
if options.eigs_only:
eigs = eig_solver(*evp_mtxs, n_eigs=n_eigs,
eigenvectors=False)
svecs = None
else:
eigs, svecs = eig_solver(*evp_mtxs, n_eigs=n_eigs,
eigenvectors=True)
kappas, svecs, out = process_evp_results(
eigs, svecs, omega, wdir, bzone, pb, mtxs, options,
std_wave_fun=std_wave_fun
)
log(*out, x=[omega])
save_eigenvectors(eigenshapes_filename % io, svecs, kappas, wdir,
pb)
gc.collect()
log(save_figure=os.path.join(output_dir, 'wave-numbers.png'))
log(finished=True)
if __name__ == '__main__':
main()
| bsd-3-clause | 9,215,424,213,724,976,000 | 39.702326 | 344 | 0.557222 | false |
rajanandakumar/DIRAC | FrameworkSystem/private/monitoring/Activity.py | 19 | 3551 | # $HeadURL$
__RCSID__ = "$Id$"
import types
from string import Template
class Activity:
dbFields = [ 'activities.unit',
'activities.type',
'activities.description',
'activities.filename',
'activities.bucketLength',
'sources.site',
'sources.componentType',
'sources.componentLocation',
'sources.componentName'
]
dbMapping = {
}
def __init__( self, dataList ):
"""
Init an activity
"""
self.dataList = dataList
self.groupList = []
self.groupLabel = ""
self.__initMapping()
self.templateMap = {}
self.scaleFactor = 1
self.labelTemplate = ""
for fieldName in self.dbFields:
capsFieldName = fieldName.split(".")[1].upper()
self.templateMap[ capsFieldName ] = self.__getField( fieldName )
def __initMapping(self):
"""
    Init static mapping
"""
if not self.dbMapping:
for index in range( len( self.dbFields ) ):
self.dbMapping[ self.dbFields[index] ] = index
def setBucketScaleFactor( self, scaleFactor ):
self.scaleFactor = scaleFactor
self.__calculateUnit()
def __calculateUnit( self ):
self.dataList = list( self.dataList )
unit = self.dataList[ self.dbMapping[ 'activities.unit' ] ].split("/")[0]
    if self.getType() in ( "sum", ):
sF = int( self.getBucketLength() * self.scaleFactor ) / 60
if sF == 1:
unit = "%s/min" % unit
else:
unit = "%s/%s mins" % ( unit, sF )
    if self.getType() in ( "rate", ):
unit = "%s/seconds" % unit
self.dataList[ self.dbMapping[ 'activities.unit' ] ] = unit
self.templateMap[ 'UNIT' ] = unit
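  # Illustrative outcome (hypothetical values): a "files/min" activity of
  # type "sum" with bucketLength 600 and scaleFactor 1 is relabelled
  # "files/10 mins", while a "rate" activity becomes "files/seconds".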
def setGroup( self, group ):
"""
Set group to which this activity belongs
"""
self.groupList = group
self.groupLabel = "Grouped for"
for fieldName in self.groupList:
self.groupLabel += " %s," % fieldName
self.groupLabel = self.groupLabel[:-1]
def setLabel( self, labelTemplate ):
"""
Set activity label
"""
self.labelTemplate = labelTemplate
def __getField( self, name ):
"""
Get field value
"""
return self.dataList[ self.dbMapping[ name ] ]
def getUnit(self):
return self.__getField( 'activities.unit' )
def getFile(self):
return self.__getField( 'activities.filename' )
def getType(self):
return self.__getField( 'activities.type' )
def getDescription(self):
return self.__getField( 'activities.description' )
def getBucketLength(self):
return self.__getField( 'activities.bucketLength' )
def getSite(self):
return self.__getField( 'sources.site' )
def getComponentType(self):
return self.__getField( 'sources.componentType' )
def getComponentName(self):
return self.__getField( 'sources.componentName' )
def getComponentLocation(self):
return self.__getField( 'sources.componentLocation' )
def getGroupLabel(self):
return self.groupLabel
def getLabel(self):
if type( self.labelTemplate ) == types.UnicodeType:
self.labelTemplate = self.labelTemplate.encode( "utf-8" )
return Template( self.labelTemplate ).safe_substitute( self.templateMap )
def __str__( self ):
return "[%s][%s][%s]" % ( self.getLabel(), self.getGroupLabel(), str( self.dataList ) )
def __repr__( self ):
return self.__str__()
def __lt__( self, act ):
label = self.getLabel()
try:
return label < act.getLabel()
except:
return label < act | gpl-3.0 | -8,634,838,668,705,523,000 | 25.909091 | 91 | 0.608279 | false |
ging/horizon | openstack_dashboard/dashboards/admin/defaults/tables.py | 9 | 2665 | # Copyright 2013 Kylin, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import tables
class QuotaFilterAction(tables.FilterAction):
def filter(self, table, tenants, filter_string):
q = filter_string.lower()
def comp(tenant):
if q in tenant.name.lower():
return True
return False
return filter(comp, tenants)
class UpdateDefaultQuotas(tables.LinkAction):
name = "update_defaults"
verbose_name = _("Update Defaults")
url = "horizon:admin:defaults:update_defaults"
classes = ("ajax-modal", "btn-edit")
def get_quota_name(quota):
QUOTA_NAMES = {
'injected_file_content_bytes': _('Injected File Content Bytes'),
'injected_file_path_bytes': _('Length of Injected File Path'),
'metadata_items': _('Metadata Items'),
'cores': _('VCPUs'),
'instances': _('Instances'),
'injected_files': _('Injected Files'),
'volumes': _('Volumes'),
'snapshots': _('Volume Snapshots'),
'gigabytes': _('Total Size of Volumes and Snapshots (GB)'),
'ram': _('RAM (MB)'),
'floating_ips': _('Floating IPs'),
'security_groups': _('Security Groups'),
'security_group_rules': _('Security Group Rules'),
'key_pairs': _('Key Pairs'),
'fixed_ips': _('Fixed IPs'),
'volumes_volume_luks': _('LUKS Volumes'),
'snapshots_volume_luks': _('LUKS Volume Snapshots'),
'gigabytes_volume_luks':
_('Total Size of LUKS Volumes and Snapshots (GB)'),
'dm-crypt': _('dm-crypt'),
}
return QUOTA_NAMES.get(quota.name, quota.name.replace("_", " ").title())
class QuotasTable(tables.DataTable):
name = tables.Column(get_quota_name, verbose_name=_('Quota Name'))
limit = tables.Column("limit", verbose_name=_('Limit'))
def get_object_id(self, obj):
return obj.name
class Meta:
name = "quotas"
verbose_name = _("Quotas")
table_actions = (QuotaFilterAction, UpdateDefaultQuotas)
multi_select = False
| apache-2.0 | 6,507,200,925,656,266,000 | 34.065789 | 78 | 0.62439 | false |
pplatek/odoo | addons/portal/mail_thread.py | 390 | 2004 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-TODAY OpenERP S.A (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import SUPERUSER_ID
from openerp.osv import osv
class mail_thread(osv.AbstractModel):
""" Update of mail_mail class, to add the signin URL to notifications. """
_inherit = 'mail.thread'
def _get_inbox_action_xml_id(self, cr, uid, context=None):
""" For a given message, return an action that either
- opens the form view of the related document if model, res_id, and
read access to the document
- opens the Inbox with a default search on the conversation if model,
res_id
- opens the Inbox with context propagated
"""
cur_user = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context)
# if uid is a portal user -> action is different
if any(group.is_portal for group in cur_user.groups_id):
return ('portal', 'action_mail_inbox_feeds_portal')
else:
return super(mail_thread, self)._get_inbox_action_xml_id(cr, uid, context=context)
| agpl-3.0 | -4,965,695,226,466,483,000 | 45.604651 | 94 | 0.619261 | false |
jfmartinez64/test | couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/vine.py | 25 | 3373 | from __future__ import unicode_literals
import re
import json
import itertools
from .common import InfoExtractor
from ..utils import unified_strdate
class VineIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?vine\.co/v/(?P<id>\w+)'
_TEST = {
'url': 'https://vine.co/v/b9KOOWX7HUx',
'md5': '2f36fed6235b16da96ce9b4dc890940d',
'info_dict': {
'id': 'b9KOOWX7HUx',
'ext': 'mp4',
'title': 'Chicken.',
'alt_title': 'Vine by Jack Dorsey',
'description': 'Chicken.',
'upload_date': '20130519',
'uploader': 'Jack Dorsey',
'uploader_id': '76',
},
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage('https://vine.co/v/' + video_id, video_id)
data = json.loads(self._html_search_regex(
r'window\.POST_DATA = { %s: ({.+?}) }' % video_id, webpage, 'vine data'))
formats = [{
'url': data['videoLowURL'],
'ext': 'mp4',
'format_id': 'low',
}, {
'url': data['videoUrl'],
'ext': 'mp4',
'format_id': 'standard',
}]
return {
'id': video_id,
'title': self._og_search_title(webpage),
'alt_title': self._og_search_description(webpage),
'description': data['description'],
'thumbnail': data['thumbnailUrl'],
'upload_date': unified_strdate(data['created']),
'uploader': data['username'],
'uploader_id': data['userIdStr'],
'like_count': data['likes']['count'],
'comment_count': data['comments']['count'],
'repost_count': data['reposts']['count'],
'formats': formats,
}
class VineUserIE(InfoExtractor):
IE_NAME = 'vine:user'
_VALID_URL = r'(?:https?://)?vine\.co/(?P<u>u/)?(?P<user>[^/]+)/?(\?.*)?$'
_VINE_BASE_URL = "https://vine.co/"
_TESTS = [
{
'url': 'https://vine.co/Visa',
'info_dict': {
'id': 'Visa',
},
'playlist_mincount': 46,
},
{
'url': 'https://vine.co/u/941705360593584128',
'only_matching': True,
},
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
user = mobj.group('user')
u = mobj.group('u')
profile_url = "%sapi/users/profiles/%s%s" % (
self._VINE_BASE_URL, 'vanity/' if not u else '', user)
profile_data = self._download_json(
profile_url, user, note='Downloading user profile data')
user_id = profile_data['data']['userId']
timeline_data = []
for pagenum in itertools.count(1):
timeline_url = "%sapi/timelines/users/%s?page=%s&size=100" % (
self._VINE_BASE_URL, user_id, pagenum)
timeline_page = self._download_json(
timeline_url, user, note='Downloading page %d' % pagenum)
timeline_data.extend(timeline_page['data']['records'])
if timeline_page['data']['nextPage'] is None:
break
entries = [
self.url_result(e['permalinkUrl'], 'Vine') for e in timeline_data]
return self.playlist_result(entries, user)
# License: gpl-3.0

# -----------------------------------------------------------------------------
# File: ete3/tools/ete_build_lib/task/uhire.py
# Repo: etetoolkit/ete
# -----------------------------------------------------------------------------
from __future__ import absolute_import
# #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://etetoolkit.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2015).
#
# If you make use of ETE in published work, please cite:
#
# Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon.
# ETE: a python Environment for Tree Exploration. Jaime BMC
# Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit may be available in the documentation.
#
# More info at http://etetoolkit.org. Contact: [email protected]
#
#
# #END_LICENSE#############################################################
import os
import logging
log = logging.getLogger("main")
from ..master_task import AlgTask
from ..master_job import Job
from ..utils import SeqGroup, OrderedDict
__all__ = ["Uhire"]
class Uhire(AlgTask):
def __init__(self, nodeid, multiseq_file, seqtype, conf):
# Initialize task
AlgTask.__init__(self, nodeid, "alg", "Usearch-Uhire",
OrderedDict(), conf["uhire"])
self.conf = conf
self.seqtype = seqtype
self.multiseq_file = multiseq_file
self.init()
self.alg_fasta_file = os.path.join(self.taskdir, "final_alg.fasta")
self.alg_phylip_file = os.path.join(self.taskdir, "final_alg.iphylip")
def load_jobs(self):
# split the original set of sequences in clusters.
uhire_args = {
"--clumpfasta": "./",
"--maxclump": "%s" %self.conf["uhire"]["_maxclump"],
"--usersort": "",
"--uhire": self.multiseq_file,
}
uhire_job = Job(self.conf["app"]["usearch"], uhire_args, "usearch-uhire",
parent_ids=[self.nodeid])
# Builds a muscle alignment for each of those clusters. (This
# is a special job to align all clumps independently. The
# whole shell command is used as job binary, so it is very
        # important that there are no trailing lines at the end of the
# command.)
cmd = """
(mkdir clumpalgs/;
for fname in %s/clump.* %s/master;
do %s -in $fname -out clumpalgs/`basename $fname` -maxiters %s;
done;) """ %(
os.path.join("../",uhire_job.jobid),
os.path.join("../",uhire_job.jobid),
self.conf["app"]["muscle"],
self.conf["uhire"]["_muscle_maxiters"])
alg_job = Job(cmd, {}, "uhire_muscle_algs", parent_ids=[uhire_job.jobid])
alg_job.dependencies.add(uhire_job)
        # Merge the cluster alignments into a single one
umerge_args = {
"--maxlen": self.conf["uhire"]["_max_seq_length"],
"--mergeclumps": "../%s/clumpalgs/" %alg_job.jobid,
"--output": "alg.fasta",
}
umerge_job = Job(self.conf["app"]["usearch"], umerge_args, "usearch-umerge",
parent_ids=[alg_job.jobid])
umerge_job.dependencies.add(alg_job)
        # Add all jobs to the task queue
self.jobs.extend([uhire_job, alg_job, umerge_job])
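
    # Editor's note (illustrative only; flags and paths taken from the job
    # definitions above, concrete values are placeholders): the three jobs
    # form a linear pipeline roughly equivalent to:
    #     usearch --uhire seqs.fa --clumpfasta ./ --maxclump N --usersort  # split
    #     muscle -in clump.X -out clumpalgs/clump.X -maxiters M           # align each clump
    #     usearch --mergeclumps clumpalgs/ --output alg.fasta             # merge
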
def finish(self):
# Once executed, alignment is converted into relaxed
# interleaved phylip format.
final_job = self.jobs[2]
alg = SeqGroup(os.path.join(final_job.jobdir, "alg.fasta"))
alg.write(outfile=self.alg_fasta_file, format="fasta")
alg.write(outfile=self.alg_phylip_file, format="iphylip_relaxed")
AlgTask.finish(self)
# License: gpl-3.0

# -----------------------------------------------------------------------------
# File: nova/api/openstack/compute/legacy_v2/contrib/rescue.py
# Repo: CEG-FYP-OpenStack/scheduler
# -----------------------------------------------------------------------------
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The rescue mode extension."""
import webob
from webob import exc
from nova.api.openstack import common
from nova.api.openstack import extensions as exts
from nova.api.openstack import wsgi
from nova import compute
from nova import exception
from nova import utils
authorize = exts.extension_authorizer('compute', 'rescue')
class RescueController(wsgi.Controller):
def __init__(self, ext_mgr, *args, **kwargs):
super(RescueController, self).__init__(*args, **kwargs)
self.compute_api = compute.API()
self.ext_mgr = ext_mgr
@wsgi.action('rescue')
def _rescue(self, req, id, body):
"""Rescue an instance."""
context = req.environ["nova.context"]
authorize(context)
if body['rescue'] and 'adminPass' in body['rescue']:
password = body['rescue']['adminPass']
else:
password = utils.generate_password()
instance = common.get_instance(self.compute_api, context, id)
try:
rescue_image_ref = None
if self.ext_mgr.is_loaded("os-extended-rescue-with-image"):
if body['rescue'] and 'rescue_image_ref' in body['rescue']:
rescue_image_ref = body['rescue']['rescue_image_ref']
self.compute_api.rescue(context, instance,
rescue_password=password, rescue_image_ref=rescue_image_ref)
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'rescue', id)
except exception.InvalidVolume as volume_error:
raise exc.HTTPConflict(explanation=volume_error.format_message())
except exception.InstanceNotRescuable as non_rescuable:
raise exc.HTTPBadRequest(
explanation=non_rescuable.format_message())
return {'adminPass': password}
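
    # Editor's illustrative sketch of the request/response shape implied by the
    # handler above (inferred from the body parsing, not an official API doc;
    # the endpoint path and values are assumptions):
    #     POST /v2/{tenant_id}/servers/{server_id}/action
    #     {"rescue": {"adminPass": "s3cret",
    #                 "rescue_image_ref": "<image-uuid>"}}  # image ref optional
    # returns: {"adminPass": "s3cret"}
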
@wsgi.action('unrescue')
def _unrescue(self, req, id, body):
"""Unrescue an instance."""
context = req.environ["nova.context"]
authorize(context)
instance = common.get_instance(self.compute_api, context, id)
try:
self.compute_api.unrescue(context, instance)
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'unrescue',
id)
return webob.Response(status_int=202)
class Rescue(exts.ExtensionDescriptor):
"""Instance rescue mode."""
name = "Rescue"
alias = "os-rescue"
namespace = "http://docs.openstack.org/compute/ext/rescue/api/v1.1"
updated = "2011-08-18T00:00:00Z"
def get_controller_extensions(self):
controller = RescueController(self.ext_mgr)
extension = exts.ControllerExtension(self, 'servers', controller)
return [extension]
# License: apache-2.0

# -----------------------------------------------------------------------------
# File: opensdraw/scripts/list_parts_in_mpd.py
# Repo: HazenBabcock/opensdraw
# -----------------------------------------------------------------------------
#!/usr/bin/env python
"""
Lists all the unique parts and their colors in a .mpd file. This
is sometimes useful for determining the name of a part and/or a
color.
Hazen 04/15
"""
import os
import re
import sys
import opensdraw.lcad_lib.datFileParser as datFileParser
if (len(sys.argv) != 2):
print("usage: <ldraw file (input)>")
exit()
parts = {}
def arrayToString(array):
"""
Convert an array to a string & clean it up. The assumption is that
    no part in the LDraw library is going to have a space in the name.
"""
return re.sub(r'[^a-zA-Z0-9\.]', '_', "".join(array))
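
# Editor's note (illustrative doctest for the helper above; the part name is a
# made-up example):
#     >>> arrayToString(['3001', ' ', 'Brick 2 x 4'])
#     '3001_Brick_2_x_4'
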
class PartsFinder(datFileParser.Parser):
"""
This class finds all the parts in ldraw format file and records the
name, id and color of the parts that also exist in the ldraw parts
library.
"""
def __init__(self):
datFileParser.Parser.__init__(self, None, None)
self.sub_parts = {}
self.parts = {}
def command(self, parsed_line):
pass
def endFile(self):
pass
def line(self, parsed_line):
pass
def newFile(self, parsed_line):
ldraw_color = parsed_line[1]
ldraw_id = parsed_line[14]
part_file = None
# Try and find part in parts folder.
try:
part_file = datFileParser.findPartFile(ldraw_id)
except IOError:
pass
# If this part exists, figure out whether it is a part or a sub-part
# based on the path & add to the appropriate dictionary.
if part_file is not None:
is_part = True
if (os.path.split(os.path.split(part_file)[0])[1] != "parts"):
is_part = False
fp = open(part_file)
description = fp.readline()[2:].strip()
fp.close()
part_id = ldraw_id + "_" + ldraw_color
if is_part:
self.parts[part_id] = description
else:
self.sub_parts[part_id] = description
def optionalLine(self, parsed_line):
pass
def quadrilateral(self, parsed_line):
pass
def startFile(self, depth):
pass
def triangle(self, parsed_line):
pass
# Find all the parts.
partsFinder = PartsFinder()
datFileParser.parsePartFile(partsFinder, sys.argv[1])
print("Parts:")
for key in sorted(partsFinder.parts, key = partsFinder.parts.get):
[part_id, part_color] = key.split("_")
print(part_id[:-4] + ", " + part_color + ", " + partsFinder.parts[key])
print("\n")
print("Sub Parts:")
for key in sorted(partsFinder.sub_parts, key = partsFinder.sub_parts.get):
[part_id, part_color] = key.split("_")
print(part_id[:-4] + ", " + part_color + ", " + partsFinder.sub_parts[key])
print("\n")
#
# The MIT License
#
# Copyright (c) 2015 Hazen Babcock
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# License: mit

# -----------------------------------------------------------------------------
# File: tests/test_yaafe.py
# Repo: tectronics/timeside
# -----------------------------------------------------------------------------
#! /usr/bin/env python
from unit_timeside import *
from timeside.decoder import *
from timeside.analyzer import Yaafe
from yaafelib import DataFlow,FeaturePlan
class TestYaafe(unittest.TestCase):
def setUp(self):
self.sample_rate = 16000
def testOnSweepWithFeaturePlan(self):
"runs on sweep and define feature plan manually"
self.source = os.path.join (os.path.dirname(__file__), "samples", "sweep.wav")
# Setup Yaafe Analyzer
# Define Yaafe Feature Plan
fp = FeaturePlan(sample_rate=self.sample_rate)
# add feature definitions manually
fp.addFeature('mfcc: MFCC blockSize=512 stepSize=256')
fp.addFeature('mfcc_d1: MFCC blockSize=512 stepSize=256 > Derivate DOrder=1')
fp.addFeature('mfcc_d2: MFCC blockSize=512 stepSize=256 > Derivate DOrder=2')
# Setup a new Yaafe TimeSide analyzer
# from FeaturePlan
self.analyzer = Yaafe(fp)
# Expected Results
self.result_length = 3
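
    # Editor's note (illustrative, based on the definitions above): a Yaafe
    # feature definition line has the general form
    #     '<name>: <Feature> key=value ... [> <Transform> key=value ...]'
    # so 'mfcc_d1: MFCC blockSize=512 stepSize=256 > Derivate DOrder=1'
    # computes MFCCs and then their first-order derivative.
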
def testOnGuitarWithFeaturePlanFromFile(self):
"runs on guitar and load Yaafe feature plan from file"
self.source = os.path.join (os.path.dirname(__file__), "samples", "guitar.wav")
# Setup Yaafe Analyzer
# Load Yaafe Feature Plan
fp = FeaturePlan(sample_rate=self.sample_rate)
fp_file = os.path.join (os.path.dirname(__file__), "yaafe_config", "yaafeFeaturePlan")
fp.loadFeaturePlan(fp_file)
# Setup a new Yaafe TimeSide analyzer
# from FeaturePlan
self.analyzer = Yaafe(fp)
# Expected Results
self.result_length = 3
def testOnGuitarWithDataFlow(self):
"runs on guitar and load Yaafe dataflow from file"
self.source = os.path.join (os.path.dirname(__file__), "samples", "guitar.wav")
# Setup Yaafe Analyzer
# Load DataFlow from file
df = DataFlow()
df_file = os.path.join (os.path.dirname(__file__), "yaafe_config", "yaafeDataFlow")
df.load(df_file)
# Setup a new Yaafe TimeSide analyzer
# from DataFlow
self.analyzer = Yaafe(df)
# Expected Results
self.result_length = 5
def tearDown(self):
decoder = FileDecoder(self.source)
decoder.output_samplerate = self.sample_rate
(decoder | self.analyzer).run()
results = self.analyzer.results
self.assertEquals(self.result_length, len(results))
#print results
#print results.to_yaml()
#print results.to_json()
#print results.to_xml()
if __name__ == '__main__':
unittest.main(testRunner=TestRunner())
# License: gpl-2.0

# -----------------------------------------------------------------------------
# File: kbe/res/scripts/common/Lib/cgi.py
# Repo: harmy/kbengine
# -----------------------------------------------------------------------------
#! /usr/local/bin/python
# NOTE: the above "/usr/local/bin/python" is NOT a mistake. It is
# intentionally NOT "/usr/bin/env python". On many systems
# (e.g. Solaris), /usr/local/bin is not in $PATH as passed to CGI
# scripts, and /usr/local/bin is the default directory where Python is
# installed, so /usr/bin/env would be unable to find python. Granted,
# binary installations by Linux vendors often install Python in
# /usr/bin. So let those vendors patch cgi.py to match their choice
# of installation.
"""Support module for CGI (Common Gateway Interface) scripts.
This module defines a number of utilities for use by CGI scripts
written in Python.
"""
# History
# -------
#
# Michael McLay started this module. Steve Majewski changed the
# interface to SvFormContentDict and FormContentDict. The multipart
# parsing was inspired by code submitted by Andreas Paepcke. Guido van
# Rossum rewrote, reformatted and documented the module and is currently
# responsible for its maintenance.
#
__version__ = "2.6"
# Imports
# =======
from io import StringIO, BytesIO, TextIOWrapper
import sys
import os
import urllib.parse
from email.parser import FeedParser
from warnings import warn
import html
import locale
import tempfile
__all__ = ["MiniFieldStorage", "FieldStorage",
"parse", "parse_qs", "parse_qsl", "parse_multipart",
"parse_header", "print_exception", "print_environ",
"print_form", "print_directory", "print_arguments",
"print_environ_usage", "escape"]
# Logging support
# ===============
logfile = "" # Filename to log to, if not empty
logfp = None # File object to log to, if not None
def initlog(*allargs):
"""Write a log message, if there is a log file.
Even though this function is called initlog(), you should always
use log(); log is a variable that is set either to initlog
(initially), to dolog (once the log file has been opened), or to
nolog (when logging is disabled).
The first argument is a format string; the remaining arguments (if
any) are arguments to the % operator, so e.g.
log("%s: %s", "a", "b")
will write "a: b" to the log file, followed by a newline.
If the global logfp is not None, it should be a file object to
which log data is written.
If the global logfp is None, the global logfile may be a string
giving a filename to open, in append mode. This file should be
world writable!!! If the file can't be opened, logging is
silently disabled (since there is no safe place where we could
send an error message).
"""
global logfp, log
if logfile and not logfp:
try:
logfp = open(logfile, "a")
except IOError:
pass
if not logfp:
log = nolog
else:
log = dolog
log(*allargs)
def dolog(fmt, *args):
"""Write a log message to the log file. See initlog() for docs."""
logfp.write(fmt%args + "\n")
def nolog(*allargs):
"""Dummy function, assigned to log when logging is disabled."""
pass
log = initlog # The current logging function
# Parsing functions
# =================
# Maximum input we will accept when REQUEST_METHOD is POST
# 0 ==> unlimited input
maxlen = 0
def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
"""Parse a query in the environment or from a file (default stdin)
Arguments, all optional:
fp : file pointer; default: sys.stdin.buffer
environ : environment dictionary; default: os.environ
keep_blank_values: flag indicating whether blank values in
percent-encoded forms should be treated as blank strings.
A true value indicates that blanks should be retained as
blank strings. The default false value indicates that
blank values are to be ignored and treated as if they were
not included.
strict_parsing: flag indicating what to do with parsing errors.
If false (the default), errors are silently ignored.
If true, errors raise a ValueError exception.
"""
if fp is None:
fp = sys.stdin
# field keys and values (except for files) are returned as strings
# an encoding is required to decode the bytes read from self.fp
if hasattr(fp,'encoding'):
encoding = fp.encoding
else:
encoding = 'latin-1'
# fp.read() must return bytes
if isinstance(fp, TextIOWrapper):
fp = fp.buffer
if not 'REQUEST_METHOD' in environ:
environ['REQUEST_METHOD'] = 'GET' # For testing stand-alone
if environ['REQUEST_METHOD'] == 'POST':
ctype, pdict = parse_header(environ['CONTENT_TYPE'])
if ctype == 'multipart/form-data':
return parse_multipart(fp, pdict)
elif ctype == 'application/x-www-form-urlencoded':
clength = int(environ['CONTENT_LENGTH'])
if maxlen and clength > maxlen:
raise ValueError('Maximum content length exceeded')
qs = fp.read(clength).decode(encoding)
else:
qs = '' # Unknown content-type
if 'QUERY_STRING' in environ:
if qs: qs = qs + '&'
qs = qs + environ['QUERY_STRING']
elif sys.argv[1:]:
if qs: qs = qs + '&'
qs = qs + sys.argv[1]
environ['QUERY_STRING'] = qs # XXX Shouldn't, really
elif 'QUERY_STRING' in environ:
qs = environ['QUERY_STRING']
else:
if sys.argv[1:]:
qs = sys.argv[1]
else:
qs = ""
environ['QUERY_STRING'] = qs # XXX Shouldn't, really
return urllib.parse.parse_qs(qs, keep_blank_values, strict_parsing,
encoding=encoding)
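
# Editor's illustrative sketch (hypothetical environ; the empty BytesIO stands
# in for stdin, which is not read for GET requests):
#     >>> from io import BytesIO
#     >>> parse(BytesIO(b''), environ={'REQUEST_METHOD': 'GET',
#     ...                              'QUERY_STRING': 'a=1&a=2&b=x'})
#     {'a': ['1', '2'], 'b': ['x']}
# (key order in the returned dict may vary across Python versions)
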
# parse query string function called from urlparse,
# this is done in order to maintain backward compatibility.
def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
"""Parse a query given as a string argument."""
warn("cgi.parse_qs is deprecated, use urllib.parse.parse_qs instead",
DeprecationWarning, 2)
return urllib.parse.parse_qs(qs, keep_blank_values, strict_parsing)
def parse_qsl(qs, keep_blank_values=0, strict_parsing=0):
"""Parse a query given as a string argument."""
warn("cgi.parse_qsl is deprecated, use urllib.parse.parse_qsl instead",
DeprecationWarning, 2)
return urllib.parse.parse_qsl(qs, keep_blank_values, strict_parsing)
def parse_multipart(fp, pdict):
"""Parse multipart input.
Arguments:
fp : input file
pdict: dictionary containing other parameters of content-type header
Returns a dictionary just like parse_qs(): keys are the field names, each
value is a list of values for that field. This is easy to use but not
much good if you are expecting megabytes to be uploaded -- in that case,
use the FieldStorage class instead which is much more flexible. Note
that content-type is the raw, unparsed contents of the content-type
header.
XXX This does not parse nested multipart parts -- use FieldStorage for
that.
XXX This should really be subsumed by FieldStorage altogether -- no
point in having two implementations of the same parsing algorithm.
Also, FieldStorage protects itself better against certain DoS attacks
by limiting the size of the data read in one chunk. The API here
does not support that kind of protection. This also affects parse()
since it can call parse_multipart().
"""
import http.client
boundary = ""
if 'boundary' in pdict:
boundary = pdict['boundary']
if not valid_boundary(boundary):
raise ValueError('Invalid boundary in multipart form: %r'
% (boundary,))
nextpart = "--" + boundary
lastpart = "--" + boundary + "--"
partdict = {}
terminator = ""
while terminator != lastpart:
bytes = -1
data = None
if terminator:
# At start of next part. Read headers first.
headers = http.client.parse_headers(fp)
clength = headers.get('content-length')
if clength:
try:
bytes = int(clength)
except ValueError:
pass
if bytes > 0:
if maxlen and bytes > maxlen:
raise ValueError('Maximum content length exceeded')
data = fp.read(bytes)
else:
data = ""
# Read lines until end of part.
lines = []
while 1:
line = fp.readline()
if not line:
terminator = lastpart # End outer loop
break
if line.startswith("--"):
terminator = line.rstrip()
if terminator in (nextpart, lastpart):
break
lines.append(line)
# Done with part.
if data is None:
continue
if bytes < 0:
if lines:
# Strip final line terminator
line = lines[-1]
if line[-2:] == "\r\n":
line = line[:-2]
elif line[-1:] == "\n":
line = line[:-1]
lines[-1] = line
data = "".join(lines)
line = headers['content-disposition']
if not line:
continue
key, params = parse_header(line)
if key != 'form-data':
continue
if 'name' in params:
name = params['name']
else:
continue
if name in partdict:
partdict[name].append(data)
else:
partdict[name] = [data]
return partdict
def _parseparam(s):
while s[:1] == ';':
s = s[1:]
end = s.find(';')
while end > 0 and s.count('"', 0, end) % 2:
end = s.find(';', end + 1)
if end < 0:
end = len(s)
f = s[:end]
yield f.strip()
s = s[end:]
def parse_header(line):
"""Parse a Content-type like header.
Return the main content-type and a dictionary of options.
"""
parts = _parseparam(';' + line)
key = parts.__next__()
pdict = {}
for p in parts:
i = p.find('=')
if i >= 0:
name = p[:i].strip().lower()
value = p[i+1:].strip()
if len(value) >= 2 and value[0] == value[-1] == '"':
value = value[1:-1]
value = value.replace('\\\\', '\\').replace('\\"', '"')
pdict[name] = value
return key, pdict
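
# Editor's note (illustrative doctest):
#     >>> parse_header('text/html; charset="utf-8"')
#     ('text/html', {'charset': 'utf-8'})
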
# Classes for field storage
# =========================
class MiniFieldStorage:
"""Like FieldStorage, for use when no file uploads are possible."""
# Dummy attributes
filename = None
list = None
type = None
file = None
type_options = {}
disposition = None
disposition_options = {}
headers = {}
def __init__(self, name, value):
"""Constructor from field name and value."""
self.name = name
self.value = value
# self.file = StringIO(value)
def __repr__(self):
"""Return printable representation."""
return "MiniFieldStorage(%r, %r)" % (self.name, self.value)
class FieldStorage:
"""Store a sequence of fields, reading multipart/form-data.
This class provides naming, typing, files stored on disk, and
more. At the top level, it is accessible like a dictionary, whose
keys are the field names. (Note: None can occur as a field name.)
The items are either a Python list (if there's multiple values) or
another FieldStorage or MiniFieldStorage object. If it's a single
object, it has the following attributes:
name: the field name, if specified; otherwise None
filename: the filename, if specified; otherwise None; this is the
client side filename, *not* the file name on which it is
stored (that's a temporary file you don't deal with)
value: the value as a *string*; for file uploads, this
transparently reads the file every time you request the value
and returns *bytes*
file: the file(-like) object from which you can read the data *as
bytes* ; None if the data is stored a simple string
type: the content-type, or None if not specified
type_options: dictionary of options specified on the content-type
line
disposition: content-disposition, or None if not specified
disposition_options: dictionary of corresponding options
headers: a dictionary(-like) object (sometimes email.message.Message or a
subclass thereof) containing *all* headers
The class is subclassable, mostly for the purpose of overriding
the make_file() method, which is called internally to come up with
a file open for reading and writing. This makes it possible to
override the default choice of storing all files in a temporary
directory and unlinking them as soon as they have been opened.
"""
def __init__(self, fp=None, headers=None, outerboundary=b'',
environ=os.environ, keep_blank_values=0, strict_parsing=0,
limit=None, encoding='utf-8', errors='replace'):
"""Constructor. Read multipart/* until last part.
Arguments, all optional:
fp : file pointer; default: sys.stdin.buffer
(not used when the request method is GET)
Can be :
1. a TextIOWrapper object
2. an object whose read() and readline() methods return bytes
headers : header dictionary-like object; default:
taken from environ as per CGI spec
outerboundary : terminating multipart boundary
(for internal use only)
environ : environment dictionary; default: os.environ
keep_blank_values: flag indicating whether blank values in
percent-encoded forms should be treated as blank strings.
A true value indicates that blanks should be retained as
blank strings. The default false value indicates that
blank values are to be ignored and treated as if they were
not included.
strict_parsing: flag indicating what to do with parsing errors.
If false (the default), errors are silently ignored.
If true, errors raise a ValueError exception.
limit : used internally to read parts of multipart/form-data forms,
to exit from the reading loop when reached. It is the difference
between the form content-length and the number of bytes already
read
encoding, errors : the encoding and error handler used to decode the
binary stream to strings. Must be the same as the charset defined
for the page sending the form (content-type : meta http-equiv or
header)
"""
method = 'GET'
self.keep_blank_values = keep_blank_values
self.strict_parsing = strict_parsing
if 'REQUEST_METHOD' in environ:
method = environ['REQUEST_METHOD'].upper()
self.qs_on_post = None
if method == 'GET' or method == 'HEAD':
if 'QUERY_STRING' in environ:
qs = environ['QUERY_STRING']
elif sys.argv[1:]:
qs = sys.argv[1]
else:
qs = ""
qs = qs.encode(locale.getpreferredencoding(), 'surrogateescape')
fp = BytesIO(qs)
if headers is None:
headers = {'content-type':
"application/x-www-form-urlencoded"}
if headers is None:
headers = {}
if method == 'POST':
# Set default content-type for POST to what's traditional
headers['content-type'] = "application/x-www-form-urlencoded"
if 'CONTENT_TYPE' in environ:
headers['content-type'] = environ['CONTENT_TYPE']
if 'QUERY_STRING' in environ:
self.qs_on_post = environ['QUERY_STRING']
if 'CONTENT_LENGTH' in environ:
headers['content-length'] = environ['CONTENT_LENGTH']
if fp is None:
self.fp = sys.stdin.buffer
# self.fp.read() must return bytes
elif isinstance(fp, TextIOWrapper):
self.fp = fp.buffer
else:
self.fp = fp
self.encoding = encoding
self.errors = errors
self.headers = headers
if not isinstance(outerboundary, bytes):
raise TypeError('outerboundary must be bytes, not %s'
% type(outerboundary).__name__)
self.outerboundary = outerboundary
self.bytes_read = 0
self.limit = limit
# Process content-disposition header
cdisp, pdict = "", {}
if 'content-disposition' in self.headers:
cdisp, pdict = parse_header(self.headers['content-disposition'])
self.disposition = cdisp
self.disposition_options = pdict
self.name = None
if 'name' in pdict:
self.name = pdict['name']
self.filename = None
if 'filename' in pdict:
self.filename = pdict['filename']
self._binary_file = self.filename is not None
# Process content-type header
#
# Honor any existing content-type header. But if there is no
# content-type header, use some sensible defaults. Assume
# outerboundary is "" at the outer level, but something non-false
# inside a multi-part. The default for an inner part is text/plain,
# but for an outer part it should be urlencoded. This should catch
# bogus clients which erroneously forget to include a content-type
# header.
#
# See below for what we do if there does exist a content-type header,
# but it happens to be something we don't understand.
if 'content-type' in self.headers:
ctype, pdict = parse_header(self.headers['content-type'])
elif self.outerboundary or method != 'POST':
ctype, pdict = "text/plain", {}
else:
ctype, pdict = 'application/x-www-form-urlencoded', {}
self.type = ctype
self.type_options = pdict
if 'boundary' in pdict:
self.innerboundary = pdict['boundary'].encode(self.encoding)
else:
self.innerboundary = b""
clen = -1
if 'content-length' in self.headers:
try:
clen = int(self.headers['content-length'])
except ValueError:
pass
if maxlen and clen > maxlen:
raise ValueError('Maximum content length exceeded')
self.length = clen
if self.limit is None and clen:
self.limit = clen
self.list = self.file = None
self.done = 0
if ctype == 'application/x-www-form-urlencoded':
self.read_urlencoded()
elif ctype[:10] == 'multipart/':
self.read_multi(environ, keep_blank_values, strict_parsing)
else:
self.read_single()
def __repr__(self):
"""Return a printable representation."""
return "FieldStorage(%r, %r, %r)" % (
self.name, self.filename, self.value)
def __iter__(self):
return iter(self.keys())
def __getattr__(self, name):
if name != 'value':
raise AttributeError(name)
if self.file:
self.file.seek(0)
value = self.file.read()
self.file.seek(0)
elif self.list is not None:
value = self.list
else:
value = None
return value
def __getitem__(self, key):
"""Dictionary style indexing."""
if self.list is None:
raise TypeError("not indexable")
found = []
for item in self.list:
if item.name == key: found.append(item)
if not found:
raise KeyError(key)
if len(found) == 1:
return found[0]
else:
return found
def getvalue(self, key, default=None):
"""Dictionary style get() method, including 'value' lookup."""
if key in self:
value = self[key]
if isinstance(value, list):
return [x.value for x in value]
else:
return value.value
else:
return default
def getfirst(self, key, default=None):
""" Return the first value received."""
if key in self:
value = self[key]
if isinstance(value, list):
return value[0].value
else:
return value.value
else:
return default
def getlist(self, key):
""" Return list of received values."""
if key in self:
value = self[key]
if isinstance(value, list):
return [x.value for x in value]
else:
return [value.value]
else:
return []
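
    # Editor's note (typical CGI usage of the accessors above; illustrative,
    # the field names are hypothetical):
    #     form = FieldStorage()
    #     name = form.getfirst('name', '')   # first value, or the default
    #     tags = form.getlist('tag')         # always a list, possibly empty
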
def keys(self):
"""Dictionary style keys() method."""
if self.list is None:
raise TypeError("not indexable")
return list(set(item.name for item in self.list))
def __contains__(self, key):
"""Dictionary style __contains__ method."""
if self.list is None:
raise TypeError("not indexable")
return any(item.name == key for item in self.list)
def __len__(self):
"""Dictionary style len(x) support."""
return len(self.keys())
def __nonzero__(self):
return bool(self.list)
def read_urlencoded(self):
"""Internal: read data in query string format."""
qs = self.fp.read(self.length)
if not isinstance(qs, bytes):
raise ValueError("%s should return bytes, got %s" \
% (self.fp, type(qs).__name__))
qs = qs.decode(self.encoding, self.errors)
if self.qs_on_post:
qs += '&' + self.qs_on_post
self.list = []
query = urllib.parse.parse_qsl(
qs, self.keep_blank_values, self.strict_parsing,
encoding=self.encoding, errors=self.errors)
for key, value in query:
self.list.append(MiniFieldStorage(key, value))
self.skip_lines()
FieldStorageClass = None
def read_multi(self, environ, keep_blank_values, strict_parsing):
"""Internal: read a part that is itself multipart."""
ib = self.innerboundary
if not valid_boundary(ib):
raise ValueError('Invalid boundary in multipart form: %r' % (ib,))
self.list = []
if self.qs_on_post:
query = urllib.parse.parse_qsl(
self.qs_on_post, self.keep_blank_values, self.strict_parsing,
encoding=self.encoding, errors=self.errors)
for key, value in query:
self.list.append(MiniFieldStorage(key, value))
FieldStorageClass = None
klass = self.FieldStorageClass or self.__class__
first_line = self.fp.readline() # bytes
if not isinstance(first_line, bytes):
raise ValueError("%s should return bytes, got %s" \
% (self.fp, type(first_line).__name__))
self.bytes_read += len(first_line)
# first line holds boundary ; ignore it, or check that
# b"--" + ib == first_line.strip() ?
while True:
parser = FeedParser()
hdr_text = b""
while True:
data = self.fp.readline()
hdr_text += data
if not data.strip():
break
if not hdr_text:
break
# parser takes strings, not bytes
self.bytes_read += len(hdr_text)
parser.feed(hdr_text.decode(self.encoding, self.errors))
headers = parser.close()
part = klass(self.fp, headers, ib, environ, keep_blank_values,
strict_parsing,self.limit-self.bytes_read,
self.encoding, self.errors)
self.bytes_read += part.bytes_read
self.list.append(part)
if self.bytes_read >= self.length:
break
self.skip_lines()
def read_single(self):
"""Internal: read an atomic part."""
if self.length >= 0:
self.read_binary()
self.skip_lines()
else:
self.read_lines()
self.file.seek(0)
bufsize = 8*1024 # I/O buffering size for copy to file
def read_binary(self):
"""Internal: read binary data."""
self.file = self.make_file()
todo = self.length
if todo >= 0:
while todo > 0:
data = self.fp.read(min(todo, self.bufsize)) # bytes
if not isinstance(data, bytes):
raise ValueError("%s should return bytes, got %s"
% (self.fp, type(data).__name__))
self.bytes_read += len(data)
if not data:
self.done = -1
break
self.file.write(data)
todo = todo - len(data)
def read_lines(self):
"""Internal: read lines until EOF or outerboundary."""
if self._binary_file:
self.file = self.__file = BytesIO() # store data as bytes for files
else:
self.file = self.__file = StringIO() # as strings for other fields
if self.outerboundary:
self.read_lines_to_outerboundary()
else:
self.read_lines_to_eof()
def __write(self, line):
"""line is always bytes, not string"""
if self.__file is not None:
if self.__file.tell() + len(line) > 1000:
self.file = self.make_file()
data = self.__file.getvalue()
self.file.write(data)
self.__file = None
if self._binary_file:
# keep bytes
self.file.write(line)
else:
# decode to string
self.file.write(line.decode(self.encoding, self.errors))
def read_lines_to_eof(self):
"""Internal: read lines until EOF."""
while 1:
line = self.fp.readline(1<<16) # bytes
self.bytes_read += len(line)
if not line:
self.done = -1
break
self.__write(line)
def read_lines_to_outerboundary(self):
"""Internal: read lines until outerboundary.
Data is read as bytes: boundaries and line ends must be converted
to bytes for comparisons.
"""
next_boundary = b"--" + self.outerboundary
last_boundary = next_boundary + b"--"
delim = b""
last_line_lfend = True
_read = 0
while 1:
if _read >= self.limit:
break
line = self.fp.readline(1<<16) # bytes
self.bytes_read += len(line)
_read += len(line)
if not line:
self.done = -1
break
if line.startswith(b"--") and last_line_lfend:
strippedline = line.rstrip()
if strippedline == next_boundary:
break
if strippedline == last_boundary:
self.done = 1
break
odelim = delim
if line.endswith(b"\r\n"):
delim = b"\r\n"
line = line[:-2]
last_line_lfend = True
elif line.endswith(b"\n"):
delim = b"\n"
line = line[:-1]
last_line_lfend = True
else:
delim = b""
last_line_lfend = False
self.__write(odelim + line)
def skip_lines(self):
"""Internal: skip lines until outer boundary if defined."""
if not self.outerboundary or self.done:
return
next_boundary = b"--" + self.outerboundary
last_boundary = next_boundary + b"--"
last_line_lfend = True
while True:
line = self.fp.readline(1<<16)
self.bytes_read += len(line)
if not line:
self.done = -1
break
if line.endswith(b"--") and last_line_lfend:
strippedline = line.strip()
if strippedline == next_boundary:
break
if strippedline == last_boundary:
self.done = 1
break
last_line_lfend = line.endswith(b'\n')
def make_file(self):
"""Overridable: return a readable & writable file.
The file will be used as follows:
- data is written to it
- seek(0)
- data is read from it
The file is opened in binary mode for files, in text mode
for other fields
This version opens a temporary file for reading and writing,
and immediately deletes (unlinks) it. The trick (on Unix!) is
that the file can still be used, but it can't be opened by
another process, and it will automatically be deleted when it
is closed or when the current process terminates.
If you want a more permanent file, you derive a class which
overrides this method. If you want a visible temporary file
that is nevertheless automatically deleted when the script
terminates, try defining a __del__ method in a derived class
which unlinks the temporary files you have created.
"""
if self._binary_file:
return tempfile.TemporaryFile("wb+")
else:
return tempfile.TemporaryFile("w+",
encoding=self.encoding, newline = '\n')
# Test/debug code
# ===============
def test(environ=os.environ):
"""Robust test CGI script, usable as main program.
Write minimal HTTP headers and dump all information provided to
the script in HTML form.
"""
print("Content-type: text/html")
print()
sys.stderr = sys.stdout
try:
form = FieldStorage() # Replace with other classes to test those
print_directory()
print_arguments()
print_form(form)
print_environ(environ)
print_environ_usage()
def f():
exec("testing print_exception() -- <I>italics?</I>")
def g(f=f):
f()
print("<H3>What follows is a test, not an actual exception:</H3>")
g()
except:
print_exception()
print("<H1>Second try with a small maxlen...</H1>")
global maxlen
maxlen = 50
try:
form = FieldStorage() # Replace with other classes to test those
print_directory()
print_arguments()
print_form(form)
print_environ(environ)
except:
print_exception()
def print_exception(type=None, value=None, tb=None, limit=None):
if type is None:
type, value, tb = sys.exc_info()
import traceback
print()
print("<H3>Traceback (most recent call last):</H3>")
list = traceback.format_tb(tb, limit) + \
traceback.format_exception_only(type, value)
print("<PRE>%s<B>%s</B></PRE>" % (
html.escape("".join(list[:-1])),
html.escape(list[-1]),
))
del tb
def print_environ(environ=os.environ):
"""Dump the shell environment as HTML."""
keys = sorted(environ.keys())
print()
print("<H3>Shell Environment:</H3>")
print("<DL>")
for key in keys:
print("<DT>", html.escape(key), "<DD>", html.escape(environ[key]))
print("</DL>")
print()
def print_form(form):
"""Dump the contents of a form as HTML."""
keys = sorted(form.keys())
print()
print("<H3>Form Contents:</H3>")
if not keys:
print("<P>No form fields.")
print("<DL>")
for key in keys:
print("<DT>" + html.escape(key) + ":", end=' ')
value = form[key]
print("<i>" + html.escape(repr(type(value))) + "</i>")
print("<DD>" + html.escape(repr(value)))
print("</DL>")
print()
def print_directory():
"""Dump the current directory as HTML."""
print()
print("<H3>Current Working Directory:</H3>")
try:
pwd = os.getcwd()
except os.error as msg:
print("os.error:", html.escape(str(msg)))
else:
print(html.escape(pwd))
print()
def print_arguments():
print()
print("<H3>Command Line Arguments:</H3>")
print()
print(sys.argv)
print()
def print_environ_usage():
"""Dump a list of environment variables used by CGI as HTML."""
print("""
<H3>These environment variables could have been set:</H3>
<UL>
<LI>AUTH_TYPE
<LI>CONTENT_LENGTH
<LI>CONTENT_TYPE
<LI>DATE_GMT
<LI>DATE_LOCAL
<LI>DOCUMENT_NAME
<LI>DOCUMENT_ROOT
<LI>DOCUMENT_URI
<LI>GATEWAY_INTERFACE
<LI>LAST_MODIFIED
<LI>PATH
<LI>PATH_INFO
<LI>PATH_TRANSLATED
<LI>QUERY_STRING
<LI>REMOTE_ADDR
<LI>REMOTE_HOST
<LI>REMOTE_IDENT
<LI>REMOTE_USER
<LI>REQUEST_METHOD
<LI>SCRIPT_NAME
<LI>SERVER_NAME
<LI>SERVER_PORT
<LI>SERVER_PROTOCOL
<LI>SERVER_ROOT
<LI>SERVER_SOFTWARE
</UL>
In addition, HTTP headers sent by the server may be passed in the
environment as well. Here are some common variable names:
<UL>
<LI>HTTP_ACCEPT
<LI>HTTP_CONNECTION
<LI>HTTP_HOST
<LI>HTTP_PRAGMA
<LI>HTTP_REFERER
<LI>HTTP_USER_AGENT
</UL>
""")
# Utilities
# =========
def escape(s, quote=None):
"""Deprecated API."""
warn("cgi.escape is deprecated, use html.escape instead",
PendingDeprecationWarning, stacklevel=2)
s = s.replace("&", "&") # Must be done first!
s = s.replace("<", "<")
s = s.replace(">", ">")
if quote:
s = s.replace('"', """)
return s
def valid_boundary(s, _vb_pattern=None):
import re
if isinstance(s, bytes):
_vb_pattern = b"^[ -~]{0,200}[!-~]$"
else:
_vb_pattern = "^[ -~]{0,200}[!-~]$"
return re.match(_vb_pattern, s)
# Invoke mainline
# ===============
# Call test() when this file is run as a script (not imported as a module)
if __name__ == '__main__':
test()
# License: lgpl-3.0

# -----------------------------------------------------------------------------
# File: package/wxcef_build/pack_ardublockly_wxcef.py
# Repo: adrgerez/ardublockly
# -----------------------------------------------------------------------------
#!/usr/bin/env python2
# -*- coding: utf-8 -*- #
#
# Creates a zip file of the self executable Ardublockly application.
#
# Copyright (c) 2015 carlosperate https://github.com/carlosperate/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# IMPORTANT: This script is designed to be located two directory levels under
# the project root folder.
#
# This script will create a copy of the project folder in its parent folder.
# So, if the project folder is located in ~/projects/ardublockly, it will
# create a copy in ~/projects/ardublockly_<timestamp>_<tag>.
# It will then delete unnecessary files for a working version of the self
# executable application and zip the contents of the folder.
#
import os
import sys
import time
import shutil
import struct
import zipfile
script_tag = "[Ardublockly pack] "
script_tab = " "
# The project_root_dir depends on this file location, assumed to be two levels
# below project root, so it cannot be moved without updating this variable
project_root_dir = \
os.path.dirname( # going up 1 level
os.path.dirname( # going up 1 level
os.path.dirname(os.path.realpath(__file__)))) # folder dir of this
# This script copies the ardublockly folder with a different name on the same
# directory level to easily filter what to included in the packed version
copy_dir_name = "ardublockly_packed"
copied_project_dir = os.path.join(os.path.dirname(project_root_dir),
copy_dir_name)
def set_tag(tag):
"""
Sets the packaged zip file and copied folder tag to the input argument. So,
    the copied folder will be named "ardublockly_<tag>" and the zip file
    "ardublockly_<tag>.zip".
    :param tag: String to indicate the tag to use.
"""
print(script_tag + "Setting the Ardublockly package tag to '%s'" % tag)
global copy_dir_name
global copied_project_dir
copy_dir_name = "ardublockly_%s" % tag
copied_project_dir = os.path.join(os.path.dirname(project_root_dir),
copy_dir_name)
def copy_ardublockly_folder():
"""
Copies all the contents of the project root directory into a new folder on
the same level.
The copy operation ignores a list of directories.
:return: Boolean indicating the success state of the operation.
"""
ignore_pat = (".git*", ".svn", ".travis*", ".appveyor*", "circle.yml",
".ruby-version", "TestTemp_*", "package")
if not os.path.exists(copied_project_dir):
print(script_tab + "Copying contents of %s\n" % project_root_dir +
script_tab + "into %s" % copied_project_dir)
shutil.copytree(project_root_dir,
copied_project_dir,
symlinks=False,
ignore=shutil.ignore_patterns(*ignore_pat))
else:
print(script_tab + "ERROR: %s directory already exists!" %
copied_project_dir)
return False
return True
def remove_directory(dir_to_remove):
""" Removes the a given directory. """
if os.path.exists(dir_to_remove):
print(script_tab + "Removing directory %s" % dir_to_remove)
shutil.rmtree(dir_to_remove)
else:
print(script_tab + "Directory %s was not found." % dir_to_remove)
def zip_ardublockly_copy(name_append):
"""
Zips the contents of the copied project folder into a subdirectory of
the original project folder.
"""
zip_file_dir = os.path.join(project_root_dir, "upload")
zip_file_location = os.path.join(
zip_file_dir, "ardublockly_%s.zip" % name_append)
# First ensure the upload folder exists
if not os.path.exists(zip_file_dir):
os.makedirs(zip_file_dir)
os.chdir(os.path.dirname(project_root_dir))
print(script_tab + "Working directory changed to %s" % os.getcwd())
print(script_tab + "Zipping the contents of %s\n" % copied_project_dir +
script_tab + "into %s\n" % zip_file_location)
zip_file = zipfile.ZipFile(zip_file_location, "w", zipfile.ZIP_DEFLATED)
for dir_name, sub_dirs, files in os.walk(copy_dir_name):
zip_file.write(dir_name)
for filename in files:
zip_file.write(os.path.join(dir_name, filename))
zip_file.close()
def pack_ardublockly(tag):
# Set the copied folder name to the stamp
set_tag(tag)
print(script_tag + "Copying the project root folder:")
success = copy_ardublockly_folder()
if not success:
raise SystemExit(script_tab + "Exiting due to project root copy error.")
print(script_tag + "Removing unnecessary Blockly files:")
remove_directory(os.path.join(copied_project_dir, "blockly", "demos"))
remove_directory(os.path.join(copied_project_dir, "blockly", "appengine"))
remove_directory(os.path.join(copied_project_dir, "blockly", "tests"))
print(script_tag + "Removing an already zipped Ardublockly version:")
remove_directory(os.path.join(copied_project_dir, "upload"))
print(script_tag + "Removing CEF temporary files:")
remove_directory(os.path.join(copied_project_dir, "webcache"))
print(script_tag + "Creating zip file of the new Ardublockly folder:")
zip_ardublockly_copy(tag)
def tag_from_ci_env_vars(ci_name, pull_request_var, branch_var, commit_var):
"""
    Checks whether the CI environment variables describing a pull request,
    commit id and commit branch are present.
:return: String with the CI build information, or None if the CI
environmental variables could not be found.
"""
pull_request = os.environ.get(pull_request_var)
branch = os.environ.get(branch_var)
commit = os.environ.get(commit_var)
if pull_request and pull_request != "false":
try:
int(pull_request)
print(script_tab + "Pull request valid '%s' variable found: %s" %
(ci_name, pull_request))
return "pull_%s" % pull_request
except ValueError:
print(script_tab + "The pull request environmental variable " +
"'%s' value '%s' from %s is not a valid number." %
(pull_request_var, pull_request, ci_name))
if branch and commit:
print(script_tab + "Branch and commit valid '%s' variables found: %s %s"
% (ci_name, branch, commit))
# We only return first 10 digits from the commit ID (normal length 40)
commit = "%s" % commit
return "%s_%s" % (branch, commit[:10])
print(script_tab + "The environmental variables for %s " % ci_name +
"were deemed invalid:\n" +
script_tab + "\t%s: %s\n" % (pull_request_var, pull_request) +
script_tab + "\t%s: %s\n" % (branch_var, branch) +
script_tab + "\t%s: %s" % (commit_var, commit))
return None
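
# Editor's illustrative sketch (hypothetical environment values): with
#     TRAVIS_PULL_REQUEST=false, TRAVIS_BRANCH=master,
#     TRAVIS_COMMIT=0123456789abcdef0123456789abcdef01234567
# the call tag_from_ci_env_vars("Travis-CI", "TRAVIS_PULL_REQUEST",
# "TRAVIS_BRANCH", "TRAVIS_COMMIT") returns 'master_0123456789'
# (only the first 10 digits of the commit id are kept).
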
def get_tag():
"""
The tag will always contain the timestamp and architecture version.
If provided as a command line argument it will add an additional string,
if not it will check for environmental variables set in build servers to
create an identification tag.
:return: String with the final tag.
"""
    # All tags begin with the architecture type (based on the Python build)
    # and the current time stamp
arch_time_stamp = "%sbit_%s" % ((struct.calcsize('P') * 8),
time.strftime("%Y-%m-%d_%H.%M.%S"))
# Check if a command line argument has been given
if len(sys.argv) > 1:
# Take the first argument and use it as a tag appendage
print(script_tab + "Command line argument '%s' found and will be used "
"for package tag." % sys.argv[1])
return "%s_%s" % (arch_time_stamp, sys.argv[1])
else:
print(script_tab + "No command line argument found")
# Check for Travis-CI environmental variables to create tag appendage
print(script_tab + "Checking Travis-CI environment variables for tag:")
travis_tag = tag_from_ci_env_vars(ci_name="Travis-CI",
pull_request_var="TRAVIS_PULL_REQUEST",
branch_var="TRAVIS_BRANCH",
commit_var="TRAVIS_COMMIT")
if travis_tag:
return "%s_%s" % (arch_time_stamp, travis_tag)
# Check for AppVeyor environmental variables to create tag appendage
print(script_tab + "Checking AppVeyor environment variables for tag:")
appveyor_tag = tag_from_ci_env_vars(
ci_name="AppVeyor",
pull_request_var="APPVEYOR_PULL_REQUEST_NUMBER",
branch_var="APPVEYOR_REPO_BRANCH",
commit_var="APPVEYOR_REPO_COMMIT")
if appveyor_tag:
return "%s_%s" % (arch_time_stamp, appveyor_tag)
return arch_time_stamp
def main():
print(script_tag + "Pack Ardublockly script started.")
print(script_tag + "Checking for tag to attach to zip file:")
tag = get_tag()
pack_ardublockly(tag)
if __name__ == "__main__":
main()
# License: apache-2.0

# -----------------------------------------------------------------------------
# File: sklearn/datasets/lfw.py
# Repo: tosolveit/scikit-learn
# -----------------------------------------------------------------------------
"""Loader for the Labeled Faces in the Wild (LFW) dataset
This dataset is a collection of JPEG pictures of famous people collected
over the internet, all details are available on the official website:
http://vis-www.cs.umass.edu/lfw/
Each picture is centered on a single face. The typical task is called
Face Verification: given a pair of two pictures, a binary classifier
must predict whether the two images are from the same person.
An alternative task, Face Recognition or Face Identification is:
given the picture of the face of an unknown person, identify the name
of the person by referring to a gallery of previously seen pictures of
identified persons.
Both Face Verification and Face Recognition are tasks that are typically
performed on the output of a model trained to perform Face Detection. The
most popular model for Face Detection is called Viola-Johns and is
implemented in the OpenCV library. The LFW faces were extracted by this face
detector from various online websites.
"""
# Copyright (c) 2011 Olivier Grisel <[email protected]>
# License: BSD 3 clause
from os import listdir, makedirs, remove
from os.path import join, exists, isdir
from sklearn.utils import deprecated
import logging
import numpy as np
try:
import urllib.request as urllib # for backwards compatibility
except ImportError:
import urllib
from .base import get_data_home, Bunch
from ..externals.joblib import Memory
from ..externals.six import b
logger = logging.getLogger(__name__)
BASE_URL = "http://vis-www.cs.umass.edu/lfw/"
ARCHIVE_NAME = "lfw.tgz"
FUNNELED_ARCHIVE_NAME = "lfw-funneled.tgz"
TARGET_FILENAMES = [
'pairsDevTrain.txt',
'pairsDevTest.txt',
'pairs.txt',
]
def scale_face(face):
"""Scale back to 0-1 range in case of normalization for plotting"""
scaled = face - face.min()
scaled /= scaled.max()
return scaled
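
# Editor's note (illustrative; relies on the module-level numpy import):
# scale_face maps an arbitrary-range face array back onto [0.0, 1.0], e.g.
#     >>> scale_face(np.array([-1.0, 0.0, 3.0]))   # -> array([0., 0.25, 1.])
# (exact repr formatting varies across numpy versions)
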
#
# Common private utilities for data fetching from the original LFW website
# local disk caching, and image decoding.
#
def check_fetch_lfw(data_home=None, funneled=True, download_if_missing=True):
"""Helper function to download any missing LFW data"""
data_home = get_data_home(data_home=data_home)
lfw_home = join(data_home, "lfw_home")
if funneled:
archive_path = join(lfw_home, FUNNELED_ARCHIVE_NAME)
data_folder_path = join(lfw_home, "lfw_funneled")
archive_url = BASE_URL + FUNNELED_ARCHIVE_NAME
else:
archive_path = join(lfw_home, ARCHIVE_NAME)
data_folder_path = join(lfw_home, "lfw")
archive_url = BASE_URL + ARCHIVE_NAME
if not exists(lfw_home):
makedirs(lfw_home)
for target_filename in TARGET_FILENAMES:
target_filepath = join(lfw_home, target_filename)
if not exists(target_filepath):
if download_if_missing:
url = BASE_URL + target_filename
logger.warning("Downloading LFW metadata: %s", url)
urllib.urlretrieve(url, target_filepath)
else:
raise IOError("%s is missing" % target_filepath)
if not exists(data_folder_path):
if not exists(archive_path):
if download_if_missing:
logger.warning("Downloading LFW data (~200MB): %s", archive_url)
urllib.urlretrieve(archive_url, archive_path)
else:
raise IOError("%s is missing" % target_filepath)
import tarfile
logger.info("Decompressing the data archive to %s", data_folder_path)
tarfile.open(archive_path, "r:gz").extractall(path=lfw_home)
remove(archive_path)
return lfw_home, data_folder_path
def _load_imgs(file_paths, slice_, color, resize):
"""Internally used to load images"""
# Try to import imread and imresize from PIL. We do this here to prevent
# the whole sklearn.datasets module from depending on PIL.
try:
try:
from scipy.misc import imread
except ImportError:
from scipy.misc.pilutil import imread
from scipy.misc import imresize
except ImportError:
raise ImportError("The Python Imaging Library (PIL)"
" is required to load data from jpeg files")
# compute the portion of the images to load to respect the slice_ parameter
# given by the caller
default_slice = (slice(0, 250), slice(0, 250))
if slice_ is None:
slice_ = default_slice
else:
slice_ = tuple(s or ds for s, ds in zip(slice_, default_slice))
h_slice, w_slice = slice_
h = (h_slice.stop - h_slice.start) // (h_slice.step or 1)
w = (w_slice.stop - w_slice.start) // (w_slice.step or 1)
if resize is not None:
resize = float(resize)
h = int(resize * h)
w = int(resize * w)
# allocate some contiguous memory to host the decoded image slices
n_faces = len(file_paths)
if not color:
faces = np.zeros((n_faces, h, w), dtype=np.float32)
else:
faces = np.zeros((n_faces, h, w, 3), dtype=np.float32)
# iterate over the collected file path to load the jpeg files as numpy
# arrays
for i, file_path in enumerate(file_paths):
if i % 1000 == 0:
logger.info("Loading face #%05d / %05d", i + 1, n_faces)
# Checks if jpeg reading worked. Refer to issue #3594 for more
# details.
img = imread(file_path)
if img.ndim is 0:
raise RuntimeError("Failed to read the image file %s, "
"Please make sure that libjpeg is installed"
% file_path)
face = np.asarray(img[slice_], dtype=np.float32)
face /= 255.0 # scale uint8 coded colors to the [0.0, 1.0] floats
if resize is not None:
face = imresize(face, resize)
if not color:
            # average the color channels to compute a gray-levels
            # representation
face = face.mean(axis=2)
faces[i, ...] = face
return faces
#
# Task #1: Face Identification on picture with names
#
def _fetch_lfw_people(data_folder_path, slice_=None, color=False, resize=None,
min_faces_per_person=0):
"""Perform the actual data loading for the lfw people dataset
This operation is meant to be cached by a joblib wrapper.
"""
# scan the data folder content to retain people with more that
# `min_faces_per_person` face pictures
person_names, file_paths = [], []
for person_name in sorted(listdir(data_folder_path)):
folder_path = join(data_folder_path, person_name)
if not isdir(folder_path):
continue
paths = [join(folder_path, f) for f in listdir(folder_path)]
n_pictures = len(paths)
if n_pictures >= min_faces_per_person:
person_name = person_name.replace('_', ' ')
person_names.extend([person_name] * n_pictures)
file_paths.extend(paths)
n_faces = len(file_paths)
if n_faces == 0:
raise ValueError("min_faces_per_person=%d is too restrictive" %
min_faces_per_person)
target_names = np.unique(person_names)
target = np.searchsorted(target_names, person_names)
faces = _load_imgs(file_paths, slice_, color, resize)
# shuffle the faces with a deterministic RNG scheme to avoid having
# all faces of the same person in a row, as it would break some
# cross validation and learning algorithms such as SGD and online
# k-means that make an IID assumption
indices = np.arange(n_faces)
np.random.RandomState(42).shuffle(indices)
faces, target = faces[indices], target[indices]
return faces, target, target_names
def fetch_lfw_people(data_home=None, funneled=True, resize=0.5,
min_faces_per_person=0, color=False,
slice_=(slice(70, 195), slice(78, 172)),
download_if_missing=True):
"""Loader for the Labeled Faces in the Wild (LFW) people dataset
This dataset is a collection of JPEG pictures of famous people
collected on the internet, all details are available on the
official website:
http://vis-www.cs.umass.edu/lfw/
Each picture is centered on a single face. Each pixel of each channel
(color in RGB) is encoded by a float in range 0.0 - 1.0.
The task is called Face Recognition (or Identification): given the
picture of a face, find the name of the person given a training set
(gallery).
    The original images are 250 x 250 pixels, but the default slice and resize
    arguments reduce them to 62 x 47.
Parameters
----------
data_home : optional, default: None
Specify another download and cache folder for the datasets. By default
all scikit learn data is stored in '~/scikit_learn_data' subfolders.
funneled : boolean, optional, default: True
Download and use the funneled variant of the dataset.
resize : float, optional, default 0.5
        Ratio used to resize each face picture.
    min_faces_per_person : int, optional, default 0
The extracted dataset will only retain pictures of people that have at
least `min_faces_per_person` different pictures.
color : boolean, optional, default False
Keep the 3 RGB channels instead of averaging them to a single
gray level channel. If color is True the shape of the data has
        one more dimension than the shape with color = False.
slice_ : optional
Provide a custom 2D slice (height, width) to extract the
        'interesting' part of the jpeg files and avoid spurious statistical
        correlation with the background
download_if_missing : optional, True by default
        If False, raise an IOError if the data is not locally available
instead of trying to download the data from the source site.
Returns
-------
dataset : dict-like object with the following attributes:
dataset.data : numpy array of shape (13233, 2914)
Each row corresponds to a ravelled face image of original size 62 x 47
pixels. Changing the ``slice_`` or resize parameters will change the shape
of the output.
dataset.images : numpy array of shape (13233, 62, 47)
Each row is a face image corresponding to one of the 5749 people in
the dataset. Changing the ``slice_`` or resize parameters will change the shape
of the output.
dataset.target : numpy array of shape (13233,)
Labels associated to each face image. Those labels range from 0-5748
and correspond to the person IDs.
dataset.DESCR : string
Description of the Labeled Faces in the Wild (LFW) dataset.
"""
lfw_home, data_folder_path = check_fetch_lfw(
data_home=data_home, funneled=funneled,
download_if_missing=download_if_missing)
logger.info('Loading LFW people faces from %s', lfw_home)
# wrap the loader in a memoizing function that will return memmaped data
# arrays for optimal memory usage
m = Memory(cachedir=lfw_home, compress=6, verbose=0)
load_func = m.cache(_fetch_lfw_people)
# load and memoize the pairs as np arrays
faces, target, target_names = load_func(
data_folder_path, resize=resize,
min_faces_per_person=min_faces_per_person, color=color, slice_=slice_)
# pack the results as a Bunch instance
return Bunch(data=faces.reshape(len(faces), -1), images=faces,
target=target, target_names=target_names,
DESCR="LFW faces dataset")
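# A minimal usage sketch (illustrative parameter values; the first call
# triggers a download of the LFW archive unless it is already cached under
# data_home):
#
#     lfw_people = fetch_lfw_people(min_faces_per_person=70, resize=0.4)
#     X, y = lfw_people.data, lfw_people.target
#     names = lfw_people.target_names[y]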
#
# Task #2: Face Verification on pairs of face pictures
#
def _fetch_lfw_pairs(index_file_path, data_folder_path, slice_=None,
color=False, resize=None):
"""Perform the actual data loading for the LFW pairs dataset
This operation is meant to be cached by a joblib wrapper.
"""
# parse the index file to find the number of pairs to be able to allocate
# the right amount of memory before starting to decode the jpeg files
with open(index_file_path, 'rb') as index_file:
split_lines = [ln.strip().split(b('\t')) for ln in index_file]
pair_specs = [sl for sl in split_lines if len(sl) > 2]
n_pairs = len(pair_specs)
    # iterating over the metadata lines for each pair to find the filename to
# decode and load in memory
target = np.zeros(n_pairs, dtype=np.int)
file_paths = list()
for i, components in enumerate(pair_specs):
if len(components) == 3:
target[i] = 1
pair = (
(components[0], int(components[1]) - 1),
(components[0], int(components[2]) - 1),
)
elif len(components) == 4:
target[i] = 0
pair = (
(components[0], int(components[1]) - 1),
(components[2], int(components[3]) - 1),
)
else:
raise ValueError("invalid line %d: %r" % (i + 1, components))
for j, (name, idx) in enumerate(pair):
try:
person_folder = join(data_folder_path, name)
except TypeError:
person_folder = join(data_folder_path, str(name, 'UTF-8'))
filenames = list(sorted(listdir(person_folder)))
file_path = join(person_folder, filenames[idx])
file_paths.append(file_path)
pairs = _load_imgs(file_paths, slice_, color, resize)
shape = list(pairs.shape)
n_faces = shape.pop(0)
shape.insert(0, 2)
shape.insert(0, n_faces // 2)
pairs.shape = shape
return pairs, target, np.array(['Different persons', 'Same person'])
@deprecated("Function 'load_lfw_people' has been deprecated in 0.17 and will be "
"removed in 0.19."
"Use fetch_lfw_people(download_if_missing=False) instead.")
def load_lfw_people(download_if_missing=False, **kwargs):
"""Alias for fetch_lfw_people(download_if_missing=False)
Check fetch_lfw_people.__doc__ for the documentation and parameter list.
"""
return fetch_lfw_people(download_if_missing=download_if_missing, **kwargs)
def fetch_lfw_pairs(subset='train', data_home=None, funneled=True, resize=0.5,
color=False, slice_=(slice(70, 195), slice(78, 172)),
download_if_missing=True):
"""Loader for the Labeled Faces in the Wild (LFW) pairs dataset
This dataset is a collection of JPEG pictures of famous people
collected on the internet, all details are available on the
official website:
http://vis-www.cs.umass.edu/lfw/
Each picture is centered on a single face. Each pixel of each channel
(color in RGB) is encoded by a float in range 0.0 - 1.0.
The task is called Face Verification: given a pair of two pictures,
a binary classifier must predict whether the two images are from
the same person.
In the official `README.txt`_ this task is described as the
"Restricted" task. As I am not sure as to implement the
"Unrestricted" variant correctly, I left it as unsupported for now.
.. _`README.txt`: http://vis-www.cs.umass.edu/lfw/README.txt
    The original images are 250 x 250 pixels, but the default slice and resize
    arguments reduce them to 62 x 47.
Read more in the :ref:`User Guide <labeled_faces_in_the_wild>`.
Parameters
----------
subset : optional, default: 'train'
Select the dataset to load: 'train' for the development training
set, 'test' for the development test set, and '10_folds' for the
official evaluation set that is meant to be used with a 10-folds
cross validation.
data_home : optional, default: None
Specify another download and cache folder for the datasets. By
default all scikit learn data is stored in '~/scikit_learn_data'
subfolders.
funneled : boolean, optional, default: True
Download and use the funneled variant of the dataset.
resize : float, optional, default 0.5
        Ratio used to resize each face picture.
color : boolean, optional, default False
Keep the 3 RGB channels instead of averaging them to a single
gray level channel. If color is True the shape of the data has
        one more dimension than the shape with color = False.
slice_ : optional
Provide a custom 2D slice (height, width) to extract the
        'interesting' part of the jpeg files and avoid spurious statistical
        correlation with the background
download_if_missing : optional, True by default
        If False, raise an IOError if the data is not locally available
instead of trying to download the data from the source site.
Returns
-------
The data is returned as a Bunch object with the following attributes:
data : numpy array of shape (2200, 5828)
Each row corresponds to 2 ravel'd face images of original size 62 x 47
pixels. Changing the ``slice_`` or resize parameters will change the shape
of the output.
pairs : numpy array of shape (2200, 2, 62, 47)
Each row has 2 face images corresponding to same or different person
from the dataset containing 5749 people. Changing the ``slice_`` or resize
parameters will change the shape of the output.
    target : numpy array of shape (2200,)
Labels associated to each pair of images. The two label values being
different persons or the same person.
DESCR : string
Description of the Labeled Faces in the Wild (LFW) dataset.
"""
lfw_home, data_folder_path = check_fetch_lfw(
data_home=data_home, funneled=funneled,
download_if_missing=download_if_missing)
logger.info('Loading %s LFW pairs from %s', subset, lfw_home)
# wrap the loader in a memoizing function that will return memmaped data
# arrays for optimal memory usage
m = Memory(cachedir=lfw_home, compress=6, verbose=0)
load_func = m.cache(_fetch_lfw_pairs)
# select the right metadata file according to the requested subset
label_filenames = {
'train': 'pairsDevTrain.txt',
'test': 'pairsDevTest.txt',
'10_folds': 'pairs.txt',
}
if subset not in label_filenames:
raise ValueError("subset='%s' is invalid: should be one of %r" % (
subset, list(sorted(label_filenames.keys()))))
index_file_path = join(lfw_home, label_filenames[subset])
# load and memoize the pairs as np arrays
pairs, target, target_names = load_func(
index_file_path, data_folder_path, resize=resize, color=color,
slice_=slice_)
# pack the results as a Bunch instance
return Bunch(data=pairs.reshape(len(pairs), -1), pairs=pairs,
target=target, target_names=target_names,
DESCR="'%s' segment of the LFW pairs dataset" % subset)
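# A minimal usage sketch (illustrative; same download/caching caveats as
# fetch_lfw_people):
#
#     pairs_train = fetch_lfw_pairs(subset='train')
#     X = pairs_train.data      # one ravelled image pair per row
#     y = pairs_train.target    # 1 = same person, 0 = different persons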
@deprecated("Function 'load_lfw_pairs' has been deprecated in 0.17 and will be "
"removed in 0.19."
"Use fetch_lfw_pairs(download_if_missing=False) instead.")
def load_lfw_pairs(download_if_missing=False, **kwargs):
"""Alias for fetch_lfw_pairs(download_if_missing=False)
Check fetch_lfw_pairs.__doc__ for the documentation and parameter list.
"""
return fetch_lfw_pairs(download_if_missing=download_if_missing, **kwargs)
| bsd-3-clause | -7,925,854,865,507,911,000 | 36.90998 | 87 | 0.653108 | false |
illicitonion/givabit | lib/sdks/google_appengine_1.7.1/google_appengine/lib/django_1_2/tests/modeltests/validation/tests.py | 50 | 5106 | from django import forms
from django.test import TestCase
from django.core.exceptions import NON_FIELD_ERRORS
from modeltests.validation import ValidationTestCase
from modeltests.validation.models import Author, Article, ModelToValidate
# Import other tests for this package.
from modeltests.validation.validators import TestModelsWithValidators
from modeltests.validation.test_unique import GetUniqueCheckTests, PerformUniqueChecksTest
from modeltests.validation.test_custom_messages import CustomMessagesTest
class BaseModelValidationTests(ValidationTestCase):
def test_missing_required_field_raises_error(self):
mtv = ModelToValidate(f_with_custom_validator=42)
self.assertFailsValidation(mtv.full_clean, ['name', 'number'])
def test_with_correct_value_model_validates(self):
mtv = ModelToValidate(number=10, name='Some Name')
self.assertEqual(None, mtv.full_clean())
def test_custom_validate_method(self):
mtv = ModelToValidate(number=11)
self.assertFailsValidation(mtv.full_clean, [NON_FIELD_ERRORS, 'name'])
def test_wrong_FK_value_raises_error(self):
mtv=ModelToValidate(number=10, name='Some Name', parent_id=3)
self.assertFailsValidation(mtv.full_clean, ['parent'])
def test_correct_FK_value_validates(self):
parent = ModelToValidate.objects.create(number=10, name='Some Name')
mtv = ModelToValidate(number=10, name='Some Name', parent_id=parent.pk)
self.assertEqual(None, mtv.full_clean())
    def test_limited_FK_raises_error(self):
# The limit_choices_to on the parent field says that a parent object's
# number attribute must be 10, so this should fail validation.
parent = ModelToValidate.objects.create(number=11, name='Other Name')
mtv = ModelToValidate(number=10, name='Some Name', parent_id=parent.pk)
self.assertFailsValidation(mtv.full_clean, ['parent'])
def test_wrong_email_value_raises_error(self):
mtv = ModelToValidate(number=10, name='Some Name', email='not-an-email')
self.assertFailsValidation(mtv.full_clean, ['email'])
def test_correct_email_value_passes(self):
mtv = ModelToValidate(number=10, name='Some Name', email='[email protected]')
self.assertEqual(None, mtv.full_clean())
def test_wrong_url_value_raises_error(self):
mtv = ModelToValidate(number=10, name='Some Name', url='not a url')
self.assertFieldFailsValidationWithMessage(mtv.full_clean, 'url', [u'Enter a valid value.'])
def test_correct_url_but_nonexisting_gives_404(self):
mtv = ModelToValidate(number=10, name='Some Name', url='http://google.com/we-love-microsoft.html')
self.assertFieldFailsValidationWithMessage(mtv.full_clean, 'url', [u'This URL appears to be a broken link.'])
def test_correct_url_value_passes(self):
mtv = ModelToValidate(number=10, name='Some Name', url='http://www.djangoproject.com/')
self.assertEqual(None, mtv.full_clean()) # This will fail if there's no Internet connection
    def test_text_greater_than_charfields_max_length_raises_errors(self):
mtv = ModelToValidate(number=10, name='Some Name'*100)
self.assertFailsValidation(mtv.full_clean, ['name',])
class ArticleForm(forms.ModelForm):
class Meta:
model = Article
exclude = ['author']
class ModelFormsTests(TestCase):
def setUp(self):
self.author = Author.objects.create(name='Joseph Kocherhans')
def test_partial_validation(self):
# Make sure the "commit=False and set field values later" idiom still
# works with model validation.
data = {
'title': 'The state of model validation',
'pub_date': '2010-1-10 14:49:00'
}
form = ArticleForm(data)
self.assertEqual(form.errors.keys(), [])
article = form.save(commit=False)
article.author = self.author
article.save()
def test_validation_with_empty_blank_field(self):
# Since a value for pub_date wasn't provided and the field is
# blank=True, model-validation should pass.
# Also, Article.clean() should be run, so pub_date will be filled after
# validation, so the form should save cleanly even though pub_date is
# not allowed to be null.
data = {
'title': 'The state of model validation',
}
article = Article(author_id=self.author.id)
form = ArticleForm(data, instance=article)
self.assertEqual(form.errors.keys(), [])
self.assertNotEqual(form.instance.pub_date, None)
article = form.save()
def test_validation_with_invalid_blank_field(self):
# Even though pub_date is set to blank=True, an invalid value was
# provided, so it should fail validation.
data = {
'title': 'The state of model validation',
'pub_date': 'never'
}
article = Article(author_id=self.author.id)
form = ArticleForm(data, instance=article)
self.assertEqual(form.errors.keys(), ['pub_date'])
| apache-2.0 | -1,848,085,657,245,873,700 | 43.789474 | 117 | 0.680572 | false |
b3j0f/schema | b3j0f/schema/test/registry.py | 1 | 6783 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2016 Jonathan Labéjof <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# --------------------------------------------------------------------
from unittest import main
from b3j0f.utils.ut import UTCase
from ..base import Schema
from ..registry import (
SchemaRegistry, registercls, getbydatatype, unregistercls
)
from uuid import uuid4
from numbers import Number
class AAA(object):
pass
class UpdateContentTest(UTCase):
def setUp(self):
class AAA(object):
pass
self.AAA = AAA
class BBB(object):
pass
self.BBB = BBB
class CCC(object):
pass
self.CCC = CCC
@registercls([AAA])
class AAASchema(Schema):
def _validate(self, data, *args, **kwargs):
return isinstance(data, AAA)
self.AAASchema = AAASchema
@registercls([BBB])
class BBBSchema(Schema):
def _validate(self, data, *args, **kwargs):
return isinstance(data, BBB)
self.BBBSchema = BBBSchema
@registercls([CCC])
class CCCSchema(Schema):
def _validate(self, data, *args, **kwargs):
return isinstance(data, CCC)
self.CCCSchema = CCCSchema
def tearDown(self):
unregistercls(self.AAASchema)
unregistercls(self.BBBSchema)
unregistercls(self.CCCSchema)
def test_number(self):
schemacls = getbydatatype(self.AAA)
self.assertIs(schemacls, self.AAASchema)
def test_str(self):
schemacls = getbydatatype(self.BBB)
self.assertIs(schemacls, self.BBBSchema)
def test_object(self):
schemacls = getbydatatype(self.CCC)
self.assertIs(schemacls, self.CCCSchema)
class DefaultTest(UTCase):
def test(self):
class TestSchema(Schema):
default = 0
schema = TestSchema()
self.assertEqual(schema.default, 0)
schema = TestSchema(default=None)
self.assertIsNone(schema._default_)
class TestSchema(Schema):
def __init__(
self, name=None, uuid=None, _=None, default=None, *args, **kwargs
):
super(TestSchema, self).__init__(*args, **kwargs)
self.name = name or TestSchema.__name__
self.uuid = str(uuid or uuid4())
self._testschema = _ or TestSchema(_=self)
self.default = default
def __hash__(self):
return hash(self.uuid)
def getschemas(self):
return {'one': self, 'two': self._testschema}
class SchemaRegistryTest(UTCase):
def setUp(self):
self.registry = SchemaRegistry()
self.schemas = set([TestSchema() for i in range(5)])
def test_init(self):
schemaregistry = SchemaRegistry()
self.assertFalse(schemaregistry._schbyname)
self.assertFalse(schemaregistry._schbyuuid)
self.assertFalse(schemaregistry._schbytype)
def test_init_w_params(self):
schemaregistry = SchemaRegistry(schbyname=2, schbyuuid=3, schbytype=4)
self.assertEqual(schemaregistry._schbyname, 2)
self.assertEqual(schemaregistry._schbyuuid, 3)
self.assertEqual(schemaregistry._schbytype, 4)
def test_register(self):
for schema in self.schemas:
self.registry.register(schema)
schemas = self.registry.getbyname(TestSchema.__name__)
self.assertEqual(schemas, self.schemas)
for schema in self.schemas:
uuid = schema.uuid
_schema = self.registry.getbyuuid(uuid)
self.assertEqual(schema, _schema)
self.registry.unregister(uuid)
self.assertRaises(KeyError, self.registry.getbyuuid, uuid)
def test_registertype(self):
class Schema(object):
def __init__(self, default, *args, **kwargs):
super(Schema, self).__init__(*args, **kwargs)
self.default = default
class IntSchema(Schema):
pass
class BoolSchema(Schema):
pass
class AAASchema(Schema):
pass
self.registry.registercls(schemacls=IntSchema, data_types=[int])
self.registry.registercls(schemacls=BoolSchema, data_types=[bool])
self.registry.registercls(schemacls=AAASchema, data_types=[Number])
schemacls = self.registry.getbydatatype(int)
self.assertIs(schemacls, IntSchema)
schemacls = self.registry.getbydatatype(bool)
self.assertIs(schemacls, BoolSchema)
self.registry.unregistercls(schemacls=IntSchema)
schemacls = self.registry.getbydatatype(int)
self.assertIs(schemacls, AAASchema)
def test_registertype_decorator(self):
class Schema(object):
def __init__(self, default, *args, **kwargs):
super(Schema, self).__init__(*args, **kwargs)
self.default = default
@self.registry.registercls([int])
class IntSchema(Schema):
pass
@self.registry.registercls([bool])
class BoolSchema(Schema):
pass
@self.registry.registercls([Number])
class AAASchema(Schema):
pass
schemacls = self.registry.getbydatatype(int)
self.assertIs(schemacls, IntSchema)
schemacls = self.registry.getbydatatype(bool)
self.assertIs(schemacls, BoolSchema)
self.registry.unregistercls(schemacls=IntSchema)
schemacls = self.registry.getbydatatype(int)
self.assertIs(schemacls, AAASchema)
if __name__ == '__main__':
main()
| mit | -3,113,530,968,089,494,000 | 25.912698 | 79 | 0.625774 | false |
memakura/s2a_fm | Snap!Files/Snap!Mobile/arduino/serial/urlhandler/protocol_loop.py | 141 | 9516 | #! python
#
# Python Serial Port Extension for Win32, Linux, BSD, Jython
# see __init__.py
#
# This module implements a loop back connection receiving itself what it sent.
#
# The purpose of this module is.. well... You can run the unit tests with it.
# and it was so easy to implement ;-)
#
# (C) 2001-2011 Chris Liechti <[email protected]>
# this is distributed under a free software license, see license.txt
#
# URL format: loop://[option[/option...]]
# options:
# - "debug" print diagnostic messages
from serial.serialutil import *
import threading
import time
import logging
# map log level names to constants. used in fromURL()
LOGGER_LEVELS = {
'debug': logging.DEBUG,
'info': logging.INFO,
'warning': logging.WARNING,
'error': logging.ERROR,
}
class LoopbackSerial(SerialBase):
"""Serial port implementation that simulates a loop back connection in plain software."""
BAUDRATES = (50, 75, 110, 134, 150, 200, 300, 600, 1200, 1800, 2400, 4800,
9600, 19200, 38400, 57600, 115200)
def open(self):
"""Open port with current settings. This may throw a SerialException
if the port cannot be opened."""
if self._isOpen:
raise SerialException("Port is already open.")
self.logger = None
self.buffer_lock = threading.Lock()
self.loop_buffer = bytearray()
self.cts = False
self.dsr = False
if self._port is None:
raise SerialException("Port must be configured before it can be used.")
# not that there is anything to open, but the function applies the
# options found in the URL
self.fromURL(self.port)
# not that there anything to configure...
self._reconfigurePort()
# all things set up get, now a clean start
self._isOpen = True
if not self._rtscts:
self.setRTS(True)
self.setDTR(True)
self.flushInput()
self.flushOutput()
def _reconfigurePort(self):
"""Set communication parameters on opened port. for the loop://
protocol all settings are ignored!"""
# not that's it of any real use, but it helps in the unit tests
if not isinstance(self._baudrate, (int, long)) or not 0 < self._baudrate < 2**32:
raise ValueError("invalid baudrate: %r" % (self._baudrate))
if self.logger:
self.logger.info('_reconfigurePort()')
def close(self):
"""Close port"""
if self._isOpen:
self._isOpen = False
# in case of quick reconnects, give the server some time
time.sleep(0.3)
def makeDeviceName(self, port):
raise SerialException("there is no sensible way to turn numbers into URLs")
def fromURL(self, url):
"""extract host and port from an URL string"""
if url.lower().startswith("loop://"): url = url[7:]
try:
# process options now, directly altering self
for option in url.split('/'):
if '=' in option:
option, value = option.split('=', 1)
else:
value = None
if not option:
pass
elif option == 'logging':
logging.basicConfig() # XXX is that good to call it here?
self.logger = logging.getLogger('pySerial.loop')
self.logger.setLevel(LOGGER_LEVELS[value])
self.logger.debug('enabled logging')
else:
raise ValueError('unknown option: %r' % (option,))
except ValueError, e:
raise SerialException('expected a string in the form "[loop://][option[/option...]]": %s' % e)
# - - - - - - - - - - - - - - - - - - - - - - - -
def inWaiting(self):
"""Return the number of characters currently in the input buffer."""
if not self._isOpen: raise portNotOpenError
if self.logger:
# attention the logged value can differ from return value in
# threaded environments...
self.logger.debug('inWaiting() -> %d' % (len(self.loop_buffer),))
return len(self.loop_buffer)
def read(self, size=1):
"""Read size bytes from the serial port. If a timeout is set it may
return less characters as requested. With no timeout it will block
until the requested number of bytes is read."""
if not self._isOpen: raise portNotOpenError
if self._timeout is not None:
timeout = time.time() + self._timeout
else:
timeout = None
data = bytearray()
while size > 0:
self.buffer_lock.acquire()
try:
block = to_bytes(self.loop_buffer[:size])
del self.loop_buffer[:size]
finally:
self.buffer_lock.release()
data += block
size -= len(block)
# check for timeout now, after data has been read.
# useful for timeout = 0 (non blocking) read
if timeout and time.time() > timeout:
break
return bytes(data)
def write(self, data):
"""Output the given string over the serial port. Can block if the
connection is blocked. May raise SerialException if the connection is
closed."""
if not self._isOpen: raise portNotOpenError
# ensure we're working with bytes
data = to_bytes(data)
        # calculate the approximate time that would be used to send the data
time_used_to_send = 10.0*len(data) / self._baudrate
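        # (roughly 10 bits per byte on the wire: start bit + 8 data bits +
        # stop bit; e.g. 960 bytes at 9600 baud take about one second)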
# when a write timeout is configured check if we would be successful
# (not sending anything, not even the part that would have time)
if self._writeTimeout is not None and time_used_to_send > self._writeTimeout:
time.sleep(self._writeTimeout) # must wait so that unit test succeeds
raise writeTimeoutError
self.buffer_lock.acquire()
try:
self.loop_buffer += data
finally:
self.buffer_lock.release()
return len(data)
def flushInput(self):
"""Clear input buffer, discarding all that is in the buffer."""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('flushInput()')
self.buffer_lock.acquire()
try:
del self.loop_buffer[:]
finally:
self.buffer_lock.release()
def flushOutput(self):
"""Clear output buffer, aborting the current output and
discarding all that is in the buffer."""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('flushOutput()')
def sendBreak(self, duration=0.25):
"""Send break condition. Timed, returns to idle state after given
duration."""
if not self._isOpen: raise portNotOpenError
def setBreak(self, level=True):
"""Set break: Controls TXD. When active, to transmitting is
possible."""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('setBreak(%r)' % (level,))
def setRTS(self, level=True):
"""Set terminal status line: Request To Send"""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('setRTS(%r) -> state of CTS' % (level,))
self.cts = level
def setDTR(self, level=True):
"""Set terminal status line: Data Terminal Ready"""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('setDTR(%r) -> state of DSR' % (level,))
self.dsr = level
def getCTS(self):
"""Read terminal status line: Clear To Send"""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('getCTS() -> state of RTS (%r)' % (self.cts,))
return self.cts
def getDSR(self):
"""Read terminal status line: Data Set Ready"""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('getDSR() -> state of DTR (%r)' % (self.dsr,))
return self.dsr
def getRI(self):
"""Read terminal status line: Ring Indicator"""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('returning dummy for getRI()')
return False
def getCD(self):
"""Read terminal status line: Carrier Detect"""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('returning dummy for getCD()')
return True
# - - - platform specific - - -
# None so far
# assemble Serial class with the platform specific implementation and the base
# for file-like behavior. for Python 2.6 and newer, that provide the new I/O
# library, derive from io.RawIOBase
try:
import io
except ImportError:
# classic version with our own file-like emulation
class Serial(LoopbackSerial, FileLike):
pass
else:
# io library present
class Serial(LoopbackSerial, io.RawIOBase):
pass
# simple client test
if __name__ == '__main__':
import sys
s = Serial('loop://')
sys.stdout.write('%s\n' % s)
sys.stdout.write("write...\n")
s.write("hello\n")
s.flush()
sys.stdout.write("read: %s\n" % s.read(5))
s.close()
| gpl-3.0 | -4,304,743,622,947,002,000 | 34.909434 | 106 | 0.589533 | false |
pleaseproject/python-for-android | python3-alpha/python3-src/Lib/wsgiref/simple_server.py | 51 | 4859 | """BaseHTTPServer that implements the Python WSGI protocol (PEP 3333)
This is both an example of how WSGI can be implemented, and a basis for running
simple web applications on a local machine, such as might be done when testing
or debugging an application. It has not been reviewed for security issues,
however, and we strongly recommend that you use a "real" web server for
production use.
For example usage, see the 'if __name__=="__main__"' block at the end of the
module. See also the BaseHTTPServer module docs for other API information.
"""
from http.server import BaseHTTPRequestHandler, HTTPServer
import sys
import urllib.parse
from wsgiref.handlers import SimpleHandler
__version__ = "0.2"
__all__ = ['WSGIServer', 'WSGIRequestHandler', 'demo_app', 'make_server']
server_version = "WSGIServer/" + __version__
sys_version = "Python/" + sys.version.split()[0]
software_version = server_version + ' ' + sys_version
class ServerHandler(SimpleHandler):
server_software = software_version
def close(self):
try:
self.request_handler.log_request(
self.status.split(' ',1)[0], self.bytes_sent
)
finally:
SimpleHandler.close(self)
class WSGIServer(HTTPServer):
"""BaseHTTPServer that implements the Python WSGI protocol"""
application = None
def server_bind(self):
"""Override server_bind to store the server name."""
HTTPServer.server_bind(self)
self.setup_environ()
def setup_environ(self):
# Set up base environment
env = self.base_environ = {}
env['SERVER_NAME'] = self.server_name
env['GATEWAY_INTERFACE'] = 'CGI/1.1'
env['SERVER_PORT'] = str(self.server_port)
env['REMOTE_HOST']=''
env['CONTENT_LENGTH']=''
env['SCRIPT_NAME'] = ''
def get_app(self):
return self.application
def set_app(self,application):
self.application = application
class WSGIRequestHandler(BaseHTTPRequestHandler):
server_version = "WSGIServer/" + __version__
def get_environ(self):
env = self.server.base_environ.copy()
env['SERVER_PROTOCOL'] = self.request_version
env['SERVER_SOFTWARE'] = self.server_version
env['REQUEST_METHOD'] = self.command
if '?' in self.path:
path,query = self.path.split('?',1)
else:
path,query = self.path,''
env['PATH_INFO'] = urllib.parse.unquote_to_bytes(path).decode('iso-8859-1')
env['QUERY_STRING'] = query
host = self.address_string()
if host != self.client_address[0]:
env['REMOTE_HOST'] = host
env['REMOTE_ADDR'] = self.client_address[0]
if self.headers.get('content-type') is None:
env['CONTENT_TYPE'] = self.headers.get_content_type()
else:
env['CONTENT_TYPE'] = self.headers['content-type']
length = self.headers.get('content-length')
if length:
env['CONTENT_LENGTH'] = length
for k, v in self.headers.items():
k=k.replace('-','_').upper(); v=v.strip()
if k in env:
continue # skip content length, type,etc.
if 'HTTP_'+k in env:
env['HTTP_'+k] += ','+v # comma-separate multiple headers
else:
env['HTTP_'+k] = v
return env
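    # e.g. an incoming "X-Forwarded-For: 10.0.0.1" request header surfaces
    # as env['HTTP_X_FORWARDED_FOR'] = '10.0.0.1'; repeated headers are
    # comma-joined, while Content-Type and Content-Length keep their
    # dedicated CGI keys set above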
def get_stderr(self):
return sys.stderr
def handle(self):
"""Handle a single HTTP request"""
self.raw_requestline = self.rfile.readline()
if not self.parse_request(): # An error code has been sent, just exit
return
handler = ServerHandler(
self.rfile, self.wfile, self.get_stderr(), self.get_environ()
)
handler.request_handler = self # backpointer for logging
handler.run(self.server.get_app())
def demo_app(environ,start_response):
from io import StringIO
stdout = StringIO()
print("Hello world!", file=stdout)
print(file=stdout)
h = sorted(environ.items())
for k,v in h:
print(k,'=',repr(v), file=stdout)
start_response("200 OK", [('Content-Type','text/plain; charset=utf-8')])
return [stdout.getvalue().encode("utf-8")]
def make_server(
host, port, app, server_class=WSGIServer, handler_class=WSGIRequestHandler
):
"""Create a new WSGI server listening on `host` and `port` for `app`"""
server = server_class((host, port), handler_class)
server.set_app(app)
return server
if __name__ == '__main__':
httpd = make_server('', 8000, demo_app)
sa = httpd.socket.getsockname()
print("Serving HTTP on", sa[0], "port", sa[1], "...")
import webbrowser
webbrowser.open('http://localhost:8000/xyz?abc')
httpd.handle_request() # serve one request, then exit
| apache-2.0 | 1,274,100,884,623,313,700 | 30.147436 | 83 | 0.611648 | false |
ASCrookes/django | django/contrib/gis/db/backends/mysql/operations.py | 328 | 2746 | from django.contrib.gis.db.backends.base.adapter import WKTAdapter
from django.contrib.gis.db.backends.base.operations import \
BaseSpatialOperations
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.db.models import aggregates
from django.db.backends.mysql.operations import DatabaseOperations
from django.utils.functional import cached_property
class MySQLOperations(BaseSpatialOperations, DatabaseOperations):
mysql = True
name = 'mysql'
select = 'AsText(%s)'
from_wkb = 'GeomFromWKB'
from_text = 'GeomFromText'
Adapter = WKTAdapter
Adaptor = Adapter # Backwards-compatibility alias.
gis_operators = {
'bbcontains': SpatialOperator(func='MBRContains'), # For consistency w/PostGIS API
'bboverlaps': SpatialOperator(func='MBROverlaps'), # .. ..
'contained': SpatialOperator(func='MBRWithin'), # .. ..
'contains': SpatialOperator(func='MBRContains'),
'disjoint': SpatialOperator(func='MBRDisjoint'),
'equals': SpatialOperator(func='MBREqual'),
'exact': SpatialOperator(func='MBREqual'),
'intersects': SpatialOperator(func='MBRIntersects'),
'overlaps': SpatialOperator(func='MBROverlaps'),
'same_as': SpatialOperator(func='MBREqual'),
'touches': SpatialOperator(func='MBRTouches'),
'within': SpatialOperator(func='MBRWithin'),
}
function_names = {
'Distance': 'ST_Distance',
'Length': 'GLength',
'Union': 'ST_Union',
}
disallowed_aggregates = (
aggregates.Collect, aggregates.Extent, aggregates.Extent3D,
aggregates.MakeLine, aggregates.Union,
)
@cached_property
def unsupported_functions(self):
unsupported = {
'AsGeoJSON', 'AsGML', 'AsKML', 'AsSVG', 'BoundingCircle',
'Difference', 'ForceRHR', 'GeoHash', 'Intersection', 'MemSize',
'Perimeter', 'PointOnSurface', 'Reverse', 'Scale', 'SnapToGrid',
'SymDifference', 'Transform', 'Translate',
}
if self.connection.mysql_version < (5, 6, 1):
unsupported.update({'Distance', 'Union'})
return unsupported
def geo_db_type(self, f):
return f.geom_type
def get_geom_placeholder(self, f, value, compiler):
"""
The placeholder here has to include MySQL's WKT constructor. Because
MySQL does not support spatial transformations, there is no need to
modify the placeholder based on the contents of the given value.
"""
if hasattr(value, 'as_sql'):
placeholder, _ = compiler.compile(value)
else:
placeholder = '%s(%%s)' % self.from_text
return placeholder
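    # Illustrative result: for a plain geometry value the placeholder becomes
    # "GeomFromText(%s)", so the adapted WKT string is handed straight to
    # MySQL's WKT constructor at query execution time.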
| bsd-3-clause | -16,405,264,092,433,432 | 37.138889 | 91 | 0.647123 | false |
vmp32k/litecoin | contrib/devtools/update-translations.py | 23 | 8426 | #!/usr/bin/env python3
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Run this script from the root of the repository to update all translations from
transifex.
It will do the following automatically:
- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)
TODO:
- auto-add new translations to the build system according to the translation process
'''
import subprocess
import re
import sys
import os
import io
import xml.etree.ElementTree as ET
# Name of transifex tool
TX = 'tx'
# Name of source language file
SOURCE_LANG = 'bitcoin_en.ts'
# Directory with locale files
LOCALE_DIR = 'src/qt/locale'
# Minimum number of messages for translation to be considered at all
MIN_NUM_MESSAGES = 10
# Regexp to check for Bitcoin addresses
ADDRESS_REGEXP = re.compile('([13]|bc1)[a-zA-Z0-9]{30,}')
def check_at_repository_root():
if not os.path.exists('.git'):
print('No .git directory found')
print('Execute this script at the root of the repository', file=sys.stderr)
sys.exit(1)
def fetch_all_translations():
if subprocess.call([TX, 'pull', '-f', '-a']):
print('Error while fetching translations', file=sys.stderr)
sys.exit(1)
def find_format_specifiers(s):
'''Find all format specifiers in a string.'''
pos = 0
specifiers = []
while True:
percent = s.find('%', pos)
if percent < 0:
break
specifiers.append(s[percent+1])
pos = percent+2
return specifiers
def split_format_specifiers(specifiers):
'''Split format specifiers between numeric (Qt) and others (strprintf)'''
numeric = []
other = []
for s in specifiers:
if s in {'1','2','3','4','5','6','7','8','9'}:
numeric.append(s)
else:
other.append(s)
# If both numeric format specifiers and "others" are used, assume we're dealing
# with a Qt-formatted message. In the case of Qt formatting (see https://doc.qt.io/qt-5/qstring.html#arg)
# only numeric formats are replaced at all. This means "(percentage: %1%)" is valid, without needing
# any kind of escaping that would be necessary for strprintf. Without this, this function
# would wrongly detect '%)' as a printf format specifier.
if numeric:
other = []
# numeric (Qt) can be present in any order, others (strprintf) must be in specified order
return set(numeric),other
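# Illustrative behaviour of the two helpers above:
#   find_format_specifiers("%s of %d")   -> ['s', 'd']
#   split_format_specifiers(['s', 'd'])  -> (set(), ['s', 'd'])   # strprintf
#   split_format_specifiers(['1', '2'])  -> ({'1', '2'}, [])      # Qt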
def sanitize_string(s):
'''Sanitize string for printing'''
return s.replace('\n',' ')
def check_format_specifiers(source, translation, errors, numerus):
source_f = split_format_specifiers(find_format_specifiers(source))
# assert that no source messages contain both Qt and strprintf format specifiers
# if this fails, go change the source as this is hacky and confusing!
assert(not(source_f[0] and source_f[1]))
try:
translation_f = split_format_specifiers(find_format_specifiers(translation))
except IndexError:
errors.append("Parse error in translation for '%s': '%s'" % (sanitize_string(source), sanitize_string(translation)))
return False
else:
if source_f != translation_f:
if numerus and source_f == (set(), ['n']) and translation_f == (set(), []) and translation.find('%') == -1:
# Allow numerus translations to omit %n specifier (usually when it only has one possible value)
return True
errors.append("Mismatch between '%s' and '%s'" % (sanitize_string(source), sanitize_string(translation)))
return False
return True
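# e.g. check_format_specifiers("%s used", "%s benutzt", errors, False)
# returns True, while a translation that drops or alters a strprintf
# specifier appends a mismatch message to `errors` and returns False.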
def all_ts_files(suffix=''):
for filename in os.listdir(LOCALE_DIR):
# process only language files, and do not process source language
if not filename.endswith('.ts'+suffix) or filename == SOURCE_LANG+suffix:
continue
if suffix: # remove provided suffix
filename = filename[0:-len(suffix)]
filepath = os.path.join(LOCALE_DIR, filename)
yield(filename, filepath)
FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
def remove_invalid_characters(s):
'''Remove invalid characters from translation string'''
return FIX_RE.sub(b'', s)
# Override cdata escape function to make our output match Qt's (optional, just
# for cleaner diffs when comparing; disabled by default)
_orig_escape_cdata = None
def escape_cdata(text):
text = _orig_escape_cdata(text)
text = text.replace("'", ''')
text = text.replace('"', '"')
return text
def contains_bitcoin_addr(text, errors):
if text != None and ADDRESS_REGEXP.search(text) != None:
errors.append('Translation "%s" contains a bitcoin address. This will be removed.' % (text))
return True
return False
def postprocess_translations(reduce_diff_hacks=False):
print('Checking and postprocessing...')
if reduce_diff_hacks:
global _orig_escape_cdata
_orig_escape_cdata = ET._escape_cdata
ET._escape_cdata = escape_cdata
for (filename,filepath) in all_ts_files():
os.rename(filepath, filepath+'.orig')
have_errors = False
for (filename,filepath) in all_ts_files('.orig'):
# pre-fixups to cope with transifex output
parser = ET.XMLParser(encoding='utf-8') # need to override encoding because 'utf8' is not understood only 'utf-8'
with open(filepath + '.orig', 'rb') as f:
data = f.read()
# remove control characters; this must be done over the entire file otherwise the XML parser will fail
data = remove_invalid_characters(data)
tree = ET.parse(io.BytesIO(data), parser=parser)
# iterate over all messages in file
root = tree.getroot()
for context in root.findall('context'):
for message in context.findall('message'):
numerus = message.get('numerus') == 'yes'
source = message.find('source').text
translation_node = message.find('translation')
# pick all numerusforms
if numerus:
translations = [i.text for i in translation_node.findall('numerusform')]
else:
translations = [translation_node.text]
for translation in translations:
if translation is None:
continue
errors = []
valid = check_format_specifiers(source, translation, errors, numerus) and not contains_bitcoin_addr(translation, errors)
for error in errors:
print('%s: %s' % (filename, error))
if not valid: # set type to unfinished and clear string if invalid
translation_node.clear()
translation_node.set('type', 'unfinished')
have_errors = True
# Remove location tags
for location in message.findall('location'):
message.remove(location)
# Remove entire message if it is an unfinished translation
if translation_node.get('type') == 'unfinished':
context.remove(message)
# check if document is (virtually) empty, and remove it if so
num_messages = 0
for context in root.findall('context'):
for message in context.findall('message'):
num_messages += 1
if num_messages < MIN_NUM_MESSAGES:
print('Removing %s, as it contains only %i messages' % (filepath, num_messages))
continue
# write fixed-up tree
# if diff reduction requested, replace some XML to 'sanitize' to qt formatting
if reduce_diff_hacks:
out = io.BytesIO()
tree.write(out, encoding='utf-8')
out = out.getvalue()
out = out.replace(b' />', b'/>')
with open(filepath, 'wb') as f:
f.write(out)
else:
tree.write(filepath, encoding='utf-8')
return have_errors
if __name__ == '__main__':
check_at_repository_root()
fetch_all_translations()
postprocess_translations()
| mit | 363,860,189,383,693,100 | 38.190698 | 140 | 0.624377 | false |
icsi-berkeley/framework_code | src/main/nluas/app/core_solver.py | 2 | 7749 | """
Simple solver "core". Contains capabilities for unpacking
a JSON n-tuple, as well as routing this n-tuple based
on the predicate_type (command, query, assertion, etc.).
Other general capabilities can be added. The design
is general enough that the same "unpacking" and "routing"
method can be used, as long as a new method is written for a given
predicate_type.
"Route_action" can be called by command/query/assertion methods,
to route each parameter to the task-specific method. E.g., "solve_move",
or "solve_push_move", etc.
Author: seantrott <[email protected]>
------
See LICENSE.txt for licensing information.
------
"""
from nluas.ntuple_decoder import *
from nluas.core_agent import *
import sys, traceback
import pprint
import os
path = os.path.dirname(os.path.realpath(__file__))
def check_complexity(n):
s = int(n)
if s not in [1, 2, 3]:
raise argparse.ArgumentTypeError("{} is an invalid entry for the complexity level. Should be 1, 2, or 3.".format(n))
return s
class CoreProblemSolver(CoreAgent):
def __init__(self, args):
self.__path__ = os.getcwd() + "/src/main/nluas/"
self.ntuple = None
self.decoder = NtupleDecoder()
CoreAgent.__init__(self, args)
self.world = []
self.solver_parser = self.setup_solver_parser()
args = self.solver_parser.parse_args(self.unknown)
self.complexity = args.complexity
self.ui_address = "{}_{}".format(self.federation, "AgentUI")
self.transport.subscribe(self.ui_address, self.callback)
self._incapable = "I cannot do that yet."
self.history = list()
self.p_features = None
self.eventFeatures=None
self.parameter_templates = OrderedDict()
def setup_solver_parser(self):
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--complexity", default=1, type=check_complexity, help="indicate level of complexity: 1, 2, or 3.")
return parser
def callback(self, ntuple):
if self.is_quit(ntuple):
return self.close()
self.solve(ntuple)
def initialize_templates(self):
""" Initializes templates from path, set above. """
self.parameter_templates = self.read_templates(self.__path__+"parameter_templates.json")
def request_clarification(self, ntuple, message="This ntuple requires clarification."):
request = {'ntuple': ntuple, 'message': message, 'type': 'clarification', 'tag': self.address}
self.transport.send(self.ui_address, request)
def identification_failure(self, message):
request = {'type': 'id_failure', 'message': message, 'tag': self.address}
self.transport.send(self.ui_address, request)
def respond_to_query(self, message):
request = {'type': 'response', 'message': message, 'tag': self.address}
self.transport.send(self.ui_address, request)
def return_error_descriptor(self, message):
request = {'type': 'error_descriptor', 'message': message, 'tag': self.address}
self.transport.send(self.ui_address, request)
def solve(self, ntuple):
if self.check_for_clarification(ntuple):
self.request_clarification(ntuple=ntuple)
else:
self.ntuple = ntuple
predicate_type = ntuple['predicate_type']
try:
dispatch = getattr(self, "solve_%s" %predicate_type)
dispatch(ntuple)
self.broadcast()
self.p_features = None # Testing, took it out from route_action
except AttributeError as e:
traceback.print_exc()
message = "I cannot solve a(n) {}.".format(predicate_type)
self.identification_failure(message)
def broadcast(self):
""" Here, does nothing. Later, an AgentSolver will broadcast information back to BossSolver. """
pass
def update_world(self, discovered=[]):
for item in discovered:
self.world.append(item)
def solve_command(self, ntuple):
self.route_event(ntuple['eventDescriptor'], "command")
if self.verbose:
self.decoder.pprint_ntuple(ntuple)
def solve_query(self, ntuple):
self.route_event(ntuple['eventDescriptor'], "query")
if self.verbose:
self.decoder.pprint_ntuple(ntuple)
def solve_assertion(self, ntuple):
self.route_event(ntuple['eventDescriptor'], "assertion")
if self.verbose:
self.decoder.pprint_ntuple(ntuple)
def solve_conditional_command(self, ntuple):
""" Takes in conditionalED. (API changed 5/26/16, ST) """
print("Function is deprecated!")
print(ntuple.keys())
def solve_conditional_assertion(self, ntuple):
""" Takes in conditionalED. (API changed 5/26/16, ST) """
print("Function is deprecated!")
print(ntuple.keys())
def solve_conditional_query(self, ntuple):
""" Takes in conditionalED. (API changed 5/26/16, ST) """
print("Function is deprecated!")
print(ntuple.keys())
def route_event(self, eventDescription, predicate):
if "complexKind" in eventDescription and eventDescription['complexKind'] == "conditional":
dispatch = getattr(self, "solve_conditional_{}".format(predicate))
return dispatch(eventDescription)
features = eventDescription['e_features']
if features:
# Set eventFeatures
self.eventFeatures = features['eventFeatures']
parameters = eventDescription['eventProcess']
return_value = self.route_action(parameters, predicate)
self.eventFeatures = None
if return_value:
if predicate == "query":
self.respond_to_query(return_value)
elif predicate == "command":
self.return_error_descriptor(return_value)
return return_value
def route_action(self, parameters, predicate):
if "complexKind" in parameters and parameters['complexKind'] == "serial":
return self.solve_serial(parameters, predicate)
elif "complexKind" in parameters and parameters['complexKind'] == "causal":
return self.solve_causal(parameters, predicate)
else:
template = parameters['template']
action = parameters['actionary']
try:
if parameters['p_features']:
self.p_features = parameters['p_features']['processFeatures']
dispatch = getattr(self, "{}_{}".format(predicate, action))
return_value = self.route_dispatch(dispatch, parameters)
self.history.insert(0, (parameters, True))
self.p_features = None
return return_value
except AttributeError as e:
message = "I cannot solve the '{}_{}' action".format(predicate,action)
self.history.insert(0, (parameters, False))
self.identification_failure(message)
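    # Dispatch naming sketch: an ntuple with predicate_type "command" whose
    # eventProcess carries actionary "move" resolves to a subclass method
    # named command_move(parameters); if no such method exists,
    # identification_failure() reports it back to the UI.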
def route_dispatch(self, dispatch_function, parameters):
""" Simply runs dispatch_function on PARAMETERS. """
return dispatch_function(parameters)
def check_for_clarification(self, ntuple):
""" Will need to be replaced by a process that checks whether ntuple needs clarification.
Requires some sort of context/world model. """
#return random.choice([True, False])
return False
def solve_serial(self, parameters, predicate):
self.route_action(parameters['process1'], predicate)
self.route_action(parameters['process2'], predicate)
if __name__ == '__main__':
ps = CoreProblemSolver(sys.argv[1:])
| apache-2.0 | -3,054,324,586,030,613,000 | 38.335025 | 133 | 0.630017 | false |
kirisetsz/kisstudou | kisstudou.py | 1 | 7276 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import re
import httplib
import urllib
from pyquery import PyQuery as pq
parser = argparse.ArgumentParser(
description='Download video resource from tudou.com',
epilog="Parse the url to video address using flvcd.com")
parser.add_argument('-q', '--quality',
default=4, type=int, dest='quality',
help="""Quality of source to download,
values in 0(256P),1(360P),2(480P),3(720P),4(REAL).
REAL by default.
Note:
    If the specific resolution is not available, the nearest lower one will be downloaded""")
parser.add_argument('-o', '--output-pattern',
default='%{n}%-{x}', dest='pattern',
help="""Define the output filename format(%%n by default):
%%{n} - Video name section.
%%{x} - Clip index of the video.
e.g. %{n}%-{x} will produce filename-0001.vod or filename.vod
""")
parser.add_argument('-w', '--wait',
default=2, type=int, dest='wait',
help="Set the time to wait between start next task(in second, default 2).")
parser.add_argument('-D', '--debug',
default=False, dest='debug', action='store_true',
help="Run command in debug mode")
parser.add_argument('-d', '--new-directory',
default=False, dest='mkdir', action='store_true',
help="Create new directory for the download")
parser.add_argument('-c', '--clean',
default=False, dest='clean', action='store_true',
help="Clean old file before start(for sites unavaliable for partial)")
parser.add_argument('-m', '--merge-split',
default=False, dest='merge', action='store_true',
help="Auto merge videos together(Not Implemented)")
parser.add_argument('-s', '--spider',
default=False, dest='detect', action='store_true',
help="Only detect for video information but not download.")
parser.add_argument('-U', '--user-agent',
default=r"Mozilla/5.0 (X11; Linux x86_64; rv:7.0.1) Gecko/20100101 Firefox/7.0.1",
dest='ua',
help="Specific the User-Agent.")
parser.add_argument('-O', '--wget-options',
default="",
dest='wgetopt',
help="Specific the wget Parameter.")
parser.add_argument('url', help='The URL of the video')
#arguments here
global args
args = parser.parse_args()
resolution = [
('normal' , 'Normal'),
('high' , '360P'),
('super' , '480P'),
('super2' , '720P'),
('real' , 'REAL(DEFAULT)')
]
print "Video address to parse:"
print "\t%s" % (args.url)
print "Quality:", resolution[args.quality][1]
print "Pattern:", args.pattern, "+ *ext*"
print "User-Agent:"
print "\t%s" % (args.ua)
if args.debug:
print "Debug:", args.debug
print "New Dir.:", args.mkdir
def parse(url, ua, fmt):
http = httplib.HTTP("www.flvcd.com")
http.putrequest("GET", "/parse.php?format=%s&kw=%s" % (fmt,
urllib.quote(url)))
http.putheader("User-Agent", ua)
http.putheader("Host", "www.flvcd.com")
http.putheader("Accept", "*/*")
http.endheaders()
errcode, errmsg, headers = http.getreply()
print "Status:", errcode, errmsg
if errcode!=200:
print "Error encountered while parsing url"
return -1
res = http.getfile()
print 'Parsing video address...'
html = ''
data = res.read(512)
while data != '':
html += data
data = res.read(512)
html = html.decode('gbk')
return html
html = parse(args.url, args.ua, resolution[args.quality][0])
if html == -1:
exit(1)
q = pq(html)
# Address Parsing Procedure
form = q('form[name="mform"]')
file_a = form.parent('td').parent('tr').prev().children().children('a')
filelist = []
for i in file_a:
a = pq(i)
filelist.append(a.attr('href'))
filename = form('input[name="name"]').val()
formats = form.parent().children('a')
if not filename:
print """
Video is not available for download.
Check http://www.flvcd.com/url.php for available sites.
Or the video is protected from playing by guests.
"""
exit(0)
print "Video Title:"
print "\t%s" % (filename)
print
if args.debug:
print "Download Address:"
for i in filelist:
print i
print
if len(formats) > 0:
print "Optional format:"
for i in formats:
f = pq(i)
href = f.attr('href')
text = f.text()
for i in xrange(len(resolution)):
k, v = resolution[i]
if href.find(k) != -1:
print "\t%d - %s[%s]" % (i, v, text)
break
print
if args.detect:
exit(0)
filepath = filename.replace("/", "_").encode('utf8')
print "Found %d video clip(s) to download" % len(filelist)
print
import os, time
if args.mkdir:
print 'Creating new dir:', filepath
os.system('mkdir "%s" 2>/dev/null 1>/dev/null' % filepath)
os.chdir(filepath)
print 'Current directory:'
print "\t", os.getcwd()
os.system('''echo "#!/bin/bash
%s -q%s -O=\\"%s\\" \\"%s\\" \$@" > "%s.to" && chmod +x "%s.to"
''' % \
(__file__,args.quality,args.wgetopt,args.url,
filepath,filepath))
print
def getFileExt(u):
if u.find('f4v')!=-1:
return '.f4v'
if u.find('mp4')!=-1:
return '.mp4'
if u.find('flv')!=-1:
return '.flv'
if u.find('hlv')!=-1:
return '.flv'
return ".video"
fSuccess = True
def sformat(string, symbol, value):
tokens = string.split('%')
filtered = []
for s in tokens:
if s.find('{' + symbol + '}') < 0:
filtered.append(s)
else:
if value:
filtered.append(s.replace('{' + symbol + '}', value))
return '%'.join(filtered)
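# Illustrative walk-through of the pattern expansion below (default pattern
# '%{n}%-{x}'):
#   sformat('%{n}%-{x}', 'n', 'clip')   -> '%clip%-{x}'
#   sformat('%clip%-{x}', 'x', '0001')  -> '%clip%-0001'
#   sformat('%clip%-{x}', 'x', None)    -> '%clip'
# the leftover '%' separators are stripped before the extension is appended.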
for i in xrange(len(filelist)):
url = filelist[i]
local = args.pattern
local = sformat(local, 'n', filepath)
if len(filelist) > 1:
local = sformat(local, 'x', '%04d' % (i + 1))
else:
local = sformat(local, 'x', None)
local = local.replace('%',"").replace('/',"_") + getFileExt(url)
print "Download", local, "..."
if os.path.exists(local):
print "Target already exists, skip to next file!"
continue
rmcmd = "rm -f %s 1>/dev/null 2>/dev/null" % (local+" ."+local)
if args.clean:
print "Before we start, clean the unfinished file"
os.system(rmcmd)
syscmd = 'wget -c ' + args.wgetopt + ' "' + url + '" -U "' + args.ua + '" -O ".' + local + '"'
if args.debug:
print syscmd
continue
rtn = os.system(syscmd)
mvcmd = 'mv "%s" "%s" 1>/dev/null 2>/dev/null' % ('.' + local, local)
if rtn == 0:
os.system(mvcmd)
elif rtn == 2048:
# Server issued an error response.
print "Server Error detected, remove part file and retry."
os.system(rmcmd)
rtn = os.system(syscmd)
if rtn == 0:
os.system(mvcmd)
else:
fSuccess = False;
if rtn == 2048:
print "Server error again, address may be expired."
if args.clean:
os.system(rmcmd)
continue
else:
fSuccess = False;
time.sleep(args.wait + 0.1)
if fSuccess:
os.system('rm "%s.to"' % (filepath))
print "All tasks completed."
exit(0)
| apache-2.0 | 4,526,727,220,789,610,000 | 27.421875 | 98 | 0.572018 | false |
jef-n/QGIS | tests/src/python/test_qgspallabeling_tests.py | 30 | 14051 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsPalLabeling: base suite of render check tests
Class is meant to be inherited by classes that test different labeling outputs
See <qgis-src-dir>/tests/testdata/labeling/README.rst for description.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Larry Shaffer'
__date__ = '07/16/2013'
__copyright__ = 'Copyright 2013, The QGIS Project'
import qgis # NOQA
import os
from qgis.PyQt.QtCore import Qt, QPointF, QSizeF
from qgis.PyQt.QtGui import QFont
from qgis.core import QgsLabelingEngineSettings, QgsPalLayerSettings, QgsUnitTypes, QgsTextBackgroundSettings, \
QgsProject, QgsExpressionContextUtils, QgsExpressionContext
from qgis.core import QgsCoordinateReferenceSystem
from utilities import svgSymbolsPath
# noinspection PyPep8Naming
class TestPointBase(object):
def __init__(self):
"""Dummy assignments, intended to be overridden in subclasses"""
self.lyr = QgsPalLayerSettings()
""":type: QgsPalLayerSettings"""
# noinspection PyArgumentList
self._TestFont = QFont() # will become a standard test font
self._Canvas = None
""":type: QgsMapCanvas"""
# custom mismatches per group/test (should not mask any needed anomaly)
# e.g. self._Mismatches['TestClassName'] = 300
# check base output class's checkTest() or subclasses for any defaults
self._Mismatches = dict()
# custom color tolerances per group/test: 1 - 20 (0 default, 20 max)
# (should not mask any needed anomaly)
# e.g. self._ColorTols['TestClassName'] = 10
# check base output class's checkTest() or subclasses for any defaults
self._ColorTols = dict()
# noinspection PyMethodMayBeStatic
def checkTest(self, **kwargs):
"""Intended to be overridden in subclasses"""
pass
def test_default_label(self):
# Default label placement, with text size in points
self._Mismatches['TestCanvasPoint'] = 776
self._ColorTols['TestComposerPdfPoint'] = 2
self.checkTest()
def test_text_size_map_unit(self):
# Label text size in map units
format = self.lyr.format()
format.setSizeUnit(QgsUnitTypes.RenderMapUnits)
format.setSize(460)
font = QFont(self._TestFont)
format.setFont(font)
self.lyr.setFormat(format)
self._Mismatches['TestCanvasPoint'] = 776
self._ColorTols['TestComposerPdfPoint'] = 2
self.checkTest()
def test_text_color(self):
self._Mismatches['TestCanvasPoint'] = 774
self._ColorTols['TestComposerPdfPoint'] = 2
# Label color change
format = self.lyr.format()
format.setColor(Qt.blue)
self.lyr.setFormat(format)
self.checkTest()
def test_background_rect(self):
self._Mismatches['TestComposerImageVsCanvasPoint'] = 800
self._Mismatches['TestComposerImagePoint'] = 800
format = self.lyr.format()
format.background().setEnabled(True)
self.lyr.setFormat(format)
self._Mismatches['TestCanvasPoint'] = 776
self._ColorTols['TestComposerPdfPoint'] = 1
self.checkTest()
def test_background_rect_w_offset(self):
# Label rectangular background
self._Mismatches['TestComposerImageVsCanvasPoint'] = 800
self._Mismatches['TestComposerImagePoint'] = 800
# verify fix for issues
# https://github.com/qgis/QGIS/issues/17705
# http://gis.stackexchange.com/questions/86900
format = self.lyr.format()
format.setSizeUnit(QgsUnitTypes.RenderMapUnits)
format.setSize(460)
font = QFont(self._TestFont)
format.setFont(font)
format.background().setEnabled(True)
format.background().setOffsetUnit(QgsUnitTypes.RenderMapUnits)
format.background().setOffset(QPointF(-2900.0, -450.0))
self.lyr.setFormat(format)
self._Mismatches['TestCanvasPoint'] = 774
self._ColorTols['TestComposerPdfPoint'] = 2
self.checkTest()
def test_background_svg(self):
# Label SVG background
format = self.lyr.format()
format.setSizeUnit(QgsUnitTypes.RenderMapUnits)
format.setSize(460)
font = QFont(self._TestFont)
format.setFont(font)
format.background().setEnabled(True)
format.background().setType(QgsTextBackgroundSettings.ShapeSVG)
svg = os.path.join(
svgSymbolsPath(), 'backgrounds', 'background_square.svg')
format.background().setSvgFile(svg)
format.background().setSizeUnit(QgsUnitTypes.RenderMapUnits)
format.background().setSizeType(QgsTextBackgroundSettings.SizeBuffer)
format.background().setSize(QSizeF(100.0, 0.0))
self.lyr.setFormat(format)
self._Mismatches['TestComposerPdfVsComposerPoint'] = 580
self._Mismatches['TestCanvasPoint'] = 776
self._ColorTols['TestComposerPdfPoint'] = 2
self.checkTest()
def test_background_svg_w_offset(self):
# Label SVG background
format = self.lyr.format()
format.setSizeUnit(QgsUnitTypes.RenderMapUnits)
format.setSize(460)
font = QFont(self._TestFont)
format.setFont(font)
format.background().setEnabled(True)
format.background().setType(QgsTextBackgroundSettings.ShapeSVG)
svg = os.path.join(
svgSymbolsPath(), 'backgrounds', 'background_square.svg')
format.background().setSvgFile(svg)
format.background().setSizeUnit(QgsUnitTypes.RenderMapUnits)
format.background().setSizeType(QgsTextBackgroundSettings.SizeBuffer)
format.background().setSize(QSizeF(100.0, 0.0))
format.background().setOffsetUnit(QgsUnitTypes.RenderMapUnits)
format.background().setOffset(QPointF(-2850.0, 500.0))
self.lyr.setFormat(format)
self._Mismatches['TestComposerPdfVsComposerPoint'] = 760
self._Mismatches['TestCanvasPoint'] = 776
self._ColorTols['TestComposerPdfPoint'] = 2
self.checkTest()
def test_partials_labels_enabled(self):
# Set Big font size
format = self.lyr.format()
font = QFont(self._TestFont)
format.setFont(font)
format.setSize(84)
self.lyr.setFormat(format)
# Enable partials labels
engine_settings = QgsLabelingEngineSettings()
engine_settings.setFlag(QgsLabelingEngineSettings.UsePartialCandidates, True)
self._TestMapSettings.setLabelingEngineSettings(engine_settings)
self._Mismatches['TestCanvasPoint'] = 779
self._ColorTols['TestComposerPdfPoint'] = 2
self.checkTest()
def test_partials_labels_disabled(self):
# Set Big font size
format = self.lyr.format()
font = QFont(self._TestFont)
format.setFont(font)
format.setSize(84)
self.lyr.setFormat(format)
# Disable partials labels
engine_settings = QgsLabelingEngineSettings()
engine_settings.setFlag(QgsLabelingEngineSettings.UsePartialCandidates, False)
self._TestMapSettings.setLabelingEngineSettings(engine_settings)
self.checkTest()
def test_buffer(self):
# Label with buffer
format = self.lyr.format()
format.buffer().setEnabled(True)
format.buffer().setSize(2)
self.lyr.setFormat(format)
self.checkTest()
def test_shadow(self):
# Label with shadow
format = self.lyr.format()
format.shadow().setEnabled(True)
format.shadow().setOffsetDistance(2)
format.shadow().setOpacity(1)
self.lyr.setFormat(format)
self.checkTest()
def test_letter_spacing(self):
# Modified letter spacing
format = self.lyr.format()
font = QFont(self._TestFont)
font.setLetterSpacing(QFont.AbsoluteSpacing, 3.5)
format.setFont(font)
format.setSize(30)
self.lyr.setFormat(format)
self.checkTest()
def test_word_spacing(self):
# Modified word spacing
format = self.lyr.format()
font = QFont(self._TestFont)
font.setWordSpacing(20.5)
format.setFont(font)
format.setSize(30)
self.lyr.setFormat(format)
self.checkTest()
# noinspection PyPep8Naming
class TestLineBase(object):
def __init__(self):
"""Dummy assignments, intended to be overridden in subclasses"""
self.lyr = QgsPalLayerSettings()
""":type: QgsPalLayerSettings"""
# noinspection PyArgumentList
self._TestFont = QFont() # will become a standard test font
self._Pal = None
""":type: QgsPalLabeling"""
self._Canvas = None
""":type: QgsMapCanvas"""
# custom mismatches per group/test (should not mask any needed anomaly)
# e.g. self._Mismatches['TestClassName'] = 300
# check base output class's checkTest() or subclasses for any defaults
self._Mismatches = dict()
# custom color tolerances per group/test: 1 - 20 (0 default, 20 max)
# (should not mask any needed anomaly)
# e.g. self._ColorTols['TestClassName'] = 10
# check base output class's checkTest() or subclasses for any defaults
self._ColorTols = dict()
# noinspection PyMethodMayBeStatic
def checkTest(self, **kwargs):
"""Intended to be overridden in subclasses"""
pass
def test_line_placement_above_line_orientation(self):
# Line placement, above, follow line orientation
self.lyr.placement = QgsPalLayerSettings.Line
self.lyr.placementFlags = QgsPalLayerSettings.AboveLine
self.checkTest()
def test_line_placement_online(self):
# Line placement, on line
self.lyr.placement = QgsPalLayerSettings.Line
self.lyr.placementFlags = QgsPalLayerSettings.OnLine
self.checkTest()
def test_line_placement_below_line_orientation(self):
# Line placement, below, follow line orientation
self.lyr.placement = QgsPalLayerSettings.Line
self.lyr.placementFlags = QgsPalLayerSettings.BelowLine
self.checkTest()
def test_line_placement_above_map_orientation(self):
# Line placement, above, follow map orientation
self.lyr.placement = QgsPalLayerSettings.Line
self.lyr.placementFlags = QgsPalLayerSettings.AboveLine | QgsPalLayerSettings.MapOrientation
self.checkTest()
def test_line_placement_below_map_orientation(self):
# Line placement, below, follow map orientation
self.lyr.placement = QgsPalLayerSettings.Line
self.lyr.placementFlags = QgsPalLayerSettings.BelowLine | QgsPalLayerSettings.MapOrientation
self.checkTest()
def test_curved_placement_online(self):
# Curved placement, on line
self.lyr.placement = QgsPalLayerSettings.Curved
self.lyr.placementFlags = QgsPalLayerSettings.OnLine
self.checkTest()
def test_curved_placement_above(self):
# Curved placement, on line
self.lyr.placement = QgsPalLayerSettings.Curved
self.lyr.placementFlags = QgsPalLayerSettings.AboveLine | QgsPalLayerSettings.MapOrientation
self.checkTest()
def test_curved_placement_below(self):
# Curved placement, on line
self.lyr.placement = QgsPalLayerSettings.Curved
self.lyr.placementFlags = QgsPalLayerSettings.BelowLine | QgsPalLayerSettings.MapOrientation
self.checkTest()
def test_curved_placement_online_html(self):
# Curved placement, on line
self.lyr.placement = QgsPalLayerSettings.Curved
self.lyr.placementFlags = QgsPalLayerSettings.OnLine
format = self.lyr.format()
format.setAllowHtmlFormatting(True)
self.lyr.setFormat(format)
self.lyr.fieldName = "'<span style=\"color: red\">aaa</span><s>aa</s><span style=\"text-decoration: overline\">a</span>'"
self.lyr.isExpression = True
self.checkTest()
def test_length_expression(self):
# compare length using the ellipsoid in kms and the planimetric distance in meters
self.lyr.fieldName = "round($length,5) || ' - ' || round(length($geometry),2)"
self.lyr.isExpression = True
QgsProject.instance().setCrs(QgsCoordinateReferenceSystem("EPSG:32613"))
QgsProject.instance().setEllipsoid("WGS84")
QgsProject.instance().setDistanceUnits(QgsUnitTypes.DistanceKilometers)
ctxt = QgsExpressionContext()
ctxt.appendScope(QgsExpressionContextUtils.projectScope(QgsProject.instance()))
ctxt.appendScope(QgsExpressionContextUtils.layerScope(self.layer))
self._TestMapSettings.setExpressionContext(ctxt)
self.lyr.placement = QgsPalLayerSettings.Curved
self.lyr.placementFlags = QgsPalLayerSettings.AboveLine | QgsPalLayerSettings.MapOrientation
self.checkTest()
# noinspection PyPep8Naming
def suiteTests():
"""
Use to define which tests are run when PAL_SUITE is set.
Use sp_vs_suite for comparison of server and layout outputs to canvas
"""
sp_suite = [
# 'test_default_label',
# 'test_text_size_map_unit',
# 'test_text_color',
# 'test_background_rect',
# 'test_background_rect_w_offset',
# 'test_background_svg',
# 'test_background_svg_w_offset',
# 'test_partials_labels_enabled',
# 'test_partials_labels_disabled',
]
sp_vs_suite = [
# 'test_something_specific',
]
# extended separately for finer control of PAL_SUITE (comment-out undesired)
sp_vs_suite.extend(sp_suite)
return {
'sp_suite': sp_suite,
'sp_vs_suite': sp_vs_suite
}
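# Note (assumption based on the docstring above): when the PAL_SUITE
# environment variable is set, the concrete output suites run only the test
# names left uncommented in the lists returned here.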
if __name__ == '__main__':
pass
| gpl-2.0 | -6,683,096,155,341,673,000 | 36.569519 | 129 | 0.664437 | false |
gw0/myhdl | myhdl/_resize.py | 1 | 4719 | # This file is part of the myhdl library, a Python package for using
# Python as a Hardware Description Language.
#
# Copyright (C) 2013 Christopher L. Felton
#
# The myhdl library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of the
# License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
""" Module with the fixbv resize function """
import math
from _fixbv import fixbv
from _fixbv import FixedPointFormat
# round :
ROUND_MODES = ( # towards :
'ceil', # +infinity: always round up
'fix', # 0 : always down
'floor', # -infinity: truncate, always round down
'nearest', # nearest : tie towards largest absolute value
'round', # nearest : ties to +infinity
'convergent', # nearest : tie to closest even (round_even)
'round_even', # nearest : tie to closest even (convergent)
)
OVERFLOW_MODES = (
'saturate',
'ring',
'wrap',
)
def is_round_mode(mode):
if mode.lower() in ROUND_MODES:
found = True
else:
# @todo: is there a close match?
found = False
return found
def is_overflow_mode(mode):
if mode.lower() in OVERFLOW_MODES:
found = True
else:
# @todo: is there a close match?
found = False
return found
def _overflow(val, fmt, overflow_mode):
"""handle overflow"""
assert is_overflow_mode(overflow_mode)
wl,iwl,fwl = fmt
mm = 2**(wl-1)
mmin,mmax = -mm,mm
#print(" [rsz][ovl]: %f %d %d, %s" % (val, mmin, mmax, fmt))
if overflow_mode == 'saturate':
if val >= mmax:
retval = mmax-1
elif val <= mmin:
retval = mmin
else:
retval = val
elif overflow_mode == 'ring' or overflow_mode == 'wrap':
retval = (val - mmin) % (mmax - mmin) + mmin
else:
raise ValueError
return retval
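# Illustrative: for fmt=(8, 4, 3) the underlying integer range is [-128, 128),
# so 'saturate' clamps 130 -> 127 and -130 -> -128, while 'ring'/'wrap' maps
# 130 -> (130 + 128) % 256 - 128 = -126.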
def _round(val, fmt, round_mode):
"""Round the initial value if needed"""
# Scale the value to the integer range (the underlying representation)
assert is_round_mode(round_mode)
assert isinstance(fmt, tuple)
wl,iwl,fwl = fmt
_val = val
val = val * 2.0**fwl
#print(" [rsz][rnd]: %f %f, %s" % (val, _val, fmt))
if round_mode == 'ceil':
retval = math.ceil(val)
elif round_mode == 'fix':
if val > 0:
retval = math.floor(val)
else:
retval = math.ceil(val)
elif round_mode == 'floor':
retval = math.floor(val)
elif round_mode == 'nearest':
fval,ival = math.modf(val)
if fval == .5:
retval = int(val+1) if val > 0 else int(val-1)
else:
retval = round(val)
elif round_mode == 'round':
retval = round(val)
elif round_mode == 'round_even' or round_mode == 'convergent':
fval,ival = math.modf(val)
abs_ival = int(abs(ival))
sign = -1 if ival < 0 else 1
if (abs(fval) - 0.5) == 0.0:
if abs_ival%2 == 0:
retval = abs_ival * sign
else:
retval = (abs_ival + 1) * sign
else:
retval = round(val)
else:
raise TypeError("invalid round mode!" % self.round_mode)
return int(retval)
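# Illustrative: with fmt=(8, 4, 3) the value 0.3125 scales to 2.5 internally, so
#   _round(0.3125, (8, 4, 3), 'floor')      -> 2
#   _round(0.3125, (8, 4, 3), 'ceil')       -> 3
#   _round(0.3125, (8, 4, 3), 'round')      -> 3   (tie rounds away from zero)
#   _round(0.3125, (8, 4, 3), 'convergent') -> 2   (tie goes to the even value)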
def resize(val, fmt, round_mode='convergent', overflow_mode='saturate'):
"""
"""
if isinstance(fmt, fixbv):
fmt = fmt.format
elif isinstance(fmt, FixedPointFormat):
fmt = tuple(fmt[:])
elif isinstance(fmt, tuple):
fmt = fmt
else:
pass
if isinstance(val, fixbv):
fval = float(val)
elif isinstance(val, float):
fval = val
else:
fval = float(val)
wl,iwl,fwl = fmt
mm = 2**iwl
res = 2**-fwl
rfx = fixbv(0, min=-mm, max=mm, res=res)
assert (wl,iwl,fwl,) == rfx.format, "%d,%d,%d != %s" % (wl,iwl,fwl, repr(rfx))
ival = _round(fval, fmt, round_mode=round_mode)
ival = _overflow(ival, fmt, overflow_mode=overflow_mode)
rfx._val = ival
rfx._handleBounds()
return rfx
| lgpl-2.1 | -943,949,032,652,043,500 | 26.277457 | 82 | 0.573427 | false |
TribeMedia/sky_engine | tools/sort_sources.py | 68 | 5062 | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Given a GYP/GN filename, sort C-ish source files in that file.
Shows a diff and prompts for confirmation before doing the deed.
Works great with tools/git/for-all-touched-files.py.
Limitations:
1) Comments used as section headers
If a comment (1+ lines starting with #) appears in a source list without a
preceding blank line, the tool assumes that the comment is about the next
line. For example, given the following source list,
sources = [
"b.cc",
# Comment.
"a.cc",
"c.cc",
]
the tool will produce the following output:
sources = [
# Comment.
"a.cc",
"b.cc",
"c.cc",
]
This is not correct if the comment is for starting a new section like:
sources = [
"b.cc",
# These are for Linux.
"a.cc",
"c.cc",
]
The tool cannot disambiguate the two types of comments. The problem can be
worked around by inserting a blank line before the comment because the tool
interprets a blank line as the end of a source list.
2) Sources commented out
Sometimes sources are commented out with their positions kept in the
alphabetical order, but what if the list is not sorted correctly? For
example, given the following source list,
sources = [
"a.cc",
# "b.cc",
"d.cc",
"c.cc",
]
the tool will produce the following output:
sources = [
"a.cc",
"c.cc",
# "b.cc",
"d.cc",
]
This is because the tool assumes that the comment (# "b.cc",) is about the
next line ("d.cc",). This kind of errors should be fixed manually, or the
commented-out code should be deleted.
3) " and ' are used both used in the same source list (GYP only problem)
If both " and ' are used in the same source list, sources quoted with " will
appear first in the output. The problem is rare enough so the tool does not
attempt to normalize them. Hence this kind of errors should be fixed
manually.
4) Spaces and tabs used in the same source list
Similarly, if spaces and tabs are both used in the same source list, sources
indented with tabs will appear first in the output. This kind of errors
should be fixed manually.
"""
import difflib
import optparse
import re
import sys
from yes_no import YesNo
SUFFIXES = ['c', 'cc', 'cpp', 'h', 'mm', 'rc', 'rc.version', 'ico', 'def',
'release']
SOURCE_PATTERN = re.compile(r'^\s+[\'"].*\.(%s)[\'"],$' %
'|'.join([re.escape(x) for x in SUFFIXES]))
COMMENT_PATTERN = re.compile(r'^\s+#')
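# For reference, SOURCE_PATTERN matches indented, quoted entries such as
#     "foo.cc",
# or
#     'bar.h',
# i.e. leading whitespace, a quoted name with a known suffix, and a trailing comma.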
def SortSources(original_lines):
"""Sort source file names in |original_lines|.
Args:
original_lines: Lines of the original content as a list of strings.
Returns:
Lines of the sorted content as a list of strings.
The algorithm is fairly naive. The code tries to find a list of C-ish
source file names by a simple regex, then sort them. The code does not try
to understand the syntax of the build files. See the file comment above for
details.
"""
output_lines = []
comments = []
sources = []
for line in original_lines:
if re.search(COMMENT_PATTERN, line):
comments.append(line)
elif re.search(SOURCE_PATTERN, line):
# Associate the line with the preceding comments.
sources.append([line, comments])
comments = []
else:
# |sources| should be flushed first, to handle comments at the end of a
# source list correctly.
if sources:
for source_line, source_comments in sorted(sources):
output_lines.extend(source_comments)
output_lines.append(source_line)
sources = []
if comments:
output_lines.extend(comments)
comments = []
output_lines.append(line)
return output_lines
def ProcessFile(filename, should_confirm):
"""Process the input file and rewrite if needed.
Args:
filename: Path to the input file.
should_confirm: If true, diff and confirmation prompt are shown.
"""
original_lines = []
with open(filename, 'r') as input_file:
for line in input_file:
original_lines.append(line)
new_lines = SortSources(original_lines)
if original_lines == new_lines:
print '%s: no change' % filename
return
if should_confirm:
diff = difflib.unified_diff(original_lines, new_lines)
sys.stdout.writelines(diff)
if not YesNo('Use new file (y/N)'):
return
with open(filename, 'w') as output_file:
output_file.writelines(new_lines)
def main():
parser = optparse.OptionParser(usage='%prog filename1 filename2 ...')
parser.add_option('-f', '--force', action='store_false', default=True,
dest='should_confirm',
help='Turn off confirmation prompt.')
opts, filenames = parser.parse_args()
if len(filenames) < 1:
parser.print_help()
return 1
for filename in filenames:
ProcessFile(filename, opts.should_confirm)
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | 14,583,438,924,033,156 | 26.069519 | 77 | 0.66851 | false |
XiaJieCom/change | Demo/days07/day07.py | 1 | 4725 |
'''
class Aniaml:
count = 10
def __init__(self,name):
self.name = name
self.num = None
hobbie = 'meat'
    @classmethod  # class method: cannot access instance variables
def talk(self):
print('%s is talking ...'%self.hobbie )
    @staticmethod  # static method: cannot access class or instance variables
def walk():
print('is walking ...')
    @property  # turns the method into an attribute
def habbit(self):
print('%s habit is sss'%self.name)
@property
def total_players(self):
return self.num
@total_players.setter
def total_players(self,num):
self.num = num
print('total players:',self.num)
@total_players.deleter
def total_players(self):
print('total player got deleted.')
del self.num
Aniaml.hobbie
Aniaml.talk()
d = Aniaml('hahah')
print(d.total_players)
d.total_players = 3
del d.total_players
print(d.total_players)
'''
''''
class A:
n = 'A'
def f2(self):
print('f2 from A')
class B(A):
n = 'B'
def __init__(self):
pass
def f1(self):
print('f1 from B')
def f2(self):
print('f2 from B')
def __del__(self):
print('del ....')
def __call__(self, *args, **kwargs):
print('__cal__')
class C(A):
n = 'C'
def f2(self):
print('f2 from C')
class D(B,C):
pass
d = D()
d.f1()
d.f2()
print(B.__doc__)
print(B.__dict__)
print(B.__class__)
print(B.__module__)
B.__del__
obj = B()
obj()
'''
'''
import sys
class WebServer(object):
def __init__(self,host,port):
self.host = host
self.port = port
def start(self):
print('Server is stating ...')
def stop(self):
print('Server is stopping ...')
def restart(self):
self.stop()
self.start()
print('Server is restarting ...')
def test_run(self,name):
print('Test_running ...',name,self.host)
if __name__ == '__main__':
server = WebServer('localhost',80)
if hasattr(server,sys.argv[1]):
func = getattr(server,sys.argv[1])
func()
setattr(server,'run',test_run)
server.run(server,'haha')
'''
'''
import socket
ip_port = ('127.0.0.1',9999)
sk = socket.socket()
sk.bind(ip_port)
sk.listen(5)
while True:
print('Server is waiting ... ')
conn,addr = sk.accept()
client_data = conn.recv(1024)
print(str(client_data,'utf-8'))
    conn.sendall(bytes('this is the server!','utf-8'))
conn.close()
'''
'''
import socket
#ip_port = ('0.0.0.0',9999)
ip_port = ('127.0.0.1',9090)
sk = socket.socket()
sk.bind(ip_port)
sk.listen(5)
'''
'''
while True:
print('Server is waiting ... ')
conn,addr = sk.accept()
client_data = conn.recv(1024)
print(str(client_data,'utf-8'))
    conn.sendall(bytes('this is the server!','utf-8'))
while True:
client_data = conn.recv(1024)
server_raw = input('>>').strip()
conn.sendall(bytes(server_raw,'utf-8'))
print(str(client_data,'utf-8'))
'''
'''
menu_dic = {'1':'start',
'2':'stop',
'3':'restart'
}
raw = input('Please enter your choice: ').strip()
if raw in menu_dic:
print(menu_dic[raw])
'''
'''
import sys
class WebServer(object):
def __init__(self,host,port):
self.host = host
self.port = port
def start(self):
print('Server is stating ...')
def stop(self):
print('Server is stopping ...')
def restart(self):
self.stop()
self.start()
print('Server is restarting ...')
def test_run(self,name):
print('Test_running ...',name,self.host)
if __name__ == '__main__':
server = WebServer('localhost',80)
if hasattr(server,sys.argv[1]):
func = getattr(server,sys.argv[1])
func()
setattr(server,'run',test_run)
server.run(server,'haha')
'''
class Aniaml:
count = 10
def __init__(self,name):
self.name = name
self.num = None
hobbie = 'meat'
    @classmethod  # class method: cannot access instance variables
def talk(self):
print('%s is talking ...'%self.hobbie )
    @staticmethod  # static method: cannot access class or instance variables
def walk():
print('is walking ...')
    @property  # turns the method into an attribute
def habbit(self):
print('%s habit is sss'%self.name)
@property
def total_players(self):
return self.num
@total_players.setter
def total_players(self,num):
self.num = num
print('total players:',self.num)
@total_players.deleter
def total_players(self):
print('total player got deleted.')
del self.num
Aniaml.hobbie
Aniaml.talk()
d = Aniaml('hahah')
print(d.total_players)
d.total_players = 3
del d.total_players
print(d.total_players)
| lgpl-2.1 | -7,414,383,576,604,905,000 | 20.870813 | 47 | 0.560053 | false |
sabi0/intellij-community | python/lib/Lib/site-packages/django/contrib/messages/tests/user_messages.py | 241 | 2619 | from django import http
from django.contrib.auth.models import User
from django.contrib.messages.storage.user_messages import UserMessagesStorage,\
LegacyFallbackStorage
from django.contrib.messages.tests.base import skipUnlessAuthIsInstalled
from django.contrib.messages.tests.cookie import set_cookie_data
from django.contrib.messages.tests.fallback import FallbackTest
from django.test import TestCase
class UserMessagesTest(TestCase):
def setUp(self):
self.user = User.objects.create(username='tester')
def test_add(self):
storage = UserMessagesStorage(http.HttpRequest())
self.assertRaises(NotImplementedError, storage.add, 'Test message 1')
def test_get_anonymous(self):
# Ensure that the storage still works if no user is attached to the
# request.
storage = UserMessagesStorage(http.HttpRequest())
self.assertEqual(len(storage), 0)
def test_get(self):
storage = UserMessagesStorage(http.HttpRequest())
storage.request.user = self.user
self.user.message_set.create(message='test message')
self.assertEqual(len(storage), 1)
self.assertEqual(list(storage)[0].message, 'test message')
UserMessagesTest = skipUnlessAuthIsInstalled(UserMessagesTest)
class LegacyFallbackTest(FallbackTest, TestCase):
storage_class = LegacyFallbackStorage
def setUp(self):
super(LegacyFallbackTest, self).setUp()
self.user = User.objects.create(username='tester')
def get_request(self, *args, **kwargs):
request = super(LegacyFallbackTest, self).get_request(*args, **kwargs)
request.user = self.user
return request
def test_get_legacy_only(self):
request = self.get_request()
storage = self.storage_class(request)
self.user.message_set.create(message='user message')
# Test that the message actually contains what we expect.
self.assertEqual(len(storage), 1)
self.assertEqual(list(storage)[0].message, 'user message')
def test_get_legacy(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
self.user.message_set.create(message='user message')
set_cookie_data(cookie_storage, ['cookie'])
# Test that the message actually contains what we expect.
self.assertEqual(len(storage), 2)
self.assertEqual(list(storage)[0].message, 'user message')
self.assertEqual(list(storage)[1], 'cookie')
LegacyFallbackTest = skipUnlessAuthIsInstalled(LegacyFallbackTest)
| apache-2.0 | 5,477,343,693,049,914,000 | 36.414286 | 79 | 0.704849 | false |
mikhaelfelian/PROJECT-RESTO-TIGERA | assets/tinymce/filemanager/connectors/py/connector.py | 9 | 22350 | #!/usr/bin/env python
"""
FCKeditor - The text editor for internet
Copyright (C) 2003-2005 Frederico Caldeira Knabben
Licensed under the terms of the GNU Lesser General Public License:
http://www.opensource.org/licenses/lgpl-license.php
For further information visit:
http://www.fckeditor.net/
"Support Open Source software. What about a donation today?"
File Name: connector.py
Connector for Python.
Tested With:
Standard:
Python 2.3.3
Zope:
Zope Version: (Zope 2.8.1-final, python 2.3.5, linux2)
Python Version: 2.3.5 (#4, Mar 10 2005, 01:40:25)
[GCC 3.3.3 20040412 (Red Hat Linux 3.3.3-7)]
System Platform: linux2
File Authors:
Andrew Liu ([email protected])
"""
"""
Author Notes (04 December 2005):
This module has gone through quite a few phases of change. Obviously,
I am only supporting that part of the code that I use. Initially
I had the upload directory as a part of zope (ie. uploading files
directly into Zope), before realising that there were too many
complex intricacies within Zope to deal with. Zope is one ugly piece
of code. So I decided to complement Zope by an Apache server (which
I had running anyway, and doing nothing). So I mapped all uploads
from an arbitrary server directory to an arbitrary web directory.
All the FCKeditor uploading occurred this way, and I didn't have to
stuff around with fiddling with Zope objects and the like (which are
terribly complex and something you don't want to do - trust me).
Maybe a Zope expert can touch up the Zope components. In the end,
I had FCKeditor loaded in Zope (probably a bad idea as well), and
I replaced the connector.py with an alias to a server module.
Right now, all Zope components will simple remain as is because
I've had enough of Zope.
See notes right at the end of this file for how I aliased out of Zope.
Anyway, most of you probably wont use Zope, so things are pretty
simple in that regard.
Typically, SERVER_DIR is the root of WEB_DIR (not necessarily).
Most definitely, SERVER_USERFILES_DIR points to WEB_USERFILES_DIR.
"""
import cgi
import re
import os
import string
"""
escape
Converts the special characters '<', '>', and '&'.
RFC 1866 specifies that these characters be represented
in HTML as < > and & respectively. In Python
1.5 we use the new string.replace() function for speed.
"""
def escape(text, replace=string.replace):
text = replace(text, '&', '&') # must be done 1st
text = replace(text, '<', '<')
text = replace(text, '>', '>')
text = replace(text, '"', '"')
return text
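# e.g. escape('<a href="x">&y') -> '&lt;a href=&quot;x&quot;&gt;&amp;y'
# ('&' is converted first so the ampersands introduced by the other
# replacements are not themselves re-escaped).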
"""
getFCKeditorConnector
Creates a new instance of an FCKeditorConnector, and runs it
"""
def getFCKeditorConnector(context=None):
# Called from Zope. Passes the context through
connector = FCKeditorConnector(context=context)
return connector.run()
"""
FCKeditorRequest
A wrapper around the request object
Can handle normal CGI request, or a Zope request
Extend as required
"""
class FCKeditorRequest(object):
def __init__(self, context=None):
if (context is not None):
r = context.REQUEST
else:
r = cgi.FieldStorage()
self.context = context
self.request = r
def isZope(self):
if (self.context is not None):
return True
return False
def has_key(self, key):
return self.request.has_key(key)
def get(self, key, default=None):
value = None
if (self.isZope()):
value = self.request.get(key, default)
else:
if key in self.request.keys():
value = self.request[key].value
else:
value = default
return value
"""
FCKeditorConnector
The connector class
"""
class FCKeditorConnector(object):
# Configuration for FCKEditor
# can point to another server here, if linked correctly
#WEB_HOST = "http://127.0.0.1/"
WEB_HOST = ""
SERVER_DIR = "/var/www/html/"
WEB_USERFILES_FOLDER = WEB_HOST + "upload/"
SERVER_USERFILES_FOLDER = SERVER_DIR + "upload/"
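	# Note: SERVER_USERFILES_FOLDER is the on-disk directory that the web
	# server publishes as WEB_USERFILES_FOLDER (see the author notes above);
	# it must be writable by the process running this connector.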
# Allow access (Zope)
__allow_access_to_unprotected_subobjects__ = 1
# Class Attributes
parentFolderRe = re.compile("[\/][^\/]+[\/]?$")
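	# parentFolderRe strips the final path component, e.g. "/a/b/" -> "/a"
	# and "/a/b" -> "/a" (used by getParentFolder below).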
"""
Constructor
"""
def __init__(self, context=None):
# The given root path will NOT be shown to the user
# Only the userFilesPath will be shown
# Instance Attributes
self.context = context
self.request = FCKeditorRequest(context=context)
self.rootPath = self.SERVER_DIR
self.userFilesFolder = self.SERVER_USERFILES_FOLDER
self.webUserFilesFolder = self.WEB_USERFILES_FOLDER
# Enables / Disables the connector
self.enabled = False # Set to True to enable this connector
# These are instance variables
self.zopeRootContext = None
self.zopeUploadContext = None
# Copied from php module =)
self.allowedExtensions = {
"File": None,
"Image": None,
"Flash": None,
"Media": None
}
self.deniedExtensions = {
"File": [ "php", "php3", "php5", "phtml", "asp", "aspx", "ascx", "jsp", "cfm", "cfc", "pl", "bat", "exe", "dll", "reg", "cgi" ],
"Image": [ "php", "php3", "php5", "phtml", "asp", "aspx", "ascx", "jsp", "cfm", "cfc", "pl", "bat", "exe", "dll", "reg", "cgi" ],
"Flash": [ "php", "php3", "php5", "phtml", "asp", "aspx", "ascx", "jsp", "cfm", "cfc", "pl", "bat", "exe", "dll", "reg", "cgi" ],
"Media": [ "php", "php3", "php5", "phtml", "asp", "aspx", "ascx", "jsp", "cfm", "cfc", "pl", "bat", "exe", "dll", "reg", "cgi" ]
}
"""
Zope specific functions
"""
def isZope(self):
# The context object is the zope object
if (self.context is not None):
return True
return False
def getZopeRootContext(self):
if self.zopeRootContext is None:
self.zopeRootContext = self.context.getPhysicalRoot()
return self.zopeRootContext
def getZopeUploadContext(self):
if self.zopeUploadContext is None:
folderNames = self.userFilesFolder.split("/")
c = self.getZopeRootContext()
for folderName in folderNames:
if (folderName <> ""):
c = c[folderName]
self.zopeUploadContext = c
return self.zopeUploadContext
"""
Generic manipulation functions
"""
def getUserFilesFolder(self):
return self.userFilesFolder
def getWebUserFilesFolder(self):
return self.webUserFilesFolder
def getAllowedExtensions(self, resourceType):
return self.allowedExtensions[resourceType]
def getDeniedExtensions(self, resourceType):
return self.deniedExtensions[resourceType]
def removeFromStart(self, string, char):
return string.lstrip(char)
def removeFromEnd(self, string, char):
return string.rstrip(char)
def convertToXmlAttribute(self, value):
if (value is None):
value = ""
return escape(value)
def convertToPath(self, path):
if (path[-1] <> "/"):
return path + "/"
else:
return path
def getUrlFromPath(self, resourceType, path):
if (resourceType is None) or (resourceType == ''):
url = "%s%s" % (
self.removeFromEnd(self.getUserFilesFolder(), '/'),
path
)
else:
url = "%s%s%s" % (
self.getUserFilesFolder(),
resourceType,
path
)
return url
def getWebUrlFromPath(self, resourceType, path):
if (resourceType is None) or (resourceType == ''):
url = "%s%s" % (
self.removeFromEnd(self.getWebUserFilesFolder(), '/'),
path
)
else:
url = "%s%s%s" % (
self.getWebUserFilesFolder(),
resourceType,
path
)
return url
def removeExtension(self, fileName):
index = fileName.rindex(".")
newFileName = fileName[0:index]
return newFileName
def getExtension(self, fileName):
index = fileName.rindex(".") + 1
fileExtension = fileName[index:]
return fileExtension
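	# Both helpers split on the *last* dot only, e.g.
	# removeExtension("a.tar.gz") -> "a.tar" and getExtension("a.tar.gz") -> "gz".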
def getParentFolder(self, folderPath):
parentFolderPath = self.parentFolderRe.sub('', folderPath)
return parentFolderPath
"""
serverMapFolder
Purpose: works out the folder map on the server
"""
def serverMapFolder(self, resourceType, folderPath):
# Get the resource type directory
resourceTypeFolder = "%s%s/" % (
self.getUserFilesFolder(),
resourceType
)
# Ensure that the directory exists
self.createServerFolder(resourceTypeFolder)
# Return the resource type directory combined with the
# required path
return "%s%s" % (
resourceTypeFolder,
self.removeFromStart(folderPath, '/')
)
"""
createServerFolder
Purpose: physically creates a folder on the server
"""
def createServerFolder(self, folderPath):
# Check if the parent exists
parentFolderPath = self.getParentFolder(folderPath)
if not(os.path.exists(parentFolderPath)):
errorMsg = self.createServerFolder(parentFolderPath)
if errorMsg is not None:
return errorMsg
# Check if this exists
if not(os.path.exists(folderPath)):
os.mkdir(folderPath)
os.chmod(folderPath, 0755)
errorMsg = None
else:
if os.path.isdir(folderPath):
errorMsg = None
else:
raise "createServerFolder: Non-folder of same name already exists"
return errorMsg
"""
getRootPath
Purpose: returns the root path on the server
"""
def getRootPath(self):
return self.rootPath
"""
setXmlHeaders
Purpose: to prepare the headers for the xml to return
"""
def setXmlHeaders(self):
#now = self.context.BS_get_now()
#yesterday = now - 1
self.setHeader("Content-Type", "text/xml")
#self.setHeader("Expires", yesterday)
#self.setHeader("Last-Modified", now)
#self.setHeader("Cache-Control", "no-store, no-cache, must-revalidate")
self.printHeaders()
return
def setHeader(self, key, value):
if (self.isZope()):
self.context.REQUEST.RESPONSE.setHeader(key, value)
else:
print "%s: %s" % (key, value)
return
def printHeaders(self):
# For non-Zope requests, we need to print an empty line
# to denote the end of headers
if (not(self.isZope())):
print ""
"""
createXmlFooter
Purpose: returns the xml header
"""
def createXmlHeader(self, command, resourceType, currentFolder):
self.setXmlHeaders()
s = ""
# Create the XML document header
s += """<?xml version="1.0" encoding="utf-8" ?>"""
# Create the main connector node
s += """<Connector command="%s" resourceType="%s">""" % (
command,
resourceType
)
# Add the current folder node
s += """<CurrentFolder path="%s" url="%s" />""" % (
self.convertToXmlAttribute(currentFolder),
self.convertToXmlAttribute(
self.getWebUrlFromPath(
resourceType,
currentFolder
)
),
)
return s
"""
createXmlFooter
Purpose: returns the xml footer
"""
def createXmlFooter(self):
s = """</Connector>"""
return s
"""
sendError
Purpose: in the event of an error, return an xml based error
"""
def sendError(self, number, text):
self.setXmlHeaders()
s = ""
# Create the XML document header
s += """<?xml version="1.0" encoding="utf-8" ?>"""
s += """<Connector>"""
s += """<Error number="%s" text="%s" />""" % (number, text)
s += """</Connector>"""
return s
"""
getFolders
Purpose: command to recieve a list of folders
"""
def getFolders(self, resourceType, currentFolder):
if (self.isZope()):
return self.getZopeFolders(resourceType, currentFolder)
else:
return self.getNonZopeFolders(resourceType, currentFolder)
def getZopeFolders(self, resourceType, currentFolder):
# Open the folders node
s = ""
s += """<Folders>"""
zopeFolder = self.findZopeFolder(resourceType, currentFolder)
for (name, o) in zopeFolder.objectItems(["Folder"]):
s += """<Folder name="%s" />""" % (
self.convertToXmlAttribute(name)
)
# Close the folders node
s += """</Folders>"""
return s
def getNonZopeFolders(self, resourceType, currentFolder):
# Map the virtual path to our local server
serverPath = self.serverMapFolder(resourceType, currentFolder)
# Open the folders node
s = ""
s += """<Folders>"""
for someObject in os.listdir(serverPath):
someObjectPath = os.path.join(serverPath, someObject)
if os.path.isdir(someObjectPath):
s += """<Folder name="%s" />""" % (
self.convertToXmlAttribute(someObject)
)
# Close the folders node
s += """</Folders>"""
return s
"""
getFoldersAndFiles
Purpose: command to recieve a list of folders and files
"""
def getFoldersAndFiles(self, resourceType, currentFolder):
if (self.isZope()):
return self.getZopeFoldersAndFiles(resourceType, currentFolder)
else:
return self.getNonZopeFoldersAndFiles(resourceType, currentFolder)
def getNonZopeFoldersAndFiles(self, resourceType, currentFolder):
# Map the virtual path to our local server
serverPath = self.serverMapFolder(resourceType, currentFolder)
# Open the folders / files node
folders = """<Folders>"""
files = """<Files>"""
for someObject in os.listdir(serverPath):
someObjectPath = os.path.join(serverPath, someObject)
if os.path.isdir(someObjectPath):
folders += """<Folder name="%s" />""" % (
self.convertToXmlAttribute(someObject)
)
elif os.path.isfile(someObjectPath):
size = os.path.getsize(someObjectPath)
files += """<File name="%s" size="%s" />""" % (
self.convertToXmlAttribute(someObject),
os.path.getsize(someObjectPath)
)
# Close the folders / files node
folders += """</Folders>"""
files += """</Files>"""
# Return it
s = folders + files
return s
def getZopeFoldersAndFiles(self, resourceType, currentFolder):
folders = self.getZopeFolders(resourceType, currentFolder)
files = self.getZopeFiles(resourceType, currentFolder)
s = folders + files
return s
def getZopeFiles(self, resourceType, currentFolder):
# Open the files node
s = ""
s += """<Files>"""
zopeFolder = self.findZopeFolder(resourceType, currentFolder)
for (name, o) in zopeFolder.objectItems(["File","Image"]):
s += """<File name="%s" size="%s" />""" % (
self.convertToXmlAttribute(name),
((o.get_size() / 1024) + 1)
)
# Close the files node
s += """</Files>"""
return s
def findZopeFolder(self, resourceType, folderName):
# returns the context of the resource / folder
zopeFolder = self.getZopeUploadContext()
folderName = self.removeFromStart(folderName, "/")
folderName = self.removeFromEnd(folderName, "/")
if (resourceType <> ""):
try:
zopeFolder = zopeFolder[resourceType]
except:
zopeFolder.manage_addProduct["OFSP"].manage_addFolder(id=resourceType, title=resourceType)
zopeFolder = zopeFolder[resourceType]
if (folderName <> ""):
folderNames = folderName.split("/")
for folderName in folderNames:
zopeFolder = zopeFolder[folderName]
return zopeFolder
"""
createFolder
Purpose: command to create a new folder
"""
def createFolder(self, resourceType, currentFolder):
if (self.isZope()):
return self.createZopeFolder(resourceType, currentFolder)
else:
return self.createNonZopeFolder(resourceType, currentFolder)
def createZopeFolder(self, resourceType, currentFolder):
# Find out where we are
zopeFolder = self.findZopeFolder(resourceType, currentFolder)
errorNo = 0
errorMsg = ""
if self.request.has_key("NewFolderName"):
newFolder = self.request.get("NewFolderName", None)
zopeFolder.manage_addProduct["OFSP"].manage_addFolder(id=newFolder, title=newFolder)
else:
errorNo = 102
error = """<Error number="%s" originalDescription="%s" />""" % (
errorNo,
self.convertToXmlAttribute(errorMsg)
)
return error
def createNonZopeFolder(self, resourceType, currentFolder):
errorNo = 0
errorMsg = ""
if self.request.has_key("NewFolderName"):
newFolder = self.request.get("NewFolderName", None)
currentFolderPath = self.serverMapFolder(
resourceType,
currentFolder
)
try:
newFolderPath = currentFolderPath + newFolder
errorMsg = self.createServerFolder(newFolderPath)
if (errorMsg is not None):
errorNo = 110
except:
errorNo = 103
else:
errorNo = 102
error = """<Error number="%s" originalDescription="%s" />""" % (
errorNo,
self.convertToXmlAttribute(errorMsg)
)
return error
"""
getFileName
Purpose: helper function to extrapolate the filename
"""
def getFileName(self, filename):
for splitChar in ["/", "\\"]:
array = filename.split(splitChar)
if (len(array) > 1):
filename = array[-1]
return filename
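	# e.g. getFileName("C:\\docs\\a.png") -> "a.png"; both "/" and "\"
	# separators are handled, so full client-side paths reduce to the base name.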
"""
fileUpload
Purpose: command to upload files to server
"""
def fileUpload(self, resourceType, currentFolder):
if (self.isZope()):
return self.zopeFileUpload(resourceType, currentFolder)
else:
return self.nonZopeFileUpload(resourceType, currentFolder)
def zopeFileUpload(self, resourceType, currentFolder, count=None):
zopeFolder = self.findZopeFolder(resourceType, currentFolder)
file = self.request.get("NewFile", None)
fileName = self.getFileName(file.filename)
fileNameOnly = self.removeExtension(fileName)
fileExtension = self.getExtension(fileName).lower()
if (count):
nid = "%s.%s.%s" % (fileNameOnly, count, fileExtension)
else:
nid = fileName
title = nid
try:
zopeFolder.manage_addProduct['OFSP'].manage_addFile(
id=nid,
title=title,
file=file.read()
)
except:
if (count):
count += 1
else:
count = 1
self.zopeFileUpload(resourceType, currentFolder, count)
return
def nonZopeFileUpload(self, resourceType, currentFolder):
errorNo = 0
errorMsg = ""
if self.request.has_key("NewFile"):
# newFile has all the contents we need
newFile = self.request.get("NewFile", "")
# Get the file name
newFileName = newFile.filename
newFileNameOnly = self.removeExtension(newFileName)
newFileExtension = self.getExtension(newFileName).lower()
allowedExtensions = self.getAllowedExtensions(resourceType)
deniedExtensions = self.getDeniedExtensions(resourceType)
if (allowedExtensions is not None):
# Check for allowed
isAllowed = False
if (newFileExtension in allowedExtensions):
isAllowed = True
elif (deniedExtensions is not None):
# Check for denied
isAllowed = True
if (newFileExtension in deniedExtensions):
isAllowed = False
else:
# No extension limitations
isAllowed = True
if (isAllowed):
if (self.isZope()):
# Upload into zope
self.zopeFileUpload(resourceType, currentFolder)
else:
# Upload to operating system
# Map the virtual path to the local server path
currentFolderPath = self.serverMapFolder(
resourceType,
currentFolder
)
i = 0
while (True):
newFilePath = "%s%s" % (
currentFolderPath,
newFileName
)
						if os.path.exists(newFilePath):
							# A file of this name exists: flag error 201 and
							# retry the loop with "name(i).extension"
							i += 1
							newFileName = "%s(%s).%s" % (
								newFileNameOnly,
								i,
								newFileExtension
							)
							errorNo = 201
else:
fileHandle = open(newFilePath,'w')
linecount = 0
while (1):
#line = newFile.file.readline()
line = newFile.readline()
if not line: break
fileHandle.write("%s" % line)
linecount += 1
os.chmod(newFilePath, 0777)
break
else:
newFileName = "Extension not allowed"
errorNo = 203
else:
newFileName = "No File"
errorNo = 202
string = """
<script type="text/javascript">
window.parent.frames["frmUpload"].OnUploadCompleted(%s,"%s");
</script>
""" % (
errorNo,
newFileName.replace('"',"'")
)
return string
def run(self):
s = ""
try:
# Check if this is disabled
if not(self.enabled):
return self.sendError(1, "This connector is disabled. Please check the connector configurations and try again")
# Make sure we have valid inputs
if not(
(self.request.has_key("Command")) and
(self.request.has_key("Type")) and
(self.request.has_key("CurrentFolder"))
):
return
# Get command
command = self.request.get("Command", None)
# Get resource type
resourceType = self.request.get("Type", None)
# folder syntax must start and end with "/"
currentFolder = self.request.get("CurrentFolder", None)
if (currentFolder[-1] <> "/"):
currentFolder += "/"
if (currentFolder[0] <> "/"):
currentFolder = "/" + currentFolder
# Check for invalid paths
if (".." in currentFolder):
return self.sendError(102, "")
# File upload doesn't have to return XML, so intercept
			# here:
if (command == "FileUpload"):
return self.fileUpload(resourceType, currentFolder)
# Begin XML
s += self.createXmlHeader(command, resourceType, currentFolder)
# Execute the command
if (command == "GetFolders"):
f = self.getFolders
elif (command == "GetFoldersAndFiles"):
f = self.getFoldersAndFiles
elif (command == "CreateFolder"):
f = self.createFolder
else:
f = None
if (f is not None):
s += f(resourceType, currentFolder)
s += self.createXmlFooter()
except Exception, e:
s = "ERROR: %s" % e
return s
# Running from command line
if __name__ == '__main__':
# To test the output, uncomment the standard headers
#print "Content-Type: text/html"
#print ""
print getFCKeditorConnector()
"""
Running from zope, you will need to modify this connector.
If you have uploaded the FCKeditor into Zope (like me), you need to
move this connector out of Zope, and replace the "connector" with an
alias as below. The key to it is to pass the Zope context in, as
we then have a like to the Zope context.
## Script (Python) "connector.py"
##bind container=container
##bind context=context
##bind namespace=
##bind script=script
##bind subpath=traverse_subpath
##parameters=*args, **kws
##title=ALIAS
##
import Products.connector as connector
return connector.getFCKeditorConnector(context=context).run()
"""
| gpl-2.0 | 8,343,888,651,736,149,000 | 26.617157 | 133 | 0.654407 | false |
gregbdunn/aws-ec2rescue-linux | tools/moduletests/unit/test_arpcache.py | 2 | 12661 | # Copyright 2016-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""
Unit tests for the arpcache module
"""
import os
import subprocess
import sys
import unittest
import mock
import moduletests.src.arpcache
try:
# Python 2.x
from cStringIO import StringIO
except ImportError:
# Python 3.x
from io import StringIO
if sys.hexversion >= 0x3040000:
# contextlib.redirect_stdout was introduced in Python 3.4
import contextlib
else:
# contextlib2 is a backport of contextlib from Python 3.5 and is compatible with Python2/3
import contextlib2 as contextlib
class TestArpcache(unittest.TestCase):
config_file_path = "/etc/sysctl.d/55-arp-gc_thresh1.conf"
def setUp(self):
self.output = StringIO()
def tearDown(self):
self.output.close()
@mock.patch("subprocess.check_output")
def test_detect_noproblem(self, check_output_mock):
check_output_mock.return_value = "net.ipv4.neigh.default.gc_thresh1 = 0"
self.assertFalse(moduletests.src.arpcache.detect())
self.assertTrue(check_output_mock.called)
@mock.patch("subprocess.check_output")
def test_detect_problem(self, check_output_mock):
check_output_mock.return_value = "net.ipv4.neigh.default.gc_thresh1 = 1"
self.assertTrue(moduletests.src.arpcache.detect())
self.assertTrue(check_output_mock.called)
@mock.patch("subprocess.check_output", side_effect=subprocess.CalledProcessError(
"1", "test", "/etc/sysctl.d/55-arp-gc_thresh1.conf: no such file or directory"))
def test_fix_cpe(self, check_output_mock):
with contextlib.redirect_stdout(self.output):
self.assertRaises(subprocess.CalledProcessError, moduletests.src.arpcache.fix, self.config_file_path)
self.assertTrue(self.output.getvalue().endswith(
"[UNFIXED] 'sysctl -w net.ipv4.neigh.default.gc_thresh1=0' failed for running system\n"))
self.assertTrue(check_output_mock.called)
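    # Reminder: stacked mock.patch decorators are applied bottom-up, so the
    # bottom-most patch supplies the first mock argument after self; a patch
    # given an explicit replacement (e.g. mock.mock_open(...)) passes none.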
@mock.patch("subprocess.check_output")
@mock.patch("moduletests.src.arpcache.os.path.exists", side_effect=[False])
@mock.patch("moduletests.src.arpcache.open", mock.mock_open(read_data="stuff"))
    def test_fix_exists_sudo_true(self, exists_mock, check_output_mock):
check_output_mock.return_value = "True"
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.arpcache.fix(self.config_file_path))
self.assertTrue(self.output.getvalue().endswith(
"[FIXED] set net.ipv4.neigh.default.gc_thresh1=0 for running system\n"
"[FIXED] net.ipv4.neigh.default.gc_thresh1=0 in /etc/sysctl.d/55-arp-gc_thresh1.conf\n"))
self.assertTrue(check_output_mock.called)
self.assertTrue(exists_mock.called)
@mock.patch("subprocess.check_output")
@mock.patch("moduletests.src.arpcache.os.path.exists", side_effect=[True])
@mock.patch("moduletests.src.arpcache.open", mock.mock_open(read_data="net.ipv4.neigh.default.gc_thresh1 = 0\n"
"something else\n"))
    def test_fix_sudo_true(self, exists_mock, check_output_mock):
check_output_mock.return_value = "True"
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.arpcache.fix(self.config_file_path))
self.assertTrue(self.output.getvalue().endswith(
"[FIXED] set net.ipv4.neigh.default.gc_thresh1=0 for running system\n"
"[FIXED] net.ipv4.neigh.default.gc_thresh1=0 in /etc/sysctl.d/55-arp-gc_thresh1.conf\n"))
self.assertTrue(check_output_mock.called)
self.assertTrue(exists_mock.called)
@mock.patch("subprocess.check_output")
@mock.patch("moduletests.src.arpcache.os.path.exists", side_effect=[True])
@mock.patch("moduletests.src.arpcache.open", mock.mock_open(read_data="net.ipv4.neigh.default.gc_thresh1 = 0\n"
"net.ipv4.neigh.default.gc_thresh1 = 0\n"))
    def test_fix_sudo_true_found_twice(self, exists_mock, check_output_mock):
check_output_mock.return_value = "True"
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.arpcache.fix(self.config_file_path))
self.assertTrue(self.output.getvalue().endswith(
"[FIXED] set net.ipv4.neigh.default.gc_thresh1=0 for running system\n"
"[FIXED] net.ipv4.neigh.default.gc_thresh1=0 in /etc/sysctl.d/55-arp-gc_thresh1.conf\n"))
self.assertTrue(check_output_mock.called)
self.assertTrue(exists_mock.called)
@mock.patch("subprocess.check_output")
@mock.patch("moduletests.src.arpcache.os.path.exists", side_effect=[False])
@mock.patch("moduletests.src.arpcache.open", side_effect=IOError)
def test_fix_writefail(self, open_mock, exists_mock, check_output_mock):
check_output_mock.return_value = "True"
with contextlib.redirect_stdout(self.output):
self.assertRaises(IOError, moduletests.src.arpcache.fix, self.config_file_path)
self.assertTrue(check_output_mock.called)
self.assertTrue(exists_mock.called)
self.assertTrue(open_mock.called)
self.assertTrue(self.output.getvalue().endswith(
"[UNFIXED] Failed to write config to /etc/sysctl.d/55-arp-gc_thresh1.conf\n"))
@mock.patch("moduletests.src.arpcache.detect", return_value=False)
def test_run_success(self, detect_mock):
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.arpcache.run())
self.assertTrue(self.output.getvalue().endswith("Determining if aggressive ARP caching is enabled\n"
"[SUCCESS] Aggressive arp caching is disabled.\n"))
self.assertTrue(detect_mock.called)
@mock.patch("moduletests.src.arpcache.get_config_dict")
@mock.patch("moduletests.src.arpcache.detect", return_value=True)
def test_run_no_remediate(self, detect_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": dict(),
"REMEDIATE": False,
"SUDO": True}
with contextlib.redirect_stdout(self.output):
moduletests.src.arpcache.run()
self.assertTrue("[UNFIXED] Remediation impossible without sudo and --remediate.\n"
"-- Running as root/sudo: True\n"
"-- Required --remediate flag specified: False\n"
"[FAILURE] Aggressive arp caching is enabled."
in self.output.getvalue())
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.arpcache.get_config_dict")
@mock.patch("moduletests.src.arpcache.detect", return_value=True)
@mock.patch("moduletests.src.arpcache.os.path.isfile", return_value=True)
@mock.patch("moduletests.src.arpcache.backup", return_value=True)
@mock.patch("moduletests.src.arpcache.fix", return_value=True)
@mock.patch("moduletests.src.arpcache.restore", return_value=True)
def test_run_failure_isfile(self, restore_mock, fix_mock, backup_mock, isfile_mock, detect_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": {self.config_file_path: "/some/path"},
"REMEDIATE": True,
"SUDO": True}
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.arpcache.run())
self.assertTrue("Determining if aggressive ARP caching is enabled\n"
"[FAILURE] Aggressive arp caching is enabled. "
"This can cause issues communicating with instances in the same subnet"
in self.output.getvalue())
self.assertTrue(restore_mock.called)
self.assertTrue(fix_mock.called)
self.assertTrue(backup_mock.called)
self.assertTrue(isfile_mock.called)
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.arpcache.get_config_dict")
@mock.patch("moduletests.src.arpcache.detect", return_value=True)
@mock.patch("moduletests.src.arpcache.os.path.isfile", return_value=False)
@mock.patch("moduletests.src.arpcache.fix", return_value=True)
def test_run_failure(self, fix_mock, isfile_mock, detect_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": dict(),
"REMEDIATE": True,
"SUDO": True}
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.arpcache.run())
self.assertTrue("Determining if aggressive ARP caching is enabled\n"
"[FAILURE] Aggressive arp caching is enabled. "
"This can cause issues communicating with instances in the same subnet"
in self.output.getvalue())
self.assertTrue(fix_mock.called)
self.assertTrue(isfile_mock.called)
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.arpcache.get_config_dict")
@mock.patch("moduletests.src.arpcache.detect", side_effect=(True, False))
@mock.patch("moduletests.src.arpcache.os.path.isfile", return_value=False)
@mock.patch("moduletests.src.arpcache.fix", return_value=True)
def test_run_fix(self, fix_mock, isfile_mock, detect_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": dict(),
"REMEDIATE": True,
"SUDO": True}
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.arpcache.run())
self.assertTrue(self.output.getvalue().endswith("Determining if aggressive ARP caching is enabled\n"
"[SUCCESS] Aggressive arp caching is disabled after "
"remediation. Please see the logs for further details\n"))
self.assertTrue(fix_mock.called)
self.assertTrue(isfile_mock.called)
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.arpcache.get_config_dict")
@mock.patch("moduletests.src.arpcache.detect", side_effect=Exception)
@mock.patch("moduletests.src.arpcache.restore", return_value=True)
def test_run_detect_exception(self, restore_mock, detect_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": {self.config_file_path: "/some/path"},
"REMEDIATE": True}
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.arpcache.run())
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
self.assertTrue(restore_mock.called)
@mock.patch("moduletests.src.arpcache.get_config_dict", side_effect=Exception)
def test_run_config_exception(self, config_mock):
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.arpcache.run())
self.assertTrue(config_mock.called)
| apache-2.0 | -1,023,453,071,174,588,700 | 52.876596 | 117 | 0.629492 | false |
sagarghuge/recurringtask | GTG/gtk/editor/notify_dialog.py | 1 | 2066 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2013 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
from gi.repository import Gtk
from GTG import _, ngettext
from GTG.gtk.editor import GnomeConfig
class NotifyCloseUI():
def __init__(self):
# Load window tree
self.builder = Gtk.Builder()
self.builder.add_from_file(GnomeConfig.NOTIFY_UI_FILE)
signals = {"on_confirm_activate": self.on_confirm_pressed,
"on_delete_cancel": lambda x: x.hide, }
self.builder.connect_signals(signals)
def on_confirm_pressed(self, widget):
self.builder.get_object("notify_dialog").hide()
def notifyclose(self):
cdlabel2 = self.builder.get_object("cd-label2")
cdlabel2.set_label(ngettext(
"You need to set Due date before closing the task.",
"You need to set Due date before closing the task.",
0))
notifyclose_dialog = self.builder.get_object("notify_dialog")
notifyclose_dialog.resize(1, 1)
confirm_button = self.builder.get_object("confirm")
confirm_button.grab_focus()
if notifyclose_dialog.run() != 1:
pass
notifyclose_dialog.hide()
| gpl-3.0 | 5,517,736,922,483,735,000 | 37.259259 | 79 | 0.625363 | false |
saltastro/polsalt | polsalt/specpolwollaston.py | 1 | 2770 |
"""
specpolsplit
Split O and E beams
"""
import numpy as np
from scipy.interpolate import interp1d
from scipy.ndimage.interpolation import shift
from specpolutils import rssmodelwave
def read_wollaston(hdu, wollaston_file):
""" Correct the O or E beam for distortion due to the beam splitter
Parameters
----------
hdu: fits.HDUList
Polarimetric observations data
wollaston_file: None or str
File containing the central position of the split O and E beams
    Returns
    -------
woll_pix: ~numpy.ndarray
        A two-row array giving the center pixel of the O and E beams at
        each wavelength column for the given configuration
"""
    # set up data
data= hdu['SCI'].data
rows,cols = data.shape
grating = hdu[0].header['GRATING'].strip()
grang = hdu[0].header['GR-ANGLE']
artic = hdu[0].header['CAMANG']
trkrho = hdu[0].header['TRKRHO']
date = hdu[0].header['DATE-OBS'].replace('-','')
cbin, rbin = [int(x) for x in hdu[0].header['CCDSUM'].split(" ")]
    # load data from the wollaston file
lam_m = np.loadtxt(wollaston_file,dtype=float,usecols=(0,))
rpix_om = np.loadtxt(wollaston_file,dtype=float,unpack=True,usecols=(1,2))
lam_c = rssmodelwave(grating,grang,artic,trkrho,cbin,cols,date)
return interp1d(lam_m,rpix_om,kind='cubic',bounds_error=False)(lam_c)
def specpolwollaston(hdu, wollaston_file=None):
""" Correct the O or E beam for distortion due to the beam splitter
Parameters
----------
hdu: fits.HDUList
Polarimetric observations data
wollaston_file: None or str
File containing the central position of the split O and E beams
    Returns
    -------
whdu: fits.HDUList
New object with each extension corrected
"""
rows,cols = hdu[1].data.shape
cbin, rbin = [int(x) for x in hdu[0].header['CCDSUM'].split(" ")]
    # determine the per-column row shift of each beam relative to the
    # central column, scaled by the CCD row binning
    rpix_oc = read_wollaston(hdu, wollaston_file)
    drow_shift = (rpix_oc - rpix_oc[:, cols // 2][:, None]) / rbin
for i in range(len(hdu)):
if hdu[i].data.any():
for o in (0,1):
hdu[i].data[o] = correct_wollaston(hdu[i].data[o], drow_shift[o])
return hdu
def correct_wollaston(data, drow_shift):
"""Correct the distortion in the data by a shift
Parameters
----------
data: ~numpy.ndarray
Data to be corrected
drow_shift: ~numpy.ndarray
Shift to be applied to each column
Returns
-------
sdata: ~numpy.ndarray
Corrected data
"""
rows,cols = data.shape
sdata = np.zeros(data.shape, dtype='float32')
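    # shift each column independently along the row axis; order=1 requests
    # linear interpolation for the sub-pixel translation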
for c in range(cols):
shift(data[:,c], drow_shift[c], sdata[:,c], order=1)
return sdata
| bsd-3-clause | -7,685,851,724,710,988,000 | 24.181818 | 80 | 0.626354 | false |
Fritz449/SRLF | algos/a3c_discrete.py | 1 | 15229 | import tensorflow as tf
import numpy as np
import os
import sys
import random
import subprocess
from redis import Redis
import time
sys.path.append(os.path.realpath(".."))
import helpers.utils as hlp
from models.feed_forward import FFDiscrete
class A3CDiscreteTrainer(FFDiscrete):
def __init__(self, sess, args):
FFDiscrete.__init__(self, sess, args)
self.sess = sess
self.config = args['config']
self.env = args['environment']
self.timesteps_per_launch = args['max_pathlength']
self.n_workers = args['n_workers']
self.distributed = args['distributed']
self.n_tests = args['n_tests']
self.entropy_coef = args['entropy_coef']
self.learning_rate = args['learning_rate']
self.n_steps = args['n_steps']
self.scale = args['scale']
self.gamma = args['gamma']
self.save_every = args.get('save_every', 1)
self.test_every = args.get('test_every', 10)
self.sums = self.sumsqrs = self.sumtime = 0
self.timestep = 0
self.create_internal()
self.train_scores = []
self.test_scores = []
np.set_printoptions(precision=6)
# Worker parameters:
self.id_worker = args['id_worker']
self.test_mode = args['test_mode']
def create_internal(self):
self.targets = {
"advantage": tf.placeholder(dtype=tf.float32, shape=[None]),
"return": tf.placeholder(dtype=tf.float32, shape=[None]),
}
for i in range(len(self.n_actions)):
self.targets["action_{}".format(i)] = tf.placeholder(dtype=tf.int32, shape=[None])
N = tf.shape(self.targets["advantage"])[0]
base = [N] + [1 for _ in range(len(self.n_actions))]
log_dist = tf.zeros(shape=[N] + self.n_actions)
p_n = tf.zeros(shape=[N])
for i, n in enumerate(self.n_actions):
right_shape = base[:]
right_shape[i + 1] = n
actions = self.targets["action_{}".format(i)]
action_log_dist = tf.reshape(self.action_logprobs[i], [-1])
p = tf.reshape(tf.gather(action_log_dist, tf.range(0, N) * n + actions), [-1])
p_n += p
log_dist += tf.reshape(action_log_dist, right_shape)
N = tf.cast(N, tf.float32)
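        # A3C objective: a policy-gradient term weighted by the advantage, an
        # entropy bonus that encourages exploration, and an L2 value loss for
        # the critic, combined below into a single scalar loss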
self.loss = -tf.reduce_mean(p_n * self.targets["advantage"])
self.entropy = tf.reduce_sum(-tf.exp(log_dist) * log_dist) / N
value_loss = tf.reduce_mean((self.targets["return"] - self.value) ** 2)
self.loss += -self.entropy_coef * self.entropy + value_loss / 2
self.weights += self.value_weights
self.gradients = tf.gradients(self.loss, self.weights)
def save(self, name):
directory = 'saves/' + name + '/'
if not os.path.exists(directory):
os.makedirs(directory)
directory += 'iteration_{}'.format(self.timestep) + '/'
if not os.path.exists(directory):
os.makedirs(directory)
for i, tensor in enumerate(tf.global_variables()):
value = self.sess.run(tensor)
np.save(directory + 'weight_{}'.format(i), value)
if self.scale != 'off':
np.save(directory + 'sums', self.sums)
np.save(directory + 'sumsquares', self.sumsqrs)
np.save(directory + 'sumtime', self.sumtime)
np.save(directory + 'timestep', np.array([self.timestep]))
np.save(directory + 'train_scores', np.array(self.train_scores))
np.save(directory + 'test_scores', np.array(self.test_scores))
print("Agent successfully saved in folder {}".format(directory))
def load(self, name, iteration=None):
try:
directory = 'saves/' + name + '/'
if not os.path.exists(directory):
print('That directory does not exist!')
raise Exception
if iteration is None:
iteration = np.max([int(x[10:]) for x in [dir for dir in os.walk(directory)][0][1]])
directory += 'iteration_{}'.format(iteration) + '/'
for i, tensor in enumerate(tf.global_variables()):
arr = np.load(directory + 'weight_{}.npy'.format(i))
self.sess.run(tensor.assign(arr))
if self.scale != 'off':
self.sums = np.load(directory + 'sums.npy')
self.sumsqrs = np.load(directory + 'sumsquares.npy')
self.sumtime = np.load(directory + 'sumtime.npy')
self.timestep = np.load(directory + 'timestep.npy')[0]
self.train_scores = np.load(directory + 'train_scores.npy').tolist()
self.test_scores = np.load(directory + 'test_scores.npy').tolist()
print("Agent successfully loaded from folder {}".format(directory))
        except Exception as exc:
            print("Something is wrong, loading failed: {}".format(exc))
def apply_adam_updates(self, variables_server, gradients, learning_rate, epsilon=1e-6):
update_steps = hlp.load_object(variables_server.get('update_steps')) + 1
variables_server.set('update_steps', hlp.dump_object(update_steps))
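        # bias-corrected Adam step size: lr * sqrt(1 - beta2^t) / (1 - beta1^t),
        # with beta1 = 0.9 (velocity) and beta2 = 0.999 (squared gradients)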
learning_rate = learning_rate * ((1 - 0.999 ** update_steps) ** 0.5) / (1 - 0.9 ** update_steps)
for i, gradient in enumerate(gradients):
momentum = hlp.load_object(variables_server.get('momentum_{}'.format(i)))
momentum = 0.999 * momentum + (1 - 0.999) * gradient * gradient
variables_server.set('momentum_{}'.format(i), hlp.dump_object(momentum))
velocity = hlp.load_object(variables_server.get('velocity_{}'.format(i)))
velocity = 0.9 * velocity + (1 - 0.9) * gradient
variables_server.set('velocity_{}'.format(i), hlp.dump_object(velocity))
weight = hlp.load_object(variables_server.get('weight_{}'.format(i)))
new_weight = weight - velocity * learning_rate / ((momentum ** 0.5) + epsilon)
variables_server.set('weight_{}'.format(i), hlp.dump_object(new_weight))
return update_steps
def work(self):
variables_server = Redis(port=12000)
if self.scale != 'off':
try:
means = hlp.load_object(variables_server.get("means"))
stds = hlp.load_object(variables_server.get("stds"))
self.sess.run(self.norm_set_op, feed_dict=dict(zip(self.norm_phs, [means, stds])))
except:
pass
try:
weights = [hlp.load_object(variables_server.get("weight_{}".format(i))) for i in
range(len(self.weights))]
self.set_weights(weights)
except:
pass
env = self.env
while True:
observations, action_tuples, rewards, timestamps = [], [], [], []
for _ in range(self.n_steps):
observations.append(env.features[0])
timestamps.append(env.timestamp)
actions = self.act(env.features)
env.step(actions)
action_tuples.append(actions)
rewards.append(env.reward)
if env.done or env.timestamp > self.timesteps_per_launch:
variables_server.lpush('results', hlp.dump_object(env.get_total_reward()))
print("Episode reward: {}".format(env.get_total_reward()), "Length: {}".format(env.timestamp))
break
timestamps.append(env.timestamp)
observations_batch = np.array(observations)
actions_batch = np.array(action_tuples)
feed_dict = {self.state_input: observations_batch}
for i in range(len(self.n_actions)):
feed_dict[self.targets["action_{}".format(i)]] = actions_batch[:, i]
if env.done or env.timestamp > self.timesteps_per_launch:
rewards.append(0)
env.reset()
else:
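                # rollout truncated mid-episode: bootstrap the tail of the
                # return with the critic's value estimate for the last state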
obs = observations[-1]
rewards.append(self.sess.run(self.value, feed_dict={self.state_input: obs.reshape((1,) + obs.shape)}))
returns_batch = hlp.discount(np.array(rewards), self.gamma, np.array(timestamps))[:-1]
values = self.sess.run(self.value, feed_dict)
feed_dict[self.targets["advantage"]] = returns_batch - values
feed_dict[self.targets["return"]] = returns_batch
gradients = self.sess.run(self.gradients, feed_dict)
self.apply_adam_updates(variables_server, gradients, self.learning_rate)
weights = [hlp.load_object(variables_server.get("weight_{}".format(i))) for i in
range(len(self.weights))]
self.set_weights(weights)
def make_rollout(self):
variables_server = Redis(port=12000)
if self.scale != 'off':
try:
means = hlp.load_object(variables_server.get("means"))
stds = hlp.load_object(variables_server.get("stds"))
self.sess.run(self.norm_set_op, feed_dict=dict(zip(self.norm_phs, [means, stds])))
except:
pass
try:
weights = [hlp.load_object(variables_server.get("weight_{}".format(i))) for i in
range(len(self.weights))]
self.set_weights(weights)
except:
pass
env = self.env
n_tasks = self.n_tests
timestep = 0
i_task = 0
paths = []
while i_task < n_tasks:
path = {}
observations, action_tuples, rewards, dist_tuples, timestamps = [], [], [], [], []
sums = np.zeros((1, env.get_observation_space()))
sumsqrs = np.zeros(sums.shape)
env.reset()
while not env.done and env.timestamp < self.timesteps_per_launch:
sums += env.features
sumsqrs += np.square(env.features)
observations.append(env.features[0])
timestamps.append(env.timestamp)
if not self.test_mode:
actions, dist_tuple = self.act(env.features, return_dists=True)
dist_tuples.append(dist_tuple)
else:
actions = self.act(env.features, exploration=False)
env.step(actions)
timestep += 1
action_tuples.append(actions)
rewards.append(env.reward)
path["observations"] = np.array(observations)
path["action_tuples"] = np.array(action_tuples)
path["rewards"] = np.array(rewards)
if not self.test_mode:
path["dist_tuples"] = np.array(dist_tuples)
path["timestamps"] = np.array(timestamps)
path["sumobs"] = sums
path["sumsqrobs"] = sumsqrs
path["terminated"] = env.done
path["total"] = env.get_total_reward()
paths.append(path)
i_task += 1
if self.distributed:
variables_server.set("paths_{}".format(self.id_worker), hlp.dump_object(paths))
else:
self.paths = paths
def train(self):
cmd_server = 'redis-server --port 12000'
p = subprocess.Popen(cmd_server, shell=True, preexec_fn=os.setsid)
self.variables_server = Redis(port=12000)
means = "-"
stds = "-"
if self.scale != 'off':
if self.timestep == 0:
print("Time to measure features!")
if self.distributed:
worker_args = \
{
'config': self.config,
'test_mode': False,
}
hlp.launch_workers(worker_args, self.n_workers)
paths = []
for i in range(self.n_workers):
paths += hlp.load_object(self.variables_server.get("paths_{}".format(i)))
else:
self.test_mode = False
self.make_rollout()
paths = self.paths
for path in paths:
self.sums += path["sumobs"]
self.sumsqrs += path["sumsqrobs"]
self.sumtime += path["observations"].shape[0]
stds = np.sqrt((self.sumsqrs - np.square(self.sums) / self.sumtime) / (self.sumtime - 1))
means = self.sums / self.sumtime
print("Init means: {}".format(means))
print("Init stds: {}".format(stds))
self.variables_server.set("means", hlp.dump_object(means))
self.variables_server.set("stds", hlp.dump_object(stds))
self.sess.run(self.norm_set_op, feed_dict=dict(zip(self.norm_phs, [means, stds])))
weights = self.get_weights()
for i, weight in enumerate(weights):
self.variables_server.set("weight_" + str(i), hlp.dump_object(weight))
self.variables_server.set('momentum_{}'.format(i), hlp.dump_object(np.zeros(weight.shape)))
self.variables_server.set('velocity_{}'.format(i), hlp.dump_object(np.zeros(weight.shape)))
self.variables_server.set('update_steps', hlp.dump_object(0))
worker_args = \
{
'config': self.config,
'test_mode': False,
}
hlp.launch_workers(worker_args, self.n_workers, command='work', wait=False)
while True:
time.sleep(self.test_every)
print("Time for testing!")
if self.distributed:
worker_args = \
{
'config': self.config,
'test_mode': True,
}
hlp.launch_workers(worker_args, self.n_workers)
paths = []
for i in range(self.n_workers):
paths += hlp.load_object(self.variables_server.get("paths_{}".format(i)))
else:
self.test_mode = True
self.make_rollout()
paths = self.paths
total_rewards = np.array([path["total"] for path in paths])
eplens = np.array([len(path["rewards"]) for path in paths])
print("""
-------------------------------------------------------------
Mean test score: {test_scores}
Mean test episode length: {test_eplengths}
Max test score: {max_test}
Number of train episodes: {number}
Mean of features: {means}
Std of features: {stds}
-------------------------------------------------------------
""".format(
means=means,
stds=stds,
test_scores=np.mean(total_rewards),
test_eplengths=np.mean(eplens),
max_test=np.max(total_rewards),
number=self.variables_server.llen('results')
))
self.timestep += 1
self.train_scores = [hlp.load_object(res) for res in self.variables_server.lrange('results', 0, -1)][::-1]
self.test_scores.append(np.mean(total_rewards))
if self.timestep % self.save_every == 0:
self.save(self.config[:-5])
| apache-2.0 | -6,978,503,716,314,443,000 | 41.539106 | 118 | 0.537987 | false |
varunarya10/nova_test_latest | nova/virt/libvirt/guest.py | 4 | 13570 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2010 Citrix Systems, Inc.
# Copyright (c) 2011 Piston Cloud Computing, Inc
# Copyright (c) 2012 University Of Minho
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2015 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Manages information about the guest.
This class encapsulates libvirt domain provides certain
higher level APIs around the raw libvirt API. These APIs are
then used by all the other libvirt related classes
"""
from lxml import etree
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import excutils
from oslo_utils import importutils
from nova import exception
from nova.i18n import _
from nova.i18n import _LE
from nova import utils
from nova.virt.libvirt import config as vconfig
libvirt = None
LOG = logging.getLogger(__name__)
class Guest(object):
def __init__(self, domain):
global libvirt
if libvirt is None:
libvirt = importutils.import_module('libvirt')
self._domain = domain
def __repr__(self):
return "<Guest %(id)d %(name)s %(uuid)s>" % {
'id': self.id,
'name': self.name,
'uuid': self.uuid
}
@property
def id(self):
return self._domain.ID()
@property
def uuid(self):
return self._domain.UUIDString()
@property
def name(self):
return self._domain.name()
@property
def _encoded_xml(self):
return encodeutils.safe_decode(self._domain.XMLDesc(0))
@classmethod
def create(cls, xml, host):
"""Create a new Guest
:param xml: XML definition of the domain to create
:param host: host.Host connection to define the guest on
:returns guest.Guest: Guest ready to be launched
"""
try:
# TODO(sahid): Host.write_instance_config should return
# an instance of Guest
domain = host.write_instance_config(xml)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE('Error defining a domain with XML: %s') %
encodeutils.safe_decode(xml))
return cls(domain)
def launch(self, pause=False):
"""Starts a created guest.
:param pause: Indicates whether to start and pause the guest
"""
flags = pause and libvirt.VIR_DOMAIN_START_PAUSED or 0
try:
return self._domain.createWithFlags(flags)
except Exception:
with excutils.save_and_reraise_exception():
                LOG.error(_LE('Error launching a defined domain '
                              'with XML: %s') % self._encoded_xml)
def poweroff(self):
"""Stops a running guest."""
self._domain.destroy()
def inject_nmi(self):
"""Injects an NMI to a guest."""
self._domain.injectNMI()
def resume(self):
"""Resumes a suspended guest."""
self._domain.resume()
def enable_hairpin(self):
"""Enables hairpin mode for this guest."""
interfaces = self.get_interfaces()
try:
for interface in interfaces:
utils.execute(
'tee',
'/sys/class/net/%s/brport/hairpin_mode' % interface,
process_input='1',
run_as_root=True,
check_exit_code=[0, 1])
except Exception:
with excutils.save_and_reraise_exception():
                LOG.error(_LE('Error enabling hairpin mode with XML: %s') %
                          self._encoded_xml)
def get_interfaces(self):
"""Returns a list of all network interfaces for this domain."""
doc = None
try:
doc = etree.fromstring(self._encoded_xml)
except Exception:
return []
interfaces = []
nodes = doc.findall('./devices/interface/target')
for target in nodes:
interfaces.append(target.get('dev'))
return interfaces
def get_vcpus_info(self):
"""Returns virtual cpus information of guest.
:returns: guest.VCPUInfo
"""
vcpus = self._domain.vcpus()
if vcpus is not None:
for vcpu in vcpus[0]:
yield VCPUInfo(
id=vcpu[0], cpu=vcpu[3], state=vcpu[1], time=vcpu[2])
def delete_configuration(self):
"""Undefines a domain from hypervisor."""
try:
self._domain.undefineFlags(
libvirt.VIR_DOMAIN_UNDEFINE_MANAGED_SAVE)
except libvirt.libvirtError:
LOG.debug("Error from libvirt during undefineFlags. %d"
"Retrying with undefine", self.id)
self._domain.undefine()
except AttributeError:
# Older versions of libvirt don't support undefine flags,
# trying to remove managed image
try:
if self._domain.hasManagedSaveImage(0):
self._domain.managedSaveRemove(0)
except AttributeError:
pass
self._domain.undefine()
def has_persistent_configuration(self):
"""Whether domain config is persistently stored on the host."""
return self._domain.isPersistent()
def attach_device(self, conf, persistent=False, live=False):
"""Attaches device to the guest.
:param conf: A LibvirtConfigObject of the device to attach
:param persistent: A bool to indicate whether the change is
persistent or not
:param live: A bool to indicate whether it affect the guest
in running state
"""
flags = persistent and libvirt.VIR_DOMAIN_AFFECT_CONFIG or 0
flags |= live and libvirt.VIR_DOMAIN_AFFECT_LIVE or 0
self._domain.attachDeviceFlags(conf.to_xml(), flags=flags)
def get_disk(self, device):
"""Returns the disk mounted at device
:returns LivirtConfigGuestDisk: mounted at device or None
"""
try:
doc = etree.fromstring(self._domain.XMLDesc(0))
except Exception:
return None
node = doc.find("./devices/disk/target[@dev='%s'].." % device)
if node is not None:
conf = vconfig.LibvirtConfigGuestDisk()
conf.parse_dom(node)
return conf
def detach_device(self, conf, persistent=False, live=False):
"""Detaches device to the guest.
:param conf: A LibvirtConfigObject of the device to detach
:param persistent: A bool to indicate whether the change is
persistent or not
:param live: A bool to indicate whether it affect the guest
in running state
"""
flags = persistent and libvirt.VIR_DOMAIN_AFFECT_CONFIG or 0
flags |= live and libvirt.VIR_DOMAIN_AFFECT_LIVE or 0
self._domain.detachDeviceFlags(conf.to_xml(), flags=flags)
def get_xml_desc(self, dump_inactive=False, dump_sensitive=False,
dump_migratable=False):
"""Returns xml description of guest.
:param dump_inactive: Dump inactive domain information
:param dump_sensitive: Dump security sensitive information
:param dump_migratable: Dump XML suitable for migration
:returns string: XML description of the guest
"""
flags = dump_inactive and libvirt.VIR_DOMAIN_XML_INACTIVE or 0
flags |= dump_sensitive and libvirt.VIR_DOMAIN_XML_SECURE or 0
flags |= dump_migratable and libvirt.VIR_DOMAIN_XML_MIGRATABLE or 0
return self._domain.XMLDesc(flags=flags)
def save_memory_state(self):
"""Saves the domain's memory state. Requires running domain.
raises: raises libvirtError on error
"""
self._domain.managedSave(0)
def get_block_device(self, disk):
"""Returns a block device wrapper for disk."""
return BlockDevice(self, disk)
class BlockDevice(object):
"""Wrapper around block device API"""
REBASE_DEFAULT_BANDWIDTH = 0 # in MiB/s - 0 unlimited
COMMIT_DEFAULT_BANDWIDTH = 0 # in MiB/s - 0 unlimited
def __init__(self, guest, disk):
self._guest = guest
self._disk = disk
def abort_job(self, async=False, pivot=False):
"""Request to cancel any job currently running on the block.
:param async: Request only, do not wait for completion
:param pivot: Pivot to new file when ending a copy or
active commit job
"""
flags = async and libvirt.VIR_DOMAIN_BLOCK_JOB_ABORT_ASYNC or 0
flags |= pivot and libvirt.VIR_DOMAIN_BLOCK_JOB_ABORT_PIVOT or 0
self._guest._domain.blockJobAbort(self._disk, flags=flags)
def get_job_info(self):
"""Returns information about job currently running
:returns: BlockDeviceJobInfo or None
"""
status = self._guest._domain.blockJobInfo(self._disk, flags=0)
if status != -1:
return BlockDeviceJobInfo(
job=status.get("type", 0),
bandwidth=status.get("bandwidth", 0),
cur=status.get("cur", 0),
end=status.get("end", 0))
def rebase(self, base, shallow=False, reuse_ext=False,
copy=False, relative=False):
"""Rebases block to new base
:param shallow: Limit copy to top of source backing chain
:param reuse_ext: Reuse existing external file of a copy
:param copy: Start a copy job
:param relative: Keep backing chain referenced using relative names
"""
flags = shallow and libvirt.VIR_DOMAIN_BLOCK_REBASE_SHALLOW or 0
flags |= reuse_ext and libvirt.VIR_DOMAIN_BLOCK_REBASE_REUSE_EXT or 0
flags |= copy and libvirt.VIR_DOMAIN_BLOCK_REBASE_COPY or 0
flags |= relative and libvirt.VIR_DOMAIN_BLOCK_REBASE_RELATIVE or 0
return self._guest._domain.blockRebase(
self._disk, base, self.REBASE_DEFAULT_BANDWIDTH, flags=flags)
def commit(self, base, top, relative=False):
"""Commit on block device
For performance during live snapshot it will reduces the disk chain
to a single disk.
:param relative: Keep backing chain referenced using relative names
"""
flags = relative and libvirt.VIR_DOMAIN_BLOCK_COMMIT_RELATIVE or 0
return self._guest._domain.blockCommit(
self._disk, base, top, self.COMMIT_DEFAULT_BANDWIDTH, flags=flags)
def resize(self, size_kb):
"""Resizes block device to Kib size."""
self._guest._domain.blockResize(self._disk, size_kb)
def wait_for_job(self, abort_on_error=False, wait_for_job_clean=False):
"""Wait for libvirt block job to complete.
Libvirt may return either cur==end or an empty dict when
the job is complete, depending on whether the job has been
cleaned up by libvirt yet, or not.
:param abort_on_error: Whether to stop process and raise NovaException
on error (default: False)
:param wait_for_job_clean: Whether to force wait to ensure job is
finished (see bug: LP#1119173)
:returns: True if still in progress
False if completed
"""
        status = self.get_job_info()
        if not status:
            if abort_on_error:
                msg = _('libvirt error while requesting blockjob info.')
                raise exception.NovaException(msg)
            # libvirt already cleaned the job up, so nothing is in progress
            return False
        if wait_for_job_clean:
            job_ended = status.job == 0
        else:
            job_ended = status.cur == status.end
        return not job_ended
class VCPUInfo(object):
def __init__(self, id, cpu, state, time):
"""Structure for information about guest vcpus.
:param id: The virtual cpu number
:param cpu: The host cpu currently associated
:param state: The running state of the vcpu (0 offline, 1 running, 2
blocked on resource)
:param time: The cpu time used in nanoseconds
"""
self.id = id
self.cpu = cpu
self.state = state
self.time = time
class BlockDeviceJobInfo(object):
def __init__(self, job, bandwidth, cur, end):
"""Structure for information about running job.
:param job: The running job (0 placeholder, 1 pull,
2 copy, 3 commit, 4 active commit)
:param bandwidth: Used in MiB/s
:param cur: Indicates the position between 0 and 'end'
:param end: Indicates the position for this operation
"""
self.job = job
self.bandwidth = bandwidth
self.cur = cur
self.end = end
| apache-2.0 | 6,569,003,820,764,894,000 | 34.430809 | 78 | 0.60818 | false |
munkiat/libcloud | docs/examples/compute/vmware_vcloud_1.5.py | 60 | 2076 | from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
import libcloud.security
# Skip this step if you are launching nodes on an official vCloud
# provider. It is intended only for self signed SSL certs in
# vanilla vCloud Director v1.5 test deployments.
# Note: Code like this poses a security risk (MITM attack) and
# that's the reason why you should never use it for anything else
# besides testing. You have been warned.
libcloud.security.VERIFY_SSL_CERT = False
vcloud = get_driver(Provider.VCLOUD)
driver = vcloud('your username@organisation', 'your password',
host='vcloud.local', api_version='1.5')
# List all instantiated vApps
nodes = driver.list_nodes()
# List all VMs within the first vApp instance
print nodes[0].extra['vms']
# List all available vApp Templates
images = driver.list_images()
image = [i for i in images if i.name == 'natty-server-cloudimg-amd64'][0]
# Create node with minimum set of parameters
node = driver.create_node(name='test node 1', image=image)
# Destroy the node
driver.destroy_node(node)
# Create node without deploying and powering it on
node = driver.create_node(name='test node 2', image=image, ex_deploy=False)
# Create node with custom CPU & Memory values
node = driver.create_node(name='test node 3', image=image, ex_vm_cpu=3,
ex_vm_memory=1024)
# Create node with customised networking parameters (eg. for OVF
# imported images)
node = driver.create_node(name='test node 4', image=image,
ex_vm_network='your vm net name',
ex_network='your org net name',
ex_vm_fence='bridged', ex_vm_ipmode='DHCP')
# Create node in a custom virtual data center
node = driver.create_node(name='test node 5', image=image,
ex_vdc='your vdc name')
# Create node with guest OS customisation script to be run at first boot
node = driver.create_node(name='test node 6', image=image,
ex_vm_script='filesystem path to your script')
| apache-2.0 | 6,054,260,024,234,999,000 | 38.169811 | 75 | 0.691715 | false |
aarticianpc/greenpointtrees | src/paypal/pro/migrations/0001_initial.py | 12 | 2843 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='PayPalNVP',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('method', models.CharField(max_length=64, blank=True)),
('ack', models.CharField(max_length=32, blank=True)),
('profilestatus', models.CharField(max_length=32, blank=True)),
('timestamp', models.DateTimeField(null=True, blank=True)),
('profileid', models.CharField(max_length=32, blank=True)),
('profilereference', models.CharField(max_length=128, blank=True)),
('correlationid', models.CharField(max_length=32, blank=True)),
('token', models.CharField(max_length=64, blank=True)),
('payerid', models.CharField(max_length=64, blank=True)),
('firstname', models.CharField(max_length=255, verbose_name='First Name', blank=True)),
('lastname', models.CharField(max_length=255, verbose_name='Last Name', blank=True)),
('street', models.CharField(max_length=255, verbose_name='Street Address', blank=True)),
('city', models.CharField(max_length=255, verbose_name='City', blank=True)),
('state', models.CharField(max_length=255, verbose_name='State', blank=True)),
('countrycode', models.CharField(max_length=2, verbose_name='Country', blank=True)),
('zip', models.CharField(max_length=32, verbose_name='Postal / Zip Code', blank=True)),
('invnum', models.CharField(max_length=255, blank=True)),
('custom', models.CharField(max_length=255, blank=True)),
('flag', models.BooleanField(default=False)),
('flag_code', models.CharField(max_length=32, blank=True)),
('flag_info', models.TextField(blank=True)),
('ipaddress', models.IPAddressField(blank=True)),
('query', models.TextField(blank=True)),
('response', models.TextField(blank=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('user', models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'db_table': 'paypal_nvp',
'verbose_name': 'PayPal NVP',
},
bases=(models.Model,),
),
]
| mit | 2,255,531,316,139,580,400 | 52.641509 | 114 | 0.577559 | false |
kmspriyatham/symath | scipy/scipy/sparse/construct.py | 2 | 19642 | """Functions to construct sparse matrices
"""
from __future__ import division, print_function, absolute_import
__docformat__ = "restructuredtext en"
__all__ = ['spdiags', 'eye', 'identity', 'kron', 'kronsum',
'hstack', 'vstack', 'bmat', 'rand', 'diags', 'block_diag']
from warnings import warn
import numpy as np
from .sputils import upcast
from .csr import csr_matrix
from .csc import csc_matrix
from .bsr import bsr_matrix
from .coo import coo_matrix
from .lil import lil_matrix
from .dia import dia_matrix
from .base import issparse
def spdiags(data, diags, m, n, format=None):
"""
Return a sparse matrix from diagonals.
Parameters
----------
data : array_like
matrix diagonals stored row-wise
diags : diagonals to set
- k = 0 the main diagonal
- k > 0 the k-th upper diagonal
- k < 0 the k-th lower diagonal
m, n : int
shape of the result
format : format of the result (e.g. "csr")
By default (format=None) an appropriate sparse matrix
format is returned. This choice is subject to change.
See Also
--------
diags : more convenient form of this function
dia_matrix : the sparse DIAgonal format.
Examples
--------
>>> data = array([[1,2,3,4],[1,2,3,4],[1,2,3,4]])
>>> diags = array([0,-1,2])
>>> spdiags(data, diags, 4, 4).todense()
matrix([[1, 0, 3, 0],
[1, 2, 0, 4],
[0, 2, 3, 0],
[0, 0, 3, 4]])
"""
return dia_matrix((data, diags), shape=(m,n)).asformat(format)
def diags(diagonals, offsets, shape=None, format=None, dtype=None):
"""
Construct a sparse matrix from diagonals.
.. versionadded:: 0.11
Parameters
----------
diagonals : sequence of array_like
Sequence of arrays containing the matrix diagonals,
corresponding to `offsets`.
offsets : sequence of int
Diagonals to set:
- k = 0 the main diagonal
- k > 0 the k-th upper diagonal
- k < 0 the k-th lower diagonal
shape : tuple of int, optional
Shape of the result. If omitted, a square matrix large enough
to contain the diagonals is returned.
format : {"dia", "csr", "csc", "lil", ...}, optional
Matrix format of the result. By default (format=None) an
appropriate sparse matrix format is returned. This choice is
subject to change.
dtype : dtype, optional
Data type of the matrix.
See Also
--------
spdiags : construct matrix from diagonals
Notes
-----
This function differs from `spdiags` in the way it handles
off-diagonals.
The result from `diags` is the sparse equivalent of::
np.diag(diagonals[0], offsets[0])
+ ...
+ np.diag(diagonals[k], offsets[k])
Repeated diagonal offsets are disallowed.
Examples
--------
>>> diagonals = [[1,2,3,4], [1,2,3], [1,2]]
>>> diags(diagonals, [0, -1, 2]).todense()
matrix([[1, 0, 1, 0],
[1, 2, 0, 2],
[0, 2, 3, 0],
[0, 0, 3, 4]])
Broadcasting of scalars is supported (but shape needs to be
specified):
>>> diags([1, -2, 1], [-1, 0, 1], shape=(4, 4)).todense()
matrix([[-2., 1., 0., 0.],
[ 1., -2., 1., 0.],
[ 0., 1., -2., 1.],
[ 0., 0., 1., -2.]])
If only one diagonal is wanted (as in `numpy.diag`), the following
works as well:
>>> diags([1, 2, 3], 1).todense()
matrix([[ 0., 1., 0., 0.],
[ 0., 0., 2., 0.],
[ 0., 0., 0., 3.],
[ 0., 0., 0., 0.]])
"""
# if offsets is not a sequence, assume that there's only one diagonal
try:
iter(offsets)
except TypeError:
# now check that there's actually only one diagonal
try:
iter(diagonals[0])
except TypeError:
diagonals = [np.atleast_1d(diagonals)]
else:
raise ValueError("Different number of diagonals and offsets.")
else:
diagonals = list(map(np.atleast_1d, diagonals))
offsets = np.atleast_1d(offsets)
# Basic check
if len(diagonals) != len(offsets):
raise ValueError("Different number of diagonals and offsets.")
# Determine shape, if omitted
if shape is None:
m = len(diagonals[0]) + abs(int(offsets[0]))
shape = (m, m)
# Determine data type, if omitted
if dtype is None:
dtype = np.common_type(*diagonals)
# Construct data array
m, n = shape
M = max([min(m + offset, n - offset) + max(0, offset)
for offset in offsets])
M = max(0, M)
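    # each row of data_arr holds one diagonal, padded to the longest stored
    # length M; the loop below copies (or broadcasts) each diagonal into place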
data_arr = np.zeros((len(offsets), M), dtype=dtype)
for j, diagonal in enumerate(diagonals):
offset = offsets[j]
k = max(0, offset)
length = min(m + offset, n - offset)
if length <= 0:
raise ValueError("Offset %d (index %d) out of bounds" % (offset, j))
try:
data_arr[j, k:k+length] = diagonal
except ValueError:
if len(diagonal) != length and len(diagonal) != 1:
raise ValueError(
"Diagonal length (index %d: %d at offset %d) does not "
"agree with matrix size (%d, %d)." % (
j, len(diagonal), offset, m, n))
raise
return dia_matrix((data_arr, offsets), shape=(m, n)).asformat(format)
def identity(n, dtype='d', format=None):
"""Identity matrix in sparse format
Returns an identity matrix with shape (n,n) using a given
sparse format and dtype.
Parameters
----------
n : integer
Shape of the identity matrix.
dtype :
Data type of the matrix
format : string
Sparse format of the result, e.g. format="csr", etc.
Examples
--------
>>> identity(3).todense()
matrix([[ 1., 0., 0.],
[ 0., 1., 0.],
[ 0., 0., 1.]])
>>> identity(3, dtype='int8', format='dia')
<3x3 sparse matrix of type '<type 'numpy.int8'>'
with 3 stored elements (1 diagonals) in DIAgonal format>
"""
return eye(n, n, dtype=dtype, format=format)
def eye(m, n=None, k=0, dtype=float, format=None):
"""Sparse matrix with ones on diagonal
Returns a sparse (m x n) matrix where the k-th diagonal
is all ones and everything else is zeros.
Parameters
----------
    m : integer
        Number of rows in the matrix.
    n : integer, optional
        Number of columns. Default: m
k : integer, optional
Diagonal to place ones on. Default: 0 (main diagonal)
dtype :
Data type of the matrix
format : string
Sparse format of the result, e.g. format="csr", etc.
Examples
--------
>>> from scipy import sparse
>>> sparse.eye(3).todense()
matrix([[ 1., 0., 0.],
[ 0., 1., 0.],
[ 0., 0., 1.]])
>>> sparse.eye(3, dtype=np.int8)
<3x3 sparse matrix of type '<type 'numpy.int8'>'
with 3 stored elements (1 diagonals) in DIAgonal format>
"""
if n is None:
n = m
m,n = int(m),int(n)
if m == n and k == 0:
# fast branch for special formats
if format in ['csr', 'csc']:
indptr = np.arange(n+1, dtype=np.intc)
indices = np.arange(n, dtype=np.intc)
data = np.ones(n, dtype=dtype)
cls = {'csr': csr_matrix, 'csc': csc_matrix}[format]
return cls((data,indices,indptr),(n,n))
elif format == 'coo':
row = np.arange(n, dtype=np.intc)
col = np.arange(n, dtype=np.intc)
data = np.ones(n, dtype=dtype)
return coo_matrix((data,(row,col)),(n,n))
diags = np.ones((1, max(0, min(m + k, n))), dtype=dtype)
return spdiags(diags, k, m, n).asformat(format)
def kron(A, B, format=None):
"""kronecker product of sparse matrices A and B
Parameters
----------
A : sparse or dense matrix
first matrix of the product
B : sparse or dense matrix
second matrix of the product
format : string
format of the result (e.g. "csr")
Returns
-------
kronecker product in a sparse matrix format
Examples
--------
>>> A = csr_matrix(array([[0,2],[5,0]]))
>>> B = csr_matrix(array([[1,2],[3,4]]))
>>> kron(A,B).todense()
matrix([[ 0, 0, 2, 4],
[ 0, 0, 6, 8],
[ 5, 10, 0, 0],
[15, 20, 0, 0]])
>>> kron(A,[[1,2],[3,4]]).todense()
matrix([[ 0, 0, 2, 4],
[ 0, 0, 6, 8],
[ 5, 10, 0, 0],
[15, 20, 0, 0]])
"""
B = coo_matrix(B)
if (format is None or format == "bsr") and 2*B.nnz >= B.shape[0] * B.shape[1]:
# B is fairly dense, use BSR
A = csr_matrix(A,copy=True)
output_shape = (A.shape[0]*B.shape[0], A.shape[1]*B.shape[1])
if A.nnz == 0 or B.nnz == 0:
# kronecker product is the zero matrix
return coo_matrix(output_shape)
B = B.toarray()
data = A.data.repeat(B.size).reshape(-1,B.shape[0],B.shape[1])
data = data * B
return bsr_matrix((data,A.indices,A.indptr), shape=output_shape)
else:
# use COO
A = coo_matrix(A)
output_shape = (A.shape[0]*B.shape[0], A.shape[1]*B.shape[1])
if A.nnz == 0 or B.nnz == 0:
# kronecker product is the zero matrix
return coo_matrix(output_shape)
# expand entries of a into blocks
row = A.row.repeat(B.nnz)
col = A.col.repeat(B.nnz)
data = A.data.repeat(B.nnz)
row *= B.shape[0]
col *= B.shape[1]
# increment block indices
row,col = row.reshape(-1,B.nnz),col.reshape(-1,B.nnz)
row += B.row
col += B.col
row,col = row.reshape(-1),col.reshape(-1)
# compute block entries
data = data.reshape(-1,B.nnz) * B.data
data = data.reshape(-1)
return coo_matrix((data,(row,col)), shape=output_shape).asformat(format)
def kronsum(A, B, format=None):
"""kronecker sum of sparse matrices A and B
Kronecker sum of two sparse matrices is a sum of two Kronecker
products kron(I_n,A) + kron(B,I_m) where A has shape (m,m)
and B has shape (n,n) and I_m and I_n are identity matrices
of shape (m,m) and (n,n) respectively.
Parameters
----------
A
square matrix
B
square matrix
format : string
format of the result (e.g. "csr")
Returns
-------
kronecker sum in a sparse matrix format
Examples
--------
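    >>> from scipy.sparse import coo_matrix, kronsum
    >>> A = coo_matrix([[0, 1], [1, 0]])
    >>> B = coo_matrix([[1, 2], [3, 4]])
    >>> kronsum(A, B).todense()
    matrix([[1, 1, 2, 0],
            [1, 1, 0, 2],
            [3, 0, 4, 1],
            [0, 3, 1, 4]])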
"""
A = coo_matrix(A)
B = coo_matrix(B)
if A.shape[0] != A.shape[1]:
raise ValueError('A is not square')
if B.shape[0] != B.shape[1]:
raise ValueError('B is not square')
dtype = upcast(A.dtype, B.dtype)
L = kron(eye(B.shape[0],dtype=dtype), A, format=format)
R = kron(B, eye(A.shape[0],dtype=dtype), format=format)
return (L+R).asformat(format) # since L + R is not always same format
def hstack(blocks, format=None, dtype=None):
"""
Stack sparse matrices horizontally (column wise)
Parameters
----------
blocks
sequence of sparse matrices with compatible shapes
format : string
sparse format of the result (e.g. "csr")
by default an appropriate sparse matrix format is returned.
This choice is subject to change.
See Also
--------
vstack : stack sparse matrices vertically (row wise)
Examples
--------
>>> from scipy.sparse import coo_matrix, hstack
>>> A = coo_matrix([[1,2],[3,4]])
>>> B = coo_matrix([[5],[6]])
>>> hstack( [A,B] ).todense()
matrix([[1, 2, 5],
[3, 4, 6]])
"""
return bmat([blocks], format=format, dtype=dtype)
def vstack(blocks, format=None, dtype=None):
"""
Stack sparse matrices vertically (row wise)
Parameters
----------
blocks
sequence of sparse matrices with compatible shapes
format : string
sparse format of the result (e.g. "csr")
by default an appropriate sparse matrix format is returned.
This choice is subject to change.
See Also
--------
hstack : stack sparse matrices horizontally (column wise)
Examples
--------
>>> from scipy.sparse import coo_matrix, vstack
>>> A = coo_matrix([[1,2],[3,4]])
>>> B = coo_matrix([[5,6]])
>>> vstack( [A,B] ).todense()
matrix([[1, 2],
[3, 4],
[5, 6]])
"""
return bmat([[b] for b in blocks], format=format, dtype=dtype)
def bmat(blocks, format=None, dtype=None):
"""
Build a sparse matrix from sparse sub-blocks
Parameters
----------
blocks : array_like
Grid of sparse matrices with compatible shapes.
An entry of None implies an all-zero matrix.
format : {'bsr', 'coo', 'csc', 'csr', 'dia', 'dok', 'lil'}, optional
The sparse format of the result (e.g. "csr"). If not given, the matrix
is returned in "coo" format.
dtype : dtype specifier, optional
The data-type of the output matrix. If not given, the dtype is
determined from that of `blocks`.
Returns
-------
bmat : sparse matrix
A "coo" sparse matrix or type of sparse matrix identified by `format`.
See Also
--------
block_diag, diags
Examples
--------
>>> from scipy.sparse import coo_matrix, bmat
>>> A = coo_matrix([[1,2],[3,4]])
>>> B = coo_matrix([[5],[6]])
>>> C = coo_matrix([[7]])
>>> bmat( [[A,B],[None,C]] ).todense()
matrix([[1, 2, 5],
[3, 4, 6],
[0, 0, 7]])
>>> bmat( [[A,None],[None,C]] ).todense()
matrix([[1, 2, 0],
[3, 4, 0],
[0, 0, 7]])
"""
blocks = np.asarray(blocks, dtype='object')
    if blocks.ndim != 2:
raise ValueError('blocks must have rank 2')
M,N = blocks.shape
block_mask = np.zeros(blocks.shape, dtype=np.bool)
brow_lengths = np.zeros(blocks.shape[0], dtype=np.intc)
bcol_lengths = np.zeros(blocks.shape[1], dtype=np.intc)
# convert everything to COO format
for i in range(M):
for j in range(N):
if blocks[i,j] is not None:
A = coo_matrix(blocks[i,j])
blocks[i,j] = A
block_mask[i,j] = True
if brow_lengths[i] == 0:
brow_lengths[i] = A.shape[0]
else:
if brow_lengths[i] != A.shape[0]:
raise ValueError('blocks[%d,:] has incompatible row dimensions' % i)
if bcol_lengths[j] == 0:
bcol_lengths[j] = A.shape[1]
else:
if bcol_lengths[j] != A.shape[1]:
raise ValueError('blocks[:,%d] has incompatible column dimensions' % j)
# ensure that at least one value in each row and col is not None
if brow_lengths.min() == 0:
raise ValueError('blocks[%d,:] is all None' % brow_lengths.argmin())
if bcol_lengths.min() == 0:
raise ValueError('blocks[:,%d] is all None' % bcol_lengths.argmin())
nnz = sum([A.nnz for A in blocks[block_mask]])
if dtype is None:
dtype = upcast(*tuple([A.dtype for A in blocks[block_mask]]))
row_offsets = np.concatenate(([0], np.cumsum(brow_lengths)))
col_offsets = np.concatenate(([0], np.cumsum(bcol_lengths)))
data = np.empty(nnz, dtype=dtype)
row = np.empty(nnz, dtype=np.intc)
col = np.empty(nnz, dtype=np.intc)
nnz = 0
for i in range(M):
for j in range(N):
if blocks[i,j] is not None:
A = blocks[i,j]
data[nnz:nnz + A.nnz] = A.data
row[nnz:nnz + A.nnz] = A.row
col[nnz:nnz + A.nnz] = A.col
row[nnz:nnz + A.nnz] += row_offsets[i]
col[nnz:nnz + A.nnz] += col_offsets[j]
nnz += A.nnz
shape = (np.sum(brow_lengths), np.sum(bcol_lengths))
return coo_matrix((data, (row, col)), shape=shape).asformat(format)
def block_diag(mats, format=None, dtype=None):
"""
Build a block diagonal sparse matrix from provided matrices.
.. versionadded:: 0.11.0
Parameters
----------
A, B, ... : sequence of matrices
Input matrices.
format : str, optional
The sparse format of the result (e.g. "csr"). If not given, the matrix
is returned in "coo" format.
dtype : dtype specifier, optional
The data-type of the output matrix. If not given, the dtype is
determined from that of `blocks`.
Returns
-------
res : sparse matrix
See Also
--------
bmat, diags
Examples
--------
>>> A = coo_matrix([[1, 2], [3, 4]])
>>> B = coo_matrix([[5], [6]])
>>> C = coo_matrix([[7]])
>>> block_diag((A, B, C)).todense()
matrix([[1, 2, 0, 0],
[3, 4, 0, 0],
[0, 0, 5, 0],
[0, 0, 6, 0],
[0, 0, 0, 7]])
"""
nmat = len(mats)
rows = []
for ia, a in enumerate(mats):
row = [None]*nmat
if issparse(a):
row[ia] = a
else:
row[ia] = coo_matrix(a)
rows.append(row)
return bmat(rows, format=format, dtype=dtype)
def rand(m, n, density=0.01, format="coo", dtype=None, random_state=None):
"""Generate a sparse matrix of the given shape and density with uniformely
distributed values.
Parameters
----------
m, n : int
shape of the matrix
density : real
density of the generated matrix: density equal to one means a full
matrix, density of 0 means a matrix with no non-zero items.
format : str
sparse matrix format.
dtype : dtype
type of the returned matrix values.
random_state : {numpy.random.RandomState, int}, optional
Random number generator or random seed. If not given, the singleton
numpy.random will be used.
Notes
-----
Only float types are supported for now.
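
    Examples
    --------
    >>> from scipy.sparse import rand
    >>> matrix = rand(3, 4, density=0.25, format="csr", random_state=42)
    >>> matrix
    <3x4 sparse matrix of type '<type 'numpy.float64'>'
        with 3 stored elements in Compressed Sparse Row format>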
"""
if density < 0 or density > 1:
raise ValueError("density expected to be 0 <= density <= 1")
    if dtype and dtype not in [np.float32, np.float64, np.longdouble]:
raise NotImplementedError("type %s not supported" % dtype)
mn = m * n
# XXX: sparse uses intc instead of intp...
tp = np.intp
if mn > np.iinfo(tp).max:
msg = """\
Trying to generate a random sparse matrix such as the product of dimensions is
greater than %d - this is not supported on this machine
"""
raise ValueError(msg % np.iinfo(tp).max)
# Number of non zero values
k = int(density * m * n)
# Generate a few more values than k so that we can get unique values
# afterwards.
# XXX: one could be smarter here
mlow = 5
fac = 1.02
gk = min(k + mlow, fac * k)
if random_state is None:
random_state = np.random
elif isinstance(random_state, (int, np.integer)):
random_state = np.random.RandomState(random_state)
def _gen_unique_rand(rng, _gk):
ind = rng.rand(int(_gk))
return np.unique(np.floor(ind * mn))[:k]
ind = _gen_unique_rand(random_state, gk)
while ind.size < k:
gk *= 1.05
ind = _gen_unique_rand(random_state, gk)
j = np.floor(ind * 1. / m).astype(tp)
i = (ind - j * m).astype(tp)
vals = random_state.rand(k).astype(dtype)
return coo_matrix((vals, (i, j)), shape=(m, n)).asformat(format)
| apache-2.0 | -5,423,150,516,931,027 | 27.800587 | 95 | 0.545006 | false |
superdesk/superdesk-core | superdesk/io/feeding_services/file_service.py | 2 | 8059 | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import logging
import os
import shutil
from datetime import datetime
from lxml import etree
from flask import current_app as app
from superdesk.errors import IngestFileError, ParserError, ProviderError
from superdesk.io.registry import register_feeding_service
from superdesk.io.feed_parsers import XMLFeedParser
from superdesk.io.feeding_services import FeedingService, OLD_CONTENT_MINUTES
from superdesk.notification import push_notification
from superdesk.utc import utc
from superdesk.utils import get_sorted_files, FileSortAttributes
logger = logging.getLogger(__name__)
class FileFeedingService(FeedingService):
"""
Feeding Service class which can read the configured local file system for article(s).
"""
NAME = "file"
ERRORS = [
ParserError.IPTC7901ParserError().get_error_description(),
ParserError.nitfParserError().get_error_description(),
ParserError.newsmlOneParserError().get_error_description(),
ProviderError.ingestError().get_error_description(),
ParserError.parseFileError().get_error_description(),
]
label = "File feed"
fields = [
{
"id": "path",
"type": "text",
"label": "Server Folder",
"placeholder": "path to folder",
"required": True,
"errors": {3003: "Path not found on server.", 3004: "Path should be directory."},
}
]
def _test(self, provider):
path = provider.get("config", {}).get("path", None)
if not os.path.exists(path):
raise IngestFileError.notExistsError()
if not os.path.isdir(path):
raise IngestFileError.isNotDirError()
def _update(self, provider, update):
# check if deprecated FILE_INGEST_OLD_CONTENT_MINUTES setting is still used
if "FILE_INGEST_OLD_CONTENT_MINUTES" in app.config:
deprecated_cont_min = app.config["FILE_INGEST_OLD_CONTENT_MINUTES"]
cont_min = app.config[OLD_CONTENT_MINUTES]
if deprecated_cont_min != cont_min:
logger.warning(
"'FILE_INGEST_OLD_CONTENT_MINUTES' is deprecated, please update settings.py to use {new_name!r}".format(
new_name=OLD_CONTENT_MINUTES
)
)
app.config[OLD_CONTENT_MINUTES] = deprecated_cont_min
self.provider = provider
self.path = provider.get("config", {}).get("path", None)
if not self.path:
            logger.warning(
"File Feeding Service {} is configured without path. Please check the configuration".format(
provider["name"]
)
)
return []
registered_parser = self.get_feed_parser(provider)
for filename in get_sorted_files(self.path, sort_by=FileSortAttributes.created):
try:
last_updated = None
file_path = os.path.join(self.path, filename)
if os.path.isfile(file_path):
last_updated = self.get_last_updated(file_path)
if self.is_latest_content(last_updated, provider.get("last_updated")):
if self.is_empty(file_path):
logger.info("Ignoring empty file {}".format(filename))
continue
if isinstance(registered_parser, XMLFeedParser):
with open(file_path, "rb") as f:
xml = etree.parse(f)
parser = self.get_feed_parser(provider, xml.getroot())
item = parser.parse(xml.getroot(), provider)
else:
parser = self.get_feed_parser(provider, file_path)
item = parser.parse(file_path, provider)
self.after_extracting(item, provider)
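                        # _update is a generator: parsed items are yielded to
                        # the ingest consumer, which sends back any failed ones
                        # so the file can be routed to _PROCESSED or _ERROR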
if isinstance(item, list):
failed = yield item
else:
failed = yield [item]
self.move_file(self.path, filename, provider=provider, success=not failed)
else:
self.move_file(self.path, filename, provider=provider, success=False)
except Exception as ex:
if last_updated and self.is_old_content(last_updated):
self.move_file(self.path, filename, provider=provider, success=False)
raise ParserError.parseFileError("{}-{}".format(provider["name"], self.NAME), filename, ex, provider)
push_notification("ingest:update")
def after_extracting(self, article, provider):
"""Sub-classes should override this method if something needs to be done to the given article.
For example, if the article comes from DPA provider the system needs to derive dateline
from the properties in the article.
Invoked after parser parses the article received from the provider.
:param article: dict having properties that can be saved into ingest collection
:type article: dict
:param provider: dict - Ingest provider details to which the current directory has been configured
:type provider: dict :py:class: `superdesk.io.ingest_provider_model.IngestProviderResource`
"""
pass
def move_file(self, file_path, filename, provider, success=True):
"""Move the files from the current directory to the _Processed if successful, else _Error if unsuccessful.
Creates _Processed and _Error directories within current directory if they don't exist.
:param file_path: str - current directory location
:param filename: str - file name in the current directory to move
:param provider: dict - Ingest provider details to which the current directory has been configured
:param success: bool - default value is True. When True moves to _Processed directory else _Error directory.
:raises IngestFileError.folderCreateError() if creation of _Processed or _Error directories fails
:raises IngestFileError.fileMoveError() if failed to move the file pointed by filename
"""
try:
if not os.path.exists(os.path.join(file_path, "_PROCESSED/")):
os.makedirs(os.path.join(file_path, "_PROCESSED/"))
if not os.path.exists(os.path.join(file_path, "_ERROR/")):
os.makedirs(os.path.join(file_path, "_ERROR/"))
except Exception as ex:
raise IngestFileError.folderCreateError(ex, provider)
try:
if success:
shutil.copy2(os.path.join(file_path, filename), os.path.join(file_path, "_PROCESSED/"))
else:
shutil.copy2(os.path.join(file_path, filename), os.path.join(file_path, "_ERROR/"))
except Exception as ex:
raise IngestFileError.fileMoveError(ex, provider)
finally:
os.remove(os.path.join(file_path, filename))
def is_empty(self, file_path):
"""Test if given file path is empty, return True if a file is empty"""
return not (os.path.isfile(file_path) and os.path.getsize(file_path) > 0)
def get_last_updated(self, file_path):
"""Get last updated time for file.
Using both mtime and ctime timestamps not to miss
old files being copied around and recent files after
changes done in place.
"""
stat = os.lstat(file_path)
timestamp = max(stat.st_mtime, stat.st_ctime)
return datetime.fromtimestamp(timestamp, tz=utc)
register_feeding_service(FileFeedingService)
| agpl-3.0 | -6,315,356,105,772,752,000 | 42.096257 | 124 | 0.613103 | false |
ATIX-AG/ansible | lib/ansible/modules/cloud/azure/azure_rm_virtualmachine.py | 8 | 79001 | #!/usr/bin/python
#
# Copyright (c) 2016 Matt Davis, <[email protected]>
# Chris Houseknecht, <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
---
module: azure_rm_virtualmachine
version_added: "2.1"
short_description: Manage Azure virtual machines.
description:
- Create, update, stop and start a virtual machine. Provide an existing storage account and network interface or
allow the module to create these for you. If you choose not to provide a network interface, the resource group
must contain a virtual network with at least one subnet.
- Before Ansible 2.5, this required an image found in the Azure Marketplace which can be discovered with
M(azure_rm_virtualmachineimage_facts). In Ansible 2.5 and newer, custom images can be used as well, see the
examples for more details.
options:
resource_group:
description:
- Name of the resource group containing the virtual machine.
required: true
name:
description:
- Name of the virtual machine.
required: true
custom_data:
description:
- Data which is made available to the virtual machine and used by e.g., cloud-init.
version_added: "2.5"
state:
description:
- Assert the state of the virtual machine.
- State 'present' will check that the machine exists with the requested configuration. If the configuration
of the existing machine does not match, the machine will be updated. Use options started, allocated and restarted to change the machine's power
state.
- State 'absent' will remove the virtual machine.
default: present
choices:
- absent
- present
started:
description:
- Use with state 'present' to start the machine. Set to false to have the machine be 'stopped'.
default: true
allocated:
description:
- Toggle that controls if the machine is allocated/deallocated, only useful with state='present'.
default: True
restarted:
description:
- Use with state 'present' to restart a running VM.
location:
description:
- Valid Azure location. Defaults to location of the resource group.
short_hostname:
description:
            - Name assigned internally to the host. On a Linux VM this is the name returned by the `hostname` command.
When creating a virtual machine, short_hostname defaults to name.
vm_size:
description:
- A valid Azure VM size value. For example, 'Standard_D4'. The list of choices varies depending on the
subscription and location. Check your subscription for available choices. Required when creating a VM.
admin_username:
description:
- Admin username used to access the host after it is created. Required when creating a VM.
admin_password:
description:
- Password for the admin username. Not required if the os_type is Linux and SSH password authentication
is disabled by setting ssh_password_enabled to false.
ssh_password_enabled:
description:
- When the os_type is Linux, setting ssh_password_enabled to false will disable SSH password authentication
and require use of SSH keys.
default: true
ssh_public_keys:
description:
- "For os_type Linux provide a list of SSH keys. Each item in the list should be a dictionary where the
dictionary contains two keys: path and key_data. Set the path to the default location of the
authorized_keys files. On an Enterprise Linux host, for example, the path will be
/home/<admin username>/.ssh/authorized_keys. Set key_data to the actual value of the public key."
image:
description:
- Specifies the image used to build the VM.
- If a string, the image is sourced from a custom image based on the
name.
- 'If a dict with the keys C(publisher), C(offer), C(sku), and
C(version), the image is sourced from a Marketplace image. NOTE:
set image.version to C(latest) to get the most recent version of a
given image.'
- 'If a dict with the keys C(name) and C(resource_group), the image
is sourced from a custom image based on the C(name) and
C(resource_group) set. NOTE: the key C(resource_group) is optional
and if omitted, all images in the subscription will be searched
for by C(name).'
- Custom image support was added in Ansible 2.5
required: true
availability_set:
description:
- Name or ID of an existing availability set to add the VM to. The availability_set should be in the same resource group as VM.
version_added: "2.5"
storage_account_name:
description:
- Name of an existing storage account that supports creation of VHD blobs. If not specified for a new VM,
a new storage account named <vm name>XXXX, where XXXX is a random number, will be created using storage type 'Standard_LRS'.
storage_container_name:
description:
- Name of the container to use within the storage account to store VHD blobs. If no name is specified a
default container will be created.
default: vhds
storage_blob_name:
description:
- Name of the storage blob used to hold the VM's OS disk image. If no name is provided, defaults to
the VM name + '.vhd'. If you provide a name, it must end with '.vhd'.
aliases:
- storage_blob
managed_disk_type:
description:
- Managed OS disk type
choices:
- Standard_LRS
- Premium_LRS
version_added: "2.4"
os_disk_caching:
description:
- Type of OS disk caching.
choices:
- ReadOnly
- ReadWrite
default: ReadOnly
aliases:
- disk_caching
os_type:
description:
- Base type of operating system.
choices:
- Windows
- Linux
default: Linux
data_disks:
description:
- Describes list of data disks.
version_added: "2.4"
suboptions:
lun:
description:
- The logical unit number of the data disk.
default: 0
version_added: "2.4"
disk_size_gb:
description:
- The initial disk size in GB for blank data disks
version_added: "2.4"
managed_disk_type:
description:
- Managed data disk type
choices:
- Standard_LRS
- Premium_LRS
version_added: "2.4"
storage_account_name:
description:
- Name of an existing storage account that supports creation of VHD blobs. If not specified for a new VM,
a new storage account named <vm name>XXXX, where XXXX is a random number, will be created using storage type 'Standard_LRS'.
version_added: "2.4"
storage_container_name:
description:
- Name of the container to use within the storage account to store VHD blobs. If no name is specified a
default container will be created.
default: vhds
version_added: "2.4"
storage_blob_name:
description:
- Name of the storage blob used to hold the data disk. If no name is provided, defaults to
the VM name + '-data-<index>.vhd'. If you provide a name, it must end with '.vhd'.
version_added: "2.4"
caching:
description:
- Type of data disk caching.
choices:
- ReadOnly
- ReadWrite
default: ReadOnly
version_added: "2.4"
public_ip_allocation_method:
description:
- If a public IP address is created when creating the VM (because a Network Interface was not provided),
determines if the public IP address remains permanently associated with the Network Interface. If set
to 'Dynamic' the public IP address may change any time the VM is rebooted or power cycled.
- The C(Disabled) choice was added in Ansible 2.6.
choices:
- Dynamic
- Static
- Disabled
default: Static
aliases:
- public_ip_allocation
open_ports:
description:
- If a network interface is created when creating the VM, a security group will be created as well. For
Linux hosts a rule will be added to the security group allowing inbound TCP connections to the default
SSH port 22, and for Windows hosts ports 3389 and 5986 will be opened. Override the default open ports by
providing a list of ports.
network_interface_names:
description:
- List of existing network interface names to add to the VM. If a network interface name is not provided
when the VM is created, a default network interface will be created. In order for the module to create
a network interface, at least one Virtual Network with one Subnet must exist.
virtual_network_resource_group:
description:
- When creating a virtual machine, if a specific virtual network from another resource group should be
used, use this parameter to specify the resource group to use.
version_added: "2.4"
virtual_network_name:
description:
- When creating a virtual machine, if a network interface name is not provided, one will be created.
The new network interface will be assigned to the first virtual network found in the resource group.
Use this parameter to provide a specific virtual network instead.
aliases:
- virtual_network
subnet_name:
description:
- When creating a virtual machine, if a network interface name is not provided, one will be created.
The new network interface will be assigned to the first subnet found in the virtual network.
Use this parameter to provide a specific subnet instead.
aliases:
- subnet
remove_on_absent:
description:
- When removing a VM using state 'absent', also remove associated resources
- "It can be 'all' or a list with any of the following: ['network_interfaces', 'virtual_storage', 'public_ips']"
- Any other input will be ignored
default: ['all']
plan:
description:
- A dictionary describing a third-party billing plan for an instance
version_added: 2.5
suboptions:
name:
description:
- billing plan name
required: true
product:
description:
- product name
required: true
publisher:
description:
- publisher offering the plan
required: true
promotion_code:
description:
- optional promotion code
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Chris Houseknecht (@chouseknecht)"
- "Matt Davis (@nitzmahone)"
'''
EXAMPLES = '''
- name: Create VM with defaults
azure_rm_virtualmachine:
resource_group: Testing
name: testvm10
admin_username: chouseknecht
admin_password: <your password here>
image:
offer: CentOS
publisher: OpenLogic
sku: '7.1'
version: latest
- name: Create a VM with managed disk
azure_rm_virtualmachine:
resource_group: Testing
name: testvm001
vm_size: Standard_D4
managed_disk_type: Standard_LRS
admin_username: adminUser
ssh_public_keys:
- path: /home/adminUser/.ssh/authorized_keys
key_data: < insert your ssh public key here... >
image:
offer: CoreOS
publisher: CoreOS
sku: Stable
version: latest
- name: Create a VM with existing storage account and NIC
azure_rm_virtualmachine:
resource_group: Testing
name: testvm002
vm_size: Standard_D4
storage_account: testaccount001
admin_username: adminUser
ssh_public_keys:
- path: /home/adminUser/.ssh/authorized_keys
key_data: < insert your ssh public key here... >
network_interfaces: testvm001
image:
offer: CentOS
publisher: OpenLogic
sku: '7.1'
version: latest
- name: Create a VM with OS and multiple data managed disks
azure_rm_virtualmachine:
resource_group: Testing
name: testvm001
vm_size: Standard_D4
managed_disk_type: Standard_LRS
admin_username: adminUser
ssh_public_keys:
- path: /home/adminUser/.ssh/authorized_keys
key_data: < insert your ssh public key here... >
image:
offer: CoreOS
publisher: CoreOS
sku: Stable
version: latest
data_disks:
- lun: 0
disk_size_gb: 64
managed_disk_type: Standard_LRS
- lun: 1
disk_size_gb: 128
managed_disk_type: Premium_LRS
- name: Create a VM with OS and multiple data storage accounts
azure_rm_virtualmachine:
resource_group: Testing
name: testvm001
vm_size: Standard_DS1_v2
admin_username: adminUser
ssh_password_enabled: false
ssh_public_keys:
- path: /home/adminUser/.ssh/authorized_keys
key_data: < insert your ssh public key here... >
network_interfaces: testvm001
storage_container: osdisk
storage_blob: osdisk.vhd
image:
offer: CoreOS
publisher: CoreOS
sku: Stable
version: latest
data_disks:
- lun: 0
disk_size_gb: 64
storage_container_name: datadisk1
storage_blob_name: datadisk1.vhd
- lun: 1
disk_size_gb: 128
storage_container_name: datadisk2
storage_blob_name: datadisk2.vhd
- name: Create a VM with a custom image
azure_rm_virtualmachine:
resource_group: Testing
name: testvm001
vm_size: Standard_DS1_v2
admin_username: adminUser
admin_password: password01
image: customimage001
- name: Create a VM with a custom image from a particular resource group
azure_rm_virtualmachine:
resource_group: Testing
name: testvm001
vm_size: Standard_DS1_v2
admin_username: adminUser
admin_password: password01
image:
name: customimage001
resource_group: Testing
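# Hypothetical example (the VM name and cloud-init snippet are illustrative):
# custom_data is passed as a plain string; the module base64-encodes it before
# handing it to the guest.
- name: Create a VM with custom data for cloud-init
azure_rm_virtualmachine:
resource_group: Testing
name: testvm003
vm_size: Standard_DS1_v2
admin_username: adminUser
admin_password: password01
image: customimage001
custom_data: |
#cloud-config
package_upgrade: true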
- name: Power Off
azure_rm_virtualmachine:
resource_group: Testing
name: testvm002
started: no
- name: Deallocate
azure_rm_virtualmachine:
resource_group: Testing
name: testvm002
allocated: no
- name: Power On
azure_rm_virtualmachine:
resource_group: Testing
name: testvm002
- name: Restart
azure_rm_virtualmachine:
resource_group: Testing
name: testvm002
restarted: yes
- name: remove vm and all resources except public ips
azure_rm_virtualmachine:
resource_group: Testing
name: testvm002
state: absent
remove_on_absent:
- network_interfaces
- virtual_storage
'''
RETURN = '''
powerstate:
description: Indicates whether the power state is running, stopped or deallocated
returned: always
type: string
example: running
deleted_vhd_uris:
description: List of deleted Virtual Hard Disk URIs.
returned: 'on delete'
type: list
example: ["https://testvm104519.blob.core.windows.net/vhds/testvm10.vhd"]
deleted_network_interfaces:
description: List of deleted NICs.
returned: 'on delete'
type: list
example: ["testvm1001"]
deleted_public_ips:
description: List of deleted public IP address names.
returned: 'on delete'
type: list
example: ["testvm1001"]
azure_vm:
description: Facts about the current state of the object. Note that facts are not part of the registered output but available directly.
returned: always
type: complex
contains: {
"properties": {
"availabilitySet": {
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Compute/availabilitySets/MYAVAILABILITYSET"
},
"hardwareProfile": {
"vmSize": "Standard_D1"
},
"instanceView": {
"disks": [
{
"name": "testvm10.vhd",
"statuses": [
{
"code": "ProvisioningState/succeeded",
"displayStatus": "Provisioning succeeded",
"level": "Info",
"time": "2016-03-30T07:11:16.187272Z"
}
]
}
],
"statuses": [
{
"code": "ProvisioningState/succeeded",
"displayStatus": "Provisioning succeeded",
"level": "Info",
"time": "2016-03-30T20:33:38.946916Z"
},
{
"code": "PowerState/running",
"displayStatus": "VM running",
"level": "Info"
}
],
"vmAgent": {
"extensionHandlers": [],
"statuses": [
{
"code": "ProvisioningState/succeeded",
"displayStatus": "Ready",
"level": "Info",
"message": "GuestAgent is running and accepting new configurations.",
"time": "2016-03-30T20:31:16.000Z"
}
],
"vmAgentVersion": "WALinuxAgent-2.0.16"
}
},
"networkProfile": {
"networkInterfaces": [
{
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/networkInterfaces/testvm10_NIC01",
"name": "testvm10_NIC01",
"properties": {
"dnsSettings": {
"appliedDnsServers": [],
"dnsServers": []
},
"enableIPForwarding": false,
"ipConfigurations": [
{
"etag": 'W/"041c8c2a-d5dd-4cd7-8465-9125cfbe2cf8"',
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/networkInterfaces/testvm10_NIC01/ipConfigurations/default",
"name": "default",
"properties": {
"privateIPAddress": "10.10.0.5",
"privateIPAllocationMethod": "Dynamic",
"provisioningState": "Succeeded",
"publicIPAddress": {
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/publicIPAddresses/testvm10_PIP01",
"name": "testvm10_PIP01",
"properties": {
"idleTimeoutInMinutes": 4,
"ipAddress": "13.92.246.197",
"ipConfiguration": {
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/networkInterfaces/testvm10_NIC01/ipConfigurations/default"
},
"provisioningState": "Succeeded",
"publicIPAllocationMethod": "Static",
"resourceGuid": "3447d987-ca0d-4eca-818b-5dddc0625b42"
}
}
}
}
],
"macAddress": "00-0D-3A-12-AA-14",
"primary": true,
"provisioningState": "Succeeded",
"resourceGuid": "10979e12-ccf9-42ee-9f6d-ff2cc63b3844",
"virtualMachine": {
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Compute/virtualMachines/testvm10"
}
}
}
]
},
"osProfile": {
"adminUsername": "chouseknecht",
"computerName": "test10",
"linuxConfiguration": {
"disablePasswordAuthentication": false
},
"secrets": []
},
"provisioningState": "Succeeded",
"storageProfile": {
"dataDisks": [
{
"caching": "ReadWrite",
"createOption": "empty",
"diskSizeGB": 64,
"lun": 0,
"name": "datadisk1.vhd",
"vhd": {
"uri": "https://testvm10sa1.blob.core.windows.net/datadisk/datadisk1.vhd"
}
}
],
"imageReference": {
"offer": "CentOS",
"publisher": "OpenLogic",
"sku": "7.1",
"version": "7.1.20160308"
},
"osDisk": {
"caching": "ReadOnly",
"createOption": "fromImage",
"name": "testvm10.vhd",
"osType": "Linux",
"vhd": {
"uri": "https://testvm10sa1.blob.core.windows.net/vhds/testvm10.vhd"
}
}
}
},
"type": "Microsoft.Compute/virtualMachines"
}
''' # NOQA
import base64
import random
import re
try:
from msrestazure.azure_exceptions import CloudError
from msrestazure.tools import parse_resource_id
except ImportError:
# This is handled in azure_rm_common
pass
from ansible.module_utils.basic import to_native, to_bytes
from ansible.module_utils.azure_rm_common import AzureRMModuleBase, azure_id_to_dict
AZURE_OBJECT_CLASS = 'VirtualMachine'
AZURE_ENUM_MODULES = ['azure.mgmt.compute.models']
def extract_names_from_blob_uri(blob_uri, storage_suffix):
# HACK: ditch this once python SDK supports get by URI
m = re.match(r'^https://(?P<accountname>[^.]+)\.blob\.{0}/'
r'(?P<containername>[^/]+)/(?P<blobname>.+)$'.format(storage_suffix), blob_uri)
if not m:
raise Exception("unable to parse blob uri '%s'" % blob_uri)
extracted_names = m.groupdict()
return extracted_names
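# Illustrative example (hypothetical values): for
# blob_uri 'https://testvm10sa1.blob.core.windows.net/vhds/testvm10.vhd' and
# storage_suffix 'core.windows.net', this returns
# {'accountname': 'testvm10sa1', 'containername': 'vhds', 'blobname': 'testvm10.vhd'}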
class AzureRMVirtualMachine(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(type='str', required=True),
name=dict(type='str', required=True),
custom_data=dict(type='str'),
state=dict(choices=['present', 'absent'], default='present', type='str'),
location=dict(type='str'),
short_hostname=dict(type='str'),
vm_size=dict(type='str'),
admin_username=dict(type='str'),
admin_password=dict(type='str', no_log=True),
ssh_password_enabled=dict(type='bool', default=True),
ssh_public_keys=dict(type='list'),
image=dict(type='raw'),
availability_set=dict(type='str'),
storage_account_name=dict(type='str', aliases=['storage_account']),
storage_container_name=dict(type='str', aliases=['storage_container'], default='vhds'),
storage_blob_name=dict(type='str', aliases=['storage_blob']),
os_disk_caching=dict(type='str', aliases=['disk_caching'], choices=['ReadOnly', 'ReadWrite'],
default='ReadOnly'),
managed_disk_type=dict(type='str', choices=['Standard_LRS', 'Premium_LRS']),
os_type=dict(type='str', choices=['Linux', 'Windows'], default='Linux'),
public_ip_allocation_method=dict(type='str', choices=['Dynamic', 'Static', 'Disabled'], default='Static',
aliases=['public_ip_allocation']),
open_ports=dict(type='list'),
network_interface_names=dict(type='list', aliases=['network_interfaces']),
remove_on_absent=dict(type='list', default=['all']),
virtual_network_resource_group=dict(type='str'),
virtual_network_name=dict(type='str', aliases=['virtual_network']),
subnet_name=dict(type='str', aliases=['subnet']),
allocated=dict(type='bool', default=True),
restarted=dict(type='bool', default=False),
started=dict(type='bool', default=True),
data_disks=dict(type='list'),
plan=dict(type='dict')
)
self.resource_group = None
self.name = None
self.custom_data = None
self.state = None
self.location = None
self.short_hostname = None
self.vm_size = None
self.admin_username = None
self.admin_password = None
self.ssh_password_enabled = None
self.ssh_public_keys = None
self.image = None
self.availability_set = None
self.storage_account_name = None
self.storage_container_name = None
self.storage_blob_name = None
self.os_type = None
self.os_disk_caching = None
self.managed_disk_type = None
self.network_interface_names = None
self.remove_on_absent = set()
self.tags = None
self.force = None
self.public_ip_allocation_method = None
self.open_ports = None
self.virtual_network_resource_group = None
self.virtual_network_name = None
self.subnet_name = None
self.allocated = None
self.restarted = None
self.started = None
self.differences = None
self.data_disks = None
self.plan = None
self.results = dict(
changed=False,
actions=[],
powerstate_change=None,
ansible_facts=dict(azure_vm=None)
)
super(AzureRMVirtualMachine, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True)
def exec_module(self, **kwargs):
for key in list(self.module_arg_spec.keys()) + ['tags']:
setattr(self, key, kwargs[key])
# make sure options are lower case
self.remove_on_absent = set([resource.lower() for resource in self.remove_on_absent])
changed = False
powerstate_change = None
results = dict()
vm = None
network_interfaces = []
requested_vhd_uri = None
data_disk_requested_vhd_uri = None
disable_ssh_password = None
vm_dict = None
image_reference = None
custom_image = False
resource_group = self.get_resource_group(self.resource_group)
if not self.location:
# Set default location
self.location = resource_group.location
if self.state == 'present':
# Verify parameters and resolve any defaults
if self.vm_size and not self.vm_size_is_valid():
self.fail("Parameter error: vm_size {0} is not valid for your subscription and location.".format(
self.vm_size
))
if self.network_interface_names:
for name in self.network_interface_names:
nic = self.get_network_interface(name)
network_interfaces.append(nic.id)
if self.ssh_public_keys:
msg = "Parameter error: expecting ssh_public_keys to be a list of type dict where " \
"each dict contains keys: path, key_data."
for key in self.ssh_public_keys:
if not isinstance(key, dict):
self.fail(msg)
if not key.get('path') or not key.get('key_data'):
self.fail(msg)
if self.image and isinstance(self.image, dict):
if all(key in self.image for key in ('publisher', 'offer', 'sku', 'version')):
marketplace_image = self.get_marketplace_image_version()
if self.image['version'] == 'latest':
self.image['version'] = marketplace_image.name
self.log("Using image version {0}".format(self.image['version']))
image_reference = self.compute_models.ImageReference(
publisher=self.image['publisher'],
offer=self.image['offer'],
sku=self.image['sku'],
version=self.image['version']
)
elif self.image.get('name'):
custom_image = True
image_reference = self.get_custom_image_reference(
self.image.get('name'),
self.image.get('resource_group'))
else:
self.fail("parameter error: expecting image to contain [publisher, offer, sku, version] or [name, resource_group]")
elif self.image and isinstance(self.image, str):
custom_image = True
image_reference = self.get_custom_image_reference(self.image)
elif self.image:
self.fail("parameter error: expecting image to be a string or dict not {0}".format(type(self.image).__name__))
if self.plan:
if not self.plan.get('name') or not self.plan.get('product') or not self.plan.get('publisher'):
self.fail("parameter error: plan must include name, product, and publisher")
if not self.storage_blob_name and not self.managed_disk_type:
self.storage_blob_name = self.name + '.vhd'
elif self.managed_disk_type:
self.storage_blob_name = self.name
if self.storage_account_name and not self.managed_disk_type:
properties = self.get_storage_account(self.storage_account_name)
requested_vhd_uri = '{0}{1}/{2}'.format(properties.primary_endpoints.blob,
self.storage_container_name,
self.storage_blob_name)
disable_ssh_password = not self.ssh_password_enabled
try:
self.log("Fetching virtual machine {0}".format(self.name))
vm = self.compute_client.virtual_machines.get(self.resource_group, self.name, expand='instanceview')
self.check_provisioning_state(vm, self.state)
vm_dict = self.serialize_vm(vm)
if self.state == 'present':
differences = []
current_nics = []
results = vm_dict
# Try to determine if the VM needs to be updated
if self.network_interface_names:
for nic in vm_dict['properties']['networkProfile']['networkInterfaces']:
current_nics.append(nic['id'])
if set(current_nics) != set(network_interfaces):
self.log('CHANGED: virtual machine {0} - network interfaces are different.'.format(self.name))
differences.append('Network Interfaces')
updated_nics = [dict(id=id, primary=(i == 0))
for i, id in enumerate(network_interfaces)]
vm_dict['properties']['networkProfile']['networkInterfaces'] = updated_nics
changed = True
if self.os_disk_caching and \
self.os_disk_caching != vm_dict['properties']['storageProfile']['osDisk']['caching']:
self.log('CHANGED: virtual machine {0} - OS disk caching'.format(self.name))
differences.append('OS Disk caching')
changed = True
vm_dict['properties']['storageProfile']['osDisk']['caching'] = self.os_disk_caching
update_tags, vm_dict['tags'] = self.update_tags(vm_dict.get('tags', dict()))
if update_tags:
differences.append('Tags')
changed = True
if self.short_hostname and self.short_hostname != vm_dict['properties']['osProfile']['computerName']:
self.log('CHANGED: virtual machine {0} - short hostname'.format(self.name))
differences.append('Short Hostname')
changed = True
vm_dict['properties']['osProfile']['computerName'] = self.short_hostname
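# Work out whether a power-state transition is required; the if/elif chain
# below means at most one transition is recorded per run.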
if self.started and vm_dict['powerstate'] not in ['starting', 'running'] and self.allocated:
self.log("CHANGED: virtual machine {0} not running and requested state 'running'".format(self.name))
changed = True
powerstate_change = 'poweron'
elif self.state == 'present' and vm_dict['powerstate'] == 'running' and self.restarted:
self.log("CHANGED: virtual machine {0} {1} and requested state 'restarted'"
.format(self.name, vm_dict['powerstate']))
changed = True
powerstate_change = 'restarted'
elif self.state == 'present' and not self.allocated and vm_dict['powerstate'] not in ['deallocated', 'deallocating']:
self.log("CHANGED: virtual machine {0} {1} and requested state 'deallocated'"
.format(self.name, vm_dict['powerstate']))
changed = True
powerstate_change = 'deallocated'
elif not self.started and vm_dict['powerstate'] == 'running':
self.log("CHANGED: virtual machine {0} running and requested state 'stopped'".format(self.name))
changed = True
powerstate_change = 'poweroff'
self.differences = differences
elif self.state == 'absent':
self.log("CHANGED: virtual machine {0} exists and requested state is 'absent'".format(self.name))
results = dict()
changed = True
except CloudError:
self.log('Virtual machine {0} does not exist'.format(self.name))
if self.state == 'present':
self.log("CHANGED: virtual machine {0} does not exist but state is 'present'.".format(self.name))
changed = True
self.results['changed'] = changed
self.results['ansible_facts']['azure_vm'] = results
self.results['powerstate_change'] = powerstate_change
if self.check_mode:
return self.results
if changed:
if self.state == 'present':
default_storage_account = None
if not vm:
# Create the VM
self.log("Create virtual machine {0}".format(self.name))
self.results['actions'].append('Created VM {0}'.format(self.name))
# Validate parameters
if not self.admin_username:
self.fail("Parameter error: admin_username required when creating a virtual machine.")
if self.os_type == 'Linux':
if disable_ssh_password and not self.ssh_public_keys:
self.fail("Parameter error: ssh_public_keys required when disabling SSH password.")
if not image_reference:
self.fail("Parameter error: an image is required when creating a virtual machine.")
availability_set_resource = None
if self.availability_set:
parsed_availability_set = parse_resource_id(self.availability_set)
availability_set = self.get_availability_set(parsed_availability_set.get('resource_group', self.resource_group),
parsed_availability_set.get('name'))
availability_set_resource = self.compute_models.SubResource(availability_set.id)
# Get defaults
if not self.network_interface_names:
default_nic = self.create_default_nic()
self.log("network interface:")
self.log(self.serialize_obj(default_nic, 'NetworkInterface'), pretty_print=True)
network_interfaces = [default_nic.id]
# os disk
if not self.storage_account_name and not self.managed_disk_type:
storage_account = self.create_default_storage_account()
self.log("storage account:")
self.log(self.serialize_obj(storage_account, 'StorageAccount'), pretty_print=True)
requested_vhd_uri = 'https://{0}.blob.{1}/{2}/{3}'.format(
storage_account.name,
self._cloud_environment.suffixes.storage_endpoint,
self.storage_container_name,
self.storage_blob_name)
default_storage_account = storage_account # store for use by data disks if necessary
if not self.short_hostname:
self.short_hostname = self.name
nics = [self.compute_models.NetworkInterfaceReference(id=id, primary=(i == 0))
for i, id in enumerate(network_interfaces)]
# os disk
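# Exactly one of vhd / managed_disk is populated below: managed-disk VMs and
# custom images use managed storage, while the unmanaged path points the OS
# disk at the VHD blob URI assembled earlier.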
if self.managed_disk_type:
vhd = None
managed_disk = self.compute_models.ManagedDiskParameters(storage_account_type=self.managed_disk_type)
elif custom_image:
vhd = None
managed_disk = None
else:
vhd = self.compute_models.VirtualHardDisk(uri=requested_vhd_uri)
managed_disk = None
plan = None
if self.plan:
plan = self.compute_models.Plan(name=self.plan.get('name'), product=self.plan.get('product'),
publisher=self.plan.get('publisher'),
promotion_code=self.plan.get('promotion_code'))
vm_resource = self.compute_models.VirtualMachine(
self.location,
tags=self.tags,
os_profile=self.compute_models.OSProfile(
admin_username=self.admin_username,
computer_name=self.short_hostname,
),
hardware_profile=self.compute_models.HardwareProfile(
vm_size=self.vm_size
),
storage_profile=self.compute_models.StorageProfile(
os_disk=self.compute_models.OSDisk(
name=self.storage_blob_name,
vhd=vhd,
managed_disk=managed_disk,
create_option=self.compute_models.DiskCreateOptionTypes.from_image,
caching=self.os_disk_caching,
),
image_reference=image_reference,
),
network_profile=self.compute_models.NetworkProfile(
network_interfaces=nics
),
availability_set=availability_set_resource,
plan=plan
)
if self.admin_password:
vm_resource.os_profile.admin_password = self.admin_password
if self.custom_data:
# Azure SDK (erroneously?) wants native string type for this
vm_resource.os_profile.custom_data = to_native(base64.b64encode(to_bytes(self.custom_data)))
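# Illustrative encoding: custom_data '#!/bin/sh' is sent as 'IyEvYmluL3No'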
if self.os_type == 'Linux':
vm_resource.os_profile.linux_configuration = self.compute_models.LinuxConfiguration(
disable_password_authentication=disable_ssh_password
)
if self.ssh_public_keys:
ssh_config = self.compute_models.SshConfiguration()
ssh_config.public_keys = \
[self.compute_models.SshPublicKey(path=key['path'], key_data=key['key_data']) for key in self.ssh_public_keys]
vm_resource.os_profile.linux_configuration.ssh = ssh_config
# data disk
if self.data_disks:
data_disks = []
count = 0
for data_disk in self.data_disks:
if not data_disk.get('managed_disk_type'):
if not data_disk.get('storage_blob_name'):
data_disk['storage_blob_name'] = self.name + '-data-' + str(count) + '.vhd'
count += 1
if data_disk.get('storage_account_name'):
data_disk_storage_account = self.get_storage_account(data_disk['storage_account_name'])
else:
if not default_storage_account:
data_disk_storage_account = self.create_default_storage_account()
self.log("data disk storage account:")
self.log(self.serialize_obj(data_disk_storage_account, 'StorageAccount'), pretty_print=True)
default_storage_account = data_disk_storage_account # store for use by future data disks if necessary
else:
data_disk_storage_account = default_storage_account
if not data_disk.get('storage_container_name'):
data_disk['storage_container_name'] = 'vhds'
data_disk_requested_vhd_uri = 'https://{0}.blob.{1}/{2}/{3}'.format(
data_disk_storage_account.name,
self._cloud_environment.suffixes.storage_endpoint,
data_disk['storage_container_name'],
data_disk['storage_blob_name']
)
if not data_disk.get('managed_disk_type'):
data_disk_managed_disk = None
disk_name = data_disk['storage_blob_name']
data_disk_vhd = self.compute_models.VirtualHardDisk(uri=data_disk_requested_vhd_uri)
else:
data_disk_vhd = None
data_disk_managed_disk = self.compute_models.ManagedDiskParameters(storage_account_type=data_disk['managed_disk_type'])
disk_name = self.name + "-datadisk-" + str(count)
count += 1
data_disk['caching'] = data_disk.get(
'caching', 'ReadOnly'
)
data_disks.append(self.compute_models.DataDisk(
lun=data_disk['lun'],
name=disk_name,
vhd=data_disk_vhd,
caching=data_disk['caching'],
create_option=self.compute_models.DiskCreateOptionTypes.empty,
disk_size_gb=data_disk['disk_size_gb'],
managed_disk=data_disk_managed_disk,
))
vm_resource.storage_profile.data_disks = data_disks
self.log("Create virtual machine with parameters:")
self.create_or_update_vm(vm_resource)
elif self.differences and len(self.differences) > 0:
# Update the VM based on detected config differences
self.log("Update virtual machine {0}".format(self.name))
self.results['actions'].append('Updated VM {0}'.format(self.name))
nics = [self.compute_models.NetworkInterfaceReference(id=interface['id'], primary=(i == 0))
for i, interface in enumerate(vm_dict['properties']['networkProfile']['networkInterfaces'])]
# os disk
if not vm_dict['properties']['storageProfile']['osDisk'].get('managedDisk'):
managed_disk = None
vhd = self.compute_models.VirtualHardDisk(uri=vm_dict['properties']['storageProfile']['osDisk']['vhd']['uri'])
else:
vhd = None
managed_disk = self.compute_models.ManagedDiskParameters(
storage_account_type=vm_dict['properties']['storageProfile']['osDisk']['managedDisk']['storageAccountType']
)
availability_set_resource = None
try:
availability_set_resource = self.compute_models.SubResource(vm_dict['properties']['availabilitySet']['id'])
except Exception:
# pass if the availability set is not set
pass
vm_resource = self.compute_models.VirtualMachine(
vm_dict['location'],
os_profile=self.compute_models.OSProfile(
admin_username=vm_dict['properties']['osProfile']['adminUsername'],
computer_name=vm_dict['properties']['osProfile']['computerName']
),
hardware_profile=self.compute_models.HardwareProfile(
vm_size=vm_dict['properties']['hardwareProfile']['vmSize']
),
storage_profile=self.compute_models.StorageProfile(
os_disk=self.compute_models.OSDisk(
name=vm_dict['properties']['storageProfile']['osDisk']['name'],
vhd=vhd,
managed_disk=managed_disk,
create_option=vm_dict['properties']['storageProfile']['osDisk']['createOption'],
os_type=vm_dict['properties']['storageProfile']['osDisk']['osType'],
caching=vm_dict['properties']['storageProfile']['osDisk']['caching'],
),
image_reference=self.compute_models.ImageReference(
publisher=vm_dict['properties']['storageProfile']['imageReference']['publisher'],
offer=vm_dict['properties']['storageProfile']['imageReference']['offer'],
sku=vm_dict['properties']['storageProfile']['imageReference']['sku'],
version=vm_dict['properties']['storageProfile']['imageReference']['version']
),
),
availability_set=availability_set_resource,
network_profile=self.compute_models.NetworkProfile(
network_interfaces=nics
),
)
if vm_dict.get('tags'):
vm_resource.tags = vm_dict['tags']
# Add custom_data, if provided
if vm_dict['properties']['osProfile'].get('customData'):
custom_data = vm_dict['properties']['osProfile']['customData']
# Azure SDK (erroneously?) wants native string type for this
vm_resource.os_profile.custom_data = to_native(base64.b64encode(to_bytes(custom_data)))
# Add admin password, if one provided
if vm_dict['properties']['osProfile'].get('adminPassword'):
vm_resource.os_profile.admin_password = vm_dict['properties']['osProfile']['adminPassword']
# Add linux configuration, if applicable
linux_config = vm_dict['properties']['osProfile'].get('linuxConfiguration')
if linux_config:
ssh_config = linux_config.get('ssh', None)
vm_resource.os_profile.linux_configuration = self.compute_models.LinuxConfiguration(
disable_password_authentication=linux_config.get('disablePasswordAuthentication', False)
)
if ssh_config:
public_keys = ssh_config.get('publicKeys')
if public_keys:
vm_resource.os_profile.linux_configuration.ssh = self.compute_models.SshConfiguration(public_keys=[])
for key in public_keys:
vm_resource.os_profile.linux_configuration.ssh.public_keys.append(
self.compute_models.SshPublicKey(path=key['path'], key_data=key['keyData'])
)
# data disk
if vm_dict['properties']['storageProfile'].get('dataDisks'):
data_disks = []
for data_disk in vm_dict['properties']['storageProfile']['dataDisks']:
if data_disk.get('managedDisk'):
managed_disk_type = data_disk['managedDisk']['storageAccountType']
data_disk_managed_disk = self.compute_models.ManagedDiskParameters(storage_account_type=managed_disk_type)
data_disk_vhd = None
else:
data_disk_vhd = data_disk['vhd']['uri']
data_disk_managed_disk = None
data_disks.append(self.compute_models.DataDisk(
lun=int(data_disk['lun']),
name=data_disk.get('name'),
vhd=data_disk_vhd,
caching=data_disk.get('caching'),
create_option=data_disk.get('createOption'),
disk_size_gb=int(data_disk['diskSizeGB']),
managed_disk=data_disk_managed_disk,
))
vm_resource.storage_profile.data_disks = data_disks
self.log("Update virtual machine with parameters:")
self.create_or_update_vm(vm_resource)
# Make sure we leave the machine in requested power state
if (powerstate_change == 'poweron' and
self.results['ansible_facts']['azure_vm']['powerstate'] != 'running'):
# Attempt to power on the machine
self.power_on_vm()
elif (powerstate_change == 'poweroff' and
self.results['ansible_facts']['azure_vm']['powerstate'] == 'running'):
# Attempt to power off the machine
self.power_off_vm()
elif powerstate_change == 'restarted':
self.restart_vm()
elif powerstate_change == 'deallocated':
self.deallocate_vm()
self.results['ansible_facts']['azure_vm'] = self.serialize_vm(self.get_vm())
elif self.state == 'absent':
# delete the VM
self.log("Delete virtual machine {0}".format(self.name))
self.results['ansible_facts']['azure_vm'] = None
self.delete_vm(vm)
# until we sort out how we want to do this globally
del self.results['actions']
return self.results
def get_vm(self):
'''
Get the VM with expanded instanceView
:return: VirtualMachine object
'''
try:
vm = self.compute_client.virtual_machines.get(self.resource_group, self.name, expand='instanceview')
return vm
except Exception as exc:
self.fail("Error getting virtual machine {0} - {1}".format(self.name, str(exc)))
def serialize_vm(self, vm):
'''
Convert a VirtualMachine object to dict.
:param vm: VirtualMachine object
:return: dict
'''
result = self.serialize_obj(vm, AZURE_OBJECT_CLASS, enum_modules=AZURE_ENUM_MODULES)
result['id'] = vm.id
result['name'] = vm.name
result['type'] = vm.type
result['location'] = vm.location
result['tags'] = vm.tags
result['powerstate'] = dict()
if vm.instance_view:
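# e.g. a status code of 'PowerState/running' becomes powerstate 'running'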
result['powerstate'] = next((s.code.replace('PowerState/', '')
for s in vm.instance_view.statuses if s.code.startswith('PowerState')), None)
# Expand network interfaces to include config properties
for interface in vm.network_profile.network_interfaces:
int_dict = azure_id_to_dict(interface.id)
nic = self.get_network_interface(int_dict['networkInterfaces'])
for interface_dict in result['properties']['networkProfile']['networkInterfaces']:
if interface_dict['id'] == interface.id:
nic_dict = self.serialize_obj(nic, 'NetworkInterface')
interface_dict['name'] = int_dict['networkInterfaces']
interface_dict['properties'] = nic_dict['properties']
# Expand public IPs to include config properties
for interface in result['properties']['networkProfile']['networkInterfaces']:
for config in interface['properties']['ipConfigurations']:
if config['properties'].get('publicIPAddress'):
pipid_dict = azure_id_to_dict(config['properties']['publicIPAddress']['id'])
try:
pip = self.network_client.public_ip_addresses.get(self.resource_group,
pipid_dict['publicIPAddresses'])
except Exception as exc:
self.fail("Error fetching public ip {0} - {1}".format(pipid_dict['publicIPAddresses'],
str(exc)))
pip_dict = self.serialize_obj(pip, 'PublicIPAddress')
config['properties']['publicIPAddress']['name'] = pipid_dict['publicIPAddresses']
config['properties']['publicIPAddress']['properties'] = pip_dict['properties']
self.log(result, pretty_print=True)
if self.state != 'absent' and not result['powerstate']:
self.fail("Failed to determine PowerState of virtual machine {0}".format(self.name))
return result
def power_off_vm(self):
self.log("Powered off virtual machine {0}".format(self.name))
self.results['actions'].append("Powered off virtual machine {0}".format(self.name))
try:
poller = self.compute_client.virtual_machines.power_off(self.resource_group, self.name)
self.get_poller_result(poller)
except Exception as exc:
self.fail("Error powering off virtual machine {0} - {1}".format(self.name, str(exc)))
return True
def power_on_vm(self):
self.results['actions'].append("Powered on virtual machine {0}".format(self.name))
self.log("Power on virtual machine {0}".format(self.name))
try:
poller = self.compute_client.virtual_machines.start(self.resource_group, self.name)
self.get_poller_result(poller)
except Exception as exc:
self.fail("Error powering on virtual machine {0} - {1}".format(self.name, str(exc)))
return True
def restart_vm(self):
self.results['actions'].append("Restarted virtual machine {0}".format(self.name))
self.log("Restart virtual machine {0}".format(self.name))
try:
poller = self.compute_client.virtual_machines.restart(self.resource_group, self.name)
self.get_poller_result(poller)
except Exception as exc:
self.fail("Error restarting virtual machine {0} - {1}".format(self.name, str(exc)))
return True
def deallocate_vm(self):
self.results['actions'].append("Deallocated virtual machine {0}".format(self.name))
self.log("Deallocate virtual machine {0}".format(self.name))
try:
poller = self.compute_client.virtual_machines.deallocate(self.resource_group, self.name)
self.get_poller_result(poller)
except Exception as exc:
self.fail("Error deallocating virtual machine {0} - {1}".format(self.name, str(exc)))
return True
def delete_vm(self, vm):
vhd_uris = []
managed_disk_ids = []
nic_names = []
pip_names = []
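# Linked resources are collected first, then deleted in dependency order:
# the VM itself, then its VHD blobs/managed disks, then NICs, and public IPs
# last (a public IP cannot be deleted while still attached to a NIC).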
if self.remove_on_absent.intersection(set(['all', 'virtual_storage'])):
# store the attached vhd info so we can nuke it after the VM is gone
if vm.storage_profile.os_disk.managed_disk:
self.log('Storing managed disk ID for deletion')
managed_disk_ids.append(vm.storage_profile.os_disk.managed_disk.id)
elif vm.storage_profile.os_disk.vhd:
self.log('Storing VHD URI for deletion')
vhd_uris.append(vm.storage_profile.os_disk.vhd.uri)
data_disks = vm.storage_profile.data_disks
for data_disk in data_disks:
if data_disk.vhd:
vhd_uris.append(data_disk.vhd.uri)
elif data_disk.managed_disk:
managed_disk_ids.append(data_disk.managed_disk.id)
# FUTURE enable diff mode, move these there...
self.log("VHD URIs to delete: {0}".format(', '.join(vhd_uris)))
self.results['deleted_vhd_uris'] = vhd_uris
self.log("Managed disk IDs to delete: {0}".format(', '.join(managed_disk_ids)))
self.results['deleted_managed_disk_ids'] = managed_disk_ids
if self.remove_on_absent.intersection(set(['all', 'network_interfaces'])):
# store the attached nic info so we can nuke them after the VM is gone
self.log('Storing NIC names for deletion.')
for interface in vm.network_profile.network_interfaces:
id_dict = azure_id_to_dict(interface.id)
nic_names.append(id_dict['networkInterfaces'])
self.log('NIC names to delete {0}'.format(', '.join(nic_names)))
self.results['deleted_network_interfaces'] = nic_names
if self.remove_on_absent.intersection(set(['all', 'public_ips'])):
# also store each nic's attached public IPs and delete after the NIC is gone
for name in nic_names:
nic = self.get_network_interface(name)
for ipc in nic.ip_configurations:
if ipc.public_ip_address:
pip_dict = azure_id_to_dict(ipc.public_ip_address.id)
pip_names.append(pip_dict['publicIPAddresses'])
self.log('Public IPs to delete are {0}'.format(', '.join(pip_names)))
self.results['deleted_public_ips'] = pip_names
self.log("Deleting virtual machine {0}".format(self.name))
self.results['actions'].append("Deleted virtual machine {0}".format(self.name))
try:
poller = self.compute_client.virtual_machines.delete(self.resource_group, self.name)
# wait for the poller to finish
self.get_poller_result(poller)
except Exception as exc:
self.fail("Error deleting virtual machine {0} - {1}".format(self.name, str(exc)))
# TODO: parallelize nic, vhd, and public ip deletions with begin_deleting
# TODO: best-effort to keep deleting other linked resources if we encounter an error
if self.remove_on_absent.intersection(set(['all', 'virtual_storage'])):
self.log('Deleting VHDs')
self.delete_vm_storage(vhd_uris)
self.log('Deleting managed disks')
self.delete_managed_disks(managed_disk_ids)
if self.remove_on_absent.intersection(set(['all', 'network_interfaces'])):
self.log('Deleting network interfaces')
for name in nic_names:
self.delete_nic(name)
if self.remove_on_absent.intersection(set(['all', 'public_ips'])):
self.log('Deleting public IPs')
for name in pip_names:
self.delete_pip(name)
return True
def get_network_interface(self, name):
try:
nic = self.network_client.network_interfaces.get(self.resource_group, name)
return nic
except Exception as exc:
self.fail("Error fetching network interface {0} - {1}".format(name, str(exc)))
def delete_nic(self, name):
self.log("Deleting network interface {0}".format(name))
self.results['actions'].append("Deleted network interface {0}".format(name))
try:
poller = self.network_client.network_interfaces.delete(self.resource_group, name)
except Exception as exc:
self.fail("Error deleting network interface {0} - {1}".format(name, str(exc)))
self.get_poller_result(poller)
# Delete doesn't return anything. If we get this far, assume success
return True
def delete_pip(self, name):
self.results['actions'].append("Deleted public IP {0}".format(name))
try:
poller = self.network_client.public_ip_addresses.delete(self.resource_group, name)
self.get_poller_result(poller)
except Exception as exc:
self.fail("Error deleting {0} - {1}".format(name, str(exc)))
# Delete returns nada. If we get here, assume that all is well.
return True
def delete_managed_disks(self, managed_disk_ids):
for mdi in managed_disk_ids:
try:
poller = self.rm_client.resources.delete_by_id(mdi, '2017-03-30')
self.get_poller_result(poller)
except Exception as exc:
self.fail("Error deleting managed disk {0} - {1}".format(mdi, str(exc)))
def delete_vm_storage(self, vhd_uris):
# FUTURE: figure out a cloud_env independent way to delete these
for uri in vhd_uris:
self.log("Extracting info from blob uri '{0}'".format(uri))
try:
blob_parts = extract_names_from_blob_uri(uri, self._cloud_environment.suffixes.storage_endpoint)
except Exception as exc:
self.fail("Error parsing blob URI {0}".format(str(exc)))
storage_account_name = blob_parts['accountname']
container_name = blob_parts['containername']
blob_name = blob_parts['blobname']
blob_client = self.get_blob_client(self.resource_group, storage_account_name)
self.log("Delete blob {0}:{1}".format(container_name, blob_name))
self.results['actions'].append("Deleted blob {0}:{1}".format(container_name, blob_name))
try:
blob_client.delete_blob(container_name, blob_name)
except Exception as exc:
self.fail("Error deleting blob {0}:{1} - {2}".format(container_name, blob_name, str(exc)))
def get_marketplace_image_version(self):
try:
versions = self.compute_client.virtual_machine_images.list(self.location,
self.image['publisher'],
self.image['offer'],
self.image['sku'])
except Exception as exc:
self.fail("Error fetching image {0} {1} {2} - {3}".format(self.image['publisher'],
self.image['offer'],
self.image['sku'],
str(exc)))
if versions:
if self.image['version'] == 'latest':
return versions[-1]
for version in versions:
if version.name == self.image['version']:
return version
self.fail("Error could not find image {0} {1} {2} {3}".format(self.image['publisher'],
self.image['offer'],
self.image['sku'],
self.image['version']))
def get_custom_image_reference(self, name, resource_group=None):
try:
if resource_group:
vm_images = self.compute_client.images.list_by_resource_group(resource_group)
else:
vm_images = self.compute_client.images.list()
except Exception as exc:
self.fail("Error fetching custom images from subscription - {0}".format(str(exc)))
for vm_image in vm_images:
if vm_image.name == name:
self.log("Using custom image id {0}".format(vm_image.id))
return self.compute_models.ImageReference(id=vm_image.id)
self.fail("Error could not find image with name {0}".format(name))
def get_availability_set(self, resource_group, name):
try:
return self.compute_client.availability_sets.get(resource_group, name)
except Exception as exc:
self.fail("Error fetching availability set {0} - {1}".format(name, str(exc)))
def get_storage_account(self, name):
try:
account = self.storage_client.storage_accounts.get_properties(self.resource_group,
name)
return account
except Exception as exc:
self.fail("Error fetching storage account {0} - {1}".format(name, str(exc)))
def create_or_update_vm(self, params):
try:
poller = self.compute_client.virtual_machines.create_or_update(self.resource_group, self.name, params)
self.get_poller_result(poller)
except Exception as exc:
self.fail("Error creating or updating virtual machine {0} - {1}".format(self.name, str(exc)))
def vm_size_is_valid(self):
'''
Validate self.vm_size against the list of virtual machine sizes available for the account and location.
:return: boolean
'''
try:
sizes = self.compute_client.virtual_machine_sizes.list(self.location)
except Exception as exc:
self.fail("Error retrieving available machine sizes - {0}".format(str(exc)))
for size in sizes:
if size.name == self.vm_size:
return True
return False
def create_default_storage_account(self):
'''
Create a default storage account <vm name>XXXX, where XXXX is a random number. If <vm name>XXXX exists, use it.
Otherwise, create one.
:return: storage account object
'''
account = None
valid_name = False
# Attempt to find a valid storage account name
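# Azure storage account names must be 3-24 lowercase alphanumeric characters;
# truncating the VM name to 20 characters and appending a 4-digit random
# suffix keeps the generated name within that limit.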
storage_account_name_base = re.sub('[^a-zA-Z0-9]', '', self.name[:20].lower())
for _ in range(5):
rand = random.randrange(1000, 9999)
storage_account_name = storage_account_name_base + str(rand)
if self.check_storage_account_name(storage_account_name):
valid_name = True
break
if not valid_name:
self.fail("Failed to create a unique storage account name for {0}. Try using a different VM name."
.format(self.name))
try:
account = self.storage_client.storage_accounts.get_properties(self.resource_group, storage_account_name)
except CloudError:
pass
if account:
self.log("Storage account {0} found.".format(storage_account_name))
self.check_provisioning_state(account)
return account
sku = self.storage_models.Sku(self.storage_models.SkuName.standard_lrs)
sku.tier = self.storage_models.SkuTier.standard
kind = self.storage_models.Kind.storage
parameters = self.storage_models.StorageAccountCreateParameters(sku, kind, self.location)
self.log("Creating storage account {0} in location {1}".format(storage_account_name, self.location))
self.results['actions'].append("Created storage account {0}".format(storage_account_name))
try:
poller = self.storage_client.storage_accounts.create(self.resource_group, storage_account_name, parameters)
self.get_poller_result(poller)
except Exception as exc:
self.fail("Failed to create storage account: {0} - {1}".format(storage_account_name, str(exc)))
return self.get_storage_account(storage_account_name)
def check_storage_account_name(self, name):
self.log("Checking storage account name availability for {0}".format(name))
try:
response = self.storage_client.storage_accounts.check_name_availability(name)
if response.reason == 'AccountNameInvalid':
raise Exception("Invalid default storage account name: {0}".format(name))
except Exception as exc:
self.fail("Error checking storage account name availability for {0} - {1}".format(name, str(exc)))
return response.name_available
def create_default_nic(self):
'''
Create a default Network Interface <vm name>01. Requires an existing virtual network
with one subnet. If NIC <vm name>01 exists, use it. Otherwise, create one.
:return: NIC object
'''
network_interface_name = self.name + '01'
nic = None
self.log("Create default NIC {0}".format(network_interface_name))
self.log("Check to see if NIC {0} exists".format(network_interface_name))
try:
nic = self.network_client.network_interfaces.get(self.resource_group, network_interface_name)
except CloudError:
pass
if nic:
self.log("NIC {0} found.".format(network_interface_name))
self.check_provisioning_state(nic)
return nic
self.log("NIC {0} does not exist.".format(network_interface_name))
virtual_network_resource_group = None
if self.virtual_network_resource_group:
virtual_network_resource_group = self.virtual_network_resource_group
else:
virtual_network_resource_group = self.resource_group
if self.virtual_network_name:
try:
self.network_client.virtual_networks.list(virtual_network_resource_group, self.virtual_network_name)
virtual_network_name = self.virtual_network_name
except CloudError as exc:
self.fail("Error: fetching virtual network {0} - {1}".format(self.virtual_network_name, str(exc)))
else:
# Find a virtual network
no_vnets_msg = "Error: unable to find virtual network in resource group {0}. A virtual network " \
"with at least one subnet must exist in order to create a NIC for the virtual " \
"machine.".format(virtual_network_resource_group)
virtual_network_name = None
try:
vnets = self.network_client.virtual_networks.list(virtual_network_resource_group)
except CloudError:
self.log('cloud error!')
self.fail(no_vnets_msg)
for vnet in vnets:
virtual_network_name = vnet.name
self.log('vnet name: {0}'.format(vnet.name))
break
if not virtual_network_name:
self.fail(no_vnets_msg)
if self.subnet_name:
try:
subnet = self.network_client.subnets.get(virtual_network_resource_group, virtual_network_name, self.subnet_name)
subnet_id = subnet.id
except Exception as exc:
self.fail("Error: fetching subnet {0} - {1}".format(self.subnet_name, str(exc)))
else:
no_subnets_msg = "Error: unable to find a subnet in virtual network {0}. A virtual network " \
"with at least one subnet must exist in order to create a NIC for the virtual " \
"machine.".format(virtual_network_name)
subnet_id = None
try:
subnets = self.network_client.subnets.list(virtual_network_resource_group, virtual_network_name)
except CloudError:
self.fail(no_subnets_msg)
for subnet in subnets:
subnet_id = subnet.id
self.log('subnet id: {0}'.format(subnet_id))
break
if not subnet_id:
self.fail(no_subnets_msg)
pip = None
if self.public_ip_allocation_method != 'Disabled':
self.results['actions'].append('Created default public IP {0}'.format(self.name + '01'))
pip_info = self.create_default_pip(self.resource_group, self.location, self.name + '01', self.public_ip_allocation_method)
pip = self.network_models.PublicIPAddress(id=pip_info.id, location=pip_info.location, resource_guid=pip_info.resource_guid)
self.results['actions'].append('Created default security group {0}'.format(self.name + '01'))
group = self.create_default_securitygroup(self.resource_group, self.location, self.name + '01', self.os_type,
self.open_ports)
parameters = self.network_models.NetworkInterface(
location=self.location,
ip_configurations=[
self.network_models.NetworkInterfaceIPConfiguration(
private_ip_allocation_method='Dynamic',
)
]
)
parameters.ip_configurations[0].subnet = self.network_models.Subnet(id=subnet_id)
parameters.ip_configurations[0].name = 'default'
parameters.network_security_group = self.network_models.NetworkSecurityGroup(id=group.id,
location=group.location,
resource_guid=group.resource_guid)
parameters.ip_configurations[0].public_ip_address = pip
self.log("Creating NIC {0}".format(network_interface_name))
self.log(self.serialize_obj(parameters, 'NetworkInterface'), pretty_print=True)
self.results['actions'].append("Created NIC {0}".format(network_interface_name))
try:
poller = self.network_client.network_interfaces.create_or_update(self.resource_group,
network_interface_name,
parameters)
new_nic = self.get_poller_result(poller)
except Exception as exc:
self.fail("Error creating network interface {0} - {1}".format(network_interface_name, str(exc)))
return new_nic
def main():
AzureRMVirtualMachine()
if __name__ == '__main__':
main()
| gpl-3.0 | 5,300,736,337,898,016,000 | 45.71851 | 217 | 0.537651 | false |
Qalthos/ansible | test/units/module_utils/test_hetzner.py | 31 | 7252 | # Copyright: (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
import copy
import json
import pytest
from mock import MagicMock
from ansible.module_utils import hetzner
class ModuleFailException(Exception):
def __init__(self, msg, **kwargs):
super(ModuleFailException, self).__init__(msg)
self.fail_msg = msg
self.fail_kwargs = kwargs
def get_module_mock():
def f(msg, **kwargs):
raise ModuleFailException(msg, **kwargs)
module = MagicMock()
module.fail_json = f
module.from_json = json.loads
return module
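# The mock mirrors only the parts of AnsibleModule the hetzner helpers use:
# fail_json raises instead of exiting the process, and from_json delegates
# to json.loads.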
# ########################################################################################
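# Each case is a (mocked fetch_url return value, accepted error codes,
# expected (parsed json, error) result) triple. Only the info dict's 'body'
# key matters here, since fetch_url itself is replaced by a MagicMock.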
FETCH_URL_JSON_SUCCESS = [
(
(None, dict(
body=json.dumps(dict(
a='b'
)).encode('utf-8'),
)),
None,
(dict(
a='b'
), None)
),
(
(None, dict(
body=json.dumps(dict(
error=dict(
code="foo",
status=400,
message="bar",
),
a='b'
)).encode('utf-8'),
)),
['foo'],
(dict(
error=dict(
code="foo",
status=400,
message="bar",
),
a='b'
), 'foo')
),
]
FETCH_URL_JSON_FAIL = [
(
(None, dict(
body=json.dumps(dict(
error=dict(
code="foo",
status=400,
message="bar",
),
)).encode('utf-8'),
)),
None,
'Request failed: 400 foo (bar)'
),
(
(None, dict(
body=json.dumps(dict(
error=dict(
code="foo",
status=400,
message="bar",
),
)).encode('utf-8'),
)),
['bar'],
'Request failed: 400 foo (bar)'
),
]
@pytest.mark.parametrize("return_value, accept_errors, result", FETCH_URL_JSON_SUCCESS)
def test_fetch_url_json(monkeypatch, return_value, accept_errors, result):
module = get_module_mock()
hetzner.fetch_url = MagicMock(return_value=return_value)
assert hetzner.fetch_url_json(module, 'https://foo/bar', accept_errors=accept_errors) == result
@pytest.mark.parametrize("return_value, accept_errors, result", FETCH_URL_JSON_FAIL)
def test_fetch_url_json_fail(monkeypatch, return_value, accept_errors, result):
module = get_module_mock()
hetzner.fetch_url = MagicMock(return_value=return_value)
with pytest.raises(ModuleFailException) as exc:
hetzner.fetch_url_json(module, 'https://foo/bar', accept_errors=accept_errors)
assert exc.value.fail_msg == result
assert exc.value.fail_kwargs == dict()
# ########################################################################################
GET_FAILOVER_SUCCESS = [
(
'1.2.3.4',
(None, dict(
body=json.dumps(dict(
failover=dict(
active_server_ip='1.1.1.1',
ip='1.2.3.4',
netmask='255.255.255.255',
)
)).encode('utf-8'),
)),
'1.1.1.1',
dict(
active_server_ip='1.1.1.1',
ip='1.2.3.4',
netmask='255.255.255.255',
)
),
]
GET_FAILOVER_FAIL = [
(
'1.2.3.4',
(None, dict(
body=json.dumps(dict(
error=dict(
code="foo",
status=400,
message="bar",
),
)).encode('utf-8'),
)),
'Request failed: 400 foo (bar)'
),
]
@pytest.mark.parametrize("ip, return_value, result, record", GET_FAILOVER_SUCCESS)
def test_get_failover_record(monkeypatch, ip, return_value, result, record):
module = get_module_mock()
hetzner.fetch_url = MagicMock(return_value=copy.deepcopy(return_value))
assert hetzner.get_failover_record(module, ip) == record
@pytest.mark.parametrize("ip, return_value, result", GET_FAILOVER_FAIL)
def test_get_failover_record_fail(monkeypatch, ip, return_value, result):
module = get_module_mock()
hetzner.fetch_url = MagicMock(return_value=copy.deepcopy(return_value))
with pytest.raises(ModuleFailException) as exc:
hetzner.get_failover_record(module, ip)
assert exc.value.fail_msg == result
assert exc.value.fail_kwargs == dict()
@pytest.mark.parametrize("ip, return_value, result, record", GET_FAILOVER_SUCCESS)
def test_get_failover(monkeypatch, ip, return_value, result, record):
module = get_module_mock()
hetzner.fetch_url = MagicMock(return_value=copy.deepcopy(return_value))
assert hetzner.get_failover(module, ip) == result
@pytest.mark.parametrize("ip, return_value, result", GET_FAILOVER_FAIL)
def test_get_failover_fail(monkeypatch, ip, return_value, result):
module = get_module_mock()
hetzner.fetch_url = MagicMock(return_value=copy.deepcopy(return_value))
with pytest.raises(ModuleFailException) as exc:
hetzner.get_failover(module, ip)
assert exc.value.fail_msg == result
assert exc.value.fail_kwargs == dict()
# ########################################################################################
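# set_failover returns an (active_server_ip, changed) tuple; the
# FAILOVER_ALREADY_ROUTED error is treated as success with changed=False.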
SET_FAILOVER_SUCCESS = [
(
'1.2.3.4',
'1.1.1.1',
(None, dict(
body=json.dumps(dict(
failover=dict(
active_server_ip='1.1.1.2',
)
)).encode('utf-8'),
)),
('1.1.1.2', True)
),
(
'1.2.3.4',
'1.1.1.1',
(None, dict(
body=json.dumps(dict(
error=dict(
code="FAILOVER_ALREADY_ROUTED",
status=400,
message="Failover already routed",
),
)).encode('utf-8'),
)),
('1.1.1.1', False)
),
]
SET_FAILOVER_FAIL = [
(
'1.2.3.4',
'1.1.1.1',
(None, dict(
body=json.dumps(dict(
error=dict(
code="foo",
status=400,
message="bar",
),
)).encode('utf-8'),
)),
'Request failed: 400 foo (bar)'
),
]
@pytest.mark.parametrize("ip, value, return_value, result", SET_FAILOVER_SUCCESS)
def test_set_failover(monkeypatch, ip, value, return_value, result):
module = get_module_mock()
hetzner.fetch_url = MagicMock(return_value=copy.deepcopy(return_value))
assert hetzner.set_failover(module, ip, value) == result
@pytest.mark.parametrize("ip, value, return_value, result", SET_FAILOVER_FAIL)
def test_set_failover_fail(monkeypatch, ip, value, return_value, result):
module = get_module_mock()
hetzner.fetch_url = MagicMock(return_value=copy.deepcopy(return_value))
with pytest.raises(ModuleFailException) as exc:
hetzner.set_failover(module, ip, value)
assert exc.value.fail_msg == result
assert exc.value.fail_kwargs == dict()
| gpl-3.0 | -2,605,984,255,953,017,000 | 26.366038 | 99 | 0.511859 | false |