repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated |
---|---|---|---|---|---|---|---|---|---|---|
michaellaier/pymor | src/pymortests/fixtures/discretization.py | 1 | 1548 | # This file is part of the pyMOR project (http://www.pymor.org).
# Copyright Holders: Rene Milk, Stephan Rave, Felix Schindler
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
from __future__ import absolute_import, division, print_function
from itertools import product
import pytest
from pymor.discretizers.advection import discretize_nonlinear_instationary_advection_fv
from pymor.discretizers.elliptic import discretize_elliptic_cg
from pymortests.fixtures.analyticalproblem import (picklable_thermalblock_problems, non_picklable_thermalblock_problems,
burgers_problems)
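# Note: the p=p, d=d defaults in the lambdas below bind the current loop values
# at definition time; without them every lambda would capture only the last
# (problem, diameter) pair produced by the comprehension.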
picklable_discretization_generators = \
[lambda p=p,d=d: discretize_elliptic_cg(p, diameter=d)[0]
for p, d in product(picklable_thermalblock_problems, [1./50., 1./100.])] + \
[lambda p=p,d=d: discretize_nonlinear_instationary_advection_fv(p, diameter=d)[0]
for p, d in product(burgers_problems, [1./10., 1./15.])]
non_picklable_discretization_generators = \
[lambda p=p,d=d: discretize_elliptic_cg(p, diameter=d)[0]
for p, d in product(non_picklable_thermalblock_problems, [1./20., 1./30.])]
discretization_generators = picklable_discretization_generators + non_picklable_discretization_generators
@pytest.fixture(params=discretization_generators)
def discretization(request):
return request.param()
@pytest.fixture(params=picklable_discretization_generators)
def picklable_discretization(request):
return request.param()
| bsd-2-clause | -357,065,425,509,571,400 | 37.7 | 120 | 0.72739 | false |
surak/Raspberry | electronics/dotmatrix/dotmatrix_letters.py | 1 | 6534 | alphabet= {'A': [0b11110011,
0b11101101,
0b11011110,
0b11011110,
0b11000000,
0b11011110,
0b11011110,
0b11011110],
'B': [0b11100000,
0b11011110,
0b11011110,
0b11100000,
0b11011110,
0b11011110,
0b11011110,
0b11100000],
'C': [0b11100001,
0b11011110,
0b11111110,
0b11111110,
0b11111110,
0b11111110,
0b11011110,
0b11100001],
'D': [0b11100000,
0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11100000],
'E': [0b11000000,
0b11111110,
0b11111110,
0b11110000,
0b11111110,
0b11111110,
0b11111110,
0b11000000],
'F': [0b11000000,
0b11111110,
0b11111110,
0b11110000,
0b11111110,
0b11111110,
0b11111110,
0b11111110],
'G': [0b11100001,
0b11011110,
0b11111110,
0b11111110,
0b11000110,
0b11011110,
0b11011110,
0b11100001],
'H': [0b11011110,
0b11011110,
0b11011110,
0b11000000,
0b11011110,
0b11011110,
0b11011110,
0b11011110],
'I': [0b11100000,
0b11111011,
0b11111011,
0b11111011,
0b11111011,
0b11111011,
0b11111011,
0b11100000],
'J': [0b11011111,
0b11011111,
0b11011111,
0b11011111,
0b11011111,
0b11011111,
0b11011110,
0b11100001],
'K': [0b11101110,
0b11110110,
0b11111010,
0b11111100,
0b11111010,
0b11110110,
0b11101110,
0b11011110],
'L': [0b11111110,
0b11111110,
0b11111110,
0b11111110,
0b11111110,
0b11111110,
0b11111110,
0b11000000],
'M': [0b11011110,
0b11001100,
0b11010010,
0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11011110],
'N': [0b11011110,
0b11011100,
0b11011010,
0b11010110,
0b11001110,
0b11011110,
0b11011110,
0b11011110],
'O': [0b11100001,
0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11100001],
'P': [0b11100000,
0b11011110,
0b11011110,
0b11011110,
0b11100000,
0b11111110,
0b11111110,
0b11111110],
'Q': [0b11100001,
0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11010110,
0b11101110,
0b11010001],
'R': [0b11100000,
0b11011110,
0b11011110,
0b11011110,
0b11100000,
0b11110110,
0b11101110,
0b11011110],
'S': [0b11100001,
0b11011110,
0b11111110,
0b11100001,
0b11011111,
0b11011110,
0b11011110,
0b11100001],
'T': [0b11100000,
0b11111011,
0b11111011,
0b11111011,
0b11111011,
0b11111011,
0b11111011,
0b11111011],
'U': [0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11100001],
'V': [0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11101101,
0b11110011],
'W': [0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11011110,
0b11010010,
0b11101101],
"X": [0b11101110,
0b11101110,
0b11110101,
0b11111011,
0b11111011,
0b11110101,
0b11101110,
0b11101110],
"Y": [0b11101110,
0b11101110,
0b11110101,
0b11111011,
0b11111011,
0b11111011,
0b11111011,
0b11111011],
"Z": [0b11000000,
0b11011111,
0b11101111,
0b11110111,
0b11111011,
0b11111101,
0b11111110,
0b11000000],
" ": [0b11111111,
0b11111111,
0b11111111,
0b11111111,
0b11111111,
0b11111111,
0b11111111,
0b11111111],
"0": [0b11111111,
0b10011001,
0b01100110,
0b01111110,
0b01111110,
0b10111101,
0b11011011,
0b11100111] # This is just a heart :-)
}
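# --- illustrative helper (an editorial sketch, not part of the upstream file) ---
# The bitmaps above appear to be active-low: a 0 bit lights an LED, a 1 bit leaves
# it off. Assuming that convention and most-significant-bit-first column order,
# this prints a glyph as ASCII art for quick inspection on the console.
def show(letter, on='#', off='.'):
    for row in alphabet[letter]:
        print(''.join(on if not (row >> bit) & 1 else off for bit in range(7, -1, -1)))

# Example: show('A') prints the 8x8 pattern stored for the letter A.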
| bsd-3-clause | 256,584,926,753,121,860 | 28.04 | 55 | 0.350627 | false |
HopeFOAM/HopeFOAM | ThirdParty-0.1/ParaView-5.0.1/Examples/Catalyst/PythonDolfinExample/simulation-catalyst-step2.py | 1 | 5483 | """This demo program solves the incompressible Navier-Stokes equations
on an L-shaped domain using Chorin's splitting method."""
# Copyright (C) 2010-2011 Anders Logg
#
# This file is part of DOLFIN.
#
# DOLFIN is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DOLFIN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DOLFIN. If not, see <http://www.gnu.org/licenses/>.
#
# Modified by Mikael Mortensen 2011
#
# First added: 2010-08-30
# Last changed: 2011-06-30
#
# SC14 Paraview's Catalyst tutorial
#
# Step 2 : plug to catalyst python API, add a coProcess function
#
# [SC14-Catalyst] we need a python environment that enables import of both Dolfin and ParaView
execfile("simulation-env.py")
# [SC14-Catalyst] import paraview, vtk and paraview's simple API
import sys
import paraview
import paraview.vtk as vtk
import paraview.simple as pvsimple
# [SC14-Catalyst] check for command line arguments
if len(sys.argv) != 3:
print "command is 'python",sys.argv[0],"<script name> <number of time steps>'"
sys.exit(1)
# [SC14-Catalyst] initialize and read input parameters
paraview.options.batch = True
paraview.options.symmetric = True
# [SC14-Catalyst] import user co-processing script
import vtkPVCatalystPython
import os
scriptpath, scriptname = os.path.split(sys.argv[1])
sys.path.append(scriptpath)
if scriptname.endswith(".py"):
print 'script name is ', scriptname
scriptname = scriptname[0:len(scriptname)-3]
try:
cpscript = __import__(scriptname)
except:
print sys.exc_info()
print 'Cannot find ', scriptname, ' -- no coprocessing will be performed.'
sys.exit(1)
# [SC14-Catalyst] Co-Processing routine to be called at the end of each simulation time step
def coProcess(grid, time, step):
# initialize data description
datadescription = vtkPVCatalystPython.vtkCPDataDescription()
datadescription.SetTimeData(time, step)
datadescription.AddInput("input")
cpscript.RequestDataDescription(datadescription)
# to be continued ...
# Begin demo
from dolfin import *
# Print log messages only from the root process in parallel
parameters["std_out_all_processes"] = False;
# Load mesh from file
mesh = Mesh(DOLFIN_EXAMPLE_DATA_DIR+"/lshape.xml.gz")
# Define function spaces (P2-P1)
V = VectorFunctionSpace(mesh, "Lagrange", 2)
Q = FunctionSpace(mesh, "Lagrange", 1)
# Define trial and test functions
u = TrialFunction(V)
p = TrialFunction(Q)
v = TestFunction(V)
q = TestFunction(Q)
# Set parameter values
dt = 0.01
T = 3
nu = 0.01
# Define time-dependent pressure boundary condition
p_in = Expression("sin(3.0*t)", t=0.0)
# Define boundary conditions
noslip = DirichletBC(V, (0, 0),
"on_boundary && \
(x[0] < DOLFIN_EPS | x[1] < DOLFIN_EPS | \
(x[0] > 0.5 - DOLFIN_EPS && x[1] > 0.5 - DOLFIN_EPS))")
inflow = DirichletBC(Q, p_in, "x[1] > 1.0 - DOLFIN_EPS")
outflow = DirichletBC(Q, 0, "x[0] > 1.0 - DOLFIN_EPS")
bcu = [noslip]
bcp = [inflow, outflow]
# Create functions
u0 = Function(V)
u1 = Function(V)
p1 = Function(Q)
# Define coefficients
k = Constant(dt)
f = Constant((0, 0))
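# Chorin's splitting scheme, roughly in strong form (editorial summary):
#   1. tentative velocity:   (u* - u^n)/k + (u^n . grad) u^n - nu*laplace(u*) = f
#   2. pressure update:      laplace(p^{n+1}) = (1/k) div(u*)
#   3. velocity correction:  u^{n+1} = u* - k grad(p^{n+1})
# The variational forms below (F1, a2/L2, a3/L3) implement these three steps.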
# Tentative velocity step
F1 = (1/k)*inner(u - u0, v)*dx + inner(grad(u0)*u0, v)*dx + \
nu*inner(grad(u), grad(v))*dx - inner(f, v)*dx
a1 = lhs(F1)
L1 = rhs(F1)
# Pressure update
a2 = inner(grad(p), grad(q))*dx
L2 = -(1/k)*div(u1)*q*dx
# Velocity update
a3 = inner(u, v)*dx
L3 = inner(u1, v)*dx - k*inner(grad(p1), v)*dx
# Assemble matrices
A1 = assemble(a1)
A2 = assemble(a2)
A3 = assemble(a3)
# Use amg preconditioner if available
prec = "amg" if has_krylov_solver_preconditioner("amg") else "default"
# Create files for storing solution
ufile = File("results/velocity.pvd")
pfile = File("results/pressure.pvd")
# Time-stepping
maxtimestep = int(sys.argv[2])
tstep = 0
t = dt
while tstep < maxtimestep:
# Update pressure boundary condition
p_in.t = t
# Compute tentative velocity step
begin("Computing tentative velocity")
b1 = assemble(L1)
[bc.apply(A1, b1) for bc in bcu]
solve(A1, u1.vector(), b1, "gmres", "default")
end()
# Pressure correction
begin("Computing pressure correction")
b2 = assemble(L2)
[bc.apply(A2, b2) for bc in bcp]
solve(A2, p1.vector(), b2, "gmres", prec)
end()
# Velocity correction
begin("Computing velocity correction")
b3 = assemble(L3)
[bc.apply(A3, b3) for bc in bcu]
solve(A3, u1.vector(), b3, "gmres", "default")
end()
# Plot solution [SC14-Catalyst] Not anymore
# plot(p1, title="Pressure", rescale=True)
# plot(u1, title="Velocity", rescale=True)
# Save to file [SC14-Catalyst] Not anymore
# ufile << u1
# pfile << p1
# [SC14-Catalyst] convert solution to VTK grid
ugrid = None
# [SC14-Catalyst] trigger catalyst execution
coProcess(ugrid,t,tstep)
# Move to next time step
u0.assign(u1)
t += dt
tstep += 1
print "t =", t, "step =",tstep
# Hold plot [SC14-Catalyst] Not anymore
# interactive()
| gpl-3.0 | 4,628,511,660,132,762,000 | 26.552764 | 94 | 0.683932 | false |
frank2/paranoia | test/test_fundamentals.py | 1 | 2457 | #!/usr/bin/env python
import unittest
from paranoia.fundamentals import *
class FundamentalsModuleTest(unittest.TestCase):
def test_crt(self):
self.assertNotEqual(malloc, None)
self.assertNotEqual(realloc, None)
self.assertNotEqual(free, None)
self.assertNotEqual(memset, None)
self.assertNotEqual(memmove, None)
def test_alignment(self):
self.assertFalse(aligned(4, 8))
self.assertTrue(aligned(0, 8))
self.assertTrue(aligned(16, 8))
self.assertEqual(alignment_delta(4, 8), 4)
self.assertEqual(alignment_delta(2, 8), 6)
self.assertEqual(alignment_delta(0, 8), 0)
self.assertEqual(align(2, 8), 8)
self.assertEqual(align(12, 8), 16)
self.assertEqual(align(16, 8), 16)
def test_list_conversions(self):
self.assertEqual(bitlist_to_bytelist([1, 1, 0, 0, 1, 1, 0, 0]), [0b11001100])
self.assertEqual(bitlist_to_bytelist([1, 1, 0, 0]), [0b1100])
self.assertEqual(bitlist_to_bytelist([1, 1, 0, 0, 1, 1, 0, 0] * 4), [0b11001100] * 4)
self.assertEqual(bytelist_to_bitlist([0b11001100]), [1, 1, 0, 0, 1, 1, 0, 0])
self.assertEqual(bytelist_to_bitlist([0b11001100]*4), [1, 1, 0, 0, 1, 1, 0, 0] * 4)
self.assertEqual(bytelist_to_bitlist([0b1100]), [0, 0, 0, 0, 1, 1, 0, 0])
self.assertEqual(bitlist_to_numeric([1, 1, 0, 0]), 0xC)
self.assertEqual(bitlist_to_numeric([1, 1, 0, 0] * 2), 0xCC)
self.assertEqual(bitlist_to_numeric([1, 1, 0, 0] * 4), 0xCCCC)
self.assertEqual(numeric_to_bitlist(0xC), [1, 1, 0, 0])
self.assertEqual(numeric_to_bitlist(0xCC), [1, 1, 0, 0] * 2)
self.assertEqual(numeric_to_bitlist(0xCCCC), [1, 1, 0, 0] * 4)
def test_dict_merge(self):
left = {'a': 'b'}
right = {'a': 'b'}
dict_merge(left, right)
self.assertEqual({'a': 'b'}, left)
left = {'a': 'b'}
right = {'a': 'c'}
dict_merge(left, right)
self.assertEqual({'a': 'b'}, left)
left = {'a': 'b'}
right = {'c': 'd'}
dict_merge(left, right)
self.assertEqual({'a': 'b', 'c': 'd'}, left)
def test_string_address(self):
import ctypes
key = 'ADNU'
found_offset = ctypes.string_at(id(key), 256).index(key)
self.assertEqual(found_offset, string_offset)
self.assertEqual(key, ctypes.string_at(id(key)+found_offset))
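# --- illustrative reference versions (editorial sketch, not the paranoia API) ---
# Inferred from the assertions above: bit lists are most-significant-bit first.
def _bitlist_to_numeric_ref(bits):
    value = 0
    for bit in bits:
        value = (value << 1) | bit
    return value

def _numeric_to_bitlist_ref(value):
    bits = []
    while value:
        bits.insert(0, value & 1)
        value >>= 1
    return bits or [0]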
| gpl-3.0 | -2,910,718,512,257,623,600 | 35.132353 | 93 | 0.578755 | false |
jimi-c/ansible | lib/ansible/modules/cloud/amazon/ec2_instance.py | 1 | 67394 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ec2_instance
short_description: Create & manage EC2 instances
description:
- Gather facts about ec2 instances in AWS
version_added: "2.5"
author:
- Ryan Scott Brown, @ryansb
requirements: [ "boto3", "botocore" ]
options:
instance_ids:
description:
- If you specify one or more instance IDs, only instances that have the specified IDs are returned.
state:
description:
- Goal state for the instances
choices: [present, terminated, running, started, stopped, restarted, rebooted, absent]
default: present
wait:
description:
- Whether or not to wait for the desired state (use wait_timeout to customize this)
default: true
wait_timeout:
description:
- How long to wait (in seconds) for the instance to finish booting/terminating
default: 600
instance_type:
description:
- Instance type to use for the instance, see U(http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-types.html)
Only required when instance is not already present
default: t2.micro
user_data:
description:
- Opaque blob of data which is made available to the ec2 instance
tower_callback:
description:
- Preconfigured user-data to enable an instance to perform a Tower callback (Linux only).
- Mutually exclusive with I(user_data).
- For Windows instances, to enable remote access via Ansible set I(tower_callback.windows) to true, and optionally set an admin password.
- If using 'windows' and 'set_password', callback to Tower will not be performed but the instance will be ready to receive winrm connections from Ansible.
suboptions:
tower_address:
description:
- IP address or DNS name of Tower server. Must be accessible via this address from the VPC that this instance will be launched in.
job_template_id:
description:
- Either the integer ID of the Tower Job Template, or the name (name supported only for Tower 3.2+)
host_config_key:
description:
- Host configuration secret key generated by the Tower job template.
tags:
description:
- A hash/dictionary of tags to add to the new instance or to add/remove from an existing one.
purge_tags:
description:
- Delete any tags not specified in the task that are on the instance.
This means you have to specify all the desired tags on each task affecting an instance.
default: false
image:
description:
- An image to use for the instance. The ec2_ami_facts module may be used to retrieve images.
One of I(image) or I(image_id) is required when instance is not already present.
- Complex object containing I(image.id), I(image.ramdisk), and I(image.kernel).
- I(image.id) is the AMI ID.
- I(image.ramdisk) overrides the AMI's default ramdisk ID.
- I(image.kernel) is a string AKI to override the AMI kernel.
image_id:
description:
- I(ami) ID to use for the instance. One of I(image) or I(image_id) is required when instance is not already present.
- This is an alias for I(image.id).
security_groups:
description:
- A list of security group IDs or names (strings). Mutually exclusive with I(security_group).
security_group:
description:
- A security group ID or name. Mutually exclusive with I(security_groups).
name:
description:
- The Name tag for the instance.
vpc_subnet_id:
description:
- The subnet ID in which to launch the instance (VPC).
If none is provided, ec2_instance will choose the default subnet of the default VPC.
aliases: ['subnet_id']
network:
description:
- Either a dictionary containing the key 'interfaces' corresponding to a list of network interface IDs or
containing specifications for a single network interface.
- If specifications for a single network are given, accepted keys are assign_public_ip (bool),
private_ip_address (str), ipv6_addresses (list), source_dest_check (bool), description (str),
delete_on_termination (bool), device_index (int), groups (list of security group IDs),
private_ip_addresses (list), subnet_id (str).
- I(network.interfaces) should be a list of ENI IDs (strings) or a list of objects containing the key I(id).
- Use the ec2_eni module to create ENIs with special settings.
volumes:
description:
- A list of block device mappings, by default this will always use the AMI root device so the volumes option is primarily for adding more storage.
- A mapping contains the (optional) keys device_name, virtual_name, ebs.volume_type, ebs.volume_size, ebs.kms_key_id,
ebs.iops, and ebs.delete_on_termination.
- For more information about each parameter, see U(https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_BlockDeviceMapping.html)
launch_template:
description:
- The EC2 launch template to base instance configuration on.
- I(launch_template.id) the ID or the launch template (optional if name is specified)
- I(launch_template.name) the pretty name of the launch template (optional if id is specified)
- I(launch_template.version) the specific version of the launch template to use. If unspecified, the template default is chosen.
key_name:
description:
- Name of the SSH access key to assign to the instance - must exist in the region the instance is created.
availability_zone:
description:
- Specify an availability zone to use the default subnet in it.
- If no subnet, ENI, or availability zone is provided, the default subnet in the default VPC will be used in the first AZ (alphabetically sorted).
instance_initiated_shutdown_behavior:
description:
- Whether to stop or terminate an instance upon shutdown.
choices: ['stop', 'terminate']
tenancy:
description:
- What type of tenancy to allow an instance to use. Default is shared tenancy. Dedicated tenancy will incur additional charges.
choices: ['dedicated', 'default']
termination_protection:
description:
- Whether to enable termination protection.
This module will not terminate an instance with termination protection active, it must be turned off first.
cpu_credit_specification:
description:
- For T2 series instances, choose whether to allow increased charges to buy CPU credits if the default pool is depleted.
- Choose I(unlimited) to enable buying additional CPU credits.
choices: [unlimited, standard]
cpu_options:
description:
- Reduce the number of vCPU exposed to the instance.
- Those parameters can only be set at instance launch. The two suboptions threads_per_core and core_count are mandatory.
- See U(https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-optimize-cpu.html) for combinations available.
- Requires botocore >= 1.10.16
version_added: 2.7
suboptions:
threads_per_core:
description:
- Select the number of threads per core to enable. Use 1 to disable Intel Hyper-Threading or 2 to enable it.
choices: [1, 2]
required: true
core_count:
description:
- Set the number of cores to enable.
required: true
detailed_monitoring:
description:
- Whether to allow detailed CloudWatch metrics to be collected, enabling more detailed alerting.
ebs_optimized:
description:
- Whether the instance should use optimized EBS volumes, see U(http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSOptimized.html)
filters:
description:
- A dict of filters to apply when deciding whether existing instances match and should be altered. Each dict item
consists of a filter key and a filter value. See
U(http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeInstances.html)
for possible filters. Filter names and values are case sensitive.
By default, instances are filtered for counting by their "Name" tag, base AMI, state (running, by default), and
subnet ID. Any queryable filter can be used. Good candidates are specific tags, SSH keys, or security groups.
default: {"tag:Name": "<provided-Name-attribute>", "subnet-id": "<provided-or-default subnet>"}
instance_role:
description:
- The ARN or name of an EC2-enabled instance role to be used. If a name is not provided in arn format
then the ListInstanceProfiles permission must also be granted.
U(https://docs.aws.amazon.com/IAM/latest/APIReference/API_ListInstanceProfiles.html) If no full ARN is provided,
the role with a matching name will be used from the active AWS account.
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Terminate every running instance in a region. Use with EXTREME caution.
- ec2_instance:
state: absent
filters:
instance-state-name: running
# restart a particular instance by its ID
- ec2_instance:
state: restarted
instance_ids:
- i-12345678
# start an instance with a public IP address
- ec2_instance:
name: "public-compute-instance"
key_name: "prod-ssh-key"
vpc_subnet_id: subnet-5ca1ab1e
instance_type: c5.large
security_group: default
network:
assign_public_ip: true
image_id: ami-123456
tags:
Environment: Testing
# start an instance and have it begin a Tower callback on boot
- ec2_instance:
name: "tower-callback-test"
key_name: "prod-ssh-key"
vpc_subnet_id: subnet-5ca1ab1e
security_group: default
tower_callback:
# IP or hostname of tower server
tower_address: 1.2.3.4
job_template_id: 876
host_config_key: '[secret config key goes here]'
network:
assign_public_ip: true
image_id: ami-123456
cpu_credit_specification: unlimited
tags:
SomeThing: "A value"
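# The two tasks below are illustrative sketches based on the documented volumes,
# launch_template and cpu_options options; adjust IDs, names and sizes to your
# environment.

# start an instance with an extra gp2 EBS volume attached at launch
- ec2_instance:
    name: "compute-with-scratch-volume"
    key_name: "prod-ssh-key"
    vpc_subnet_id: subnet-5ca1ab1e
    instance_type: c5.large
    image_id: ami-123456
    volumes:
      - device_name: /dev/sdb
        ebs:
          volume_size: 20
          volume_type: gp2
          delete_on_termination: true

# start an instance from an existing launch template, limiting it to two cores
- ec2_instance:
    name: "from-launch-template"
    launch_template:
      name: my-launch-template
      version: "2"
    cpu_options:
      core_count: 2
      threads_per_core: 1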
'''
RETURN = '''
instances:
description: a list of ec2 instances
returned: always
type: complex
contains:
ami_launch_index:
description: The AMI launch index, which can be used to find this instance in the launch group.
returned: always
type: int
sample: 0
architecture:
description: The architecture of the image
returned: always
type: string
sample: x86_64
block_device_mappings:
description: Any block device mapping entries for the instance.
returned: always
type: complex
contains:
device_name:
description: The device name exposed to the instance (for example, /dev/sdh or xvdh).
returned: always
type: string
sample: /dev/sdh
ebs:
description: Parameters used to automatically set up EBS volumes when the instance is launched.
returned: always
type: complex
contains:
attach_time:
description: The time stamp when the attachment initiated.
returned: always
type: string
sample: "2017-03-23T22:51:24+00:00"
delete_on_termination:
description: Indicates whether the volume is deleted on instance termination.
returned: always
type: bool
sample: true
status:
description: The attachment state.
returned: always
type: string
sample: attached
volume_id:
description: The ID of the EBS volume
returned: always
type: string
sample: vol-12345678
client_token:
description: The idempotency token you provided when you launched the instance, if applicable.
returned: always
type: string
sample: mytoken
ebs_optimized:
description: Indicates whether the instance is optimized for EBS I/O.
returned: always
type: bool
sample: false
hypervisor:
description: The hypervisor type of the instance.
returned: always
type: string
sample: xen
iam_instance_profile:
description: The IAM instance profile associated with the instance, if applicable.
returned: always
type: complex
contains:
arn:
description: The Amazon Resource Name (ARN) of the instance profile.
returned: always
type: string
sample: "arn:aws:iam::000012345678:instance-profile/myprofile"
id:
description: The ID of the instance profile
returned: always
type: string
sample: JFJ397FDG400FG9FD1N
image_id:
description: The ID of the AMI used to launch the instance.
returned: always
type: string
sample: ami-0011223344
instance_id:
description: The ID of the instance.
returned: always
type: string
sample: i-012345678
instance_type:
description: The instance type size of the running instance.
returned: always
type: string
sample: t2.micro
key_name:
description: The name of the key pair, if this instance was launched with an associated key pair.
returned: always
type: string
sample: my-key
launch_time:
description: The time the instance was launched.
returned: always
type: string
sample: "2017-03-23T22:51:24+00:00"
monitoring:
description: The monitoring for the instance.
returned: always
type: complex
contains:
state:
description: Indicates whether detailed monitoring is enabled. Otherwise, basic monitoring is enabled.
returned: always
type: string
sample: disabled
network_interfaces:
description: One or more network interfaces for the instance.
returned: always
type: complex
contains:
association:
description: The association information for an Elastic IPv4 associated with the network interface.
returned: always
type: complex
contains:
ip_owner_id:
description: The ID of the owner of the Elastic IP address.
returned: always
type: string
sample: amazon
public_dns_name:
description: The public DNS name.
returned: always
type: string
sample: ""
public_ip:
description: The public IP address or Elastic IP address bound to the network interface.
returned: always
type: string
sample: 1.2.3.4
attachment:
description: The network interface attachment.
returned: always
type: complex
contains:
attach_time:
description: The time stamp when the attachment initiated.
returned: always
type: string
sample: "2017-03-23T22:51:24+00:00"
attachment_id:
description: The ID of the network interface attachment.
returned: always
type: string
sample: eni-attach-3aff3f
delete_on_termination:
description: Indicates whether the network interface is deleted when the instance is terminated.
returned: always
type: bool
sample: true
device_index:
description: The index of the device on the instance for the network interface attachment.
returned: always
type: int
sample: 0
status:
description: The attachment state.
returned: always
type: string
sample: attached
description:
description: The description.
returned: always
type: string
sample: My interface
groups:
description: One or more security groups.
returned: always
type: complex
contains:
- group_id:
description: The ID of the security group.
returned: always
type: string
sample: sg-abcdef12
group_name:
description: The name of the security group.
returned: always
type: string
sample: mygroup
ipv6_addresses:
description: One or more IPv6 addresses associated with the network interface.
returned: always
type: complex
contains:
- ipv6_address:
description: The IPv6 address.
returned: always
type: string
sample: "2001:0db8:85a3:0000:0000:8a2e:0370:7334"
mac_address:
description: The MAC address.
returned: always
type: string
sample: "00:11:22:33:44:55"
network_interface_id:
description: The ID of the network interface.
returned: always
type: string
sample: eni-01234567
owner_id:
description: The AWS account ID of the owner of the network interface.
returned: always
type: string
sample: 01234567890
private_ip_address:
description: The IPv4 address of the network interface within the subnet.
returned: always
type: string
sample: 10.0.0.1
private_ip_addresses:
description: The private IPv4 addresses associated with the network interface.
returned: always
type: complex
contains:
- association:
description: The association information for an Elastic IP address (IPv4) associated with the network interface.
returned: always
type: complex
contains:
ip_owner_id:
description: The ID of the owner of the Elastic IP address.
returned: always
type: string
sample: amazon
public_dns_name:
description: The public DNS name.
returned: always
type: string
sample: ""
public_ip:
description: The public IP address or Elastic IP address bound to the network interface.
returned: always
type: string
sample: 1.2.3.4
primary:
description: Indicates whether this IPv4 address is the primary private IP address of the network interface.
returned: always
type: bool
sample: true
private_ip_address:
description: The private IPv4 address of the network interface.
returned: always
type: string
sample: 10.0.0.1
source_dest_check:
description: Indicates whether source/destination checking is enabled.
returned: always
type: bool
sample: true
status:
description: The status of the network interface.
returned: always
type: string
sample: in-use
subnet_id:
description: The ID of the subnet for the network interface.
returned: always
type: string
sample: subnet-0123456
vpc_id:
description: The ID of the VPC for the network interface.
returned: always
type: string
sample: vpc-0123456
placement:
description: The location where the instance launched, if applicable.
returned: always
type: complex
contains:
availability_zone:
description: The Availability Zone of the instance.
returned: always
type: string
sample: ap-southeast-2a
group_name:
description: The name of the placement group the instance is in (for cluster compute instances).
returned: always
type: string
sample: ""
tenancy:
description: The tenancy of the instance (if the instance is running in a VPC).
returned: always
type: string
sample: default
private_dns_name:
description: The private DNS name.
returned: always
type: string
sample: ip-10-0-0-1.ap-southeast-2.compute.internal
private_ip_address:
description: The IPv4 address of the network interface within the subnet.
returned: always
type: string
sample: 10.0.0.1
product_codes:
description: One or more product codes.
returned: always
type: complex
contains:
- product_code_id:
description: The product code.
returned: always
type: string
sample: aw0evgkw8ef3n2498gndfgasdfsd5cce
product_code_type:
description: The type of product code.
returned: always
type: string
sample: marketplace
public_dns_name:
description: The public DNS name assigned to the instance.
returned: always
type: string
sample:
public_ip_address:
description: The public IPv4 address assigned to the instance
returned: always
type: string
sample: 52.0.0.1
root_device_name:
description: The device name of the root device
returned: always
type: string
sample: /dev/sda1
root_device_type:
description: The type of root device used by the AMI.
returned: always
type: string
sample: ebs
security_groups:
description: One or more security groups for the instance.
returned: always
type: complex
contains:
- group_id:
description: The ID of the security group.
returned: always
type: string
sample: sg-0123456
- group_name:
description: The name of the security group.
returned: always
type: string
sample: my-security-group
network.source_dest_check:
description: Indicates whether source/destination checking is enabled.
returned: always
type: bool
sample: true
state:
description: The current state of the instance.
returned: always
type: complex
contains:
code:
description: The low byte represents the state.
returned: always
type: int
sample: 16
name:
description: The name of the state.
returned: always
type: string
sample: running
state_transition_reason:
description: The reason for the most recent state transition.
returned: always
type: string
sample:
subnet_id:
description: The ID of the subnet in which the instance is running.
returned: always
type: string
sample: subnet-00abcdef
tags:
description: Any tags assigned to the instance.
returned: always
type: dict
sample:
virtualization_type:
description: The type of virtualization of the AMI.
returned: always
type: string
sample: hvm
vpc_id:
description: The ID of the VPC the instance is in.
returned: always
type: dict
sample: vpc-0011223344
'''
import re
import uuid
import string
import textwrap
import time
from collections import namedtuple
try:
import boto3
import botocore.exceptions
except ImportError:
pass
from ansible.module_utils.six import text_type, string_types
from ansible.module_utils.six.moves.urllib import parse as urlparse
from ansible.module_utils._text import to_bytes, to_native
import ansible.module_utils.ec2 as ec2_utils
from ansible.module_utils.ec2 import (boto3_conn,
ec2_argument_spec,
get_aws_connection_info,
AWSRetry,
ansible_dict_to_boto3_filter_list,
compare_aws_tags,
boto3_tag_list_to_ansible_dict,
ansible_dict_to_boto3_tag_list,
camel_dict_to_snake_dict)
from ansible.module_utils.aws.core import AnsibleAWSModule
module = None
def tower_callback_script(tower_conf, windows=False, passwd=None):
script_url = 'https://raw.githubusercontent.com/ansible/ansible/devel/examples/scripts/ConfigureRemotingForAnsible.ps1'
if windows and passwd is not None:
script_tpl = """<powershell>
$admin = [adsi]("WinNT://./administrator, user")
$admin.PSBase.Invoke("SetPassword", "{PASS}")
Invoke-Expression ((New-Object System.Net.Webclient).DownloadString('{SCRIPT}'))
</powershell>
"""
return to_native(textwrap.dedent(script_tpl).format(PASS=passwd, SCRIPT=script_url))
elif windows and passwd is None:
script_tpl = """<powershell>
$admin = [adsi]("WinNT://./administrator, user")
Invoke-Expression ((New-Object System.Net.Webclient).DownloadString('{SCRIPT}'))
</powershell>
"""
return to_native(textwrap.dedent(script_tpl).format(PASS=passwd, SCRIPT=script_url))
elif not windows:
for p in ['tower_address', 'job_template_id', 'host_config_key']:
if p not in tower_conf:
module.fail_json(msg="Incomplete tower_callback configuration. tower_callback.{0} not set.".format(p))
if isinstance(tower_conf['job_template_id'], string_types):
tower_conf['job_template_id'] = urlparse.quote(tower_conf['job_template_id'])
tpl = string.Template(textwrap.dedent("""#!/bin/bash
set -x
retry_attempts=10
attempt=0
while [[ $attempt -lt $retry_attempts ]]
do
status_code=`curl --max-time 10 -v -k -s -i \
--data "host_config_key=${host_config_key}" \
'https://${tower_address}/api/v2/job_templates/${template_id}/callback/' \
| head -n 1 \
| awk '{print $2}'`
if [[ $status_code == 404 ]]
then
status_code=`curl --max-time 10 -v -k -s -i \
--data "host_config_key=${host_config_key}" \
'https://${tower_address}/api/v1/job_templates/${template_id}/callback/' \
| head -n 1 \
| awk '{print $2}'`
# fall back to using V1 API for Tower 3.1 and below, since v2 API will always 404
fi
if [[ $status_code == 201 ]]
then
exit 0
fi
attempt=$(( attempt + 1 ))
echo "$${status_code} received... retrying in 1 minute. (Attempt $${attempt})"
sleep 60
done
exit 1
"""))
return tpl.safe_substitute(tower_address=tower_conf['tower_address'],
template_id=tower_conf['job_template_id'],
host_config_key=tower_conf['host_config_key'])
raise NotImplementedError("Only windows with remote-prep or non-windows with tower job callback supported so far.")
@AWSRetry.jittered_backoff()
def manage_tags(match, new_tags, purge_tags, ec2):
changed = False
old_tags = boto3_tag_list_to_ansible_dict(match['Tags'])
tags_to_set, tags_to_delete = compare_aws_tags(
old_tags, new_tags,
purge_tags=purge_tags,
)
if tags_to_set:
ec2.create_tags(
Resources=[match['InstanceId']],
Tags=ansible_dict_to_boto3_tag_list(tags_to_set))
changed |= True
if tags_to_delete:
delete_with_current_values = dict((k, old_tags.get(k)) for k in tags_to_delete)
ec2.delete_tags(
Resources=[match['InstanceId']],
Tags=ansible_dict_to_boto3_tag_list(delete_with_current_values))
changed |= True
return changed
def build_volume_spec(params):
volumes = params.get('volumes') or []
return [ec2_utils.snake_dict_to_camel_dict(v, capitalize_first=True) for v in volumes]
def add_or_update_instance_profile(instance, desired_profile_name):
instance_profile_setting = instance.get('IamInstanceProfile')
if instance_profile_setting and desired_profile_name:
if desired_profile_name in (instance_profile_setting.get('Name'), instance_profile_setting.get('Arn')):
# great, the profile we asked for is what's there
return False
else:
desired_arn = determine_iam_role(desired_profile_name)
if instance_profile_setting.get('Arn') == desired_arn:
return False
# update association
ec2 = module.client('ec2')
try:
association = ec2.describe_iam_instance_profile_associations(Filters=[{'Name': 'instance-id', 'Values': [instance['InstanceId']]}])
except botocore.exceptions.ClientError as e:
# check for InvalidAssociationID.NotFound
module.fail_json_aws(e, "Could not find instance profile association")
try:
resp = ec2.replace_iam_instance_profile_association(
AssociationId=association['IamInstanceProfileAssociations'][0]['AssociationId'],
IamInstanceProfile={'Arn': determine_iam_role(desired_profile_name)}
)
return True
except botocore.exceptions.ClientError as e:
module.fail_json_aws(e, "Could not associate instance profile")
if not instance_profile_setting and desired_profile_name:
# create association
ec2 = module.client('ec2')
try:
resp = ec2.associate_iam_instance_profile(
IamInstanceProfile={'Arn': determine_iam_role(desired_profile_name)},
InstanceId=instance['InstanceId']
)
return True
except botocore.exceptions.ClientError as e:
module.fail_json_aws(e, "Could not associate new instance profile")
return False
def build_network_spec(params, ec2=None):
"""
Returns list of interfaces [complex]
Interface type: {
'AssociatePublicIpAddress': True|False,
'DeleteOnTermination': True|False,
'Description': 'string',
'DeviceIndex': 123,
'Groups': [
'string',
],
'Ipv6AddressCount': 123,
'Ipv6Addresses': [
{
'Ipv6Address': 'string'
},
],
'NetworkInterfaceId': 'string',
'PrivateIpAddress': 'string',
'PrivateIpAddresses': [
{
'Primary': True|False,
'PrivateIpAddress': 'string'
},
],
'SecondaryPrivateIpAddressCount': 123,
'SubnetId': 'string'
},
"""
if ec2 is None:
ec2 = module.client('ec2')
interfaces = []
network = params.get('network') or {}
if not network.get('interfaces'):
# they only specified one interface
spec = {
'DeviceIndex': 0,
}
if network.get('assign_public_ip') is not None:
spec['AssociatePublicIpAddress'] = network['assign_public_ip']
if params.get('vpc_subnet_id'):
spec['SubnetId'] = params['vpc_subnet_id']
else:
default_vpc = get_default_vpc(ec2)
if default_vpc is None:
module.fail_json(
msg="No default subnet could be found - you must include a VPC subnet ID (vpc_subnet_id parameter) to create an instance")
else:
sub = get_default_subnet(ec2, default_vpc)
spec['SubnetId'] = sub['SubnetId']
if network.get('private_ip_address'):
spec['PrivateIpAddress'] = network['private_ip_address']
if params.get('security_group') or params.get('security_groups'):
groups = discover_security_groups(
group=params.get('security_group'),
groups=params.get('security_groups'),
subnet_id=spec['SubnetId'],
ec2=ec2
)
spec['Groups'] = [g['GroupId'] for g in groups]
# TODO more special snowflake network things
return [spec]
# handle list of `network.interfaces` options
for idx, interface_params in enumerate(network.get('interfaces', [])):
spec = {
'DeviceIndex': idx,
}
if isinstance(interface_params, string_types):
# naive case where user gave
# network_interfaces: [eni-1234, eni-4567, ....]
# put into normal data structure so we don't dupe code
interface_params = {'id': interface_params}
if interface_params.get('id') is not None:
# if an ID is provided, we don't want to set any other parameters.
spec['NetworkInterfaceId'] = interface_params['id']
interfaces.append(spec)
continue
spec['DeleteOnTermination'] = interface_params.get('delete_on_termination', True)
if interface_params.get('ipv6_addresses'):
spec['Ipv6Addresses'] = [{'Ipv6Address': a} for a in interface_params.get('ipv6_addresses', [])]
if interface_params.get('private_ip_address'):
spec['PrivateIpAddress'] = interface_params.get('private_ip_address')
if interface_params.get('description'):
spec['Description'] = interface_params.get('description')
if interface_params.get('subnet_id', params.get('vpc_subnet_id')):
spec['SubnetId'] = interface_params.get('subnet_id', params.get('vpc_subnet_id'))
elif not spec.get('SubnetId') and not interface_params['id']:
# TODO grab a subnet from default VPC
raise ValueError('Failed to assign subnet to interface {0}'.format(interface_params))
interfaces.append(spec)
return interfaces
def warn_if_public_ip_assignment_changed(instance):
# This is a non-modifiable attribute.
assign_public_ip = (module.params.get('network') or {}).get('assign_public_ip')
if assign_public_ip is None:
return
# Check that public ip assignment is the same and warn if not
public_dns_name = instance.get('PublicDnsName')
if (public_dns_name and not assign_public_ip) or (assign_public_ip and not public_dns_name):
module.warn(
"Unable to modify public ip assignment to {0} for instance {1}. "
"Whether or not to assign a public IP is determined during instance creation.".format(
assign_public_ip, instance['InstanceId']))
def warn_if_cpu_options_changed(instance):
# This is a non-modifiable attribute.
cpu_options = module.params.get('cpu_options')
if cpu_options is None:
return
# Check that the CpuOptions set are the same and warn if not
core_count_curr = instance['CpuOptions'].get('CoreCount')
core_count = cpu_options.get('core_count')
threads_per_core_curr = instance['CpuOptions'].get('ThreadsPerCore')
threads_per_core = cpu_options.get('threads_per_core')
if core_count_curr != core_count:
module.warn(
"Unable to modify core_count from {0} to {1}. "
"Assigning a number of core is determinted during instance creation".format(
core_count_curr, core_count))
if threads_per_core_curr != threads_per_core:
module.warn(
"Unable to modify threads_per_core from {0} to {1}. "
"Assigning a number of threads per core is determined during instance creation.".format(
threads_per_core_curr, threads_per_core))
def discover_security_groups(group, groups, parent_vpc_id=None, subnet_id=None, ec2=None):
if ec2 is None:
ec2 = module.client('ec2')
if subnet_id is not None:
try:
sub = ec2.describe_subnets(SubnetIds=[subnet_id])
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] == 'InvalidGroup.NotFound':
module.fail_json(
"Could not find subnet {0} to associate security groups. Please check the vpc_subnet_id and security_groups parameters.".format(
subnet_id
)
)
module.fail_json_aws(e, msg="Error while searching for subnet {0} parent VPC.".format(subnet_id))
except botocore.exceptions.BotoCoreError as e:
module.fail_json_aws(e, msg="Error while searching for subnet {0} parent VPC.".format(subnet_id))
parent_vpc_id = sub['Subnets'][0]['VpcId']
vpc = {
'Name': 'vpc-id',
'Values': [parent_vpc_id]
}
# because filter lists are AND in the security groups API,
# make two separate requests for groups by ID and by name
id_filters = [vpc]
name_filters = [vpc]
if group:
name_filters.append(
dict(
Name='group-name',
Values=[group]
)
)
if group.startswith('sg-'):
id_filters.append(
dict(
Name='group-id',
Values=[group]
)
)
if groups:
name_filters.append(
dict(
Name='group-name',
Values=groups
)
)
if [g for g in groups if g.startswith('sg-')]:
id_filters.append(
dict(
Name='group-id',
Values=[g for g in groups if g.startswith('sg-')]
)
)
found_groups = []
for f_set in (id_filters, name_filters):
if len(f_set) > 1:
found_groups.extend(ec2.get_paginator(
'describe_security_groups'
).paginate(
Filters=f_set
).search('SecurityGroups[]'))
return list(dict((g['GroupId'], g) for g in found_groups).values())
def build_top_level_options(params):
spec = {}
if params.get('image_id'):
spec['ImageId'] = params['image_id']
elif isinstance(params.get('image'), dict):
image = params.get('image', {})
spec['ImageId'] = image.get('id')
if 'ramdisk' in image:
spec['RamdiskId'] = image['ramdisk']
if 'kernel' in image:
spec['KernelId'] = image['kernel']
if not spec.get('ImageId') and not params.get('launch_template'):
module.fail_json(msg="You must include an image_id or image.id parameter to create an instance, or use a launch_template.")
if params.get('key_name') is not None:
spec['KeyName'] = params.get('key_name')
if params.get('user_data') is not None:
spec['UserData'] = to_native(params.get('user_data'))
elif params.get('tower_callback') is not None:
spec['UserData'] = tower_callback_script(
tower_conf=params.get('tower_callback'),
windows=params.get('tower_callback').get('windows', False),
passwd=params.get('tower_callback').get('set_password'),
)
if params.get('launch_template') is not None:
spec['LaunchTemplate'] = {}
if not (params.get('launch_template').get('id') or params.get('launch_template').get('name')):
module.fail_json(msg="Could not create instance with launch template. Either launch_template.name or launch_template.id parameters are required")
if params.get('launch_template').get('id') is not None:
spec['LaunchTemplate']['LaunchTemplateId'] = params.get('launch_template').get('id')
if params.get('launch_template').get('name') is not None:
spec['LaunchTemplate']['LaunchTemplateName'] = params.get('launch_template').get('name')
if params.get('launch_template').get('version') is not None:
spec['LaunchTemplate']['Version'] = to_native(params.get('launch_template').get('version'))
if params.get('detailed_monitoring', False):
spec['Monitoring'] = {'Enabled': True}
if params.get('cpu_credit_specification') is not None:
spec['CreditSpecification'] = {'CpuCredits': params.get('cpu_credit_specification')}
if params.get('tenancy') is not None:
spec['Placement'] = {'Tenancy': params.get('tenancy')}
if (params.get('network') or {}).get('ebs_optimized') is not None:
spec['EbsOptimized'] = params['network'].get('ebs_optimized')
if params.get('instance_initiated_shutdown_behavior'):
spec['InstanceInitiatedShutdownBehavior'] = params.get('instance_initiated_shutdown_behavior')
if params.get('termination_protection') is not None:
spec['DisableApiTermination'] = params.get('termination_protection')
if params.get('cpu_options') is not None:
spec['CpuOptions'] = {}
spec['CpuOptions']['ThreadsPerCore'] = params.get('cpu_options').get('threads_per_core')
spec['CpuOptions']['CoreCount'] = params.get('cpu_options').get('core_count')
return spec
def build_instance_tags(params, propagate_tags_to_volumes=True):
tags = params.get('tags', {})
if params.get('name') is not None:
if tags is None:
tags = {}
tags['Name'] = params.get('name')
return [
{
'ResourceType': 'volume',
'Tags': ansible_dict_to_boto3_tag_list(tags),
},
{
'ResourceType': 'instance',
'Tags': ansible_dict_to_boto3_tag_list(tags),
},
]
def build_run_instance_spec(params, ec2=None):
if ec2 is None:
ec2 = module.client('ec2')
spec = dict(
ClientToken=uuid.uuid4().hex,
MaxCount=1,
MinCount=1,
)
# network parameters
spec['NetworkInterfaces'] = build_network_spec(params, ec2)
spec['BlockDeviceMappings'] = build_volume_spec(params)
spec.update(**build_top_level_options(params))
spec['TagSpecifications'] = build_instance_tags(params)
# IAM profile
if params.get('instance_role'):
spec['IamInstanceProfile'] = dict(Arn=determine_iam_role(params.get('instance_role')))
spec['InstanceType'] = params['instance_type']
return spec
def await_instances(ids, state='OK'):
if not module.params.get('wait', True):
# the user asked not to wait for anything
return
state_opts = {
'OK': 'instance_status_ok',
'STOPPED': 'instance_stopped',
'TERMINATED': 'instance_terminated',
'EXISTS': 'instance_exists',
'RUNNING': 'instance_running',
}
if state not in state_opts:
module.fail_json(msg="Cannot wait for state {0}, invalid state".format(state))
waiter = module.client('ec2').get_waiter(state_opts[state])
try:
waiter.wait(
InstanceIds=ids,
WaiterConfig={
'Delay': 15,
'MaxAttempts': module.params.get('wait_timeout', 600) // 15,
}
)
except botocore.exceptions.WaiterConfigError as e:
module.fail_json(msg="{0}. Error waiting for instances {1} to reach state {2}".format(
to_native(e), ', '.join(ids), state))
except botocore.exceptions.WaiterError as e:
module.warn("Instances {0} took too long to reach state {1}. {2}".format(
', '.join(ids), state, to_native(e)))
def diff_instance_and_params(instance, params, ec2=None, skip=None):
"""boto3 instance obj, module params"""
if ec2 is None:
ec2 = module.client('ec2')
if skip is None:
skip = []
changes_to_apply = []
id_ = instance['InstanceId']
ParamMapper = namedtuple('ParamMapper', ['param_key', 'instance_key', 'attribute_name', 'add_value'])
def value_wrapper(v):
return {'Value': v}
param_mappings = [
ParamMapper('ebs_optimized', 'EbsOptimized', 'ebsOptimized', value_wrapper),
ParamMapper('termination_protection', 'DisableApiTermination', 'disableApiTermination', value_wrapper),
# user data is an immutable property
# ParamMapper('user_data', 'UserData', 'userData', value_wrapper),
]
for mapping in param_mappings:
if params.get(mapping.param_key) is not None and mapping.instance_key not in skip:
value = ec2.describe_instance_attribute(Attribute=mapping.attribute_name, InstanceId=id_)
if params.get(mapping.param_key) is not None and value[mapping.instance_key]['Value'] != params.get(mapping.param_key):
arguments = dict(
InstanceId=instance['InstanceId'],
# Attribute=mapping.attribute_name,
)
arguments[mapping.instance_key] = mapping.add_value(params.get(mapping.param_key))
changes_to_apply.append(arguments)
if (params.get('network') or {}).get('source_dest_check') is not None:
# network.source_dest_check is nested, so needs to be treated separately
check = bool(params.get('network').get('source_dest_check'))
if instance['SourceDestCheck'] != check:
changes_to_apply.append(dict(
InstanceId=instance['InstanceId'],
SourceDestCheck={'Value': check},
))
return changes_to_apply
def change_network_attachments(instance, params, ec2):
if (params.get('network') or {}).get('interfaces') is not None:
new_ids = []
for inty in params.get('network').get('interfaces'):
if isinstance(inty, dict) and 'id' in inty:
new_ids.append(inty['id'])
elif isinstance(inty, string_types):
new_ids.append(inty)
# network.interfaces can create the need to attach new interfaces
old_ids = [inty['NetworkInterfaceId'] for inty in instance['NetworkInterfaces']]
to_attach = set(new_ids) - set(old_ids)
for eni_id in to_attach:
ec2.attach_network_interface(
DeviceIndex=new_ids.index(eni_id),
InstanceId=instance['InstanceId'],
NetworkInterfaceId=eni_id,
)
return bool(len(to_attach))
return False
def find_instances(ec2, ids=None, filters=None):
paginator = ec2.get_paginator('describe_instances')
if ids:
return list(paginator.paginate(
InstanceIds=ids,
).search('Reservations[].Instances[]'))
elif filters is None:
module.fail_json(msg="No filters provided when they were required")
elif filters is not None:
for key in filters.keys():
if not key.startswith("tag:"):
filters[key.replace("_", "-")] = filters.pop(key)
return list(paginator.paginate(
Filters=ansible_dict_to_boto3_filter_list(filters)
).search('Reservations[].Instances[]'))
return []
@AWSRetry.jittered_backoff()
def get_default_vpc(ec2):
vpcs = ec2.describe_vpcs(Filters=ansible_dict_to_boto3_filter_list({'isDefault': 'true'}))
if len(vpcs.get('Vpcs', [])):
return vpcs.get('Vpcs')[0]
return None
@AWSRetry.jittered_backoff()
def get_default_subnet(ec2, vpc, availability_zone=None):
subnets = ec2.describe_subnets(
Filters=ansible_dict_to_boto3_filter_list({
'vpc-id': vpc['VpcId'],
'state': 'available',
'default-for-az': 'true',
})
)
if len(subnets.get('Subnets', [])):
if availability_zone is not None:
subs_by_az = dict((subnet['AvailabilityZone'], subnet) for subnet in subnets.get('Subnets'))
if availability_zone in subs_by_az:
return subs_by_az[availability_zone]
# to have a deterministic sorting order, we sort by AZ so we'll always pick the `a` subnet first
# there can only be one default-for-az subnet per AZ, so the AZ key is always unique in this list
by_az = sorted(subnets.get('Subnets'), key=lambda s: s['AvailabilityZone'])
return by_az[0]
return None
def ensure_instance_state(state, ec2=None):
if ec2 is None:
ec2 = module.client('ec2')
if state in ('running', 'started'):
changed, failed, instances = change_instance_state(filters=module.params.get('filters'), desired_state='RUNNING')
if failed:
module.fail_json(
msg="Unable to start instances",
reboot_success=list(changed),
reboot_failed=failed)
module.exit_json(
msg='Instances started',
reboot_success=list(changed),
changed=bool(len(changed)),
reboot_failed=[],
instances=[pretty_instance(i) for i in instances],
)
elif state in ('restarted', 'rebooted'):
changed, failed, instances = change_instance_state(
filters=module.params.get('filters'),
desired_state='STOPPED')
changed, failed, instances = change_instance_state(
filters=module.params.get('filters'),
desired_state='RUNNING')
if failed:
module.fail_json(
msg="Unable to restart instances",
reboot_success=list(changed),
reboot_failed=failed)
module.exit_json(
msg='Instances restarted',
reboot_success=list(changed),
changed=bool(len(changed)),
reboot_failed=[],
instances=[pretty_instance(i) for i in instances],
)
elif state in ('stopped',):
changed, failed, instances = change_instance_state(
filters=module.params.get('filters'),
desired_state='STOPPED')
if failed:
module.fail_json(
msg="Unable to stop instances",
stop_success=list(changed),
stop_failed=failed)
module.exit_json(
msg='Instances stopped',
stop_success=list(changed),
changed=bool(len(changed)),
stop_failed=[],
instances=[pretty_instance(i) for i in instances],
)
elif state in ('absent', 'terminated'):
terminated, terminate_failed, instances = change_instance_state(
filters=module.params.get('filters'),
desired_state='TERMINATED')
if terminate_failed:
module.fail_json(
msg="Unable to terminate instances",
terminate_success=list(terminated),
terminate_failed=terminate_failed)
module.exit_json(
msg='Instances terminated',
terminate_success=list(terminated),
changed=bool(len(terminated)),
terminate_failed=[],
instances=[pretty_instance(i) for i in instances],
)
@AWSRetry.jittered_backoff()
def change_instance_state(filters, desired_state, ec2=None):
"""Takes STOPPED/RUNNING/TERMINATED"""
if ec2 is None:
ec2 = module.client('ec2')
changed = set()
instances = find_instances(ec2, filters=filters)
to_change = set(i['InstanceId'] for i in instances)
unchanged = set()
for inst in instances:
try:
if desired_state == 'TERMINATED':
# TODO use a client-token to prevent double-sends of these start/stop/terminate commands
# https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html
resp = ec2.terminate_instances(InstanceIds=[inst['InstanceId']])
[changed.add(i['InstanceId']) for i in resp['TerminatingInstances']]
if desired_state == 'STOPPED':
if inst['State']['Name'] == 'stopping':
unchanged.add(inst['InstanceId'])
continue
resp = ec2.stop_instances(InstanceIds=[inst['InstanceId']])
[changed.add(i['InstanceId']) for i in resp['StoppingInstances']]
if desired_state == 'RUNNING':
resp = ec2.start_instances(InstanceIds=[inst['InstanceId']])
[changed.add(i['InstanceId']) for i in resp['StartingInstances']]
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError):
# we don't care about exceptions here, as we'll fail out if any instances failed to terminate
pass
if changed:
await_instances(ids=list(changed) + list(unchanged), state=desired_state)
change_failed = list(to_change - changed)
instances = find_instances(ec2, ids=list(to_change))
return changed, change_failed, instances
def pretty_instance(i):
instance = camel_dict_to_snake_dict(i, ignore_list=['Tags'])
instance['tags'] = boto3_tag_list_to_ansible_dict(i['Tags'])
return instance
def determine_iam_role(name_or_arn):
if re.match(r'^arn:aws:iam::\d+:instance-profile/[\w+=/,.@-]+$', name_or_arn):
return name_or_arn
iam = module.client('iam', retry_decorator=AWSRetry.jittered_backoff())
try:
role = iam.get_instance_profile(InstanceProfileName=name_or_arn, aws_retry=True)
return role['InstanceProfile']['Arn']
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] == 'NoSuchEntity':
module.fail_json_aws(e, msg="Could not find instance_role {0}".format(name_or_arn))
module.fail_json_aws(e, msg="An error occurred while searching for instance_role {0}. Please try supplying the full ARN.".format(name_or_arn))
def handle_existing(existing_matches, changed, ec2, state):
if state in ('running', 'started') and [i for i in existing_matches if i['State']['Name'] != 'running']:
ins_changed, failed, instances = change_instance_state(filters=module.params.get('filters'), desired_state='RUNNING')
module.exit_json(
changed=bool(len(ins_changed)) or changed,
instances=[pretty_instance(i) for i in instances],
instance_ids=[i['InstanceId'] for i in instances],
)
changes = diff_instance_and_params(existing_matches[0], module.params)
for c in changes:
ec2.modify_instance_attribute(**c)
changed |= bool(changes)
changed |= add_or_update_instance_profile(existing_matches[0], module.params.get('instance_role'))
changed |= change_network_attachments(existing_matches[0], module.params, ec2)
altered = find_instances(ec2, ids=[i['InstanceId'] for i in existing_matches])
module.exit_json(
changed=bool(len(changes)) or changed,
instances=[pretty_instance(i) for i in altered],
instance_ids=[i['InstanceId'] for i in altered],
changes=changes,
)
def ensure_present(existing_matches, changed, ec2, state):
if len(existing_matches):
try:
handle_existing(existing_matches, changed, ec2, state)
except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:
module.fail_json_aws(
e, msg="Failed to handle existing instances {0}".format(', '.join([i['InstanceId'] for i in existing_matches])),
# instances=[pretty_instance(i) for i in existing_matches],
# instance_ids=[i['InstanceId'] for i in existing_matches],
)
try:
instance_spec = build_run_instance_spec(module.params)
instance_response = run_instances(ec2, **instance_spec)
instances = instance_response['Instances']
instance_ids = [i['InstanceId'] for i in instances]
for ins in instances:
changes = diff_instance_and_params(ins, module.params, skip=['UserData', 'EbsOptimized'])
for c in changes:
try:
AWSRetry.jittered_backoff()(ec2.modify_instance_attribute)(**c)
except botocore.exceptions.ClientError as e:
module.fail_json_aws(e, msg="Could not apply change {0} to new instance.".format(str(c)))
await_instances(instance_ids)
instances = ec2.get_paginator('describe_instances').paginate(
InstanceIds=instance_ids
).search('Reservations[].Instances[]')
module.exit_json(
changed=True,
instances=[pretty_instance(i) for i in instances],
instance_ids=instance_ids,
spec=instance_spec,
)
except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:
module.fail_json_aws(e, msg="Failed to create new EC2 instance")
@AWSRetry.jittered_backoff()
def run_instances(ec2, **instance_spec):
try:
return ec2.run_instances(**instance_spec)
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] == 'InvalidParameterValue' and "Invalid IAM Instance Profile ARN" in e.response['Error']['Message']:
# If the instance profile has just been created, it takes some time to be visible by ec2
# So we wait 10 second and retry the run_instances
time.sleep(10)
return ec2.run_instances(**instance_spec)
else:
raise e
def main():
global module
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state=dict(default='present', choices=['present', 'started', 'running', 'stopped', 'restarted', 'rebooted', 'terminated', 'absent']),
wait=dict(default=True, type='bool'),
wait_timeout=dict(default=600, type='int'),
# count=dict(default=1, type='int'),
image=dict(type='dict'),
image_id=dict(type='str'),
instance_type=dict(default='t2.micro', type='str'),
user_data=dict(type='str'),
tower_callback=dict(type='dict'),
ebs_optimized=dict(type='bool'),
vpc_subnet_id=dict(type='str', aliases=['subnet_id']),
availability_zone=dict(type='str'),
security_groups=dict(default=[], type='list'),
security_group=dict(type='str'),
instance_role=dict(type='str'),
name=dict(type='str'),
tags=dict(type='dict'),
purge_tags=dict(type='bool', default=False),
filters=dict(type='dict', default=None),
launch_template=dict(type='dict'),
key_name=dict(type='str'),
cpu_credit_specification=dict(type='str', choices=['standard', 'unlimited']),
cpu_options=dict(type='dict', options=dict(
core_count=dict(type='int', required=True),
threads_per_core=dict(type='int', choices=[1, 2], required=True)
)),
tenancy=dict(type='str', choices=['dedicated', 'default']),
instance_initiated_shutdown_behavior=dict(type='str', choices=['stop', 'terminate']),
termination_protection=dict(type='bool'),
detailed_monitoring=dict(type='bool'),
instance_ids=dict(default=[], type='list'),
network=dict(default=None, type='dict'),
volumes=dict(default=None, type='list'),
))
# running/present are synonyms
# as are terminated/absent
module = AnsibleAWSModule(
argument_spec=argument_spec,
mutually_exclusive=[
['security_groups', 'security_group'],
['availability_zone', 'vpc_subnet_id'],
['tower_callback', 'user_data'],
['image_id', 'image'],
],
supports_check_mode=True
)
if module.params.get('network'):
if module.params.get('network').get('interfaces'):
if module.params.get('security_group'):
module.fail_json(msg="Parameter network.interfaces can't be used with security_group")
if module.params.get('security_groups'):
module.fail_json(msg="Parameter network.interfaces can't be used with security_groups")
state = module.params.get('state')
ec2 = module.client('ec2')
if module.params.get('filters') is None:
filters = {
# all states except shutting-down and terminated
'instance-state-name': ['pending', 'running', 'stopping', 'stopped']
}
if state == 'stopped':
# only need to change instances that aren't already stopped
filters['instance-state-name'] = ['stopping', 'pending', 'running']
if isinstance(module.params.get('instance_ids'), string_types):
filters['instance-id'] = [module.params.get('instance_ids')]
elif isinstance(module.params.get('instance_ids'), list) and len(module.params.get('instance_ids')):
filters['instance-id'] = module.params.get('instance_ids')
else:
if not module.params.get('vpc_subnet_id'):
if module.params.get('network'):
# grab AZ from one of the ENIs
ints = module.params.get('network').get('interfaces')
if ints:
filters['network-interface.network-interface-id'] = []
for i in ints:
if isinstance(i, dict):
i = i['id']
filters['network-interface.network-interface-id'].append(i)
else:
sub = get_default_subnet(ec2, get_default_vpc(ec2), availability_zone=module.params.get('availability_zone'))
filters['subnet-id'] = sub['SubnetId']
else:
filters['subnet-id'] = [module.params.get('vpc_subnet_id')]
if module.params.get('name'):
filters['tag:Name'] = [module.params.get('name')]
if module.params.get('image_id'):
filters['image-id'] = [module.params.get('image_id')]
elif (module.params.get('image') or {}).get('id'):
filters['image-id'] = [module.params.get('image', {}).get('id')]
module.params['filters'] = filters
if module.params.get('cpu_options') and not module.botocore_at_least('1.10.16'):
module.fail_json(msg="cpu_options is only supported with botocore >= 1.10.16")
existing_matches = find_instances(ec2, filters=module.params.get('filters'))
changed = False
if state not in ('terminated', 'absent') and existing_matches:
for match in existing_matches:
warn_if_public_ip_assignment_changed(match)
warn_if_cpu_options_changed(match)
changed |= manage_tags(match, (module.params.get('tags') or {}), module.params.get('purge_tags', False), ec2)
if state in ('present', 'running', 'started'):
ensure_present(existing_matches=existing_matches, changed=changed, ec2=ec2, state=state)
elif state in ('restarted', 'rebooted', 'stopped', 'absent', 'terminated'):
if existing_matches:
ensure_instance_state(state, ec2)
else:
module.exit_json(
msg='No matching instances found',
changed=False,
instances=[],
)
else:
module.fail_json(msg="We don't handle the state {0}".format(state))
if __name__ == '__main__':
main()
| gpl-3.0 | 1,229,012,916,601,802,200 | 41.253292 | 160 | 0.574932 | false |
mblayman/markwiki | markwiki/storage/fs/user.py | 1 | 4620 | # Copyright (c) 2016, Matt Layman
import json
import hashlib
import os
from markwiki.exceptions import UserStorageError
from markwiki.models.user import User
from markwiki.storage.user import UserStorage
class FileUserStorage(UserStorage):
'''A file system based user storage'''
def __init__(self, config):
self._path = os.path.join(config['MARKWIKI_HOME'], 'users')
# An index of user ID to user file paths
self._id_index_file = os.path.join(self._path, 'id.index')
self._id_index = {}
# An index of user email to user file paths
self._email_index_file = os.path.join(self._path, 'email.index')
self._email_index = {}
def initialize(self):
if not os.path.exists(self._path):
os.mkdir(self._path)
self._write_json(self._id_index, self._id_index_file)
self._write_json(self._email_index, self._email_index_file)
else:
self._read_indices()
def create(self, user):
'''Create a new user by storing it as JSON on the file system.'''
user_file = self._get_user_file(user.name)
if os.path.exists(user_file):
raise UserStorageError('A user with that name already exists.')
if self.find_by_email(user.email) is not None:
raise UserStorageError('A user with that email already exists.')
# Everything looks good so get the user an ID and save it.
user.user_id = self._generate_user_id()
self._write_json(user.__dict__, user_file)
# Now that the user is saved, update the indices.
self._update_indices(user, user_file)
def find_by_email(self, email):
'''Find a user by their email or return ``None``.'''
user_file = self._email_index.get(email)
if user_file is None:
return None
return self._load_user(user_file)
def find_by_id(self, user_id):
'''Find a user by their ID or return ``None``.'''
user_file = self._id_index.get(user_id)
if user_file is None:
return None
return self._load_user(user_file)
def find_by_name(self, name):
'''Find a user by their name or return ``None``.'''
user_file = self._get_user_file(name)
return self._load_user(user_file)
def update(self, user):
'''Update an existing user.'''
user_file = self._get_user_file(user.name)
self._write_json(user.__dict__, user_file)
def _generate_user_id(self):
'''Generate a unique user ID.'''
# Because there might be multiple workers (like if running with
# gunicorn), refresh the in-memory indices to avoid ID clashes.
self._read_indices()
user_id = len(self._id_index)
while self.find_by_id(u'{0}'.format(user_id)) is not None:
user_id += 1
# The auth system will try to do lookups with unicode so the key might
# as well be unicode to be consistent.
return u'{0}'.format(user_id)
def _get_user_file(self, name):
'''Get the file path where the user's data will be stored.'''
m = hashlib.md5()
m.update(name.encode('utf-8'))
return os.path.join(self._path, m.hexdigest())
def _load_user(self, user_file):
'''Load a user from a file.'''
if not os.path.exists(user_file):
return None
with open(user_file, 'r') as f:
data = json.loads(f.read())
return User(data['name'], data['email'], data['login_type'],
data['password_digest'], data['user_id'])
def _read_indices(self):
'''Read the file indices into memory.'''
with open(self._id_index_file, 'r') as f:
self._id_index = json.loads(f.read())
with open(self._email_index_file, 'r') as f:
self._email_index = json.loads(f.read())
def _update_indices(self, user, user_file):
'''Update the file indices with the provided user information.'''
self._id_index[user.user_id] = user_file
self._write_json(self._id_index, self._id_index_file)
# Not every user has an associated email account.
if user.email:
self._email_index[user.email] = user_file
self._write_json(self._email_index, self._email_index_file)
def _write_json(self, data, out):
'''Write out JSON with common settings.'''
json_data = json.dumps(data, sort_keys=True, indent=2,
separators=(',', ': '))
with open(out, 'w') as f:
f.write(json_data)
| bsd-2-clause | 3,899,588,193,903,150,000 | 35.377953 | 78 | 0.588528 | false |
RUB-NDS/PRET | fuzzer.py | 1 | 1263 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
class fuzzer():
vol = ["", ".", "\\", "/", "file:///", "C:/"]
var = ["~", "$HOME"]
win = ["%WINDIR%", "%SYSTEMROOT%", "%HOMEPATH%", "%PROGRAMFILES%"]
smb = ["\\\\127.0.0.1\\"]
web = ["http://127.0.0.1/"] # "http://hacking-printers.net/log.me"
dir = ["..", "...", "...."] # also combinations like "./.."
# sep = ["", "\\", "/", "\\\\", "//", "\\/"]
fhs = ["/etc", "/bin", "/sbin", "/home", "/proc", "/dev", "/lib",
"/opt", "/run", "/sys", "/tmp", "/usr", "/var", "/mnt",]
abs = [".profile", ["etc", "passwd"], ["bin", "sh"], ["bin", "ls"],
"boot.ini", ["windows", "win.ini"], ["windows", "cmd.exe"]]
rel = ["%WINDIR%\\win.ini",
"%WINDIR%\\repair\\sam",
"%WINDIR%\\repair\\system",
"%WINDIR%\\system32\\config\\system.sav",
"%WINDIR%\\System32\\drivers\\etc\\hosts",
"%SYSTEMDRIVE%\\boot.ini",
"%USERPROFILE%\\ntuser.dat",
"%SYSTEMDRIVE%\\pagefile.sys",
"%SYSTEMROOT%\\repair\\sam",
"%SYSTEMROOT%\\repair\\system"]
# define prefixes to use in fuzzing modes
path = vol+var+win+smb+web # path fuzzing
write = vol+var+win+smb+fhs # write fuzzing
blind = vol+var # blind fuzzing
| gpl-2.0 | 3,648,293,130,167,252,500 | 41.1 | 69 | 0.475851 | false |
oxyum/python-tlogger | tlogger/logger.py | 1 | 4372 | # -*- mode: python; coding: utf-8; -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .action_binder import ActionBinder
from .action_stack import action_stack
from .actions import Action
from .constants import Level
from .decorators import wrap_descriptor_method, wrap_function
from .proxies import ContextManagerProxy, IterableProxy
from .utils import is_descriptor
try:
from django import VERSION # nopep8
DJANGO_AVAILABLE = True
except ImportError:
DJANGO_AVAILABLE = False
class Logger(object):
def __init__(self, name_or_logger, action_class=Action):
self.logger = name_or_logger
self.action_class = action_class
def __call__(self, func=None, **kwargs):
if func is None:
return self.parametrized_decorator(**kwargs)
return self._decorator(func, self.action_class, self.logger)
if DJANGO_AVAILABLE:
def view(self, func=None, **kwargs):
params = self._get_view_defaults()
if func is None:
params.update(kwargs)
return self.parametrized_decorator(**params)
return self._decorator(func, self.action_class, self.logger,
**params)
def _get_view_defaults(self):
return dict(hide_params=['result'])
def parametrized_decorator(self, **kwargs):
action_class = kwargs.pop('action_class', self.action_class)
def decorator(func):
return self._decorator(func, action_class, self.logger, **kwargs)
return decorator
def _decorator(self, func, action_class, logger, **kwargs):
if is_descriptor(func):
return wrap_descriptor_method(func, action_class, logger, **kwargs)
return wrap_function(func, action_class, logger, **kwargs)
def dump(self, **kwargs):
self.event(suffix='dump_variable', payload=kwargs)
def create_ad_hoc_action(self, context_object):
return Action.create_ad_hoc(logger=self.logger,
context_object=context_object)
def event(self, suffix, payload, action=None, **kwargs):
action = action or self.get_current_action()
if action is None:
with self.create_ad_hoc_action() as ad_hoc:
ad_hoc.emit_event(suffix, payload, **kwargs)
else:
action.emit_event(suffix, payload, **kwargs)
def get_current_action(self):
return action_stack.peek()
def start_action(self, name, **kwargs):
return self.action_class(name, self.logger, **kwargs)
def _raw(self, suffix, level, msg, *args, **kwargs):
self.event(suffix, {}, level=level,
raw_msg=msg, raw_args=args, raw_kwargs=kwargs)
def debug(self, msg, *args, **kwargs):
self._raw('debug', Level.debug, msg, *args, **kwargs)
def info(self, msg, *args, **kwargs):
self._raw('info', Level.info, msg, *args, **kwargs)
def warning(self, msg, *args, **kwargs):
self._raw('warning', Level.warning, msg, *args, **kwargs)
def error(self, msg, *args, **kwargs):
self._raw('error', Level.error, msg, *args, **kwargs)
def exception(self, msg, *args, **kwargs):
exc_info = kwargs.pop('exc_info', 1)
self._raw('exception', Level.error, msg, *args, exc_info=exc_info,
**kwargs)
def critical(self, msg, *args, **kwargs):
self._raw('critical', Level.critical, msg, *args, **kwargs)
def set_status(self, code, msg):
self.get_current_action().set_status(code, msg)
def action_for(self, func):
return ActionBinder.get_action(func)
def iter(self, iterable, steps=False, name=None, context_object=None,
**kwargs):
action = self.start_action(
name or 'iterations', context_object=context_object, **kwargs
)
return IterableProxy(iterable, steps=steps, action=action)
def context(self, context_manager, name=None, **kwargs):
action = self.start_action(
name or 'context', context_object=context_manager, **kwargs
)
return ContextManagerProxy(context_manager, action=action)
def get_logger(name, logger_class=Logger):
return logger_class(name)
| mit | 1,517,032,221,352,195,000 | 32.891473 | 79 | 0.623056 | false |
pombreda/eggy | eggy/model/Model.py | 1 | 94271 | #!/usr/bin/env python
# eggy - a useful IDE
# Copyright (c) 2008 Mark Florisson
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This module provides the model extending the gui. The model forms the central
part of the application.
"""
__all__ = ['Model', 'NoSuchFileException']
import os
import re
import sys
import glob
import user
import time
import Queue
import shutil
import signal
import socket
import select
import atexit
import codecs
import textwrap
import traceback
import chardet
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4 import Qsci
from eggy.gui import MainWindow, EditorTabWidget, TextEdit
from eggy.network.Network import Network, PortTakenException
from eggy.compile.Compile import Compile
import eggy.compile.Compile as Compile_
from eggy.decorators import Decorators
from eggy.project import Project
from eggy.project import Find
class NoSuchFileException(Exception):
"""
Exception raised when actions requiring an open file are invoked
when no file is opened.
"""
class Model(MainWindow.MainWindow):
"""
This class contains most of the intelligence behind the whole application.
Most actions, buttons, etc in MainWindow.py are implemented here. It's
represents the model and controller.
"""
def __init__(self, base, appname, version, mymailaddress, website):
"""
Constructor
@param base the root of the program files
        @param appname the name of the application
        @param version the application version
@param mymailaddress my mail address
@param website the project's website
"""
# our shared queue with Network
self._queue = Queue.Queue(0) # infinite
# network listen port
self._port = None
# name of the application
self._appname = appname
self.version = version
# mail address to send bugreports and other spam to
self.mymail = mymailaddress
# the project's website
self._website = website
# we must take care of not saving settings twice when we have an
# impatient user that keeps clicking on the cross
self._shuttingDown = False
# the directory of this program's files
# (for determing the path for icons in MainWindow)
self.base = base
# contains the filename as key, a list with editor (editable text) and
# whether filesaveas should be invoked as values
self._editors = {}
self._icons = (QIcon(self.base + "img/filenew.png"),
QIcon(self.base + "img/edit.png"))
# contains the list of opened files in order, used for order
self._openFileList = []
# tells the count for the name of a new file
self._count = 0
        # A Compile instance while the program is compiling
        # (useful for the "stop" button)
self._compileObject = None
# name as key, Project object as value
self._projects = {}
# filename as key, widget indicating the file is being synced as value
self._syncingFiles = {}
# the directory containing all projects
self.projectDir = None
# the current chat browser widget (this is for hiding the current,
# and showing a new one when the user switches to a document in
# another project)
self._currentChatBrowser = None
# the nickname of the user
self._username = None
# contains the plugin name as key, the plugin module as value
self._plugins = {}
# some user-configurable settings
self.tabwidth = 4
self.useTabs = False
self.whiteSpaceVisible = False
self.boxedFolding = True
self.autoComplete = True
self.autoCompleteWords = True
# amount of characters to type before poping up a completion dialog
self.autoCompleteInvocationAmount = 3
# whether to show the eggy image in the project tree or not
self.stylesheet = True
        # if the user removed .project.pk files, pop up one dialog, not as
# many as there are directories
self.errorPoppedUp = False
self._restoreSettings()
# the filter to set for showing files in the project tree
self.fileExtensions = [".java", ".py", ".pyw", ".pyx", ".sh", ".pl",
".vhdl", ".html", ".xml", ".css", ".rb", ".cpp", ".h", ".d",
".inc", ".js", ".cs", ".c", ".sql", ".cgi", ".fcgi"]
# instance variables must be set before calling MainWindow.__init__()
super(Model, self).__init__()
self.setStylesheet()
# this must be after the call to the superclass, because we need
# connect functions from QObject
self._networkUp = False
self._networkRestart()
if self.projectCheckDir():
self.setProjectDir()
else:
self._projectSetProjectDir()
Compile_.loadCompilers()
self._loadPlugins()
try:
self.tabChanged(self.editorTabWidget.currentIndex())
except NoSuchFileException:
self._fileNew()
self.actionFileSaveAll.setEnabled(False)
self._setupSocket()
debug = Decorators.debug
network = Decorators.network
def _abspath(self, filename):
"""
Private method to determine the absolute path of a filename
@param filename the filename to get the path of
@return the path of filename or the user's home directory on failure
(str)
"""
filename = str(filename)
try:
path = filename[:filename.rindex("/")] + "/"
except ValueError:
path = user.home + "/"
return path
def _basename(self, filename):
"""
Private method to get the basename of a filename
@param filename the filename to get the basename of
@return the basename of filename or the user's home directory on
failure (str)
"""
filename = str(filename)
try:
base = filename[filename.rindex("/")+1:]
except ValueError:
base = user.home
return base
def errorMessage(self, text):
"""
Public method to display a warning to the user
@param text the message to display
"""
QMessageBox.warning(self, "Warning", text)
def infoMessage(self, text, title="Note: "):
"""
Public method to display an information message to the user
@param text the message to display
        @param title the window title
"""
QMessageBox.information(self, title, text)
def systrayMessage(self, title, message):
if QSystemTrayIcon.supportsMessages():
self._systemtray.showMessage(title, message)
else:
self.infoMessage(message, title=title)
def _fileGetOpenFile(self, index=None):
"""
        Private method to get the filename of an opened file by index.
        Raises NoSuchFileException when the index is invalid or no tabs are open
        @param index the index of the filename
        @return the filename (str)
"""
if index is None:
index = self.editorTabWidget.currentIndex()
if -1 < index < len(self._openFileList):
return self._openFileList[index]
else:
raise NoSuchFileException("Muahaha")
def _fileGetIndex(self, filename):
"""
Private method to get the index of a filename
@param filename the filname
@return the index of filename (int)
"""
return self._openFileList.index(filename)
def _fileGetEditor(self, *args):
"""
Protected method to get the editor object by filename or by index
@param *args the filename or index of the editor to get
@return the editor object or None when *args is invalid
"""
retval = None
args = args[0]
if isinstance(args, str):
if args in self._editors:
retval = self._editors[args][0]
elif isinstance(args, int):
if args < len(self._openFileList):
retval = self._editors[self._fileGetOpenFile(args)][0]
return retval
def _getCurrentEditor(self):
"""
Private method for getting the currently selected editor object.
Raises NoSuchFileException when no documents are open
@return editor object
"""
index = self.editorTabWidget.currentIndex()
if -1 < index < len(self._openFileList):
filename = self._openFileList[index]
return self._editors[filename][0]
else:
raise NoSuchFileException()
def get(self, filename=None):
"""
Public method that makes it easy for plugins to obtain information
about the currently opened document
        @param filename the file to look up (defaults to the currently
            selected document)
        @return a tuple containing the filename, editor object and index of the
            document
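
        Example, given the Model instance as "model":
            filename, editor, index = model.get()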
"""
index = self.editorTabWidget.currentIndex()
editor = None
if filename is None:
if -1 < index < len(self._openFileList):
filename = self._openFileList[index]
editor = self._editors[filename][0]
else:
if filename in self._openFileList:
index = self._openFileList.index(filename)
editor = self._editors[filename][0]
return (filename, editor, index)
def _fileRemoveOpenFile(self, index):
"""
Protected method to remove and close an opened file
@param index the index to remove the file at
"""
self.editorTabWidget.removeTab(index)
filename = self._fileGetOpenFile(index)
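        # if the closed tab was a never-saved Untitled file and it was the most
        # recently numbered one, reuse its number for the next new file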
if not os.path.exists(filename) and filename.startswith("Untitled") \
and "Untitled%i" % (self._count - 1) not in self._openFileList:
self._count -= 1
self._openFileList.remove(filename)
self._editors.pop(filename)
self.emit(SIGNAL("fileClosed"), filename)
self.tabChanged(self.editorTabWidget.currentIndex())
def _fileAddOpenFile(self, fname, editor, fileSaveAs=False):
"""
Private method to add a file for opening
@param fname the name of the file
@param editor the editor object
@param fileSaveAs whether fileSaveAs should be invoked or not
"""
self._openFileList.append(fname)
self._editors[fname] = [editor, fileSaveAs]
editor.setModified(False)
self.emit(SIGNAL("fileOpened"), fname)
if os.path.exists(fname):
fname = self._basename(fname)
self.editorTabWidget.addTab(editor, self._icons[0], fname)
self.editorTabWidget.setCurrentWidget(editor)
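        # if the only other open tab is a pristine Untitled placeholder,
        # close it now that a real file has been opened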
if len(self._openFileList) == 2:
if self._openFileList[0].startswith("Untitled") and \
not self._fileGetEditor(self._openFileList[0]).isModified() and \
not fname.startswith("Untitled"):
self._fileRemoveOpenFile(0)
if len(self._openFileList) == 1:
self.tabChanged(self.editorTabWidget.currentIndex())
def _center(self, widget):
"""
Protected method to center a widget above the main window
@param widget the widget to center
"""
x = (self.width() / 2) - (widget.width() / 2)
y = (self.height() / 2) - (widget.height() / 2)
widget.move(self.pos() + QPoint(x,y))
# >>>>>>>>>>>>>>>>>>>>>> File menu actions <<<<<<<<<<<<<<<<<<<<<<
def _createEditor(self, filename=None):
"""
Private method for creating a QScintilla text editor
"""
editor = TextEdit.TextEdit(self, filename)
self.connect(editor, SIGNAL("modificationChanged(bool)"),
self._modificationChanged, Qt.QueuedConnection)
self.connect(editor, SIGNAL("modificationChanged(bool)"),
self._modificationChanged, Qt.QueuedConnection)
self.connect(editor, SIGNAL("copyAvailable(bool)"),
self.actionEditCopy.setEnabled, Qt.QueuedConnection)
self.connect(editor, SIGNAL("copyAvailable(bool)"),
self.actionEditCut.setEnabled)
return editor
def _modificationChanged(self, enable):
"""
        Private method invoked when a document's modification state changes
"""
self.actionFileSave.setEnabled(enable)
fileSaveAll = False
for number, filename in enumerate(self._openFileList):
if filename not in self._editors:
continue
editor, b = self._editors[filename]
modified = editor.isModified()
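            # self._icons[0] marks a saved file, self._icons[1] a modified one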
icon = self._icons[int(modified)]
self.editorTabWidget.setTabIcon(number, icon)
self.editorTabWidget.tabBar().setTabToolTip(number, filename)
if modified:
fileSaveAll = True
self.actionFileSave.setEnabled(enable)
self.actionFileSaveAll.setEnabled(fileSaveAll)
def _fileNew(self):
"""
Protected method to create a new (unsaved) file
"""
editor = self._createEditor()
name = "Untitled%i" % self._count
self._fileAddOpenFile(name, editor, True)
self._count += 1
def _fileOpen(self):
"""
        Protected method to pop up a dialog and load the selected files
"""
for filename in self._selectFiles():
self.loadFile(filename)
def _fileGetLastDir(self):
"""
Protected method to get the last accessed directory
@return last accessed directory or the user's home directory (str)
"""
settings = QSettings()
return str(settings.value("FileActions/LastDir", \
QVariant(QString(user.home))).toString())
def _fileSetLastDir(self, filename):
"""
        Protected method to set the last accessed directory in the settings
"""
settings = QSettings()
settings.setValue("FileActions/LastDir", \
QVariant(QString(self._abspath(filename))))
def _selectFiles(self, filter=None):
"""
Private method for letting the user select files
@param filter the filter allowing matching files to be selected
@return the selected files (QStringList)
"""
lastdir = self._fileGetLastDir()
if filter is None:
filenames = list(QFileDialog.getOpenFileNames(self, \
"Select files for opening", lastdir))
else:
filenames = list(QFileDialog.getOpenFileNames(self, \
"Select files for opening", lastdir, filter).toStringList())
if filenames:
self._fileSetLastDir(filenames[0])
return filenames
def loadFile(self, filename=None):
"""
Public method that loads a file and adds a tab for it
@param filename the file to open
"""
if filename is None:
action = self.sender()
if isinstance(action, QAction):
filename = action.data().toString()
filename = str(filename)
if filename in self._openFileList:
self.editorTabWidget.setCurrentIndex(self._fileGetIndex(filename))
elif os.path.exists(filename) and not os.path.isdir(filename):
editor = self._createEditor(filename)
try:
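                # try UTF-8 first; if that fails, let chardet guess the encoding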
encoding = 'utf8'
try:
lines = codecs.open(filename, 'rU', encoding).readlines()
except UnicodeDecodeError:
encoding = chardet.detect(open(filename).read())['encoding']
lines = codecs.open(filename, 'rU', encoding).readlines()
for line, text in enumerate(lines):
editor.insertAt(text, line, 0)
except IOError, e:
self.errorMessage("Failed to open file %s " % (filename,) + \
"because it is read-only or does not exist.")
except UnicodeDecodeError, e:
self.errorMessage("Failed to determine file's encoding.")
else:
self._fileAddOpenFile(filename, editor)
self._fileAddRecentFile(filename)
def _fileAddRecentFile(self, filename):
"""
Private method used for updating the File -> "Open Recent" menu
@param filename the file to add to the menu
"""
filename = str(filename)
if filename not in self.recentlyOpenedFiles:
self.recentlyOpenedFiles.insert(0, filename)
self.recentlyOpenedFiles = self.recentlyOpenedFiles[:12]
def _fileOpenRecentMenu(self):
"""
Protected method that creates the File Open Recent menu
"""
self.actionOpenRecentMenu.clear()
for f in self.recentlyOpenedFiles:
basename = self._basename(f)
action = self.createAction("%s %s[ %s ]" % (basename, \
(15-len(basename))*" ", f), self.loadFile, \
tip="Open file %s" % f
)
action.setData(QVariant(QString(f)))
self.actionOpenRecentMenu.addAction(action)
def fileSave(self, index=-1, filename=None):
"""
Public method for saving a file
@param index save the file specified by index, if not specified,
the currently selected file will be saved
@return True on successful save
"""
if filename is not None and filename in self._openFileList:
index = self._openFileList.index(filename)
if index == -1:
index = self.editorTabWidget.currentIndex()
retval = True
try:
filename = self._fileGetOpenFile(index)
except NoSuchFileException:
retval = False
else:
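            # the second list element flags a file that has never been saved
            # to disk, in which case Save As is needed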
if self._editors[filename][1]:
retval = self._fileSaveAs()
else:
editor = self._editors[filename][0]
file = None
try:
file = open(filename, "w")
file.write(unicode(editor.text()).encode('utf8'))
except (IOError, UnicodeEncodeError), e:
self.errorMessage("Unable to save file %s: \n%s" % \
(filename, e))
retval = False
else:
editor.setModified(False)
self.statusbar.showMessage("Saved %s" % filename, 1500)
if file is not None:
file.close()
# self.projectRefresh()
return retval
def _fileSaveAs(self):
"""
        Protected method for saving the current file under a new name
@return True on success
"""
lastdir = self._fileGetLastDir()
index = self.editorTabWidget.currentIndex()
oldfilename = self._fileGetOpenFile(index)
filename = QFileDialog.getSaveFileName(self, "Save File As - %s" % oldfilename, lastdir)
# check for cancel
retval = False
if not filename.isEmpty():
filename = str(filename)
editor = self._fileGetEditor(oldfilename)
# set the last accessed directory...
self._fileSetLastDir(filename)
self._editors[filename] = [editor, False]
self.editorTabWidget.setTabText(index, self._basename(filename))
del self._editors[oldfilename]
self._openFileList[index] = filename
self._fileAddRecentFile(filename)
retval = self.fileSave()
return retval
def _fileSaveAll(self):
"""
Protected method for saving all opened files
"""
for index in range(len(self._openFileList)):
self.fileSave(index)
# It's possible to create a document, modify it, and close it.
# We need to disable the actions because the signal won't be emitted
self.actionFileSave.setEnabled(False)
self.actionFileSaveAll.setEnabled(False)
def _filePrint(self):
"""
Protected method for printing a file
"""
try:
filename = self._fileGetOpenFile()
editor = self._fileGetEditor(filename)
except NoSuchFileException:
pass
else:
printer = Qsci.QsciPrinter()
p = QPrintDialog(printer, self)
if p.exec_() == QDialog.Accepted:
printer.setDocName(filename)
if printer.printRange(editor):
self.infoMessage("File %s successfully printed." % filename)
else:
self.infoMessage("Failed to print file %s." % filename)
def _fileQuit(self):
"""
Protected method that closes the application
"""
self.close()
# >>>>>>>>>>>>>>>>>>>>>> Edit menu actions <<<<<<<<<<<<<<<<<<<<<<
def _editUndo(self):
"""
Protected method undoing the last operation of the user
"""
try:
self._getCurrentEditor().undo()
except NoSuchFileException:
pass
def _editRedo(self):
"""
Protected method redoing the last operation of the user
"""
try:
self._getCurrentEditor().redo()
except NoSuchFileException:
pass
def _editCut(self):
"""
Protected method cutting selected text
"""
try:
self._getCurrentEditor().cut()
except NoSuchFileException:
pass
def _editCopy(self):
"""
Protected method copying selected text
"""
try:
self._getCurrentEditor().copy()
except NoSuchFileException:
pass
def _editPaste(self):
"""
Protected method pasting copied text
"""
try:
self._getCurrentEditor().paste()
except NoSuchFileException:
pass
@property
def indentationChar(self):
# return "\t" if self.useTabs else " "
if self.useTabs:
indentationChar = "\t"
else:
indentationChar = " "
return indentationChar
def _editUnindent(self):
"""
Protected method for unindenting a line or a block of selected lines
"""
try:
editor = self._getCurrentEditor()
except NoSuchFileException:
pass
else:
if editor.hasSelectedText():
l1, i1, l2, i2 = editor.getSelection()
for linenumber in xrange(l1, l2 + 1):
self._unindentLine(editor, linenumber)
tabwidth = self.tabwidth
if self.useTabs:
tabwidth = 1
editor.setSelection(l1, i1, l2, i2 - tabwidth)
else:
line = editor.getCursorPosition()[0]
self._unindentLine(editor, line)
def _unindentLine(self, editor, line):
"""
Private method that unindents the given line
@param editor the editor to unindent the line on
@param line the line to unindent
"""
text = unicode(editor.text(line))
if self.useTabs:
if text[0] == "\t":
width = 1
else:
return
else:
spaces = 0
for spaces, char in enumerate(text):
if char != " ":
break
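            # strip back to the previous tab stop (a full tab width when
            # the line is already aligned on a tab stop)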
width = spaces % self.tabwidth
if width == 0 and spaces >= 4:
width = 4
editor.replaceLine(line, text[width:], send=True)
def _editIndent(self):
"""
        Protected method that indents the current line or the selected block
"""
try:
editor = self._getCurrentEditor()
except NoSuchFileException:
pass
else:
if editor.hasSelectedText():
# indent a block
l1, i1, l2, i2 = editor.getSelection()
for linenumber in xrange(l1, l2 + 1):
self._indentLine(editor, linenumber)
editor.setSelection(l1, i1, l2, i2 + self.tabwidth)
else:
line = editor.getCursorPosition()[0]
self._indentLine(editor, line)
def _indentLine(self, editor, line):
"""
Private method that indents the given line
@param editor the editor to indent the line on
@param line the line to indent
"""
text = unicode(editor.text(line))
if self.useTabs:
editor.replaceLine(line, "\t" + text, send=True)
return
spaces = 0
for spaces, char in enumerate(text):
if char != " ":
break
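        # pad with spaces up to the next tab stop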
width = self.tabwidth - (spaces % self.tabwidth)
editor.replaceLine(line, " "*width + text, send=True)
def _editComment(self):
"""
Protected method for commenting out a line or block
"""
try:
editor = self._getCurrentEditor()
except NoSuchFileException:
pass
else:
editor.beginUndoAction()
if editor.hasSelectedText():
l1, i1, l2, i2 = editor.getSelection()
# comment each line
for linenumber in xrange(l1, l2 + 1):
self._commentLine(editor, linenumber)
# and re-set the selection
editor.setSelection(l1, i1, l2, i2 + len(editor.comment))
else:
line, index = editor.getCursorPosition()
self._commentLine(editor, line)
if re.match("^ *%s$" % editor.comment,
unicode(editor.text(line))):
# empty line comment, set cursor position after comment
editor.setCursorPosition(line,
editor.text(line).length() - 1)
editor.endUndoAction()
def _commentLine(self, editor, line):
"""
        Private method that comments out the given line in the given editor
@param editor the editor containing the line
@param line the line to comment
"""
text = unicode(editor.text(line))
spaces = 0
for spaces, char in enumerate(text):
if char != self.indentationChar:
break
text = "%s%s%s" % (self.indentationChar * spaces,
editor.comment, text[spaces:])
if editor.comment.startswith("<!--"):
# html comment
text = text[:-1] + " -->\n"
editor.replaceLine(line, text, send=True)
def _editUncomment(self):
"""
        Protected method for uncommenting a line or block of selected lines
"""
try:
editor = self._getCurrentEditor()
except NoSuchFileException:
pass
else:
# make the action undoable
editor.beginUndoAction()
if editor.hasSelectedText():
l1, i1, l2, i2 = editor.getSelection()
# comment all selected lines
for linenumber in xrange(l1, l2 + 1):
self._uncommentLine(editor, linenumber)
# re-set the selection
editor.setSelection(l1, i1, l2, i2 - len(editor.comment))
else:
line = editor.getCursorPosition()[0]
self._uncommentLine(editor, line)
editor.endUndoAction()
def _uncommentLine(self, editor, line):
"""
        Private method that uncomments the given line in the given editor
@param editor the editor containing the line
@param line the line to uncomment
"""
text = unicode(editor.text(line))
if editor.comment.startswith("<!--"):
# undo html comment
text = text.replace("-->", "", 1)
editor.replaceLine(line, \
text.replace(editor.comment, "", 1), send=True)
def _editMoveBeginning(self):
"""
Protected method for setting the cursor to the beginning of the line
"""
try:
editor = self._getCurrentEditor()
except NoSuchFileException:
pass
else:
line, index = editor.getCursorPosition()
text = unicode(editor.text(line))
if re.match("^ *$", text) is None:
# not an empty line
index = 0
for index, char in enumerate(text):
if char != self.indentationChar:
break
editor.setCursorPosition(line, index)
def _editMoveEnd(self):
"""
Protected method for setting the cursor to the end of the line
"""
try:
editor = self._getCurrentEditor()
except NoSuchFileException:
pass
else:
line, index = editor.getCursorPosition()
# -1 for the newline character
index = editor.text(line).length()
if unicode(editor.text(line)).endswith("\n"):
index -= 1
editor.setCursorPosition(line, index)
def _editSelectAll(self):
"""
Protected method for selecting all text
"""
try:
editor = self._getCurrentEditor()
except NoSuchFileException:
pass
else:
editor.selectAll()
def _editJump(self, line):
"""
Protected method for jumping to a user-specified line
"""
editor = None
if line > 1:
line -= 1
try:
editor = self._getCurrentEditor()
except NoSuchFileException:
pass
else:
index = 0
text = unicode(editor.text(line))[:-1]
for index, char in enumerate(text):
if char != self.indentationChar:
break
editor.setLastLineJumpedFrom()
editor.setCursorPosition(line, index)
editor.setFocus()
def _editFind(self):
"""
        Protected method for popping up a find dialog
"""
self._findReplaceDlg.show()
self._findInput.selectAll()
self._findInput.setFocus()
def _editFindString(self, find, forward=True, line=-1):
"""
Private method for finding and selecting a string in a document
@param find the text to look for
@param forward whether to search forward or backward
@param line the line where the search should start from
"""
try:
self._getCurrentEditor().findFirst(find,
self._regexCheckBox.isChecked(), False, False,
True, forward, line)
except NoSuchFileException:
pass
def _editFindPrevious(self):
"""
Protected method for finding a previously found string in a document
"""
self._findReplaceDlg.show()
text = self._findInput.text()
if text:
try:
editor = self._getCurrentEditor()
except NoSuchFileException:
pass
else:
self._editFindString(text, False, editor.getCursorPosition()[0])
editor.findNext()
else:
self._findInput.setFocus()
def _editFindNext(self):
"""
Protected method for finding a next occurrence of a string
"""
text = None
try:
text = self._findInput.text()
except AttributeError:
# find next invoked from menu without find dialog
self._editFind()
self._findReplaceDlg.show()
text = self._findInput.text()
if text:
self._editFindString(text)
else:
self._findInput.setFocus()
def _editReplace(self):
"""
        Protected method for replacing the currently selected (found) text
"""
try:
editor = self._getCurrentEditor()
except NoSuchFileException:
pass
else:
if editor.hasSelectedText():
line, index = editor.getCursorPosition()
editor.removeSelectedText()
editor.insert(self._replaceInput.text())
editor.send(line, type=TextEdit.TextEdit.REPLACE)
else:
self.statusbar.showMessage("Find something first", 3000)
# >>>>>>>>>>>>>>>>>>>>>> View menu actions <<<<<<<<<<<<<<<<<<<<<<
def _viewIncreaseFont(self):
"""
Protected method increasing font size for all editors
"""
editors = self._editors.values()
for editor, boolean in editors:
editor.zoomIn()
if len(editors) > 0:
editors[0][0].increaseFontSize()
def _viewDecreaseFont(self):
"""
Protected method decreasing font size for all editors
"""
editors = self._editors.values()
for editor, boolean in editors:
editor.zoomOut()
if len(editors) > 0:
editors[0][0].decreaseFontSize()
def _hideInformationBar(self):
"""
        Protected method hiding or showing the information bar (toolbox)
"""
if self.actionHideInformationBar.isChecked():
self.toolbox.hide()
else:
self.toolbox.show()
def _hideContextTabWidget(self):
# hide = self.contextTabWidget.isHidden()
# self.contextTabWidget.setVisible(hide)
# self.buttonHide.setIcon(self.buttonHideIcons[hide])
# self.buttonHide.setText((hide and "->") or "<-") # "v" if hide else "^")
self.contextTabWidget.setVisible(self.contextTabWidget.isHidden())
def _viewSetHighlighting(self, hl=None):
"""
Protected method setting the highlighting of the current document
@param hl the highlighting to set (str). If this is omitted, the
method is probably invoked through an action, and the action's
text is used as hl
"""
if hl is None:
action = self.sender()
if isinstance(action, QAction):
hl = str(action.text())
if hl is not None:
try:
self._getCurrentEditor().setLanguage("", hl)
except NoSuchFileException:
pass
def _viewLeftTab(self):
self.editorTabWidget.previous()
def _viewRightTab(self):
self.editorTabWidget.next()
def _viewCloseTab(self):
index = self.editorTabWidget.currentIndex()
if index > -1:
self._confirmEditorClose(index)
# >>>>>>>>>>>>>>>>>>>>>> Project menu actions <<<<<<<<<<<<<<<<<<<<<<
@debug
def projectCheckDir(self):
"""
        Public method checking whether the project directory is properly set
@return whether the project dir is properly set (bool)
"""
return self.projectDir is not None and os.path.exists(self.projectDir)
@debug
def _projectEnsureDir(self):
"""
Protected method ensuring the projectDir is properly set
        @return False if the user doesn't want to set it (bool)
"""
if not self.projectCheckDir():
self._projectSetProjectDir()
if self.projectCheckDir():
self.projectRefresh()
return True
else:
return False
else:
return True
def _find(self, filename, widget):
"""
Protected method for finding a file in the project directory
@param filename the name of the file to find
@param widget the QTextBrowser object to display results in
"""
if self.projectCheckDir():
filename = filename.lower()
regex = re.compile(filename)
for f in Find.Find(self.projectDir).find(): #exclude=()):
if filename in f.lower() or regex.search(f.lower()):
widget.addItem(f)
def _confirmOverwrite(self, filename):
"""
Private method checking if the given filename exists and returning
whether it can be overwritten or not.
@param filename the name of the file to be checked
@return to overwrite the file (bool)
"""
retval = True
if os.path.exists(filename):
if os.path.isdir(filename):
self.errorMessage("File exists and is a directory." + \
"Please pick another name")
retval = False
else:
retval = QMessageBox.question(self, "Overwrite %s" % filename, \
"Filename %s already exists. Overwrite it?" % (filename), \
QMessageBox.Yes|QMessageBox.No) == QMessageBox.Yes
return retval
@debug
def _projectNewFile(self, project, package, filename, send=True):
"""
        Protected method creating a new file in a project
@param project the project to put the file in
@param package the package of the file
@param filename the file to be created
        @param send whether to notify other project members of the new file
            (False when the file was created by another host)
"""
if package is None:
path = os.path.join(self.projectDir, project, "")
else:
path = os.path.join(self.projectDir, project,
package.replace(".", os.sep), "")
if filename.endswith(".java"):
            # Java class names conventionally start with a capital letter
            filename = filename[0].upper() + filename[1:]
fname = os.path.join(path, filename)
try:
if not os.path.isdir(path):
os.makedirs(path)
os.mknod(path + filename, 0644)
load = True
elif self._confirmOverwrite(fname):
if os.path.exists(path + filename):
# first see if it's opened, and if so, close it
try:
idx = self._fileGetIndex(fname)
except ValueError:
pass
else:
self._fileRemoveOpenFile(idx)
os.remove(fname)
os.mknod(fname, 0644)
load = True
else:
load = False
except OSError, e:
self.errorMessage("Unable to create file: %s" % e)
return
if send:
self._projectSendNewFile(project, package, filename)
if load and send:
self.loadFile(fname)
self._setRelevantText(project, package)
# set focus
self.editorTabWidget.setCurrentIndex(
self.editorTabWidget.currentIndex())
# self.projectRefresh()
def _setRelevantText(self, project, package=None):
"""
        Private method inserting language-specific boilerplate code into the
        newly created file
        @param project the project the file belongs to
        @param package the package of the file
"""
filename = self._fileGetOpenFile(self.editorTabWidget.currentIndex())
editor = self._fileGetEditor(filename)
filename = self._basename(filename)
if filename.endswith(".py") or filename.endswith(".pyw"):
editor.insert("#!/usr/bin/env python\n\n")
elif filename.endswith(".sh"):
editor.insert("#!/bin/bash\n\n")
elif filename.endswith(".java"):
editor.insert( \
"public class %s%s {\n\n"%(filename[0].upper(), filename[1:-5]) + \
" public %s%s () {\n\n"%(filename[0].upper(),filename[1:-5]) + \
" }\n\n" + \
"}\n"
)
if package is not None:
editor.insertAt("package %s.%s;\n\n" % (project, package), 0, 0)
elif filename.endswith(".pl"):
editor.insert("#!/usr/bin/env perl\n\n")
elif filename.endswith(".rb"):
editor.insert("#!/usr/bin/env ruby\n\n")
elif filename.endswith(".vhdl"):
editor.insert(
"library ieee;\n" +
"use ieee.std_logic.1164.all;\n\n" +
"entity myentity is\n" +
" port ();\n" +
"end myentity\n\n" +
"architecture behaviour of myentity is\n" +
"begin\n" +
" -- \n"
"end behaviour;\n"
)
elif filename.endswith(".c"):
editor.insert(
"\n"
"\n"
"int main(int argc, char **argv) {\n"
"\n"
"}\n"
)
self.fileSave(self.editorTabWidget.currentIndex())
@debug
def removeFile(self, filename, project=True):
"""
Public method for removing a file from a project, or a whole project
@param filename the file to remove
        @param project whether the file is a file in a project (or a whole
            project)
(it could also be a template)
"""
directory = os.path.isdir(filename)
try:
if directory:
shutil.rmtree(filename)
else:
os.remove(filename)
except OSError, e:
self.errorMessage("Unable to delete file or directory: %s" % e)
return
if project:
if self._abspath(filename[:-1]) == self.projectDir and \
self._basename(filename) in self._projects:
self._projects[self._basename(filename)].close()
del self._projects[self._basename(filename)]
if directory:
# we need to check if it's a directory (e.g. when you remove
# /foo/bar/foo you don't want /foo/bar/foo.py to be closed)
filename += "/"
removed = 0
for x in xrange(len(self._openFileList)):
if self._openFileList[x - removed].startswith(filename):
self._fileRemoveOpenFile(x - removed)
removed += 1
elif filename in self._openFileList:
self._fileRemoveOpenFile(self._openFileList.index(filename))
# the timer thingy prevents a segfault, for a reason unknown
QTimer.singleShot(0, self.projectRefresh)
@debug
def renameFile(self, old, new, send=True):
"""
Public method for renaming a file or project.
@param old the old file to rename, including full path
@param new the new filename, without path
"""
newname = self._abspath(old) + new
if self._confirmOverwrite(newname):
if not os.path.exists(old):
return
if send:
project, package, filename = self._projectGetCurrentInfo(old)
self._projectSendRenameFile(project, package, filename, new)
os.rename(old, newname)
self.projectRefresh()
self._templateTree.refresh()
def updateFileList():
"""
            Function updating open files, if a directory along its path
was renamed
"""
for x in xrange(len(self._openFileList)):
fname = self._openFileList[x]
if fname.startswith(old):
newfname = "".join((newname, fname[len(old):]))
self._openFileList[x] = newfname
self._editors[newfname] = self._editors.pop(fname)
        # discard '/' or last letter to get the path
path = self._abspath(old[:-1])
if path == self.projectDir and self._basename(old) in self._projects:
# a project was renamed
self._projects[self._basename(old)].setName(new)
self._projects[new] = self._projects.pop(self._basename(old))
updateFileList()
elif old in self._openFileList:
# an open file was renamed
index = self._openFileList.index(old)
self._openFileList[index] = newname
self.editorTabWidget.setTabText(index, new)
self._editors[newname] = self._editors.pop(old)
elif os.path.isdir(newname):
# package renamed
updateFileList()
@debug
def projectAddFile(self, project, src):
"""
Public method for adding an existing file to the project.
@param project the project to add the selected file to
@param src the file to be added
"""
dest = "".join((self.projectDir, project, "/", self._basename(src)))
if self._confirmOverwrite(dest):
try:
shutil.copy(src, dest)
except IOError, e:
self.errorMessage("Failed to copy %s to %s:\n%s" %(src,dest,e))
return
# let other people know we added a new file (they can download it using
# the sync option)
project, package, filename = self._projectGetCurrentInfo(dest)
self._projectSendNewFile(project, package, filename)
self.loadFile(dest)
self.projectRefresh()
def projectRefresh(self):
"""
Public method that refreshes the project tree
"""
self.projectTree.projectRefresh()
def _projectSetProjectDir(self):
"""
Private method poping up a dialog asking for the directory that will
contain all projects and project files.
"""
# self.infoMessage() does not work here (segfault)
def popup():
o = QWidget()
QMessageBox.information(o, "Set Project Directory",
"Please set the directory that will contain your source "
"and project files.")
QTimer.singleShot(0, popup)
d = QFileDialog.getExistingDirectory(None, \
"Set the source directory for all projects", self._fileGetLastDir())
self.projectDir = str(d)
if self.projectCheckDir():
self._fileSetLastDir(d)
self.setProjectDir()
def setProjectDir(self):
"""
Public method used only for setting the project directory programatically.
"""
# first time d will be an empty string, so check
d = str(self.projectDir)
if os.path.isdir(d):
self.projectDir = d
Project.PROJECTDIR = d
MainWindow.PROJECTDIR = d
self.projectTree.setModel_()
self.projectDirLabel.setText(QString("<b>%s</b>" % d))
self._loadProjects()
else:
# popup a dialog
self._projectSetProjectDir()
def _loadProjects(self):
"""
Private method creating project objects from all projects in the project
directory
"""
names = [name for name in os.listdir(self.projectDir) \
if os.path.isdir(self.projectDir + name) and not name == ".templates"]
for name in names:
self._projects[name] = Project.Project(self, name)
self._projects[name].load()
def _projectCreateProject(self):
"""
Protected method for creating a new project
"""
name = self.projectInput.text()
pw = self.passwordInput.text()
if name.isEmpty():
self.errorMessage("Please provide a name for the project")
self.projectNewWidget.raise_()
self.projectNewWidget.activateWindow()
return
if QFile.exists(self.projectDir + name):
self._repeatDlg(self.projectNewWidget, "File already exists. " + \
"Please remove it or pick another name")
return
if pw.isEmpty():
# self.infoMessage("You didn't provide a password. If you at " + \
# "some point decide otherwise, you can set it via " + \
# "\"Project\" -> \"Project Settings\"", "Password")
pw = None
name = str(name)
p = Project.Project(self, name, pw)
if p.create():
self._projects[name] = p
self.projectNewWidget.close()
self.toolbox.setCurrentIndex(0) # the project
self.projectRefresh()
else:
self.projectNewWidget.raise_()
self.projectNewWidget.activateWindow()
def _projectNew(self):
"""
Protected method popping up a dialog for creating a new project
"""
if self.projectCheckDir():
self.createProjectNewDlg()
else:
if self._projectEnsureDir():
self._projectNew()
@debug
def _projectSettings(self, oldname, newname, password, visible):
"""
        Protected method for applying the newly chosen project settings
@param oldname the old name of the project
@param newname the new name of the project
@param password the password for the project
@param visible the visibility of the project
"""
if oldname != newname:
self.renameFile(self.projectDir + oldname, newname)
password = password.strip() or None
self._projects[newname].setPassword(password)
self._projects[newname].setVisible(visible)
def _projectGetInfo(self, name):
"""
Protected method for retrieving project information
@param name the name of the project to retrieve information of
@return (project name (str), project password (str), project \
visibility (bool)) (tuple)
"""
if name in self._projects:
p = self._projects[name]
pw = p.password()
if pw is None:
pw = ""
return (p.getName(), pw, p.isVisible())
else:
return ("", "", "")
def _projectGetCurrentInfo(self, filename=None):
"""
Private method for obtaining information about the current file or
the one given. This method may raise NoSuchFileException.
@param filename the filename (str)
@return a tuple with project, package, filename
"""
if filename is None:
filename = self._fileGetOpenFile()
if self.projectDir is not None:
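            # the project is the first path component below the project
            # directory; the package is the directory path between the
            # project and the file's basename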
project = filename.replace(self.projectDir, "").split("/")[0]
f = self._basename(filename)
package = filename[\
len(self.projectDir) + len(project) +1 : len(filename) - len(f) -1]
package = package or Network.PASS
return (project, package, f)
else:
return ("", "", filename)
def isConnected(self, filename):
"""
Public method used by TextEdit to determine if the file is in a project
that is connected with other hosts. This is done for the 'undo' action,
since the action might be relatively resource intensive
"""
retval = False
project, package, filename = self._projectGetCurrentInfo(filename)
if project in self._projects:
retval = self._projects[project].isConnected()
return retval
def setStylesheet(self):
stylesheet = ""
if self.stylesheet:
icon = self.base + "img/eggy/eggy-tree-small.png"
stylesheet = ("QTreeView, QTextBrowser, QListWidget {"
"background-color: white;"
"background-image: url(%s); " % icon + \
"background-attachment: scroll;"
"background-repeat: vertical;"
"background-position: center;"
"}"
)
self.projectTree.setStyleSheet(stylesheet)
self._templateTree.setStyleSheet(stylesheet)
self._pluginList.setStyleSheet(stylesheet)
# for project in self._projects.itervalues():
# project.memberList().setStyleSheet(stylesheet)
self._currentMemberList.setStyleSheet(stylesheet)
def tabChanged(self, index):
"""
Public method that updates the chat widget and user list on tab change
according to the project of the newly selected tab
@param index the index of the current tab
"""
try:
            if not 0 <= index < len(self._openFileList):
raise NoSuchFileException
project, package, filename = self._projectGetCurrentInfo()
editor = self._fileGetEditor(self._openFileList[index])
except NoSuchFileException:
pass
else:
self.emit(SIGNAL("tabchanged"))
editor.setFocus()
self.actionFileSave.setEnabled(editor.isModified())
self.actionEditCopy.setEnabled(editor.hasSelectedText())
self.actionEditCut.setEnabled(editor.hasSelectedText())
self.filenameLabel.filename = self._openFileList[index]
if project in self._projects:
project = self._projects[project]
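            # swap the visible chat browser and member list for those
            # belonging to the newly selected tab's project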
self._currentChatBrowser.hide()
self._currentChatBrowser = project.browser()
self._currentChatBrowser.show()
self.chatLabel.setText("Project chat: <b>%s</b>" % project.getName())
self._currentMemberList.hide()
self._currentMemberList = project.memberList()
self._currentMemberList.show()
self._userLabel.setText("Members in project <b>%s</b>" % project.getName())
# >>>>>>>>>>>>>>>>>>>>>> Model->Network communication <<<<<<<<<<<<<<<<<<<<<<
@network
def _projectConnect(self, address, port, project):
"""
Protected method that lets the user connect to another project
@param address the address of the host
@param port the host's port number
"""
if project not in self._projects or not \
self._projects[project].isVisible():
# user might have removed the project or set it to invisible
# while having the dialog open
return
self._projects[project].server = address
self._projects[project].serverport = port
self._queue.put((Network.TYPE_CONNECT, address, int(port), project, \
self._projects[project].password()))
@network
def sendInsertedText(self, line, txt):
"""
Public method for sending text to the other project members
@param txt the text to be inserted (str)
"""
project, package, filename = self._projectGetCurrentInfo()
if project in self._projects:
p = self._projects[project]
timer = p.getTimer()
if p.isVisible():
self._queue.put((Network.TYPE_INSERTEDTEXT, timer.now(), \
project, package, filename, line, txt))
def projectSetVisible(self, project, add=True):
"""
Public method for setting the project visibility (and syncing this
with the network). We don't apply the network decorator, because
when the user decides to restart the network in the settings dialog,
we need our projects synced into the network.
@param project the project name
@param add whether to add or remove the project from the network
"""
project = self._projects[project]
if add:
self._queue.put((Network.TYPE_ADDPROJECT, project))
else:
self._queue.put((Network.TYPE_REMOVEPROJECT, project))
@network
def _projectSync(self, project, user):
"""
Protected method that lets the user sync all files in the project
@param project the project to be synced
@param user the victim to request all syncs from
"""
for f in Find.Find(self.projectDir).find(project, include_path=True):
project, package, filename = self._projectGetCurrentInfo(f)
w = self._syncingWidget(filename)
self._projectRequestSyncFile(w, user, project, package, filename)
@network
@debug
def _projectRequestSyncFile(self, widget, user, project, package, filename):
"""
Protected method for requesting the sync for a file
@param widget the widget that will temporarily replace the editor
@param user the user to send the request to
@param project the project the file is in
@param package the package of the file
@param filename the (base)name of the file
"""
fn = self._assemble(project, package, filename)
self._syncingFiles[fn] = widget
if fn not in self._openFileList:
self.loadFile(fn)
editor = self._fileGetEditor(fn)
assert editor is not None and fn in self._openFileList
index = self._openFileList.index(fn)
self.fileSave(index)
# hide the editor and display the "syncing widget"
self.editorTabWidget.removeTab(index)
self.editorTabWidget.insertTab(index, widget, filename)
self.editorTabWidget.setCurrentIndex(index)
editor.setState(TextEdit.TextEdit.SYNCING)
self._queue.put((Network.TYPE_REQUESTSYNC, user, project, \
package or Network.PASS, filename))
@debug
def _projectSyncCompleted(self, filename):
"""
Protected method called when the syncing was aborted or stopped
        @param filename the name of the file that sync was called on
"""
if filename in self._syncingFiles:
assert filename in self._openFileList
index = self._openFileList.index(filename)
editor = self._fileGetEditor(index)
editor.setState(TextEdit.TextEdit.NORMAL)
editor.processReceivedWhileSyncing()
# restore the tab with the editor
self.editorTabWidget.removeTab(index)
self.editorTabWidget.insertTab(index, editor, self._basename(filename))
self.editorTabWidget.setCurrentIndex(index)
del self._syncingFiles[filename]
@network
@debug
def replySync(self, args):
"""
Public method for replying to a request for the sync of a file
"""
username, project, package, f = [str(arg) for arg in args]
filename = self._assemble(project, package, f)
file = None
if filename in self._openFileList:
self.fileSave(self._openFileList.index(filename))
try:
file = open(filename, "rU")
except IOError:
self._queue.put((Network.TYPE_SYNC, username, project, package, f, None))
if file is not None:
file.close()
else:
self._queue.put((Network.TYPE_SYNC, username, project, package, f, file))
def synced(self, args):
"""
Public method for receiving the synced file
@param args a QStringList from the type [project, package, filename, file's_text]
"""
project, package, f, text = [unicode(arg) for arg in args]
filename = self._assemble(project, package, f)
if filename in self._syncingFiles and filename in self._openFileList:
editor = self._fileGetEditor(filename)
assert editor is not None
done = True
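            # synced file contents arrive in chunks: an "insert" packet
            # replaces the editor text, "append" packets add to it, and any
            # other packet (including an error) ends the sync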
if text == Network.ERROR:
self.errorMessage("Unable to sync file, the person synced " + \
"from has probably set permissions to tight.")
elif text.startswith("insert"):
editor.setText(text[6:])
done = False # wait for append, Network.ERROR or |done| packets
elif text.startswith("append"):
editor.append(text[6:])
done = False
if done:
self._projectSyncCompleted(filename)
@network
def _projectSendNewFile(self, project, package, filename):
self._queue.put((Network.TYPE_PROJECTNEWFILE, project, \
package or Network.PASS, filename))
@network
def projectSendRemoveFile(self, filename):
project, package, filename = self._projectGetCurrentInfo(filename)
self._queue.put((Network.TYPE_PROJECTREMOVEFILE, project, package, filename))
@network
def _projectSendRenameFile(self, project, package, old, new):
if package is None:
package = Network.PASS
self._queue.put((Network.TYPE_PROJECTRENAMEFILE, project, package, old, new))
@debug
def sendProjectFiles(self, args):
"""
Public method that gets signalled from the network, after having
sent a list of addresses, that it needs
to send a list of project files to a specific user.
@param args a QStringList containing project and username
"""
project, username = [str(arg) for arg in list(args)]
text = ""
for f in Find.Find(self.projectDir).find(project):
text += "%s%s" % (Network.DELIM, f)
self._queue.put((Network.TYPE_PROJECTFILES, project, username, text))
@network
def _userChatSend(self):
"""
Protected method for sending chat text to other hosts in the project
"""
# retrieve the project from the chatlabel
project = str(self.chatLabel.text()).split(" ")[-1].replace("<b>", \
"").replace("</b>", "")
text = str(self._chatInput.text())
if text:
self._chatInput.clear()
if project in self._projects and \
self._projects[project].isVisible():
self._queue.put((Network.TYPE_SENDCHATTEXT, project, str(text)))
# let ourselves know we said something
l = QStringList()
l.append(project)
l.append(self._username)
l.append(text)
self.userChatInsertText(l)
else:
self._currentChatBrowser.setHtml(
"<b>File not in a project.</b><br><br>" + \
"You can set it to visible in Project -> Project Settings.<br>"
)
@network
def _chatChangeUsername(self, old, new):
"""
        Protected method letting other users know we changed our name
@param old our old username
@param new our new username
"""
for project in self._projects:
l = QStringList()
l.append(project)
l.append(old)
l.append(new)
self.chatUsernameChanged(l, us=True)
if self._projects[project].isVisible():
self._queue.put(
(Network.TYPE_USERNAMECHANGED, project, old, new)
)
@network
def _projectsQuit(self):
"""
Private method for quitting all projects. Invoked on program shutdown
"""
for project in self._projects:
self.projectSetVisible(project, False)
def _networkRestart(self):
"""
Protected method called by the constructor or by the user from the
settings dialog to restart the network
"""
if self._settingsDlg is not None:
# save the potentially newly set port first
popup = True
self._port = int(self._settingsNetworkPort.text())
try:
self._network = Network(self, self._queue, self._username, \
port=self._port)
except PortTakenException:
self._networkUp = False
self.errorMessage("Unable to start the network, the port is " + \
"probably already taken. Please choose another in the " + \
"\"Settings\" -> \"Configure eggy\" dialog under the " + \
"\"Network\" tab or try again after some time. You will not " + \
"be able to connect or accept incoming connections until the " + \
"network is started.")
else:
Decorators.RUNNING = True
self._networkUp = True
self._network.start()
# disable the restart button
if self._settingsDlg is not None:
self._networkRestartButton.setEnabled(False)
if self._settingsDlg is not None:
self._settingsDlg.raise_()
# >>>>>>>>>>>>>>>>>>>>>> Network->Model communication <<<<<<<<<<<<<<<<<<<<<<
def _assemble(self, project, package, filename):
package = package.strip() or Network.PASS
f = "/".join((self.projectDir + project, package, filename))
return str(f.replace(Network.PASS + "/", ""))
def networkError(self, text):
self.errorMessage(text)
def receiveInsertText(self, args):
"""
Public method invoked by the network when another connected host
inserted text
@param args a QStringList containing respectively project, package,
filename, line, text
"""
project, package, filename, line, text = \
[unicode(arg) for arg in list(args)]
f = self._assemble(project, package, filename)
editor = self._fileGetEditor(f)
if editor is not None:
try:
editor.receive(int(line), text)
except ValueError:
pass
@debug
def receiveProjectNewFile(self, args):
project, package, filename = [str(arg) for arg in list(args)]
if package == Network.PASS:
package = None
self._projectNewFile(project, package, filename, send=False)
@debug
def receiveProjectRemoveFile(self, args):
user, project, package, filename = [str(arg) for arg in list(args)]
filename = self._assemble(project, package, filename)
self.projectTree.projectRemoveFile(filename=filename, \
msg="User %s want to delete %s. " % (user, filename))
@debug
def receiveProjectRenameFile(self, args):
project, package, old, new = [str(arg) for arg in list(args)]
old = self._assemble(project, package, old)
self.renameFile(old, new, send=False)
@debug
def receiveProjectFiles(self, args):
project, text = [unicode(arg) for arg in list(args)]
project = str(project)
files = text.split("|||")
for f in Find.Find(self.projectDir).find(project):
if f in files:
files.remove(f)
for f in files:
if f:
if "/" in f:
self._projectNewFile(project, self._abspath(f), \
self._basename(f), send=False)
else:
self._projectNewFile(project, None, f, send=False)
self.projectRefresh()
def userChatInsertText(self, args):
"""
Public method that handles arrived chat text from another host
"""
        project, username = [str(arg) for arg in list(args)[:2]]
text = unicode(list(args)[2])
if project in self._projects:
browser = self._projects[project].browser()
browser.insertHtml("%s < <b>%s</b> > " % \
(time.strftime("%H:%M"), username))
browser.insertPlainText(text + "\n")
# browser.verticalScrollBar().setSliderDown(True)
browser.verticalScrollBar().setValue(
browser.verticalScrollBar().maximum())
def chatUsernameChanged(self, args, us=False):
"""
        Public method that displays a change of username from someone
        in the right chat
@param args format: [project, old, new]
@param us if we are the ones changing our name, or some other host
"""
project, old, new = [str(arg) for arg in list(args)]
if project in self._projects:
p = self._projects[project]
p.browser().insertHtml(\
"%s -- %s is now known as <b>%s</b> --<br>\n" % \
(time.strftime("%H:%M"), old, new))
if not us:
p.removeMember(old)
p.addMember(new)
def userConnected(self, args):
"""
Public method that adds a newly connected user to the memberlist of
the project
@param args a QStringList of type [project, username]
"""
project, username = [str(arg) for arg in list(args)]
if project in self._projects:
self._projects[project].browser().insertHtml(\
"%s -- %s has <b>joined</b> the project<br>\n" % \
(time.strftime("%H:%M"), username))
self._projects[project].addMember(username)
def userQuit(self, args):
"""
Public method for removing a former participant
@param args QStringList of the format [project, username]
"""
project, username = [str(arg) for arg in list(args)]
if project in self._projects:
self._projects[project].browser().insertHtml(\
"%s -- %s has <b>left</b> the project<br>\n" % \
(time.strftime("%H:%M"), username))
self._projects[project].removeMember(username)
'''
# >>>>>>>>>>>>>>>>>>>>>> Compilation methods <<<<<<<<<<<<<<<<<<<<<<
def _compile(self):
"""
Protected method taking care of compiling and/or running the currently
selected file
"""
self.actionFileCompile.setEnabled(False)
try:
filename = self._fileGetOpenFile(self.editorTabWidget.currentIndex())
except NoSuchFileException:
return
if not os.path.exists(filename):
#self.errorMessage("Please save the file first.")
if self._fileSaveAs():
                # successfully saved
self._compile()
return
# save the file first
self.fileSave()
# compile
self._compileCode(filename, self._compileCheckBoxCompile.isChecked(),
self._compileCheckBoxRun.isChecked())
def _compileCode(self, filename, compile, run):
"""
Private method taking care of compiling and running the given file
@param filename the filename to compile/run
@param run whether to compile only, or compile and run (interpreted
languages are run either way)
"""
try:
if self._compileCheckBoxProgram.isChecked():
programargs = str(self._programArguments.text())
else:
programargs = ""
self._compileObject = (Compile(self, filename, compile, run,
str(self._compileArguments.text()),
str(self._runArguments.text()),
str(programargs)))
except NoCompilerAvailableException:
self.errorMessage("Failed to compile, unknown file type.")
else:
self._compileText.clear()
self._compileObject.start()
def setOutput(self, text):
"""
Public method called from a compilation thread
@param text the text to be inserted
"""
self._compileText.insertPlainText(text)
self._compileText.verticalScrollBar().setValue(
self._compileText.verticalScrollBar().maximum())
def setHtml(self, html):
"""
Public method called from a compilation thread
@param html the html text to be inserted
"""
self._compileText.insertHtml(html)
self._compileText.verticalScrollBar().setValue(
self._compileText.verticalScrollBar().maximum())
def compilationStarted(self, filename):
self._compileStopButton.setEnabled(True)
self._compileButton.setEnabled(False)
self.statusbar.showMessage("Started compiling/running %s" % filename,\
3000)
def compilationFinished(self, filename):
self.actionFileCompile.setEnabled(True)
self._compileStopButton.setEnabled(False)
self._compileButton.setEnabled(True)
self.statusbar.showMessage("Finished compiling/running %s" % filename,\
3000)
def compilationKilled(self, filename):
self._compileStopButton.setEnabled(False)
self._compileButton.setEnabled(True)
self.statusbar.showMessage("Killed compiling/running %s" % filename,\
3000)
def _stop(self):
"""
        Protected method used for stopping the current compilation
"""
if self._compileObject is not None and not self._compileObject.killed:
self._compileObject.kill()
'''
# >>>>>>>>>>>>>>>>>>>>>> Template methods <<<<<<<<<<<<<<<<<<<<<<
def templateCreate(self):
try:
editor = self._getCurrentEditor()
except NoSuchFileException:
pass
else:
if not editor.hasSelectedText():
self.errorMessage("Select something first")
elif self._templateTree.templateDir() is None:
self.errorMessage("Set the project directory first")
else:
self._templateText = str(editor.selectedText())
self._templateCreateDlg()
def templateSave(self, d, filename):
if d is None:
filename = "%s%s" % (self._templateTree.templateDir(), filename)
else:
filename = "%s%s/%s" % (self._templateTree.templateDir(), d, \
filename)
if os.path.exists(filename):
if self._confirmOverwrite(filename):
self.removeFile(filename, False)
else:
return
try:
os.mknod(filename, 0774)
f = open(filename, "w")
f.write(self._templateText)
del self._templateText
self._templateTree.refresh()
except OSError, e:
self.errorMessage("Unable to save template %s: %s" % (filename, e))
return
def templatePaste(self, template):
try:
editor = self._getCurrentEditor()
except NoSuchFileException:
pass
else:
try:
f = open(template, "r")
except IOError, e:
self.errorMessage("Unable to read template: %s" % e)
return
# care for indentation
l, i = editor.getCursorPosition()
editor.beginUndoAction()
for number, line in enumerate(f):
editor.insertLine(" " * i + line, l + number)
editor.endUndoAction()
# editor.insertText(text)
def templateMkdir(self, name):
if self._projectEnsureDir():
filename = self._templateTree.templateDir() + name
if os.path.exists(filename):
if self._confirmOverwrite(filename):
self.removeFile(filename, False)
else:
return
try:
os.makedirs(filename)
except OSError, e:
self.errorMessage("Unable to create file %s: %s" % (filename, e))
self._templateTree.refresh()
# >>>>>>>>>>>>>>>>>>>>>> Settings menu actions <<<<<<<<<<<<<<<<<<<<<<
def _applySettings(self):
"""
Protected method that applies the user's configuration as set in "Settings -> Configure"
"""
settings = QSettings()
# editor
self.useTabs = self._settingsUseTabs.isChecked()
self.tabwidth = self._settingsTabwidth.value()
self.whiteSpaceVisible = self._settingsWhiteSpaceVisible.isChecked()
self.boxedFolding = self._settingsBoxedFolding.isChecked()
self.autoComplete = self._settingsAutoComplete.isChecked()
self.indentationGuides = self._settingsIndentationGuides.isChecked()
self.autoCompleteWords = self._settingsAutoCompleteWords.isChecked()
self.autoCompleteInvocationAmount = \
self._settingsAutoCompleteInvocation.value()
self.showAllFiles = self._settingsShowAllFiles.isChecked()
self.stylesheet = self._settingsShowEggyImage.isChecked()
self.setStylesheet()
self.projectTree.setFilters()
for editor, b in self._editors.itervalues():
editor.setAttrs()
for extension in self._settingsCompilers:
tpl = self._settingsCompilers[extension]
compiler = str(tpl[1].text())
interpreter = compiler
if tpl[0] is not None:
compiler = str(tpl[0].text())
Compile_.setCompiler(extension, (compiler, interpreter))
self._port = int(self._settingsNetworkPort.text())
self._settingsDlg.close()
# >>>>>>>>>>>>>>>>>>>>>> Plugins <<<<<<<<<<<<<<<<<<<<<<
def _loadPlugins(self, refresh=False):
"""
Private method loading all plugins
@param refresh if refresh is True no plugins are stopped or started
(this is used when refreshing the plugin list)
"""
plugindir = self.base + "plugins"
if not os.path.exists(plugindir):
return
# remove all .pyc files (because if we edited a plugin, reload() will
        # load the old .pyc file)
for fname in glob.glob("/".join((plugindir, "*.pyc"))):
try:
os.remove(fname)
except OSError:
pass
for name in self._plugins.keys():
try:
reload(self._plugins[name])
except:
self._plugins.pop(name)
self._pluginList.clear()
for fname in glob.glob("/".join((plugindir, "*.py"))):
name = self._basename(fname).split(".")[0]
if name == '__init__':
continue
if name not in self._plugins:
try:
# __import__ in 2.4 does not accept keyword arguments
plugin = __import__("%s.%s" % ("eggy.plugins", name), {}, {},
['']) # import rightmost
# check for validity
assert isinstance(plugin.author, str)
assert isinstance(plugin.version, (float, int, long))
assert isinstance(plugin.description, str)
# and for existence and callability
for function in (plugin.start, plugin.stop):
assert callable(function)
except:
print "Invalid plugin: %s" % name
import traceback
traceback.print_exc()
continue
self._plugins[name] = plugin
plugin.method = {}
plugin.widget = {}
plugin.method["load"] = self.loadFile
plugin.method["save"] = self.fileSave
plugin.method["get"] = self.get
plugin.method["close"] = self._confirmEditorClose
plugin.method["infoMessage"] = self.infoMessage
plugin.method["errorMessage"] = self.errorMessage
plugin.method["systrayMessage"] = self.systrayMessage
plugin.method["createAction"] = self.createAction
plugin.method["createButton"] = self._createButton
plugin.method["showDlg"] = self._showDlg
plugin.widget["right"] = self.toolbox
plugin.widget["bottom"] = self.contextTabWidget
self._pluginList.addItem(name)
if not refresh:
if self._autostartPlugin(name):
self._pluginStart(name)
def _pluginNew(self):
name = self.base + "Example.py"
self.loadFile(name)
self._editors[name][1] = True # invoke fileSaveAs
def _pluginStart(self, name):
try:
self._plugins[name].start(self)
except:
self.systrayMessage(name, "Unable to start plugin '%s': %s %s" % (
(name,) + sys.exc_info()[:2]))
def _pluginStop(self, name):
"""
        Private method calling 'stop' on a single plugin. Called when eggy
        is being closed
@param name the name of the plugin to stop
"""
try:
self._plugins[name].stop(self)
except:
self.systrayMessage(name, "Unable to stop plugin %s" % name)
def _pluginsStop(self):
"""
Private method stopping all plugins on eggy shutdown
"""
for name in self._plugins:
self._pluginStop(name)
def _autostartPlugin(self, name):
return QSettings().value("Plugins/" + name, QVariant(False)).toBool()
def _pluginShowInfo(self, name):
name = str(name)
if not name:
return
plugin = self._plugins[name]
desc = textwrap.wrap(textwrap.dedent(plugin.description), 40)
self._pluginInfo.setText(
"<br />".join((
"<b>Author:</b>",
" " + plugin.author,
"",
"<b>Version:</b>",
" " + str(plugin.version),
"",
"<b>Description:</b>",
" " + "<br /> ".join(desc),
# " " + plugin.description.replace("\n", "<br /> "),
)).replace(" ", " "*2)
)
check = self._autostartPlugin(name)
if check != self._pluginAutoStart.isChecked():
# ignore the state change
self._ignoreStateChange += 1
self._pluginAutoStart.setChecked(check)
# >>>>>>>>>>>>>>>>>>>>>> Methods for quitting <<<<<<<<<<<<<<<<<<<<<<
def editorClose(self):
"""
Public method called by the user from the context menu to close
the current editor
"""
self._confirmEditorClose(self.editorTabWidget.currentIndex())
def editorCloseAll(self):
"""
Public method closing all open editors
"""
for index in range(len(self._openFileList)):
if not self._confirmEditorClose():
                # user pressed cancel on an unsaved file; stop closing tabs
break
def _confirmEditorClose(self, index=0):
"""
Private method for confirming the closing of a tab
@param index the index of the editor/file to close
@return True if the user did not press cancel, else False
"""
try:
filename = self._fileGetOpenFile(index)
except NoSuchFileException:
# invalid index
return True
retval = True
editor = self._fileGetEditor(filename)
if editor is not None and editor.isModified():
self.editorTabWidget.setCurrentWidget(editor)
answer = QMessageBox.question(self, "%s - Save Unsaved Changes" % filename, \
"File \"%s\" has unsaved changes. Save them?" % filename, \
QMessageBox.Yes|QMessageBox.No|QMessageBox.Cancel)
if answer == QMessageBox.Yes:
self.fileSave(index)
self._fileRemoveOpenFile(index)
elif answer == QMessageBox.No:
self._fileRemoveOpenFile(index)
elif answer == QMessageBox.Cancel:
retval = False
else:
self._fileRemoveOpenFile(index)
return retval
def _saveSettings(self):
"""
Private method saving the user's settings
"""
settings = QSettings()
settings.setValue("File/RecentlyOpenedFiles",
QVariant(QStringList(self.recentlyOpenedFiles)))
if self.projectCheckDir():
settings.setValue("Project/SourceDirectory",
QVariant(QString(self.projectDir)))
settings.setValue("Editor/OpenFiles",
QVariant(QStringList(
[f for f in self._openFileList if os.path.exists(f)])))
settings.setValue("Editor/IndexSelectedFile",
QVariant(self.editorTabWidget.currentIndex()))
settings.setValue("Chat/Username", QVariant(QString(self._username)))
settings.setValue("Editor/UseTabs", QVariant(self.useTabs))
settings.setValue("Editor/TabStopWidth", QVariant(self.tabwidth))
settings.setValue("Editor/WhiteSpaceVisible",
QVariant(self.whiteSpaceVisible))
settings.setValue("Editor/BoxedFolding", QVariant(self.boxedFolding))
settings.setValue("Editor/AutoComplete", QVariant(self.autoComplete))
settings.setValue("Editor/IndentationGuides", QVariant(
self.indentationGuides))
settings.setValue("Editor/AutoCompleteWords",
QVariant(self.autoCompleteWords))
settings.setValue("Editor/AutoComleteInvocationAmount",
QVariant(self.autoCompleteInvocationAmount))
settings.setValue("ProjectTree/Image", QVariant(self.stylesheet))
settings.setValue("ProjectTree/ShowAllFiles",
QVariant(self.showAllFiles))
settings.setValue("Network/Port", QVariant(self._port))
self._pluginsStop()
@debug
def _restoreSettings(self):
"""
Private method restoring the saved user's settings
"""
settings = QSettings()
l = settings.value("File/RecentlyOpenedFiles", \
QVariant(QStringList())).toStringList()
self.recentlyOpenedFiles = []
for filename in l:
filename = str(filename)
if os.path.exists(filename):
self.recentlyOpenedFiles.append(filename)
d = settings.value("Project/SourceDirectory", QVariant(QString())).toString()
if d.isEmpty():
self.projectDir = None
else:
self.projectDir = str(d)
if "/" in user.home:
username = user.home.split("/")[-1]
else:
username = "No_username_is_set"
self._username = str(settings.value("Chat/Username", \
QVariant(QString(username))).toString())
self.useTabs = settings.value("Editor/UseTabs",
QVariant(False)).toBool()
self.tabwidth = settings.value("Editor/TabStopWidth",
QVariant(4)).toInt()[0]
self.whiteSpaceVisible = settings.value("Editor/WhiteSpaceVisible",
QVariant(False)).toBool()
self.boxedFolding = settings.value("Editor/BoxedFolding",
QVariant(True)).toBool()
self.autoComplete = settings.value("Editor/AutoComplete",
QVariant(True)).toBool()
self.indentationGuides = settings.value("Editor/IndentationGuides",
QVariant(True)).toBool()
self.autoCompleteWords = settings.value("Editor/AutoCompleteWords",
QVariant(True)).toBool()
self.autoCompleteInvocationAmount = settings.value(
"Editor/AutoComleteInvocationAmount", QVariant(3)
).toInt()[0]
self.showAllFiles = settings.value("ProjectTree/ShowAllFiles",
QVariant(False)).toBool()
self.stylesheet = settings.value("ProjectTree/Image",
QVariant(True)).toBool()
self._port = settings.value("Network/Port", QVariant(7068)).toInt()[0]
def closeEvent(self, event):
"""
Protected method called when the user attempts to close the
application. This is a reimplementation of the event
handler.
@param event the instance of the close event object
"""
if self._shuttingDown:
return
# save the files while they are still open
self._saveSettings()
# Close all projects first
for project in self._projects.itervalues():
project.save()
# cant change a list while looping over it, duh
for index in range(len(self._openFileList)):
# zero is fine since we keep removing the files
if not self._confirmEditorClose():
# user pressed cancel
event.ignore()
break
else:
# StopIteration was raised
# the user decided to shutdown (didnt press cancel on some
# unsaved file)
self._shuttingDown = True
self._saveGuiSettings()
Compile_.saveCompilers()
self._projectsQuit()
self._queue.put((Network.TYPE_QUIT, "discard"))
event.ignore()
if self._networkUp:
QTimer.singleShot(3000, self.quit)
else:
self.quit()
def quit(self):
"""
This method will be invoked from the network, when the network said
goodbye to everyone, or directly, when the network isn't running
"""
import __main__ as eggy
eggy.app.quit()
def killed(self):
"""
Public method called when the user tries to kill the program.
        If the network is running, it will emit a quit signal invoking
'quit'. If the network is not running, we should quit ourselves.
Settings will be lost.
"""
if not self._networkUp:
raise SystemExit(1)
else:
class EventFaker(object):
def ignore(self):
pass
self.closeEvent(EventFaker())
def _setupSocket(self):
"""
This method is called once on initialisation to setup a UNIX Domain
Socket for receiving filenames it must open, by another eggy process.
(it will be sent a SIGUSR1 to notify it of available data)
"""
sockfile = os.path.join(os.sep, 'tmp', 'eggy.socket')
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(sockfile)
s.listen(32)
atexit.register(s.close)
atexit.register(os.unlink, sockfile)
bufsize = os.fpathconf(s.fileno(), 'PC_PIPE_BUF')
        if bufsize < 0 or bufsize > 4096:
bufsize = 4096
def sigusr_handler(signo, frame):
while select.select([s], [], [], 0)[0]:
client, addr = s.accept()
data = client.recv(bufsize)
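                # filenames arrive NUL-separated from the other eggy process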
for fname in data.split('\x00'):
self.loadFile(fname)
self.raise_()
self.activateWindow()
signal.signal(signal.SIGUSR1, sigusr_handler)
| gpl-3.0 | 8,457,566,645,242,356,000 | 34.175746 | 97 | 0.546923 | false |
ptgrogan/ofspy | ofspy/test/player/test_player.py | 1 | 16321 | """
Copyright 2015 Paul T. Grogan, Massachusetts Institute of Technology
Copyright 2017 Paul T. Grogan, Massachusetts Institute of Technology
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
Test cases for L{ofspy.player} package.
"""
import unittest
import logging
#logging.disable(logging.WARNING)
from ...player import Contract, Data, Federation, Federate
from ...context.event import Demand, ValueSchedule
from ...context.location import Surface, Orbit
from ...context import Context
from ...game import Game
from ...simulation import Simulator
"""
Test cases for L{ofspy.player.Federation} class.
"""
class FederationTestCase(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
"""
Test cases for L{ofspy.player.Federate} class.
"""
class FederateTestCase(unittest.TestCase):
def setUp(self):
self.game = Game(numPlayers=1, initialCash=2000)
self.context = self.game.generateContext()
self.sim = Simulator(entities=[self.context],
initTime=0, timeStep=1, maxTime=3)
self.default = Federate(name='Default')
self.fed = self.context.federations[0].federates[0]
self.station = self.game.generateElement('GroundSta',pId=0,eId=0,mTypes=['pSGL'])
self.sat1 = self.game.generateElement('SmallSat',pId=0,eId=1,mTypes=['pSGL','SAR'])
self.sat2 = self.game.generateElement('SmallSat',pId=0,eId=2,mTypes=['pSGL','VIS'])
        self.sat3 = self.game.generateElement('SmallSat',pId=0,eId=3,mTypes=['pISL','VIS'])
self.sat4 = self.game.generateElement('SmallSat',pId=0,eId=4,mTypes=['pSGL','pISL'])
def tearDown(self):
self.game = None
self.context = None
self.sim = None
self.default = None
self.fed = None
self.station = None
self.sat1 = None
self.sat2 = None
self.sat3 = None
self.sat4 = None
class FederateDesignTestCase(FederateTestCase):
def test_design(self):
self.assertFalse(self.default.design(self.station))
self.assertFalse(self.default.design(self.sat1))
self.assertTrue(self.fed.design(self.station))
self.assertEqual(self.fed.getCash(), self.fed.initialCash - self.station.getDesignCost())
self.assertTrue(self.station in self.fed.elements)
self.assertTrue(self.fed.design(self.sat1))
self.assertEqual(self.fed.getCash(), self.fed.initialCash - self.station.getDesignCost()
- self.sat1.getDesignCost())
self.assertTrue(self.sat1 in self.fed.elements)
class FederateCommissionTestCase(FederateTestCase):
def test_commission(self):
self.fed.design(self.sat1)
self.assertFalse(self.fed.commission(
self.sat1, self.context.locations[0], self.context))
self.assertTrue(self.fed.commission(
self.sat1, self.context.locations[3], self.context))
self.assertEqual(self.sat1.location, self.context.locations[3])
self.assertEqual(self.fed.getCash(), self.fed.initialCash
- self.sat1.getDesignCost()
- self.sat1.getCommissionCost(
self.context.locations[3], self.context))
self.fed.design(self.station)
self.assertFalse(self.fed.commission(
self.station, self.context.locations[1], self.context))
self.assertTrue(self.fed.commission(
self.station, self.context.locations[0], self.context))
self.assertEqual(self.station.location, self.context.locations[0])
class FederateDecommissionTestCase(FederateTestCase):
def test_decommission(self):
self.assertFalse(self.fed.decommission(self.station))
self.fed.design(self.station)
self.fed.commission(self.station, self.context.locations[0], self.context)
self.assertTrue(self.fed.decommission(self.station))
self.assertTrue(self.station not in self.fed.elements)
self.fed.design(self.sat1)
self.fed.commission(self.sat1, self.context.locations[1], self.context)
self.assertTrue(self.fed.decommission(self.sat1))
self.assertTrue(self.sat1 not in self.fed.elements)
class FederateLiquidateTestCase(FederateTestCase):
def test_liquidate(self):
self.fed.design(self.station)
self.fed.commission(self.station, self.context.locations[0], self.context)
self.fed.design(self.sat1)
self.fed.commission(self.sat1, self.context.locations[1], self.context)
self.fed.liquidate(self.context)
self.assertTrue(self.station not in self.fed.elements)
self.assertTrue(self.sat1 not in self.fed.elements)
class FederateCanContractTestCase(FederateTestCase):
def test_canContract(self):
self.sim.init()
self.fed.design(self.sat2)
self.fed.commission(self.sat2,
self.context.locations[1],
self.context)
self.sim.advance()
event = next(e for e in self.context.currentEvents
if e.name == "VIS2.13")
self.assertTrue(self.fed.canContract(event, self.context))
self.assertFalse(self.fed.canContract(
next(e for e in self.context.futureEvents), self.context))
class FederateContractTestCase(FederateTestCase):
def test_contract(self):
self.sim.init()
self.fed.design(self.sat2)
self.fed.commission(self.sat2,
self.context.locations[1],
self.context)
self.sim.advance()
event = next(e for e in self.context.currentEvents
if e.name == "VIS2.13")
contract1 = self.fed.contract(event, self.context)
self.assertIsNot(contract1, None)
self.assertIn(contract1, self.fed.contracts)
contract2 = self.fed.contract(
next(e for e in self.context.futureEvents), self.context)
self.assertIs(contract2, None)
self.assertNotIn(contract2, self.fed.contracts)
class FederateCanSenseTestCase(FederateTestCase):
def test_canSense(self):
self.sim.init()
self.fed.design(self.station)
self.fed.commission(self.station,
self.context.locations[0],
self.context)
self.fed.design(self.sat1)
self.fed.commission(self.sat1,
self.context.locations[1],
self.context)
self.fed.design(self.sat2)
self.fed.commission(self.sat2,
self.context.locations[1],
self.context)
self.sim.advance()
event = next(e for e in self.context.currentEvents
if e.name == "VIS2.13")
self.assertFalse(self.fed.canSense(event, self.sat1, self.context))
self.assertTrue(self.fed.canSense(event, self.sat2, self.context))
self.sim.advance()
event = next(e for e in self.context.currentEvents
if e.name == "VIS3.7")
self.assertTrue(self.fed.canSense(event, self.sat2, self.context))
class FederateSenseAndStoreTestCase(FederateTestCase):
def test_senseAndStore(self):
self.sim.init()
self.fed.design(self.station)
self.fed.commission(self.station,
self.context.locations[0],
self.context)
self.fed.design(self.sat1)
self.fed.commission(self.sat1,
self.context.locations[1],
self.context)
self.fed.design(self.sat2)
self.fed.commission(self.sat2,
self.context.locations[1],
self.context)
self.sim.advance()
event = next(e for e in self.context.currentEvents
if e.name == "VIS2.13")
contract = self.fed.contract(event, self.context)
self.assertIsNotNone(contract)
self.assertTrue(self.fed.senseAndStore(
contract, self.sat2, self.context))
self.assertEqual(len(self.sat2.modules[1].data), 1)
self.sim.advance()
event = next(e for e in self.context.currentEvents
if e.name == "VIS3.7")
self.assertFalse(self.fed.senseAndStore(
Contract(event), self.sat2, self.context))
class FederateCanTransportTestCase(FederateTestCase):
def test_canTransport(self):
self.sim.init()
self.fed.design(self.station)
self.fed.commission(self.station,
self.context.locations[0],
self.context)
self.fed.design(self.sat2)
self.fed.commission(self.sat2,
self.context.locations[1],
self.context)
self.fed.design(self.sat3)
self.fed.commission(self.sat3,
self.context.locations[1],
self.context)
self.fed.design(self.sat4)
self.fed.commission(self.sat4,
self.context.locations[5],
self.context)
self.sim.advance()
event = next(e for e in self.context.currentEvents
if e.name == "VIS2.13")
contract1 = self.fed.contract(event, self.context)
self.fed.senseAndStore(contract1, self.sat2, self.context)
data1 = next(d for d in self.sat2.modules[1].data if d.contract is contract1)
self.assertFalse(self.fed.canTransport('pSGL', data1, self.sat2, self.station, self.context))
self.sim.advance()
event = next(e for e in self.context.currentEvents
if e.name == "VIS3.7")
contract2 = self.fed.contract(event, self.context)
self.fed.senseAndStore(contract2, self.sat3, self.context)
data2 = next(d for d in self.sat3.modules[1].data if d.contract is contract2)
self.assertTrue(self.fed.canTransport('pISL', data2, self.sat3, self.sat4, self.context))
self.assertFalse(self.fed.canTransport('pSGL', data2, self.sat4, self.station, self.context))
self.sim.advance()
self.assertTrue(self.fed.canTransport('pSGL', data1, self.sat2, self.station, self.context))
class FederateTransportTestCase(FederateTestCase):
def test_transport(self):
self.sim.init()
self.fed.design(self.station)
self.fed.commission(self.station,
self.context.locations[0],
self.context)
self.fed.design(self.sat2)
self.fed.commission(self.sat2,
self.context.locations[1],
self.context)
self.fed.design(self.sat3)
self.fed.commission(self.sat3,
self.context.locations[1],
self.context)
self.fed.design(self.sat4)
self.fed.commission(self.sat4,
self.context.locations[5],
self.context)
self.sim.advance()
event = next(e for e in self.context.currentEvents
if e.name == "VIS2.13")
contract1 = self.fed.contract(event, self.context)
self.fed.senseAndStore(contract1, self.sat2, self.context)
data1 = next(d for d in self.sat2.modules[1].data if d.contract is contract1)
self.sim.advance()
event = next(e for e in self.context.currentEvents
if e.name == "VIS3.7")
contract2 = self.fed.contract(event, self.context)
self.fed.senseAndStore(contract2, self.sat3, self.context)
data2 = next(d for d in self.sat3.modules[1].data if d.contract is contract2)
self.assertTrue(self.fed.transport('pISL', data2, self.sat3, self.sat4, self.context))
self.assertNotIn(data2, self.sat3.modules[0].data)
self.assertNotIn(data2, self.sat3.modules[1].data)
self.assertIn(data2, self.sat4.modules[1].data)
self.fed.resolve(contract2, self.context)
self.sim.advance()
self.assertTrue(self.fed.transport('pSGL', data1, self.sat2, self.station, self.context))
self.assertNotIn(data1, self.sat2.modules[0].data)
self.assertNotIn(data1, self.sat2.modules[1].data)
self.assertIn(data1, self.station.modules[0].data)
self.assertFalse(self.fed.transport('pSGL', data1, self.sat4, self.station, self.context))
class FederateDeleteDataTestCase(FederateTestCase):
def test_deleteData(self):
self.sim.init()
self.fed.design(self.station)
self.fed.commission(self.station,
self.context.locations[0],
self.context)
self.fed.design(self.sat2)
self.fed.commission(self.sat2,
self.context.locations[1],
self.context)
self.sim.advance()
event = next(e for e in self.context.currentEvents
if e.name == "VIS2.13")
contract1 = self.fed.contract(event, self.context)
self.fed.senseAndStore(contract1, self.sat2, self.context)
data1 = next(d for d in self.sat2.modules[1].data
if d.contract is contract1)
self.fed.deleteData(contract1)
self.assertNotIn(data1, self.sat2.modules[1].data)
self.assertIn(contract1, self.fed.contracts)
class FederateGetContractTestCase(FederateTestCase):
def test_getContract(self):
self.sim.init()
self.fed.design(self.station)
self.fed.commission(self.station,
self.context.locations[0],
self.context)
self.fed.design(self.sat2)
self.fed.commission(self.sat2,
self.context.locations[1],
self.context)
self.sim.advance()
event = next(e for e in self.context.currentEvents
if e.name == "VIS2.13")
contract1 = self.fed.contract(event, self.context)
self.assertIn(contract1, self.fed.contracts)
class FederateResolveTestCase(FederateTestCase):
def test_resolve(self):
self.sim.init()
self.fed.design(self.station)
self.fed.commission(self.station,
self.context.locations[0],
self.context)
self.fed.design(self.sat2)
self.fed.commission(self.sat2,
self.context.locations[1],
self.context)
self.sim.advance()
event = next(e for e in self.context.currentEvents
if e.name == "VIS2.13")
contract1 = self.fed.contract(event, self.context)
self.fed.senseAndStore(contract1, self.sat2, self.context)
data1 = next(d for d in self.sat2.modules[1].data if d.contract is contract1)
self.sim.advance()
event = next(e for e in self.context.currentEvents
if e.name == "VIS3.7")
self.sim.advance()
self.fed.transport('pSGL', data1, self.sat2, self.station, self.context)
cash = self.fed.getCash()
self.assertIn(data1, self.station.modules[0].data)
self.assertTrue(self.fed.resolve(contract1, self.context))
self.assertNotIn(data1, self.station.modules[0].data)
self.assertNotIn(contract1, self.fed.contracts)
self.assertEqual(self.fed.getCash(), cash + contract1.getValue())
class FederateInitTestCase(FederateTestCase):
def test_init(self):
pass
class FederateTickTestCase(FederateTestCase):
def test_tick(self):
pass
class FederateTockTestCase(FederateTestCase):
def test_tock(self):
pass
| apache-2.0 | 4,516,671,871,898,892,000 | 42.873656 | 101 | 0.615097 | false |
jasondunsmore/heat | heat/engine/update.py | 1 | 11487 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
import six
from heat.common import exception
from heat.common.i18n import _LI
from heat.common.i18n import repr_wraper
from heat.engine import dependencies
from heat.engine import scheduler
from heat.objects import resource as resource_objects
LOG = logging.getLogger(__name__)
@repr_wraper
class StackUpdate(object):
"""A Task to perform the update of an existing stack to a new template."""
def __init__(self, existing_stack, new_stack, previous_stack,
rollback=False, error_wait_time=None):
"""Initialise with the existing stack and the new stack."""
self.existing_stack = existing_stack
self.new_stack = new_stack
self.previous_stack = previous_stack
self.rollback = rollback
self.error_wait_time = error_wait_time
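        # snapshot each existing resource's frozen definition before the
        # update starts, so in-place updates diff against the old template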
self.existing_snippets = dict((n, r.frozen_definition())
for n, r in self.existing_stack.items())
def __repr__(self):
if self.rollback:
return '%s Rollback' % str(self.existing_stack)
else:
return '%s Update' % str(self.existing_stack)
@scheduler.wrappertask
def __call__(self):
"""Return a co-routine that updates the stack."""
cleanup_prev = scheduler.DependencyTaskGroup(
self.previous_stack.dependencies,
self._remove_backup_resource,
reverse=True)
self.updater = scheduler.DependencyTaskGroup(
self.dependencies(),
self._resource_update,
error_wait_time=self.error_wait_time)
if not self.rollback:
yield cleanup_prev()
try:
yield self.updater()
finally:
self.previous_stack.reset_dependencies()
def _resource_update(self, res):
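        # resources defined in the new stack are created or updated in place;
        # anything else is cleaned up from the existing or backup stack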
if res.name in self.new_stack and self.new_stack[res.name] is res:
return self._process_new_resource_update(res)
else:
return self._process_existing_resource_update(res)
@scheduler.wrappertask
def _remove_backup_resource(self, prev_res):
if prev_res.state not in ((prev_res.INIT, prev_res.COMPLETE),
(prev_res.DELETE, prev_res.COMPLETE)):
LOG.debug("Deleting backup resource %s" % prev_res.name)
yield prev_res.destroy()
@staticmethod
def _exchange_stacks(existing_res, prev_res):
resource_objects.Resource.exchange_stacks(existing_res.stack.context,
existing_res.id, prev_res.id)
prev_stack, existing_stack = prev_res.stack, existing_res.stack
prev_stack.add_resource(existing_res)
existing_stack.add_resource(prev_res)
@scheduler.wrappertask
def _create_resource(self, new_res):
res_name = new_res.name
# Clean up previous resource
if res_name in self.previous_stack:
prev_res = self.previous_stack[res_name]
if prev_res.state not in ((prev_res.INIT, prev_res.COMPLETE),
(prev_res.DELETE, prev_res.COMPLETE)):
# Swap in the backup resource if it is in a valid state,
# instead of creating a new resource
if prev_res.status == prev_res.COMPLETE:
LOG.debug("Swapping in backup Resource %s" % res_name)
self._exchange_stacks(self.existing_stack[res_name],
prev_res)
return
LOG.debug("Deleting backup Resource %s" % res_name)
yield prev_res.destroy()
# Back up existing resource
if res_name in self.existing_stack:
LOG.debug("Backing up existing Resource %s" % res_name)
existing_res = self.existing_stack[res_name]
self.previous_stack.add_resource(existing_res)
existing_res.state_set(existing_res.UPDATE, existing_res.COMPLETE)
self.existing_stack.add_resource(new_res)
# Save new resource definition to backup stack if it is not
# present in backup stack template already
        # this allows resolving all dependencies that the existing resource
        # may have if it was copied to the backup stack
if (res_name not in
self.previous_stack.t[self.previous_stack.t.RESOURCES]):
LOG.debug("Backing up new Resource %s" % res_name)
definition = new_res.t.reparse(self.previous_stack,
new_res.stack.t)
self.previous_stack.t.add_resource(definition)
self.previous_stack.t.store(self.previous_stack.context)
yield new_res.create()
def _check_replace_restricted(self, res):
registry = res.stack.env.registry
restricted_actions = registry.get_rsrc_restricted_actions(res.name)
existing_res = self.existing_stack[res.name]
if 'replace' in restricted_actions:
ex = exception.ResourceActionRestricted(action='replace')
failure = exception.ResourceFailure(ex, existing_res,
existing_res.UPDATE)
existing_res._add_event(existing_res.UPDATE, existing_res.FAILED,
six.text_type(ex))
raise failure
@scheduler.wrappertask
def _process_new_resource_update(self, new_res):
res_name = new_res.name
if res_name in self.existing_stack:
if type(self.existing_stack[res_name]) is type(new_res):
existing_res = self.existing_stack[res_name]
try:
yield self._update_in_place(existing_res,
new_res)
except exception.UpdateReplace:
pass
else:
# Save updated resource definition to backup stack
                    # because it allows the backup stack resources to be
# synchronized
LOG.debug("Backing up updated Resource %s" % res_name)
definition = existing_res.t.reparse(self.previous_stack,
existing_res.stack.t)
self.previous_stack.t.add_resource(definition)
self.previous_stack.t.store(self.previous_stack.context)
LOG.info(_LI("Resource %(res_name)s for stack "
"%(stack_name)s updated"),
{'res_name': res_name,
'stack_name': self.existing_stack.name})
return
else:
self._check_replace_restricted(new_res)
yield self._create_resource(new_res)
def _update_in_place(self, existing_res, new_res):
existing_snippet = self.existing_snippets[existing_res.name]
prev_res = self.previous_stack.get(new_res.name)
# Note the new resource snippet is resolved in the context
# of the existing stack (which is the stack being updated)
# but with the template of the new stack (in case the update
# is switching template implementations)
new_snippet = new_res.t.reparse(self.existing_stack,
self.new_stack.t)
return existing_res.update(new_snippet, existing_snippet,
prev_resource=prev_res)
@scheduler.wrappertask
def _process_existing_resource_update(self, existing_res):
res_name = existing_res.name
if res_name in self.previous_stack:
yield self._remove_backup_resource(self.previous_stack[res_name])
if res_name in self.new_stack:
new_res = self.new_stack[res_name]
if new_res.state == (new_res.INIT, new_res.COMPLETE):
# Already updated in-place
return
if existing_res.stack is not self.previous_stack:
yield existing_res.destroy()
if res_name not in self.new_stack:
self.existing_stack.remove_resource(res_name)
def dependencies(self):
"""Return the Dependencies graph for the update.
Returns a Dependencies object representing the dependencies between
update operations to move from an existing stack definition to a new
one.
"""
existing_deps = self.existing_stack.dependencies
new_deps = self.new_stack.dependencies
def edges():
# Create/update the new stack's resources in create order
for e in new_deps.graph().edges():
yield e
# Destroy/cleanup the old stack's resources in delete order
for e in existing_deps.graph(reverse=True).edges():
yield e
# Don't cleanup old resources until after they have been replaced
for name, res in six.iteritems(self.existing_stack):
if name in self.new_stack:
yield (res, self.new_stack[name])
return dependencies.Dependencies(edges())
def preview(self):
upd_keys = set(self.new_stack.resources.keys())
cur_keys = set(self.existing_stack.resources.keys())
common_keys = cur_keys.intersection(upd_keys)
deleted_keys = cur_keys.difference(upd_keys)
added_keys = upd_keys.difference(cur_keys)
updated_keys = []
replaced_keys = []
for key in common_keys:
current_res = self.existing_stack.resources[key]
updated_res = self.new_stack.resources[key]
current_props = current_res.frozen_definition().properties(
current_res.properties_schema, current_res.context)
updated_props = updated_res.frozen_definition().properties(
updated_res.properties_schema, updated_res.context)
# type comparison must match that in _process_new_resource_update
if type(current_res) is not type(updated_res):
replaced_keys.append(key)
continue
try:
if current_res.preview_update(updated_res.frozen_definition(),
current_res.frozen_definition(),
updated_props, current_props,
None):
updated_keys.append(key)
except exception.UpdateReplace:
replaced_keys.append(key)
return {
'unchanged': list(set(common_keys).difference(
set(updated_keys + replaced_keys))),
'updated': updated_keys,
'replaced': replaced_keys,
'added': list(added_keys),
'deleted': list(deleted_keys),
}
| apache-2.0 | -289,319,108,590,892,350 | 40.172043 | 79 | 0.582746 | false |
onqtam/doctest | scripts/bench/run_all.py | 1 | 2138 | #!/usr/bin/python3
import os
import sys
if sys.version_info[0] < 3: raise Exception("Python 3 or a more recent version is required.")
import json
import subprocess
average_num_times = 3
max_accum_time = 60 # don't take too long on a test - stop averaging if time exceeds some amount of seconds
with open('tests.json') as data_file:
data = json.load(data_file)
def runBench(prog):
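    # run a single benchmark command and parse the reported time from stdout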
result = subprocess.Popen(prog.split(), stdout = subprocess.PIPE).communicate()[0]
result = result.splitlines()
for line in result:
line = line.decode("utf-8")
if line.startswith("Time running "):
return str(line.rsplit(' ', 1)[-1])
return ""
call = 'python ./bench.py'
the_os = 'linux'
if os.name == "nt":
call = 'python bench.py'
the_os = 'windows'
f = open('results.txt', 'w')
for test in ['header', 'asserts', 'runtime']:
print( '\n************** ' + test + '\n')
f.write('\n************** ' + test + '\n')
f.flush()
for framework in ['doctest', 'catch']:
print( '== ' + framework + '\n')
f.write('== ' + framework + '\n')
f.flush()
for config in data['compilers'][the_os]:
for curr in data[test][1]:
if curr[0] == framework or curr[0] == "any":
command = call + data[test][0] + config + curr[1] + (' --catch' if framework == 'catch' else '')
print(command)
accum = float(0)
num_times = 0
for i in range(0, average_num_times):
res = float(runBench(command))
print(res)
accum += res
num_times += 1
if accum > max_accum_time:
break
average = "{:7.2f}".format(round(accum / num_times, 2))
print("AVERAGE: " + average)
f.write(average + " | ")
f.flush()
f.write("\n")
f.flush()
f.close()
| mit | -1,111,946,970,744,045,000 | 31.892308 | 116 | 0.472404 | false |
ajenhl/eats | server/eats/tests/test_name_form.py | 1 | 3009 | # -*- coding: utf-8 -*-
from django.test import TestCase
from eats.lib.name_form import abbreviate_name, asciify_name, create_name_forms, demacronise_name, substitute_ascii, unpunctuate_name
class NameFormTestCase (TestCase):
def test_abbreviate_name (self):
data = (
('en', u'Smith and Smith', u'Smith & Smith'),
)
for language_code, original, expected in data:
actual = abbreviate_name(original, language_code)
self.assertEqual(actual, expected)
def test_asciify_name (self):
"""Tests that a name is properly converted to ASCII."""
data = (
('Alan Smith', u'Alan Smith'),
(u'François', u'Francois'),
(u'Ægypt', u'AEgypt'),
(u'Encyclopædia Brittanica', u'Encyclopaedia Brittanica'),
(u'Œdipus', u'OEdipus'),
(u'Schloß', u'Schloss'),
(u'Hawaiʻi', u"Hawai'i"),
(u'Paradiſe Loſt', u'Paradise Lost'),
(u'Māori', u'Maori'),
(u'War’s End', u"War's End"),
)
for original, expected in data:
actual = asciify_name(original)
self.assertEqual(actual, expected)
def test_create_forms (self):
data = (
(u'Māori', 'mi', 'Latn', set((u'Maori', u'Maaori', u'Māori'))),
(u'François', 'fr', 'Latn', set((u'François', u'Francois'))),
(u'A. Smith', None, None, set((u'A. Smith', u'A Smith'))),
)
for original, language_code, script_code, expected in data:
actual = create_name_forms(original, language_code, script_code)
self.assertEqual(actual, expected)
    def test_demacronise_name (self):
data = (
(u'Māori', u'Maaori'),
)
for original, expected in data:
actual = demacronise_name(original)
self.assertEqual(actual, expected)
def test_substitute_ascii (self):
data = (
(u'Alan Smith', u'Alan Smith'),
(u'Ægypt', u'AEgypt'),
(u'Encyclopædia Brittanica', u'Encyclopaedia Brittanica'),
(u'Œdipus', u'OEdipus'),
(u'Schloß', u'Schloss'),
(u'Hawaiʻi', u"Hawai'i"),
(u'Paradiſe Loſt', u'Paradise Lost'),
)
for original, expected in data:
actual = substitute_ascii(original)
self.assertEqual(actual, expected)
def test_unpunctuate_name (self):
data = (
(u'Alan Smith', u'Alan Smith'),
(u'A. Smith', u'A Smith'),
(u'Smith, Alan', u'Smith Alan'),
(u'Middle-earth', u'Middleearth'),
(u"War's End", u'Wars End'),
(u'War’s End', u'Wars End'),
(u'Never say never (again)', u'Never say never again'),
)
for original, expected in data:
actual = unpunctuate_name(original)
self.assertEqual(actual, expected)
| gpl-3.0 | 731,654,928,201,013,500 | 36.3 | 133 | 0.531166 | false |
ray-project/ray | release/xgboost_tests/workloads/tune_32x4.py | 1 | 1835 | """Moderate Ray Tune run (32 trials, 4 actors).
This training run will start 32 Ray Tune trials, each starting 4 actors.
The cluster comprises 32 nodes.
Test owner: krfricke
Acceptance criteria: Should run through and report final results, as well
as the Ray Tune results table. No trials should error. All trials should
run in parallel.
"""
from collections import Counter
import json
import os
import time
import ray
from ray import tune
from xgboost_ray import RayParams
from ray.util.xgboost.release_test_util import train_ray
def train_wrapper(config, ray_params):
train_ray(
path="/data/classification.parquet",
num_workers=4,
num_boost_rounds=100,
num_files=64,
regression=False,
use_gpu=False,
ray_params=ray_params,
xgboost_params=config,
)
if __name__ == "__main__":
search_space = {
"eta": tune.loguniform(1e-4, 1e-1),
"subsample": tune.uniform(0.5, 1.0),
"max_depth": tune.randint(1, 9)
}
ray.init(address="auto")
ray_params = RayParams(
elastic_training=False,
max_actor_restarts=2,
num_actors=4,
cpus_per_actor=1,
gpus_per_actor=0)
start = time.time()
analysis = tune.run(
tune.with_parameters(train_wrapper, ray_params=ray_params),
config=search_space,
num_samples=32,
resources_per_trial=ray_params.get_tune_resources())
taken = time.time() - start
result = {
"time_taken": taken,
"trial_states": dict(
Counter([trial.status for trial in analysis.trials]))
}
test_output_json = os.environ.get("TEST_OUTPUT_JSON",
"/tmp/tune_32x4.json")
with open(test_output_json, "wt") as f:
json.dump(result, f)
print("PASSED.")
| apache-2.0 | 452,494,953,157,767,740 | 24.486111 | 73 | 0.621253 | false |
cdapio/website | scripts/generate-videos/main.py | 1 | 4086 | #!/usr/bin/python
# Copyright © 2015-2019 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
import json
from youtube import API
MAX_RESULTS = 50
CHANNEL_ID='UCfkRcekMTa5GA2DdNKba7Jg'
api = None
def search_videos(page_token):
return api.get('search', part='id,snippet', channelId=CHANNEL_ID, maxResults=MAX_RESULTS, pageToken=page_token, type='video')
def video_preview(video):
preview_size = ['maxres', 'high', 'medium', 'standard', 'default']
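    # pick the highest-resolution thumbnail the API provided, falling back in order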
thumbnails = video['snippet']['thumbnails']
preview = ''
for size in preview_size:
if size in thumbnails:
preview = thumbnails[size]['url']
break
return preview
def extract_video_data(video):
video_data = {}
video_data['videoId'] = video['id']['videoId']
video_data['title'] = video['snippet']['title']
video_data['description'] = video['snippet']['description']
video_data['preview'] = video_preview(video)
return video_data
def fetch_videos():
    all_videos = []
    page_token = ''
    while page_token is not None:
        response = search_videos(page_token)
        # Collect results from every page, including the final page that
        # carries no nextPageToken (the original code dropped those items).
        all_videos = all_videos + list(map(extract_video_data, response['items']))
        if 'nextPageToken' in response:
            page_token = response['nextPageToken']
        else:
            page_token = None
    return all_videos
def get_original_videos(path):
try:
with open(path) as video_file:
return json.load(video_file)
except:
print('File not found: %s. Will create new one.' % path)
return {
'videos': []
}
def merge_videos(original_videos, youtube_videos, visible, update, update_props):
props = update_props.split(',')
marked = []
for video in youtube_videos:
matched_video = next((v for v in original_videos['videos'] if v['videoId'] == video['videoId']), None)
if matched_video is None:
marked = [video['videoId']] + marked
video['visible'] = visible
original_videos['videos'] = [video] + original_videos['videos']
print('Added new video:\n Link: https://www.youtube.com/watch?v=%s \n Title: %s \n' % (video['videoId'], video['title']) )
else:
marked = [video['videoId']] + marked
if update is not False:
for prop in props:
matched_video[prop] = video[prop]
original_videos['videos'] = list(filter(lambda v: v['videoId'] in marked, original_videos['videos']))
return original_videos
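# Illustrative call (not part of the original script; values are placeholders):
# merge freshly fetched videos into the saved list, keep new entries hidden,
# and only refresh the title and preview of videos that are already known.
#
#     merged = merge_videos(original_videos, youtube_videos, visible=False,
#                           update=True, update_props='title,preview')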
def save_videos(data, file_path):
with open(file_path, 'w') as outfile:
json.dump(data, outfile, indent=2)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--output', help='Absolute path to output file. Output file can exist.', required=True)
parser.add_argument('-k', '--api-key', help='Youtube API key.', required=True)
parser.add_argument('-v', '--visible', help='Append new videos as visible.', default=False)
parser.add_argument('-u', '--update', help='Update video in output file if it exists.', default=False)
    parser.add_argument('--update-attributes', help='Comma separated list of attributes allowed to update. Works only when --update flag is true', default='description,title,preview')
args = parser.parse_args()
global api
api = API(api_key=args.api_key, client_secret='', client_id='')
original_videos = get_original_videos(args.output)
youtube_videos = fetch_videos()
merged_videos = merge_videos(original_videos, youtube_videos, args.visible, args.update, args.update_attributes)
save_videos(merged_videos, args.output)
if __name__ == '__main__':
main()
| apache-2.0 | 3,500,132,186,289,563,600 | 35.150442 | 204 | 0.682742 | false |
morgangalpin/duckomatic | tests/utils/test_subscriber.py | 1 | 1612 | # -*- coding: utf-8 -*-
from pytest import raises
# The parametrize function is generated, so this doesn't work:
#
# from pytest.mark import parametrize
#
import pytest
parametrize = pytest.mark.parametrize
# from duckomatic import metadata
from duckomatic.utils.subscriber import (Subscriber, NoDataException)
class TestSubscriber(object):
@parametrize('id_prefix', [
'',
'test123'
])
def test_init(self, id_prefix):
subscriber = Subscriber(id_prefix)
assert type(subscriber) == Subscriber
assert subscriber.get_id().startswith(id_prefix)
@parametrize('topic, data', [
('', {}),
('test123', {'test': 123})
])
def test_update_and_simple_get_update(self, topic, data):
subscriber = Subscriber()
subscriber.update(topic, data)
(actual_topic, actual_data) = subscriber.get_update()
assert actual_topic == topic
assert actual_data == data
@parametrize('timeout', [
(0)
])
def test_get_update_with_timeout(self, timeout):
subscriber = Subscriber()
with raises(NoDataException):
subscriber.get_update(timeout=timeout)
# Should not get here as an exception should be raised.
assert False
# Exception was raised correctly.
assert True
@parametrize('id_prefix', [
'',
'test123'
])
def test_get_id_is_unique(self, id_prefix):
subscriber1 = Subscriber(id_prefix)
subscriber2 = Subscriber(id_prefix)
assert subscriber1.get_id() != subscriber2.get_id()
| gpl-3.0 | -1,296,764,209,724,166,700 | 27.785714 | 69 | 0.620347 | false |
scottwittenburg/web-hpc-manager | python/webserver/SecureRemoteLauncher.py | 1 | 5903 |
import paramiko
import select
import argparse
import sys
import threading
import uuid
import tempfile
import os
import getpass
from ForwardSshTunnel import ForwardSshTunnel
class SecureRemoteLauncher(object) :
#-------------------------------------------------------------------------
# SecureRemoteLauncher constructor
#-------------------------------------------------------------------------
def __init__(self, mapFilePath) :
self.mappingFilePath = mapFilePath
self.sessionMap = {}
#-------------------------------------------------------------------------
# Create a port forwarding ssh tunnel
#-------------------------------------------------------------------------
def createTunnelOnRemotePort(self, transport, host, port) :
print 'Create a tunnel on remote port ' + str(port)
try:
tunnel = ForwardSshTunnel(port, # local port
host, # remote host
port, # remote port
transport) # SSHClient Transport object
tunnel.establishForwardTunnel()
except KeyboardInterrupt:
print 'C-c: Port forwarding stopped.'
except Exception as inst :
print 'Encountered exception in forwarding'
print inst
print 'Returning from createTunnelOnRemotePort()'
return tunnel
#-------------------------------------------------------------------------
# Rewrite the mapping file with the current session map
#-------------------------------------------------------------------------
def updateMappingFile(self) :
with open(self.mappingFilePath, 'w') as outfile :
for session in self.sessionMap :
outfile.write(session + ' ' + self.sessionMap[session] + '\n')
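    # The mapping file written above ends up with one "<session-uuid> <host:port>"
    # entry per line, for example (illustrative values only):
    #
    #     1b4e28ba-2fa1-11d2-883f-0016d3cca427 localhost:9010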
#-------------------------------------------------------------------------
# Wait for process to exit so that when it does we can end the tunnel
# thread and then end this waiting thread by returning from this
# function
#-------------------------------------------------------------------------
def waitOnChannelExitStatus(self, channel, sessionId, tunnel) :
# This call will block until channel process has finished
processReturnVal = channel.recv_exit_status()
# Now make sure to kill the thread which is running the port
# forwarding ssh tunnel
print 'Channel exit status ready, process has terminated'
if tunnel is not None :
print 'Attempting to end tunnel request loop...'
tunnel.terminateRequestLoop()
# Next remove this session from the map
del self.sessionMap[sessionId]
# Finally rewrite the map file with the updated session info
self.updateMappingFile()
print 'Returning from wait thread'
#-------------------------------------------------------------------------
# Try to start pvweb on remote machine until we successfully start on a
# port.
#-------------------------------------------------------------------------
def startPvwebOnOpenPortInRange(self, transport, remoteHost, fileToLoad, portRange) :
#port = random.randrange(portRange[0], portRange[1], 1)
port = 9010
# Works on mayall
#cmdFirstPart = 'export LD_LIBRARY_PATH=/opt/python-2.7.3/lib ; export DISPLAY=:0.0 ; /home/kitware/projects/ParaView/build-make-gpu/bin/pvpython /home/kitware/projects/ParaView/build-make-gpu/lib/site-packages/paraview/web/pv_web_visualizer.py --data-dir /home/kitware/Documents/haloregions --port '
# Works on solaris
cmdFirstPart = 'export DISPLAY=:0.0 ; /home/scott/projects/ParaView/build-make-gpu/bin/pvpython /home/scott/projects/ParaView/build-make-gpu/lib/site-packages/paraview/web/pv_web_visualizer.py --data-dir /home/scott/Documents/cosmodata/haloregions --port '
started = False
while started == False :
cmd = cmdFirstPart + str(port) + ' --load-file ' + fileToLoad + ' -f'
channel = transport.open_session()
channel.exec_command(cmd)
characters = ''
while True:
if channel.exit_status_ready():
break
rl, wl, xl = select.select([channel],[],[],0.0)
if len(rl) > 0 :
characters = channel.recv(1024)
if 'CannotListenError' in characters or 'Address already in use' in characters :
print 'port ' + str(port) + ' is already being used'
elif ('tarting on ' + str(port)) in characters:
print 'Ahh, we have finally started on port ' + str(port)
# write the mapping file here
sessionId = str(uuid.uuid1())
connectStr = 'localhost:' + str(port)
self.sessionMap[sessionId] = connectStr
self.updateMappingFile()
tunnel = self.createTunnelOnRemotePort(transport, remoteHost, port)
print 'Have now returned from readyCallback() !!!!'
t = threading.Thread(target=self.waitOnChannelExitStatus,
args=[channel, sessionId, tunnel],
kwargs={})
t.start()
print 'wait thread started, returning from startPvwebOnOpenPortInRange()'
return (sessionId, port)
started = True
if started == False :
#port = random.randrange(portRange[0], portRange[1], 1)
port += 1
print 'Returning from startPvwebOnOpenPortInRange()'
| bsd-3-clause | 8,105,040,297,267,630,000 | 42.725926 | 308 | 0.509571 | false |
swift-lang/swift-e-lab | parsl/tests/test_python_apps/test_at_scale.py | 1 | 1546 | import argparse
import time
import pytest
import parsl
from parsl.app.app import App
from parsl.tests.configs.local_threads import config
parsl.clear()
parsl.load(config)
@App('python')
def double(x):
return x * 2
def plain_double(x):
return x * 2
@pytest.mark.skip('not asserting anything')
def test_plain(n=2):
start = time.time()
x = []
for i in range(0, n):
x.extend([plain_double(i)])
print(sum(x))
ttc = time.time() - start
print("Total time : ", ttc)
return ttc
@pytest.mark.skip('not asserting anything')
def test_parallel(n=2):
start = time.time()
x = []
for i in range(0, n):
x.extend([double(i)])
print(sum([i.result() for i in x]))
ttc = time.time() - start
print("Total time : ", ttc)
return ttc
@pytest.mark.skip('not asserting anything')
def test_parallel2(n=2):
start = time.time()
x = []
for i in range(0, n):
x.extend([double(i)])
ttc = time.time() - start
print("Total time : ", ttc)
return ttc
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--count", default="10",
help="Count of apps to launch")
parser.add_argument("-d", "--debug", action='store_true',
help="Count of apps to launch")
args = parser.parse_args()
if args.debug:
parsl.set_stream_logger()
x = test_plain(int(args.count))
x = test_parallel(int(args.count))
x = test_parallel2(int(args.count))
| apache-2.0 | -1,594,028,241,758,037,000 | 18.325 | 61 | 0.584735 | false |
RealTimeWeb/wikisite | MoinMoin/macro/OrphanedPages.py | 1 | 1487 | # -*- coding: iso-8859-1 -*-
"""
MoinMoin - OrphanedPages Macro
@copyright: 2001 Juergen Hermann <[email protected]>
@license: GNU GPL, see COPYING for details.
"""
Dependencies = ["pages"]
def macro_OrphanedPages(macro):
_ = macro.request.getText
if macro.request.mode_getpagelinks: # prevent recursion
return ''
if macro.request.isSpiderAgent: # reduce bot cpu usage
return ''
# delete all linked pages from a dict of all pages
pages = macro.request.rootpage.getPageDict()
orphaned = {}
orphaned.update(pages)
for page in pages.values():
links = page.getPageLinks(macro.request)
for link in links:
if link in orphaned:
del orphaned[link]
result = []
f = macro.formatter
if not orphaned:
result.append(f.paragraph(1))
result.append(f.text(_("No orphaned pages in this wiki.")))
result.append(f.paragraph(0))
else:
# return a list of page links
orphanednames = orphaned.keys()
orphanednames.sort()
result.append(f.number_list(1))
for name in orphanednames:
if not name:
continue
result.append(f.listitem(1))
result.append(f.pagelink(1, name, generated=1))
result.append(f.text(name))
result.append(f.pagelink(0, name))
result.append(f.listitem(0))
result.append(f.number_list(0))
return ''.join(result)
| apache-2.0 | 3,464,022,933,834,179,600 | 28.156863 | 67 | 0.599193 | false |
zepto/musio-python2 | examples/musioencode.py | 1 | 8751 | #!/usr/bin/env python2
# vim: sw=4:ts=4:sts=4:fdm=indent:fdl=0:
# -*- coding: UTF8 -*-
#
# Test the vorbis encoder.
# Copyright (C) 2013 Josiah Gordon <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" Test the vorbis encoder.
"""
from __future__ import print_function
def main(args):
""" Encode args['filename'] times.
"""
from os.path import basename as os_basename
from os.path import isfile as os_isfile
from os.path import splitext as os_splitext
from sys import stdin as sys_stdin
from sys import stdout as sys_stdout
from select import select
from time import sleep as time_sleep
from termios import tcgetattr, tcsetattr, ECHO, ICANON, TCSANOW
from termios import VMIN, VTIME
from musio import open_file, open_device
if args['debug']:
from musio import io_util
io_util.DEBUG = True
filename = args['filename']
output = os_splitext(os_basename(filename))[0] + '.' + args['filetype']
output_bytes = output.encode('utf-8', 'surrogateescape')
output_printable = output_bytes.decode('utf-8', 'ignore')
if os_isfile(output):
if raw_input("Overwrite %s (y/n): " % output_printable).lower().startswith('n'):
return
# Save the current terminal state.
normal = tcgetattr(sys_stdin)
quiet = tcgetattr(sys_stdin)
# Do not wait for key press and don't echo.
quiet[3] &= ~(ECHO | ICANON)
quiet[6][VMIN] = 0
quiet[6][VTIME] = 0
# Set the new terminal state.
tcsetattr(sys_stdin, TCSANOW, quiet)
# Value returned to tell the calling function whether to quit or
# not.
quit_val = True
if args['filetype'].lower() == 'ogg':
        quality = args['quality'] / 10.0 if args['quality'] in range(-1, 11) else 0.5
elif args['filetype'].lower() == 'mp3':
quality = args['quality'] if args['quality'] in range(0, 10) else 2
try:
with open_file(**args) as in_file:
in_file_title = in_file._info_dict.get('title',
in_file._info_dict['name'])
comment_dict = {'title': in_file_title}
comment_dict.update(in_file._info_dict)
for i in ['title', 'artist', 'album', 'year', 'comment',
'track', 'genre']:
if args.get(i, ''):
comment_dict[i] = args[i]
with open_file(output, 'w', depth=in_file.depth, rate=in_file.rate,
channels=in_file.channels, quality=quality,
comment_dict=comment_dict) as out_file:
in_file.loops = 0
if args['show_position']:
filename_bytes = filename.encode('utf-8', 'surrogateescape')
filename_printable = filename_bytes.decode('utf-8', 'ignore')
print("Encoding: %s to %s" % (filename, output))
print(in_file)
for data in in_file:
if args['show_position']:
if in_file.length > 0:
# Calculate the percentage played.
pos = (in_file.position * 100) / float(in_file.length)
# Make the string.
pos_str = 'Position: %.2f%%' % pos
# Find the length of the string.
format_len = len(pos_str) + 2
# Print the string and after erasing the old
# one using ansi escapes.
print('\033[%dD\033[K%s' % (format_len, pos_str),
end='')
sys_stdout.flush()
out_file.write(data)
# Check for input.
r, _, _ = select([sys_stdin], [], [], 0)
# Get input if there was any otherwise continue.
if r:
command = r[0].readline().lower()
# Handle input commands.
if command.startswith('q'):
quit_val = False
break
elif command == '\n':
break
except Exception as err:
print("Error: %s" % err)
raise(err)
finally:
# Re-set the terminal state.
tcsetattr(sys_stdin, TCSANOW, normal)
if args['show_position']:
print("\nDone.")
return quit_val
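# Example invocation (illustrative only; file names are placeholders):
#
#     musioencode.py -f ogg -e 5 -ta "Some Artist" -i first.flac second.wav
#
# encodes both inputs to Ogg Vorbis at quality 5 and tags the artist field.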
if __name__ == '__main__':
from argparse import ArgumentParser
parser = ArgumentParser(description="Musio encoder")
parser.add_argument('-e', '--quality', action='store', default=-10, type=int,
help='Encoding quality (1-10)', dest='quality')
parser.add_argument('-t', '--track', action='store', default=0, type=int,
help='Track to play', dest='track')
parser.add_argument('-tt', '--title', action='store', default='',
help='id3 Title tag', dest='title')
parser.add_argument('-ta', '--artist', action='store', default='',
help='id3 Artist tag', dest='artist')
parser.add_argument('-tl', '--album', action='store', default='',
help='id3 Album tag', dest='album')
parser.add_argument('-ty', '--year', action='store', default='',
help='id3 Year tag', dest='year')
parser.add_argument('-tc', '--comment', action='store', default='',
help='id3 Comment tag', dest='comment')
parser.add_argument('-tr', '--id3track', action='store', default='',
help='id3 Track tag', dest='track')
parser.add_argument('-tg', '--genre', action='store', default=0,
type=int, help='id3 Genre tag', dest='genre')
parser.add_argument('-p', '--path', action='store', default=[],
type=lambda a: a.split(','), help='Codec path',
dest='mod_path')
parser.add_argument('-b', '--blacklist', action='store', default=[],
type=lambda a: a.split(','), help='Blacklist a Codec',
dest='blacklist')
parser.add_argument('-s', '--soundfont', action='store',
default='/usr/share/soundfonts/fluidr3/FluidR3GM.SF2',
help='Soundfont to use when playing midis',
dest='soundfont')
parser.add_argument('-f', '--filetype', action='store',
default='ogg',
help='The output format',
dest='filetype')
parser.add_argument('-q', '--quiet', action='store_false', default=True,
help='Don\'t show playback percentage.',
dest='show_position')
parser.add_argument('-lg', '--list-genres', action='store_true',
default=False,
help='Print a list of valid genres and exit.',
dest='list_genres')
parser.add_argument('-d', '--debug', action='store_true', default=False,
help='Enable debug error messages.',
dest='debug')
parser.add_argument('-i', '--input', dest='input_filename', nargs='+')
args = parser.parse_args()
if args.list_genres:
# Print out valid genres.
from musio.mp3_file import get_genre_list
print("ID\tGenre")
for genre_id, genre in enumerate(get_genre_list()):
if genre:
print("%s\t%s" % (genre_id, genre))
elif args.input_filename:
# Copy the args dict to use later
args_dict = args.__dict__
# Pop the filenames list out of the args dict.
filenames = args_dict.pop('input_filename')
# Loop over all the filenames playing each one.
for filename in filenames:
# Pass only one filename to the main function.
args_dict['filename'] = filename
if not main(args_dict):
break
| gpl-3.0 | 5,520,849,006,102,449,000 | 40.278302 | 88 | 0.531368 | false |
locke105/mc-watchdog | watchdog.py | 1 | 3354 | #!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Mathew Odden <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import shlex
import socket
import subprocess
import time
import mc_info
logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)
SERVER_CMD = 'java -Xms512M -Xmx1G -jar ftbserver.jar'
# interval in seconds between server status checks
POLL_INTERVAL = 30
class Service(object):
def __init__(self, start_cmd):
self.start_cmd = start_cmd
self.process = None
def run(self):
"""Begin main monitoring loop of service.
Starts the service if not already running.
"""
try:
while True:
if not self.check_server():
if not self._process_dead():
LOG.warning("Server dead but process still around. "
"Attempting to kill process...")
self.stop()
LOG.warning("Server process dead. Restarting...")
self.start()
# wait awhile for next poll
time.sleep(POLL_INTERVAL)
except:
# catch keyboard interrupt
self.stop()
def start(self):
args = shlex.split(self.start_cmd)
LOG.info("Starting service with command: %s" %
' '.join(args))
self.process = subprocess.Popen(args)
def _process_dead(self):
if self.process is None:
return True
self.process.poll()
if self.process.returncode is not None:
return True
return False
def stop(self):
"""Stop the underlying service process."""
# no process running
if self.process is None:
return
self.process.poll()
if self.process.returncode is not None:
return self.process.returncode
# send first stop signal
LOG.warning("Sending SIGTERM...")
self.process.terminate()
time.sleep(15)
self.process.poll()
if self.process.returncode is not None:
return self.process.returncode
# send kill signal and wait
LOG.warning("Process still running. Sending SIGKILL...")
self.process.kill()
self.process.wait()
return self.process.returncode
def check_server(self):
try:
sinfo = mc_info.get_info(host='localhost', port=35565)
except socket.error:
LOG.warning("Couldn't get server info!")
return False
LOG.debug("Server info: %s" % sinfo)
return True
if __name__ == '__main__':
LOG = logging.getLogger('watchdog')
server = Service(SERVER_CMD)
server.run()
| apache-2.0 | -536,500,030,523,389,630 | 27.666667 | 77 | 0.596899 | false |
kallimachos/template | doc/conf.py | 1 | 7434 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Configuration file for template documentation."""
import os
import sys
try:
import sphinx_rtd_theme
except ImportError:
sphinx_rtd_theme = None
try:
from sphinxcontrib import spelling
except ImportError as e:
print(e)
spelling = None
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(0, os.path.abspath('../template/'))
try:
from template import __version__
except ImportError:
print('Cannot load version.')
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
]
if spelling is not None:
extensions.append('sphinxcontrib.spelling')
# Add any paths that contain templates here, relative to this directory.
# templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'template'
copyright = '2019, Brian Moss'
author = 'Brian Moss'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
try:
version = __version__
except NameError:
version = '0.1'
# The full version, including alpha/beta/rc tags.
# release = '0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'README.rst']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
# todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
if sphinx_rtd_theme:
html_theme = 'sphinx_rtd_theme'
else:
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_context = {
'css_files': [
'_static/theme_overrides.css', # overrides wide tables in RTD theme
],
}
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'doc'
# this will change the 'paragraph' character to '#'
html_add_permalinks = '#'
| gpl-3.0 | -856,876,936,687,386,000 | 31.043103 | 79 | 0.706618 | false |
Jaiz909/catscrape | catscrape/catscrape_main.py | 1 | 5125 | import logging
import re
from imgurpython import ImgurClient
import os
import sys
import urllib
try:
    # Python 3 needs the request submodule imported explicitly for urlretrieve.
    import urllib.request
except ImportError:
    # Python 2 exposes urllib.urlretrieve directly.
    pass
from threading import Thread
try:
import Queue as queue
except ImportError:
import queue as queue
try:
import configparser
except ImportError:
import ConfigParser as configparser
logger = logging.getLogger('catscrape')
class ThreadPool(object):
    """A fixed-size pool of worker threads that drain a shared queue of download jobs."""

    def __init__(self, size):
self.threads = list()
self.work_queue = queue.Queue()
for _ in range(size):
self.threads.append(Thread(target=self.do_work))
def add_work(self, args):
self.work_queue.put(args)
def do_work(self):
try:
while True:
args = self.work_queue.get(False)
download_image(**args)
self.work_queue.task_done()
except queue.Empty:
# Nothing left to do
pass
def start(self):
for t in self.threads:
t.start()
# Wait for all tasks in the queue to finish
self.work_queue.join()
# All threads should be done
for t in self.threads:
t.join()
def download_image(url, directory, index=0):
"""
Download an image from a url and save it into a directory, using index
to preserve the order in the filename.
index: The image's index within the album.
url: The URL to the image to be downloaded.
directory: The path to the directory in which to save the image.
"""
file_name = re.match(r'.*\/(?P<file_name>.*)', url).group('file_name')
logger.debug("Downloading %s" % (url))
if sys.version_info >= (3, 0):
urllib.request.urlretrieve(url, os.path.join(directory, '%d_%s' % (index, file_name)))
else:
urllib.urlretrieve(url, os.path.join(directory, '%d_%s' % (index, file_name)))
logger.info("Downloaded %s" % (url))
def load_config(config_path='~/.catscrape/catscrape.conf'):
"""
Load a configuration file. If a config file does not exist, create a default one
and throw an exception.
config_path: Path to the configuration file. If this does not exist, create a default one.
returns: A dictionary of config options.
"""
path = os.path.expanduser(config_path)
conf_dir = os.path.dirname(path)
imgur_section = 'ImgurAPI'
default_client_id = 'REPLACE_WITH_CLIENT_ID'
default_client_secret = 'REPLACE_WITH_CLIENT_SECRET'
config_parser = configparser.RawConfigParser()
try:
# If the config directory or config file do not exist, create them with default values.
if not os.path.isdir(conf_dir):
os.makedirs(conf_dir)
if not os.path.isfile(path):
            with open(path, 'w') as f:  # text mode so configparser.write works on Python 2 and 3
# If we don't already have a config. Write a default one.
config_parser.add_section(imgur_section)
config_parser.set(imgur_section, 'client_id', default_client_id)
config_parser.set(imgur_section, 'client_secret', default_client_secret)
config_parser.write(f)
except OSError as e:
logger.exception("Failed to create configuration directory in %s" % (conf_dir), e)
raise e
with open(path, "r") as f:
# Read the config.
config_parser.readfp(f)
config = {'client_id': config_parser.get(imgur_section, 'client_id'),
'client_secret': config_parser.get(imgur_section, 'client_secret')}
if config['client_id'] == default_client_id or config['client_secret'] == default_client_secret:
logger.exception("Default config detected. Please updated the config file at %s" % (path))
raise Exception("Default config")
# Return the config
return config
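# For reference, the default file written by load_config() contains the INI
# section below; both values must be replaced with real Imgur API credentials
# before downloads will work:
#
#     [ImgurAPI]
#     client_id = REPLACE_WITH_CLIENT_ID
#     client_secret = REPLACE_WITH_CLIENT_SECRET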
def download_albums(album_list, output_directory, num_threads, config_path):
"""
Download albums from album_list to output_directory.
album_list: List containing album urls to download.
output_directory: Directory in which to save the downloaded albums.
num_threads: Number of concurrent downloads to perform.
config_path: Path to an alternate config file.
"""
# Load the configuration from specified path if set.
if config_path:
config = load_config(config_path)
else:
config = load_config()
logger.debug("Connecting to Imgur")
imgur_client = ImgurClient(config['client_id'], config['client_secret'])
pool = ThreadPool(num_threads)
for album_url in album_list:
logger.debug('Downloading images from %s' % (album_url))
album_id = re.match(r'.*\/a\/(?P<id>.*)', album_url).group('id')
out_dir = os.path.join(output_directory, album_id)
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
logger.info('Downloading album %s' % (album_id))
images = imgur_client.get_album_images(album_id)
for index, image in enumerate(images):
pool.add_work({'index':index, 'url':image.link, 'directory':out_dir})
# Start the thread pool. Will block until all jobs are complete.
pool.start()
| mit | 2,054,281,727,529,545,000 | 33.166667 | 100 | 0.628098 | false |
lino-framework/tera | lino_tera/lib/invoicing/models.py | 1 | 1528 | # -*- coding: UTF-8 -*-
# Copyright 2016-2019 Rumma & Ko Ltd
# License: GNU Affero General Public License v3 (see file COPYING for details)
"""The :xfile:`models.py` module for :mod:`lino_voga.lib.invoicing`.
"""
from __future__ import unicode_literals
from lino_xl.lib.invoicing.models import *
from lino.api import _
class Plan(Plan):
"""An extended invoicing plan.
.. attribute:: course
If this field is nonempty, select only enrolments of that
given course.
"""
class Meta(Plan.Meta):
app_label = 'invoicing'
abstract = dd.is_abstract_model(__name__, 'Plan')
course = dd.ForeignKey('courses.Course', blank=True, null=True)
Plans.detail_layout = """user area today min_date max_date
partner course
invoicing.ItemsByPlan
"""
# from lino.modlib.users.mixins import StartPlan
from lino_xl.lib.invoicing.actions import StartInvoicing
class StartInvoicingForCourse(StartInvoicing):
"""Start an invoicing plan for this course.
This is installed onto the :class:`courses.Course
<lino_voga.lib.courses.models.Course>` model as `start_invoicing`.
"""
show_in_bbar = True
select_rows = True
def get_options(self, ar):
course = ar.selected_rows[0]
assert isinstance(course, rt.models.courses.Course)
return dict(course=course, partner=None)
@dd.receiver(dd.pre_analyze)
def install_start_action(sender=None, **kwargs):
rt.models.courses.Course.start_invoicing = StartInvoicingForCourse()
| bsd-2-clause | -857,604,811,259,924,500 | 23.645161 | 78 | 0.684555 | false |
ahri/pycurlbrowser | tests/test_curl.py | 1 | 1329 | from unittest import TestCase
from pycurlbrowser import CurlBackend
from datetime import timedelta
import pycurl
class TestBackendApi(TestCase):
"""
Test that the HttpBackend API is adhered-to.
"""
def setUp(self):
self.backend = CurlBackend()
self.backend._curl.setopt(pycurl.CONNECTTIMEOUT, 5)
self.backend._curl.setopt(pycurl.TIMEOUT, 10)
def visit(self):
url = 'http://www.reddit.com/'
method = 'GET'
data = None
headers = None
auth = None
follow = None
agent = "foo"
retries = 1
debug = None
self.backend.go(url, method, data, headers, auth, follow, agent, retries, debug)
return url
def test_go(self):
_ = self.visit()
def test_src(self):
_ = self.visit()
self.assertTrue(len(self.backend.src) > 0)
def test_url(self):
url = self.visit()
self.assertEqual(self.backend.url, url)
def test_roundtrip(self):
_ = self.visit()
self.assertTrue(self.backend.roundtrip > timedelta(0))
def test_http_code(self):
_ = self.visit()
self.assertEqual(self.backend.http_code, 200)
def test_headers(self):
_ = self.visit()
        self.assertTrue(len(self.backend.headers.keys()) > 0)
| agpl-3.0 | 1,126,251,681,532,339,600 | 25.058824 | 88 | 0.588412 | false |
NORDUnet/opennsa | opennsa/config.py | 1 | 11031 | """
Configuration reader and defaults.
Author: Henrik Thostrup Jensen <[email protected]>
Copyright: NORDUnet (2011)
"""
import os
import configparser
from opennsa import constants as cnt
# defaults
DEFAULT_CONFIG_FILE = '/etc/opennsa.conf'
DEFAULT_LOG_FILE = '/var/log/opennsa.log'
DEFAULT_TLS = 'true'
DEFAULT_TOPOLOGY_FILE = '/usr/local/share/nsi/topology.owl'
DEFAULT_TCP_PORT = 9080
DEFAULT_TLS_PORT = 9443
DEFAULT_VERIFY = True
DEFAULT_CERTIFICATE_DIR = '/etc/ssl/certs' # This will work on most modern linux distros
# config blocks and options
BLOCK_SERVICE = 'service'
BLOCK_DUD = 'dud'
BLOCK_JUNIPER_EX = 'juniperex'
BLOCK_JUNIPER_VPLS = 'junipervpls'
BLOCK_FORCE10 = 'force10'
BLOCK_BROCADE = 'brocade'
BLOCK_NCSVPN = 'ncsvpn'
BLOCK_PICA8OVS = 'pica8ovs'
BLOCK_JUNOSMX = 'junosmx'
BLOCK_JUNOSEX = 'junosex'
BLOCK_JUNOSSPACE = 'junosspace'
BLOCK_OESS = 'oess'
BLOCK_CUSTOM_BACKEND = 'custombackend'
# service block
DOMAIN = 'domain' # mandatory
NETWORK_NAME = 'network' # legacy, used to be mandatory
LOG_FILE = 'logfile'
HOST = 'host'
PORT = 'port'
TLS = 'tls'
REST = 'rest'
NRM_MAP_FILE = 'nrmmap'
PEERS = 'peers'
POLICY = 'policy'
PLUGIN = 'plugin'
SERVICE_ID_START = 'serviceid_start'
# database
DATABASE = 'database' # mandatory
DATABASE_USER = 'dbuser' # mandatory
DATABASE_PASSWORD = 'dbpassword' # can be none (os auth)
DATABASE_HOST = 'dbhost' # can be none (local db)
# tls
KEY = 'key' # mandatory, if tls is set
CERTIFICATE = 'certificate' # mandatory, if tls is set
CERTIFICATE_DIR = 'certdir' # mandatory (but dir can be empty)
VERIFY_CERT = 'verify'
ALLOWED_HOSTS = 'allowedhosts' # comma seperated list
# generic stuff
_SSH_HOST = 'host'
_SSH_PORT = 'port'
_SSH_HOST_FINGERPRINT = 'fingerprint'
_SSH_USER = 'user'
_SSH_PASSWORD = 'password'
_SSH_PUBLIC_KEY = 'publickey'
_SSH_PRIVATE_KEY = 'privatekey'
AS_NUMBER = 'asnumber'
# TODO: Don't do backend specifics for everything, it causes confusion, and doesn't really solve anything
# juniper block - same for mx / ex backends
JUNIPER_HOST = _SSH_HOST
JUNIPER_PORT = _SSH_PORT
JUNIPER_HOST_FINGERPRINT = _SSH_HOST_FINGERPRINT
JUNIPER_USER = _SSH_USER
JUNIPER_SSH_PUBLIC_KEY = _SSH_PUBLIC_KEY
JUNIPER_SSH_PRIVATE_KEY = _SSH_PRIVATE_KEY
# force10 block
FORCE10_HOST = _SSH_HOST
FORCE10_PORT = _SSH_PORT
FORCE10_USER = _SSH_USER
FORCE10_PASSWORD = _SSH_PASSWORD
FORCE10_HOST_FINGERPRINT = _SSH_HOST_FINGERPRINT
FORCE10_SSH_PUBLIC_KEY = _SSH_PUBLIC_KEY
FORCE10_SSH_PRIVATE_KEY = _SSH_PRIVATE_KEY
# Brocade block
BROCADE_HOST = _SSH_HOST
BROCADE_PORT = _SSH_PORT
BROCADE_HOST_FINGERPRINT = _SSH_HOST_FINGERPRINT
BROCADE_USER = _SSH_USER
BROCADE_SSH_PUBLIC_KEY = _SSH_PUBLIC_KEY
BROCADE_SSH_PRIVATE_KEY = _SSH_PRIVATE_KEY
BROCADE_ENABLE_PASSWORD = 'enablepassword'
# Pica8 OVS
PICA8OVS_HOST = _SSH_HOST
PICA8OVS_PORT = _SSH_PORT
PICA8OVS_HOST_FINGERPRINT = _SSH_HOST_FINGERPRINT
PICA8OVS_USER = _SSH_USER
PICA8OVS_SSH_PUBLIC_KEY = _SSH_PUBLIC_KEY
PICA8OVS_SSH_PRIVATE_KEY = _SSH_PRIVATE_KEY
PICA8OVS_DB_IP = 'dbip'
# NCS VPN Backend
NCS_SERVICES_URL = 'url'
NCS_USER = 'user'
NCS_PASSWORD = 'password'
# JUNOS block
JUNOS_HOST = _SSH_HOST
JUNOS_PORT = _SSH_PORT
JUNOS_HOST_FINGERPRINT = _SSH_HOST_FINGERPRINT
JUNOS_USER = _SSH_USER
JUNOS_SSH_PUBLIC_KEY = _SSH_PUBLIC_KEY
JUNOS_SSH_PRIVATE_KEY = _SSH_PRIVATE_KEY
JUNOS_ROUTERS = 'routers'
#Junosspace backend
SPACE_USER = 'space_user'
SPACE_PASSWORD = 'space_password'
SPACE_API_URL = 'space_api_url'
SPACE_ROUTERS = 'routers'
SPACE_CONFIGLET_ACTIVATE_LOCAL = 'configlet_activate_local'
SPACE_CONFIGLET_ACTIVATE_REMOTE = 'configlet_activate_remote'
SPACE_CONFIGLET_DEACTIVATE_LOCAL = 'configlet_deactivate_local'
SPACE_CONFIGLET_DEACTIVATE_REMOTE = 'configlet_deactivate_remote'
# OESS
OESS_URL = 'url'
OESS_USER = 'username'
OESS_PASSWORD = 'password'
OESS_WORKGROUP = 'workgroup'
class ConfigurationError(Exception):
"""
Raised in case of invalid/inconsistent configuration.
"""
class Peer(object):
def __init__(self, url, cost):
self.url = url
self.cost = cost
def readConfig(filename):
cfg = configparser.SafeConfigParser()
cfg.add_section(BLOCK_SERVICE)
cfg.read( [ filename ] )
return cfg
def readVerifyConfig(cfg):
"""
Read a config and verify that things are correct. Will also fill in
default values where applicable.
This is supposed to be used during application creation (before service
start) to ensure that simple configuration errors do not pop up efter
daemonization.
Returns a "verified" config, which is a dictionary.
"""
vc = {}
# Check for deprecated / old invalid stuff
try:
cfg.get(BLOCK_SERVICE, NRM_MAP_FILE)
raise ConfigurationError('NRM Map file should be specified under backend')
except configparser.NoOptionError:
pass
# check / extract
try:
vc[DOMAIN] = cfg.get(BLOCK_SERVICE, DOMAIN)
except configparser.NoOptionError:
raise ConfigurationError('No domain name specified in configuration file (mandatory, see docs/migration)')
try:
cfg.get(BLOCK_SERVICE, NETWORK_NAME)
raise ConfigurationError('Network name no longer used, use domain (see docs/migration)')
except configparser.NoOptionError:
pass
try:
vc[LOG_FILE] = cfg.get(BLOCK_SERVICE, LOG_FILE)
except configparser.NoOptionError:
vc[LOG_FILE] = DEFAULT_LOG_FILE
try:
nrm_map_file = cfg.get(BLOCK_SERVICE, NRM_MAP_FILE)
if not os.path.exists(nrm_map_file):
raise ConfigurationError('Specified NRM mapping file does not exist (%s)' % nrm_map_file)
vc[NRM_MAP_FILE] = nrm_map_file
except configparser.NoOptionError:
vc[NRM_MAP_FILE] = None
try:
vc[REST] = cfg.getboolean(BLOCK_SERVICE, REST)
except configparser.NoOptionError:
vc[REST] = False
try:
peers_raw = cfg.get(BLOCK_SERVICE, PEERS)
vc[PEERS] = [ Peer(purl.strip(), 1) for purl in peers_raw.split('\n') ]
except configparser.NoOptionError:
vc[PEERS] = None
try:
vc[HOST] = cfg.get(BLOCK_SERVICE, HOST)
except configparser.NoOptionError:
vc[HOST] = None
try:
vc[TLS] = cfg.getboolean(BLOCK_SERVICE, TLS)
except configparser.NoOptionError:
vc[TLS] = DEFAULT_TLS
try:
vc[PORT] = cfg.getint(BLOCK_SERVICE, PORT)
except configparser.NoOptionError:
vc[PORT] = DEFAULT_TLS_PORT if vc[TLS] else DEFAULT_TCP_PORT
try:
policies = cfg.get(BLOCK_SERVICE, POLICY).split(',')
for policy in policies:
if not policy in (cnt.REQUIRE_USER, cnt.REQUIRE_TRACE, cnt.AGGREGATOR, cnt.ALLOW_HAIRPIN):
raise ConfigurationError('Invalid policy: %s' % policy)
vc[POLICY] = policies
except configparser.NoOptionError:
vc[POLICY] = []
try:
vc[PLUGIN] = cfg.get(BLOCK_SERVICE, PLUGIN)
except configparser.NoOptionError:
vc[PLUGIN] = None
# database
try:
vc[DATABASE] = cfg.get(BLOCK_SERVICE, DATABASE)
except configparser.NoOptionError:
raise ConfigurationError('No database specified in configuration file (mandatory)')
try:
vc[DATABASE_USER] = cfg.get(BLOCK_SERVICE, DATABASE_USER)
except configparser.NoOptionError:
raise ConfigurationError('No database user specified in configuration file (mandatory)')
try:
vc[DATABASE_PASSWORD] = cfg.get(BLOCK_SERVICE, DATABASE_PASSWORD)
except configparser.NoOptionError:
vc[DATABASE_PASSWORD] = None
try:
vc[DATABASE_HOST] = cfg.get(BLOCK_SERVICE, DATABASE_HOST)
except configparser.NoOptionError:
vc[DATABASE_HOST] = None
try:
vc[SERVICE_ID_START] = cfg.get(BLOCK_SERVICE, SERVICE_ID_START)
except configparser.NoOptionError:
vc[SERVICE_ID_START] = None
# we always extract certdir and verify as we need that for performing https requests
try:
certdir = cfg.get(BLOCK_SERVICE, CERTIFICATE_DIR)
if not os.path.exists(certdir):
raise ConfigurationError('Specified certdir does not exist (%s)' % certdir)
vc[CERTIFICATE_DIR] = certdir
except configparser.NoOptionError:
vc[CERTIFICATE_DIR] = DEFAULT_CERTIFICATE_DIR
try:
vc[VERIFY_CERT] = cfg.getboolean(BLOCK_SERVICE, VERIFY_CERT)
except configparser.NoOptionError:
vc[VERIFY_CERT] = DEFAULT_VERIFY
# tls
if vc[TLS]:
try:
hostkey = cfg.get(BLOCK_SERVICE, KEY)
hostcert = cfg.get(BLOCK_SERVICE, CERTIFICATE)
if not os.path.exists(hostkey):
raise ConfigurationError('Specified hostkey does not exist (%s)' % hostkey)
if not os.path.exists(hostcert):
raise ConfigurationError('Specified hostcert does not exist (%s)' % hostcert)
vc[KEY] = hostkey
vc[CERTIFICATE] = hostcert
try:
allowed_hosts_cfg = cfg.get(BLOCK_SERVICE, ALLOWED_HOSTS)
vc[ALLOWED_HOSTS] = allowed_hosts_cfg.split(',')
except:
pass
except configparser.NoOptionError as e:
# Not enough options for configuring tls context
raise ConfigurationError('Missing TLS option: %s' % str(e))
# backends
backends = {}
for section in cfg.sections():
if section == 'service':
continue
if ':' in section:
backend_type, name = section.split(':',2)
else:
backend_type = section
name = ''
if name in backends:
raise ConfigurationError('Can only have one backend named "%s"' % name)
if backend_type in (BLOCK_DUD, BLOCK_JUNIPER_EX, BLOCK_JUNIPER_VPLS, BLOCK_JUNOSMX, BLOCK_FORCE10, BLOCK_BROCADE,
BLOCK_NCSVPN, BLOCK_PICA8OVS, BLOCK_OESS, BLOCK_JUNOSSPACE, BLOCK_JUNOSEX,
BLOCK_CUSTOM_BACKEND, 'asyncfail'):
backend_conf = dict( cfg.items(section) )
backend_conf['_backend_type'] = backend_type
backends[name] = backend_conf
vc['backend'] = backends
return vc
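# A minimal sketch of a configuration file that readVerifyConfig() accepts,
# assuming TLS is disabled and a single dud backend; all values below are
# placeholders, not recommendations:
#
#     [service]
#     domain = example.net
#     tls = false
#     database = opennsa
#     dbuser = opennsa
#     dbpassword = secret
#
#     [dud]
#     nrmmap = /etc/opennsa.nrm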
| bsd-3-clause | -2,675,240,323,979,953,000 | 30.42735 | 121 | 0.619164 | false |
NoBodyCam/TftpPxeBootBareMetal | nova/network/api.py | 1 | 16995 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import inspect
from nova.db import base
from nova import flags
from nova.network import model as network_model
from nova.openstack.common import log as logging
from nova.openstack.common import rpc
FLAGS = flags.FLAGS
LOG = logging.getLogger(__name__)
def refresh_cache(f):
"""
Decorator to update the instance_info_cache
Requires context and instance as function args
"""
argspec = inspect.getargspec(f)
@functools.wraps(f)
def wrapper(self, context, *args, **kwargs):
res = f(self, context, *args, **kwargs)
try:
# get the instance from arguments (or raise ValueError)
instance = kwargs.get('instance')
if not instance:
instance = args[argspec.args.index('instance') - 2]
except ValueError:
msg = _('instance is a required argument to use @refresh_cache')
raise Exception(msg)
# get nw_info from return if possible, otherwise call for it
nw_info = res if isinstance(res, network_model.NetworkInfo) else None
update_instance_cache_with_nw_info(self, context, instance, nw_info,
*args, **kwargs)
# return the original function's return value
return res
return wrapper
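# refresh_cache is meant for API methods that change an instance's addressing
# state; it is applied further below in this module, e.g.:
#
#     @refresh_cache
#     def associate_floating_ip(self, context, instance, ...):
#         ...
#
# The decorated method must take 'instance' positionally or as a keyword so the
# wrapper can find it and refresh that instance's network info cache.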
def update_instance_cache_with_nw_info(api, context, instance,
nw_info=None,
*args,
**kwargs):
try:
nw_info = nw_info or api._get_instance_nw_info(context, instance)
# update cache
cache = {'network_info': nw_info.json()}
api.db.instance_info_cache_update(context, instance['uuid'], cache)
except Exception as e:
LOG.exception('Failed storing info cache', instance=instance)
LOG.debug(_('args: %s') % (args or {}))
LOG.debug(_('kwargs: %s') % (kwargs or {}))
class API(base.Base):
"""API for interacting with the network manager."""
def get_all(self, context):
return rpc.call(context,
FLAGS.network_topic,
{'method': 'get_all_networks'})
def get(self, context, network_uuid):
return rpc.call(context,
FLAGS.network_topic,
{'method': 'get_network',
'args': {'network_uuid': network_uuid}})
def create(self, context, **kwargs):
return rpc.call(context,
FLAGS.network_topic,
{'method': 'create_networks',
'args': kwargs})
def delete(self, context, network_uuid):
return rpc.call(context,
FLAGS.network_topic,
{'method': 'delete_network',
'args': {'fixed_range': None,
'uuid': network_uuid}})
def disassociate(self, context, network_uuid):
return rpc.call(context,
FLAGS.network_topic,
{'method': 'disassociate_network',
'args': {'network_uuid': network_uuid}})
def get_fixed_ip(self, context, id):
return rpc.call(context,
FLAGS.network_topic,
{'method': 'get_fixed_ip',
'args': {'id': id}})
def get_fixed_ip_by_address(self, context, address):
return rpc.call(context,
FLAGS.network_topic,
{'method': 'get_fixed_ip_by_address',
'args': {'address': address}})
def get_floating_ip(self, context, id):
return rpc.call(context,
FLAGS.network_topic,
{'method': 'get_floating_ip',
'args': {'id': id}})
def get_floating_ip_pools(self, context):
return rpc.call(context,
FLAGS.network_topic,
{'method': 'get_floating_pools'})
def get_floating_ip_by_address(self, context, address):
return rpc.call(context,
FLAGS.network_topic,
{'method': 'get_floating_ip_by_address',
'args': {'address': address}})
def get_floating_ips_by_project(self, context):
return rpc.call(context,
FLAGS.network_topic,
{'method': 'get_floating_ips_by_project'})
def get_floating_ips_by_fixed_address(self, context, fixed_address):
return rpc.call(context,
FLAGS.network_topic,
{'method': 'get_floating_ips_by_fixed_address',
'args': {'fixed_address': fixed_address}})
def get_instance_id_by_floating_address(self, context, address):
# NOTE(tr3buchet): i hate this
return rpc.call(context,
FLAGS.network_topic,
{'method': 'get_instance_id_by_floating_address',
'args': {'address': address}})
def get_vifs_by_instance(self, context, instance):
# NOTE(vish): When the db calls are converted to store network
# data by instance_uuid, this should pass uuid instead.
return rpc.call(context,
FLAGS.network_topic,
{'method': 'get_vifs_by_instance',
'args': {'instance_id': instance['id']}})
def get_vif_by_mac_address(self, context, mac_address):
return rpc.call(context,
FLAGS.network_topic,
{'method': 'get_vif_by_mac_address',
'args': {'mac_address': mac_address}})
def allocate_floating_ip(self, context, pool=None):
"""Adds a floating ip to a project from a pool. (allocates)"""
# NOTE(vish): We don't know which network host should get the ip
# when we allocate, so just send it to any one. This
# will probably need to move into a network supervisor
# at some point.
return rpc.call(context,
FLAGS.network_topic,
{'method': 'allocate_floating_ip',
'args': {'project_id': context.project_id,
'pool': pool}})
def release_floating_ip(self, context, address,
affect_auto_assigned=False):
"""Removes floating ip with address from a project. (deallocates)"""
rpc.call(context,
FLAGS.network_topic,
{'method': 'deallocate_floating_ip',
'args': {'address': address,
'affect_auto_assigned': affect_auto_assigned}})
@refresh_cache
def associate_floating_ip(self, context, instance,
floating_address, fixed_address,
affect_auto_assigned=False):
"""Associates a floating ip with a fixed ip.
ensures floating ip is allocated to the project in context
"""
orig_instance_uuid = rpc.call(context,
FLAGS.network_topic,
{'method': 'associate_floating_ip',
'args': {'floating_address': floating_address,
'fixed_address': fixed_address,
'affect_auto_assigned': affect_auto_assigned}})
if orig_instance_uuid:
msg_dict = dict(address=floating_address,
instance_id=orig_instance_uuid)
LOG.info(_('re-assign floating IP %(address)s from '
'instance %(instance_id)s') % msg_dict)
orig_instance = self.db.instance_get_by_uuid(context,
orig_instance_uuid)
# purge cached nw info for the original instance
update_instance_cache_with_nw_info(self, context, orig_instance)
@refresh_cache
def disassociate_floating_ip(self, context, instance, address,
affect_auto_assigned=False):
"""Disassociates a floating ip from fixed ip it is associated with."""
rpc.call(context,
FLAGS.network_topic,
{'method': 'disassociate_floating_ip',
'args': {'address': address}})
@refresh_cache
def allocate_for_instance(self, context, instance, **kwargs):
"""Allocates all network structures for an instance.
:returns: network info as from get_instance_nw_info() below
"""
args = kwargs
args['instance_id'] = instance['id']
args['instance_uuid'] = instance['uuid']
args['project_id'] = instance['project_id']
args['host'] = instance['host']
args['rxtx_factor'] = instance['instance_type']['rxtx_factor']
nw_info = rpc.call(context, FLAGS.network_topic,
{'method': 'allocate_for_instance',
'args': args})
return network_model.NetworkInfo.hydrate(nw_info)
def deallocate_for_instance(self, context, instance, **kwargs):
"""Deallocates all network structures related to instance."""
args = kwargs
args['instance_id'] = instance['id']
args['project_id'] = instance['project_id']
args['host'] = instance['host']
rpc.call(context, FLAGS.network_topic,
{'method': 'deallocate_for_instance',
'args': args})
def add_fixed_ip_to_instance(self, context, instance, network_id):
"""Adds a fixed ip to instance from specified network."""
args = {'instance_id': instance['id'],
'host': instance['host'],
'network_id': network_id}
rpc.call(context, FLAGS.network_topic,
{'method': 'add_fixed_ip_to_instance',
'args': args})
def remove_fixed_ip_from_instance(self, context, instance, address):
"""Removes a fixed ip from instance from specified network."""
args = {'instance_id': instance['id'],
'host': instance['host'],
'address': address}
rpc.call(context, FLAGS.network_topic,
{'method': 'remove_fixed_ip_from_instance',
'args': args})
def add_network_to_project(self, context, project_id):
"""Force adds another network to a project."""
rpc.call(context, FLAGS.network_topic,
{'method': 'add_network_to_project',
'args': {'project_id': project_id}})
@refresh_cache
def get_instance_nw_info(self, context, instance):
"""Returns all network info related to an instance."""
return self._get_instance_nw_info(context, instance)
def _get_instance_nw_info(self, context, instance):
"""Returns all network info related to an instance."""
args = {'instance_id': instance['id'],
'instance_uuid': instance['uuid'],
'rxtx_factor': instance['instance_type']['rxtx_factor'],
'host': instance['host'],
'project_id': instance['project_id']}
nw_info = rpc.call(context, FLAGS.network_topic,
{'method': 'get_instance_nw_info',
'args': args})
return network_model.NetworkInfo.hydrate(nw_info)
def validate_networks(self, context, requested_networks):
"""validate the networks passed at the time of creating
the server
"""
args = {'networks': requested_networks}
return rpc.call(context, FLAGS.network_topic,
{'method': 'validate_networks',
'args': args})
def get_instance_uuids_by_ip_filter(self, context, filters):
"""Returns a list of dicts in the form of
{'instance_uuid': uuid, 'ip': ip} that matched the ip_filter
"""
args = {'filters': filters}
return rpc.call(context, FLAGS.network_topic,
{'method': 'get_instance_uuids_by_ip_filter',
'args': args})
def get_dns_domains(self, context):
"""Returns a list of available dns domains.
These can be used to create DNS entries for floating ips.
"""
return rpc.call(context,
FLAGS.network_topic,
{'method': 'get_dns_domains'})
def add_dns_entry(self, context, address, name, dns_type, domain):
"""Create specified DNS entry for address"""
args = {'address': address,
'name': name,
'dns_type': dns_type,
'domain': domain}
return rpc.call(context, FLAGS.network_topic,
{'method': 'add_dns_entry',
'args': args})
def modify_dns_entry(self, context, name, address, domain):
"""Create specified DNS entry for address"""
args = {'address': address,
'name': name,
'domain': domain}
return rpc.call(context, FLAGS.network_topic,
{'method': 'modify_dns_entry',
'args': args})
def delete_dns_entry(self, context, name, domain):
"""Delete the specified dns entry."""
args = {'name': name, 'domain': domain}
return rpc.call(context, FLAGS.network_topic,
{'method': 'delete_dns_entry',
'args': args})
def delete_dns_domain(self, context, domain):
"""Delete the specified dns domain."""
args = {'domain': domain}
return rpc.call(context, FLAGS.network_topic,
{'method': 'delete_dns_domain',
'args': args})
def get_dns_entries_by_address(self, context, address, domain):
"""Get entries for address and domain"""
args = {'address': address, 'domain': domain}
return rpc.call(context, FLAGS.network_topic,
{'method': 'get_dns_entries_by_address',
'args': args})
def get_dns_entries_by_name(self, context, name, domain):
"""Get entries for name and domain"""
args = {'name': name, 'domain': domain}
return rpc.call(context, FLAGS.network_topic,
{'method': 'get_dns_entries_by_name',
'args': args})
def create_private_dns_domain(self, context, domain, availability_zone):
"""Create a private DNS domain with nova availability zone."""
args = {'domain': domain, 'av_zone': availability_zone}
return rpc.call(context, FLAGS.network_topic,
{'method': 'create_private_dns_domain',
'args': args})
def create_public_dns_domain(self, context, domain, project=None):
"""Create a private DNS domain with optional nova project."""
args = {'domain': domain, 'project': project}
return rpc.call(context, FLAGS.network_topic,
{'method': 'create_public_dns_domain',
'args': args})
def setup_networks_on_host(self, context, instance, host=None,
teardown=False):
"""Setup or teardown the network structures on hosts related to
instance"""
host = host or instance['host']
# NOTE(tr3buchet): host is passed in cases where we need to setup
# or teardown the networks on a host which has been migrated to/from
# and instance['host'] is not yet or is no longer equal to
args = {'instance_id': instance['id'],
'host': host,
'teardown': teardown}
# NOTE(tr3buchet): the call is just to wait for completion
rpc.call(context, FLAGS.network_topic,
{'method': 'setup_networks_on_host',
'args': args})
| apache-2.0 | 8,339,819,754,926,506,000 | 40.756757 | 78 | 0.540983 | false |
electrumalt/electrum-ixc | lib/blockchain.py | 1 | 20235 | #!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 [email protected]
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import threading, time, Queue, os, sys, shutil, traceback, json, auxpow
import zlib
from util import user_dir, appdata_dir, print_error
from bitcoin import *
from transaction import BCDataStream
import pprint
pp = pprint.PrettyPrinter(indent=4)
max_target = 0x00000000FFFF0000000000000000000000000000000000000000000000000000
class Blockchain(threading.Thread):
def __init__(self, config, network):
threading.Thread.__init__(self)
self.daemon = True
self.config = config
self.network = network
self.lock = threading.Lock()
self.local_height = 0
self.running = False
self.headers_url = 'http://electrum-alt.org/ixcoin/blockchain_headers'
self.set_local_height()
self.queue = Queue.Queue()
def height(self):
return self.local_height
def stop(self):
with self.lock: self.running = False
def is_running(self):
with self.lock: return self.running
def run(self):
self.init_headers_file()
self.set_local_height()
print_error( "blocks:", self.local_height )
with self.lock:
self.running = True
while self.is_running():
try:
result = self.queue.get()
except Queue.Empty:
continue
if not result: continue
i, header = result
if not header: continue
height = header.get('block_height')
if height <= self.local_height:
continue
if height > self.local_height + 50:
if not self.get_and_verify_chunks(i, header, height):
continue
if height > self.local_height:
# get missing parts from interface (until it connects to my chain)
chain = self.get_chain( i, header )
# skip that server if the result is not consistent
if not chain:
print_error('e')
continue
# verify the chain
if self.verify_chain( chain ):
print_error("height:", height, i.server)
for header in chain:
self.save_header(header)
else:
print_error("error", i.server)
# todo: dismiss that server
continue
self.network.new_blockchain_height(height, i)
def verify_chain(self, chain):
first_header = chain[0]
prev_header = self.read_header(first_header.get('block_height') -1)
for header in chain:
height = header.get('block_height')
prev_hash = self.hash_header(prev_header)
bits, target = self.get_target(height, chain)
_hash = self.hash_header(header)
pow_hash = _hash
try:
if height >= 45000 and header['version'] == 196865:
assert auxpow.verify(_hash, auxpow.get_our_chain_id(), header['auxpow'])
pow_hash = self.hash_header(header['auxpow']['parent_block'])
assert prev_hash == header.get('prev_block_hash')
assert bits == header.get('bits')
assert int('0x'+pow_hash,16) < target
except Exception:
print traceback.format_exc()
print 'error validating chain at height ', height
print 'block ', height, '(',_hash,') failed validation'
pprint.pprint(header)
return False
prev_header = header
return True
def verify_chunk(self, index, hexdata):
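        # Chunk layout as consumed below: a 4-byte little-endian header count,
        # then that many 88-byte records (an 80-byte block header plus 4-byte
        # auxpow offset and 4-byte auxpow length), followed by the raw auxpow
        # data that those offsets index into.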
hex_to_int = lambda s: int('0x' + s[::-1].encode('hex'), 16)
data = hexdata.decode('hex')
disk_data = ''
height = index * 2016
num = hex_to_int(data[0:4])
data = data[4:]
auxpowdata = data[num*88:]
auxpowbaseoffset = 0
if index == 0:
previous_hash = ("0"*64)
else:
prev_header = self.read_header(index*2016-1)
            if prev_header is None: raise Exception('missing previous header at height %d' % (index*2016 - 1))
previous_hash = self.hash_header(prev_header)
bits, target = self.get_target(height)
chain = []
for i in range(num):
height = index * 2016 + i
raw_header = data[i*88:(i+1)*88]
disk_data += raw_header[0:80] # strip auxpow data
header = self.header_from_string(raw_header)
_hash = self.hash_header(header)
_prev_hash = _hash
header['block_height'] = height
if (i == 0):
auxpowbaseoffset = header['auxpow_offset']
start = header['auxpow_offset'] - auxpowbaseoffset
end = start + header['auxpow_length']
if (end > start):
header['auxpow'] = self.auxpow_from_string(auxpowdata[start:end].decode('hex'))
#print header['auxpow']
if height >= 20160 and (height % 144) == 0:
#print height , '%', 144 , '=', height % 144
bits, target = self.get_target(height, chain)
if height >= 45000 and header['version'] == 196865: #TODO getAuxPowVersion()
#todo: check that auxpow.get_chain_id(header) == auxpow.get_our_chain_id?
#print header['auxpow']
try:
assert auxpow.verify(_hash, auxpow.get_our_chain_id(), header['auxpow'])
except Exception as e:
print traceback.format_exc()
print 'block ', height, '(',_hash,') failed validation'
print 'auxpow failed verification'
pp.pprint(header['auxpow'])
raise e
#pp.pprint(header)
#pp.pprint(parent_header)
_hash = self.hash_header(header['auxpow']['parent_block'])
#print _hash
# todo: verify auxpow data
#_hash = '' # auxpow.getHash()
try:
assert previous_hash == header.get('prev_block_hash')
assert bits == header.get('bits')
assert int('0x'+_hash,16) < target
except Exception as e:
print 'block ', height, ' failed validation'
raise e
if height % 144 == 0:
print 'block ', height, ' validated'
chain.append(header)
previous_header = header
previous_hash = _prev_hash
self.save_chunk(index, disk_data)
print_error("validated chunk %d"%height)
#def parent_block_to_header(self, parent_block):
#h = {}
#h['version'] = parent_block['version']
#h['prev_block_hash'] = parent_block['previousblockhash']
#h['merkle_root'] = parent_block['merkleroot']
#h['timestamp'] = parent_block['time']
#h['bits'] = int(parent_block['bits'], 16) #not sure
#h['nonce'] = parent_block['nonce']
#return h
def header_to_string(self, res):
s = int_to_hex(res.get('version'),4) \
+ rev_hex(res.get('prev_block_hash')) \
+ rev_hex(res.get('merkle_root')) \
+ int_to_hex(int(res.get('timestamp')),4) \
+ int_to_hex(int(res.get('bits')),4) \
+ int_to_hex(int(res.get('nonce')),4)
return s
def auxpow_from_string(self, s):
res = {}
res['coinbasetx'], s = tx_from_string(s)
res['coinbaseMerkleBranch'], res['coinbaseIndex'], s = merkle_branch_from_string(s)
res['chainMerkleBranch'], res['chainIndex'], s = merkle_branch_from_string(s)
res['parent_block'] = header_from_string(s)
return res
def header_from_string(self, s):
# hmmm why specify 0x at beginning if 16 is already specified??
hex_to_int = lambda s: int('0x' + s[::-1].encode('hex'), 16)
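        # 80-byte header layout parsed below: version (bytes 0-3), previous
        # block hash (4-35), merkle root (36-67), timestamp (68-71),
        # bits (72-75), nonce (76-79); chunk records append 8 extra bytes
        # holding the auxpow offset and length.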
h = {}
h['version'] = hex_to_int(s[0:4])
h['prev_block_hash'] = hash_encode(s[4:36])
h['merkle_root'] = hash_encode(s[36:68])
h['timestamp'] = hex_to_int(s[68:72])
h['bits'] = hex_to_int(s[72:76])
h['nonce'] = hex_to_int(s[76:80])
if (len(s) > 80):
h['auxpow_offset'] = hex_to_int(s[80:84])
h['auxpow_length'] = hex_to_int(s[84:88])
return h
def hash_header(self, header):
return rev_hex(Hash(self.header_to_string(header).decode('hex')).encode('hex'))
def path(self):
return os.path.join( self.config.path, 'blockchain_headers')
# the file hosted on the server has extra data to index auxpow data
# we need to remove that data to have 80 byte block headers instead of 88
def remove_auxpow_indexes(self, filename):
size = os.path.getsize(filename)
f = open(self.path(), 'wb+')
fa = open(filename, 'rb')
i = 0
j = 0
while (i < size):
fa.seek(i)
f.seek(j)
chunk = fa.read(80)
f.write(chunk)
j += 80
i += 88
f.close()
fa.close()
os.remove(filename)
def init_headers_file(self):
filename = self.path()
if os.path.exists(filename):
return
try:
import urllib, socket
socket.setdefaulttimeout(30)
print_error('downloading ', self.headers_url )
urllib.urlretrieve(self.headers_url, filename + '_auxpow')
self.remove_auxpow_indexes(filename + '_auxpow')
print_error("done.")
except Exception:
print_error( 'download failed. creating file', filename + '_auxpow' )
open(filename,'wb+').close()
def save_chunk(self, index, chunk):
filename = self.path()
f = open(filename,'rb+')
f.seek(index*2016*80)
h = f.write(chunk)
f.close()
self.set_local_height()
def save_header(self, header):
data = self.header_to_string(header).decode('hex')
assert len(data) == 80
height = header.get('block_height')
filename = self.path()
f = open(filename,'rb+')
f.seek(height*80)
h = f.write(data)
f.close()
self.set_local_height()
def set_local_height(self):
name = self.path()
if os.path.exists(name):
h = os.path.getsize(name)/80 - 1
if self.local_height != h:
self.local_height = h
def read_header(self, block_height):
name = self.path()
if os.path.exists(name):
f = open(name,'rb')
f.seek(block_height*80)
h = f.read(80)
f.close()
if len(h) == 80:
h = self.header_from_string(h)
return h
def get_ixcoin_target(self, height, chain=None):
if chain is None:
chain = [] # Do not use mutables as default values!
        nTargetTimespan = 24 * 60 * 60 #ixcoin: 144 blocks every 24 hours
nInterval = 144
blockstogoback = nInterval
if (height >= 43000):
blockstogoback = nInterval + 1
last_height = (height / 144) * 144 - 1
first_height = (height / 144) * 144 - blockstogoback
#print 'new target at... ' , height
#print 'first height: '
#print first_height
#print 'last height: '
#print last_height
first = self.read_header(first_height)
last = self.read_header(last_height)
if first is None:
for h in chain:
if h.get('block_height') == first_height:
first = h
if last is None:
for h in chain:
if h.get('block_height') == last_height:
last = h
nActualTimespan = last.get('timestamp') - first.get('timestamp')
# https://github.com/FrictionlessCoin/iXcoin/blob/master/src/main.cpp#L1240
nTwoPercent = nTargetTimespan / 50
if nActualTimespan < nTargetTimespan:
#print 'smaller actual timespan'
if nActualTimespan < (nTwoPercent * 16):
#print 'a'
nActualTimespan = nTwoPercent * 45
elif nActualTimespan < (nTwoPercent * 32):
#print 'b'
nActualTimespan = nTwoPercent * 47
else:
#print 'c'
nActualTimespan = nTwoPercent * 49
elif nActualTimespan > (nTargetTimespan * 4):
#print 'd'
nActualTimespan = nTargetTimespan * 4
return self.get_target_from_timespans(last.get('bits'), nActualTimespan, nTargetTimespan)
def get_target_from_timespans(self, bits, nActualTimespan, nTargetTimespan):
# convert to bignum
MM = 256*256*256
a = bits%MM
if a < 0x8000:
a *= 256
target = (a) * pow(2, 8 * (bits/MM - 3))
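        # The compact 'bits' field packs a 3-byte mantissa and an exponent; for
        # example 0x1d00ffff expands to 0x00000000ffff0000...0000, which is the
        # max_target defined at the top of this module.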
# new target
new_target = min( max_target, (target * nActualTimespan)/nTargetTimespan )
# convert it to bits
c = ("%064X"%new_target)[2:]
i = 31
while c[0:2]=="00":
c = c[2:]
i -= 1
c = int('0x'+c[0:6],16)
if c >= 0x800000:
c /= 256
i += 1
new_bits = c + MM * i
#print 'new bits: ', hex(new_bits)
#print 'new target: ', hex(new_target)
return new_bits, new_target
def get_target(self, height, chain=None):
if chain is None:
chain = [] # Do not use mutables as default values!
# Ixcoin: target changes every 144 blocks after block 20160
# https://github.com/FrictionlessCoin/iXcoin/blob/master/src/main.cpp#L1196
if height >= 20160:
#print height , '%', 144 , '=', height % 144
return self.get_ixcoin_target(height, chain)
index = height / 2016
if index == 0: return 0x1d00ffff, max_target
first = self.read_header((index-1)*2016)
last = self.read_header(index*2016-1)
if last is None:
for h in chain:
if h.get('block_height') == index*2016-1:
last = h
nActualTimespan = last.get('timestamp') - first.get('timestamp')
nTargetTimespan = 14*24*60*60
nActualTimespan = max(nActualTimespan, nTargetTimespan/4)
nActualTimespan = min(nActualTimespan, nTargetTimespan*4)
return self.get_target_from_timespans(last.get('bits'), nActualTimespan, nTargetTimespan)
def request_header(self, i, h, queue):
print_error("requesting header %d from %s"%(h, i.server))
i.send_request({'method':'blockchain.block.get_header', 'params':[h]}, queue)
def retrieve_request(self, queue):
while True:
try:
ir = queue.get(timeout=1)
except Queue.Empty:
print_error('blockchain: request timeout')
continue
i, r = ir
result = r['result']
return result
def get_chain(self, interface, final_header):
header = final_header
chain = [ final_header ]
requested_header = False
queue = Queue.Queue()
while self.is_running():
if requested_header:
header = self.retrieve_request(queue)
if not header: return
chain = [ header ] + chain
requested_header = False
height = header.get('block_height')
previous_header = self.read_header(height -1)
if not previous_header:
self.request_header(interface, height - 1, queue)
requested_header = True
continue
# verify that it connects to my chain
prev_hash = self.hash_header(previous_header)
if prev_hash != header.get('prev_block_hash'):
print_error("reorg")
self.request_header(interface, height - 1, queue)
requested_header = True
continue
else:
# the chain is complete
return chain
def get_and_verify_chunks(self, i, header, height):
queue = Queue.Queue()
min_index = (self.local_height + 1)/2016
max_index = (height + 1)/2016
n = min_index
while n < max_index + 1:
print_error( "Requesting chunk:", n )
# todo: ixcoin get_auxblock_chunk after block 45000...?
# todo: call blockchain.block.get_auxblock from verify_chunk instead?
i.send_request({'method':'blockchain.block.get_chunk', 'params':[n]}, queue)
r = self.retrieve_request(queue)
#print 'chunk compressed length : ', len(r)
r = zlib.decompress(r.decode('hex'))
#print 'chunk uncompressed length : ', len(r)
try:
self.verify_chunk(n, r)
n = n + 1
except Exception:
print traceback.format_exc()
print_error('Verify chunk failed!')
n = n - 1
if n < 0:
return False
return True
# START electrum-ixc-server
# the following code was copied from the server's utils.py file
def tx_from_string(s):
vds = BCDataStream()
vds.write(s)
#vds.write(raw.decode('hex'))
d = {}
d['version'] = vds.read_int32()
n_vin = vds.read_compact_size()
d['vin'] = []
for i in xrange(n_vin):
txin = {}
# dirty hack: add outpoint structure to get correct txid later
outpoint_pos = vds.read_cursor
txin['coinbase'] = vds.read_bytes(vds.read_compact_size()).encode('hex')
txin['sequence'] = vds.read_uint32()
d['vin'].append(txin)
n_vout = vds.read_compact_size()
d['vout'] = []
for i in xrange(n_vout):
txout = {}
txout['value'] = vds.read_int64()
txout['scriptPubKey'] = vds.read_bytes(vds.read_compact_size()).encode('hex')
d['vout'].append(txout)
d['lockTime'] = vds.read_uint32()
# compute txid
# dirty hack to insert coinbase outpoint structure before hashing
raw = s[0:outpoint_pos]
COINBASE_OP = '0' * 64 + 'F' * 8
raw += (COINBASE_OP).decode('hex')
raw += s[outpoint_pos:vds.read_cursor]
d['txid'] = Hash(raw)[::-1].encode('hex')
return d, s[vds.read_cursor:] # +1?
def merkle_branch_from_string(s):
vds = BCDataStream()
vds.write(s)
#vds.write(raw.decode('hex'))
hashes = []
n_hashes = vds.read_compact_size()
for i in xrange(n_hashes):
_hash = vds.read_bytes(32)
hashes.append(hash_encode(_hash))
index = vds.read_int32()
return hashes, index, s[vds.read_cursor:]
def hex_to_int(s):
return int('0x' + s[::-1].encode('hex'), 16)
def header_from_string(s):
#OK ixcoin todo: include auxpow position in auxpow file (offset(s))
res = {
'version': hex_to_int(s[0:4]),
'prev_block_hash': hash_encode(s[4:36]),
'merkle_root': hash_encode(s[36:68]),
'timestamp': hex_to_int(s[68:72]),
'bits': hex_to_int(s[72:76]),
'nonce': hex_to_int(s[76:80]),
}
if (len(s) > 80):
res['auxpow_offset'] = hex_to_int(s[80:84])
res['auxpow_length'] = hex_to_int(s[84:88])
return res
# END electrum-ixc-server
| gpl-3.0 | 960,714,723,618,996,600 | 31.584541 | 97 | 0.540252 | false |
sebgoa/client-python | kubernetes/client/models/v1_container_state.py | 2 | 4601 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1ContainerState(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, running=None, terminated=None, waiting=None):
"""
V1ContainerState - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'running': 'V1ContainerStateRunning',
'terminated': 'V1ContainerStateTerminated',
'waiting': 'V1ContainerStateWaiting'
}
self.attribute_map = {
'running': 'running',
'terminated': 'terminated',
'waiting': 'waiting'
}
self._running = running
self._terminated = terminated
self._waiting = waiting
@property
def running(self):
"""
Gets the running of this V1ContainerState.
Details about a running container
:return: The running of this V1ContainerState.
:rtype: V1ContainerStateRunning
"""
return self._running
@running.setter
def running(self, running):
"""
Sets the running of this V1ContainerState.
Details about a running container
:param running: The running of this V1ContainerState.
:type: V1ContainerStateRunning
"""
self._running = running
@property
def terminated(self):
"""
Gets the terminated of this V1ContainerState.
Details about a terminated container
:return: The terminated of this V1ContainerState.
:rtype: V1ContainerStateTerminated
"""
return self._terminated
@terminated.setter
def terminated(self, terminated):
"""
Sets the terminated of this V1ContainerState.
Details about a terminated container
:param terminated: The terminated of this V1ContainerState.
:type: V1ContainerStateTerminated
"""
self._terminated = terminated
@property
def waiting(self):
"""
Gets the waiting of this V1ContainerState.
Details about a waiting container
:return: The waiting of this V1ContainerState.
:rtype: V1ContainerStateWaiting
"""
return self._waiting
@waiting.setter
def waiting(self, waiting):
"""
Sets the waiting of this V1ContainerState.
Details about a waiting container
:param waiting: The waiting of this V1ContainerState.
:type: V1ContainerStateWaiting
"""
self._waiting = waiting
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1ContainerState):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| apache-2.0 | -1,741,430,488,379,125,000 | 26.224852 | 105 | 0.562921 | false |
manparvesh/manparvesh.github.io | oldsitejekyll/markdown_generator/professional.py | 1 | 3829 |
# coding: utf-8
# # Publications markdown generator for academicpages
#
# Takes a TSV of publications with metadata and converts them for use with [academicpages.github.io](academicpages.github.io). This is an interactive Jupyter notebook, with the core python code in publications.py. Run either from the `markdown_generator` folder after replacing `publications.tsv` with one that fits your format.
#
# TODO: Make this work with BibTex and other databases of citations, rather than Stuart's non-standard TSV format and citation style.
#
# ## Data format
#
# The TSV needs to have the following columns: pub_date, title, venue, excerpt, citation, site_url, and paper_url, with a header at the top.
#
# - `excerpt` and `paper_url` can be blank, but the others must have values.
# - `pub_date` must be formatted as YYYY-MM-DD.
# - `url_slug` will be the descriptive part of the .md file and the permalink URL for the page about the paper. The .md file will be `YYYY-MM-DD-[url_slug].md` and the permalink will be `https://[yourdomain]/publications/YYYY-MM-DD-[url_slug]`
# ## Import pandas
#
# We are using the very handy pandas library for dataframes.
# In[2]:
import pandas as pd
# ## Import TSV
#
# Pandas makes this easy with the read_csv function. We are using a TSV, so we specify the separator as a tab, or `\t`.
#
# I found it important to put this data in a tab-separated values format, because there are a lot of commas in this kind of data and comma-separated values can get messed up. However, you can modify the import statement, as pandas also has read_excel(), read_json(), and others.
# In[3]:
professional = pd.read_csv("professional.tsv", sep="\t", header=0)
professional
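# For illustration only: a hypothetical `professional.tsv` with the columns this
# notebook reads further down (course_name, provider, provider_url,
# certificate_link) would contain tab-separated rows along the lines of:
#
# course_name	provider	provider_url	certificate_link
# Machine Learning	Coursera	https://www.coursera.org/	https://example.com/cert.pdf
#
# The certificate_link column may be left empty; the loop below checks for that.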
# ## Escape special characters
#
# YAML is very picky about how it takes a valid string, so we are replacing single and double quotes (and ampersands) with their HTML encoded equivalents. This makes them look not so readable in raw format, but they are parsed and rendered nicely.
# In[4]:
html_escape_table = {
"&": "&",
'"': """,
"'": "'"
}
def html_escape(text):
"""Produce entities within text."""
return "".join(html_escape_table.get(c,c) for c in text)
# ## Creating the markdown files
#
# This is where the heavy lifting is done. This loops through all the rows in the TSV dataframe, then starts to concatenate a big string (```md```) that contains the markdown for each type. It does the YAML metadata first, then does the description for the individual page. If you don't want something to appear (like the "Recommended citation"), you can comment out or remove the lines that add it.
# In[5]:
import os
SPACE = ' '
STAR = '*'
TAB = SPACE + SPACE
TAB_BULLET = SPACE + STAR + SPACE
ENDL = '\n'
WIP = '*[ WIP ]*'
TODO = '*[TODO]*'
def is_not_NaN(num):
return num == num
def is_not_empty(s):
return is_not_NaN(s) and len(str(s)) > 0
def bold(s):
return STAR + STAR + str(s) + STAR + STAR
def italicize(s):
return STAR + str(s) + STAR
def coursera_icon_link(s):
return '<a href="' + str(s) + '" target="_blank"><i class="ai ai-courser"></i></a>'
def github_icon_link(s):
return '<a href="' + str(s) + '" target="_blank"><i class="fa fa-github" aria-hidden="true"></i> </a>'
def certificate_icon_link(s):
return '<a href="' + str(s) + '" target="_blank"><i class="fa fa-certificate" aria-hidden="true"></i> </a>'
with open("../_pages/professional.md", 'w') as f:
for row, item in professional.iterrows():
md = ''
md += TAB_BULLET
md += str(item.course_name)
md += SPACE
md += "by "
md += '[' + str(item.provider) + '](' + str(item.provider_url) + ')'
md += SPACE
if is_not_empty(item.certificate_link):
md += certificate_icon_link(item.certificate_link)
md += ENDL
f.write(md)
| mit | 1,995,675,648,712,673,500 | 34.453704 | 346 | 0.665709 | false |
qxf2/qxf2-page-object-model | utils/excel_compare.py | 1 | 2729 | """
Qxf2 Services: Utility script to compare two excel files using openxl module
"""
import openpyxl
import os
class Excel_Compare():
def is_equal(self,xl_actual,xl_expected):
"Method to compare the Actual and Expected xl file"
result_flag = True
if not os.path.exists(xl_actual):
result_flag = False
print('Could not locate the excel file: %s'%xl_actual)
if not os.path.exists(xl_expected):
result_flag = False
print('Could not locate the excel file %s'%xl_expected)
if os.path.exists(xl_actual) and os.path.exists(xl_expected):
#Open the xl file and put the content to list
actual_xlfile = openpyxl.load_workbook(xl_actual)
xl_sheet = actual_xlfile.active
actual_file = []
for row in xl_sheet.iter_rows(min_row=1, max_col=xl_sheet.max_column, max_row=xl_sheet.max_row):
for cell in row:
actual_file.append(cell.value)
exp_xlfile = openpyxl.load_workbook(xl_expected)
xl_sheet = exp_xlfile.active
exp_file = []
for row in xl_sheet.iter_rows(min_row=1, max_col=xl_sheet.max_column, max_row=xl_sheet.max_row):
for cell in row:
exp_file.append(cell.value)
#If there is row and column mismatch result_flag = False
if (len(actual_file)!= len(exp_file)):
result_flag = False
print("Mismatch in number of rows or columns. The actual row or column count didn't match with expected row or column count")
else:
for actual_row, actual_col in zip(actual_file,exp_file):
if actual_row == actual_col:
pass
else:
print("Mismatch between actual and expected file at position(each row consists of 23 coordinates):",actual_file.index(actual_row))
print("Data present only in Actual file: %s"%actual_row)
print("Data present only in Expected file: %s"%actual_col)
result_flag = False
return result_flag
#---USAGE EXAMPLES
if __name__=='__main__':
print("Start of %s"%__file__)
# Enter the path details of the xl files here
file1 = 'Add path to the first xl file'
file2 = 'Add path to the second xl file'
#Initialize the excel object
xl_obj = Excel_Compare()
#Sample code to compare excel files
if xl_obj.is_equal(file1,file2) is True:
print("Data matched in both the excel files\n")
else:
print("Data mismatch between the actual and expected excel files") | mit | 4,930,645,045,380,420,000 | 39.746269 | 154 | 0.585562 | false |
HackatONG-ProgramAR/cordoba-aprende | aulalibre/aulavirtual/models.py | 1 | 4354 | # -*- coding: utf-8 -*-
from django.db import models
from educar import get_descripciones_ebooks
# Create your models here.
class Alumno(models.Model):
apellido = models.CharField(max_length=100)
nombre = models.CharField(max_length=100)
colegio = models.ForeignKey('Colegio')
curso = models.ForeignKey('Curso')
email = models.EmailField(blank=True)
class Meta:
verbose_name = ('Alumno')
verbose_name_plural = ('Alumnos')
def __unicode__(self):
return "%s, %s" % (self.apellido, self.nombre)
class Profesor(models.Model):
apellido = models.CharField(max_length=100)
nombre = models.CharField(max_length=100)
colegio = models.ForeignKey('Colegio')
email = models.EmailField(blank=True)
class Meta:
verbose_name = ('Profesor')
verbose_name_plural = ('Profesores')
def __unicode__(self):
return "%s, %s" % (self.apellido, self.nombre)
class Colegio(models.Model):
nombre = models.CharField(max_length=100)
ciudad = models.CharField(max_length=100)
class Meta:
verbose_name = ('Colegio')
verbose_name_plural = ('Colegios')
def __unicode__(self):
return self.nombre
class Curso(models.Model):
colegio = models.ForeignKey('Colegio')
anio = models.IntegerField(verbose_name=u'Año')
division = models.CharField(max_length=100)
class Meta:
verbose_name = ('Curso')
verbose_name_plural = ('Cursos')
def __unicode__(self):
return u'%s "%s" - %s' % (self.anio, self.division, self.colegio)
class CursoMateria(models.Model):
curso = models.ForeignKey('Curso')
profesor = models.ForeignKey('Profesor')
anio_materia = models.ForeignKey('AnioMateria')
class Meta:
verbose_name = ('Curso Materia')
verbose_name_plural = ('Curso Materias')
def __unicode__(self):
return u"%s - %s - %s" % (self.curso, self.anio_materia, self.profesor)
class AnioMateria(models.Model):
materia = models.ForeignKey('Materia')
anio = models.IntegerField(verbose_name=u'Año')
class Meta:
verbose_name = (u'Año Materia')
verbose_name_plural = (u'Año Materias')
def __unicode__(self):
return u"%s - %s" % (self.materia, self.anio)
class Materia(models.Model):
nombre = models.CharField(max_length=100)
area_tematica = models.ForeignKey('AreaTematica')
class Meta:
verbose_name = ('Materia')
verbose_name_plural = ('Materias')
def __unicode__(self):
return self.nombre
class AreaTematica(models.Model):
nombre = models.CharField(max_length=100)
class Meta:
verbose_name = ('Área Temática')
verbose_name_plural = ('Áreas Temáticas')
def __unicode__(self):
return self.nombre
@classmethod
def crear_areas(cls):
areas = ["Matemática", "Lengua", "Ciencias"]
for n in areas:
cls.objects.create(nombre=n)
class Eje(models.Model):
nombre = models.CharField(max_length=100)
anio_materia = models.ForeignKey('AnioMateria')
    # contents
class Meta:
verbose_name = ('Eje')
verbose_name_plural = ('Ejes')
def __unicode__(self):
return self.nombre
class Recurso(models.Model):
tipo = models.CharField(max_length=100)
nombre = models.CharField(max_length=100)
descripcion = models.TextField()
enlace = models.TextField()
adjunto = models.FileField(upload_to='recursos')
area_tematica = models.ForeignKey('AreaTematica')
anio = models.IntegerField(verbose_name=u'Año')
class Meta:
verbose_name = ('Recurso')
verbose_name_plural = ('Recursos')
@classmethod
def cargar_ebooks(cls, descripciones=None):
if descripciones is None:
descripciones = get_descripciones_ebooks()
# TODO: traer el area posta
area = AreaTematica.objects.get(nombre="Ciencias")
for desc in descripciones:
cls.objects.create(
tipo="ebook",
nombre=desc[u'titulo'],
descripcion=desc['descripcion'],
area_tematica=area,
anio=3,
enlace=desc['visualizacion_educar']
)
def __unicode__(self):
return self.nombre
| gpl-2.0 | 926,056,478,757,999,900 | 26.320755 | 79 | 0.617173 | false |
tswast/google-cloud-python | firestore/google/cloud/firestore_admin_v1/gapic/firestore_admin_client_config.py | 2 | 2613 | config = {
"interfaces": {
"google.firestore.admin.v1.FirestoreAdmin": {
"retry_codes": {
"idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"],
"non_idempotent": [],
},
"retry_params": {
"default": {
"initial_retry_delay_millis": 100,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
"initial_rpc_timeout_millis": 60000,
"rpc_timeout_multiplier": 1.0,
"max_rpc_timeout_millis": 60000,
"total_timeout_millis": 600000,
}
},
"methods": {
"CreateIndex": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"ListIndexes": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"GetIndex": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"DeleteIndex": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"ImportDocuments": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"ExportDocuments": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"GetField": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"ListFields": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"UpdateField": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
},
}
}
}
| apache-2.0 | 7,393,237,340,222,576,000 | 37.426471 | 79 | 0.38385 | false |
Fewbytes/cosmo-plugin-openstack-neutron-router-provisioner | openstack_neutron_router_provisioner/tasks.py | 1 | 4033 | # vim: ts=4 sw=4 et
# Standard
import json
import os
# Celery
from celery import task
# OpenStack
import keystoneclient.v2_0.client as ksclient
from neutronclient.neutron import client
# Cosmo
from cosmo.events import send_event
# TODO: rename to create()
@task
def provision(__cloudify_id, router, enable_snat=True, **kwargs):
neutron_client = _init_client()
if _get_router_by_name(neutron_client, router['name']):
raise RuntimeError("Can not provision router with name '{0}' because router with such name already exists"
.format(router['name']))
rtr_dict = {
'name': router['name'],
}
if 'gateway' in router:
rtr_dict['external_gateway_info'] = {
'network_id': _get_network_by_name(neutron_client, router['gateway'])['id'],
'enable_snat': enable_snat,
}
rtr = neutron_client.create_router({'router': rtr_dict})['router']
send_event(__cloudify_id, "rtr-" + router['name'], "router status", "state", "running")
@task
def connect_gateway(router, network, enable_snat=True, **kwargs):
neutron_client = _init_client()
rtr = _get_router_by_name(neutron_client, router['name'])
net = _get_network_by_name(neutron_client, network['name'])
neutron_client.add_gateway_router(rtr['id'], {'network_id': net['id'], 'enable_snat': enable_snat})
@task
def connect_subnet(__source_properties, __target_properties, **kwargs):
subnet = __source_properties['subnet']
router = __target_properties['router']
neutron_client = _init_client()
rtr = _get_router_by_name(neutron_client, router['name'])
subnet = _get_subnet_by_name(neutron_client, subnet['name'])
# print(dir(neutron_client))
neutron_client.add_interface_router(rtr['id'], {'subnet_id': subnet['id']})
@task
def disconnect_subnet(router, subnet, **kwargs):
neutron_client = _init_client()
rtr = _get_router_by_name(neutron_client, router['name'])
subnet = _get_subnet_by_name(neutron_client, subnet['name'])
neutron_client.remove_interface_router(rtr['id'], {'subnet_id': subnet['id']})
# TODO: rename to delete()
@task
def terminate(router, **kwargs):
neutron_client = _init_client()
rtr = _get_router_by_name(neutron_client, router['name'])
neutron_client.delete_router(rtr['id'])
# TODO: cache the token, cache client
def _init_client():
config_path = os.getenv('NEUTRON_CONFIG_PATH', os.path.expanduser('~/neutron_config.json'))
with open(config_path) as f:
neutron_config = json.loads(f.read())
keystone_client = _init_keystone_client()
neutron_client = client.Client('2.0', endpoint_url=neutron_config['url'], token=keystone_client.auth_token)
neutron_client.format = 'json'
return neutron_client
def _init_keystone_client():
config_path = os.getenv('KEYSTONE_CONFIG_PATH', os.path.expanduser('~/keystone_config.json'))
with open(config_path) as f:
cfg = json.loads(f.read())
# Not the same config as nova client. Same parameters, different names.
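    # For illustration, a keystone_config.json shaped like this satisfies the
    # lookup below (values are placeholders, not real credentials):
    # {"username": "admin", "password": "secret",
    #  "tenant_name": "demo", "auth_url": "http://127.0.0.1:5000/v2.0"}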
args = {field: cfg[field] for field in ('username', 'password', 'tenant_name', 'auth_url')}
return ksclient.Client(**args)
def _make_get_obj_by_name(single):
plural = single + 's'
def f(neutron_client, name):
matching_objs = getattr(neutron_client, 'list_' + plural)(name=name)[plural]
if len(matching_objs) == 0:
return None
if len(matching_objs) == 1:
return matching_objs[0]
raise RuntimeError("Lookup of {0} by name failed. There are {2} {1} named '{3}'"
.format(single, plural, len(matching_objs), name))
f.func_name = '_get_' + single + '_by_name'
return f
_get_router_by_name = _make_get_obj_by_name('router')
_get_network_by_name = _make_get_obj_by_name('network')
_get_subnet_by_name = _make_get_obj_by_name('subnet')
if __name__ == '__main__':
neutron_client = _init_client()
json.dumps(neutron_client.list_routers(), indent=4, sort_keys=True)
| apache-2.0 | 2,368,053,449,231,438,300 | 32.330579 | 114 | 0.646169 | false |
zsjohny/jumpserver | apps/users/views/login.py | 1 | 6958 | # ~*~ coding: utf-8 ~*~
from __future__ import unicode_literals
from django.shortcuts import render
from django.views.generic import RedirectView
from django.core.files.storage import default_storage
from django.shortcuts import reverse, redirect
from django.utils.translation import ugettext as _
from django.views.generic.base import TemplateView
from django.conf import settings
from django.urls import reverse_lazy
from formtools.wizard.views import SessionWizardView
from django.views.generic import FormView
from common.utils import get_object_or_none
from common.permissions import PermissionsMixin, IsValidUser
from ..models import User
from ..utils import (
send_reset_password_mail, get_password_check_rules, check_password_rules
)
from .. import forms
__all__ = [
'UserLoginView', 'UserForgotPasswordSendmailSuccessView',
'UserResetPasswordSuccessView', 'UserResetPasswordSuccessView',
'UserResetPasswordView', 'UserForgotPasswordView', 'UserFirstLoginView',
]
class UserLoginView(RedirectView):
url = reverse_lazy('authentication:login')
query_string = True
class UserForgotPasswordView(FormView):
template_name = 'users/forgot_password.html'
form_class = forms.UserForgotPasswordForm
def form_valid(self, form):
request = self.request
email = form.cleaned_data['email']
user = get_object_or_none(User, email=email)
if not user:
error = _('Email address invalid, please input again')
form.add_error('email', error)
return self.form_invalid(form)
elif not user.can_update_password():
error = _('User auth from {}, go there change password')
form.add_error('email', error.format(user.get_source_display()))
return self.form_invalid(form)
else:
send_reset_password_mail(user)
return redirect('users:forgot-password-sendmail-success')
class UserForgotPasswordSendmailSuccessView(TemplateView):
template_name = 'flash_message_standalone.html'
def get_context_data(self, **kwargs):
context = {
'title': _('Send reset password message'),
'messages': _('Send reset password mail success, '
'login your mail box and follow it '),
'redirect_url': reverse('authentication:login'),
}
kwargs.update(context)
return super().get_context_data(**kwargs)
class UserResetPasswordSuccessView(TemplateView):
template_name = 'flash_message_standalone.html'
def get_context_data(self, **kwargs):
context = {
'title': _('Reset password success'),
'messages': _('Reset password success, return to login page'),
'redirect_url': reverse('authentication:login'),
'auto_redirect': True,
}
kwargs.update(context)
return super().get_context_data(**kwargs)
class UserResetPasswordView(FormView):
template_name = 'users/reset_password.html'
form_class = forms.UserTokenResetPasswordForm
def get(self, request, *args, **kwargs):
context = self.get_context_data(**kwargs)
errors = kwargs.get('errors')
if errors:
context['errors'] = errors
return self.render_to_response(context)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
token = self.request.GET.get('token', '')
user = User.validate_reset_password_token(token)
if not user:
context['errors'] = _('Token invalid or expired')
context['token_invalid'] = True
check_rules = get_password_check_rules()
context['password_check_rules'] = check_rules
return context
def form_valid(self, form):
token = self.request.GET.get('token')
user = User.validate_reset_password_token(token)
if not user:
error = _('Token invalid or expired')
form.add_error('new_password', error)
return self.form_invalid(form)
if not user.can_update_password():
error = _('User auth from {}, go there change password')
form.add_error('new_password', error.format(user.get_source_display()))
return self.form_invalid(form)
password = form.cleaned_data['new_password']
is_ok = check_password_rules(password)
if not is_ok:
error = _('* Your password does not meet the requirements')
form.add_error('new_password', error)
return self.form_invalid(form)
user.reset_password(password)
User.expired_reset_password_token(token)
return redirect('users:reset-password-success')
class UserFirstLoginView(PermissionsMixin, SessionWizardView):
template_name = 'users/first_login.html'
permission_classes = [IsValidUser]
form_list = [
forms.UserProfileForm,
forms.UserPublicKeyForm,
forms.UserMFAForm,
forms.UserFirstLoginFinishForm
]
file_storage = default_storage
def dispatch(self, request, *args, **kwargs):
if request.user.is_authenticated and not request.user.is_first_login:
return redirect(reverse('index'))
return super().dispatch(request, *args, **kwargs)
def done(self, form_list, **kwargs):
user = self.request.user
for form in form_list:
for field in form:
if field.value():
setattr(user, field.name, field.value())
user.is_first_login = False
user.save()
context = {
'user_guide_url': settings.USER_GUIDE_URL
}
return render(self.request, 'users/first_login_done.html', context)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context.update({'app': _('Users'), 'action': _('First login')})
return context
def get_form_initial(self, step):
user = self.request.user
if step == '0':
return {
'username': user.username or '',
'name': user.name or user.username,
'email': user.email or '',
'wechat': user.wechat or '',
'phone': user.phone or ''
}
return super().get_form_initial(step)
def get_form(self, step=None, data=None, files=None):
form = super().get_form(step, data, files)
form.instance = self.request.user
if isinstance(form, forms.UserMFAForm):
choices = form.fields["mfa_level"].choices
if self.request.user.mfa_force_enabled:
choices = [(k, v) for k, v in choices if k == 2]
else:
choices = [(k, v) for k, v in choices if k in [0, 1]]
form.fields["mfa_level"].choices = choices
form.fields["mfa_level"].initial = self.request.user.mfa_level
return form
| gpl-2.0 | 1,119,947,825,107,735,700 | 35.621053 | 83 | 0.624174 | false |
centaurialpha/ninja-ide | ninja_ide/core/file_handling/file_manager.py | 1 | 10239 | # -*- coding: utf-8 -*-
#
# This file is part of NINJA-IDE (http://ninja-ide.org).
#
# NINJA-IDE is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# any later version.
#
# NINJA-IDE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NINJA-IDE; If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import sys
import os
import re
import threading
import shutil
from PyQt5 import QtCore
from ninja_ide.core import settings
if sys.version_info.major == 3:
python3 = True
else:
python3 = False
# Lock to protect the file's writing operation
file_store_content_lock = threading.Lock()
class NinjaIOException(Exception):
"""
IO operation's exception
"""
pass
class NinjaNoFileNameException(Exception):
"""
Tried to write a file but I lack a file name
"""
pass
class NinjaFileExistsException(Exception):
"""
Try to override existing file without confirmation exception.
"""
def __init__(self, filename=''):
Exception.__init__(self, 'The file already exists.')
self.filename = filename
def create_init_file(folderName):
"""Create a __init__.py file in the folder received."""
if not os.path.isdir(folderName):
raise NinjaIOException("The destination folder does not exist")
name = os.path.join(folderName, '__init__.py')
if file_exists(name):
raise NinjaFileExistsException(name)
f = open(name, 'w')
f.flush()
f.close()
def create_init_file_complete(folderName):
"""Create a __init__.py file in the folder received.
This __init__.py will contain the information of the files inside
this folder."""
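    # For illustration: a folder holding foo.py that defines `def bar():` and
    # `class Baz(object):` ends up with an __init__.py containing the lines
    # "from foo import bar" and "from foo import Baz".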
if not os.path.isdir(folderName):
raise NinjaIOException("The destination folder does not exist")
patDef = re.compile('^def .+')
patClass = re.compile('^class .+')
patExt = re.compile('.+\\.py')
files = os.listdir(folderName)
files = list(filter(patExt.match, files))
files.sort()
imports_ = []
for f in files:
read = open(os.path.join(folderName, f), 'r')
imp = [re.split('\\s|\\(', line)[1] for line in read.readlines()
if patDef.match(line) or patClass.match(line)]
imports_ += ['from ' + f[:-3] + ' import ' + i for i in imp]
name = os.path.join(folderName, '__init__.py')
fi = open(name, 'w')
for import_ in imports_:
fi.write(import_ + '\n')
fi.flush()
fi.close()
def create_folder(folderName, add_init_file=True):
"""Create a new Folder inside the one received as a param."""
if os.path.exists(folderName):
raise NinjaIOException("The folder already exist")
os.makedirs(folderName)
if add_init_file:
create_init_file(folderName)
def create_tree_folders(folderName):
"""Create a group of folders, one inside the other."""
if os.path.exists(folderName):
raise NinjaIOException("The folder already exist")
os.makedirs(folderName)
def folder_exists(folderName):
"""Check if a folder already exists."""
return os.path.isdir(folderName)
def file_exists(path, fileName=''):
"""Check if a file already exists."""
if fileName != '':
path = os.path.join(path, fileName)
return os.path.isfile(path)
def _search_coding_line(txt):
"""Search a pattern like this: # -*- coding: utf-8 -*-."""
coding_pattern = "coding[:=]\s*([-\w.]+)"
pat_coding = re.search(coding_pattern, txt)
if pat_coding and pat_coding.groups()[0] != 'None':
return pat_coding.groups()[0]
return None
def get_file_encoding(content):
"""Try to get the encoding of the file using the PEP 0263 rules
search the first or the second line of the file
Returns the encoding or the default UTF-8
"""
encoding = None
try:
lines_to_check = content.split("\n", 2)
for index in range(2):
if len(lines_to_check) > index:
line_encoding = _search_coding_line(lines_to_check[index])
if line_encoding:
encoding = line_encoding
break
except UnicodeDecodeError as error:
# add logger
print(error)
# if not encoding is set then use UTF-8 as default
if encoding is None:
encoding = "UTF-8"
return encoding
def read_file_content(fileName):
"""Read a file content, this function is used to load Editor content."""
try:
with open(fileName, 'rU') as f:
content = f.read()
except IOError as reason:
raise NinjaIOException(reason)
except:
raise
return content
def get_basename(fileName):
"""Get the name of a file or folder specified in a path."""
if fileName.endswith(os.path.sep):
fileName = fileName[:-1]
return os.path.basename(fileName)
def get_folder(fileName):
"""Get the name of the folder containing the file or folder received."""
return os.path.dirname(fileName)
def store_file_content(fileName, content, addExtension=True, newFile=False):
"""Save content on disk with the given file name."""
if fileName == '':
raise Exception()
ext = (os.path.splitext(fileName)[-1])[1:]
if ext == '' and addExtension:
fileName += '.py'
if newFile and file_exists(fileName):
raise NinjaFileExistsException(fileName)
try:
flags = QtCore.QIODevice.WriteOnly | QtCore.QIODevice.Truncate
f = QtCore.QFile(fileName)
if settings.use_platform_specific_eol():
flags |= QtCore.QIODevice.Text
if not f.open(flags):
raise NinjaIOException(f.errorString())
stream = QtCore.QTextStream(f)
encoding = get_file_encoding(content)
if encoding:
stream.setCodec(encoding)
encoded_stream = stream.codec().fromUnicode(content)
f.write(encoded_stream)
f.flush()
f.close()
except:
raise
return os.path.abspath(fileName)
def open_project(path):
"""Return a dict structure containing the info inside a folder."""
return open_project_with_extensions(settings.SUPPORTED_EXTENSIONS)
def open_project_with_extensions(path, extensions):
"""Return a dict structure containing the info inside a folder.
This function uses the extensions specified by each project."""
if not os.path.exists(path):
raise NinjaIOException("The folder does not exist")
valid_extensions = [ext.lower() for ext in extensions
if not ext.startswith('-')]
d = {}
for root, dirs, files in os.walk(path, followlinks=True):
for f in files:
ext = os.path.splitext(f.lower())[-1]
if ext in valid_extensions or '.*' in valid_extensions:
d[root] = [f, dirs]
elif ext == '' and '*' in valid_extensions:
d[root] = [f, dirs]
return d
def delete_file(path, fileName=None):
"""Delete the proper file.
If fileName is None, path and fileName are joined to create the
complete path, otherwise path is used to delete the file."""
if fileName:
path = os.path.join(path, fileName)
if os.path.isfile(path):
os.remove(path)
def delete_folder(path, fileName=None):
"""Delete the proper folder."""
if fileName:
path = os.path.join(path, fileName)
if os.path.isdir(path):
shutil.rmtree(path)
def rename_file(old, new):
"""Rename a file, changing its name from 'old' to 'new'."""
if os.path.isfile(old):
if file_exists(new):
raise NinjaFileExistsException(new)
os.rename(old, new)
return new
return ''
def get_file_extension(fileName):
"""Get the file extension in the form of: 'py'"""
return os.path.splitext(fileName.lower())[-1][1:]
def get_file_name(fileName):
"""Get the file name, without the extension."""
return os.path.splitext(fileName)[0]
def get_module_name(fileName):
"""Get the name of the file without the extension."""
module = os.path.basename(fileName)
return (os.path.splitext(module)[0])
def convert_to_relative(basePath, fileName):
"""Convert a absolut path to relative based on its start with basePath."""
if fileName.startswith(basePath):
fileName = fileName.replace(basePath, '')
if fileName.startswith(os.path.sep):
fileName = fileName[1:]
return fileName
def create_path(*args):
"""Join the paths provided in order to create an absolut path."""
return os.path.join(*args)
def belongs_to_folder(path, fileName):
"""Determine if fileName is located under path structure."""
if not path.endswith(os.path.sep):
path += os.path.sep
return fileName.startswith(path)
def get_last_modification(fileName):
"""Get the last time the file was modified."""
return QtCore.QFileInfo(fileName).lastModified()
def has_write_permission(fileName):
"""Check if the file has writing permissions."""
return os.access(fileName, os.W_OK)
def check_for_external_modification(fileName, old_mtime):
"""Check if the file was modified outside ninja."""
new_modification_time = get_last_modification(fileName)
# check the file mtime attribute calling os.stat()
if new_modification_time > old_mtime:
return True
return False
def get_files_from_folder(folder, ext):
"""Get the files in folder with the specified extension."""
try:
filesExt = os.listdir(folder)
except:
filesExt = []
filesExt = [f for f in filesExt if f.endswith(ext)]
return filesExt
def is_supported_extension(filename, extensions=None):
if extensions is None:
extensions = settings.SUPPORTED_EXTENSIONS
if os.path.splitext(filename.lower())[-1] in extensions:
return True
return False
| gpl-3.0 | 8,059,544,523,464,693,000 | 28.938596 | 78 | 0.64518 | false |
bluekyu/kyuNotebook | src/kyunotebooklib/icons_rc.py | 1 | 18057 | # -*- coding: utf-8 -*-
# Resource object code
#
# Created: ? 2? 20 17:07:44 2012
# by: The Resource Compiler for PyQt (Qt v4.7.4)
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore
qt_resource_data = b"\
\x00\x00\x00\xf6\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95\x2b\x0e\x1b\x00\x00\x00\x98\x49\x44\x41\x54\x58\x85\xed\
\x96\x4b\x0e\xc0\x20\x08\x44\xb5\xe9\xfd\xaf\x6c\xb7\x8d\xa9\x32\
\x7c\x46\x16\x65\x56\x8d\x01\xe7\x05\xb0\xa1\xb5\xd2\xdf\xd5\x17\
\xe7\x23\xe8\x1e\x57\xa2\x16\xc2\x04\x82\x24\x50\xab\xa1\x01\xf8\
\x8a\x5d\xc1\xc1\x10\x5e\x80\x1d\x0c\x04\x71\x21\x41\xa0\xfa\x64\
\x0a\xb5\x2e\x12\xc0\x24\x06\x80\x6a\x08\xd9\x15\x10\xdb\xc0\x02\
\x80\xab\x90\x3e\x03\x33\xa9\xe5\xef\xe7\xf2\x56\x3f\x9b\x48\xf3\
\xf7\x47\x8a\x39\xaa\xd1\x6c\x80\x50\x5e\xfa\x10\xb2\x00\xe0\x8a\
\xb1\x2b\x20\xf6\x9b\x01\xa0\x9a\x97\x3b\xcb\x38\x0a\x60\x67\x0a\
\x3d\x37\xc6\x4a\x06\x9b\x4b\x81\xa9\x4b\xe9\xb1\xb5\xbc\x54\x7a\
\x00\x93\x3c\x17\x24\xc6\xc5\x4c\xcb\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x01\x54\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95\x2b\x0e\x1b\x00\x00\x00\xf6\x49\x44\x41\x54\x58\x85\xed\
\x57\xc1\x0d\x83\x30\x10\x73\x10\x23\xf2\x44\x62\x05\x36\xc0\xd9\
\x20\x43\xf0\xe4\xc1\x26\xb0\x11\x7d\x11\x15\x1a\xe8\x91\x3b\xa4\
\xaa\xc2\xaf\x4b\x94\x60\x63\x73\x22\x71\x24\x17\x18\x60\x1c\xc7\
\x58\x0f\xc3\x90\x5c\x53\x55\x55\xac\xe7\x79\x76\x00\x50\x02\x40\
\xd7\x75\xa6\x02\x00\xa0\xef\xfb\xcd\xb8\xae\xeb\xe4\xbe\x72\x2d\
\xbc\xf7\x6a\x11\x12\xc2\x43\x01\x80\xce\x09\xb5\x03\x2b\xac\x9c\
\xc8\x72\x60\x45\x8e\x13\x66\x0e\x58\x41\xe5\x80\x16\xef\xed\xa6\
\x12\x20\xf9\x1e\xb4\x2d\x7c\x2a\x20\xf7\xe1\x6d\xdb\x26\xe7\x43\
\x08\x1f\x73\x45\x16\x83\x00\x21\x84\x24\xe1\x1e\xff\x13\xc1\x34\
\x4d\x9b\x71\xd3\x34\xb1\xf6\xde\x83\xa4\x4b\xed\xbb\x2d\x02\x29\
\xd4\x11\x9c\x41\xe2\xe0\x2d\x5d\x70\x05\x4f\x04\x4f\x04\x4f\x04\
\xbf\x1d\x81\x14\x9a\xa8\xcc\x0e\x24\x47\x71\x9d\xfd\x07\x80\x9b\
\x23\xf8\x46\x7e\xab\x00\x09\x79\x14\x60\x7d\x29\x91\x92\x03\x40\
\x41\xd2\x49\x17\x5b\x93\x03\xbb\x08\xb4\x4e\x5c\x25\x4f\x82\xe4\
\x72\x15\x24\x97\xdc\x5b\x76\xb2\x0d\x73\x9c\xc8\x7d\xf3\x17\x4e\
\xf9\x9e\x1b\xe7\xa2\x40\xb4\x00\x00\x00\x00\x49\x45\x4e\x44\xae\
\x42\x60\x82\
\x00\x00\x00\xe5\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95\x2b\x0e\x1b\x00\x00\x00\x87\x49\x44\x41\x54\x58\x85\xed\
\x96\xc1\x0e\xc0\x20\x08\x43\xeb\xb2\xff\xff\x65\x77\x35\x26\xcc\
\xd6\xd0\x8b\xd2\xab\x48\x5f\x40\x09\x40\xe9\x76\xb5\x9f\xb3\x6e\
\xc8\x29\x07\xdb\x21\xd8\xc0\x1d\x10\x2a\xb7\x52\xae\x11\x22\xba\
\x37\x83\x2e\xf3\x3f\x02\x00\x23\xa9\xff\x0e\x00\x59\x05\x70\x1c\
\x80\xfc\x5d\x33\x01\xe4\x2f\x08\x00\x6f\x92\xd9\xb6\x9c\x6f\x80\
\x82\x74\x3f\xc2\x25\x44\xf6\x28\x8e\x8c\xc3\x78\x57\x05\x66\xc3\
\xb0\x12\xce\x16\x50\xd5\x3d\x6e\x10\x15\xc0\x28\x6a\x0e\x38\x57\
\x32\xca\xc7\xb5\x94\xd2\x1e\x8e\xb5\x9c\x36\x2f\x95\x00\xe0\x03\
\xdd\x9e\x11\x28\xbc\x88\x84\x82\x00\x00\x00\x00\x49\x45\x4e\x44\
\xae\x42\x60\x82\
\x00\x00\x01\x3e\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95\x2b\x0e\x1b\x00\x00\x00\xe0\x49\x44\x41\x54\x58\x85\xed\
\x96\xc1\x0e\xc3\x30\x08\x43\x9d\x69\xff\x0d\x7c\x39\x3d\x4d\xcb\
\x5a\x48\x20\x23\xda\x0e\xf5\xa9\x4a\x2b\x6c\xf1\x20\x2a\xf0\x63\
\xb5\x2d\x55\x59\xf5\x7a\xd6\x4c\x2f\xf3\x90\x99\xaf\x05\xa2\xde\
\xa0\xc1\xcb\x6b\x88\xa7\xf7\x2d\xd1\xa0\x90\xa3\x26\xef\x67\x25\
\xe3\x9c\x55\xcf\x21\x1e\x69\x97\x80\xf9\x50\x27\x3c\x25\x01\xc2\
\xe6\x46\x08\x17\xc1\x4b\x22\x7e\xf5\x33\x26\x75\xa8\x99\x38\xa2\
\x01\x56\x66\x21\xa3\xb2\x19\x58\x55\x29\x82\xe9\x16\xac\x04\x88\
\x20\x50\x4a\x0e\x62\xb7\x8a\x65\x08\xbc\x01\x1c\x99\x03\x5f\x22\
\xe8\x45\x44\x1f\x9d\x30\x3b\x92\xb9\x09\xfb\xc2\x19\x79\x38\x18\
\x02\x36\xbe\x9f\x06\x58\x91\x85\xc3\x6b\x64\x19\x02\x4f\xb3\x0e\
\xde\x17\xd1\x8d\xe0\xff\x11\x44\xb5\x8a\xaa\xf4\x1e\xf0\x70\x89\
\x08\x98\xd9\xfc\xff\xdc\x8e\x60\x64\xbe\x3d\xc0\xcc\x7c\x6b\x80\
\x88\xf9\xb6\x00\x51\xf3\x2d\x01\x32\xe6\xe5\x01\xb2\xe6\xc0\x60\
\x0d\x57\xf6\x3a\x6b\x7e\x0b\x00\x0e\xaa\x20\x59\x6e\x6d\x47\x47\
\x49\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x01\x46\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95\x2b\x0e\x1b\x00\x00\x00\xe8\x49\x44\x41\x54\x58\x85\xed\
\x57\xdb\x0a\xc5\x20\x0c\x6b\xe1\xfc\xb7\xf5\xcb\xb3\x87\xb1\x9d\
\xce\x5b\xd5\x55\xc6\x60\x01\x5f\xc4\x99\x98\x35\x15\x89\x1e\x06\
\x2f\xdb\x59\x80\x7c\x8e\x33\xbe\xa2\x00\x11\x39\x3f\x0e\x41\x86\
\xb9\x39\xe6\xdc\x2d\x11\x55\x72\x80\x20\x22\x00\xa8\x7b\x90\xe0\
\x1c\x1a\x7a\x5e\xf3\xfd\x5a\x22\x62\x1c\x38\x36\x19\x27\xbf\x10\
\x00\x87\x13\x35\x01\x4c\xb4\x3b\x10\xa3\xf8\x92\x27\x22\x86\x1c\
\x28\x89\x49\x6b\x04\xa1\xcc\xa7\xe7\x59\xed\x3b\xe4\xc0\x4c\x41\
\x5a\x70\xad\x01\x4f\x01\x45\x07\x7a\x7e\x81\xb6\xb7\x66\xfb\x65\
\x7d\x81\x3c\xab\xa6\x5e\xeb\xd3\x42\x6c\x0a\xf0\x4e\xc1\x4e\xc8\
\x7d\x69\x50\xcd\x68\xc8\x01\x4b\xcc\xb1\xee\xd1\x4e\x58\xea\x88\
\xff\xce\x48\x99\xb2\x65\x29\x40\xe8\xbb\xe7\x6e\xa7\xc0\x82\x55\
\xc0\xae\x29\xe8\x01\xf3\x95\xd3\x35\x05\x33\xb8\x7d\x17\x58\x18\
\x76\xcf\x2b\x05\xd5\x74\x24\x78\xff\x5d\x60\xe1\x4b\x81\x85\x2f\
\x05\xef\x4a\xc1\x0a\x2c\x79\x19\x35\x09\x79\xe1\x73\xf0\xc3\x0c\
\x36\xc1\x2d\xa3\x71\x96\xca\xcf\x76\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x01\x42\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95\x2b\x0e\x1b\x00\x00\x00\xe4\x49\x44\x41\x54\x58\x85\xed\
\x56\xc9\x11\xc3\x20\x0c\xdc\x64\x52\x90\x8b\xe1\xe9\x22\xdc\x81\
\x45\x07\x14\xc1\x93\x07\xa5\xd0\x91\xf3\x8a\x27\xce\x08\x73\x49\
\xe3\x3c\xbc\x2f\x60\x04\xbb\xa3\x95\x00\xe0\x62\x3c\x24\x0e\x99\
\xa6\x69\xfb\x8c\x43\x08\x6c\x8c\x31\x66\x1f\xa7\x94\x76\xde\x17\
\x17\x4c\x44\x1b\xb7\x9e\x43\x8c\xf1\x30\xf7\xde\x1f\xe6\xf3\x3c\
\x67\xf7\xb2\x02\x00\x60\x5d\xd7\x6e\x01\x67\x84\xd5\x02\x46\x20\
\x92\x81\x11\x88\x66\xc0\x5a\xdb\x2c\x40\x34\x03\x35\xb5\xf0\x57\
\x35\xf0\xdd\x6e\x22\x02\x7a\x2c\x10\x15\xd0\x6a\xc1\xb2\x2c\x6c\
\x8c\x73\x8e\x5d\x7f\x16\x4f\xef\x80\x73\x2e\x4b\xf8\x8b\xcb\x2d\
\x60\x41\x44\x9b\x34\x72\xd7\xbb\x8a\x05\x2d\x50\xb7\xa0\x54\xc4\
\x22\x5d\x30\x82\xdb\x82\xdb\x02\xb1\xc7\xa8\xd7\x2a\xd1\xd7\x30\
\x67\x97\xb5\x16\x44\xc4\x7e\x80\xd5\x2d\x38\x23\x57\x17\x50\x22\
\x57\x15\x50\x43\xae\x26\xa0\x96\x5c\x45\x40\x0b\xb9\xb8\x80\x56\
\x72\xe0\xa4\x0d\x7b\xfa\xba\x95\xfc\x06\x00\xbc\x01\xf7\x46\xa7\
\x79\x50\x22\xf9\x39\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\
\x82\
\x00\x00\x00\xb5\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95\x2b\x0e\x1b\x00\x00\x00\x57\x49\x44\x41\x54\x58\x85\xed\
\x96\x31\x0e\x00\x20\x08\xc4\xd0\xff\xff\x59\x17\x27\x17\xee\x88\
\xc6\xc1\x36\x61\x83\xa3\x71\x30\x44\x00\xe4\x8c\x55\x57\xe6\x7a\
\x21\xf8\x28\x08\x20\x80\x00\x02\x08\x20\x80\x80\x23\xe0\xdc\x04\
\x72\x6f\x2b\x04\x66\x33\x4e\xaf\x2c\xb0\x07\x2b\x48\xd9\x8e\x80\
\x23\xe1\xe6\x7e\x8c\xf2\x54\x95\x8b\x58\xde\xf1\xfc\x1f\x00\x98\
\xd2\x58\x0b\x21\x5f\xa5\xbe\xc4\x00\x00\x00\x00\x49\x45\x4e\x44\
\xae\x42\x60\x82\
\x00\x00\x01\xad\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95\x2b\x0e\x1b\x00\x00\x01\x4f\x49\x44\x41\x54\x58\x85\xed\
\x56\x4b\x4e\xc3\x30\x14\x1c\x23\xd6\x65\xc5\x05\x8a\x90\x7a\x8b\
\xe4\x02\x48\xd9\x70\x89\x22\xa4\xc2\x05\xf2\xde\x09\x5a\x09\x95\
\x13\x70\x10\x73\x0e\x44\xd7\x48\x5d\x85\x0b\x98\x05\x4d\xea\x0a\
\x7f\x92\x67\x9b\x0d\xcc\xd2\x71\x3c\xe3\x19\x3f\xfb\x01\x7f\x1d\
\xca\x35\x48\x44\xa6\x04\x19\x11\x29\xa0\x3e\xac\xad\x15\x00\x9c\
\xfb\x26\xb7\x6d\x9b\x95\xfc\xf3\xf5\x19\x34\x90\x1f\x71\x96\x95\
\x25\x40\xbe\xd9\x3c\x59\x23\x7a\x70\xbe\xb8\x80\x9e\xfc\x6a\x7e\
\x0d\x6e\x3e\xd0\xe9\xdb\x93\xef\xde\x08\x7a\x30\xb3\x98\xfc\xa1\
\xbe\x1c\xc8\xdf\x77\x6f\x58\xad\xee\x71\x51\xdf\x39\xcf\xdd\x09\
\x88\xc8\xa4\xa2\xd3\x5b\xc3\xcd\xc2\xbc\x3c\xde\x18\x6e\x16\xa6\
\xd3\x5b\xe7\xc1\x2e\x12\x81\x6d\x7b\xbf\xf3\xb5\xde\x3b\xe7\x66\
\x8f\xc0\x65\xfb\xac\x5a\x02\xda\xbd\x4e\x54\xc0\x94\x72\x74\xed\
\x7c\x56\x2d\x83\xff\x64\x8b\x40\x42\x0e\x64\x8a\xc0\x65\xfb\x5a\
\xef\x01\xcd\x51\x07\x93\x23\xf0\xed\xbc\xad\xa2\xba\x01\x24\x46\
\x20\xb5\xdd\x86\x38\x82\x90\xed\x36\x44\x6f\xca\x98\x8b\x08\x80\
\x01\x30\x5c\x32\x31\xf8\x5e\x58\x51\x04\x4a\x1d\x6f\x53\x89\xed\
\x36\x92\xaa\xe0\xdb\x88\x34\x88\xaa\x20\x67\xaf\xf0\x2b\xfd\x40\
\x08\x51\x07\xc6\x42\xfa\x6c\x67\x13\x00\xf8\xa3\x61\xe6\x43\x3f\
\xf8\x13\xc5\x23\x08\x91\x17\x17\x10\x23\x2f\x2a\x60\x0c\x79\x31\
\x01\x63\xc9\x8b\x08\x98\x42\x9e\x5d\xc0\x54\x72\x20\x50\x86\x92\
\xba\x9e\x4a\xfe\x0f\x00\xf8\x02\x0d\xc0\x38\x79\x69\x5a\xc3\x72\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x01\xab\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95\x2b\x0e\x1b\x00\x00\x01\x4d\x49\x44\x41\x54\x58\x85\xed\
\x57\x41\x6a\xc3\x30\x10\x1c\x43\xcf\xc9\xa9\xd0\x73\x42\xc0\xbf\
\x88\x3f\x50\xf0\x3f\x42\x21\xc9\x07\xbc\x7a\x41\x0c\xc1\x3f\xe8\
\x43\xd4\x77\x94\xf4\x03\x3d\xb9\x1f\xd8\x1c\x1c\xa5\xa9\x2c\x45\
\x96\xac\x2a\x85\x66\x40\x60\x64\x8b\x9d\x99\xdd\x95\x64\xe0\xbf\
\x23\x33\x4d\x12\x11\xab\xe7\xaa\xa2\xb8\x01\x33\x73\x4c\x63\x70\
\x66\x30\x11\x31\x33\xa2\x0d\x3d\xde\xc3\x35\x12\x42\x8c\x94\x3b\
\x00\x36\x02\x19\xd0\x39\x20\x04\xdd\x84\x80\xd1\x81\x10\x32\xfd\
\x1a\x2a\x4e\x69\x90\xe6\x5a\xf8\xad\x1a\x68\x65\xc3\x5d\x70\x35\
\x3a\x24\xa9\x81\xaf\xb7\x06\x75\xbd\x07\xf0\x04\x5d\xbd\x57\x0d\
\x84\xa4\x60\x53\x3c\xa2\xae\xf7\x98\xcf\x16\x10\xe5\x3b\xd6\xeb\
\x17\x4c\x0b\x79\x7e\xdf\xcb\xc3\x65\x0a\x14\x42\xf7\x02\xa5\x7c\
\x3e\x5b\xe0\xf0\xa1\x82\xaf\xd2\xec\x03\xad\x6c\x58\x94\x39\xbf\
\x6e\x9f\x59\x94\x39\xb7\xb2\x19\xbf\x0f\x0c\x4d\xc1\xa5\xed\x4a\
\xf9\x64\xb9\x1a\xb4\x76\xb4\x03\x36\xe5\x49\x76\x42\x53\xce\x5d\
\xca\xa3\x75\x81\xc9\xf6\x9d\xfc\x44\xb5\xbc\x4e\x3a\x4a\x17\xf8\
\x28\x8f\x7e\x1a\xba\x72\xee\x3a\x0d\xbd\x1c\xd0\x53\x60\xb3\xdd\
\xb4\x36\xba\x03\xbe\xca\xa3\x38\xa0\x29\x01\x00\x88\x32\xf7\xea\
\x73\xdd\x81\xa0\x2e\x20\xfa\x9e\x3b\xdb\x2e\xfb\x24\x6d\xe4\x7f\
\x10\x32\x04\x77\x3a\xa0\xd4\x73\xef\x4b\x37\x46\x39\xa0\x10\x12\
\xd8\x86\x1b\xdc\x88\x1c\xb8\xdf\x8a\x4f\x04\x92\xdd\x8a\xff\xde\
\x9f\xd1\x1d\xa9\x71\x04\x8c\x5e\x77\xa9\x75\x59\x5e\x6e\x00\x00\
\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x01\x21\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95\x2b\x0e\x1b\x00\x00\x00\xc3\x49\x44\x41\x54\x58\x85\xed\
\x96\x4b\x0e\x80\x20\x0c\x44\xd1\xfb\xdf\x19\x57\x26\x0a\xed\x7c\
\x90\x44\x13\xe9\x92\xb6\x33\x8f\x12\x91\x52\x56\xfc\x3d\x36\xb7\
\xa1\x96\x52\x81\x98\xad\x27\x37\x20\xe3\x27\x20\x52\xa1\x63\xee\
\x42\xd0\xa2\x11\x73\x07\x62\xb7\xcc\x2b\x61\x69\xf2\x0a\x7c\x4a\
\x08\xcd\xb7\xa0\x0d\xe4\xd1\x24\xc2\x84\xb4\xf3\xab\x09\xcb\x03\
\x08\x78\x04\xa9\xf8\x75\x9d\xe5\x49\x74\x54\xe1\xb9\x89\x62\x77\
\xe5\x7e\xc3\xd1\x14\xf8\x04\x12\xb1\x59\xf5\x1a\x80\x23\x6a\xc2\
\xea\x00\x8a\xb8\x3b\x29\x1b\xc0\xbc\x07\xe6\x02\xa8\xe2\x26\x84\
\x06\xe0\xee\xcc\xa8\xef\x00\xba\x4f\x05\x89\xa1\x33\x6f\xfa\xc6\
\x2f\xa2\xcc\xe4\x5c\x67\x79\x26\x9f\x25\x5e\xfd\x17\x40\x08\x36\
\x76\xc3\x9c\x02\x84\x10\x46\x28\xef\x81\xef\xbf\x88\x46\x20\xa6\
\xbf\x09\x55\x90\x91\x57\xf1\x8a\x15\xaf\xc7\x01\xcb\x89\x4c\x1c\
\x35\x87\x3c\xf5\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\
\x00\x00\x01\x7d\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95\x2b\x0e\x1b\x00\x00\x01\x1f\x49\x44\x41\x54\x58\x85\xdd\
\x57\xd1\x0e\x83\x20\x0c\x3c\x16\xff\xbb\xed\x97\x77\x0f\x8a\x43\
\x04\x69\x3b\x78\xd8\x2e\x21\x26\x88\xed\x79\xed\x21\x26\x2c\x82\
\x02\x5a\xcf\x25\x20\x35\xe6\xd6\x27\x7e\x22\x72\x21\xc0\xcc\xe7\
\xc3\x44\xec\xcf\xee\x78\x9d\x9b\x1a\x65\x72\x55\x28\x33\xab\x2a\
\xec\x03\xfe\x01\x00\x5b\x8b\x84\xc8\x67\x4e\xc4\xa6\x04\xc1\xb6\
\xae\x46\x4d\x20\x65\x05\xca\xc4\x44\xf4\x1c\x25\xc5\x5a\x49\x01\
\x1d\x2a\xe0\x8b\x38\xec\xbf\x0f\x0e\xd2\x26\x05\x56\xc2\xa5\x80\
\x74\x6e\x0c\x0a\x64\x43\xcf\x05\xfb\x75\x80\x80\x03\xf2\x98\xd3\
\x03\xaa\xa1\x46\x4c\x40\x72\xf5\x40\xaf\x04\x40\xbc\x0c\x2e\x05\
\x1e\xed\x48\xe4\x52\x21\xef\x84\x73\x5d\x60\x2c\x85\x30\x03\xbc\
\xc7\x9f\xe2\x82\x0b\x8e\xc0\x74\x5c\x6f\x04\xab\x04\x2e\x05\x86\
\x3b\xe2\x75\xb1\x69\xd9\xdc\x9d\x30\x80\x69\x2e\xb0\xa0\xa5\xe0\
\x3c\x17\x04\xf1\x1f\xdf\x02\x2b\x2c\x25\x98\xe7\x02\x23\x7e\xcb\
\x05\x56\x78\x4a\xb5\x4c\x81\x5e\xb9\x44\xe4\x7c\xd1\x16\x81\xa5\
\x2e\xa8\x93\xb7\x08\x2c\xeb\x81\x56\xf2\x16\x81\x25\x0a\xf4\x92\
\xdf\xc0\xcc\x9a\x47\xfe\xe1\x30\x1d\xc9\x2a\x94\xcf\xe4\x58\x61\
\xf6\xdf\x10\x28\xcf\x99\x3d\xbc\xc2\xcc\x06\xb0\xca\xbe\x8d\x16\
\xe4\x60\x5e\x98\x6a\x0e\xe0\x0d\xb3\x77\xca\x0f\x98\xaa\x2f\x08\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xdd\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\
\x01\x95\x2b\x0e\x1b\x00\x00\x00\x7f\x49\x44\x41\x54\x58\x85\xed\
\x96\x41\x0a\xc0\x20\x10\x03\x63\xff\xff\x67\xbd\xb7\x52\x6a\x12\
\x9a\x15\x9c\xa3\x87\x10\xc7\x65\x11\x38\x84\x69\xa6\x9c\xce\x66\
\x3a\x0a\xf4\xc9\xd9\xe7\xdc\xcb\x50\x40\x22\x5e\x20\x3e\x03\x71\
\x98\xb6\xb3\xa1\xa3\xf3\x98\x19\xb0\x2a\x56\xc3\xee\x36\x7e\x31\
\x60\x45\x31\x20\xdf\x1e\x28\x60\x20\x5e\x80\x7d\x02\x8b\x7e\xa0\
\x80\x81\x78\x01\xc7\x26\x94\x76\x49\xdc\x40\xbc\xc0\xaa\x3e\xab\
\x7e\xa0\x80\x81\x78\x81\x15\x85\x76\xfd\xc0\xe6\x06\x98\x8c\x07\
\x0e\x03\x6f\x5f\xb4\x43\x7d\x06\x5f\xb5\x0c\x28\x24\x96\x55\x60\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x09\
\x09\x01\x73\xa4\
\x00\x73\
\x00\x74\x00\x72\x00\x69\x00\x6b\x00\x65\x00\x6f\x00\x75\x00\x74\
\x00\x0d\
\x00\x3f\x0c\xb5\
\x00\x73\
\x00\x61\x00\x76\x00\x65\x00\x5f\x00\x61\x00\x6c\x00\x6c\x00\x5f\x00\x70\x00\x61\x00\x67\x00\x65\
\x00\x04\
\x00\x06\x96\x24\
\x00\x62\
\x00\x6f\x00\x6c\x00\x64\
\x00\x08\
\x0c\xd6\x69\x55\
\x00\x6e\
\x00\x65\x00\x77\x00\x5f\x00\x70\x00\x61\x00\x67\x00\x65\
\x00\x08\
\x0c\xd6\x58\x25\
\x00\x6e\
\x00\x65\x00\x77\x00\x5f\x00\x6e\x00\x6f\x00\x74\x00\x65\
\x00\x09\
\x0c\xb6\x94\xd5\
\x00\x73\
\x00\x61\x00\x76\x00\x65\x00\x5f\x00\x70\x00\x61\x00\x67\x00\x65\
\x00\x09\
\x0a\xc9\xd8\xc5\
\x00\x75\
\x00\x6e\x00\x64\x00\x65\x00\x72\x00\x6c\x00\x69\x00\x6e\x00\x65\
\x00\x09\
\x00\xa6\xb0\xb5\
\x00\x65\
\x00\x64\x00\x69\x00\x74\x00\x5f\x00\x70\x00\x61\x00\x67\x00\x65\
\x00\x0b\
\x0f\x48\x42\x1e\
\x00\x61\
\x00\x70\x00\x70\x00\x6c\x00\x69\x00\x63\x00\x61\x00\x74\x00\x69\x00\x6f\x00\x6e\
\x00\x04\
\x00\x07\x8c\x04\
\x00\x71\
\x00\x75\x00\x69\x00\x74\
\x00\x0b\
\x0c\x47\x82\x7d\
\x00\x72\
\x00\x65\x00\x6d\x00\x6f\x00\x76\x00\x65\x00\x5f\x00\x69\x00\x74\x00\x65\x00\x6d\
\x00\x06\
\x07\x0a\x82\xf3\
\x00\x69\
\x00\x74\x00\x61\x00\x6c\x00\x69\x00\x63\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x0c\x00\x00\x00\x01\
\x00\x00\x00\x38\x00\x00\x00\x00\x00\x01\x00\x00\x02\x52\
\x00\x00\x00\xd6\x00\x00\x00\x00\x00\x01\x00\x00\x0b\x26\
\x00\x00\x00\x18\x00\x00\x00\x00\x00\x01\x00\x00\x00\xfa\
\x00\x00\x00\xa2\x00\x00\x00\x00\x00\x01\x00\x00\x07\xc6\
\x00\x00\x01\x00\x00\x00\x00\x00\x00\x01\x00\x00\x0d\xcc\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x8a\x00\x00\x00\x00\x00\x01\x00\x00\x07\x0d\
\x00\x00\x00\xe4\x00\x00\x00\x00\x00\x01\x00\x00\x0c\x4b\
\x00\x00\x00\x72\x00\x00\x00\x00\x00\x01\x00\x00\x05\xc7\
\x00\x00\x00\x5c\x00\x00\x00\x00\x00\x01\x00\x00\x04\x7d\
\x00\x00\x00\x46\x00\x00\x00\x00\x00\x01\x00\x00\x03\x3b\
\x00\x00\x00\xba\x00\x00\x00\x00\x00\x01\x00\x00\x09\x77\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
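# Importing this module registers the embedded images with Qt's resource system
# (qInitResources() above runs at import time), after which they can be loaded by
# resource path.  A minimal sketch -- the ':/name' prefix is an assumption based on
# the default .qrc prefix and the names registered in qt_resource_name:
#
#     from PyQt4 import QtGui        # assumes the same Qt binding as QtCore above
#     bold_icon = QtGui.QIcon(':/bold')
#     save_icon = QtGui.QIcon(':/save_page')
#
# qCleanupResources() undoes the registration if it is ever needed.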
| gpl-3.0 | 3,928,658,607,921,388,000 | 50.591429 | 97 | 0.72177 | false |
pkimber/checkout | checkout/migrations/0002_auto_20150625_1159.py | 1 | 1640 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def _init_state(model, name, slug):
try:
model.objects.get(slug=slug)
except model.DoesNotExist:
instance = model(**dict(name=name, slug=slug))
instance.save()
instance.full_clean()
def _init_state_action(model, name, slug, payment):
try:
obj = model.objects.get(slug=slug)
except model.DoesNotExist:
instance = model(**dict(name=name, slug=slug, payment=payment))
instance.save()
instance.full_clean()
def default_state(apps, schema_editor):
"""Create default states.
We can't import a model directly as it may be a newer version than this
migration expects. We use the historical version.
"""
model = apps.get_model('checkout', 'CheckoutAction')
_init_state_action(model, 'Card Refresh', 'card_refresh', False)
_init_state_action(model, 'Charge', 'charge', True)
_init_state_action(model, 'Invoice', 'invoice', False)
_init_state_action(model, 'Manual', 'manual', False)
_init_state_action(model, 'Payment', 'payment', True)
_init_state_action(model, 'Payment Plan', 'payment_plan', False)
model = apps.get_model('checkout', 'CheckoutState')
_init_state(model, 'Fail', 'fail')
_init_state(model, 'Pending', 'pending')
_init_state(model, 'Request', 'request')
_init_state(model, 'Success', 'success')
class Migration(migrations.Migration):
dependencies = [
('checkout', '0001_initial'),
]
operations = [
migrations.RunPython(default_state),
]
| apache-2.0 | -5,201,387,871,116,438,000 | 28.818182 | 75 | 0.643902 | false |
GeoNode/django-osgeo-importer | osgeo_importer/tests/tests_original.py | 1 | 53760 | # -*- coding: UTF-8 -*-
# (see test_utf8 for the reason why this file needs a coding cookie)
from geonode.geoserver.helpers import ogc_server_settings
from geonode.layers.models import Layer
import json
import logging
import os
import shutil
import unittest
from django import db
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.gis.gdal import DataSource
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.test import TestCase, Client, override_settings
from django.test.utils import setup_test_environment
import gdal
from geoserver.catalog import Catalog, FailedRequestError
import osgeo
from django.db.models import Sum
from osgeo_importer.handlers.geoserver import GeoWebCacheHandler
from osgeo_importer.handlers.geoserver import configure_time
from osgeo_importer.importers import OGRImport
from osgeo_importer.inspectors import GDALInspector
from osgeo_importer.models import (
UploadedData, UploadFile, UploadLayer,
validate_file_extension, ValidationError, validate_inspector_can_read
)
from osgeo_importer.tests.test_settings import _TEST_FILES_DIR
from osgeo_importer.utils import load_handler, launder, ImportHelper
OSGEO_IMPORTER = getattr(settings, 'OSGEO_IMPORTER', 'osgeo_importer.importers.OGRImport')
# In normal unittest runs, this will be set in setUpModule; set here for the
# benefit of static analysis and users importing this instead of running tests.
User = None
def get_testfile_path(filename):
"""Convenience function for getting the path to a test file.
"""
return os.path.join(_TEST_FILES_DIR, filename)
def get_layer_attr(layer, attr_value):
"""Convenience function for getting a date attribute from a layer.
"""
date_attrs = [
attr for attr in layer.attributes
if attr.attribute == attr_value
]
if not date_attrs:
return None
return date_attrs[0]
def setUpModule():
"""unittest runs this automatically after import, before running tests.
This function is a place to put code which is needed to set up the global
test environment, while avoiding side effects at import time and also
unintended changes to the module namespace.
"""
# This isn't great but at least it's explicit and confined to User.
global User
setup_test_environment()
User = get_user_model()
class AdminClient(Client):
def login_as_admin(self):
"""Convenience method to login admin.
"""
return self.login(username='admin', password='admin')
def login_as_non_admin(self):
"""Convenience method to login a non-admin.
"""
return self.login(username='non_admin', password='non_admin')
class UploaderTests(ImportHelper, TestCase):
"""Basic checks to make sure pages load, etc.
"""
def create_datastore(self, connection, catalog):
"""Convenience method for creating a datastore.
"""
settings = connection.settings_dict
ds_name = settings['NAME']
params = {
'database': ds_name,
'passwd': settings['PASSWORD'],
'namespace': 'http://www.geonode.org/',
'type': 'PostGIS',
'dbtype': 'postgis',
'host': settings['HOST'],
'user': settings['USER'],
'port': settings['PORT'],
'enabled': 'True'
}
store = catalog.create_datastore(ds_name, workspace=self.workspace)
store.connection_parameters.update(params)
try:
catalog.save(store)
except FailedRequestError:
# assuming this is because it already exists
pass
return catalog.get_store(ds_name)
def create_user(self, username, password, **kwargs):
"""Convenience method for creating users.
"""
user, created = User.objects.get_or_create(username=username, **kwargs)
if created:
user.set_password(password)
user.save()
return user
def setUp(self):
self.assertTrue(
os.path.exists(_TEST_FILES_DIR),
'Test could not run due to missing test data at {0!r}'
.format(_TEST_FILES_DIR)
)
# These tests require geonode to be running on :80!
self.postgis = db.connections['datastore']
self.postgis_settings = self.postgis.settings_dict
self.admin_user = self.create_user('admin', 'admin', is_superuser=True)
self.non_admin_user = self.create_user('non_admin', 'non_admin')
self.catalog = Catalog(
ogc_server_settings.internal_rest,
*ogc_server_settings.credentials
)
if self.catalog.get_workspace('geonode') is None:
self.catalog.create_workspace('geonode', 'http://www.geonode.org/')
self.workspace = 'geonode'
self.datastore = self.create_datastore(self.postgis, self.catalog)
def tearDown(self):
"""Clean up geoserver.
"""
self.catalog.delete(self.datastore, recurse=True)
def prepare_file_for_import(self, filepath):
""" Prepares the file path provided for import; performs some housekeeping, uploads & configures the file.
Returns a list of dicts of the form {'index': <layer_index>, 'upload_layer_id': <upload_layer_id>}
these may be used as configuration options for importing all of the layers in the file.
"""
# Make a copy of the test file, as it's removed in configure_upload()
filename = os.path.basename(filepath)
tmppath = os.path.join('/tmp', filename)
shutil.copy(get_testfile_path(filename), tmppath)
# upload & configure_upload expect closed file objects
# This is a holdover from this code originally being closely tied to a view passing request.FILES
of = open(tmppath, 'rb')
of.close()
files = [of]
uploaded_data = self.upload(files, self.admin_user)
self.configure_upload(uploaded_data, files)
configs = [{'index': l.index, 'upload_layer_id': l.id} for l in uploaded_data.uploadlayer_set.all()]
return configs
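# For a single-layer upload the returned configuration looks roughly like
#     [{'index': 0, 'upload_layer_id': 1}]
# (ids are illustrative); the tests below extend these dicts with options such as
# 'convert_to_date' and 'start_date' before passing them to generic_import().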
def import_file(self, path, configs=None):
"""Imports the file.
"""
if configs is None:
configs = []
self.assertTrue(os.path.exists(path), path)
# run ogr2ogr
ogr = OGRImport(path)
layers = ogr.handle(configuration_options=configs)
return layers
def generic_import(self, filename, configs=None):
if configs is None:
configs = [{'index': 0}]
path = get_testfile_path(filename)
results = self.import_file(path, configs=configs)
layer_results = []
for result in results:
if result[1].get('raster'):
layer_path = result[0]
layer_name = os.path.splitext(os.path.basename(layer_path))[0]
layer = Layer.objects.get(name=layer_name)
self.assertTrue(layer_path.endswith('.tif'))
self.assertTrue(os.path.exists(layer_path))
gdal_layer = gdal.OpenEx(layer_path)
self.assertTrue(gdal_layer.GetDriver().ShortName, 'GTiff')
layer_results.append(layer)
else:
layer = Layer.objects.get(name=result[0])
self.assertEqual(layer.srid, 'EPSG:4326')
self.assertEqual(layer.store, self.datastore.name)
self.assertEqual(layer.storeType, 'dataStore')
#if not path.endswith('zip'):
#self.assertGreaterEqual(
# layer.attributes.count(),
# DataSource(path)[0].num_fields
#)
layer_results.append(layer)
return layer_results[0]
def generic_api_upload(self, filenames, configs=None):
"""Tests the import api.
"""
client = AdminClient()
client.login_as_non_admin()
if configs is None:
configs = [{'index': 0}]
# Don't accidentally iterate over given 'foo' as ['f', 'o', 'o'].
self.assertNotIsInstance(filenames, str)
# Upload Files
outfiles = []
for filename in filenames:
path = get_testfile_path(filename)
with open(path) as stream:
data = stream.read()
upload = SimpleUploadedFile(filename, data)
outfiles.append(upload)
response = client.post(
reverse('uploads-new-json'),
{'file': outfiles,
'json': json.dumps(configs)},
follow=True)
content = json.loads(response.content)
self.assertEqual(response.status_code, 200)
self.assertEqual(content['id'], 1)
# Configure Uploaded Files
upload_id = content['id']
upload_layers = UploadLayer.objects.filter(upload_id=upload_id)
for upload_layer in upload_layers:
for config in configs:
if config['upload_file_name'] == os.path.basename(upload_layer.upload_file.file.name):
payload = config['config']
url = '/importer-api/data-layers/{0}/configure/'.format(upload_layer.id)
response = client.post(
url, data=json.dumps(payload),
content_type='application/json'
)
self.assertEqual(response.status_code, 200)
url = '/importer-api/data-layers/{0}/'.format(upload_layer.id)
response = client.get(url, content_type='application/json')
self.assertEqual(response.status_code, 200)
return content
def generic_raster_import(self, filename, configs=None):
if configs is None:
configs = [{'index': 0}]
path = get_testfile_path(filename)
results = self.import_file(path, configs=configs)
layer_path = results[0][0]
layer_name = os.path.splitext(os.path.basename(layer_path))[0]
layer = Layer.objects.get(name=layer_name)
self.assertTrue(layer_path.endswith('.tif'))
self.assertTrue(os.path.exists(layer_path))
gdal_layer = gdal.OpenEx(layer_path)
self.assertTrue(gdal_layer.GetDriver().ShortName, 'GTiff')
return layer
def test_multi_upload(self):
"""Tests Uploading Multiple Files
"""
# Number of layers in each file
upload_layer_counts = [1, 1, 1]
upload = self.generic_api_upload(
filenames=[
'boxes_with_year_field.zip',
'boxes_with_date.zip',
'point_with_date.geojson'
],
configs=[
{
'upload_file_name': 'boxes_with_year_field.shp',
'config': [{'index': 0}]
},
{
'upload_file_name': 'boxes_with_date.shp',
'config': [{'index': 0}]
},
{
'upload_file_name': 'point_with_date.geojson',
'config': [{'index': 0}]
}
]
)
self.assertEqual(Layer.objects.count(), sum(upload_layer_counts))
self.assertEqual(9, upload['count'])
def test_upload_with_slds(self):
"""Tests Uploading sld
"""
upload = self.generic_api_upload(
filenames=[
'boxes_with_date.zip',
'boxes.sld',
'boxes1.sld'
],
configs=[
{
'upload_file_name': 'boxes_with_date.shp',
'config': [
{
'index': 0,
'default_style': 'boxes.sld',
'styles': ['boxes.sld', 'boxes1.sld']
}
]
}
]
)
self.assertEqual(6, upload['count'])
upload_id = upload['id']
uplayers = UploadLayer.objects.filter(upload=upload_id)
layer_id = uplayers[0].pk
upfiles_count = UploadFile.objects.filter(upload=upload_id).count()
self.assertEqual(6, upfiles_count)
# Warning: this assumes that Layer pks equal UploadLayer pks
layer = Layer.objects.get(pk=layer_id)
gslayer = self.catalog.get_layer(layer.name)
default_style = gslayer.default_style
# TODO: can we use public API or omit this?
self.catalog._cache.clear()
self.assertEqual('boxes.sld', default_style.filename)
def test_upload_with_metadata(self):
"""Tests Uploading metadata
"""
upload = self.generic_api_upload(
filenames=[
'boxes_with_date.zip',
'samplemetadata.xml',
],
configs=[
{
'upload_file_name': 'boxes_with_date.shp',
'config': [
{
'index': 0,
'metadata': 'samplemetadata.xml'
}
]
}
]
)
self.assertEqual(5, upload['count'])
upload_id = upload['id']
uplayers = UploadLayer.objects.filter(upload=upload_id)
layer_id = uplayers[0].pk
upfiles_count = UploadFile.objects.filter(upload=upload_id).count()
self.assertEqual(5, upfiles_count)
layer = Layer.objects.get(pk=layer_id)
self.assertEqual(layer.language, 'eng')
self.assertEqual(layer.title, 'Old_Americas_LSIB_Polygons_Detailed_2013Mar')
#def test_geotiff_raster(self):
# """Exercise GeoTIFF raster import, ensuring import doesn't cause any exceptions.
# """
# filename = 'test_grid.tif'
# configs = self.prepare_file_for_import(filename)
#
# try:
# self.generic_raster_import(filename, configs=configs)
# except Exception as ex:
# self.fail(ex)
#def test_nitf_raster(self):
# """Tests NITF raster import
# """
# filename = 'test_nitf.nitf'
# configs = self.prepare_file_for_import(get_testfile_path(filename))
#
# try:
# self.generic_raster_import(filename, configs=configs)
# except Exception as ex:
# self.fail(ex)
def test_box_with_year_field(self):
""" Tests the import of test_box_with_year_field, checking that date conversion is performed correctly.
"""
filename = 'boxes_with_year_field.zip'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({'convert_to_date': ['date']})
layer = self.generic_import(
'boxes_with_year_field.shp',
configs=configs
)
date_attr = get_layer_attr(layer, 'date_as_date')
self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
configure_time(
self.catalog.get_layer(layer.name).resource,
attribute=date_attr.attribute,
)
self.generic_time_check(layer, attribute=date_attr.attribute)
def test_boxes_with_date(self):
"""Tests the import of test_boxes_with_date.
"""
filename = 'boxes_with_date.zip'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True})
layer = self.generic_import(
'boxes_with_date.shp',
configs=configs
)
date_attr = get_layer_attr(layer, 'date_as_date')
self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
configure_time(
self.catalog.get_layer(layer.name).resource,
attribute=date_attr.attribute,
)
self.generic_time_check(layer, attribute=date_attr.attribute)
def test_boxes_with_date_gpkg(self):
"""Tests the import of test_boxes_with_date.gpkg.
"""
filename = 'boxes_with_date.gpkg'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True})
layer = self.generic_import(filename, configs=configs)
date_attr = get_layer_attr(layer, 'date_as_date')
self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
configure_time(
self.catalog.get_layer(layer.name).resource,
attribute=date_attr.attribute,
)
self.generic_time_check(layer, attribute=date_attr.attribute)
def test_boxes_plus_raster_gpkg_by_index(self):
""" Tests the import of multilayer vector + tile geopackage using index, treating tile layers
as rasters.
Tile layers are now treated by default as a distinct layer type.
This test forces them to still be treated as rasters and should be
removed once tests for vector/tile geopackage files are in place.
"""
filename = 'boxes_plus_raster.gpkg'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True})
configs[6].update({'layer_type': 'raster'})
configs[7].update({'layer_type': 'raster'})
layer = self.generic_import(
'boxes_plus_raster.gpkg',
configs=configs
)
date_attr = get_layer_attr(layer, 'date_as_date')
self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
configure_time(
self.catalog.get_layer(layer.name).resource,
attribute=date_attr.attribute,)
self.generic_time_check(layer, attribute=date_attr.attribute)
def test_boxes_with_date_csv(self):
"""Tests a CSV with WKT polygon.
"""
filename = 'boxes_with_date.csv'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({'convert_to_date': ['date']})
layer = self.generic_import(filename, configs=configs)
date_attr = get_layer_attr(layer, 'date_as_date')
self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
configure_time(
self.catalog.get_layer(layer.name).resource,
attribute=date_attr.attribute,
)
self.generic_time_check(layer, attribute=date_attr.attribute)
def test_csv_missing_features(self):
"""Test csv that has some rows without geoms and uses ISO-8859 (Latin 4) encoding.
"""
filename = 'missing-features.csv'
configs = self.prepare_file_for_import(get_testfile_path(filename))
try:
self.generic_import(filename, configs=configs)
except Exception as ex:
self.fail(ex)
def test_boxes_with_iso_date(self):
"""Tests the import of test_boxes_with_iso_date.
"""
filename = 'boxes_with_date_iso_date.zip'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({'convert_to_date': ['date']})
layer = self.generic_import(filename, configs=configs)
date_attr = get_layer_attr(layer, 'date_as_date')
self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
configure_time(
self.catalog.get_layer(layer.name).resource,
attribute=date_attr.attribute,
)
self.generic_time_check(layer, attribute=date_attr.attribute)
def test_boxes_with_date_gdb(self):
"""Tests the import of test_boxes_with_date.gdb.
"""
filename = 'boxes_with_date.zip'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True})
layer = self.generic_import(filename, configs=configs)
date_attr = get_layer_attr(layer, 'date_as_date')
self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
configure_time(
self.catalog.get_layer(layer.name).resource,
attribute=date_attr.attribute,
)
self.generic_time_check(layer, attribute=date_attr.attribute)
def test_duplicate_imports(self):
"""Import the same layer twice to ensure names don't collide.
"""
filename = 'boxes_with_date_iso_date.zip'
configs1 = self.prepare_file_for_import(get_testfile_path(filename))
ogr = OGRImport(get_testfile_path(filename))
layer1 = ogr.handle(configs1)
configs2 = self.prepare_file_for_import(get_testfile_path(filename))
layer2 = ogr.handle(configs2)
self.assertNotEqual(layer1[0][0], layer2[0][0])
def test_launder(self):
"""Ensure the launder function works as expected.
"""
self.assertEqual(launder('tm_world_borders_simpl_0.3'), 'tm_world_borders_simpl_0_3')
self.assertEqual(launder('Testing#'), 'testing_')
self.assertEqual(launder(' '), '_')
def test_boxes_with_date_iso_date_zip(self):
"""Tests the import of test_boxes_with_iso_date.
"""
filename = 'boxes_with_date_iso_date.zip'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({'convert_to_date': ['date']})
layer = self.generic_import(filename, configs=configs)
date_attr = get_layer_attr(layer, 'date_as_date')
self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
configure_time(
self.catalog.get_layer(layer.name).resource,
attribute=date_attr.attribute,
)
self.generic_time_check(layer, attribute=date_attr.attribute)
def test_boxes_with_dates_bc(self):
"""Tests the import of test_boxes_with_dates_bc.
"""
filename = 'boxes_with_dates_bc.zip'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({'convert_to_date': ['date']})
layer = self.generic_import(filename, configs=configs)
date_attr = get_layer_attr(layer, 'date_as_date')
self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
configure_time(
self.catalog.get_layer(layer.name).resource,
attribute=date_attr.attribute,
)
self.generic_time_check(layer, attribute=date_attr.attribute)
def test_point_with_date(self):
"""Tests the import of point_with_date.geojson
"""
filename = 'point_with_date.geojson'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({'convert_to_date': ['date']})
layer = self.generic_import(filename, configs=configs)
# Make sure the layer isn't named OGR default 'OGRGeoJSON'
self.assertNotEqual(layer.name, 'OGRGeoJSON')
date_attr = get_layer_attr(layer, 'date_as_date')
self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
configure_time(
self.catalog.get_layer(layer.name).resource,
attribute=date_attr.attribute,
)
self.generic_time_check(layer, attribute=date_attr.attribute)
def test_boxes_with_end_date(self):
"""Tests the import of test_boxes_with_end_date.
This layer has a date and an end date field that are typed correctly.
"""
filename = 'boxes_with_end_date.zip'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({
'convert_to_date': ['date', 'enddate'],
'start_date': 'date',
'end_date': 'enddate',
'configureTime': True
})
layer = self.generic_import(filename, configs=configs)
date_attr = get_layer_attr(layer, 'date_as_date')
end_date_attr = get_layer_attr(layer, 'enddate_as_date')
self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime')
configure_time(
self.catalog.get_layer(layer.name).resource,
attribute=date_attr.attribute,
end_attribute=end_date_attr.attribute
)
self.generic_time_check(
layer,
attribute=date_attr.attribute,
end_attribute=end_date_attr.attribute
)
def test_us_states_kml(self):
""" Tests the import of us_states_kml, just checking that the import doesn't raise an exception.
Time fields are not configured for this layer; KML time support is still a TODO.
"""
filename = 'us_states.kml'
configs = self.prepare_file_for_import(get_testfile_path(filename))
# TODO: Support time in kmls.
try:
self.generic_import(filename, configs=configs)
except Exception as ex:
self.fail(ex)
def test_mojstrovka_gpx(self):
"""Tests the import of mojstrovka.gpx.
The time attribute in this layer is converted to a date.
"""
filename = 'mojstrovka.gpx'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({'convert_to_date': ['time'], 'configureTime': True})
layer = self.generic_import(filename, configs)
date_attr = get_layer_attr(layer, 'time_as_date')
self.assertEqual(date_attr.attribute_type, u'xsd:dateTime')
configure_time(
self.catalog.get_layer(layer.name).resource,
attribute=date_attr.attribute
)
self.generic_time_check(layer, attribute=date_attr.attribute)
def generic_time_check(self, layer, attribute=None, end_attribute=None):
"""Convenience method to run generic tests on time layers.
"""
# TODO: can we use public API or omit this?
self.catalog._cache.clear()
resource = self.catalog.get_resource(
layer.name, store=layer.store, workspace=self.workspace
)
time_info = resource.metadata['time']
self.assertEqual('LIST', time_info.presentation)
self.assertEqual(True, time_info.enabled)
self.assertEqual(attribute, time_info.attribute)
self.assertEqual(end_attribute, time_info.end_attribute)
def test_us_shootings_csv(self):
"""Tests the import of US_Shootings.csv.
"""
if osgeo.gdal.__version__ < '2.0.0':
self.skipTest('GDAL Version does not support open options')
path = get_testfile_path('US_Shootings.csv')
configs = self.prepare_file_for_import(path)
configs[0].update({'convert_to_date': ['Date']})
layer = self.generic_import(path, configs=configs)
self.assertTrue(layer.name.startswith('us_shootings'))
date_field = 'date'
configure_time(
self.catalog.get_layer(layer.name).resource,
attribute=date_field
)
self.generic_time_check(layer, attribute=date_field)
def test_sitins(self):
"""Tests the import of US_Civil_Rights_Sitins0.csv
"""
if osgeo.gdal.__version__ < '2.0.0':
self.skipTest('GDAL Version does not support open options')
filename = 'US_Civil_Rights_Sitins0.csv'
configs = self.prepare_file_for_import(get_testfile_path(filename))
try:
self.generic_import(filename, configs=configs)
except Exception as ex:
self.fail(ex)
def get_layer_names(self, path):
"""Gets layer names from a data source.
"""
data_source = DataSource(path)
return [layer.name for layer in data_source]
def test_gdal_import(self):
""" Check that geojson file imports without exception.
"""
filename = 'point_with_date.geojson'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({'convert_to_date': ['date']})
try:
self.generic_import(filename, configs=configs)
except Exception as ex:
self.fail(ex)
# WFS imports aren't working & this test was previously passing due to an error in the test
# rather than working code (no layers were returned, and that wasn't checked so none of the checks were ever run).
# I've fixed the test, but since it doesn't seem like the wfs functionality is being used much I'm commenting
# out this test until someone has motivation & time to fix the code.
# def test_wfs(self):
# """Tests the import from a WFS Endpoint
# """
# expected_layer_count = 4
# wfs = 'WFS:http://demo.geo-solutions.it/geoserver/tiger/wfs'
# ih = ImportHelper()
# configs = ih.configure_endpoint(wfs)
# ogr = OGRImport(wfs)
# layers = ogr.handle(configuration_options=configs)
# self.assertEqual(len(layers), expected_layer_count)
# for result in layers:
# layer = Layer.objects.get(name=result[0])
# self.assertEqual(layer.srid, 'EPSG:4326')
# self.assertEqual(layer.store, self.datastore.name)
# self.assertEqual(layer.storeType, 'dataStore')
# skipping this test as urls are not enabled in the ui and this breaks with no
# upload folder to use
# def test_arcgisjson(self):
# """Tests the import from a WFS Endpoint
# """
# endpoint = 'http://sampleserver6.arcgisonline.com/arcgis/rest/services/Water_Network/FeatureServer/16/query'\
# '?where=objectid=326&outfields=*&f=json'
# ih = ImportHelper()
# ih.configure_endpoint(endpoint)
#
# ogr = OGRImport(endpoint)
# configs = [{'index': 0, 'upload_layer_id': 1}]
# layers = ogr.handle(configuration_options=configs)
# for result in layers:
# layer = Layer.objects.get(name=result[0])
# self.assertEqual(layer.srid, 'EPSG:4326')
# self.assertEqual(layer.store, self.datastore.name)
# self.assertEqual(layer.storeType, 'dataStore')
def test_file_add_view(self):
"""Tests the file_add_view.
"""
client = AdminClient()
# test login required for this view
request = client.get(reverse('uploads-new'))
self.assertEqual(request.status_code, 302)
client.login_as_non_admin()
with open(get_testfile_path('point_with_date.geojson')) as stream:
response = client.post(
reverse('uploads-new'),
{'file': stream},
follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['request'].path, reverse('uploads-list'))
self.assertEqual(len(response.context['object_list']), 1)
upload = response.context['object_list'][0]
self.assertEqual(upload.user.username, 'non_admin')
self.assertEqual(upload.file_type, 'GeoJSON')
self.assertTrue(upload.uploadlayer_set.all())
self.assertEqual(upload.state, 'UPLOADED')
self.assertIsNotNone(upload.name)
uploaded_file = upload.uploadfile_set.first()
self.assertTrue(os.path.exists(uploaded_file.file.path))
with open(get_testfile_path('empty_file.geojson')) as stream:
response = client.post(
reverse('uploads-new'),
{'file': stream},
follow=True
)
self.assertEqual(response.status_code, 200)
#self.assertIn('file', response.context_data['form'].errors)
def test_file_add_view_as_json(self):
"""Tests the file_add_view.
"""
client = AdminClient()
client.login_as_non_admin()
with open(get_testfile_path('point_with_date.geojson')) as stream:
response = client.post(
reverse('uploads-new-json'),
{'file': stream},
follow=True
)
self.assertEqual(response.status_code, 200)
self.assertIn('application/json', response.get('Content-Type', ''))
content = json.loads(response.content)
self.assertIn('state', content)
self.assertIn('id', content)
def test_describe_fields(self):
"""Tests the describe fields functionality.
"""
path = get_testfile_path('US_Shootings.csv')
with GDALInspector(path) as inspector:
layers = inspector.describe_fields()
self.assertTrue(layers[0]['layer_name'], 'us_shootings')
self.assertEqual([n['name'] for n in layers[0]['fields']], ['Date', 'Shooter', 'Killed',
'Wounded', 'Location', 'City',
'Longitude', 'Latitude'])
self.assertEqual(layers[0]['feature_count'], 203)
def test_gdal_file_type(self):
"""Tests the describe fields functionality.
"""
filenames = {
'US_Shootings.csv': {'CSV'},
'point_with_date.geojson': {'GeoJSON'},
'mojstrovka.gpx': {'GPX'},
'us_states.kml': {'LIBKML', 'KML'},
'boxes_with_year_field.shp': {'ESRI Shapefile'},
'boxes_with_date_iso_date.zip': {'ESRI Shapefile'}
}
from osgeo_importer.models import NoDataSourceFound
try:
for filename, file_type in sorted(filenames.items()):
path = get_testfile_path(filename)
with GDALInspector(path) as inspector:
self.assertIn(inspector.file_type(), file_type)
except NoDataSourceFound:
logging.exception('No data source found in: {0}'.format(path))
raise
def test_configure_view(self):
"""Tests the configuration view.
"""
path = get_testfile_path('point_with_date.geojson')
new_user = User.objects.create(username='test')
new_user_perms = ['change_resourcebase_permissions']
client = AdminClient()
client.login_as_non_admin()
with open(path) as stream:
response = client.post(
reverse('uploads-new'),
{'file': stream},
follow=True
)
upload = response.context['object_list'][0]
payload = [
{
'index': 0,
'convert_to_date': ['date'],
'start_date': 'date',
'configureTime': True,
'editable': True,
'permissions': {
'users': {
'test': new_user_perms,
'AnonymousUser': [
'change_layer_data',
'download_resourcebase',
'view_resourcebase'
]
}
}
}
]
response = client.post(
'/importer-api/data-layers/{0}/configure/'.format(upload.id),
data=json.dumps(payload),
content_type='application/json'
)
self.assertTrue(response.status_code, 200)
first_layer = Layer.objects.all()[0]
self.assertEqual(first_layer.srid, 'EPSG:4326')
self.assertEqual(first_layer.store, self.datastore.name)
self.assertEqual(first_layer.storeType, 'dataStore')
self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime')
self.assertEqual(
Layer.objects.all()[0].owner.username,
self.non_admin_user.username
)
perms = first_layer.get_all_level_info()
user = User.objects.get(username=self.non_admin_user.username)
# check user permissions
expected_perms = [
u'publish_resourcebase',
u'change_resourcebase_permissions',
u'delete_resourcebase',
u'change_resourcebase',
u'change_resourcebase_metadata',
u'download_resourcebase',
u'view_resourcebase',
u'change_layer_style',
u'change_layer_data'
]
for perm in expected_perms:
self.assertIn(perm, perms['users'][user])
self.assertTrue(perms['users'][new_user])
self.assertIn(
'change_resourcebase_permissions',
perms['users'][new_user]
)
self.assertIn(
'change_layer_data',
perms['users'][User.objects.get(username='AnonymousUser')]
)
catalog_layer = self.catalog.get_layer(first_layer.name)
self.assertIn('time', catalog_layer.resource.metadata)
self.assertEqual(UploadLayer.objects.first().layer, first_layer)
def test_configure_view_convert_date(self):
"""Tests the configure view with a dataset that needs to be converted to a date.
"""
client = AdminClient()
client.login_as_non_admin()
with open(get_testfile_path('US_Shootings.csv')) as stream:
response = client.post(
reverse('uploads-new'),
{'file': stream},
follow=True
)
upload = response.context['object_list'][0]
payload = [
{
'index': 0,
'convert_to_date': ['Date'],
'start_date': 'Date',
'configureTime': True,
'editable': True
}
]
response = client.get(
'/importer-api/data-layers/{0}/configure/'.format(upload.id)
)
self.assertEqual(response.status_code, 405)
response = client.post(
'/importer-api/data-layers/{0}/configure/'.format(upload.id)
)
self.assertEqual(response.status_code, 400)
response = client.post(
'/importer-api/data-layers/{0}/configure/'.format(upload.id),
data=json.dumps(payload),
content_type='application/json'
)
self.assertTrue(response.status_code, 200)
first_layer = Layer.objects.all()[0]
self.assertEqual(first_layer.srid, 'EPSG:4326')
self.assertEqual(first_layer.store, self.datastore.name)
self.assertEqual(first_layer.storeType, 'dataStore')
self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime')
self.assertTrue(first_layer.attributes.filter(attribute='date'), 'xsd:dateTime')
catalog_layer = self.catalog.get_layer(first_layer.name)
self.assertIn('time', catalog_layer.resource.metadata)
# ensure a user who does not own the upload cannot configure an import from it.
client.logout()
client.login_as_admin()
response = client.post(
'/importer-api/data-layers/{0}/configure/'.format(upload.id)
)
self.assertEqual(response.status_code, 404)
def test_list_api(self):
client = AdminClient()
response = client.get('/importer-api/data/')
self.assertEqual(response.status_code, 401)
client.login_as_non_admin()
response = client.get('/importer-api/data/')
self.assertEqual(response.status_code, 200)
admin = User.objects.get(username=self.admin_user.username)
non_admin = User.objects.get(username=self.non_admin_user.username)
path = get_testfile_path('US_Shootings.csv')
with open(path, 'rb') as stream:
uploaded_file = SimpleUploadedFile('test_data', stream.read())
admin_upload = UploadedData.objects.create(state='Admin test', user=admin)
admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))
non_admin_upload = UploadedData.objects.create(state='Non admin test', user=non_admin)
non_admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))
client.login_as_admin()
response = client.get('/importer-api/data/')
self.assertEqual(response.status_code, 200)
body = json.loads(response.content)
self.assertEqual(len(body['objects']), 2)
response = client.get('/importer-api/data/?user__username=admin')
self.assertEqual(response.status_code, 200)
body = json.loads(response.content)
self.assertEqual(len(body['objects']), 1)
def test_layer_list_api(self):
client = AdminClient()
response = client.get('/importer-api/data-layers/')
self.assertEqual(response.status_code, 401)
client.login_as_non_admin()
response = client.get('/importer-api/data-layers/')
self.assertEqual(response.status_code, 200)
def test_delete_from_non_admin_api(self):
"""Ensure users can delete their data.
"""
client = AdminClient()
client.login_as_non_admin()
with open(get_testfile_path('point_with_date.geojson')) as stream:
response = client.post(
reverse('uploads-new'),
{'file': stream},
follow=True
)
self.assertEqual(UploadedData.objects.all().count(), 1)
upload_id = UploadedData.objects.first().id
response = client.delete('/importer-api/data/{0}/'.format(upload_id))
self.assertEqual(response.status_code, 204)
self.assertEqual(UploadedData.objects.all().count(), 0)
def test_delete_from_admin_api(self):
"""Ensure that administrators can delete data that isn't theirs.
"""
client = AdminClient()
client.login_as_non_admin()
with open(get_testfile_path('point_with_date.geojson')) as stream:
response = client.post(
reverse('uploads-new'),
{'file': stream},
follow=True
)
self.assertEqual(UploadedData.objects.all().count(), 1)
client.logout()
client.login_as_admin()
upload_id = UploadedData.objects.first().id
response = client.delete('/importer-api/data/{0}/'.format(upload_id))
self.assertEqual(response.status_code, 204)
self.assertEqual(UploadedData.objects.all().count(), 0)
def naming_an_import(self):
"""Tests providing a name in the configuration options.
"""
client = AdminClient()
client.login_as_non_admin()
name = 'point-with-a-date'
with open(get_testfile_path('point_with_date.geojson')) as stream:
response = client.post(
reverse('uploads-new'),
{'file': stream},
follow=True
)
payload = {
'index': 0,
'convert_to_date': ['date'],
'start_date': 'date',
'configureTime': True,
'name': name,
'editable': True
}
response = client.post(
'/importer-api/data-layers/1/configure/',
data=json.dumps(payload),
content_type='application/json'
)
self.assertEqual(response.status_code, 200)
first_layer = Layer.objects.all()[0]
self.assertEqual(first_layer.title, name.replace('-', '_'))
def test_api_import(self):
"""Tests the import api.
"""
client = AdminClient()
client.login_as_non_admin()
with open(get_testfile_path('point_with_date.geojson')) as stream:
response = client.post(
reverse('uploads-new'),
{'file': stream},
follow=True
)
payload = {
'index': 0,
'convert_to_date': ['date'],
'start_date': 'date',
'configureTime': True,
'editable': True,
'upload_layer_id': 1,
}
self.assertIsInstance(
UploadLayer.objects.first().configuration_options,
dict
)
response = client.get('/importer-api/data-layers/1/')
self.assertEqual(response.status_code, 200)
response = client.post(
'/importer-api/data-layers/1/configure/',
data=json.dumps([payload]),
content_type='application/json'
)
self.assertEqual(response.status_code, 200)
self.assertIn('task', response.content)
first_layer = Layer.objects.all()[0]
self.assertEqual(first_layer.srid, 'EPSG:4326')
self.assertEqual(first_layer.store, self.datastore.name)
self.assertEqual(first_layer.storeType, 'dataStore')
self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime')
catalog_layer = self.catalog.get_layer(first_layer.name)
self.assertIn('time', catalog_layer.resource.metadata)
self.assertEqual(UploadLayer.objects.first().layer, first_layer)
self.assertTrue(UploadLayer.objects.first().task_id)
def test_valid_file_extensions(self):
"""Test the file extension validator.
"""
for extension in load_handler(OSGEO_IMPORTER, 'test.txt').valid_extensions:
filename = 'test.{0}'.format(extension)
upload = SimpleUploadedFile(filename, '')
self.assertIsNone(validate_file_extension(upload))
logging.disable(logging.ERROR)
with self.assertRaises(ValidationError):
validate_file_extension(SimpleUploadedFile('test.txt', ''))
logging.disable(logging.NOTSET)
def test_no_geom(self):
"""Test the file extension validator.
"""
logging.disable(logging.ERROR)
with self.assertRaises(ValidationError):
validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)'))
logging.disable(logging.NOTSET)
# This should pass (geom type is unknown)
validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)'))
def test_numeric_overflow(self):
"""Regression test for numeric field overflows in shapefiles.
# https://trac.osgeo.org/gdal/ticket/5241
"""
filename = 'Walmart.zip'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({
'configureTime': False,
'convert_to_date': ['W1_OPENDAT'],
'editable': True,
'start_date': 'W1_OPENDAT'
})
try:
self.generic_import(filename, configs=configs)
except Exception as ex:
self.fail(ex)
def test_multipolygon_shapefile(self):
""" Tests shapefile with multipart polygons imports without raising exception.
"""
filename = 'PhoenixFirstDues.zip'
configs = self.prepare_file_for_import(get_testfile_path(filename))
try:
self.generic_import(filename, configs=configs)
except Exception as ex:
self.fail(ex)
def test_istanbul(self):
"""Tests shapefile with multipart polygons and non-WGS84 SR.
"""
filename = 'Istanbul.zip'
configs = self.prepare_file_for_import(get_testfile_path(filename))
result = self.generic_import(filename, configs=configs)
feature_type = self.catalog.get_resource(result.name)
self.assertEqual(feature_type.projection, 'EPSG:32635')
# removing test, file not in aws bucket
# def test_houston_tx_annexations(self):
# """Tests Shapefile with originally unsupported EPSG Code.
# """
# filename = 'HoustonTXAnnexations.zip'
# configs = self.prepare_file_for_import(get_testfile_path(filename))
# result = self.generic_import(filename, configs=configs)
# feature_type = self.catalog.get_resource(result.name)
# self.assertEqual(feature_type.projection, 'EPSG:4326')
def test_gwc_handler(self):
"""Tests the GeoWebCache handler
"""
filename = 'boxes_with_date.zip'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({
'convert_to_date': ['date'],
'start_date': 'date',
'configureTime': True
})
layer = self.generic_import(filename, configs=configs)
gwc = GeoWebCacheHandler(None)
gs_layer = self.catalog.get_layer(layer.name)
self.assertTrue(gwc.time_enabled(gs_layer))
gs_layer.fetch()
payload = self.catalog.http.request(gwc.gwc_url(gs_layer))
self.assertIn('regexParameterFilter', payload[1])
self.assertEqual(int(payload[0]['status']), 200)
# Don't configure time, ensure everything still works
filename = 'boxes_with_date_iso_date.zip'
configs = self.prepare_file_for_import(get_testfile_path(filename))
layer = self.generic_import(filename, configs)
gs_layer = self.catalog.get_layer(layer.name)
self.catalog._cache.clear()
gs_layer.fetch()
payload = self.catalog.http.request(gwc.gwc_url(gs_layer))
self.assertNotIn('regexParameterFilter', payload[1])
self.assertEqual(int(payload[0]['status']), 200)
# utf8 failing right now, there are some existing issues
# that are slated to be dealt with
# def test_utf8(self):
# """Tests utf8 characters in attributes
# """
# path = get_testfile_path('china_provinces.zip')
# configs = self.prepare_file_for_import(path)
# layer = self.generic_import(path, configs=configs)
# ogr = OGRImport(path)
# datastore, _ = ogr.open_target_datastore(ogr.target_store)
# sql = (
# "select NAME_CH from {0} where NAME_PY = 'An Zhou'"
# .format(layer.name)
# )
# result = datastore.ExecuteSQL(sql)
# feature = result.GetFeature(0)
# self.assertEqual(feature.GetField('name_ch'), '安州')
def test_non_converted_date(self):
"""Test converting a field as date.
"""
filename = 'TM_WORLD_BORDERS_2005.zip'
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({'start_date': 'Year', 'configureTime': True})
results = self.generic_import(filename, configs=configs)
layer = self.catalog.get_layer(results.typename)
self.assertIn('time', layer.resource.metadata)
self.assertEqual('year', layer.resource.metadata['time'].attribute)
def test_fid_field(self):
"""
Regression test for preserving an FID field when target layer supports
it but source does not.
"""
filename = 'noaa_paleoclimate.zip'
configs = self.prepare_file_for_import(get_testfile_path(filename))
try:
self.generic_import(filename, configs=configs)
except Exception as ex:
self.fail(ex)
def test_csv_with_wkb_geometry(self):
"""Exercise import of CSV files with multiple geometries.
"""
filenames = [
'police_csv.csv',
'police_csv_nOGC.csv',
'police_csv_noLatLon.csv',
'police_csv_WKR.csv',
'police_csv_nFID.csv',
'police_csv_nOGFID.csv',
'police_csv_noWKB.csv'
]
for filename in filenames:
configs = self.prepare_file_for_import(get_testfile_path(filename))
configs[0].update({
'configureTime': True,
'convert_to_date': ['date_time'],
'editable': True,
'permissions': {
'users': {
'AnonymousUser': [
'change_layer_data',
'download_resourcebase',
'view_resourcebase'
]
}
},
'start_date': 'date_time',
})
self.generic_import(filename, configs=configs)
def test_quota(self):
""" Exercise using the user lifetime maximum quota setting
by uploading two files where the first should work,
the second should error
"""
client = AdminClient()
client.login_as_non_admin()
filename = 'plate_id_pangaea.zip'
configs = [{'index': 0}]
path = get_testfile_path(filename)
with open(path) as stream:
data = stream.read()
upload = SimpleUploadedFile(filename, data)
outfiles=[upload]
response = client.post(
reverse('uploads-new-json'),
{'file': outfiles,
'json': json.dumps(configs)},
follow=True)
content = json.loads(response.content)
self.assertEqual(response.status_code, 200)
self.assertEqual(content['state'],'UPLOADED')
# Running second time should trigger exceeded quota
path = get_testfile_path(filename)
with open(path) as stream:
data = stream.read()
upload = SimpleUploadedFile(filename, data)
outfiles=[upload]
response = client.post(
reverse('uploads-new-json'),
{'file': outfiles,
'json': json.dumps(configs)},
follow=True)
self.assertEqual(response.status_code, 200)
self.assertIn('Quota Exceeded', response.content)
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | -3,969,625,452,851,933,000 | 36.098689 | 118 | 0.587134 | false |
dials/dials | algorithms/background/gmodel/algorithm.py | 1 | 2792 | import pickle
class ModelCache:
"""
A class to cache the model
"""
def __init__(self):
"""
Create a model dictionary
"""
self.model = dict()
def get(self, name):
"""
Get the model
"""
if name is None:
raise RuntimeError("Model is not specified")
try:
model = self.model[name]
except KeyError:
with open(name, "rb") as infile:
model = pickle.load(infile)
self.model[name] = model
return model
# Instance of the model cache
global_model_cache = ModelCache()
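# A short usage sketch; the pickle file name below is illustrative only:
#
#     model = global_model_cache.get("background_model.pickle")
#     # a second lookup with the same name returns the cached object instead of
#     # unpickling the file again:
#     assert global_model_cache.get("background_model.pickle") is model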
class BackgroundAlgorithm:
"""Class to do background subtraction."""
def __init__(
self,
experiments,
model=None,
robust=False,
tuning_constant=1.345,
min_pixels=10,
):
"""
Initialise the algorithm.
:param experiments: The list of experiments
:param model: The background model
:param robust: Use the robust background algorithm
:param tuning_constant: The robust tuning constant
"""
from dials.algorithms.background.gmodel import Creator
# Get the model
model = global_model_cache.get(model)
# Create the background creator
self._create = Creator(model=model, robust=robust, min_pixels=min_pixels)
def compute_background(self, reflections, image_volume=None):
"""
Compute the background.
:param reflections: The list of reflections
"""
# Do the background subtraction
if image_volume is None:
success = self._create(reflections)
reflections["background.mean"] = reflections[
"shoebox"
].mean_modelled_background()
else:
success = self._create(reflections, image_volume)
reflections.set_flags(~success, reflections.flags.dont_integrate)
return success
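# A hedged usage sketch: "experiments" and "reflections" are assumed to come from
# the surrounding dials integration pipeline and the model path is illustrative:
#
#     algorithm = BackgroundAlgorithm(experiments,
#                                     model="background_model.pickle",
#                                     robust=True)
#     success = algorithm.compute_background(reflections)
#     # shoeboxes whose background could not be modelled are flagged dont_integrate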
class GModelBackgroundCalculatorFactory:
"""Class to do background subtraction."""
@staticmethod
def create(experiments, model=None, robust=False, min_pixels=10):
"""
Initialise the algorithm.
:param experiments: The list of experiments
:param model: The background model
:param robust: Use the robust background algorithm
:param tuning_constant: The robust tuning constant
"""
from dials.algorithms.integration.parallel_integrator import (
GModelBackgroundCalculator,
)
# Get the model
model = global_model_cache.get(model)
# Create the background creator
return GModelBackgroundCalculator(
model=model, robust=robust, min_pixels=min_pixels
)
| bsd-3-clause | -6,660,291,910,596,019,000 | 26.372549 | 81 | 0.59563 | false |
solackerman/sqlalchemy-redshift | sqlalchemy_redshift/dialect.py | 1 | 30445 | import re
from collections import defaultdict
import pkg_resources
import sqlalchemy as sa
from sqlalchemy import Column, exc, inspect, schema
from sqlalchemy.dialects.postgresql.base import PGCompiler, PGDDLCompiler
from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
from sqlalchemy.engine import reflection
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.expression import (
BinaryExpression, BooleanClauseList, Delete
)
from sqlalchemy.types import VARCHAR, NullType
from .commands import CopyCommand, UnloadFromSelect
from .compat import string_types
try:
from alembic.ddl import postgresql
except ImportError:
pass
else:
from alembic.ddl.base import RenameTable
compiles(RenameTable, 'redshift')(postgresql.visit_rename_table)
class RedshiftImpl(postgresql.PostgresqlImpl):
__dialect__ = 'redshift'
__all__ = ['CopyCommand', 'UnloadFromSelect', 'RedshiftDialect']
# Regex for parsing and identity constraint out of adsrc, e.g.:
# "identity"(445178, 0, '1,1'::text)
IDENTITY_RE = re.compile(r"""
"identity" \(
(?P<current>-?\d+)
,\s
(?P<base>-?\d+)
,\s
'(?P<seed>-?\d+),(?P<step>-?\d+)'
.*
\)
""", re.VERBOSE)
# Regex for SQL identifiers (valid table and column names)
SQL_IDENTIFIER_RE = re.compile(r"""
[_a-zA-Z][\w$]* # SQL standard identifier
| # or
(?:"[^"]+")+ # SQL delimited (quoted) identifier
""", re.VERBOSE)
# Regex for foreign key constraints, e.g.:
# FOREIGN KEY(col1) REFERENCES othertable (col2)
# See https://docs.aws.amazon.com/redshift/latest/dg/r_names.html
# for a definition of valid SQL identifiers.
FOREIGN_KEY_RE = re.compile(r"""
^FOREIGN\ KEY \s* \( # FOREIGN KEY, arbitrary whitespace, literal '('
(?P<columns> # Start a group to capture the referring columns
(?: # Start a non-capturing group
\s* # Arbitrary whitespace
([_a-zA-Z][\w$]* | ("[^"]+")+) # SQL identifier
\s* # Arbitrary whitespace
        ,?              # There will be a comma if this isn't the last one
)+ # Close the non-capturing group; require at least one
) # Close the 'columns' group
\s* \) # Arbitrary whitespace and literal ')'
\s* REFERENCES \s*
((?P<referred_schema>([_a-zA-Z][\w$]* | ("[^"]*")+))\.)? # SQL identifier
(?P<referred_table>[_a-zA-Z][\w$]* | ("[^"]*")+) # SQL identifier
    \s* \(              # Arbitrary whitespace and literal '('
(?P<referred_columns> # Start a group to capture the referring columns
(?: # Start a non-capturing group
\s* # Arbitrary whitespace
([_a-zA-Z][\w$]* | ("[^"]+")+) # SQL identifier
\s* # Arbitrary whitespace
        ,?              # There will be a comma if this isn't the last one
)+ # Close the non-capturing group; require at least one
) # Close the 'columns' group
\s* \) # Arbitrary whitespace and literal ')'
""", re.VERBOSE)
# Regex for primary key constraints, e.g.:
# PRIMARY KEY (col1, col2)
PRIMARY_KEY_RE = re.compile(r"""
    ^PRIMARY \s* KEY \s* \( # PRIMARY KEY, arbitrary whitespace, literal '('
(?P<columns> # Start a group to capture column names
(?:
\s* # Arbitrary whitespace
# SQL identifier or delimited identifier
( [_a-zA-Z][\w$]* | ("[^"]*")+ )
\s* # Arbitrary whitespace
        ,?              # There will be a comma if this isn't the last one
)+ # Close the non-capturing group; require at least one
)
\s* \) \s* # Arbitrary whitespace and literal ')'
""", re.VERBOSE)
def _get_relation_key(name, schema):
if schema is None:
return name
else:
return schema + "." + name
def _get_schema_and_relation(key):
if '.' not in key:
return (None, key)
identifiers = SQL_IDENTIFIER_RE.findall(key)
if len(identifiers) == 1:
return (None, key)
elif len(identifiers) == 2:
return identifiers
    raise ValueError("%s does not look like a valid relation identifier" % key)
def unquoted(key):
"""
Return *key* with one level of double quotes removed.
Redshift stores some identifiers without quotes in internal tables,
even though the name must be quoted elsewhere.
In particular, this happens for tables named as a keyword.
"""
if key.startswith('"') and key.endswith('"'):
return key[1:-1]
return key
class RedshiftCompiler(PGCompiler):
def visit_now_func(self, fn, **kw):
return "SYSDATE"
class RedshiftDDLCompiler(PGDDLCompiler):
"""
Handles Redshift-specific ``CREATE TABLE`` syntax.
Users can specify the `diststyle`, `distkey`, `sortkey` and `encode`
properties per table and per column.
Table level properties can be set using the dialect specific syntax. For
example, to specify a distribution key and style you apply the following:
>>> import sqlalchemy as sa
>>> from sqlalchemy.schema import CreateTable
>>> engine = sa.create_engine('redshift+psycopg2://example')
>>> metadata = sa.MetaData()
>>> user = sa.Table(
... 'user',
... metadata,
... sa.Column('id', sa.Integer, primary_key=True),
... sa.Column('name', sa.String),
... redshift_diststyle='KEY',
... redshift_distkey='id',
... redshift_interleaved_sortkey=['id', 'name'],
... )
>>> print(CreateTable(user).compile(engine))
<BLANKLINE>
CREATE TABLE "user" (
id INTEGER NOT NULL,
name VARCHAR,
PRIMARY KEY (id)
) DISTSTYLE KEY DISTKEY (id) INTERLEAVED SORTKEY (id, name)
<BLANKLINE>
<BLANKLINE>
A single sort key can be applied without a wrapping list:
>>> customer = sa.Table(
... 'customer',
... metadata,
... sa.Column('id', sa.Integer, primary_key=True),
... sa.Column('name', sa.String),
... redshift_sortkey='id',
... )
>>> print(CreateTable(customer).compile(engine))
<BLANKLINE>
CREATE TABLE customer (
id INTEGER NOT NULL,
name VARCHAR,
PRIMARY KEY (id)
) SORTKEY (id)
<BLANKLINE>
<BLANKLINE>
Column-level special syntax can also be applied using the column info
dictionary. For example, we can specify the ENCODE for a column:
>>> product = sa.Table(
... 'product',
... metadata,
... sa.Column('id', sa.Integer, primary_key=True),
... sa.Column('name', sa.String, info={'encode': 'lzo'})
... )
>>> print(CreateTable(product).compile(engine))
<BLANKLINE>
CREATE TABLE product (
id INTEGER NOT NULL,
name VARCHAR ENCODE lzo,
PRIMARY KEY (id)
)
<BLANKLINE>
<BLANKLINE>
We can also specify the distkey and sortkey options:
>>> sku = sa.Table(
... 'sku',
... metadata,
... sa.Column('id', sa.Integer, primary_key=True),
... sa.Column(
... 'name', sa.String, info={'distkey': True, 'sortkey': True}
... )
... )
>>> print(CreateTable(sku).compile(engine))
<BLANKLINE>
CREATE TABLE sku (
id INTEGER NOT NULL,
name VARCHAR DISTKEY SORTKEY,
PRIMARY KEY (id)
)
<BLANKLINE>
<BLANKLINE>
"""
def post_create_table(self, table):
text = ""
info = table.dialect_options['redshift']
diststyle = info.get('diststyle')
if diststyle:
diststyle = diststyle.upper()
if diststyle not in ('EVEN', 'KEY', 'ALL'):
raise exc.CompileError(
u"diststyle {0} is invalid".format(diststyle)
)
text += " DISTSTYLE " + diststyle
distkey = info.get('distkey')
if distkey:
text += " DISTKEY ({0})".format(self.preparer.quote(distkey))
sortkey = info.get('sortkey')
interleaved_sortkey = info.get('interleaved_sortkey')
if sortkey and interleaved_sortkey:
raise exc.ArgumentError(
"Parameters sortkey and interleaved_sortkey are "
"mutually exclusive; you may not specify both."
)
if sortkey or interleaved_sortkey:
if isinstance(sortkey, string_types):
keys = [sortkey]
else:
keys = sortkey or interleaved_sortkey
keys = [key.name if isinstance(key, Column) else key
for key in keys]
if interleaved_sortkey:
text += " INTERLEAVED"
sortkey_string = ", ".join(self.preparer.quote(key)
for key in keys)
text += " SORTKEY ({0})".format(sortkey_string)
return text
def get_column_specification(self, column, **kwargs):
colspec = self.preparer.format_column(column)
colspec += " " + self.dialect.type_compiler.process(column.type)
default = self.get_column_default_string(column)
if default is not None:
# Identity constraints show up as *default* when reflected.
m = IDENTITY_RE.match(default)
if m:
colspec += " IDENTITY({seed},{step})".format(**m.groupdict())
else:
colspec += " DEFAULT " + default
colspec += self._fetch_redshift_column_attributes(column)
if not column.nullable:
colspec += " NOT NULL"
return colspec
def _fetch_redshift_column_attributes(self, column):
text = ""
if not hasattr(column, 'info'):
return text
info = column.info
identity = info.get('identity')
if identity:
text += " IDENTITY({0},{1})".format(identity[0], identity[1])
encode = info.get('encode')
if encode:
text += " ENCODE " + encode
distkey = info.get('distkey')
if distkey:
text += " DISTKEY"
sortkey = info.get('sortkey')
if sortkey:
text += " SORTKEY"
return text
class RedshiftDialect(PGDialect_psycopg2):
"""
Define Redshift-specific behavior.
Most public methods are overrides of the underlying interfaces defined in
:class:`~sqlalchemy.engine.interfaces.Dialect` and
:class:`~sqlalchemy.engine.Inspector`.
"""
name = 'redshift'
statement_compiler = RedshiftCompiler
ddl_compiler = RedshiftDDLCompiler
max_identifier_length = 127
construct_arguments = [
(schema.Index, {
"using": False,
"where": None,
"ops": {}
}),
(schema.Table, {
"ignore_search_path": False,
"diststyle": None,
"distkey": None,
"sortkey": None,
"interleaved_sortkey": None,
}),
]
def __init__(self, *args, **kw):
super(RedshiftDialect, self).__init__(*args, **kw)
# Cache domains, as these will be static;
# Redshift does not support user-created domains.
self._domains = None
@reflection.cache
def get_columns(self, connection, table_name, schema=None, **kw):
"""
Return information about columns in `table_name`.
Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.get_columns`.
"""
cols = self._get_redshift_columns(connection, table_name, schema, **kw)
if not self._domains:
self._domains = self._load_domains(connection)
domains = self._domains
columns = []
for col in cols:
column_info = self._get_column_info(
name=col.name, format_type=col.format_type,
default=col.default, notnull=col.notnull, domains=domains,
enums=[], schema=col.schema, encode=col.encode)
columns.append(column_info)
return columns
@reflection.cache
def get_pk_constraint(self, connection, table_name, schema=None, **kw):
"""
Return information about the primary key constraint on `table_name`.
Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.get_pk_constraint`.
"""
constraints = self._get_redshift_constraints(connection, table_name,
schema)
pk_constraints = [c for c in constraints if c.contype == 'p']
if not pk_constraints:
return {'constrained_columns': [], 'name': ''}
pk_constraint = pk_constraints[0]
m = PRIMARY_KEY_RE.match(pk_constraint.condef)
colstring = m.group('columns')
constrained_columns = SQL_IDENTIFIER_RE.findall(colstring)
return {
'constrained_columns': constrained_columns,
'name': None,
}
@reflection.cache
def get_foreign_keys(self, connection, table_name, schema=None, **kw):
"""
Return information about foreign keys in `table_name`.
Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.get_pk_constraint`.
"""
constraints = self._get_redshift_constraints(connection, table_name,
schema)
fk_constraints = [c for c in constraints if c.contype == 'f']
uniques = defaultdict(lambda: defaultdict(dict))
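        # Group the per-column constraint rows by constraint name so each
        # multi-column foreign key collapses into a single entry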
for con in fk_constraints:
uniques[con.conname]["key"] = con.conkey
uniques[con.conname]["condef"] = con.condef
fkeys = []
for conname, attrs in uniques.items():
m = FOREIGN_KEY_RE.match(attrs['condef'])
colstring = m.group('referred_columns')
referred_columns = SQL_IDENTIFIER_RE.findall(colstring)
referred_table = m.group('referred_table')
referred_schema = m.group('referred_schema')
colstring = m.group('columns')
constrained_columns = SQL_IDENTIFIER_RE.findall(colstring)
fkey_d = {
'name': None,
'constrained_columns': constrained_columns,
'referred_schema': referred_schema,
'referred_table': referred_table,
'referred_columns': referred_columns,
}
fkeys.append(fkey_d)
return fkeys
@reflection.cache
def get_table_names(self, connection, schema=None, **kw):
"""
Return a list of table names for `schema`.
Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.get_table_names`.
"""
return self._get_table_or_view_names('r', connection, schema, **kw)
@reflection.cache
def get_view_names(self, connection, schema=None, **kw):
"""
Return a list of view names for `schema`.
Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.get_view_names`.
"""
return self._get_table_or_view_names('v', connection, schema, **kw)
@reflection.cache
def get_view_definition(self, connection, view_name, schema=None, **kw):
"""Return view definition.
Given a :class:`.Connection`, a string `view_name`,
and an optional string `schema`, return the view definition.
Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.get_view_definition`.
"""
view = self._get_redshift_relation(connection, view_name, schema, **kw)
return sa.text(view.view_definition)
def get_indexes(self, connection, table_name, schema, **kw):
"""
Return information about indexes in `table_name`.
Because Redshift does not support traditional indexes,
this always returns an empty list.
Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.get_indexes`.
"""
return []
@reflection.cache
def get_unique_constraints(self, connection, table_name,
schema=None, **kw):
"""
Return information about unique constraints in `table_name`.
Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.get_unique_constraints`.
"""
constraints = self._get_redshift_constraints(connection,
table_name, schema)
constraints = [c for c in constraints if c.contype == 'u']
uniques = defaultdict(lambda: defaultdict(dict))
for con in constraints:
uniques[con.conname]["key"] = con.conkey
uniques[con.conname]["cols"][con.attnum] = con.attname
return [
{'name': None,
'column_names': [uc["cols"][i] for i in uc["key"]]}
for name, uc in uniques.items()
]
@reflection.cache
def get_table_options(self, connection, table_name, schema, **kw):
"""
Return a dictionary of options specified when the table of the
given name was created.
Overrides interface
:meth:`~sqlalchemy.engine.Inspector.get_table_options`.
"""
def keyfunc(column):
num = int(column.sortkey)
            # For interleaved sort keys, some column numbers are reported as
            # negative values, so sort on the absolute value.
return abs(num)
table = self._get_redshift_relation(connection, table_name,
schema, **kw)
columns = self._get_redshift_columns(connection, table_name,
schema, **kw)
sortkey_cols = sorted([col for col in columns if col.sortkey],
key=keyfunc)
interleaved = any([int(col.sortkey) < 0 for col in sortkey_cols])
sortkey = [col.name for col in sortkey_cols]
interleaved_sortkey = None
if interleaved:
interleaved_sortkey = sortkey
sortkey = None
distkeys = [col.name for col in columns if col.distkey]
distkey = distkeys[0] if distkeys else None
return {
'redshift_diststyle': table.diststyle,
'redshift_distkey': distkey,
'redshift_sortkey': sortkey,
'redshift_interleaved_sortkey': interleaved_sortkey,
}
def create_connect_args(self, *args, **kwargs):
"""
Build DB-API compatible connection arguments.
Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.create_connect_args`.
"""
default_args = {
'sslmode': 'verify-full',
'sslrootcert': pkg_resources.resource_filename(
__name__,
'redshift-ssl-ca-cert.pem'
),
}
cargs, cparams = super(RedshiftDialect, self).create_connect_args(
*args, **kwargs
)
default_args.update(cparams)
return cargs, default_args
def _get_table_or_view_names(self, relkind, connection, schema=None, **kw):
default_schema = inspect(connection).default_schema_name
if not schema:
schema = default_schema
info_cache = kw.get('info_cache')
all_relations = self._get_all_relation_info(connection,
info_cache=info_cache)
relation_names = []
for key, relation in all_relations.items():
this_schema, this_relation = _get_schema_and_relation(key)
if this_schema is None:
this_schema = default_schema
if this_schema == schema and relation.relkind == relkind:
relation_names.append(this_relation)
return relation_names
def _get_column_info(self, *args, **kwargs):
kw = kwargs.copy()
encode = kw.pop('encode', None)
column_info = super(RedshiftDialect, self)._get_column_info(
*args,
**kw
)
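        # A reflected VARCHAR with no length cannot be round-tripped to DDL,
        # so fall back to a generic NullType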
if isinstance(column_info['type'], VARCHAR):
if column_info['type'].length is None:
column_info['type'] = NullType()
if 'info' not in column_info:
column_info['info'] = {}
if encode and encode != 'none':
column_info['info']['encode'] = encode
return column_info
def _get_redshift_relation(self, connection, table_name,
schema=None, **kw):
info_cache = kw.get('info_cache')
all_relations = self._get_all_relation_info(connection,
info_cache=info_cache)
key = _get_relation_key(table_name, schema)
if key not in all_relations.keys():
key = unquoted(key)
try:
return all_relations[key]
except KeyError:
raise sa.exc.NoSuchTableError(key)
def _get_redshift_columns(self, connection, table_name, schema=None, **kw):
info_cache = kw.get('info_cache')
all_columns = self._get_all_column_info(connection,
info_cache=info_cache)
key = _get_relation_key(table_name, schema)
if key not in all_columns.keys():
key = unquoted(key)
return all_columns[key]
def _get_redshift_constraints(self, connection, table_name,
schema=None, **kw):
info_cache = kw.get('info_cache')
all_constraints = self._get_all_constraint_info(connection,
info_cache=info_cache)
key = _get_relation_key(table_name, schema)
if key not in all_constraints.keys():
key = unquoted(key)
return all_constraints[key]
@reflection.cache
def _get_all_relation_info(self, connection, **kw):
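        # Fetch every non-system relation with its diststyle, owner and view
        # definition in a single query, keyed by schema-qualified name below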
result = connection.execute("""
SELECT
c.relkind,
n.oid as "schema_oid",
n.nspname as "schema",
c.oid as "rel_oid",
c.relname,
CASE c.reldiststyle
WHEN 0 THEN 'EVEN' WHEN 1 THEN 'KEY' WHEN 8 THEN 'ALL' END
AS "diststyle",
c.relowner AS "owner_id",
u.usename AS "owner_name",
TRIM(TRAILING ';' FROM pg_catalog.pg_get_viewdef(c.oid, true))
AS "view_definition",
pg_catalog.array_to_string(c.relacl, '\n') AS "privileges"
FROM pg_catalog.pg_class c
LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
JOIN pg_catalog.pg_user u ON u.usesysid = c.relowner
WHERE c.relkind IN ('r', 'v', 'm', 'S', 'f')
AND n.nspname !~ '^pg_'
ORDER BY c.relkind, n.oid, n.nspname;
""")
relations = {}
for rel in result:
schema = rel.schema
if schema == inspect(connection).default_schema_name:
schema = None
key = _get_relation_key(rel.relname, schema)
relations[key] = rel
return relations
@reflection.cache
def _get_all_column_info(self, connection, **kw):
all_columns = defaultdict(list)
with connection.contextual_connect() as cc:
# We fetch the current search_path, which may or may not quote
# '$user' depending on whether other schemas need quoting.
search_path = cc.execute("SHOW search_path").scalar()
if '$user' in search_path and '"$user"' not in search_path:
search_path = search_path.replace('$user', '"$user"')
# Because pg_table_def only shows results for schemas on the
# search_path, we explicitly include all non-system schemas, then
# replace the original value for search_path.
schema_names = ['"%s"' % r.name for r in cc.execute("""
SELECT nspname AS "name"
FROM pg_catalog.pg_namespace
WHERE nspname !~ '^pg_' AND nspname <> 'information_schema'
ORDER BY 1
""")]
modified_search_path = ','.join(schema_names)
cc.execute("SET LOCAL search_path TO %s" % modified_search_path)
result = cc.execute("""
SELECT
n.nspname as "schema",
c.relname as "table_name",
d.column as "name",
encoding as "encode",
type, distkey, sortkey, "notnull", adsrc, attnum,
pg_catalog.format_type(att.atttypid, att.atttypmod),
pg_catalog.pg_get_expr(ad.adbin, ad.adrelid) AS DEFAULT,
n.oid as "schema_oid",
c.oid as "table_oid"
FROM pg_catalog.pg_class c
LEFT JOIN pg_catalog.pg_namespace n
ON n.oid = c.relnamespace
JOIN pg_catalog.pg_table_def d
ON (d.schemaname, d.tablename) = (n.nspname, c.relname)
JOIN pg_catalog.pg_attribute att
ON (att.attrelid, att.attname) = (c.oid, d.column)
LEFT JOIN pg_catalog.pg_attrdef ad
ON (att.attrelid, att.attnum) = (ad.adrelid, ad.adnum)
WHERE n.nspname !~ '^pg_'
ORDER BY n.nspname, c.relname, att.attnum
""")
for col in result:
schema = col.schema
if schema == inspect(connection).default_schema_name:
schema = None
key = _get_relation_key(col.table_name, schema)
all_columns[key].append(col)
cc.execute("SET LOCAL search_path TO %s" % search_path)
return dict(all_columns)
@reflection.cache
def _get_all_constraint_info(self, connection, **kw):
result = connection.execute("""
SELECT
n.nspname as "schema",
c.relname as "table_name",
t.contype,
t.conname,
t.conkey,
a.attnum,
a.attname,
pg_catalog.pg_get_constraintdef(t.oid, true) as condef,
n.oid as "schema_oid",
c.oid as "rel_oid"
FROM pg_catalog.pg_class c
LEFT JOIN pg_catalog.pg_namespace n
ON n.oid = c.relnamespace
JOIN pg_catalog.pg_constraint t
ON t.conrelid = c.oid
JOIN pg_catalog.pg_attribute a
ON t.conrelid = a.attrelid AND a.attnum = ANY(t.conkey)
WHERE n.nspname !~ '^pg_'
ORDER BY n.nspname, c.relname
""")
all_constraints = defaultdict(list)
for con in result:
schema = con.schema
if schema == inspect(connection).default_schema_name:
schema = None
key = _get_relation_key(con.table_name, schema)
all_constraints[key].append(con)
return all_constraints
def gen_columns_from_children(root):
"""
Generates columns that are being used in child elements of the delete query
this will be used to determine tables for the using clause.
:param root: the delete query
:return: a generator of columns
"""
if isinstance(root, (Delete, BinaryExpression, BooleanClauseList)):
for child in root.get_children():
yc = gen_columns_from_children(child)
for it in yc:
yield it
elif isinstance(root, sa.Column):
yield root
@compiles(Delete, 'redshift')
def visit_delete_stmt(element, compiler, **kwargs):
"""
Adds redshift-dialect specific compilation rule for the
delete statement.
Redshift DELETE syntax can be found here:
https://docs.aws.amazon.com/redshift/latest/dg/r_DELETE.html
.. :code-block: sql
DELETE [ FROM ] table_name
[ { USING } table_name, ...]
[ WHERE condition ]
By default, SqlAlchemy compiles DELETE statements with the
syntax:
.. :code-block: sql
DELETE [ FROM ] table_name
[ WHERE condition ]
problem illustration:
>>> from sqlalchemy import Table, Column, Integer, MetaData, delete
>>> from sqlalchemy_redshift.dialect import RedshiftDialect
>>> meta = MetaData()
>>> table1 = Table(
... 'table_1',
... meta,
... Column('pk', Integer, primary_key=True)
... )
...
>>> table2 = Table(
... 'table_2',
... meta,
... Column('pk', Integer, primary_key=True)
... )
...
>>> del_stmt = delete(table1).where(table1.c.pk==table2.c.pk)
>>> str(del_stmt.compile(dialect=RedshiftDialect()))
'DELETE FROM table_1 USING table_2 WHERE table_1.pk = table_2.pk'
>>> str(del_stmt)
'DELETE FROM table_1 WHERE table_1.pk = table_2.pk'
>>> del_stmt2 = delete(table1)
>>> str(del_stmt2)
'DELETE FROM table_1'
>>> del_stmt3 = delete(table1).where(table1.c.pk > 1000)
>>> str(del_stmt3)
'DELETE FROM table_1 WHERE table_1.pk > :pk_1'
>>> str(del_stmt3.compile(dialect=RedshiftDialect()))
'DELETE FROM table_1 WHERE table_1.pk > %(pk_1)s'
"""
# Set empty strings for the default where clause and using clause
whereclause = ''
usingclause = ''
# determine if the delete query needs a ``USING`` injected
# by inspecting the whereclause's children & their children...
    # first, the where clause text is built, if applicable
# then, the using clause text is built, if applicable
# note:
# the tables in the using clause are sorted in the order in
# which they first appear in the where clause.
delete_stmt_table = compiler.process(element.table, asfrom=True, **kwargs)
whereclause_tuple = element.get_children()
if whereclause_tuple:
usingclause_tables = []
whereclause = ' WHERE {clause}'.format(
clause=compiler.process(*whereclause_tuple, **kwargs)
)
whereclause_columns = gen_columns_from_children(element)
for col in whereclause_columns:
table = compiler.process(col.table, asfrom=True, **kwargs)
if table != delete_stmt_table and table not in usingclause_tables:
usingclause_tables.append(table)
if usingclause_tables:
usingclause = ' USING {clause}'.format(
clause=', '.join(usingclause_tables)
)
return 'DELETE FROM {table}{using}{where}'.format(
table=delete_stmt_table,
using=usingclause,
where=whereclause)
| mit | 1,824,985,647,472,262,000 | 35.461078 | 79 | 0.570636 | false |
wgwoods/blivet | blivet/flags.py | 1 | 3889 | # flags.py
#
# Copyright (C) 2013 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): David Lehman <[email protected]>
#
import shlex
import selinux
class Flags(object):
def __init__(self):
#
# mode of operation
#
self.testing = False
self.installer_mode = False
#
# minor modes (installer-specific)
#
self.automated_install = False
self.live_install = False
self.image_install = False
#
# enable/disable functionality
#
self.selinux = selinux.is_selinux_enabled()
self.multipath = True
self.dmraid = True
self.ibft = True
self.noiswmd = False
self.gfs2 = True
self.jfs = True
self.reiserfs = True
self.arm_platform = None
self.gpt = False
self.multipath_friendly_names = True
# set to False to suppress the default LVM behavior of saving
# backup metadata in /etc/lvm/{archive,backup}
self.lvm_metadata_backup = True
# whether to include nodev filesystems in the devicetree (only
# meaningful when flags.installer_mode is False)
self.include_nodev = False
self.boot_cmdline = {}
self.update_from_boot_cmdline()
self.allow_degraded_mdraid = True
def get_boot_cmdline(self):
buf = open("/proc/cmdline").read().strip()
args = shlex.split(buf)
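        # Only key=value arguments are recorded; each becomes an entry in boot_cmdline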
for arg in args:
(opt, _equals, val) = arg.partition("=")
if val:
self.boot_cmdline[opt] = val
def update_from_boot_cmdline(self):
self.get_boot_cmdline()
if "nompath" in self.boot_cmdline:
self.multipath = False
if "nodmraid" in self.boot_cmdline:
self.dmraid = False
if "noiswmd" in self.boot_cmdline:
self.noiswmd = True
def update_from_anaconda_flags(self, anaconda_flags):
self.installer_mode = True
self.testing = anaconda_flags.testing
self.automated_install = anaconda_flags.automatedInstall
self.live_install = anaconda_flags.livecdInstall
self.image_install = anaconda_flags.imageInstall
self.selinux = anaconda_flags.selinux
self.gfs2 = "gfs2" in self.boot_cmdline
self.jfs = "jfs" in self.boot_cmdline
self.reiserfs = "reiserfs" in self.boot_cmdline
self.arm_platform = anaconda_flags.armPlatform
self.gpt = anaconda_flags.gpt
self.multipath_friendly_names = anaconda_flags.mpathFriendlyNames
self.allow_degraded_mdraid = anaconda_flags.rescue_mode
self.ibft = anaconda_flags.ibft
self.dmraid = anaconda_flags.dmraid
# We don't want image installs writing backups of the *image* metadata
# into the *host's* /etc/lvm. This can get real messy on build systems.
if self.image_install:
self.lvm_metadata_backup = False
flags = Flags()
| gpl-2.0 | -8,068,387,037,196,000,000 | 31.957627 | 79 | 0.646953 | false |
beaker-project/beaker | Server/bkr/server/model/openstack.py | 1 | 1285 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
from sqlalchemy.schema import Column, ForeignKey
from sqlalchemy.types import Integer, Unicode
from sqlalchemy.orm import relationship
from bkr.server.model.base import DeclarativeMappedObject
from bkr.server.model.lab import LabController
from .types import UUID
# Currently Beaker does not understand OpenStack regions, so there should only
# be one row in this table, created by the administrator. In future this can be
# expanded to track multiple regions associated with different lab controllers.
class OpenStackRegion(DeclarativeMappedObject):
__tablename__ = 'openstack_region'
__table_args__ = {'mysql_engine': 'InnoDB'}
id = Column(Integer, autoincrement=True, nullable=False, primary_key=True)
lab_controller_id = Column(Integer, ForeignKey('lab_controller.id',
name='openstack_region_lab_controller_id_fk'), nullable=False)
lab_controller = relationship(LabController, back_populates='openstack_regions')
# NULL ipxe_image_id means not uploaded yet
ipxe_image_id = Column(UUID)
| gpl-2.0 | 1,555,254,236,307,001,600 | 46.592593 | 84 | 0.768093 | false |
apache/allura | Allura/allura/tests/test_multifactor.py | 2 | 11754 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from __future__ import absolute_import
import shutil
import tempfile
import textwrap
import os
from paste.deploy.converters import asint
import ming
from cryptography.hazmat.primitives.twofactor import InvalidToken
from mock import patch, Mock
from alluratest.tools import assert_equal, assert_raises
from tg import config
from allura import model as M
from allura.lib.exceptions import InvalidRecoveryCode, MultifactorRateLimitError
from allura.lib.multifactor import GoogleAuthenticatorFile, TotpService, MongodbTotpService
from allura.lib.multifactor import GoogleAuthenticatorPamFilesystemTotpService
from allura.lib.multifactor import RecoveryCodeService, MongodbRecoveryCodeService
from allura.lib.multifactor import GoogleAuthenticatorPamFilesystemRecoveryCodeService
class TestGoogleAuthenticatorFile(object):
sample = textwrap.dedent('''\
7CL3WL756ISQCU5HRVNAODC44Q
" RATE_LIMIT 3 30
" DISALLOW_REUSE
" TOTP_AUTH
43504045
16951331
16933944
38009587
49571579
''')
# different key length
sample2 = textwrap.dedent('''\
LQQTTQUEW3VAGA6O5XICCWGBXUWXI737
" TOTP_AUTH
''')
def test_parse(self):
gaf = GoogleAuthenticatorFile.load(self.sample)
assert_equal(gaf.key, b'\xf8\x97\xbb/\xfd\xf2%\x01S\xa7\x8dZ\x07\x0c\\\xe4')
assert_equal(gaf.options['RATE_LIMIT'], '3 30')
assert_equal(gaf.options['DISALLOW_REUSE'], None)
assert_equal(gaf.options['TOTP_AUTH'], None)
assert_equal(gaf.recovery_codes, [
'43504045',
'16951331',
'16933944',
'38009587',
'49571579',
])
def test_dump(self):
gaf = GoogleAuthenticatorFile.load(self.sample)
assert_equal(gaf.dump(), self.sample)
def test_dump2(self):
gaf = GoogleAuthenticatorFile.load(self.sample2)
assert_equal(gaf.dump(), self.sample2)
class GenericTotpService(TotpService):
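    # Test-only subclass that disables rate limiting so verify() can be called repeatedly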
def enforce_rate_limit(self, *args, **kwargs):
pass
class TestTotpService(object):
sample_key = b'\x00K\xda\xbfv\xc2B\xaa\x1a\xbe\xa5\x96b\xb2\xa0Z:\xc9\xcf\x8a'
sample_time = 1472502664
# these generate code 283397
def test_constructor(self):
totp = TotpService().Totp(key=None)
assert totp
@patch('allura.lib.multifactor.time')
def test_verify_types(self, time):
time.return_value = self.sample_time
srv = GenericTotpService()
totp = srv.Totp(key=self.sample_key)
srv.verify(totp, '283 397', None)
srv.verify(totp, '283397', None)
@patch('allura.lib.multifactor.time')
def test_verify_window(self, time):
time.return_value = self.sample_time
srv = GenericTotpService()
totp = srv.Totp(key=self.sample_key)
srv.verify(totp, '283397', None)
time.return_value = self.sample_time + 30
srv.verify(totp, '283397', None)
time.return_value = self.sample_time + 60
with assert_raises(InvalidToken):
srv.verify(totp, '283397', None)
time.return_value = self.sample_time - 30
with assert_raises(InvalidToken):
srv.verify(totp, '283397', None)
def test_get_qr_code(self):
srv = TotpService()
totp = srv.Totp(key=None)
user = Mock(username='some-user-guy')
config['site_name'] = 'Our Website'
assert srv.get_qr_code(totp, user)
class TestAnyTotpServiceImplementation(object):
__test__ = False
sample_key = b'\x00K\xda\xbfv\xc2B\xaa\x1a\xbe\xa5\x96b\xb2\xa0Z:\xc9\xcf\x8a'
sample_time = 1472502664
# these generate code 283397
def mock_user(self):
return M.User(username='some-user-guy')
def test_none(self):
srv = self.Service()
user = self.mock_user()
assert_equal(None, srv.get_secret_key(user))
def test_set_get(self):
srv = self.Service()
user = self.mock_user()
srv.set_secret_key(user, self.sample_key)
assert_equal(self.sample_key, srv.get_secret_key(user))
def test_delete(self):
srv = self.Service()
user = self.mock_user()
srv.set_secret_key(user, self.sample_key)
assert_equal(self.sample_key, srv.get_secret_key(user))
srv.set_secret_key(user, None)
assert_equal(None, srv.get_secret_key(user))
@patch('allura.lib.multifactor.time')
def test_rate_limiting(self, time):
time.return_value = self.sample_time
srv = self.Service()
user = self.mock_user()
totp = srv.Totp(key=self.sample_key)
# 4th attempt (good or bad) will trip over the default limit of 3 in 30s
with assert_raises(InvalidToken):
srv.verify(totp, '34dfvdasf', user)
with assert_raises(InvalidToken):
srv.verify(totp, '234asdfsadf', user)
srv.verify(totp, '283397', user)
with assert_raises(MultifactorRateLimitError):
srv.verify(totp, '283397', user)
class TestMongodbTotpService(TestAnyTotpServiceImplementation):
__test__ = True
Service = MongodbTotpService
def setUp(self):
config = {
'ming.main.uri': 'mim://host/allura_test',
}
ming.configure(**config)
class TestGoogleAuthenticatorPamFilesystemMixin(object):
def setUp(self):
self.totp_basedir = tempfile.mkdtemp(prefix='totp-test', dir=os.getenv('TMPDIR', '/tmp'))
config['auth.multifactor.totp.filesystem.basedir'] = self.totp_basedir
def tearDown(self):
if os.path.exists(self.totp_basedir):
shutil.rmtree(self.totp_basedir)
class TestGoogleAuthenticatorPamFilesystemTotpService(TestAnyTotpServiceImplementation,
TestGoogleAuthenticatorPamFilesystemMixin):
__test__ = True
Service = GoogleAuthenticatorPamFilesystemTotpService
def test_rate_limiting(self):
# make a regular .google-authenticator file first, so rate limit info has somewhere to go
self.Service().set_secret_key(self.mock_user(), self.sample_key)
# then run test
super(TestGoogleAuthenticatorPamFilesystemTotpService, self).test_rate_limiting()
class TestRecoveryCodeService(object):
def test_generate_one_code(self):
code = RecoveryCodeService().generate_one_code()
assert code
another_code = RecoveryCodeService().generate_one_code()
assert code != another_code
def test_regenerate_codes(self):
class DummyRecoveryService(RecoveryCodeService):
def replace_codes(self, user, codes):
self.saved_user = user
self.saved_codes = codes
recovery = DummyRecoveryService()
user = Mock(username='some-user-guy')
recovery.regenerate_codes(user)
assert_equal(recovery.saved_user, user)
assert_equal(len(recovery.saved_codes), asint(config.get('auth.multifactor.recovery_code.count', 10)))
class TestAnyRecoveryCodeServiceImplementation(object):
__test__ = False
def mock_user(self):
return M.User(username='some-user-guy')
def test_get_codes_none(self):
recovery = self.Service()
user = self.mock_user()
assert_equal(recovery.get_codes(user), [])
def test_regen_get_codes(self):
recovery = self.Service()
user = self.mock_user()
recovery.regenerate_codes(user)
assert recovery.get_codes(user)
def test_replace_codes(self):
recovery = self.Service()
user = self.mock_user()
codes = [
'12345',
'67890'
]
recovery.replace_codes(user, codes)
assert_equal(recovery.get_codes(user), codes)
def test_verify_fail(self):
recovery = self.Service()
user = self.mock_user()
with assert_raises(InvalidRecoveryCode):
recovery.verify_and_remove_code(user, '11111')
with assert_raises(InvalidRecoveryCode):
recovery.verify_and_remove_code(user, '')
def test_verify_and_remove_code(self):
recovery = self.Service()
user = self.mock_user()
codes = [
'12345',
'67890'
]
recovery.replace_codes(user, codes)
result = recovery.verify_and_remove_code(user, '12345')
assert_equal(result, True)
assert_equal(recovery.get_codes(user), ['67890'])
def test_rate_limiting(self):
recovery = self.Service()
user = self.mock_user()
codes = [
'11111',
'22222',
]
recovery.replace_codes(user, codes)
# 4th attempt (good or bad) will trip over the default limit of 3 in 30s
with assert_raises(InvalidRecoveryCode):
recovery.verify_and_remove_code(user, '13485u0233')
with assert_raises(InvalidRecoveryCode):
recovery.verify_and_remove_code(user, '34123rdxafs')
recovery.verify_and_remove_code(user, '11111')
with assert_raises(MultifactorRateLimitError):
recovery.verify_and_remove_code(user, '22222')
class TestMongodbRecoveryCodeService(TestAnyRecoveryCodeServiceImplementation):
__test__ = True
Service = MongodbRecoveryCodeService
def setUp(self):
config = {
'ming.main.uri': 'mim://host/allura_test',
}
ming.configure(**config)
class TestGoogleAuthenticatorPamFilesystemRecoveryCodeService(TestAnyRecoveryCodeServiceImplementation,
TestGoogleAuthenticatorPamFilesystemMixin):
__test__ = True
Service = GoogleAuthenticatorPamFilesystemRecoveryCodeService
def setUp(self):
super(TestGoogleAuthenticatorPamFilesystemRecoveryCodeService, self).setUp()
# make a regular .google-authenticator file first, so recovery keys have somewhere to go
GoogleAuthenticatorPamFilesystemTotpService().set_secret_key(self.mock_user(),
b'\x00K\xda\xbfv\xc2B\xaa\x1a\xbe\xa5\x96b\xb2\xa0Z:\xc9\xcf\x8a')
def test_get_codes_none_when_no_file(self):
# this deletes the file
GoogleAuthenticatorPamFilesystemTotpService().set_secret_key(self.mock_user(), None)
super(TestGoogleAuthenticatorPamFilesystemRecoveryCodeService, self).test_get_codes_none()
def test_replace_codes_when_no_file(self):
# this deletes the file
GoogleAuthenticatorPamFilesystemTotpService().set_secret_key(self.mock_user(), None)
# then it errors because no .google-authenticator file
with assert_raises(IOError):
super(TestGoogleAuthenticatorPamFilesystemRecoveryCodeService, self).test_replace_codes()
| apache-2.0 | -8,189,101,684,502,008,000 | 33.570588 | 135 | 0.648715 | false |
talset/monitoring-plugins | disk/check_disk.py | 1 | 4769 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Florian Lambert <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Requirements: python
#
import sys
import argparse
import subprocess
VERSION = '1.1'
STATE_TEXT = ['Ok', 'Warning', 'Critical', 'Unknown']
PARSER = argparse.ArgumentParser(description='Disk check recursive')
PARSER.add_argument("-b", "--base",
type=str,
                    help='Base directory to monitor. For example, use /host/ to monitor only volumes mounted under /host/ (Default: /)',
default="/")
PARSER.add_argument("-e", "--excludes",
type=str, nargs='+',
                    help='List of mount points to exclude recursively, e.g. /var/lib will exclude /var/lib*',
default=[])
PARSER.add_argument("-w", "--warning",
type=int,
help='Warning value (Default: 85)',
default=85)
PARSER.add_argument("-c", "--critical",
type=int,
help='Critical value (Default: 95)',
default=95)
PARSER.add_argument("-v", "--version",
action='store_true',
help='Print script version')
ARGS = PARSER.parse_args()
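# Example invocation (illustrative only): ./check_disk.py -b /host -e /var/lib /run -w 80 -c 90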
def check_df(base,warning,critical,excludes):
STATE_OK = 0
STATE_WARNING = 1
STATE_CRITICAL = 2
STATE_UNKNOWN = 3
STATE = STATE_OK
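    # Build a df command that skips pseudo filesystems and reports block and inode usage in whole gigabytes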
df_cmd = ("df --exclude-type=tmpfs "
"--exclude-type=devtmpfs "
"--output=source,target,fstype,iused,itotal,ipcent,used,size,pcent "
"--block-size G")
stdout = subprocess.check_output(df_cmd, shell=True).strip().split("\n")
# remove the header output
del stdout[0]
_output_message = []
_disk_ok = []
for line in stdout:
# Exclude filter on target mount point
col = line.split()
# 0: source
# 1: target
# 2: fstype
# 3: iused
# 4: itotal
# 5: ipcent
# 6: used
# 7: size
# 8: pcent
if not is_based(base,col[1]) or is_excluded(excludes,col[1]):
continue
_disk_ok.append(col[1])
        # csize: percentage of blocks used
csize = int(col[8].rstrip('%'))
if csize >= int(critical): # CRITICAL
STATE = STATE_CRITICAL
_output_message.append("Disk Block %s %s Used" % (col[1], col[8]))
elif csize >= int(warning): # WARNING
# Update state warning only if the current is not critical
if STATE < STATE_CRITICAL:
STATE = STATE_WARNING
_output_message.append("Disk Block %s %s Used" % (col[1], col[8]))
        # cinode: percentage of inodes used
cinode = int(col[5].rstrip('%'))
if cinode >= int(critical): # CRITICAL
STATE = STATE_CRITICAL
_output_message.append("Disk Inode %s %s Used" % (col[1], col[5]))
elif cinode >= int(warning): # WARNING
# Update state warning only if the current is not critical
if STATE < STATE_CRITICAL:
STATE = STATE_WARNING
_output_message.append("Disk Inode %s %s Used" % (col[1], col[5]))
if STATE == STATE_OK:
output_message = "Disk %s" % (' || '.join(_disk_ok))
else:
output_message = ' || '.join(_output_message)
return output_message,STATE
def is_excluded(excludes, path):
    # Check if the mount path is in the excludes
for ex in excludes:
if path.startswith(ex):
return True
return False
def is_based(base, path):
    # Check if the mount path is under the base path
if path.startswith(base):
return True
return False
if __name__ == "__main__":
if ARGS.version:
print "version: %s" % (VERSION)
sys.exit(0)
(OUTPUT_MESSAGE,STATE) = check_df(base=ARGS.base,
warning=ARGS.warning,
critical=ARGS.critical,
excludes=ARGS.excludes)
try:
print "%s: %s" % (STATE_TEXT[STATE], OUTPUT_MESSAGE)
sys.exit(STATE)
except ValueError:
print "Oops! cant return STATE"
sys.exit(STATE_UNKNOWN)
| apache-2.0 | -8,505,828,684,046,612,000 | 32.118056 | 135 | 0.563011 | false |
phobson/bokeh | bokeh/application/handlers/tests/test_code.py | 1 | 3031 | from __future__ import absolute_import, print_function
from os.path import abspath, sep
import unittest
from bokeh.application.handlers import CodeHandler
from bokeh.document import Document
script_adds_two_roots = """
from bokeh.io import curdoc
from bokeh.model import Model
from bokeh.core.properties import Int, Instance
class AnotherModelInTestScript(Model):
bar = Int(1)
class SomeModelInTestScript(Model):
foo = Int(2)
child = Instance(Model)
curdoc().add_root(AnotherModelInTestScript())
curdoc().add_root(SomeModelInTestScript())
"""
class TestCodeHandler(unittest.TestCase):
def test_empty_script(self):
doc = Document()
handler = CodeHandler(source="# This script does nothing", filename="/test_filename")
handler.modify_document(doc)
if handler.failed:
raise RuntimeError(handler.error)
assert not doc.roots
def test_script_adds_roots(self):
doc = Document()
handler = CodeHandler(source=script_adds_two_roots, filename="/test_filename")
handler.modify_document(doc)
if handler.failed:
raise RuntimeError(handler.error)
assert len(doc.roots) == 2
def test_script_bad_syntax(self):
doc = Document()
handler = CodeHandler(source="This is a syntax error", filename="/test_filename")
handler.modify_document(doc)
assert handler.error is not None
assert 'Invalid syntax' in handler.error
def test_script_runtime_error(self):
doc = Document()
handler = CodeHandler(source="raise RuntimeError('nope')", filename="/test_filename")
handler.modify_document(doc)
assert handler.error is not None
assert 'nope' in handler.error
def test_script_sys_path(self):
doc = Document()
handler = CodeHandler(source="""import sys; raise RuntimeError("path: '%s'" % sys.path[0])""", filename="/test_filename")
handler.modify_document(doc)
assert handler.error is not None
assert "path: ''" in handler.error
def test_script_cwd(self):
doc = Document()
handler = CodeHandler(source="""import os; raise RuntimeError("cwd: '%s'" % os.getcwd())""", filename="/test_filename")
handler.modify_document(doc)
assert handler.error is not None
assert "cwd: '%s'" % abspath(sep) in handler.error
def test_script_argv(self):
doc = Document()
handler = CodeHandler(source="""import sys; raise RuntimeError("argv: %r" % sys.argv)""", filename="/test_filename")
handler.modify_document(doc)
assert handler.error is not None
assert "argv: ['test_filename']" in handler.error
doc = Document()
handler = CodeHandler(source="""import sys; raise RuntimeError("argv: %r" % sys.argv)""", filename="/test_filename", argv=[10, 20, 30])
handler.modify_document(doc)
assert handler.error is not None
assert "argv: ['test_filename', 10, 20, 30]" in handler.error
| bsd-3-clause | 9,053,226,742,000,388,000 | 32.677778 | 143 | 0.655559 | false |
feend78/evennia | evennia/typeclasses/migrations/0008_lock_and_perm_rename.py | 2 | 1717 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.11 on 2017-01-25 22:30
from __future__ import unicode_literals
import re
from django.db import migrations
def update_perms_and_locks(apps, schema_editor):
# update all permissions
Tag = apps.get_model('typeclasses', 'Tag')
perm_map = {"guests": "guest", "players": "player", "playerhelpers": "helper",
"builders": "builder", "wizards": "admin", "immortals": "developer"}
for perm in Tag.objects.filter(db_tagtype="permission"):
if perm.db_key in perm_map:
perm.db_key = perm_map[perm.db_key]
perm.save(update_fields=("db_key",))
# update all locks on all entities
apps_models = [("objects", "ObjectDB"), ("accounts", "AccountDB"), ("scripts", "ScriptDB"),
("comms", "ChannelDB")]
p_reg = re.compile(r"(?<=perm\()(\w+)(?=\))|(?<=perm_above\()(\w+)(?=\))",
re.IGNORECASE + re.UNICODE)
def _sub(match):
perm = match.group(1)
return perm_map[perm.lower()].capitalize() if (perm and perm.lower() in perm_map) else perm
for app_tuple in apps_models:
TClass = apps.get_model(*app_tuple)
for obj in TClass.objects.filter(db_lock_storage__icontains="perm"):
orig_lock = obj.db_lock_storage
repl_lock = p_reg.sub(_sub, orig_lock)
if repl_lock != orig_lock:
obj.db_lock_storage = repl_lock
obj.save(update_fields=('db_lock_storage',))
class Migration(migrations.Migration):
dependencies = [
('typeclasses', '0007_tag_migrations_may_be_slow'),
]
operations = [
migrations.RunPython(update_perms_and_locks)
]
| bsd-3-clause | -2,164,045,350,960,106,800 | 33.34 | 99 | 0.585323 | false |
beeryardtech/scripts | python/dk_test/scenarios/scenario_common.py | 2 | 4295 |
__author__ = "Travis Goldie"
__email__ = "[email protected]"
__date__ = "11/14/12"
__copyright__ = "(c) SIOS Technology Corp 2012"
#For unittesting
import os
import sys
from time import sleep
sys.path.insert(0, r"C:\Program Files\dk_test\libs")
from dkconfig import dkconfig
from exectests import checkAppGUICmd, runTests
from winfuncs import deletealljobs, deleteallmirrors
from dklog import markTestScenEvent
class scenario_common(object):
"""
Purpose:
        The top-level scenario class that provides the basic (most common)
        functionality for each scenario.
        Child classes can override the class methods and properties to
        customize each test scenario.
Class Methods:
runTest
executeTest
setUp
tearDown
"""
@classmethod
def __init__(self, _config, _settings, _loadedTestCases, _scenarioName):
#Setup
self.config = _config
self.logger = self.config.logger
self.results = self.config.results
self.settings = _settings
self.repeatcount = int(self.settings["repeat"])
self.sleep = float(self.settings.get("sleep",
self.config.settings["defaultsleep"]))
self.testCases = _loadedTestCases
self.scenarioName = _scenarioName
@classmethod
@markTestScenEvent
def runTest(self):
"""
Purpose:
            Execute each generated test case as defined by its parameter list.
            All of these functions can be overridden in each scenario module
            that inherits from this class.
"""
numOfTestCases = len(self.testCases)
numTestsPassed = 0
runCounter = 0
successfulTestCount = 0
#Run setup and prepare environment for test cases
self.setUp()
        # Iterate over the test cases. A test case should end in a state
        # that allows the next test to run.
for testName, cmdParams in self.testCases.items():
successfulTestCount = 0 #Used to check pass/fail of each scenario
self.logger.info("START {}".format(testName))
self.results("START {}".format(testName))
#TODO Need to do this differently...
if self.repeatcount == -1:
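                # repeat = -1 means run this test case until the user interrupts with Ctrl-C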
numberOfRuns = 0
try:
while True:
if self.executeTest(testName, cmdParams):
self.results("PASSED {}".format(testName))
successfulTestCount += 1
else:
self.results("FAILED {}".format(testName))
numberOfRuns += 1
sleep(1.0)
except KeyboardInterrupt as err:
self.logger.info("Keyboard Interrupt recieved. Test ended.")
runCounter = numberOfRuns
elif self.repeatcount > 0:
#Repeat each test case based on the repeat count setting
for index in range(self.repeatcount):
successfulTestCount += self.executeTest(testName, cmdParams)
runCounter = self.repeatcount
#Did the test pass or fail?
if successfulTestCount == runCounter:
self.results("PASSED {}".format(testName))
numTestsPassed += 1
else:
self.results("FAILED {}".format(testName))
self.results("Scenario {}: PASSED {}, FAILED {}".
format(self.scenarioName,
numTestsPassed,
(numOfTestCases - numTestsPassed) ))
#After all the tests have run, teardown the environment and clean up.
self.tearDown()
@classmethod
def executeTest(self, testName, cmdParams):
"""
Purpose:
            Execute the test commands. This can be overridden in the test case.
"""
successfulTestFlag = True
#Run all the commands and count how many returned correctly
for cmd, params in cmdParams.items():
if not checkAppGUICmd(self.config, testName, cmd, params):
successfulTestFlag = False
sleep(self.sleep)
return successfulTestFlag
@classmethod
def setUp(self):
"""
Purpose:
            Prepares the environment for the test case. This can be overridden
in the test case.
"""
deleteallmirrors(self.config)
@classmethod
def tearDown(self):
"""
Purpose:
            Cleans up after the completion of a test case. This can be overridden
in the test case.
"""
deleteallmirrors(self.config)
if __name__ == '__main__':
config = dkconfig(r"C:\Program Files\dk_test\scenarios\cluster.ini")
config.runEnvSetup()
runTests(config, r"C:\Program Files\dk_test\testsuite\test_smoke.ini")
| apache-2.0 | 98,617,112,899,321,780 | 26.071895 | 74 | 0.677299 | false |
vergecurrency/VERGE | test/functional/wallet_txn_doublespend.py | 1 | 5648 | #!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet accounts properly when there is a double-spend conflict."""
from decimal import Decimal
from test_framework.test_framework import VergeTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
disconnect_nodes,
find_output,
sync_blocks,
)
class TxnMallTest(VergeTestFramework):
def set_test_params(self):
self.num_nodes = 4
def add_options(self, parser):
parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
help="Test double-spend of 1-confirmed transaction")
def setup_network(self):
# Start with split network:
super().setup_network()
disconnect_nodes(self.nodes[1], 2)
disconnect_nodes(self.nodes[2], 1)
def run_test(self):
# All nodes should start with 1,250 XSH:
starting_balance = 1250
for i in range(4):
assert_equal(self.nodes[i].getbalance(), starting_balance)
self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress!
# Assign coins to foo and bar addresses:
node0_address_foo = self.nodes[0].getnewaddress()
fund_foo_txid = self.nodes[0].sendtoaddress(node0_address_foo, 1219)
fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid)
node0_address_bar = self.nodes[0].getnewaddress()
fund_bar_txid = self.nodes[0].sendtoaddress(node0_address_bar, 29)
fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid)
assert_equal(self.nodes[0].getbalance(),
starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"])
# Coins are sent to node1_address
node1_address = self.nodes[1].getnewaddress()
# First: use raw transaction API to send 1240 XSH to node1_address,
# but don't broadcast:
doublespend_fee = Decimal('-.02')
rawtx_input_0 = {}
rawtx_input_0["txid"] = fund_foo_txid
rawtx_input_0["vout"] = find_output(self.nodes[0], fund_foo_txid, 1219)
rawtx_input_1 = {}
rawtx_input_1["txid"] = fund_bar_txid
rawtx_input_1["vout"] = find_output(self.nodes[0], fund_bar_txid, 29)
inputs = [rawtx_input_0, rawtx_input_1]
change_address = self.nodes[0].getnewaddress()
outputs = {}
outputs[node1_address] = 1240
outputs[change_address] = 1248 - 1240 + doublespend_fee
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
doublespend = self.nodes[0].signrawtransactionwithwallet(rawtx)
assert_equal(doublespend["complete"], True)
# Create two spends using 1 50 XSH coin each
txid1 = self.nodes[0].sendtoaddress(node1_address, 40)
txid2 = self.nodes[0].sendtoaddress(node1_address, 20)
# Have node0 mine a block:
if (self.options.mine_block):
self.nodes[0].generate(1)
sync_blocks(self.nodes[0:2])
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Node0's balance should be starting balance, plus 50XSH for another
# matured block, minus 40, minus 20, and minus transaction fees:
expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
if self.options.mine_block:
expected += 50
expected += tx1["amount"] + tx1["fee"]
expected += tx2["amount"] + tx2["fee"]
assert_equal(self.nodes[0].getbalance(), expected)
if self.options.mine_block:
assert_equal(tx1["confirmations"], 1)
assert_equal(tx2["confirmations"], 1)
# Node1's balance should be both transaction amounts:
assert_equal(self.nodes[1].getbalance(), starting_balance - tx1["amount"] - tx2["amount"])
else:
assert_equal(tx1["confirmations"], 0)
assert_equal(tx2["confirmations"], 0)
# Now give doublespend and its parents to miner:
self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
doublespend_txid = self.nodes[2].sendrawtransaction(doublespend["hex"])
# ... mine a block...
self.nodes[2].generate(1)
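        # This chain contains the double-spend and will outpace the split side once the network reconnects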
# Reconnect the split network, and sync chain:
connect_nodes(self.nodes[1], 2)
self.nodes[2].generate(1) # Mine another block to make sure we sync
sync_blocks(self.nodes)
assert_equal(self.nodes[0].gettransaction(doublespend_txid)["confirmations"], 2)
# Re-fetch transaction info:
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Both transactions should be conflicted
assert_equal(tx1["confirmations"], -2)
assert_equal(tx2["confirmations"], -2)
# Node0's total balance should be starting balance, plus 100XSH for
# two more matured blocks, minus 1240 for the double-spend, plus fees (which are
# negative):
expected = starting_balance + 100 - 1240 + fund_foo_tx["fee"] + fund_bar_tx["fee"] + doublespend_fee
assert_equal(self.nodes[0].getbalance(), expected)
# Node1's balance should be its initial balance (1250 for 25 block rewards) plus the doublespend:
assert_equal(self.nodes[1].getbalance(), 1250 + 1240)
if __name__ == '__main__':
TxnMallTest().main()
| mit | -443,130,276,645,554,200 | 41.787879 | 111 | 0.633853 | false |
NavarraBiomed/clips | clips_app/management/commands/test.py | 1 | 12162 | from django.core.management.base import BaseCommand, CommandError
from clips_app.models import Case
import django
import csv
def parse_num(value):
if value.isdigit():
return int(value)
else:
        return None
def parse_date(value):
pieces = value.split("/")
try:
return (pieces[2]+"-"+pieces[0]+"-"+pieces[1])
except IndexError:
        return None
class Command(BaseCommand):
help = 'Command test'
def add_arguments(self, parser):
        parser.add_argument('file', nargs='+', type=str)
def handle(self, *args, **options):
input_file = options['file'][0]
print("Reading data from " + input_file)
model_to_row = {
'doctor' : 'Medicoresponsable',
'date' : 'Date',
'name' : 'Name',
'id_number' : 'IDnumber',
'age' : 'Age',
'age_interval' : 'Tramos_edad',
'sex' : 'Sex',
'asa' : 'ASA' ,
#'hypertension' : '',
'hb' : 'HB',
'platelets' : 'Platelets',
'inr' : 'INR',
'pt' : 'PT',
'aspirin' : 'Aspirin',
'anticoagulants' : 'Anticoagulants',
'antiplatelet_anticoagulant' : 'Antiplatelet_anticoagulant',
#'heparinbridgetherapy' : '',
#'nombre_p_activo_antiagreg_anticoag' : '',
#'day_of_reintroduction_antiagregant' : '',
'paris_calif' : 'ParisClasif',
'lst_yn' : 'LSTyn',
#'lst_morphology' : '',
'large_nodule_one_cm' : 'LargeNodule1cm',
'demacrated_depressed_area' : 'DemarcatedDepressedArea',
'sclerous_wall_change' : 'SclerousWallChange',
'fold_convergency' : 'FoldConvergency',
'chicken_skin_mucosa_around' : 'ChickenSkinMucosaAround',
'maximum_size_mm' : 'Size.mm', #?
'area_square_cm' : 'Areacm2',
'location' : 'Location',
'ileocecal_valve_involvement' : 'Ileocecalvalveinvolvement',
'high_definition' : 'HighDefinition',
#'histologyigh_definition' : '',
'endoscopemodel' : 'Endoscopemodel',
'nbi' : 'NBI',
'nbi_sano' : 'NBI.Sano',
'nbi_nice' : 'NBI.NICE',
'cromoendoscopy' : 'cromoendoscopy',
'kudo' : 'Kudo',
'prepathologic_endoscopic_diagnostic_a' : 'PrepathologicEndoscopicDiagnosticA',
'prepathologic_endoscopic_diagnostic_b' : 'PrepathologicEndoscopicDiagnosticB',
'correct_dx_adenoma_serrated': 'CorrectDxAdenomaSerrado',
'correct_dx_invasion' : 'CorrectDxInvasiónprofunda',
'histology' : 'Histology',
'histol_simplified' : 'Histol_simplified',
'time_of_procedure_in_mins' : 'Timeofprocedureinmins',
'difficulty_of_emr' : 'DifficultyofEMR',
'accesibility' : 'Accessibility',
'resection' : 'Resection',
'resection_yn' : 'ResectionYN',
'previous_biopsy' : 'Previous.biopsy',
'previous_attempt' : 'Previous.attempt',
'non_lifting_sign' : 'Nonliftingsign',
'technique' : 'Technique',
'technique_two' : 'Technique2',
'limit_marks' : 'LimitMarks',
'injection' : 'Injection',
'adrenaline' : 'Adrenaline',
'endocut' : 'Endocut',
'electrosurgical_generator_model' : 'Electrosurgicalgeneratormodel',
'polyp_retrieval' : 'PolypRetrieval',
'argon_PC' : 'ArgonPC',
'argon_coagulacion' : 'argón_coagulación',
'coagulation_forceps' : 'Coagulationforceps',
'clips' : 'Clipping', #?
#'clips_control_group' : '',
#'clips_tratment_group' : '',
#'not_tired_closure_by' : '',
#'closure_technique' : '',
'number_clips_needed' : 'ClipsNeeded',
'perforation' : 'Perforation',
'surgery_from_endoscopy' : 'Surgeryfromendoscopy',
'surgery_by_complication' : 'Surgerybycomplication',
'bleeding' : 'Bleeding',
#'immediate_bleeding' : '',
'delayed_bleeding' : 'Delayedbleeding',
'bleeding_treatment' : 'BleedingTreatment',
'transfusion' : 'Trasfusion',
'pps' : 'SPP', #?
#'fever' : '',
#'pain_requiring_medical_intervention' : '',
'hospital_stay_by_technique' : 'HospitalStayByTechniche',
'hospital_stay_by_complication' : 'HospitalStayByComplication',
'follow_up_months' : 'FollowUpInMonths',
'successful_treatment' : 'Successfultreatment',
'sedation' : 'Sedation',
'last_date_endoscopic_follow_up' : 'LastDateEndoscopicFollowUp',
'recurrence_three_six_months_control' : 'Recurrence3monthscontrol',
'recurrenec_one_year_control' : 'Recurrence1yearcontrol',
'global_recurrence' : 'Globalrecurrence',
'other_complications_comments' : 'OtherComplicationsComments',
'other_comments' : 'OtherComments'
}
with open(input_file, 'rt') as f:
reader = csv.DictReader(f)
#reader_list = list(reader)
#print(reader_list[0].keys())
for index, row in enumerate(reader):
#row = reader_list[0]
print("-------- Case #"+ str(index)+" ----------")
for field in Case._meta.get_fields():
if type(field) is django.db.models.fields.IntegerField:
try:
row[model_to_row[field.name]] = parse_num(row[model_to_row[field.name]])
except KeyError:
print("KeyError: "+field.name)
elif type(field) is django.db.models.fields.DateField:
try:
row[model_to_row[field.name]] = parse_date(row[model_to_row[field.name]])
except:
print("Date format error in :"+model_to_row[field.name]+ " -> "+row[model_to_row[field.name]])
Case.objects.create(
doctor = row['Medicoresponsable'],
date = row['Date'],
name = row['Name'],
id_number = row['IDnumber'],
age = row['Age'],
age_interval = row['Tramos_edad'],
sex = row['Sex'],
asa = row['ASA'] ,
#hypertension = row[],
hb = row['HB'],
platelets = row['Platelets'],
inr = row['INR'],
pt = row['PT'],
aspirin = row['Aspirin'],
anticoagulants = row['Anticoagulants'],
antiplatelet_anticoagulant = row['Antiplatelet_anticoagulant'],
#heparinbridgetherapy = row[''],
# nombre_p_activo_antiagreg_anticoag = row[''],
# day_of_reintroduction_antiagregant = row[''],
paris_calif = row['ParisClasif'],
lst_yn = row['LSTyn'],
#lst_morphology = row[''],
large_nodule_one_cm = row['LargeNodule1cm'],
demacrated_depressed_area = row['DemarcatedDepressedArea'],
sclerous_wall_change = row['SclerousWallChange'],
fold_convergency = row['FoldConvergency'],
chicken_skin_mucosa_around = row['ChickenSkinMucosaAround'],
maximum_size_mm = row['Size.mm'], #?
area_square_cm = row['Areacm2'],
location = row['Location'],
ileocecal_valve_involvement = row['Ileocecalvalveinvolvement'],
high_definition = row['HighDefinition'],
#histologyigh_definition = row[''],
endoscopemodel = row['Endoscopemodel'],
nbi = row['NBI'],
nbi_sano = row['NBI.Sano'],
nbi_nice = row['NBI.NICE'],
cromoendoscopy = row['cromoendoscopy'],
kudo = row['Kudo'],
prepathologic_endoscopic_diagnostic_a = row['PrepathologicEndoscopicDiagnosticA'],
prepathologic_endoscopic_diagnostic_b = row['PrepathologicEndoscopicDiagnosticB'],
correct_dx_adenoma_serrated= row['CorrectDxAdenomaSerrado'],
correct_dx_invasion = row['CorrectDxInvasiónprofunda'],
histology = row['Histology'],
histol_simplified = row['Histol_simplified'],
time_of_procedure_in_mins = row['Timeofprocedureinmins'],
difficulty_of_emr = row['DifficultyofEMR'],
accesibility = row['Accessibility'],
resection = row['Resection'],
resection_yn = row['ResectionYN'],
previous_biopsy = row['Previous.biopsy'],
previous_attempt = row['Previous.attempt'],
non_lifting_sign = row['Nonliftingsign'],
technique = row['Technique'],
technique_two = row['Technique2'],
limit_marks = row['LimitMarks'],
injection = row['Injection'],
adrenaline = row['Adrenaline'],
endocut = row['Endocut'],
electrosurgical_generator_model = row['Electrosurgicalgeneratormodel'],
polyp_retrieval = row['PolypRetrieval'],
argon_PC = row['ArgonPC'],
argon_coagulacion = row['argón_coagulación'],
coagulation_forceps = row['Coagulationforceps'],
clips = row['Clipping'], #?
#clips_control_group = row[''],
#clips_tratment_group = row[''],
#not_tired_closure_by = row[''],
#closure_technique = row[''],
number_clips_needed = row['ClipsNeeded'],
perforation = row['Perforation'],
surgery_from_endoscopy = row['Surgeryfromendoscopy'],
surgery_by_complication = row['Surgerybycomplication'],
bleeding = row['Bleeding'],
#immediate_bleeding = row[''],
delayed_bleeding = row['Delayedbleeding'],
bleeding_treatment = row['BleedingTreatment'],
transfusion = row['Trasfusion'],
pps = row['SPP'], #?
#fever = row[''],
#pain_requiring_medical_intervention = row[''],
hospital_stay_by_technique = row['HospitalStayByTechniche'],
hospital_stay_by_complication = row['HospitalStayByComplication'],
follow_up_months = row['FollowUpInMonths'],
successful_treatment = row['Successfultreatment'],
sedation = row['Sedation'],
last_date_endoscopic_follow_up = row['LastDateEndoscopicFollowUp'],
recurrence_three_six_months_control = row['Recurrence3monthscontrol'],
recurrenec_one_year_control = row['Recurrence1yearcontrol'],
global_recurrence = row['Globalrecurrence'],
other_complications_comments = row['OtherComplicationsComments'],
other_comments = row['OtherComments']
) | gpl-2.0 | 2,221,978,827,972,849,400 | 49.443983 | 122 | 0.492185 | false |
the0forge/sp | frontend/migrations/0009_auto__add_field_product_royalty.py | 1 | 21268 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Product.royalty'
db.add_column(u'frontend_product', 'royalty',
self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Product.royalty'
db.delete_column(u'frontend_product', 'royalty')
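    # Applied/reverted with the usual South workflow (assuming a standard setup):
    #   ./manage.py migrate frontend 0009   # forwards
    #   ./manage.py migrate frontend 0008   # backwards (assuming 0008 is the prior migration)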
models = {
u'frontend.catalog': {
'Meta': {'object_name': 'Catalog'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'})
},
u'frontend.catalogissue': {
'Meta': {'object_name': 'CatalogIssue'},
'catalog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'issues'", 'to': u"orm['frontend.Catalog']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issue': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'products': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'catalog_issues'", 'symmetrical': 'False', 'through': u"orm['frontend.CatalogIssueProduct']", 'to': u"orm['frontend.Product']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'})
},
u'frontend.catalogissueproduct': {
'Meta': {'object_name': 'CatalogIssueProduct'},
'catalog_issue': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['frontend.CatalogIssue']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'img_ref': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'page_ref': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['frontend.Product']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'}),
'sub_ref': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'})
},
u'frontend.company': {
'Meta': {'object_name': 'Company'},
'fax': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo_height': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'logo_img': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True'}),
'logo_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'registration': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'})
},
u'frontend.customer': {
'Meta': {'object_name': 'Customer'},
'address_line_1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'address_line_2': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'country': ('django.db.models.fields.CharField', [], {'default': "'Australia'", 'max_length': '100'}),
'customer_type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'delivery_address_line_1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'delivery_address_line_2': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'delivery_attn': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'delivery_country': ('django.db.models.fields.CharField', [], {'default': "'Australia'", 'max_length': '100'}),
'delivery_postcode': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'delivery_state': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'delivery_suburb': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '255'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'from_src_company_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'from_src_membadd_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'c_notes'", 'blank': 'True', 'to': u"orm['frontend.Note']"}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'registration': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'suburb': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'telephone': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
u'frontend.customercontact': {
'Meta': {'object_name': 'CustomerContact'},
'customer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'contacts'", 'to': u"orm['frontend.Customer']"}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
'surname': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'frontend.importnote': {
'Meta': {'object_name': 'ImportNote'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'model_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'note': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'}),
'src_model': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'src_model_id_field': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'src_model_id_text': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'type': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '50'})
},
u'frontend.invoice': {
'Meta': {'object_name': 'Invoice'},
'company': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['frontend.Company']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.PositiveIntegerField', [], {}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'invoices'", 'to': u"orm['frontend.Order']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
},
u'frontend.medium': {
'Meta': {'object_name': 'Medium'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'})
},
u'frontend.note': {
'Meta': {'object_name': 'Note'},
'create_dt': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
u'frontend.order': {
'Meta': {'object_name': 'Order'},
'customer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'orders'", 'to': u"orm['frontend.Customer']"}),
'discount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'from_borders_fakeid': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'from_src_order_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice_address_line_1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'invoice_address_line_2': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'invoice_company_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'invoice_company_reg': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
'invoice_country': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'invoice_postcode': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'invoice_state': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'invoice_suburb': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'notes': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'o_notes'", 'symmetrical': 'False', 'to': u"orm['frontend.Note']"}),
'order_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'order_notes': ('django.db.models.fields.CharField', [], {'max_length': '510', 'null': 'True', 'blank': 'True'}),
'products': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'+'", 'symmetrical': 'False', 'through': u"orm['frontend.OrderProduct']", 'to': u"orm['frontend.Product']"}),
'shipping_address_line_1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'shipping_address_line_2': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'shipping_attn': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'shipping_cost': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '9', 'decimal_places': '2'}),
'shipping_country': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_postcode': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'shipping_state': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_suburb': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'}),
'sp_cost': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'sub_total': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'total_cost': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'wanted_by': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
},
u'frontend.orderproduct': {
'Meta': {'object_name': 'OrderProduct'},
'back_order': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'discount_percentage': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '5', 'decimal_places': '2'}),
'discount_price': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '9', 'decimal_places': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['frontend.Order']"}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['frontend.Product']"}),
'quantity': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'royalty_amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '9', 'decimal_places': '2'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'}),
'sp_price': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '9', 'decimal_places': '2'}),
'unit_price': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '9', 'decimal_places': '2'}),
'unit_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '9', 'decimal_places': '2'})
},
u'frontend.orderstatus': {
'Meta': {'object_name': 'OrderStatus'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'statuses'", 'to': u"orm['frontend.Order']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'PS'", 'max_length': '2'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
u'frontend.pricelevel': {
'Meta': {'ordering': "('-min_amount',)", 'object_name': 'PriceLevel'},
'block_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'cost_per_block': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'cost_per_item': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_amount': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'min_amount': ('django.db.models.fields.PositiveIntegerField', [], {}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'price_level_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'price_levels'", 'null': 'True', 'to': u"orm['frontend.PriceLevelGroup']"}),
'products': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'price_levels'", 'symmetrical': 'False', 'to': u"orm['frontend.Product']"})
},
u'frontend.pricelevelgroup': {
'Meta': {'object_name': 'PriceLevelGroup'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
u'frontend.product': {
'Meta': {'object_name': 'Product'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'current_stock': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'medium': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['frontend.Medium']"}),
'message': ('django.db.models.fields.TextField', [], {}),
'minimum_stock': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'notes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'p_notes'", 'blank': 'True', 'to': u"orm['frontend.Note']"}),
'royalty': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'royalty_img': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['frontend.RoyaltyImg']"}),
'size': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['frontend.Size']"}),
'sp_cost': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2'}),
'supplier': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'products'", 'to': u"orm['frontend.Supplier']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '150'})
},
u'frontend.royaltyimg': {
'Meta': {'object_name': 'RoyaltyImg'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True'}),
'image_height': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'image_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'percentage': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'})
},
u'frontend.size': {
'Meta': {'object_name': 'Size'},
'depth': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '4'}),
'height': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '4'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'}),
'sub_notes': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'units': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True'}),
'width': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '4'})
},
u'frontend.supplier': {
'Meta': {'object_name': 'Supplier'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'})
}
}
complete_apps = ['frontend'] | gpl-3.0 | 6,447,043,179,708,878,000 | 81.758755 | 227 | 0.550169 | false |
yoseforb/lollypop | src/sync_mtp.py | 1 | 12625 | #!/usr/bin/python
# Copyright (c) 2014-2015 Cedric Bellegarde <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import GLib, Gio
from time import sleep
from lollypop.define import Lp
from lollypop.utils import translate_artist_name, debug
class MtpSync:
"""
Init MTP synchronisation
"""
def __init__(self):
self._syncing = False
self._errors = False
self._total = 0 # Total files to sync
self._done = 0 # Handled files on sync
self._fraction = 0.0
self._copied_art_uris = []
############
# Private #
############
"""
Try to execute func 5 times
@param func as function
@param args as tuple
"""
def _retry(self, func, args, t=5):
if t == 0:
self._errors = True
return
try:
func(*args)
except Exception as e:
print("MtpSync::_retry(%s, %s): %s" % (func, args, e))
for a in args:
if isinstance(a, Gio.File):
print(a.get_uri())
sleep(5)
self._retry(func, args, t-1)
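    # Each failed attempt waits 5 seconds before retrying; after 5 failures the
    # file is given up on and the sync is flagged as failed via self._errors.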
"""
Return children uris for uri
@param uri as str
@return [str]
"""
def _get_children_uris(self, uri):
children = []
dir_uris = [uri]
while dir_uris:
uri = dir_uris.pop(0)
d = Gio.File.new_for_uri(uri)
infos = d.enumerate_children(
'standard::name,standard::type',
Gio.FileQueryInfoFlags.NOFOLLOW_SYMLINKS,
None)
for info in infos:
if info.get_file_type() == Gio.FileType.DIRECTORY:
dir_uris.append(uri+'/'+info.get_name())
else:
children.append(uri+'/'+info.get_name())
return children
"""
Sync playlists with device as this
@param playlists as [str]
"""
def _sync(self, playlists):
try:
self._in_thread = True
self._errors = False
self._copied_art_uris = []
sql = Lp.db.get_cursor()
# For progress bar
self._total = 1
self._done = 0
self._fraction = 0.0
# New tracks
for playlist in playlists:
self._fraction = self._done/self._total
self._total += len(Lp.playlists.get_tracks(playlist))
# Old tracks
try:
children = self._get_children_uris(self._uri+'/tracks')
self._total += len(children)
except:
pass
GLib.idle_add(self._update_progress)
# Copy new tracks to device
if self._syncing:
self._copy_to_device(playlists, sql)
# Remove old tracks from device
if self._syncing:
self._remove_from_device(playlists, sql)
# Delete old playlists
d = Gio.File.new_for_uri(self._uri)
infos = d.enumerate_children(
'standard::name',
Gio.FileQueryInfoFlags.NOFOLLOW_SYMLINKS,
None)
for info in infos:
f = info.get_name()
if f.endswith(".m3u") and f[:-4] not in playlists:
uri = self._uri+'/'+f
d = Gio.File.new_for_uri(uri)
self._retry(d.delete, (None,))
except Exception as e:
print("DeviceManagerWidget::_sync(): %s" % e)
self._fraction = 1.0
if self._syncing:
GLib.idle_add(self._view.set_sensitive, True)
GLib.idle_add(self.emit, 'sync-finished')
GLib.idle_add(self._progress.hide)
self._syncing = False
self._in_thread = False
if self._errors:
GLib.idle_add(self._on_errors)
GLib.idle_add(self._on_finished)
"""
Copy file from playlist to device
@param playlists as [str]
@param sql cursor
"""
def _copy_to_device(self, playlists, sql):
for playlist in playlists:
try:
# Create playlist
m3u = Gio.File.new_for_path(
"/tmp/lollypop_%s.m3u" % (playlist,))
self._retry(m3u.replace_contents, (b'#EXTM3U\n', None, False,
Gio.FileCreateFlags.REPLACE_DESTINATION,
None))
stream = m3u.open_readwrite(None)
except Exception as e:
print("DeviceWidget::_copy_to_device(): %s" % e)
m3u = None
stream = None
# Start copying
tracks_id = Lp.playlists.get_tracks_id(playlist, sql)
for track_id in tracks_id:
if not self._syncing:
self._fraction = 1.0
self._in_thread = False
return
album_id = Lp.tracks.get_album_id(track_id, sql)
album_name = Lp.albums.get_name(album_id, sql)
# Sanitize file names as some MTP devices do not like this
# Or this is a Gio/GObject Introspection bug
album_name = "".join([c for c in album_name if c.isalpha()\
or c.isdigit() or c==' ']).rstrip()
artist_name = translate_artist_name(
Lp.albums.get_artist_name(album_id, sql))
# Sanitize file names as some MTP devices do not like this
# Or this is a Gio/GObject Introspection bug
artist_name = "".join([c for c in artist_name if c.isalpha()\
or c.isdigit() or c==' ']).rstrip()
track_path = Lp.tracks.get_path(track_id, sql)
on_device_album_uri = "%s/tracks/%s_%s" %\
(self._uri,
artist_name.lower(),
album_name.lower())
d = Gio.File.new_for_uri(on_device_album_uri)
if not d.query_exists(None):
self._retry(d.make_directory_with_parents, (None,))
# Copy album art
art = Lp.art.get_album_art_path(album_id, sql)
if art:
src_art = Gio.File.new_for_path(art)
art_uri = "%s/cover.jpg" % on_device_album_uri
self._copied_art_uris.append(art_uri)
dst_art = Gio.File.new_for_uri(art_uri)
if not dst_art.query_exists(None):
self._retry(src_art.copy,
(dst_art, Gio.FileCopyFlags.OVERWRITE,
None, None))
track_name = GLib.basename(track_path)
# Sanitize file names as some MTP devices do not like this
# Or this is a Gio/GObject Introspection bug
track_name = "".join([c for c in track_name if c.isalpha()\
or c.isdigit() or c==' ' or c=='.']).rstrip()
src_track = Gio.File.new_for_path(track_path)
info = src_track.query_info('time::modified',
Gio.FileQueryInfoFlags.NONE,
None)
# Prefix track with mtime to make sure updating it later
mtime = info.get_attribute_as_string('time::modified')
dst_uri = "%s/%s_%s" % (on_device_album_uri,
mtime, track_name)
if stream is not None:
line = "tracks/%s_%s/%s_%s\n" %\
(artist_name.lower(),
album_name.lower(),
mtime,
track_name)
self._retry(stream.get_output_stream().write,
(line.encode(encoding='UTF-8'), None))
dst_track = Gio.File.new_for_uri(dst_uri)
if not dst_track.query_exists(None):
self._retry(src_track.copy,
(dst_track, Gio.FileCopyFlags.OVERWRITE,
None, None))
else:
self._done += 1
self._done += 1
self._fraction = self._done/self._total
GLib.idle_add(self._update_progress)
if stream is not None:
stream.close()
if m3u is not None:
dst = Gio.File.new_for_uri(self._uri+'/'+playlist+'.m3u')
self._retry(m3u.move,
(dst, Gio.FileCopyFlags.OVERWRITE, None, None))
"""
Delete files not available in playlist
        If sql is None, delete all files
@param playlists as [str]
@param sql cursor
"""
def _remove_from_device(self, playlists, sql):
track_uris = []
tracks_id = []
# Get tracks ids
for playlist in playlists:
tracks_id += Lp.playlists.get_tracks_id(playlist, sql)
# Get tracks uris
for track_id in tracks_id:
if not self._syncing:
self._fraction = 1.0
self._in_thread = False
return
album_id = Lp.tracks.get_album_id(track_id, sql)
album_name = Lp.albums.get_name(album_id, sql)
# Sanitize file names as some MTP devices do not like this
# Or this is a Gio/GObject Introspection bug
album_name = "".join([c for c in album_name if c.isalpha()\
or c.isdigit() or c==' ']).rstrip()
artist_name = translate_artist_name(
Lp.albums.get_artist_name(album_id, sql))
# Sanitize file names as some MTP devices do not like this
# Or this is a Gio/GObject Introspection bug
artist_name = "".join([c for c in artist_name if c.isalpha()\
or c.isdigit() or c==' ']).rstrip()
track_path = Lp.tracks.get_path(track_id, sql)
album_uri = "%s/tracks/%s_%s" % (self._uri,
artist_name.lower(),
album_name.lower())
track_name = GLib.basename(track_path)
# Sanitize file names as some MTP devices do not like this
# Or this is a Gio/GObject Introspection bug
track_name = "".join([c for c in track_name if c.isalpha()\
or c.isdigit() or c==' ' or c=='.']).rstrip()
on_disk = Gio.File.new_for_path(track_path)
info = on_disk.query_info('time::modified',
Gio.FileQueryInfoFlags.NONE,
None)
# Prefix track with mtime to make sure updating it later
mtime = info.get_attribute_as_string('time::modified')
dst_uri = "%s/%s_%s" % (album_uri, mtime, track_name)
track_uris.append(dst_uri)
on_mtp_files = self._get_children_uris(self._uri+'/tracks')
# Delete file on device and not in playlists
for uri in on_mtp_files:
if not self._syncing:
self._fraction = 1.0
self._in_thread = False
return
if uri not in track_uris and uri not in self._copied_art_uris:
to_delete = Gio.File.new_for_uri(uri)
self._retry(to_delete.delete, (None,))
self._done += 1
self._fraction = self._done/self._total
GLib.idle_add(self._update_progress)
"""
Clean on finished. Do nothing
"""
def _on_finished(self):
pass
"""
Show something to the user. Do nothing.
"""
def _on_errors(self):
pass
| gpl-3.0 | 2,710,664,522,675,087,400 | 38.952532 | 77 | 0.493228 | false |
NLeSC/pointcloud-benchmark | python/pointcloud/postgres/blocks/LoaderOrdered.py | 1 | 2424 | #!/usr/bin/env python
################################################################################
# Created by Oscar Martinez #
# [email protected] #
################################################################################
import os, logging
from pointcloud import pdalops, postgresops
from pointcloud.postgres.blocks.Loader import Loader
class LoaderOrdered(Loader):
def getFileBlockTable(self, index):
return self.blockTable + '_' + str(index)
def process(self):
logging.info('Starting ordered data loading with PDAL (parallel by python) from ' + self.inputFolder + ' to ' + self.dbName)
return self.processMulti(self.inputFiles, self.numProcessesLoad, self.loadFromFile, self.loadFromFileSequential, True)
def loadFromFile(self, index, fileAbsPath):
# Get connection
connection = self.getConnection()
cursor = connection.cursor()
#Create a temporal blocks table for the blocks of the current file
fileBlockTable = self.getFileBlockTable(index)
self.createBlocksTable(cursor, fileBlockTable, self.indexTableSpace) # We use the index table space for the temporal table
# Add point cloud format to poinctcloud_formats table
(columns, pcid, compression) = self.addPCFormat(cursor, self.schemaFile, fileAbsPath, self.srid)
connection.close()
pdalCols = []
for c in cols:
pdalCols.append(self.DM_PDAL[c])
# Get PDAL config and run PDAL
xmlFile = os.path.basename(fileAbsPath) + '.xml'
pdalops.PostgreSQLWriter(xmlFile, fileAbsPath, self.getConnectionString(), pcid, pdalCols, fileBlockTable, self.srid, self.blockSize, compression)
pdalops.executePDAL(xmlFile)
def loadFromFileSequential(self, fileAbsPath, index, numFiles):
fileBlockTable = self.getFileBlockTable(index)
connection = self.getConnection()
cursor = connection.cursor()
# Insert the blocks on the global blocks table (with correct order)
query = "INSERT INTO " + self.blockTable + " (pa) SELECT pa FROM " + fileBlockTable + " ORDER BY id"
postgresops.mogrifyExecute(cursor, query)
# Drop the temporal table
postgresops.dropTable(cursor, fileBlockTable)
connection.close()
| apache-2.0 | 3,891,968,732,836,190,000 | 49.5 | 154 | 0.616337 | false |
rajalokan/glance | glance/db/sqlalchemy/alembic_migrations/versions/ocata_expand01_add_visibility.py | 1 | 5764 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""add visibility to images
Revision ID: ocata_expand01
Revises: mitaka02
Create Date: 2017-01-27 12:58:16.647499
"""
from alembic import op
from sqlalchemy import Column, Enum, MetaData, Table
from glance.db import migration
# revision identifiers, used by Alembic.
revision = 'ocata_expand01'
down_revision = 'mitaka02'
branch_labels = migration.EXPAND_BRANCH
depends_on = None
ERROR_MESSAGE = 'Invalid visibility value'
MYSQL_INSERT_TRIGGER = """
CREATE TRIGGER insert_visibility BEFORE INSERT ON images
FOR EACH ROW
BEGIN
-- NOTE(abashmak):
-- The following IF/ELSE block implements a priority decision tree.
-- Strict order MUST be followed to correctly cover all the edge cases.
-- Edge case: neither is_public nor visibility specified
-- (or both specified as NULL):
IF NEW.is_public <=> NULL AND NEW.visibility <=> NULL THEN
SIGNAL SQLSTATE '45000' SET MESSAGE_TEXT = '%s';
-- Edge case: both is_public and visibility specified:
ELSEIF NOT(NEW.is_public <=> NULL OR NEW.visibility <=> NULL) THEN
SIGNAL SQLSTATE '45000' SET MESSAGE_TEXT = '%s';
-- Inserting with is_public, set visibility accordingly:
ELSEIF NOT NEW.is_public <=> NULL THEN
IF NEW.is_public = 1 THEN
SET NEW.visibility = 'public';
ELSE
SET NEW.visibility = 'shared';
END IF;
-- Inserting with visibility, set is_public accordingly:
ELSEIF NOT NEW.visibility <=> NULL THEN
IF NEW.visibility = 'public' THEN
SET NEW.is_public = 1;
ELSE
SET NEW.is_public = 0;
END IF;
-- Edge case: either one of: is_public or visibility,
-- is explicitly set to NULL:
ELSE
SIGNAL SQLSTATE '45000' SET MESSAGE_TEXT = '%s';
END IF;
END;
"""
MYSQL_UPDATE_TRIGGER = """
CREATE TRIGGER update_visibility BEFORE UPDATE ON images
FOR EACH ROW
BEGIN
-- Case: new value specified for is_public:
IF NOT NEW.is_public <=> OLD.is_public THEN
-- Edge case: is_public explicitly set to NULL:
IF NEW.is_public <=> NULL THEN
SIGNAL SQLSTATE '45000' SET MESSAGE_TEXT = '%s';
-- Edge case: new value also specified for visibility
ELSEIF NOT NEW.visibility <=> OLD.visibility THEN
SIGNAL SQLSTATE '45000' SET MESSAGE_TEXT = '%s';
-- Case: visibility not specified or specified as OLD value:
-- NOTE(abashmak): There is no way to reliably determine which
-- of the above two cases occurred, but allowing to proceed with
-- the update in either case does not break the model for both
-- N and N-1 services.
ELSE
-- Set visibility according to the value of is_public:
IF NEW.is_public <=> 1 THEN
SET NEW.visibility = 'public';
ELSE
SET NEW.visibility = 'shared';
END IF;
END IF;
-- Case: new value specified for visibility:
ELSEIF NOT NEW.visibility <=> OLD.visibility THEN
-- Edge case: visibility explicitly set to NULL:
IF NEW.visibility <=> NULL THEN
SIGNAL SQLSTATE '45000' SET MESSAGE_TEXT = '%s';
-- Edge case: new value also specified for is_public
ELSEIF NOT NEW.is_public <=> OLD.is_public THEN
SIGNAL SQLSTATE '45000' SET MESSAGE_TEXT = '%s';
-- Case: is_public not specified or specified as OLD value:
-- NOTE(abashmak): There is no way to reliably determine which
-- of the above two cases occurred, but allowing to proceed with
-- the update in either case does not break the model for both
-- N and N-1 services.
ELSE
-- Set is_public according to the value of visibility:
IF NEW.visibility <=> 'public' THEN
SET NEW.is_public = 1;
ELSE
SET NEW.is_public = 0;
END IF;
END IF;
END IF;
END;
"""
def _add_visibility_column(meta):
enum = Enum('private', 'public', 'shared', 'community', metadata=meta,
name='image_visibility')
enum.create()
v_col = Column('visibility', enum, nullable=True, server_default=None)
op.add_column('images', v_col)
op.create_index('visibility_image_idx', 'images', ['visibility'])
def _add_triggers(engine):
if engine.engine.name == 'mysql':
op.execute(MYSQL_INSERT_TRIGGER % (ERROR_MESSAGE, ERROR_MESSAGE,
ERROR_MESSAGE))
op.execute(MYSQL_UPDATE_TRIGGER % (ERROR_MESSAGE, ERROR_MESSAGE,
ERROR_MESSAGE, ERROR_MESSAGE))
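# Only MySQL gets the compatibility triggers above; other backends are expected
# to keep is_public and visibility consistent through the follow-up data migrations.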
def _change_nullability_and_default_on_is_public(meta):
# NOTE(hemanthm): we mark is_public as nullable so that when new versions
    # add data only to the visibility column, is_public can be null.
images = Table('images', meta, autoload=True)
images.c.is_public.alter(nullable=True, server_default=None)
def upgrade():
migrate_engine = op.get_bind()
meta = MetaData(bind=migrate_engine)
_add_visibility_column(meta)
_change_nullability_and_default_on_is_public(meta)
_add_triggers(migrate_engine)
| apache-2.0 | 7,434,511,919,462,939,000 | 37.172185 | 78 | 0.637058 | false |
ahirner/TabulaRazr-OS | data_query.py | 1 | 6364 | from datetime import date
import dateutil.parser as date_parser
from backend import config
from fuzzywuzzy import fuzz
from itertools import product
# Cascades:
# 1) case sensitive partial ratio on character level with penalty
# 2) case insensitive partial ratio on character level with penalty
# 3) token sorted case insensitive ratio with penalty
FUZZY_INV_CASCADES = 1.0 / 3.0
def fuzzy_str_match(query, string):
score = 1.0
inv_cascades = FUZZY_INV_CASCADES
min_fuzzy_ratio = config["min_fuzzy_ratio"]
query = query.encode('ascii', errors='ignore')
string = string.encode('ascii', errors='ignore')
#Penalize shorter target strings and early exit on null length strings
len_query = len(query)
len_string = len(string.strip())
if not len_string: return None
if not len_query: return score
penalty = min(len_string / float(len_query), 1.0)
fuzzy_partial = (fuzz.partial_ratio(query, string)/100.0) * penalty
#print ("fuzzy_partial of %s vs %s * penalty %.2f" % (query, string, penalty), fuzzy_partial)
if fuzzy_partial > min_fuzzy_ratio:
f_score = score - (1.0 - (fuzzy_partial - (1.0 - min_fuzzy_ratio)) / min_fuzzy_ratio) * inv_cascades
return f_score
score -= inv_cascades
q_l = query.lower()
s_l = string.lower()
fuzzy_partial = (fuzz.partial_ratio(q_l, s_l)/100.0) * penalty
#print ("fuzzy_partial lower_case of %s vs %s * penalty %.2f" % (query, string, penalty), fuzzy_partial)
if fuzzy_partial > min_fuzzy_ratio:
f_score = score - (1.0 - (fuzzy_partial - (1.0 - min_fuzzy_ratio)) / min_fuzzy_ratio) * inv_cascades
return f_score
score -= inv_cascades
fuzzy_partial = (fuzz.partial_token_sort_ratio(q_l, s_l)/100.0) * penalty
#print ("fuzzy_partial token_sort_lower_case of %s vs %s * penalty %.2f" % (query, string, penalty), fuzzy_partial)
if fuzzy_partial > min_fuzzy_ratio:
f_score = score - (1.0 - (fuzzy_partial - (1.0 - min_fuzzy_ratio)) / min_fuzzy_ratio) * inv_cascades
return f_score
return None
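# Illustrative scores (assuming config["min_fuzzy_ratio"] of roughly 0.75):
#   fuzzy_str_match("Total Revenues", "Total Revenues ($000)")  -> ~1.0 (case-sensitive cascade hits)
#   fuzzy_str_match("total revenues", "TOTAL REVENUES")         -> ~0.67 (case-insensitive cascade)
#   fuzzy_str_match("Total Revenues", "Depreciation")           -> None (below threshold in all cascades)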
#Flatmap from tables to sequence of tuples (confidence, table, row or None, value or None)
def filter_tables(tables, filter_dict, treshold = 0.0, only_max = False):
row = None
value = None
for t in tables:
if 'headers' in filter_dict:
max_conf, index, best_term = None, None, None
terms = filter_dict['headers']['terms']
_threshold = max(treshold, filter_dict['headers']['threshold'])
for term in terms:
if t['headers']:
current_max_conf = (max_conf if only_max else _threshold) or _threshold
scores_indices = ((val, idx) for (idx, val) in enumerate(fuzzy_str_match(term, h) for h in t['headers'] ) )
conf, idx = max(scores_indices)
                    if conf > current_max_conf and conf > max_conf:  # honour the threshold / best-so-far floor
                        max_conf = conf
                        index = idx
                        best_term = term
best_header = ""
#Todo: other filter criteria like column names, rows etc. and combinatorial confidence score
if max_conf:
yield max_conf, t, row, value
def get_fuzzy_date(string):
today = date.today()
v_ascii = string.encode("ascii", errors="ignore")
try:
dt = date_parser.parse(v_ascii, fuzzy=True, default=today)
if dt != today:
return dt
except:
return None
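# e.g. get_fuzzy_date("Dated as of June 1, 2016") would give datetime(2016, 6, 1),
# while strings with no recognisable date fall through and return None.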
def get_first_date(lines, query_string, threshold = 0.4):
for i, l in enumerate(lines):
if fuzzy_str_match(query_string, l) > threshold:
dt = get_fuzzy_date(l)
if dt:
return dt, i, l
def find_row(table, query_string, threshold = 0.4):
    #Find the first column typed 'other' (strings), then the row whose cell best matches query_string
try:
index = table['types'].index('other')
except ValueError:
print "no column consisting of mainly string data found"
return None
strings = (s[index]['value'] for s in table['data'])
#query_string can either be a single one or an iterable
if isinstance(query_string, basestring):
query_string = [query_string]
scores_indices = ((val, idx) for (idx, val) in ( (s[0], fuzzy_str_match(qs, s[1])) \
for qs, s in product(query_string, enumerate(strings))) )
val, idx = max(scores_indices)
if val >= threshold:
return table['data'][idx]
else:
return None
def closest_row_numeric_value(table, query_string, threshold = 0.4, raw_cell = False):
row = find_row(table, query_string, threshold)
if row:
for c in row:
if 'type' in c:
if c['type'] in ('integer'):
v = int(c['value'])
return (v, c) if raw_cell else v
elif c['type'] in ('large_num', 'small_float'):
v = float(c['value'].replace(",", ""))
return (v, c) if raw_cell else v
def get_key_values(table, key_queries, threshold = 0.4, raw_cell = False):
return { k : closest_row_numeric_value(table, kk, threshold, raw_cell) for k, kk in key_queries.iteritems() }
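# Usage sketch (hypothetical keys/queries, not from the source):
#   get_key_values(table, {"principal": "Principal Amount", "rate": ["Interest Rate", "Coupon"]})
# returns {"principal": <number or None>, "rate": <number or None>} taken from the best-matching rows.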
def find_column(table, query_string, types=None, subtypes=None, threshold = 0.4):
#Find first column with specific types
columns = []
for i, t in enumerate(zip(table['types'], table['subtypes'])):
t, st = t[0], t[1]
if t in (types or t) and st in (subtypes or st):
if fuzzy_str_match(query_string, table['captions'][i]) > threshold: return i
def filter_time_series(table, query_string, subtypes = ['dollar'], threshold = 0.4):
time_index = find_column(table, "", subtypes=['date', 'year'], threshold=threshold)
value_index = find_column(table, query_string, subtypes=subtypes, threshold=threshold)
for r in table['data']:
dt = get_fuzzy_date(r[time_index]['value'])
if dt:
c = r[value_index]
v = None
if c['type'] in ('integer'):
v = int(c['value'])
elif c['type'] in ('large_num', 'small_float'):
v = float(c['value'].replace(",", ""))
if v: yield dt, v
| agpl-3.0 | -6,371,302,341,923,677,000 | 38.042945 | 127 | 0.575896 | false |
dan-cristian/haiot | presence/__init__.py | 1 | 4256 | from main.logger_helper import L
from pydispatch import dispatcher
from main import thread_pool, sqlitedb
if sqlitedb:
from storage.sqalc import models
from common import Constant
from presence import presence_bt
from presence import presence_wifi
from storage.model import m
__author__ = 'Dan Cristian<[email protected]>'
initialised = False
def not_used_record_update(json=''):
# Log.logger.info('Got presence update')
if sqlitedb:
models.Presence().json_to_record_query(json_obj=json)
else:
# fixme
pass
def handle_event_presence_io(gpio_pin_code='', direction='', pin_value='', pin_connected=None):
try:
# Log.logger.info('Presence got event pin {} connected={}'.format(gpio_pin_code, pin_connected))
# skip too many updates, only capture when contact is not connected (for PIR sensors this is alarm)
#if not pin_connected:
if sqlitedb:
zonealarm = models.ZoneAlarm().query_filter_first(
models.ZoneAlarm.gpio_host_name.in_([Constant.HOST_NAME]),
models.ZoneAlarm.gpio_pin_code.in_([gpio_pin_code]))
else:
zonealarm = m.ZoneAlarm.find_one({m.ZoneAlarm.gpio_host_name: Constant.HOST_NAME,
m.ZoneAlarm.gpio_pin_code: gpio_pin_code})
# zone_id = None
# fixme: for now zonealarm holds gpio to zone mapping, should be made more generic
if zonealarm is not None:
zone_id = zonealarm.zone_id
if zone_id is not None:
zone = m.Zone.find_one({m.Zone.id: zone_id})
if zone is not None:
zone_name = zone.name
else:
L.l.warning("Zone not found for presence zoneid={}".format(zone_id))
zone_name = "zone_name not found"
record = m.Presence.find_one({m.Presence.zone_id: zone_id})
if record is None:
record = m.Presence()
record.event_type = zonealarm.sensor_type
record.zone_name = zone_name
# record.event_io_date = utils.get_base_location_now_date()
record.sensor_name = zonealarm.alarm_pin_name
record.is_connected = pin_connected
# Log.logger.info('Presence saving sensor {}'.format(record.sensor_name))
record.save_changed_fields(broadcast=True, persist=True)
else:
L.l.warning('Unable to find presence zone for pin {} in Alarm table'.format(gpio_pin_code))
except Exception as ex:
L.l.critical("Unable to save presence, er={}".format(ex), exc_info=True)
def handle_event_presence_cam(zone_name, cam_name, has_move):
L.l.debug("Got cam event zone {} cam {} move={}".format(zone_name, cam_name, has_move))
zone = m.Zone.find_one({m.Zone.name: zone_name})
if zone is not None:
record = m.Presence().find_one({m.Presence.zone_id: zone.id})
if record is None:
record = m.Presence()
record.event_type = Constant.PRESENCE_TYPE_CAM
record.zone_id = zone.id
record.zone_name = zone_name
# record.event_camera_date = utils.get_base_location_now_date()
record.sensor_name = cam_name
record.is_connected = bool(int(has_move))
L.l.debug("Saving cam event zone {} sensor {} is_conn={} record={}".format(
record.zone_name, record.sensor_name, record.is_connected, record))
record.save_changed_fields(broadcast=True, persist=True)
else:
L.l.warning('Unable to find presence zone for camera zone {}'.format(zone_name))
def unload():
L.l.info('Presence module unloading')
# ...
thread_pool.remove_callable(presence_bt.thread_run)
global initialised
initialised = False
def init():
L.l.debug('Presence module initialising')
thread_pool.add_interval_callable(presence_wifi.thread_run, run_interval_second=20)
dispatcher.connect(handle_event_presence_io, signal=Constant.SIGNAL_GPIO, sender=dispatcher.Any)
dispatcher.connect(handle_event_presence_cam, signal=Constant.SIGNAL_CAMERA, sender=dispatcher.Any)
global initialised
initialised = True
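# init() wires the GPIO and camera dispatcher signals to the handlers above and
# schedules the wifi presence scan every 20 seconds on the shared thread pool.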
| gpl-2.0 | -5,611,527,734,522,979,000 | 41.989899 | 107 | 0.626175 | false |
pombredanne/quirinus | data/codecs/.base/rename.py | 1 | 1084 | import os
import re
import codecs
import struct
paths = [p for p in os.listdir(".") if not(p.endswith(".ct"))]
for src in paths:
if (src.endswith(".py")):
continue
dest = src.replace(".cpp", ".ct")
with codecs.open(src, "rb") as stream:
data = stream.read()
pattern = re.compile(b"\\s*(0x[0-9A-Ha-h]+),\\s*(0x[0-9A-Ha-h]+),", re.A)
match = pattern.findall(data)
if (match):
name = src.replace(".cpp", "")
name = name.replace("_", "-")
if (name.startswith("MAC")):
name = name.lower()
elif (name.startswith("ATARIST")):
name = "AtariST"
elif (name.startswith("KPS9566")):
name = "KPS 9566"
elif (name.startswith("BIG5")):
name = "Big5"
name = name.encode("UTF-8")
print(name)
with codecs.open(dest, "wb") as stream:
stream.write(name)
stream.write(b'\0')
for pair in match:
byte = int(pair[0], 16)
code = int(pair[1], 16)
stream.write(struct.pack(">I", byte))
stream.write(struct.pack(">I", code))
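# Resulting .ct layout: the encoding name, a NUL byte, then one pair of
# big-endian uint32 values (source byte sequence, Unicode code point) per mapping.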
""_u8
""_u16
""_u32
""_f64
""_f80
""_QF
""_F
| lgpl-3.0 | 3,352,182,879,483,439,000 | 24.209302 | 75 | 0.558118 | false |
sephii/django | django/contrib/contenttypes/models.py | 1 | 7903 | from __future__ import unicode_literals
from django.apps import apps
from django.db import models
from django.db.utils import OperationalError, ProgrammingError
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_text
from django.utils.encoding import python_2_unicode_compatible
class ContentTypeManager(models.Manager):
use_in_migrations = True
# Cache to avoid re-looking up ContentType objects all over the place.
# This cache is shared by all the get_for_* methods.
_cache = {}
def get_by_natural_key(self, app_label, model):
try:
ct = self.__class__._cache[self.db][(app_label, model)]
except KeyError:
ct = self.get(app_label=app_label, model=model)
self._add_to_cache(self.db, ct)
return ct
def _get_opts(self, model, for_concrete_model):
if for_concrete_model:
model = model._meta.concrete_model
elif model._deferred:
model = model._meta.proxy_for_model
return model._meta
def _get_from_cache(self, opts):
key = (opts.app_label, opts.model_name)
return self.__class__._cache[self.db][key]
def get_for_model(self, model, for_concrete_model=True):
"""
Returns the ContentType object for a given model, creating the
ContentType if necessary. Lookups are cached so that subsequent lookups
for the same model don't hit the database.
"""
opts = self._get_opts(model, for_concrete_model)
try:
return self._get_from_cache(opts)
except KeyError:
pass
# The ContentType entry was not found in the cache, therefore we
# proceed to load or create it.
try:
# We start with get() and not get_or_create() in order to use
# the db_for_read (see #20401).
ct = self.get(app_label=opts.app_label, model=opts.model_name)
except (OperationalError, ProgrammingError):
# It's possible to migrate a single app before contenttypes,
# as it's not a required initial dependency (it's contrib!)
# Have a nice error for this.
raise RuntimeError(
"Error creating new content types. Please make sure contenttypes "
"is migrated before trying to migrate apps individually."
)
except self.model.DoesNotExist:
# Not found in the database; we proceed to create it. This time we
# use get_or_create to take care of any race conditions.
ct, created = self.get_or_create(
app_label=opts.app_label,
model=opts.model_name,
defaults={'name': opts.verbose_name_raw},
)
self._add_to_cache(self.db, ct)
return ct
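    # Usage sketch: ContentType.objects.get_for_model(SomeModel) queries or creates
    # the row at most once per model and database alias; later calls are answered
    # from the shared _cache dictionary.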
def get_for_models(self, *models, **kwargs):
"""
Given *models, returns a dictionary mapping {model: content_type}.
"""
for_concrete_models = kwargs.pop('for_concrete_models', True)
# Final results
results = {}
# models that aren't already in the cache
needed_app_labels = set()
needed_models = set()
needed_opts = set()
for model in models:
opts = self._get_opts(model, for_concrete_models)
try:
ct = self._get_from_cache(opts)
except KeyError:
needed_app_labels.add(opts.app_label)
needed_models.add(opts.model_name)
needed_opts.add(opts)
else:
results[model] = ct
if needed_opts:
cts = self.filter(
app_label__in=needed_app_labels,
model__in=needed_models
)
for ct in cts:
model = ct.model_class()
if model._meta in needed_opts:
results[model] = ct
needed_opts.remove(model._meta)
self._add_to_cache(self.db, ct)
for opts in needed_opts:
# These weren't in the cache, or the DB, create them.
ct = self.create(
app_label=opts.app_label,
model=opts.model_name,
name=opts.verbose_name_raw,
)
self._add_to_cache(self.db, ct)
results[ct.model_class()] = ct
return results
def get_for_id(self, id):
"""
Lookup a ContentType by ID. Uses the same shared cache as get_for_model
(though ContentTypes are obviously not created on-the-fly by get_by_id).
"""
try:
ct = self.__class__._cache[self.db][id]
except KeyError:
# This could raise a DoesNotExist; that's correct behavior and will
# make sure that only correct ctypes get stored in the cache dict.
ct = self.get(pk=id)
self._add_to_cache(self.db, ct)
return ct
def clear_cache(self):
"""
Clear out the content-type cache. This needs to happen during database
flushes to prevent caching of "stale" content type IDs (see
django.contrib.contenttypes.management.update_contenttypes for where
this gets called).
"""
self.__class__._cache.clear()
def _add_to_cache(self, using, ct):
"""Insert a ContentType into the cache."""
# Note it's possible for ContentType objects to be stale; model_class() will return None.
# Hence, there is no reliance on model._meta.app_label here, just using the model fields instead.
key = (ct.app_label, ct.model)
self.__class__._cache.setdefault(using, {})[key] = ct
self.__class__._cache.setdefault(using, {})[ct.id] = ct
@python_2_unicode_compatible
class ContentType(models.Model):
name = models.CharField(max_length=100)
app_label = models.CharField(max_length=100)
model = models.CharField(_('python model class name'), max_length=100)
objects = ContentTypeManager()
class Meta:
verbose_name = _('content type')
verbose_name_plural = _('content types')
db_table = 'django_content_type'
ordering = ('name',)
unique_together = (('app_label', 'model'),)
def __str__(self):
# self.name is deprecated in favor of using model's verbose_name, which
# can be translated. Formal deprecation is delayed until we have DB
# migration to be able to remove the field from the database along with
# the attribute.
#
# We return self.name only when users have changed its value from the
# initial verbose_name_raw and might rely on it.
model = self.model_class()
if not model or self.name != model._meta.verbose_name_raw:
return self.name
else:
return force_text(model._meta.verbose_name)
def model_class(self):
"Returns the Python model class for this type of content."
try:
return apps.get_model(self.app_label, self.model)
except LookupError:
return None
def get_object_for_this_type(self, **kwargs):
"""
Returns an object of this type for the keyword arguments given.
Basically, this is a proxy around this object_type's get_object() model
method. The ObjectNotExist exception, if thrown, will not be caught,
so code that calls this method should catch it.
"""
return self.model_class()._base_manager.using(self._state.db).get(**kwargs)
def get_all_objects_for_this_type(self, **kwargs):
"""
Returns all objects of this type for the keyword arguments given.
"""
return self.model_class()._base_manager.using(self._state.db).filter(**kwargs)
def natural_key(self):
return (self.app_label, self.model)
| bsd-3-clause | -7,295,056,630,378,533,000 | 38.515 | 105 | 0.594205 | false |
OpenCV-Python-Tutorials/Filter | filter.py | 1 | 1678 | import cv2
img_name = raw_input("Enter the image filename:")
img = cv2.imread(img_name,0)
def menu():
print "Select filter type:"
print "Press '1' for Low Pass filter."
print "Press '2' for High Pass filter."
print "Press '3' for Band Pass filter."
print "Press '4' for Notch filter."
print "Press 'q' to quit the program."
menu()
minTh=100
maxTh=200
def lpf(minTh):
l = img.shape[0]
w = img.shape[1]
for x in range(l):
for y in range(w):
if img[x,y]>minTh:
img[x,y]=0
cv2.imshow('Output',img)
cv2.waitKey(0)
cv2.destroyAllWindows()
def hpf(maxTh):
l = img.shape[0]
w = img.shape[1]
for x in range(l):
for y in range(w):
if img[x,y]<maxTh:
img[x,y]=0
cv2.imshow('Output',img)
cv2.waitKey(0)
cv2.destroyAllWindows()
def bpf():
	# Band pass: keep only intensities inside [minTh, maxTh]; everything outside
	# the band is zeroed (numpy boolean masking, same idea as lpf/hpf above).
	img[(img < minTh) | (img > maxTh)] = 0
	cv2.imshow('Output',img)
	cv2.waitKey(0)
	cv2.destroyAllWindows()
def brf():
	# Band reject (notch): zero out intensities inside [minTh, maxTh].
	img[(img >= minTh) & (img <= maxTh)] = 0
	cv2.imshow('Output',img)
	cv2.waitKey(0)
	cv2.destroyAllWindows()
while(True):
key = raw_input("Enter you choice:")
if key=='1':
cv2.namedWindow('Output',cv2.WINDOW_NORMAL)
cv2.createTrackbar('minTh:','Output',minTh,255,lpf)
print "You selected Low Pass filter"
lpf(minTh)
elif key=='2':
cv2.namedWindow('Output',cv2.WINDOW_NORMAL)
cv2.createTrackbar('maxTh:','Output',maxTh,255,hpf)
print "You selected High Pass filter"
hpf(maxTh)
elif key=='3':
print "You selected Band Pass filter"
bpf()
elif key=='4':
print "You selected Notch filter"
brf()
elif key == 'q':
print "Exit"
break
else:
print "Invalid option"
| mit | 1,386,468,332,461,299,000 | 24.815385 | 59 | 0.558999 | false |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-2.4.3/Lib/distutils/unixccompiler.py | 1 | 10493 | """distutils.unixccompiler
Contains the UnixCCompiler class, a subclass of CCompiler that handles
the "typical" Unix-style command-line C compiler:
* macros defined with -Dname[=value]
* macros undefined with -Uname
* include search directories specified with -Idir
* libraries specified with -lllib
* library search directories specified with -Ldir
* compile handled by 'cc' (or similar) executable with -c option:
compiles .c to .o
* link static library handled by 'ar' command (possibly with 'ranlib')
* link shared library handled by 'cc -shared'
"""
__revision__ = "$Id: unixccompiler.py 37184 2004-08-29 16:40:55Z loewis $"
import os, sys
from types import StringType, NoneType
from copy import copy
from distutils import sysconfig
from distutils.dep_util import newer
from distutils.ccompiler import \
CCompiler, gen_preprocess_options, gen_lib_options
from distutils.errors import \
DistutilsExecError, CompileError, LibError, LinkError
from distutils import log
# XXX Things not currently handled:
# * optimization/debug/warning flags; we just use whatever's in Python's
# Makefile and live with it. Is this adequate? If not, we might
# have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
# SunCCompiler, and I suspect down that road lies madness.
# * even if we don't know a warning flag from an optimization flag,
# we need some way for outsiders to feed preprocessor/compiler/linker
# flags in to us -- eg. a sysadmin might want to mandate certain flags
# via a site config file, or a user might want to set something for
# compiling this module distribution only via the setup.py command
# line, whatever. As long as these options come from something on the
# current system, they can be as system-dependent as they like, and we
# should just happily stuff them into the preprocessor/compiler/linker
# options and carry on.
class UnixCCompiler(CCompiler):
compiler_type = 'unix'
# These are used by CCompiler in two places: the constructor sets
# instance attributes 'preprocessor', 'compiler', etc. from them, and
# 'set_executable()' allows any of these to be set. The defaults here
# are pretty generic; they will probably have to be set by an outsider
# (eg. using information discovered by the sysconfig about building
# Python extensions).
executables = {'preprocessor' : None,
'compiler' : ["cc"],
'compiler_so' : ["cc"],
'compiler_cxx' : ["cc"],
'linker_so' : ["cc", "-shared"],
'linker_exe' : ["cc"],
'archiver' : ["ar", "-cr"],
'ranlib' : None,
}
if sys.platform[:6] == "darwin":
executables['ranlib'] = ["ranlib"]
# Needed for the filename generation methods provided by the base
# class, CCompiler. NB. whoever instantiates/uses a particular
# UnixCCompiler instance should set 'shared_lib_ext' -- we set a
# reasonable common default here, but it's not necessarily used on all
# Unices!
src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"]
obj_extension = ".o"
static_lib_extension = ".a"
shared_lib_extension = ".so"
dylib_lib_extension = ".dylib"
static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
if sys.platform == "cygwin":
exe_extension = ".exe"
def preprocess(self, source,
output_file=None, macros=None, include_dirs=None,
extra_preargs=None, extra_postargs=None):
ignore, macros, include_dirs = \
self._fix_compile_args(None, macros, include_dirs)
pp_opts = gen_preprocess_options(macros, include_dirs)
pp_args = self.preprocessor + pp_opts
if output_file:
pp_args.extend(['-o', output_file])
if extra_preargs:
pp_args[:0] = extra_preargs
if extra_postargs:
pp_args.extend(extra_postargs)
pp_args.append(source)
# We need to preprocess: either we're being forced to, or we're
# generating output to stdout, or there's a target output file and
# the source file is newer than the target (or the target doesn't
# exist).
if self.force or output_file is None or newer(source, output_file):
if output_file:
self.mkpath(os.path.dirname(output_file))
try:
self.spawn(pp_args)
except DistutilsExecError, msg:
raise CompileError, msg
def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
try:
self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
extra_postargs)
except DistutilsExecError, msg:
raise CompileError, msg
def create_static_lib(self, objects, output_libname,
output_dir=None, debug=0, target_lang=None):
objects, output_dir = self._fix_object_args(objects, output_dir)
output_filename = \
self.library_filename(output_libname, output_dir=output_dir)
if self._need_link(objects, output_filename):
self.mkpath(os.path.dirname(output_filename))
self.spawn(self.archiver +
[output_filename] +
objects + self.objects)
            # Not many Unices require ranlib anymore -- SunOS 4.x is, I
# think the only major Unix that does. Maybe we need some
# platform intelligence here to skip ranlib if it's not
# needed -- or maybe Python's configure script took care of
# it for us, hence the check for leading colon.
if self.ranlib:
try:
self.spawn(self.ranlib + [output_filename])
except DistutilsExecError, msg:
raise LibError, msg
else:
log.debug("skipping %s (up-to-date)", output_filename)
def link(self, target_desc, objects,
output_filename, output_dir=None, libraries=None,
library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None):
objects, output_dir = self._fix_object_args(objects, output_dir)
libraries, library_dirs, runtime_library_dirs = \
self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
libraries)
if type(output_dir) not in (StringType, NoneType):
raise TypeError, "'output_dir' must be a string or None"
if output_dir is not None:
output_filename = os.path.join(output_dir, output_filename)
if self._need_link(objects, output_filename):
ld_args = (objects + self.objects +
lib_opts + ['-o', output_filename])
if debug:
ld_args[:0] = ['-g']
if extra_preargs:
ld_args[:0] = extra_preargs
if extra_postargs:
ld_args.extend(extra_postargs)
self.mkpath(os.path.dirname(output_filename))
try:
if target_desc == CCompiler.EXECUTABLE:
linker = self.linker_exe[:]
else:
linker = self.linker_so[:]
if target_lang == "c++" and self.compiler_cxx:
linker[0] = self.compiler_cxx[0]
self.spawn(linker + ld_args)
except DistutilsExecError, msg:
raise LinkError, msg
else:
log.debug("skipping %s (up-to-date)", output_filename)
# -- Miscellaneous methods -----------------------------------------
# These are all used by the 'gen_lib_options() function, in
# ccompiler.py.
def library_dir_option(self, dir):
return "-L" + dir
def runtime_library_dir_option(self, dir):
# XXX Hackish, at the very least. See Python bug #445902:
# http://sourceforge.net/tracker/index.php
# ?func=detail&aid=445902&group_id=5470&atid=105470
# Linkers on different platforms need different options to
# specify that directories need to be added to the list of
# directories searched for dependencies when a dynamic library
# is sought. GCC has to be told to pass the -R option through
# to the linker, whereas other compilers just know this.
# Other compilers may need something slightly different. At
# this time, there's no way to determine this information from
# the configuration data stored in the Python installation, so
# we use this hack.
compiler = os.path.basename(sysconfig.get_config_var("CC"))
if sys.platform[:6] == "darwin":
# MacOSX's linker doesn't understand the -R flag at all
return "-L" + dir
elif sys.platform[:5] == "hp-ux":
return "+s -L" + dir
elif sys.platform[:7] == "irix646" or sys.platform[:6] == "osf1V5":
return ["-rpath", dir]
elif compiler[:3] == "gcc" or compiler[:3] == "g++":
return "-Wl,-R" + dir
else:
return "-R" + dir
def library_option(self, lib):
return "-l" + lib
def find_library_file(self, dirs, lib, debug=0):
shared_f = self.library_filename(lib, lib_type='shared')
dylib_f = self.library_filename(lib, lib_type='dylib')
static_f = self.library_filename(lib, lib_type='static')
for dir in dirs:
shared = os.path.join(dir, shared_f)
dylib = os.path.join(dir, dylib_f)
static = os.path.join(dir, static_f)
# We're second-guessing the linker here, with not much hard
# data to go on: GCC seems to prefer the shared library, so I'm
# assuming that *all* Unix C compilers do. And of course I'm
# ignoring even GCC's "-static" option. So sue me.
if os.path.exists(dylib):
return dylib
elif os.path.exists(shared):
return shared
elif os.path.exists(static):
return static
# Oops, didn't find it in *any* of 'dirs'
return None
| mit | -7,394,403,266,320,112,000 | 42.903766 | 77 | 0.597637 | false |
mldbai/mldb | container_files/init/mldb_logger.py | 1 | 2869 | #!/usr/bin/env python
# Copyright mldb.ai inc 2016
# Author: Jean Raby <[email protected]>
# TODO:
# - configure logging so that access/error logs go somewhere else than stderr
import fcntl
import functools
import grp
import jinja2
import os
import pwd
import pytz
import sys
import time
import tornado.web
from tornado.ioloop import IOLoop
from datetime import datetime
from collections import namedtuple, deque
try:
from mldb_logger_utils import RUNAS, HTTP_LISTEN_PORT
except NameError:
# provide defaults if templating didn't run
RUNAS = "nobody"
HTTP_LISTEN_PORT = 1234
LOGBUFSIZE = 8192
LogLine = namedtuple('LogLine', ['dt', 'data', ])
LOGS_MLDB_TEMPLATE = \
"""
<html><body>
<pre>
{%- for l in logbuf -%}
{{l.dt.strftime(timeformat)}} {{l.data}}
{%- endfor %}
</pre>
<a name="end"></a>
</body></html>
"""
def droppriv():
if os.getuid() != 0:
return # not root?
new_uid = pwd.getpwnam(RUNAS).pw_uid
new_gid = grp.getgrnam(RUNAS).gr_gid
os.setgroups([])
os.setgid(new_gid)
os.setuid(new_uid)
old_umask = os.umask(077)
def stdin_ready(f, logbuf, fd, events):
if events & IOLoop.READ:
try:
for line in f:
logline = LogLine(dt=datetime.now(pytz.utc), data=line.decode('utf8', 'replace'))
logbuf.append(logline)
sys.stdout.write(line)
                # line buffering is needed to make sure messages are emitted in real time
# simulate that by flushing every line...
sys.stdout.flush()
except IOError:
pass # If we get a EWOULDBLOCK, continue. EOF handled below
if events & IOLoop.ERROR:
exit(0)
class LogsMldbHandler(tornado.web.RequestHandler):
def get(self):
""" Sends the last n lines from logbuf, or all of it if n is not set """
n = self.get_argument("n", default=None)
try:
timeformat = "%FT%T.%f%z"
if logbuf[0].dt.tzname() == "UTC":
timeformat = "%FT%T.%fZ"
except IndexError:
pass # don't care, logbuf is probably empty
env = { "timeformat": timeformat,
"logbuf": list(logbuf)[-int(n):] if n else logbuf
}
out = jinja2.Environment().from_string(LOGS_MLDB_TEMPLATE).render(**env)
self.set_header('Content-Type', 'text/html')
self.write(out)
if __name__ == "__main__":
droppriv() # Early on, we don't need privileges for anything.
logbuf = deque(maxlen=LOGBUFSIZE)
io_loop = IOLoop.current()
# set stdin to non blocking mode for use with tornado
fl = fcntl.fcntl(sys.stdin.fileno(), fcntl.F_GETFL)
fcntl.fcntl(sys.stdin.fileno(), fcntl.F_SETFL, fl | os.O_NONBLOCK)
callback = functools.partial(stdin_ready, sys.stdin, logbuf)
io_loop.add_handler(sys.stdin.fileno(), callback,
io_loop.READ | io_loop.ERROR)
app = tornado.web.Application([ ("/logs/mldb", LogsMldbHandler) ])
app.listen(HTTP_LISTEN_PORT)
io_loop.start()
| apache-2.0 | 2,752,010,964,470,450,000 | 24.389381 | 89 | 0.654932 | false |
davibe/pygobject | tests/test_repository.py | 1 | 17918 | # -*- Mode: Python; py-indent-offset: 4 -*-
# vim: tabstop=4 shiftwidth=4 expandtab
#
# Copyright (C) 2013 Simon Feltman <[email protected]>
#
# test_repository.py: Test for the GIRepository module
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA
import unittest
import collections
import gi._gi as GIRepository
from gi.module import repository as repo
from gi.repository import GObject
from gi.repository import GLib
from gi.repository import GIMarshallingTests
from gi.repository import GIRepository as IntrospectedRepository
try:
import cairo
cairo
has_cairo = True
except ImportError:
has_cairo = False
def find_child_info(info, getter_name, name):
getter = getattr(info, getter_name)
for child in getter():
if child.get_name() == name:
return child
else:
raise ValueError('child info %s not found' % name)
class Test(unittest.TestCase):
def setUp(self):
repo.require('GObject')
repo.require('GIMarshallingTests')
def test_arg_info(self):
func_info = repo.find_by_name('GIMarshallingTests', 'array_fixed_out_struct')
args = func_info.get_arguments()
self.assertTrue(len(args), 1)
arg = args[0]
self.assertEqual(arg.get_container(), func_info)
self.assertEqual(arg.get_direction(), GIRepository.Direction.OUT)
self.assertEqual(arg.get_name(), 'structs')
self.assertEqual(arg.get_namespace(), 'GIMarshallingTests')
self.assertFalse(arg.is_caller_allocates())
self.assertFalse(arg.is_optional())
self.assertFalse(arg.is_return_value())
self.assertFalse(arg.may_be_null())
self.assertEqual(arg.get_destroy(), -1)
self.assertEqual(arg.get_ownership_transfer(), GIRepository.Transfer.NOTHING)
self.assertEqual(arg.get_scope(), GIRepository.ScopeType.INVALID)
self.assertEqual(arg.get_type().get_tag(), GIRepository.TypeTag.ARRAY)
def test_base_info(self):
info = repo.find_by_name('GIMarshallingTests', 'Object')
self.assertEqual(info.__name__, 'Object')
self.assertEqual(info.get_name(), 'Object')
self.assertEqual(info.__module__, 'gi.repository.GIMarshallingTests')
self.assertEqual(info.get_name_unescaped(), 'Object')
self.assertEqual(info.get_namespace(), 'GIMarshallingTests')
self.assertEqual(info.get_container(), None)
info2 = repo.find_by_name('GIMarshallingTests', 'Object')
self.assertFalse(info is info2)
self.assertEqual(info, info2)
self.assertTrue(info.equal(info2))
def test_object_info(self):
info = repo.find_by_name('GIMarshallingTests', 'Object')
self.assertEqual(info.get_parent(), repo.find_by_name('GObject', 'Object'))
self.assertTrue(isinstance(info.get_methods(), collections.Iterable))
self.assertTrue(isinstance(info.get_fields(), collections.Iterable))
self.assertTrue(isinstance(info.get_interfaces(), collections.Iterable))
self.assertTrue(isinstance(info.get_constants(), collections.Iterable))
self.assertTrue(isinstance(info.get_vfuncs(), collections.Iterable))
self.assertTrue(isinstance(info.get_properties(), collections.Iterable))
self.assertFalse(info.get_abstract())
self.assertEqual(info.get_class_struct(), repo.find_by_name('GIMarshallingTests', 'ObjectClass'))
self.assertEqual(info.get_type_name(), 'GIMarshallingTestsObject')
self.assertEqual(info.get_type_init(), 'gi_marshalling_tests_object_get_type')
self.assertFalse(info.get_fundamental())
self.assertEqual(info.get_parent(), repo.find_by_name('GObject', 'Object'))
def test_registered_type_info(self):
info = repo.find_by_name('GIMarshallingTests', 'Object')
# Call these from the class because GIObjectInfo overrides them
self.assertEqual(GIRepository.RegisteredTypeInfo.get_g_type(info),
GObject.type_from_name('GIMarshallingTestsObject'))
self.assertEqual(GIRepository.RegisteredTypeInfo.get_type_name(info),
'GIMarshallingTestsObject')
self.assertEqual(GIRepository.RegisteredTypeInfo.get_type_init(info),
'gi_marshalling_tests_object_get_type')
@unittest.skipUnless(has_cairo, 'Regress needs cairo')
def test_fundamental_object_info(self):
repo.require('Regress')
info = repo.find_by_name('Regress', 'TestFundamentalObject')
self.assertTrue(info.get_abstract())
self.assertTrue(info.get_fundamental())
self.assertEqual(info.get_ref_function(), 'regress_test_fundamental_object_ref')
self.assertEqual(info.get_unref_function(), 'regress_test_fundamental_object_unref')
self.assertEqual(info.get_get_value_function(), 'regress_test_value_get_fundamental_object')
self.assertEqual(info.get_set_value_function(), 'regress_test_value_set_fundamental_object')
def test_interface_info(self):
info = repo.find_by_name('GIMarshallingTests', 'Interface')
self.assertTrue(isinstance(info.get_methods(), collections.Iterable))
self.assertTrue(isinstance(info.get_vfuncs(), collections.Iterable))
self.assertTrue(isinstance(info.get_constants(), collections.Iterable))
self.assertTrue(isinstance(info.get_prerequisites(), collections.Iterable))
self.assertTrue(isinstance(info.get_properties(), collections.Iterable))
self.assertTrue(isinstance(info.get_signals(), collections.Iterable))
method = info.find_method('test_int8_in')
vfunc = info.find_vfunc('test_int8_in')
self.assertEqual(method.get_name(), 'test_int8_in')
self.assertEqual(vfunc.get_invoker(), method)
self.assertEqual(method.get_vfunc(), vfunc)
iface = info.get_iface_struct()
self.assertEqual(iface, repo.find_by_name('GIMarshallingTests', 'InterfaceIface'))
def test_struct_info(self):
info = repo.find_by_name('GIMarshallingTests', 'InterfaceIface')
self.assertTrue(isinstance(info, GIRepository.StructInfo))
self.assertTrue(isinstance(info.get_fields(), collections.Iterable))
self.assertTrue(isinstance(info.get_methods(), collections.Iterable))
self.assertTrue(isinstance(info.get_size(), int))
self.assertTrue(isinstance(info.get_alignment(), int))
self.assertTrue(info.is_gtype_struct())
self.assertFalse(info.is_foreign())
def test_enum_info(self):
info = repo.find_by_name('GIMarshallingTests', 'Enum')
self.assertTrue(isinstance(info, GIRepository.EnumInfo))
self.assertTrue(isinstance(info.get_values(), collections.Iterable))
self.assertTrue(isinstance(info.get_methods(), collections.Iterable))
self.assertFalse(info.is_flags())
self.assertTrue(info.get_storage_type() > 0) # might be platform dependent
def test_union_info(self):
info = repo.find_by_name('GIMarshallingTests', 'Union')
self.assertTrue(isinstance(info, GIRepository.UnionInfo))
self.assertTrue(isinstance(info.get_fields(), collections.Iterable))
self.assertTrue(isinstance(info.get_methods(), collections.Iterable))
def test_type_info(self):
func_info = repo.find_by_name('GIMarshallingTests', 'array_fixed_out_struct')
arg_info, = func_info.get_arguments()
type_info = arg_info.get_type()
self.assertTrue(type_info.is_pointer())
self.assertEqual(type_info.get_tag(), GIRepository.TypeTag.ARRAY)
self.assertEqual(type_info.get_tag_as_string(), 'array')
self.assertEqual(type_info.get_param_type(0).get_tag(),
GIRepository.TypeTag.INTERFACE)
self.assertEqual(type_info.get_param_type(0).get_interface(),
repo.find_by_name('GIMarshallingTests', 'SimpleStruct'))
self.assertEqual(type_info.get_interface(), None)
self.assertEqual(type_info.get_array_length(), -1)
self.assertEqual(type_info.get_array_fixed_size(), 2)
self.assertFalse(type_info.is_zero_terminated())
self.assertEqual(type_info.get_array_type(), GIRepository.ArrayType.C)
def test_field_info(self):
info = repo.find_by_name('GIMarshallingTests', 'InterfaceIface')
field = find_child_info(info, 'get_fields', 'test_int8_in')
self.assertEqual(field.get_name(), 'test_int8_in')
self.assertTrue(field.get_flags() & GIRepository.FieldInfoFlags.IS_READABLE)
self.assertFalse(field.get_flags() & GIRepository.FieldInfoFlags.IS_WRITABLE)
self.assertEqual(field.get_type().get_tag(), GIRepository.TypeTag.INTERFACE)
# don't test actual values because that might fail with architecture differences
self.assertTrue(isinstance(field.get_size(), int))
self.assertTrue(isinstance(field.get_offset(), int))
def test_property_info(self):
info = repo.find_by_name('GIMarshallingTests', 'PropertiesObject')
prop = find_child_info(info, 'get_properties', 'some-object')
flags = GObject.ParamFlags.READABLE | GObject.ParamFlags.WRITABLE | GObject.ParamFlags.CONSTRUCT
self.assertEqual(prop.get_flags(), flags)
self.assertEqual(prop.get_type().get_tag(), GIRepository.TypeTag.INTERFACE)
self.assertEqual(prop.get_type().get_interface(),
repo.find_by_name('GObject', 'Object'))
self.assertEqual(prop.get_ownership_transfer(), GIRepository.Transfer.NOTHING)
def test_callable_info(self):
func_info = repo.find_by_name('GIMarshallingTests', 'array_fixed_out_struct')
self.assertTrue(hasattr(func_info, 'invoke'))
self.assertTrue(isinstance(func_info.get_arguments(), collections.Iterable))
self.assertEqual(func_info.get_caller_owns(), GIRepository.Transfer.NOTHING)
self.assertFalse(func_info.may_return_null())
self.assertEqual(func_info.get_return_type().get_tag(), GIRepository.TypeTag.VOID)
self.assertRaises(AttributeError, func_info.get_return_attribute, '_not_an_attr')
@unittest.expectedFailure # https://bugzilla.gnome.org/show_bug.cgi?id=709462
@unittest.skipUnless(has_cairo, 'Regress needs cairo')
def test_signal_info(self):
repo.require('Regress')
info = repo.find_by_name('Regress', 'TestObj')
sig_info = find_child_info(info, 'get_signals', 'test')
sig_flags = GObject.SignalFlags.RUN_LAST | \
GObject.SignalFlags.NO_RECURSE | GObject.SignalFlags.NO_HOOKS
self.assertTrue(sig_info is not None)
self.assertTrue(isinstance(sig_info, GIRepository.CallableInfo))
self.assertTrue(isinstance(sig_info, GIRepository.SignalInfo))
self.assertEqual(sig_info.get_name(), 'test')
self.assertEqual(sig_info.get_class_closure(), None)
self.assertFalse(sig_info.true_stops_emit())
self.assertEqual(sig_info.get_flags(), sig_flags)
@unittest.expectedFailure # https://bugzilla.gnome.org/show_bug.cgi?id=709462
@unittest.skipUnless(has_cairo, 'Regress needs cairo')
def test_notify_signal_info_with_obj(self):
repo.require('Regress')
info = repo.find_by_name('Regress', 'TestObj')
sig_info = find_child_info(info, 'get_signals', 'sig-with-array-prop')
sig_flags = GObject.SignalFlags.RUN_LAST
self.assertTrue(sig_info is not None)
self.assertTrue(isinstance(sig_info, GIRepository.CallableInfo))
self.assertTrue(isinstance(sig_info, GIRepository.SignalInfo))
self.assertEqual(sig_info.get_name(), 'sig-with-array-prop')
self.assertEqual(sig_info.get_class_closure(), None)
self.assertFalse(sig_info.true_stops_emit())
self.assertEqual(sig_info.get_flags(), sig_flags)
def test_object_constructor(self):
info = repo.find_by_name('GIMarshallingTests', 'Object')
method = find_child_info(info, 'get_methods', 'new')
self.assertTrue(isinstance(method, GIRepository.CallableInfo))
self.assertTrue(isinstance(method, GIRepository.FunctionInfo))
self.assertTrue(method in info.get_methods())
self.assertEqual(method.get_name(), 'new')
self.assertFalse(method.is_method())
self.assertTrue(method.is_constructor())
self.assertEqual(method.get_symbol(), 'gi_marshalling_tests_object_new')
flags = method.get_flags()
self.assertFalse(flags & GIRepository.FunctionInfoFlags.IS_METHOD)
self.assertTrue(flags & GIRepository.FunctionInfoFlags.IS_CONSTRUCTOR)
self.assertFalse(flags & GIRepository.FunctionInfoFlags.IS_GETTER)
self.assertFalse(flags & GIRepository.FunctionInfoFlags.IS_SETTER)
self.assertFalse(flags & GIRepository.FunctionInfoFlags.WRAPS_VFUNC)
self.assertFalse(flags & GIRepository.FunctionInfoFlags.THROWS)
def test_method_info(self):
info = repo.find_by_name('GIMarshallingTests', 'Object')
method = find_child_info(info, 'get_methods', 'vfunc_return_value_only')
self.assertTrue(isinstance(method, GIRepository.CallableInfo))
self.assertTrue(isinstance(method, GIRepository.FunctionInfo))
self.assertTrue(method in info.get_methods())
self.assertEqual(method.get_name(), 'vfunc_return_value_only')
self.assertFalse(method.is_constructor())
self.assertEqual(method.get_symbol(), 'gi_marshalling_tests_object_vfunc_return_value_only')
self.assertTrue(method.is_method())
flags = method.get_flags()
self.assertTrue(flags & GIRepository.FunctionInfoFlags.IS_METHOD)
self.assertFalse(flags & GIRepository.FunctionInfoFlags.IS_CONSTRUCTOR)
self.assertFalse(flags & GIRepository.FunctionInfoFlags.IS_GETTER)
self.assertFalse(flags & GIRepository.FunctionInfoFlags.IS_SETTER)
self.assertFalse(flags & GIRepository.FunctionInfoFlags.WRAPS_VFUNC)
self.assertFalse(flags & GIRepository.FunctionInfoFlags.THROWS)
def test_vfunc_info(self):
info = repo.find_by_name('GIMarshallingTests', 'Object')
invoker = find_child_info(info, 'get_methods', 'vfunc_return_value_only')
vfunc = find_child_info(info, 'get_vfuncs', 'vfunc_return_value_only')
self.assertTrue(isinstance(vfunc, GIRepository.CallableInfo))
self.assertTrue(isinstance(vfunc, GIRepository.VFuncInfo))
self.assertEqual(vfunc.get_name(), 'vfunc_return_value_only')
self.assertEqual(vfunc.get_invoker(), invoker)
self.assertEqual(invoker, info.find_method('vfunc_return_value_only'))
self.assertEqual(vfunc.get_flags(), 0)
self.assertEqual(vfunc.get_offset(), 0xFFFF) # unknown offset
self.assertEqual(vfunc.get_signal(), None)
def test_flags_double_registration_error(self):
# a warning is printed for double registration and pygobject will
# also raise a RuntimeError.
GIMarshallingTests.NoTypeFlags # cause flags registration
info = repo.find_by_name('GIMarshallingTests', 'NoTypeFlags')
old_mask = GLib.log_set_always_fatal(GLib.LogLevelFlags.LEVEL_ERROR)
try:
self.assertRaises(RuntimeError,
GIRepository.flags_register_new_gtype_and_add,
info)
finally:
GLib.log_set_always_fatal(old_mask)
def test_enum_double_registration_error(self):
# a warning is printed for double registration and pygobject will
# also raise a RuntimeError.
GIMarshallingTests.Enum # cause enum registration
info = repo.find_by_name('GIMarshallingTests', 'Enum')
old_mask = GLib.log_set_always_fatal(GLib.LogLevelFlags.LEVEL_ERROR)
try:
self.assertRaises(RuntimeError,
GIRepository.enum_register_new_gtype_and_add,
info)
finally:
GLib.log_set_always_fatal(old_mask)
def test_enums(self):
self.assertTrue(hasattr(GIRepository, 'Direction'))
self.assertTrue(hasattr(GIRepository, 'Transfer'))
self.assertTrue(hasattr(GIRepository, 'ArrayType'))
self.assertTrue(hasattr(GIRepository, 'ScopeType'))
self.assertTrue(hasattr(GIRepository, 'VFuncInfoFlags'))
self.assertTrue(hasattr(GIRepository, 'FieldInfoFlags'))
self.assertTrue(hasattr(GIRepository, 'FunctionInfoFlags'))
self.assertTrue(hasattr(GIRepository, 'TypeTag'))
self.assertTrue(hasattr(GIRepository, 'InfoType'))
def test_introspected_argument_info(self):
self.assertTrue(isinstance(IntrospectedRepository.Argument.__info__,
GIRepository.UnionInfo))
arg = IntrospectedRepository.Argument()
self.assertTrue(isinstance(arg.__info__, GIRepository.UnionInfo))
old_info = IntrospectedRepository.Argument.__info__
IntrospectedRepository.Argument.__info__ = 'not an info'
self.assertRaises(TypeError, IntrospectedRepository.Argument)
IntrospectedRepository.Argument.__info__ = old_info
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | -2,148,817,928,267,952,600 | 49.331461 | 105 | 0.683726 | false |
Dolyphin/FSI_1D | initialize_data_structure.py | 1 | 1765 | # % ---------------------------------
# % filename: initialize_data_structure.py
# %
# % we set the physical data of the structure.
# %============================================================
# % physical data of the structure
# %===========================================================
import numpy as np
from initialize_data_fluid import *
vprel=np.zeros((2,1))
vprel[0] = 1e7 #% spring rigidity
vprel[1]= 100 #% mass of the piston
Lsp0 = 1.2 #% length of the spring (unstretched)
Lspe = Lsp0-(pres_init0-p_ext)*A/vprel[0] #% length at equilibrium
if Lspe <= 0:
    print('Length of the spring at equilibrium Lspe= %g meters!' % Lspe)
# % ----------------------------------
# % We compute the natural period of the (mass+spring) system
# %
omega0 = np.sqrt(vprel[0]/vprel[1]) #% natural pulsation
freq0 = omega0/(2*np.pi) #% natural frequency
T0 = 1/freq0 #% natural period
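# Sanity check with the values above (k = 1e7 N/m, m = 100 kg):
# omega0 = sqrt(1e7/100) ~ 316.2 rad/s, freq0 ~ 50.3 Hz, T0 ~ 0.02 s.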
#%
print('Piston mass= %g kg' % vprel[1])
print('Spring rigidity= %g N/m' % vprel[0])
print('Natural frequency of the mass-spring system= %g Hz\n' % freq0)
# % ============================================================
# % Data initialization for the structure
# % ===========================================================
# % beware of the sign of U0
# % u_t is the current displacement of the piston set to the initial displacement
# % u_dot_t is the current velocity of the piston
# % u_double_dot_t is the current acceleration of the piston
# %
u_t = U0
u_dot_t = 0
vsols0 = u_t
# %
# % ------------------------
# % initialization of the acceleration
vfg0 = (vpres[nnt-1]-0*pres_init)*A
u_double_dot_t = (vfg0+vprel[0]*(Lspe - u_t - Lsp0))/vprel[1]
| gpl-3.0 | 3,326,060,273,394,111,000 | 35.553191 | 81 | 0.504816 | false |
simzacks/jjb | jenkins_jobs/parser.py | 1 | 13461 | #!/usr/bin/env python
# Copyright (C) 2015 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Manage JJB yaml feature implementation
import copy
import fnmatch
import io
import itertools
import logging
import pkg_resources
import jenkins_jobs.local_yaml as local_yaml
from jenkins_jobs.constants import MAGIC_MANAGE_STRING
from jenkins_jobs.errors import JenkinsJobsException
from jenkins_jobs.registry import ModuleRegistry
from jenkins_jobs.formatter import deep_format
from jenkins_jobs import utils
from jenkins_jobs.xml_config import XmlJob
logger = logging.getLogger(__name__)
def matches(what, glob_patterns):
"""
Checks if the given string, ``what``, matches any of the glob patterns in
the iterable, ``glob_patterns``
:arg str what: String that we want to test if it matches a pattern
:arg iterable glob_patterns: glob patterns to match (list, tuple, set,
etc.)
"""
return any(fnmatch.fnmatch(what, glob_pattern)
for glob_pattern in glob_patterns)
class YamlParser(object):
def __init__(self, config=None, plugins_info=None):
self.data = {}
self.jobs = []
self.xml_jobs = []
self.config = config
self.registry = ModuleRegistry(self.config, plugins_info)
self.path = ["."]
if self.config:
if config.has_section('job_builder') and \
config.has_option('job_builder', 'include_path'):
self.path = config.get('job_builder',
'include_path').split(':')
self.keep_desc = self.get_keep_desc()
def get_keep_desc(self):
keep_desc = False
if self.config and self.config.has_section('job_builder') and \
self.config.has_option('job_builder', 'keep_descriptions'):
keep_desc = self.config.getboolean('job_builder',
'keep_descriptions')
return keep_desc
def parse_fp(self, fp):
# wrap provided file streams to ensure correct encoding used
data = local_yaml.load(utils.wrap_stream(fp), search_path=self.path)
if data:
if not isinstance(data, list):
raise JenkinsJobsException(
"The topmost collection in file '{fname}' must be a list,"
" not a {cls}".format(fname=getattr(fp, 'name', fp),
cls=type(data)))
for item in data:
cls, dfn = next(iter(item.items()))
group = self.data.get(cls, {})
if len(item.items()) > 1:
n = None
for k, v in item.items():
if k == "name":
n = v
break
# Syntax error
raise JenkinsJobsException("Syntax error, for item "
"named '{0}'. Missing indent?"
.format(n))
name = dfn['name']
if name in group:
self._handle_dups("Duplicate entry found in '{0}: '{1}' "
"already defined".format(fp.name, name))
group[name] = dfn
self.data[cls] = group
def parse(self, fn):
with io.open(fn, 'r', encoding='utf-8') as fp:
self.parse_fp(fp)
def _handle_dups(self, message):
if not (self.config and self.config.has_section('job_builder') and
self.config.getboolean('job_builder', 'allow_duplicates')):
logger.error(message)
raise JenkinsJobsException(message)
else:
logger.warn(message)
def getJob(self, name):
job = self.data.get('job', {}).get(name, None)
if not job:
return job
return self.applyDefaults(job)
def getJobGroup(self, name):
return self.data.get('job-group', {}).get(name, None)
def getJobTemplate(self, name):
job = self.data.get('job-template', {}).get(name, None)
if not job:
return job
return self.applyDefaults(job)
def applyDefaults(self, data, override_dict=None):
if override_dict is None:
override_dict = {}
whichdefaults = data.get('defaults', 'global')
defaults = copy.deepcopy(self.data.get('defaults',
{}).get(whichdefaults, {}))
if defaults == {} and whichdefaults != 'global':
raise JenkinsJobsException("Unknown defaults set: '{0}'"
.format(whichdefaults))
for key in override_dict.keys():
if key in defaults.keys():
defaults[key] = override_dict[key]
newdata = {}
newdata.update(defaults)
newdata.update(data)
return newdata
def formatDescription(self, job):
if self.keep_desc:
description = job.get("description", None)
else:
description = job.get("description", '')
if description is not None:
job["description"] = description + \
self.get_managed_string().lstrip()
def expandYaml(self, jobs_glob=None):
changed = True
while changed:
changed = False
for module in self.registry.modules:
if hasattr(module, 'handle_data'):
if module.handle_data(self):
changed = True
for job in self.data.get('job', {}).values():
if jobs_glob and not matches(job['name'], jobs_glob):
logger.debug("Ignoring job {0}".format(job['name']))
continue
logger.debug("Expanding job '{0}'".format(job['name']))
job = self.applyDefaults(job)
self.formatDescription(job)
self.jobs.append(job)
for project in self.data.get('project', {}).values():
logger.debug("Expanding project '{0}'".format(project['name']))
# use a set to check for duplicate job references in projects
seen = set()
for jobspec in project.get('jobs', []):
if isinstance(jobspec, dict):
# Singleton dict containing dict of job-specific params
jobname, jobparams = next(iter(jobspec.items()))
if not isinstance(jobparams, dict):
jobparams = {}
else:
jobname = jobspec
jobparams = {}
job = self.getJob(jobname)
if job:
# Just naming an existing defined job
if jobname in seen:
self._handle_dups("Duplicate job '{0}' specified "
"for project '{1}'".format(
jobname, project['name']))
seen.add(jobname)
continue
# see if it's a job group
group = self.getJobGroup(jobname)
if group:
for group_jobspec in group['jobs']:
if isinstance(group_jobspec, dict):
group_jobname, group_jobparams = \
next(iter(group_jobspec.items()))
if not isinstance(group_jobparams, dict):
group_jobparams = {}
else:
group_jobname = group_jobspec
group_jobparams = {}
job = self.getJob(group_jobname)
if job:
if group_jobname in seen:
self._handle_dups(
"Duplicate job '{0}' specified for "
"project '{1}'".format(group_jobname,
project['name']))
seen.add(group_jobname)
continue
template = self.getJobTemplate(group_jobname)
# Allow a group to override parameters set by a project
d = {}
d.update(project)
d.update(jobparams)
d.update(group)
d.update(group_jobparams)
# Except name, since the group's name is not useful
d['name'] = project['name']
if template:
self.expandYamlForTemplateJob(d, template,
jobs_glob)
continue
# see if it's a template
template = self.getJobTemplate(jobname)
if template:
d = {}
d.update(project)
d.update(jobparams)
self.expandYamlForTemplateJob(d, template, jobs_glob)
else:
raise JenkinsJobsException("Failed to find suitable "
"template named '{0}'"
.format(jobname))
# check for duplicate generated jobs
seen = set()
# walk the list in reverse so that last definition wins
for job in self.jobs[::-1]:
if job['name'] in seen:
self._handle_dups("Duplicate definitions for job '{0}' "
"specified".format(job['name']))
self.jobs.remove(job)
seen.add(job['name'])
def expandYamlForTemplateJob(self, project, template, jobs_glob=None):
dimensions = []
template_name = template['name']
# reject keys that are not useful during yaml expansion
for k in ['jobs']:
project.pop(k)
for (k, v) in project.items():
tmpk = '{{{0}}}'.format(k)
if tmpk not in template_name:
logger.debug("Variable %s not in name %s, rejecting from job"
" matrix expansion.", tmpk, template_name)
continue
if type(v) == list:
dimensions.append(zip([k] * len(v), v))
# XXX somewhat hackish to ensure we actually have a single
# pass through the loop
if len(dimensions) == 0:
dimensions = [(("", ""),)]
for values in itertools.product(*dimensions):
params = copy.deepcopy(project)
params = self.applyDefaults(params, template)
expanded_values = {}
for (k, v) in values:
if isinstance(v, dict):
inner_key = next(iter(v))
expanded_values[k] = inner_key
expanded_values.update(v[inner_key])
else:
expanded_values[k] = v
params.update(expanded_values)
params = deep_format(params, params)
allow_empty_variables = self.config \
and self.config.has_section('job_builder') \
and self.config.has_option(
'job_builder', 'allow_empty_variables') \
and self.config.getboolean(
'job_builder', 'allow_empty_variables')
for key in template.keys():
if key not in params:
params[key] = template[key]
params['template-name'] = template_name
expanded = deep_format(template, params, allow_empty_variables)
job_name = expanded.get('name')
if jobs_glob and not matches(job_name, jobs_glob):
continue
self.formatDescription(expanded)
self.jobs.append(expanded)
def get_managed_string(self):
# The \n\n is not hard coded, because they get stripped if the
# project does not otherwise have a description.
return "\n\n" + MAGIC_MANAGE_STRING
def generateXML(self):
for job in self.jobs:
self.xml_jobs.append(self.getXMLForJob(job))
def getXMLForJob(self, data):
kind = data.get('project-type', 'freestyle')
for ep in pkg_resources.iter_entry_points(
group='jenkins_jobs.projects', name=kind):
Mod = ep.load()
mod = Mod(self.registry)
xml = mod.root_xml(data)
self.gen_xml(xml, data)
job = XmlJob(xml, data['name'])
return job
def gen_xml(self, xml, data):
for module in self.registry.modules:
if hasattr(module, 'gen_xml'):
module.gen_xml(self, xml, data)
| apache-2.0 | -351,560,834,032,763,600 | 39.667674 | 79 | 0.508209 | false |
dhellmann/athensdocket | docket/server/browse.py | 1 | 2993 | import calendar
import datetime
from .app import app, mongo
from .filters import date
from .nav import set_navbar_active
from flask import render_template, g
from flask.ext.pymongo import ASCENDING
@app.route('/browse')
@set_navbar_active
def browse():
locations = sorted(mongo.db.cases.distinct('location'))
return render_template('browse.html',
locations=locations,
)
@app.route('/browse/date')
@app.route('/browse/date/<int:year>')
@app.route('/browse/date/<int:year>/<int:month>')
@app.route('/browse/date/<int:year>/<int:month>/<int:day>')
def browse_date(year=None, month=None, day=None):
g.navbar_active = 'browse'
if month and day:
first_day = datetime.datetime(year, month, day, 0, 0, 0)
last_day = first_day + datetime.timedelta(days=1)
date_range = date(first_day)
elif month:
first_day = datetime.datetime(year, month, 1, 0, 0, 0)
weekday, num_days = calendar.monthrange(year, month)
last_day = first_day + datetime.timedelta(days=num_days)
date_range = '%s-%02d' % (year, month)
elif year:
first_day = datetime.datetime(year, 1, 1, 0, 0, 0)
last_day = datetime.datetime(year + 1, 1, 1, 0, 0, 0)
date_range = unicode(year)
else:
# Show the list of years and months
books = mongo.db.books.find()
years = sorted(set(b['year'] for b in books))
return render_template('browse_date.html',
years=years,
)
app.logger.debug('first_day=%s, last_day=%s', first_day, last_day)
cases = mongo.db.cases.find({'date': {'$gte': first_day,
'$lt': last_day,
},
},
sort=[('date', ASCENDING)],
)
return render_template('browse_date_cases.html',
date_range=date_range,
cases=cases,
year=year,
month=month,
day=day,
)
@app.route('/browse/location')
@app.route('/browse/location/<location>')
def browse_location(location=None):
if location:
cases = mongo.db.cases.find({'location': location,
},
sort=[('date', ASCENDING)],
)
return render_template('browse_location_cases.html',
location=location,
cases=cases,
)
else:
# Show the list of locations
locations = sorted(mongo.db.cases.distinct('location'))
return render_template('browse_location.html',
locations=locations,
)
| apache-2.0 | 2,354,671,943,219,793,000 | 36.4125 | 70 | 0.495155 | false |
sprymix/importkit | importkit/utils/adapter.py | 1 | 4422 | ##
# Copyright (c) 2008-2013 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
class AdapterError(Exception):
pass
class Adapter(type):
adapters = {}
instance_adapters = {}
def __new__(mcls, name, bases, clsdict, *, adapts=None,
adapts_instances_of=None, pure=False,
adapterargs=None, **kwargs):
if adapts is not None and adapts_instances_of is not None:
msg = 'adapter class: adapts and adapts_instances_of args are ' + \
'mutually exclusive'
raise AdapterError(msg)
collection = None
if adapts is not None and not pure:
bases = bases + (adapts,)
if adapts_instances_of is not None:
pure = True
adapts = adapts_instances_of
collection = Adapter.instance_adapters
else:
collection = Adapter.adapters
result = super().__new__(mcls, name, bases, clsdict, **kwargs)
if adapts is not None:
assert issubclass(mcls, Adapter) and mcls is not Adapter
registry_key = mcls.get_registry_key(adapterargs)
try:
adapters = collection[registry_key]
except KeyError:
adapters = collection[registry_key] = {}
mcls.register_adapter(adapters, adapts, result)
result.__sx_adaptee__ = adapts
return result
def __init__(cls, name, bases, clsdict, *, adapts=None,
adapts_instances_of=None, pure=False,
adapterargs=None, **kwargs):
super().__init__(name, bases, clsdict, **kwargs)
@classmethod
def register_adapter(mcls, registry, adaptee, adapter):
assert adaptee not in registry
registry[adaptee] = adapter
@classmethod
def match_adapter(mcls, obj, adaptee, adapter):
if issubclass(obj, adapter) and obj is not adapter:
return obj
elif issubclass(obj, adaptee):
return adapter
@classmethod
def _get_adapter(mcls, obj, reversed_mro, collection, kwargs):
registry_key = mcls.get_registry_key(kwargs)
adapters = collection.get(registry_key)
if adapters is None:
return
result = None
seen = set()
for base in reversed_mro:
for adaptee, adapter in adapters.items():
found = mcls.match_adapter(base, adaptee, adapter)
if found and found not in seen:
result = found
seen.add(found)
if result is not None:
return result
@classmethod
def get_adapter(mcls, obj, **kwargs):
if isinstance(obj, type):
collection = Adapter.adapters
mro = obj.__mro__
else:
collection = Adapter.instance_adapters
mro = type(obj).__mro__
reversed_mro = tuple(reversed(mro))
result = mcls._get_adapter(obj, reversed_mro, collection, kwargs)
if result is not None:
return result
for mc in mcls.__subclasses__(mcls):
result = mc._get_adapter(obj, reversed_mro, collection, kwargs)
if result is not None:
return result
@classmethod
def adapt(mcls, obj):
adapter = mcls.get_adapter(obj.__class__)
if adapter is None:
raise AdapterError('could not find {}.{} adapter for {}'.format(
mcls.__module__, mcls.__name__,
obj.__class__.__name__))
elif adapter is not obj.__class__:
return adapter.adapt(obj)
else:
return obj
@classmethod
def get_registry_key(mcls, adapterargs):
if adapterargs:
return (mcls, frozenset(adapterargs.items()))
else:
return mcls
def get_adaptee(cls):
return cls.__sx_adaptee__
class MultiAdapter(Adapter):
@classmethod
def register_adapter(mcls, registry, adaptee, adapter):
try:
registry[adaptee] += (adapter,)
except KeyError:
registry[adaptee] = (adapter,)
@classmethod
def match_adapter(mcls, obj, adaptee, adapter):
if issubclass(obj, adapter) and obj not in adapter:
return (obj,)
elif issubclass(obj, adaptee):
return adapter
| mit | -4,098,590,764,919,314,400 | 28.878378 | 79 | 0.558345 | false |
elki-project/elki | addons/joglvis/src-manual/build.py | 1 | 1307 | #!/usr/bin/python
from lxml import etree
import gzip, re, copy, tempfile, subprocess, os
SVG_NAMESPACE="http://www.w3.org/2000/svg"
INKSCAPE_NAMESPACE="http://www.inkscape.org/namespaces/inkscape"
_safe = re.compile("^[A-Za-z]+$")
sizes=[64,32,16,8,4]
tree = etree.parse(gzip.open("Markers.svgz"))
labels = etree.ETXPath("//{%s}g/@{%s}label" % (SVG_NAMESPACE, INKSCAPE_NAMESPACE))(tree)
for l in labels:
if not _safe.match(l): raise Exception("Label not safe: "+l)
ctree = copy.deepcopy(tree)
layers = etree.ETXPath("//{%s}g[./@{%s}label]" % (SVG_NAMESPACE, INKSCAPE_NAMESPACE))(ctree)
for layer in layers:
l2 = layer.get("{%s}label" % INKSCAPE_NAMESPACE)
if l2 == l:
layer.attrib["style"]=""
else:
layer.attrib["style"]="display:none"
f = tempfile.NamedTemporaryFile(delete=False)
f.write(etree.tostring(ctree))
f.close()
cmd=["rsvg-convert",
"-w", "62", "-h", "62",
"-o", "/tmp/%s.png" % l,
f.name]
print "Running", " ".join(cmd)
subprocess.call(cmd)
os.unlink(f.name)
for size in sizes:
cmd = ["montage"]
for l in labels: cmd.append("/tmp/%s.png" % l)
cmd.extend(["-geometry", "%sx%s+1+1" % (size-2, size-2), "-background", "none", "PNG32:markers-%s.png" % size ])
print "Running", " ".join(cmd)
subprocess.call(cmd)
for l in labels: os.unlink("/tmp/%s.png" % l)
| agpl-3.0 | -8,183,996,135,032,793,000 | 29.395349 | 113 | 0.644989 | false |
bonus85/tilt-editor | tilt_hack.py | 1 | 13660 | #!/usr/bin/env python
"""
Analyze and edit .sketch files (the format stored inside .tilt)
Also supports generating .sketch files from json
@author: Sindre Tosse
"""
import struct
import json
import pdb
import numpy as np
try:
from stl import mesh
STL_SUPPORTED = True
STL_COLOR = (1., 0., 0., 1.)
STL_BRUSH_SIZE = 0.08
except ImportError:
print 'stl files not supported (run "pip install numpy-stl" to enable)'
STL_SUPPORTED = False
END = '' # Struct format
ORDERS_OF_TWO = [2**i for i in range(32)]
MAX_BYTE_VALUE = ORDERS_OF_TWO[-1] - 1
def bits(byte, max_order=32):
assert byte <= MAX_BYTE_VALUE
return [min(byte&oot, 1) for oot in ORDERS_OF_TWO[:max_order]]
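# Worked example: bits(0b101, max_order=4) -> [1, 0, 1, 0] (least-significant bit first).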
MAX_DV = 0.5 # Max length (x,y,z) between two points (from_json)
class SketchEditor:
STROKE_EXTENSION_ENCODING = {
0: 'I', # uint32 flags
}
POINT_EXTENSION_ENCODING = {
0: 'f', # float stroke pressure
1: 'I', # uint32 timestamp (ms)
}
def __init__(self, sentinel=-982080051, version=5, expected_brush_strokes=None):
self.sentinel = sentinel
self.version = version
self.expected_brush_strokes = expected_brush_strokes
self.strokes = []
@classmethod
def from_sketch_file(cls, file_name):
with open(file_name, 'rb') as f:
header_bytes = f.read(16)
sentinel, version, reserved, extra_size = \
struct.unpack(END+'iiii', header_bytes)
assert reserved == 0, \
'.sketch header reserved bytes are not zero: %d' %reserved
if extra_size > 0:
additional_header_data = f.read(extra_size)
print 'Warning: Additional header data present (skipping):'
print ' %r' %additional_header_data
num_strokes_bytes = f.read(4)
num_strokes = struct.unpack(END+'i', num_strokes_bytes)[0]
instance = SketchEditor(sentinel, version , num_strokes)
for i in range(num_strokes):
stroke_header = f.read(32)
#print repr(stroke_header), len(stroke_header)
idx, r, g, b, a, brush_size, stroke_extension, point_extension = \
struct.unpack(END+'ifffffII', stroke_header)
# int32/float32 for each set bit in stroke_extension & ffff
stroke_extension_mask = bits(stroke_extension & 0xffff, 16)
stroke_extension_data = {}
for i, bit in enumerate(stroke_extension_mask):
if bit:
fmt = SketchEditor.STROKE_EXTENSION_ENCODING.get(i, 'cccc')
stroke_extension_data[i] = struct.unpack(END+fmt, f.read(4))[0]
# uint32 size + <size> for each set bit in stroke_extension & ~ffff
stroke_extension_mask_extra = bits(stroke_extension & ~0xffff, 16)
stroke_extension_data_extra = {}
for i, bit in enumerate(stroke_extension_mask_extra):
if bit:
size = struct.unpack(END+'I', f.read(4))[0]
stroke_extension_data_extra[i] = f.read(size)
num_points = struct.unpack(END+'i', f.read(4))[0]
point_extension_mask = bits(point_extension & 0xffff)
stroke = Stroke(
(r, g, b, a),
brush_size,
brush_index=idx,
stroke_extension_mask=stroke_extension_mask,
stroke_extension_data=stroke_extension_data,
stroke_extension_mask_extra=stroke_extension_mask_extra,
stroke_extension_data_extra=stroke_extension_data_extra,
point_extension_mask=point_extension_mask,
expected_points=num_points
)
for j in range(num_points):
point_data = f.read(28)
x, y, z, or1, or2, or3, or4 = \
struct.unpack(END+'fffffff', point_data) # position and orientation
# int32/float32 for each set bit in point_extension
point_extension_data = {}
for i, bit in enumerate(point_extension_mask):
if bit:
fmt = SketchEditor.POINT_EXTENSION_ENCODING.get(i, 'cccc')
point_extension_data[i] = struct.unpack(END+fmt, f.read(4))[0]
point = StrokePoint(
stroke,
(x, y, z),
(or1, or2, or3, or4),
point_extension_data
)
stroke.add(point)
instance.add_stroke(stroke)
assert f.read() == '',\
'Error: file did not match format specification (incorrect length)'
return instance
@classmethod
def from_json(cls, file_name):
with open(file_name) as f:
json_sketch = json.load(f)
instance = SketchEditor()
for stroke_spec in json_sketch['strokes']:
stroke = Stroke(tuple(stroke_spec['color']), stroke_spec['brush_size'])
positions = np.array(stroke_spec['points'], dtype=float)
prev_pos = np.roll(positions, 1, 0)
prev_pos[0][0] = np.nan
for prev, position in zip(prev_pos, positions):
if not np.isnan(prev[0]):
dv = MAX_DV * (position-prev) / np.linalg.norm(position-prev)
print prev, position, dv
while np.linalg.norm(position-prev) > MAX_DV:
prev += dv
#print prev
stroke.add(StrokePoint(stroke, tuple(prev)))
#print position
stroke.add(StrokePoint(stroke, tuple(position)))
instance.add_stroke(stroke)
return instance
@classmethod
def from_stl(cls, file_name):
assert STL_SUPPORTED
stl_mesh = mesh.Mesh.from_file(file_name)
instance = SketchEditor()
for p0, p1, p2 in zip(stl_mesh.v0, stl_mesh.v1, stl_mesh.v2):
stroke = Stroke(STL_COLOR, STL_BRUSH_SIZE)
positions = np.array([p0, p1, p2, p0], dtype=float)
prev_pos = np.roll(positions, 1, 0)
prev_pos[0][0] = np.nan
for prev, position in zip(prev_pos, positions):
if not np.isnan(prev[0]):
dv = MAX_DV * (position-prev) / np.linalg.norm(position-prev)
print prev, position, dv
while np.linalg.norm(position-prev) > MAX_DV:
prev += dv
#print prev
stroke.add(StrokePoint(stroke, tuple(prev)))
#print position
stroke.add(StrokePoint(stroke, tuple(position)))
instance.add_stroke(stroke)
return instance
def add_stroke(self, stroke):
self.strokes.append(stroke)
def write(self, file_name):
with open(file_name, 'wb') as f:
f.write(struct.pack(END+'iiiii',
self.sentinel, self.version, 0, 0, len(self.strokes)))
for stroke in self.strokes:
f.write(stroke.pack())
def write_points(self, file_name):
npoints = sum(len(s.points) for s in self.strokes)
with open(file_name, 'w') as f:
f.write(str(npoints)+'\n')
for stroke in self.strokes:
for point in stroke.points:
f.write('{p.x} {p.y} {p.z}\n'.format(p=point))
def info(self):
print 'Sentinel: %d' %self.sentinel
print 'Version: %d' %self.version
print 'Brush strokes: %s expected, %d actual' %(
self.expected_brush_strokes, len(self.strokes))
Z16 = [0 for i in range(16)]
Z32 = [0 for i in range(32)]
class Stroke:
def __init__(
self,
(r, g, b, a),
brush_size,
brush_index=0,
stroke_extension_mask=Z16,
stroke_extension_data=None,
stroke_extension_mask_extra=Z16,
stroke_extension_data_extra=None,
point_extension_mask=Z32,
expected_points=None
):
self.r = r
self.g = g
self.b = b
self.a = a
self.brush_size = brush_size
self.brush_index = brush_index
self.stroke_extension_mask = stroke_extension_mask
self.stroke_extension_mask_extra = stroke_extension_mask_extra
self.point_extension_mask = point_extension_mask
self.stroke_extension_data = stroke_extension_data
self.stroke_extension_data_extra = stroke_extension_data_extra
self.expected_stroke_points = expected_points
self.points = []
def pack(self):
stroke_extension = sum(b * oot for b, oot in
zip(self.stroke_extension_mask, ORDERS_OF_TWO[:16]))
stroke_extension += sum(b * oot for b, oot in
zip(self.stroke_extension_mask_extra, ORDERS_OF_TWO[16:]))
point_extension = sum(b * oot for b, oot in
zip(self.point_extension_mask, ORDERS_OF_TWO))
s = struct.pack(END+'ifffffII',
self.brush_index, self.r, self.g, self.b, self.a,
self.brush_size, stroke_extension, point_extension)
for i, bit in enumerate(self.stroke_extension_mask):
if bit:
fmt = SketchEditor.STROKE_EXTENSION_ENCODING.get(i, 'cccc')
s += struct.pack(END+fmt, self.stroke_extension_data[i])
for i, bit in enumerate(self.stroke_extension_mask_extra):
if bit:
s += struct.pack(END+'I', len(self.stroke_extension_data_extra[i]))
s += self.stroke_extension_data_extra[i]
s += struct.pack(END+'i', len(self.points))
for point in self.points:
s += point.pack()
return s
def add(self, point):
self.points.append(point)
def info(self):
print 'Stroke color: (%f, %f, %f, %f)' %(self.r, self.g, self.b, self.a)
print 'Brush size: %f' %self.brush_size
print 'Stroke extension:'
for i, bit in enumerate(self.stroke_extension_mask):
if bit:
print ' %d: %r' %(i, self.stroke_extension_data[i])
print 'Stroke extension (extra):'
for i, bit in enumerate(self.stroke_extension_mask_extra):
if bit:
print ' %d: %r' %(i, self.stroke_extension_data_extra[i])
print 'Number of stroke points: %s expected, %d actual' %(
self.expected_stroke_points, len(self.points))
print 'First point:'
self.points[0].info()
print 'Last point:'
self.points[-1].info()
class StrokePoint:
def __init__(
self,
parent_stroke,
(x, y, z),
(or1, or2, or3, or4)=(0.,0.,0.,0.),
point_extension_data=None
):
self.parent_stroke = parent_stroke
self.x = x
self.y = y
self.z = z
self.or1 = or1
self.or2 = or2
self.or3 = or3
self.or4 = or4
self.point_extension_data = point_extension_data
def pack(self):
s = struct.pack(END+'fffffff',
self.x, self.y, self.z, self.or1, self.or2, self.or3, self.or4)
for i, bit in enumerate(self.parent_stroke.point_extension_mask):
if bit:
fmt = SketchEditor.POINT_EXTENSION_ENCODING.get(i, 'cccc')
s += struct.pack(END+fmt, self.point_extension_data[i])
return s
def info(self):
print 'Position: (%f, %f, %f)' %(self.x, self.y, self.z)
print 'Orientation: (%f, %f, %f, %f)' %(self.or1, self.or2, self.or3, self.or4)
print 'Point extension:'
for i, bit in enumerate(self.parent_stroke.point_extension_mask):
if bit:
print ' %d: %r' %(i, self.point_extension_data[i])
if __name__ == '__main__':
import argparse
import os
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("file_name", type=str, help="Name of file to open")
opts = parser.parse_args() # Parses sys.argv by default
name, ext = os.path.splitext(opts.file_name)
if ext == '.sketch':
t = SketchEditor.from_sketch_file(opts.file_name)
t.info()
for stroke in t.strokes:
stroke.info()
print 'Removing stroke extension'
t.strokes[0].stroke_extension_mask = [0 for i in range(16)]
t.strokes[0].stroke_extension_mask_extra = [0 for i in range(16)]
print 'Removing point extension'
t.strokes[0].point_extension_mask = [0 for i in range(32)]
print "Saving"
t.write('data.sketch')
elif ext == '.json':
t = SketchEditor.from_json(opts.file_name)
t.info()
for stroke in t.strokes:
stroke.info()
t.write('data.sketch')
elif ext == '.stl':
t = SketchEditor.from_stl(opts.file_name)
t.info()
for stroke in t.strokes:
stroke.info()
t.write('data.sketch')
else:
print 'Unknown file type: %s' %ext
| gpl-3.0 | 1,189,793,421,834,732,800 | 38.252874 | 91 | 0.527965 | false |
Micronaet/micronaet-order | auto_order_nomail_check/auto.py | 1 | 3871 | # -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import os
import sys
import logging
import openerp
import xlsxwriter
import openerp.netsvc as netsvc
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv, expression, orm
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from openerp import SUPERUSER_ID, api
from openerp import tools
from openerp.tools.translate import _
from openerp.tools.float_utils import float_round as round
from openerp.tools import (DEFAULT_SERVER_DATE_FORMAT,
DEFAULT_SERVER_DATETIME_FORMAT,
DATETIME_FORMATS_MAP,
float_compare)
_logger = logging.getLogger(__name__)
class SaleOrder(orm.Model):
""" Model name: SaleOrder
"""
_inherit = 'sale.order'
def send_sale_order_email_check(self, cr, uid, context=None):
        ''' Send a report email listing the partners that have confirmed
            sale orders but no email address set for sending invoices.
'''
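        # Partners with at least one sale order past the quotation stage
        # (state not in cancel/draft/sent) and no email address set, neither
        # the invoice email nor the default email: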
query = '''
SELECT name
FROM res_partner
WHERE
email_invoice_id is null and
email is null and
id IN (
SELECT distinct partner_id
FROM sale_order
WHERE
state not in ('cancel', 'draft', 'sent'));
'''
cr.execute(query)
partner_name = [item[0] for item in cr.fetchall()]
if not partner_name:
_logger.info('No email missed in partner with order found!')
return True
body = '<table>'
for name in partner_name:
body += '''<tr><td>%s</td></tr>''' % name
body += '</table>'
# ---------------------------------------------------------------------
# Send report:
# ---------------------------------------------------------------------
        # Send mail to the report administrators group:
group_pool = self.pool.get('res.groups')
model_pool = self.pool.get('ir.model.data')
thread_pool = self.pool.get('mail.thread')
group_id = model_pool.get_object_reference(
cr, uid,
'auto_order_nomail_check',
'group_order_email_report_admin')[1]
partner_ids = []
for user in group_pool.browse(
cr, uid, group_id, context=context).users:
partner_ids.append(user.partner_id.id)
thread_pool = self.pool.get('mail.thread')
thread_pool.message_post(cr, uid, False,
type='email',
body=body,
subject='%s: Partner senza mail per invio fattura: %s' % (
cr.dbname,
datetime.now().strftime(DEFAULT_SERVER_DATE_FORMAT),
),
partner_ids=[(6, 0, partner_ids)],
context=context,
)
return True
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 5,362,938,094,823,373,000 | 35.518868 | 87 | 0.54172 | false |
cgchemlab/chemlab | tools/convert_gromacs2espp.py | 1 | 4036 | #!/usr/bin/env python
# Copyright (C) 2012,2013,2015(H),2016
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import argparse
import math
import re
def convertTable(gro_in_file, esp_out_file, sigma=1.0, epsilon=1.0, c6=1.0, c12=1.0):
"""Convert GROMACS tabulated file into ESPResSo++ tabulated file (new file
is created). First column of input file can be either distance or angle.
For non-bonded files, c6 and c12 can be provided. Default value for sigma, epsilon,
c6 and c12 is 1.0. Electrostatics are not taken into account (f and fd columns).
Keyword arguments:
gro_in_file -- the GROMACS tabulated file name (bonded, nonbonded, angle
or dihedral).
esp_out_file -- filename of the ESPResSo++ tabulated file to be written.
sigma -- optional, depending on whether you want to convert units or not.
epsilon -- optional, depending on whether you want to convert units or not.
c6 -- optional
c12 -- optional
"""
# determine file type
bonded, angle, dihedral = False, False, False
re_bond = re.compile('.*_b[0-9]+.*')
re_angle = re.compile('.*_a[0-9]+.*')
re_dihedral = re.compile('.*_d[0-9]+.*')
if re.match(re_bond, gro_in_file):
bonded = True
elif re.match(re_angle, gro_in_file):
angle = True
bonded = True
elif re.match(re_dihedral, gro_in_file):
dihedral = True
bonded = True
fin = open(gro_in_file, 'r')
fout = open(esp_out_file, 'w')
if bonded: # bonded has 3 columns
for line in fin:
if line[0] == "#": # skip comment lines
continue
columns = line.split()
r = float(columns[0])
f = float(columns[1]) # energy
            fd = float(columns[2]) # force
# convert units
if angle or dihedral: # degrees to radians
r = math.radians(r)
                fd = fd * 180 / math.pi
else:
r = r / sigma
e = f / epsilon
f = fd*sigma / epsilon
if (not angle and not dihedral and r != 0) or \
(angle and r <= math.pi and r > 0) or \
(dihedral and r >= -math.pi and r <= math.pi):
fout.write("%15.8g %15.8g %15.8g\n" % (r, e, f))
else: # non-bonded has 7 columns
for line in fin:
if line.startswith('#'): # skip comment lines
continue
columns = line.split()
r = float(columns[0])
g = float(columns[3]) # dispersion
            gd = float(columns[4])
            h = float(columns[5]) # repulsion
            hd = float(columns[6])
            e = c6*g + c12*h
            f = c6*gd + c12*hd
# convert units
r = r / sigma
e = e / epsilon
f = f*sigma / epsilon
if r != 0: # skip 0
fout.write("%15.8g %15.8g %15.8g\n" % (r, e, f))
fin.close()
fout.close()
def _args():
parser = argparse.ArgumentParser()
parser.add_argument('in_file')
parser.add_argument('out_file')
return parser
def main():
args = _args().parse_args()
convertTable(args.in_file, args.out_file)
if __name__ == '__main__':
main()
| gpl-3.0 | 4,729,603,492,576,089,000 | 31.031746 | 87 | 0.582755 | false |
matthewmacleod/gsds | training/lib/train.py | 1 | 7015 | import sys, os
import time
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from torch.autograd import Variable
from torch.optim import lr_scheduler
import torchvision
from torchvision import datasets, models, transforms
from pmodels import RtResnet18ly2, FtResnet18
from trainer import train_model
from tester import test_model
import argparse
import random
def main():
'''
    Run training and model saving. See args for options.
'''
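    # Illustrative invocation (not part of the original script; paths and
    # values are placeholders):
    #   python train.py --bsize 64 --num_epochs 20 --model_type finetune \
    #       --data_dir ../data --train_dir train5 --val_dir val5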
parser = argparse.ArgumentParser()
parser.add_argument('--bsize', help='mini batch size, lower if have memory issues', type=int, default=32)
parser.add_argument('--learning_rate', help='learning rate', type=float, default=0.001)
parser.add_argument('--lrs', help='learning rate step decay, ie how many epochs to weight before decaying rate', type=int, default=4)
parser.add_argument('--lrsg', help='learning rate step decay factor,gamma decay rate', type=float, default=0.1)
parser.add_argument('--L2', help='L2 weight decay', type=float, default=0.01)
parser.add_argument('--num_epochs', help='number of epochs', type=int, default=12)
parser.add_argument('--random_seed', help='use random seed, use 0 for false, 1 for generate, and more than 2 to seed', type=int, default=1)
parser.add_argument('--model_type', help='retrain or finetune', type=str, default='retrain')
parser.add_argument('--train_dir', help='train directory in data root', type=str, default='train5')
parser.add_argument('--model_dir', help='model directory', type=str, default='../data/models/')
parser.add_argument('--val_dir', help='validation directory in data root', type=str, default='val5')
parser.add_argument('--data_dir', help='data directory', type=str, default='../data')
parser.add_argument('--print_class_results', dest='print_class_results', action='store_true')
parser.add_argument('--no_print_class_results', dest='print_class_results', action='store_false')
parser.add_argument('--print_batches', dest='print_batches', action='store_true')
parser.add_argument('--no_print_batches', dest='print_batches', action='store_false')
parser.set_defaults(print_class_results=True)
parser.set_defaults(print_batches=True)
# parse the args
args = parser.parse_args()
print('Settings for training:', 'batch size:', args.bsize, 'epochs:', args.num_epochs, 'learning rate:', args.learning_rate, 'lr decay', args.lrs, 'gamma', args.lrsg)
if args.random_seed == 1:
random_seed = random.randint(1,1000)
print('Random seed:',random_seed)
# CPU seed
torch.manual_seed(random_seed)
# GPU seed
torch.cuda.manual_seed_all(random_seed)
else:
random_seed = args.random_seed
use_gpu = torch.cuda.is_available()
data_transforms = { 'train': transforms.Compose([
transforms.Scale(224),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize([0.4914, 0.4822, 0.4465], [0.2023, 0.1994, 0.2010])
]),
'val': transforms.Compose([
transforms.Scale(224),
transforms.ToTensor(),
transforms.Normalize([0.4914, 0.4822, 0.4465], [0.2023, 0.1994, 0.2010])
]),
}
image_datasets = {'train':
datasets.ImageFolder(os.path.join(args.data_dir,args.train_dir),
data_transforms['train']),
'val':
datasets.ImageFolder(os.path.join(args.data_dir, args.val_dir),
data_transforms['val']),
'test':
datasets.ImageFolder(os.path.join(args.data_dir, 'test'),
data_transforms['val']),
}
if use_gpu:
dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x], batch_size=args.bsize,
shuffle=True, num_workers=8,
pin_memory=True)
for x in ['train', 'val','test']}
else:
dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x], batch_size=args.bsize,
shuffle=True, num_workers=8)
for x in ['train', 'val', 'test']}
dataset_sizes = {x: len(image_datasets[x]) for x in ['train', 'val', 'test']}
batch_frequency = 100
# assume batch sizes are the same
print_sizes = {x: len(image_datasets[x])//(args.bsize*batch_frequency) for x in ['train', 'val','test']}
class_names = image_datasets['train'].classes
nb_classes = len(class_names)
print('Data set sizes:', dataset_sizes)
print('Class names:', class_names)
print('Total classes:', nb_classes)
if args.model_type == 'retrain':
model_conv = RtResnet18ly2(nb_classes)
model_name = 'rt_resnet18ly2'
print('Model name:', model_name)
# optimize all parameters when we retrain
optimizer_conv = optim.Adam(model_conv.parameters(), lr=args.learning_rate, weight_decay=args.L2)
elif args.model_type == 'finetune':
model_conv = FtResnet18(nb_classes)
model_name = 'ft_resnet18'
print('Model name:', model_name)
# optimize only the last layers when we fine tune
optimizer_conv = optim.Adam(list(model_conv.preclassifier.parameters()) +
list(model_conv.classifier.parameters()), lr=args.learning_rate)
else:
sys.exit('Error check model type')
if use_gpu:
model_conv = model_conv.cuda()
criterion = nn.CrossEntropyLoss().cuda()
else:
criterion = nn.CrossEntropyLoss()
# Decay LR by a factor of lrsg (eg 0.1) every lrs epochs
exp_lr_scheduler = lr_scheduler.StepLR(optimizer_conv, step_size=args.lrs, gamma=args.lrsg)
model_conv, val_acc = train_model(model_conv, criterion, optimizer_conv, exp_lr_scheduler,
class_names, args.bsize, args.model_dir, model_name, print_sizes,
data_transforms, image_datasets, dataloaders, dataset_sizes,
use_gpu, args.num_epochs, args.print_class_results, args.print_batches)
# evaluate test set
test_model(model_conv, criterion, class_names, args.bsize, args.model_dir, model_name, print_sizes,
dataloaders, dataset_sizes, use_gpu, True)
# write out best model to disk
val_acc = round(100*val_acc,1)
torch.save(model_conv.state_dict(), args.model_dir + model_name +
'_va_' + str(val_acc) +'_model_wts.pth')
return
if __name__ == '__main__':
main()
| mit | 2,261,371,510,341,813,000 | 44.551948 | 170 | 0.596436 | false |
FAForever/api | api/leaderboards.py | 1 | 8221 | from faf.api import LeaderboardSchema
from faf.api import LeaderboardStatsSchema
from flask import request
from pymysql.cursors import DictCursor
from api import app
from api.error import ApiException, ErrorCode
from api.error import Error
from api.query_commons import fetch_data
from faf import db
MAX_PAGE_SIZE = 5000
SELECT_EXPRESSIONS = {
'id': 'r.id',
'login': 'l.login',
'mean': 'r.mean',
'deviation': 'r.deviation',
'num_games': 'r.numGames',
'is_active': 'r.is_active',
'rating': 'ROUND(r.mean - 3 * r.deviation)',
'ranking': '@rownum:=@rownum+1'
}
TABLE1V1 = 'ladder1v1_rating r JOIN login l on r.id = l.id, (SELECT @rownum:=%(row_num)s) n'
TABLEGLOBAL = 'global_rating r JOIN login l on r.id = l.id, (SELECT @rownum:=%(row_num)s) n'
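# The MySQL user variable @rownum is seeded with the requested page offset
# (row_num) so that the 'ranking' expression in SELECT_EXPRESSIONS keeps
# counting across pages of results.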
@app.route('/leaderboards/<string:leaderboard_type>')
def leaderboards_type(leaderboard_type):
"""
Lists all ranked 1v1 or global players.
    **Default Values**:
    page[number]=1
    page[size]=5000
    **Example Request**:
.. sourcecode:: http
GET /leaderboards/1v1 /leaderboards/global
Accept: application/vnd.api+json
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"data": [
{
"attributes": {
"deviation": 48.4808,
"id": "781",
"login": "Zock",
"mean": 2475.69,
"num_games": 1285,
"ranking": 1,
"rating": 2330,
"won_games": 946
},
"id": "781",
"type": "ranked1v1"
},
...
]
}
:param page[number]: The page number being requested (EX.: /leaderboards/1v1?page[number]=2)
:type page[number]: int
    :param page[size]: The number of players to return per page (EX.: /leaderboards/1v1?page[size]=10)
:type page[size]: int
:param leaderboard_type: Finds players in the 1v1 or global rating
:type leaderboard_type: 1v1 OR global
:status 200: No error
"""
sort_field = request.values.get('sort')
if sort_field:
raise ApiException([Error(ErrorCode.QUERY_INVALID_SORT_FIELD, sort_field)])
page = int(request.values.get('page[number]', 1))
page_size = int(request.values.get('page[size]', MAX_PAGE_SIZE))
row_num = (page - 1) * page_size
select = SELECT_EXPRESSIONS
args = {'row_num': row_num}
rating = find_leaderboard_type(leaderboard_type, select)
return fetch_data(LeaderboardSchema(), rating['table'], rating['select'], MAX_PAGE_SIZE, request, sort='-rating',
args=args, where='is_active = 1 AND r.numGames > 0')
@app.route('/leaderboards/<string:leaderboard_type>/<int:player_id>')
def leaderboards_type_get_player(leaderboard_type, player_id):
"""
    Gets a global or 1v1 player. The player must be active, must have played at least one
    ranked game, and must have statistics associated with them.
**Example Request**:
.. sourcecode:: http
GET /leaderboards/1v1/781 /leaderboards/global/781
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"data": {
"attributes": {
"deviation": 48.4808,
"id": "781",
"login": "Zock",
"mean": 2475.69,
"num_games": 1285,
"ranking": 1,
"rating": 2330,
"won_games": 946
},
"id": "781",
"type": "leaderboard"
}
}
:param leaderboard_type: Finds players in the 1v1 or global rating
:type leaderboard_type: 1v1 OR global
:param player_id: Player ID
:type player_id: int
:status 200: No error
:status 404: No entry with this id was found
"""
select_expressions = SELECT_EXPRESSIONS.copy()
select = select_expressions
rating = find_leaderboard_type(leaderboard_type, select)
select_expressions['ranking'] = """(SELECT count(*) FROM """ + rating['tableName'] + """
WHERE ROUND(mean - 3 * deviation) >= ROUND(r.mean - 3 * r.deviation)
AND is_active = 1
AND numGames > 0)
"""
result = fetch_data(LeaderboardSchema(), rating['table'], rating['select'], MAX_PAGE_SIZE, request,
many=False, where='r.id=%(id)s', args=dict(id=player_id, row_num=0))
if 'id' not in result['data']:
return {'errors': [{'title': 'No entry with this id was found'}]}, 404
return result
@app.route("/leaderboards/<string:rating_type>/stats")
def rating_stats(rating_type):
"""
Gets all player stats sorted by rankings.
**Example Request**:
.. sourcecode:: http
GET /leaderboards/1v1/stats /leaderboards/global/stats
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"data": {
"attributes": {
"rating_distribution": {
"-100": 206,
"-200": 192,
"-300": 137,
"-400": 77,
"-500": 15,
"-600": 10,
"-700": 1,
"0": 268,
"100": 282,
"1000": 122,
"1100": 86,
"1200": 72,
"1300": 55,
"1400": 42,
"1500": 35,
"1600": 25,
"1700": 15,
"1800": 14,
"1900": 7,
"200": 284,
"2000": 5,
"2100": 2,
"2200": 1,
"2300": 2,
"300": 316,
"400": 296,
"500": 239,
"600": 238,
"700": 208,
"800": 177,
"900": 140
}
},
"id": "/leaderboards/1v1/stats",
"type": "leaderboard_stats"
}
}
:status 200: No error
"""
rating = find_leaderboard_type(rating_type)
with db.connection:
cursor = db.connection.cursor(DictCursor)
cursor.execute("""
SELECT
FLOOR((mean - 3 * deviation)/100) * 100 AS `rating`,
count(*) as count
FROM """ + rating['tableName'] + """
WHERE `is_active` = 1
AND mean BETWEEN 0 AND 3000
AND deviation <= 240
AND numGames > 0
GROUP BY `rating`
ORDER BY CAST(`rating` as SIGNED) ASC;
""")
result = cursor.fetchall()
data = dict(id='/leaderboards/' + rating_type + '/stats', rating_distribution={})
for item in result:
data['rating_distribution'][str(int(item['rating']))] = item['count']
return LeaderboardStatsSchema().dump(data, many=False).data
def find_leaderboard_type(rating_type, select=None):
rating = {}
if rating_type == '1v1':
rating['table'] = TABLE1V1
rating['select'] = append_select_expression()
rating['tableName'] = 'ladder1v1_rating'
elif rating_type == 'global':
rating['table'] = TABLEGLOBAL
rating['select'] = select
rating['tableName'] = 'global_rating'
else:
raise ApiException([Error(ErrorCode.QUERY_INVALID_RATING_TYPE, rating_type)])
return rating
def append_select_expression():
select = SELECT_EXPRESSIONS.copy()
select['won_games'] = 'r.winGames'
select['lost_games'] = 'r.numGames - r.winGames'
select['winning_percentage'] = 'ROUND((r.winGames/r.numGames) * 100)'
return select
| gpl-3.0 | -7,423,851,992,924,885,000 | 28.256228 | 129 | 0.505291 | false |
chemelnucfin/tensorflow | tensorflow/python/kernel_tests/resource_variable_ops_test.py | 1 | 60449 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.resource_variable_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import gc
import os
import pickle
import re
from absl.testing import parameterized
import numpy as np
from tensorflow.core.framework import tensor_pb2
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import cpp_shape_inference_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import custom_gradient
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import list_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import momentum
from tensorflow.python.training import saver
from tensorflow.python.training import training_util
from tensorflow.python.util import compat
@test_util.with_control_flow_v2
class ResourceVariableOpsTest(test_util.TensorFlowTestCase,
parameterized.TestCase):
def tearDown(self):
gc.collect()
# This will only contain uncollectable garbage, i.e. reference cycles
# involving objects with __del__ defined.
self.assertEmpty(gc.garbage)
super(ResourceVariableOpsTest, self).tearDown()
@test_util.run_deprecated_v1
def testHandleDtypeShapeMatch(self):
with self.cached_session():
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
with self.assertRaises(ValueError):
resource_variable_ops.assign_variable_op(
handle, constant_op.constant(0.0, dtype=dtypes.float32)).run()
with self.assertRaises(ValueError):
resource_variable_ops.assign_variable_op(handle,
constant_op.constant(
[0],
dtype=dtypes.int32)).run()
resource_variable_ops.assign_variable_op(handle,
constant_op.constant(
0,
dtype=dtypes.int32)).run()
@test_util.run_gpu_only
def testGPUInt64(self):
with context.eager_mode(), context.device("gpu:0"):
v = resource_variable_ops.ResourceVariable(1, dtype=dtypes.int64)
self.assertAllEqual(1, v.numpy())
def testEagerNameNotIdentity(self):
with context.eager_mode():
v0 = resource_variable_ops.ResourceVariable(1.0, name="a")
v1 = resource_variable_ops.ResourceVariable(2.0, name="a")
self.assertAllEqual(v0.numpy(), 1.0)
self.assertAllEqual(v1.numpy(), 2.0)
def testEagerNameNotNeeded(self):
with context.eager_mode():
v0 = resource_variable_ops.ResourceVariable(1.0)
self.assertAllEqual(v0.numpy(), 1.0)
def testReadVariableDtypeMismatchEager(self):
with context.eager_mode():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1], name="foo")
resource_variable_ops.assign_variable_op(handle, 1)
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
"Trying to read variable with wrong dtype. "
"Expected float got int32"):
_ = resource_variable_ops.read_variable_op(handle, dtype=dtypes.float32)
def testEagerInitializedValue(self):
with context.eager_mode():
variable = resource_variable_ops.ResourceVariable(1.0, name="eager-init")
self.assertAllEqual(variable.numpy(), 1.0)
self.assertAllEqual(variable.initialized_value().numpy(), 1.0)
def testInitializeVariableUsingInitializedValue(self):
var1 = resource_variable_ops.ResourceVariable(1.0, name="var1")
var2 = resource_variable_ops.ResourceVariable(var1.initialized_value(),
name="var2")
self.assertAllEqual(var2.initialized_value(), 1.0)
def testEagerBool(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(False, name="bool_test")
self.assertAllEqual(bool(v), False)
def testEagerDeepCopy(self):
with context.eager_mode():
init_value = np.ones((4, 4, 4))
variable = resource_variable_ops.ResourceVariable(init_value,
name="init")
copied_variable = copy.deepcopy(variable)
copied_variable.assign(4 * np.ones((4, 4, 4)))
# Copying the variable should create a new underlying tensor with distinct
# values.
self.assertFalse(np.allclose(variable.numpy(), copied_variable.numpy()))
@test_util.run_deprecated_v1
def testGraphDeepCopy(self):
with self.cached_session():
init_value = np.ones((4, 4, 4))
variable = resource_variable_ops.ResourceVariable(init_value,
name="init")
with self.assertRaises(NotImplementedError):
copy.deepcopy(variable)
@test_util.run_in_graph_and_eager_modes
def testStridedSliceAssign(self):
v = resource_variable_ops.ResourceVariable([1.0, 2.0])
self.evaluate(variables.global_variables_initializer())
self.evaluate(v[0].assign(2.0))
self.assertAllEqual(self.evaluate(v), [2.0, 2.0])
@test_util.run_in_graph_and_eager_modes
def testVariableShape(self):
v = resource_variable_ops.ResourceVariable([1., 1.])
self.assertAllEqual(
tensor_util.constant_value(
resource_variable_ops.variable_shape(v.handle)),
[2])
@test_util.run_deprecated_v1
def testDifferentAssignGraph(self):
with ops.Graph().as_default():
v = resource_variable_ops.ResourceVariable(1.0)
ops.reset_default_graph()
    v.assign(2.0)  # Note: this fails if we run convert_to_tensor on a graph
    # other than the variable's own graph.
@test_util.run_deprecated_v1
def testFetchHandle(self):
with self.cached_session():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1], name="foo")
self.assertNotEmpty(handle.eval())
@test_util.run_deprecated_v1
def testCachedValueReadBeforeWrite(self):
with self.cached_session() as sess:
v = resource_variable_ops.ResourceVariable(0.0, caching_device="cpu:0")
self.evaluate(v.initializer)
value, _ = sess.run([v, v.assign_add(1.0)])
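      # With a caching device, fetching `v` returns the cached read, which is
      # not ordered after the assign_add issued in the same run call, so the
      # fetched value reflects the state before the write.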
self.assertAllEqual(value, 0.0)
def testAssignVariableDtypeMismatchEager(self):
with context.eager_mode():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1], name="foo")
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([1]))
with self.assertRaisesRegexp(
errors.InvalidArgumentError, "Trying to assign variable with wrong "
"dtype. Expected int32 got float"):
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([1.], dtype=dtypes.float32))
def testUnprintableHandle(self):
with context.eager_mode():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1], name="foo")
self.assertIn("<unprintable>", str(handle))
self.assertIn("<unprintable>", repr(handle))
@test_util.run_in_graph_and_eager_modes
def testDtypeSurvivesIdentity(self):
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
id_handle = array_ops.identity(handle)
self.evaluate(resource_variable_ops.assign_variable_op(
id_handle, constant_op.constant(0, dtype=dtypes.int32)))
def testUnreadOpName(self):
v = resource_variable_ops.ResourceVariable(1.0)
self.assertNotEqual(v.name, v.assign_add(1.0).name)
@test_util.run_in_graph_and_eager_modes
def testCreateRead(self):
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
self.evaluate(resource_variable_ops.assign_variable_op(
handle, constant_op.constant(1, dtype=dtypes.int32)))
value = self.evaluate(
resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32))
self.assertAllEqual(1, value)
@test_util.run_in_graph_and_eager_modes
def testManyAssigns(self):
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
create = resource_variable_ops.assign_variable_op(
handle, constant_op.constant(1, dtype=dtypes.int32))
with ops.control_dependencies([create]):
first_read = resource_variable_ops.read_variable_op(
handle, dtype=dtypes.int32)
with ops.control_dependencies([first_read]):
write = resource_variable_ops.assign_variable_op(
handle, constant_op.constant(2, dtype=dtypes.int32))
with ops.control_dependencies([write]):
second_read = resource_variable_ops.read_variable_op(
handle, dtype=dtypes.int32)
f, s = self.evaluate([first_read, second_read])
self.assertEqual(f, 1)
self.assertEqual(s, 2)
@test_util.run_in_graph_and_eager_modes
def testAssignAdd(self):
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
self.evaluate(resource_variable_ops.assign_variable_op(
handle, constant_op.constant(1, dtype=dtypes.int32)))
self.evaluate(resource_variable_ops.assign_add_variable_op(
handle, constant_op.constant(1, dtype=dtypes.int32)))
read = self.evaluate(
resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32))
self.assertEqual(read, 2)
@test_util.run_in_graph_and_eager_modes
def testScatterAdd(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_add(
handle, [0], constant_op.constant([[2]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
@test_util.run_in_graph_and_eager_modes
def testGradientGatherNd(self):
v = resource_variable_ops.ResourceVariable(
np.random.uniform(size=[2, 2]), dtype=dtypes.float32)
with backprop.GradientTape() as tape:
l = array_ops.gather_nd(v, [[1, 1]])
l = math_ops.reduce_sum(l)
grads = tape.gradient(l, v)
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(self.evaluate(grads), [[0., 0.], [0., 1.]])
@test_util.run_deprecated_v1
def testDefaultGradientDtype(self):
v = resource_variable_ops.ResourceVariable(
np.random.uniform(size=[2, 2]), dtype=dtypes.float64)
c = constant_op.constant(1.)
identity = array_ops.identity_n([c, v.handle])
# TODO(b/137403775): Remove this.
custom_gradient.copy_handle_data(v.handle, identity[1])
g = gradients_impl.gradients(identity[0], [c, v.handle])
self.assertEqual(g[1].dtype, dtypes.float64)
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(g[1], [[0., 0.], [0., 0.]])
@test_util.run_deprecated_v1
def testUnconnectedGradientZeros(self):
b = resource_variable_ops.ResourceVariable(initial_value=[[3., 4.]])
c = constant_op.constant(0.)
g = gradients_impl.gradients(c, [b], unconnected_gradients="zero")[0]
self.assertAllEqual(g.shape.as_list(), [1, 2])
@test_util.run_in_graph_and_eager_modes
def testGradientGatherNdIndexedSlices(self):
v = resource_variable_ops.ResourceVariable(
np.random.uniform(size=[2, 2]), dtype=dtypes.float32)
with backprop.GradientTape() as tape:
l = array_ops.gather_nd(v, [[1], [1]])
l = math_ops.reduce_sum(l)
grads = tape.gradient(l, v)
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(self.evaluate(grads.values), [[1., 1.], [1., 1.]])
@test_util.run_in_graph_and_eager_modes
def testScatterSub(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_sub(
handle, [0], constant_op.constant([[2]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[-1]])
@test_util.run_in_graph_and_eager_modes
def testScatterMul(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_mul(
handle, [0], constant_op.constant([[5]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[5]])
def testEagerPickle(self):
with context.eager_mode():
tmp_dir = self.get_temp_dir()
fname = os.path.join(tmp_dir, "var.pickle")
with open(fname, "wb") as f:
v = resource_variable_ops.ResourceVariable(
10.0,
dtype=dtypes.float16,
name="v")
pickle.dump(v, f)
with open(fname, "rb") as f:
new_v = pickle.load(f)
self.assertEqual(new_v.name, v.name)
self.assertEqual(new_v.shape, v.shape)
self.assertEqual(new_v.dtype, v.dtype)
self.assertEqual(new_v.trainable, v.trainable)
self.assertAllEqual(new_v.numpy(), v.numpy())
@test_util.run_in_graph_and_eager_modes
def testScatterDiv(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_div(
handle, [0], constant_op.constant([[3]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[2]])
def testUseResource(self):
v = variables.VariableV1(1.0, use_resource=True)
self.assertIsInstance(v, resource_variable_ops.ResourceVariable)
def testEagerNoUseResource(self):
with context.eager_mode():
v = variables.Variable(1.0)
self.assertIsInstance(v, resource_variable_ops.ResourceVariable)
@test_util.run_in_graph_and_eager_modes
def testScatterMin(self):
with ops.device("cpu:0"):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(handle,
constant_op.constant(
[[6]],
dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_min(handle, [0],
constant_op.constant(
[[3]],
dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
def testMetagraph(self):
with ops.Graph().as_default():
with variable_scope.variable_scope("foo", use_resource=True):
a = variable_scope.get_variable("a", initializer=10.0)
momentum.MomentumOptimizer(
learning_rate=0.001, momentum=0.1).minimize(
a,
colocate_gradients_with_ops=True,
global_step=training_util.get_or_create_global_step())
graph = ops.get_default_graph()
meta_graph_def = saver.export_meta_graph(graph=graph)
with ops.Graph().as_default():
saver.import_meta_graph(meta_graph_def, import_scope="")
meta_graph_two = saver.export_meta_graph(graph=graph)
self.assertEqual(meta_graph_def, meta_graph_two)
@test_util.run_in_graph_and_eager_modes
def testScatterMax(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_max(
handle, [0], constant_op.constant([[3]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[6]])
@test_util.run_in_graph_and_eager_modes
def testScatterAddScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_add(
handle, [0], constant_op.constant(2, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
@test_util.run_in_graph_and_eager_modes
def testScatterSubScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_sub(
handle, [0], constant_op.constant(2, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[-1]])
@test_util.run_in_graph_and_eager_modes
def testScatterMulScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_mul(
handle, [0], constant_op.constant(5, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[5]])
@test_util.run_in_graph_and_eager_modes
def testScatterDivScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_div(
handle, [0], constant_op.constant(3, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[2]])
@test_util.run_in_graph_and_eager_modes
def testScatterMinScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_min(
handle, [0], constant_op.constant(3, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
@test_util.run_in_graph_and_eager_modes
def testScatterMaxScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_max(
handle, [0], constant_op.constant(3, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[6]])
@test_util.run_in_graph_and_eager_modes
def testScatterAddVariableMethod(self):
v = resource_variable_ops.ResourceVariable([0.0, 1.5], name="add")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_add(ops.IndexedSlices(indices=[1], values=[2.5])))
self.assertAllEqual([0.0, 4.0], self.evaluate(v))
@test_util.run_in_graph_and_eager_modes
def testScatterSubVariableMethod(self):
v = resource_variable_ops.ResourceVariable([0.0, 2.5], name="sub")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_sub(ops.IndexedSlices(indices=[1], values=[1.5])))
self.assertAllEqual([0.0, 1.0], self.evaluate(v))
@test_util.run_in_graph_and_eager_modes
def testScatterMaxVariableMethod(self):
v = resource_variable_ops.ResourceVariable([0.0, 4.0], name="max1")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_max(ops.IndexedSlices(indices=[1], values=[5.0])))
self.assertAllEqual([0.0, 5.0], self.evaluate(v))
v = resource_variable_ops.ResourceVariable([0.0, 3.5], name="max2")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_max(ops.IndexedSlices(indices=[1], values=[2.0])))
self.assertAllEqual([0.0, 3.5], self.evaluate(v))
@test_util.run_in_graph_and_eager_modes
def testScatterMinVariableMethod(self):
v = resource_variable_ops.ResourceVariable([0.0, 4.0], name="min1")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_min(ops.IndexedSlices(indices=[1], values=[5.0])))
self.assertAllEqual([0.0, 4.0], self.evaluate(v))
v = resource_variable_ops.ResourceVariable([0.0, 3.5], name="min2")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_min(ops.IndexedSlices(indices=[1], values=[2.0])))
self.assertAllEqual([0.0, 2.0], self.evaluate(v))
@test_util.run_in_graph_and_eager_modes
def testScatterMulVariableMethod(self):
v = resource_variable_ops.ResourceVariable([0.0, 4.0], name="mul")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_mul(ops.IndexedSlices(indices=[1], values=[3.0])))
self.assertAllEqual([0.0, 12.0], self.evaluate(v))
@test_util.run_in_graph_and_eager_modes
def testScatterDivVariableMethod(self):
v = resource_variable_ops.ResourceVariable([0.0, 6.0], name="div")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_div(ops.IndexedSlices(indices=[1], values=[2.0])))
self.assertAllEqual([0.0, 3.0], self.evaluate(v))
@test_util.run_in_graph_and_eager_modes
def testScatterUpdateVariableMethod(self):
v = resource_variable_ops.ResourceVariable([0.0, 6.0], name="update")
self.evaluate(variables.global_variables_initializer())
self.evaluate(
v.scatter_update(ops.IndexedSlices(indices=[1], values=[3.0])))
self.assertAllEqual([0.0, 3.0], self.evaluate(v))
@test_util.run_deprecated_v1
def testScatterUpdateString(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.string, shape=[1, 1])
self.evaluate(resource_variable_ops.assign_variable_op(
handle, constant_op.constant([["a"]], dtype=dtypes.string)))
self.evaluate(resource_variable_ops.resource_scatter_update(
handle, [0], constant_op.constant([["b"]], dtype=dtypes.string)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.string)
self.assertEqual(compat.as_bytes(self.evaluate(read)[0][0]),
compat.as_bytes("b"))
@test_util.run_deprecated_v1
def testScatterUpdateStringScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.string, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(handle,
constant_op.constant(
[["a"]],
dtype=dtypes.string)))
self.evaluate(
resource_variable_ops.resource_scatter_update(handle, [0],
constant_op.constant(
"b",
dtype=dtypes.string)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.string)
self.assertEqual(
compat.as_bytes(self.evaluate(read)[0][0]), compat.as_bytes("b"))
# TODO(alive): get this to work in Eager mode.
def testGPU(self):
with test_util.use_gpu():
abc = variable_scope.get_variable(
"abc",
shape=[1],
initializer=init_ops.ones_initializer(),
use_resource=True)
self.evaluate(variables.global_variables_initializer())
self.assertEqual(
self.evaluate(
resource_variable_ops.var_is_initialized_op(abc.handle)),
True)
def testScatterBool(self):
with context.eager_mode():
ref = resource_variable_ops.ResourceVariable(
[False, True, False], trainable=False)
indices = math_ops.range(3)
updates = constant_op.constant([True, True, True])
state_ops.scatter_update(ref, indices, updates)
self.assertAllEqual(ref.read_value(), [True, True, True])
@test_util.run_in_graph_and_eager_modes
def testConstraintArg(self):
constraint = lambda x: x
v = resource_variable_ops.ResourceVariable(
initial_value=lambda: 1, constraint=constraint, name="var0")
self.assertEqual(v.constraint, constraint)
constraint = 0
with self.assertRaises(ValueError):
v = resource_variable_ops.ResourceVariable(
initial_value=lambda: 1, constraint=constraint, name="var1")
# TODO(alive): how should this work in Eager mode?
@test_util.run_deprecated_v1
def testInitFn(self):
with self.cached_session():
v = resource_variable_ops.ResourceVariable(
initial_value=lambda: 1, dtype=dtypes.float32)
self.assertEqual(v.handle.op.colocation_groups(),
v.initializer.inputs[1].op.colocation_groups())
def testCountUpTo(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(0, name="upto")
self.assertAllEqual(v.count_up_to(1), 0)
with self.assertRaises(errors.OutOfRangeError):
v.count_up_to(1)
def testCountUpToFunction(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(0, name="upto")
self.assertAllEqual(state_ops.count_up_to(v, 1), 0)
with self.assertRaises(errors.OutOfRangeError):
state_ops.count_up_to(v, 1)
@test_util.run_in_graph_and_eager_modes
def testInitFnDtype(self):
v = resource_variable_ops.ResourceVariable(
initial_value=lambda: 1, dtype=dtypes.float32, name="var0")
self.assertEqual(dtypes.float32, v.value().dtype)
@test_util.run_in_graph_and_eager_modes
def testInitFnNoDtype(self):
v = resource_variable_ops.ResourceVariable(initial_value=lambda: 1,
name="var2")
self.assertEqual(dtypes.int32, v.value().dtype)
@test_util.run_in_graph_and_eager_modes
def testInitializeAllVariables(self):
v = resource_variable_ops.ResourceVariable(1, dtype=dtypes.float32,
name="var0")
self.evaluate(variables.global_variables_initializer())
self.assertEqual(1.0, self.evaluate(v.value()))
@test_util.run_in_graph_and_eager_modes
def testOperatorOverload(self):
v = resource_variable_ops.ResourceVariable(1.0, name="var0")
self.evaluate(variables.global_variables_initializer())
self.assertEqual(2.0, self.evaluate(v + v))
@test_util.run_in_graph_and_eager_modes
def testAssignMethod(self):
v = resource_variable_ops.ResourceVariable(1.0, name="var0")
self.evaluate(variables.global_variables_initializer())
self.evaluate(v.assign(2.0))
self.assertEqual(2.0, self.evaluate(v.value()))
# Tests for the 'read_value' argument:
assign_with_read = v.assign(3.0, read_value=True)
self.assertEqual(3.0, self.evaluate(assign_with_read))
assign_without_read = v.assign(4.0, read_value=False)
if context.executing_eagerly():
self.assertIsNone(assign_without_read)
else:
self.assertIsInstance(assign_without_read, ops.Operation)
self.evaluate(assign_without_read)
self.assertEqual(4.0, self.evaluate(v.value()))
@test_util.run_in_graph_and_eager_modes
def testLoad(self):
v = resource_variable_ops.ResourceVariable(1.0, name="var0")
self.evaluate(variables.global_variables_initializer())
v.load(2.0)
self.assertEqual(2.0, self.evaluate(v.value()))
def testShapePassedToGradient(self):
with ops.Graph().as_default():
@custom_gradient.custom_gradient
def differentiable_scatter_update(handle, indices, values):
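        # Wraps resource_scatter_update with a custom gradient whose grad fn
        # checks that the incoming IndexedSlices has a statically known
        # dense_shape.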
with ops.control_dependencies([
resource_variable_ops.resource_scatter_update(
handle, indices, values)]):
new_handle = array_ops.identity(handle)
def grad(dresult):
self.assertIsNotNone(
tensor_util.constant_value(dresult.dense_shape))
return [dresult, None, None]
return new_handle, grad
var = variable_scope.get_variable(
"foo", shape=[20], initializer=init_ops.zeros_initializer,
dtype=dtypes.float64, use_resource=True)
indices = math_ops.range(10)
updates = math_ops.range(9, -1, -1, dtype=dtypes.float64)
new_handle = differentiable_scatter_update(var.handle, indices, updates)
gathered = resource_variable_ops.resource_gather(
new_handle, indices, dtype=var.dtype)
gradients_impl.gradients([gathered], [updates])
def testToFromProtoCachedValue(self):
with ops.Graph().as_default():
v_def = resource_variable_ops.ResourceVariable(
initial_value=constant_op.constant(3.0)).to_proto()
v_prime = resource_variable_ops.ResourceVariable(variable_def=v_def)
self.assertIsNone(getattr(v_prime, "_cached_value", None))
other_v_def = resource_variable_ops.ResourceVariable(
caching_device="cpu:0",
initial_value=constant_op.constant(3.0)).to_proto()
other_v_prime = resource_variable_ops.ResourceVariable(
variable_def=other_v_def)
self.assertIsNotNone(other_v_prime._cached_value)
def testVariableDefInitializedInstances(self):
with ops.Graph().as_default(), self.cached_session():
v_def = resource_variable_ops.ResourceVariable(
initial_value=constant_op.constant(3.0)).to_proto()
with ops.Graph().as_default(), self.cached_session():
# v describes a VariableDef-based variable without an initial value.
v = resource_variable_ops.ResourceVariable(variable_def=v_def)
self.assertEqual(3.0, self.evaluate(v.initialized_value()))
# initialized_value should not rerun the initializer_op if the variable
# has already been initialized elsewhere.
self.evaluate(v.assign(1.0))
self.assertEqual(1.0, v.initialized_value().eval())
v_def.ClearField("initial_value_name")
with ops.Graph().as_default(), self.cached_session():
# Restoring a legacy VariableDef proto that does not have
# initial_value_name set should still work.
v = resource_variable_ops.ResourceVariable(variable_def=v_def)
# We should also be able to re-export the variable to a new meta graph.
self.assertProtoEquals(v_def, v.to_proto())
# But attempts to use initialized_value will result in errors.
with self.assertRaises(ValueError):
self.evaluate(v.initialized_value())
def testTrainableInProto(self):
with ops.Graph().as_default():
non_trainable_variable = resource_variable_ops.ResourceVariable(
trainable=False,
initial_value=constant_op.constant(10.0))
self.assertEqual(
False,
resource_variable_ops.ResourceVariable(
variable_def=non_trainable_variable.to_proto())
.trainable)
trainable_variable = resource_variable_ops.ResourceVariable(
trainable=True,
initial_value=constant_op.constant(10.0))
self.assertEqual(
True,
resource_variable_ops.ResourceVariable(
variable_def=trainable_variable.to_proto())
.trainable)
@test_util.run_in_graph_and_eager_modes
def testSparseRead(self):
init_value = np.reshape(np.arange(np.power(4, 3)), (4, 4, 4))
v = resource_variable_ops.ResourceVariable(
constant_op.constant(init_value, dtype=dtypes.int32), name="var3")
self.evaluate(variables.global_variables_initializer())
value = self.evaluate(v.sparse_read([0, 3, 1, 2]))
self.assertAllEqual(init_value[[0, 3, 1, 2], ...], value)
@test_util.run_in_graph_and_eager_modes
def testGatherNd(self):
init_value = np.reshape(np.arange(np.power(4, 3)), (4, 4, 4))
v = resource_variable_ops.ResourceVariable(
constant_op.constant(init_value, dtype=dtypes.int32), name="var3")
self.evaluate(variables.global_variables_initializer())
value_op = v.gather_nd([[0, 0], [1, 2], [3, 3]])
self.assertAllEqual([3, 4], value_op.shape)
value = self.evaluate(value_op)
self.assertAllEqual([[0, 1, 2, 3], [24, 25, 26, 27], [60, 61, 62, 63]],
value)
value_op = v.gather_nd([[0, 0, 0], [1, 2, 3], [3, 3, 3]])
self.assertAllEqual([3], value_op.shape)
value = self.evaluate(value_op)
self.assertAllEqual([0, 27, 63], value)
@test_util.run_deprecated_v1
def testToFromProto(self):
with self.cached_session():
v = resource_variable_ops.ResourceVariable(1.0)
self.evaluate(variables.global_variables_initializer())
w = resource_variable_ops.ResourceVariable.from_proto(v.to_proto())
self.assertEqual(2, math_ops.add(w, 1).eval())
self.assertEqual(v._handle, w._handle)
self.assertEqual(v._graph_element, w._graph_element)
@test_util.run_in_graph_and_eager_modes
def testAssignAddMethod(self):
v = resource_variable_ops.ResourceVariable(1.0, name="var0")
self.evaluate(variables.global_variables_initializer())
self.evaluate(v.assign_add(1.0))
self.assertEqual(2.0, self.evaluate(v.value()))
# Tests for the 'read_value' argument:
assign_with_read = v.assign_add(1.0, read_value=True)
self.assertEqual(3.0, self.evaluate(assign_with_read))
assign_without_read = v.assign_add(1.0, read_value=False)
if context.executing_eagerly():
self.assertIsNone(assign_without_read)
else:
self.assertIsInstance(assign_without_read, ops.Operation)
self.evaluate(assign_without_read)
self.assertEqual(4.0, self.evaluate(v.value()))
@test_util.run_in_graph_and_eager_modes
def testAssignSubMethod(self):
v = resource_variable_ops.ResourceVariable(3.0, name="var0")
self.evaluate(variables.global_variables_initializer())
self.evaluate(v.assign_sub(1.0))
self.assertEqual(2.0, self.evaluate(v.value()))
# Tests for the 'read_value' argument:
assign_with_read = v.assign_sub(1.0, read_value=True)
self.assertEqual(1.0, self.evaluate(assign_with_read))
assign_without_read = v.assign_sub(1.0, read_value=False)
if context.executing_eagerly():
self.assertIsNone(assign_without_read)
else:
self.assertIsInstance(assign_without_read, ops.Operation)
self.evaluate(assign_without_read)
self.assertEqual(0.0, self.evaluate(v.value()))
@test_util.run_in_graph_and_eager_modes
@test_util.run_v1_only("b/120545219")
def testDestroyResource(self):
v = resource_variable_ops.ResourceVariable(3.0, name="var0")
self.evaluate(variables.global_variables_initializer())
self.assertEqual(3.0, self.evaluate(v.value()))
self.evaluate(resource_variable_ops.destroy_resource_op(v.handle))
with self.assertRaises(errors.FailedPreconditionError):
self.evaluate(v.value())
# Handle to a resource not actually created.
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
# Should raise no exception
self.evaluate(resource_variable_ops.destroy_resource_op(
handle, ignore_lookup_error=True))
@test_util.run_deprecated_v1
def testAssignDifferentShapes(self):
with self.cached_session() as sess, variable_scope.variable_scope(
"foo", use_resource=True):
var = variable_scope.get_variable("x", shape=[1, 1], dtype=dtypes.float32)
placeholder = array_ops.placeholder(dtypes.float32)
assign = var.assign(placeholder)
sess.run(
[assign],
feed_dict={placeholder: np.zeros(shape=[2, 2], dtype=np.float32)})
def testAssignDifferentShapesEagerNotAllowed(self):
with context.eager_mode():
with variable_scope.variable_scope("foo"):
var = variable_scope.get_variable("x", shape=[1, 1],
dtype=dtypes.float32)
with self.assertRaisesRegexp(ValueError,
"Shapes.*and.*are incompatible"):
assign = var.assign(np.zeros(shape=[2, 2]))
self.evaluate(assign)
@test_util.disable_xla("XLA doesn't allow changing shape at assignment, as "
"dictated by tf2xla/xla_resource.cc:SetTypeAndShape")
@test_util.run_in_graph_and_eager_modes
def testAssignDifferentShapesAllowed(self):
var = resource_variable_ops.ResourceVariable(
initial_value=np.zeros(shape=[1, 1]),
shape=tensor_shape.TensorShape(None))
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(np.zeros(shape=[1, 1]), var.read_value())
self.evaluate(var.assign(np.zeros(shape=[2, 2])))
self.assertAllEqual(np.zeros(shape=[2, 2]), var.read_value())
@test_util.run_in_graph_and_eager_modes
def testInitValueWrongShape(self):
with self.assertRaisesWithPredicateMatch(
ValueError, r"not compatible with"):
var = resource_variable_ops.ResourceVariable(
initial_value=np.zeros(shape=[3]),
shape=[4])
self.evaluate(variables.global_variables_initializer())
self.evaluate(var.read_value())
@test_util.run_deprecated_v1
def testDtypeAfterFromProto(self):
v = resource_variable_ops.ResourceVariable(2.0)
w = resource_variable_ops.ResourceVariable.from_proto(v.to_proto())
self.assertIsInstance(w.dtype, dtypes.DType)
self.assertEqual(v.dtype, w.dtype)
# TODO(alive): get caching to work in eager mode.
@test_util.run_deprecated_v1
def testCachingDevice(self):
with ops.device("/job:server/task:1"):
v = resource_variable_ops.ResourceVariable(
2.0, caching_device="/job:localhost")
self.assertEqual("/job:localhost", v.value().device)
with self.assertRaises(ValueError):
_ = v.value().op.get_attr("_class")
with ops.colocate_with(v.op):
w = resource_variable_ops.ResourceVariable(
2.0, caching_device="/job:localhost")
self.assertEqual("/job:localhost", w.value().device)
with self.assertRaises(ValueError):
_ = w.value().op.get_attr("_class")
@test_util.run_deprecated_v1
def testSharedName(self):
with self.cached_session():
v = resource_variable_ops.ResourceVariable(300.0, name="var4")
self.evaluate(variables.global_variables_initializer())
w = resource_variable_ops.var_handle_op(
dtype=v.dtype.base_dtype, shape=v.get_shape(), shared_name="var4",
# Needed in Eager since we get a unique container name by default.
container=ops.get_default_graph()._container)
w_read = resource_variable_ops.read_variable_op(w, v.dtype.base_dtype)
self.assertEqual(300.0, self.evaluate(w_read))
x = resource_variable_ops.var_handle_op(
dtype=v.dtype.base_dtype, shape=v.get_shape(), shared_name="var5",
container=ops.get_default_graph()._container)
with self.assertRaisesOpError(
"(Resource .*/var5/.* does not exist|Read of uninitialized variable)"
):
resource_variable_ops.read_variable_op(x, v.dtype.base_dtype).eval()
@test_util.run_deprecated_v1
def testSharedNameWithNamescope(self):
with self.cached_session():
with ops.name_scope("foo"):
v = resource_variable_ops.ResourceVariable(300.0, name="var6")
self.assertEqual("foo/var6", v._shared_name) # pylint: disable=protected-access
self.assertEqual("foo/var6:0", v.name)
self.evaluate(variables.global_variables_initializer())
w = resource_variable_ops.var_handle_op(
dtype=v.dtype.base_dtype, shape=v.get_shape(), shared_name="foo/var6",
# Needed in Eager since we get a unique container name by default.
container=ops.get_default_graph()._container)
w_read = resource_variable_ops.read_variable_op(w, v.dtype.base_dtype)
self.assertEqual(300.0, self.evaluate(w_read))
@test_util.run_in_graph_and_eager_modes
def testShape(self):
v = resource_variable_ops.ResourceVariable(
name="var4", initial_value=array_ops.ones(shape=[10, 20, 35]))
self.assertEqual("(10, 20, 35)", str(v.shape))
self.assertEqual("(10, 20, 35)", str(v.get_shape()))
self.assertEqual("(10, 20, 35)", str(v.value().shape))
self.assertEqual("(3, 20, 35)", str(v.sparse_read([0, 1, 2]).shape))
if not context.executing_eagerly():
self.assertEqual(
"<unknown>",
str(v.sparse_read(array_ops.placeholder(dtypes.int32)).shape))
@test_util.run_deprecated_v1
def testSetInitialValue(self):
with self.cached_session():
# Initialize variable with a value different from the initial value passed
# in the constructor.
v = resource_variable_ops.ResourceVariable(2.0)
v.initializer.run(feed_dict={v.initial_value: 3.0})
self.assertEqual(3.0, v.value().eval())
@test_util.run_v1_only("b/120545219")
def testControlFlowInitialization(self):
"""Expects an error if an initializer is in a control-flow scope."""
def cond(i, _):
return i < 10
def body(i, _):
zero = array_ops.zeros([], dtype=dtypes.int32)
v = resource_variable_ops.ResourceVariable(initial_value=zero)
return (i + 1, v.read_value())
with self.assertRaisesRegexp(ValueError, "initializer"):
control_flow_ops.while_loop(cond, body, [0, 0])
def testVariableEager(self):
with context.eager_mode():
init = array_ops.ones(shape=[10, 20, 35], dtype=dtypes.int32)
constraint = lambda x: x
with ops.name_scope("foo"):
v = resource_variable_ops.ResourceVariable(
name="var7",
initial_value=init,
caching_device="cpu:0",
constraint=constraint)
# Test properties
self.assertEqual(dtypes.int32, v.dtype)
self.assertEqual("foo/var7:0", v.name)
self.assertAllEqual([10, 20, 35], v.shape.as_list())
self.assertIsInstance(v.handle, ops.EagerTensor)
self.assertEqual(constraint, v.constraint)
self.assertAllEqual(init.numpy(), v.read_value().numpy())
self.assertAllEqual(init.numpy(), v.value().numpy())
# Callable init.
callable_init = lambda: init * 2
v2 = resource_variable_ops.ResourceVariable(
initial_value=callable_init, name="var7")
self.assertEqual("var7:0", v2.name)
self.assertAllEqual(2 * init.numpy(), v2.read_value().numpy())
# Test assign_add.
new_v2_val = v2.assign_add(v.read_value())
self.assertAllEqual(v.read_value().numpy() * 3, new_v2_val.numpy())
# Test assign_sub.
new_v2_val = v2.assign_sub(v.read_value())
self.assertAllEqual(v.read_value().numpy() * 2, new_v2_val.numpy())
# Test assign.
v2.assign(v.read_value())
self.assertAllEqual(v.read_value().numpy(), v2.read_value().numpy())
# Test load
v2.load(2 * v.read_value())
self.assertAllEqual(2 * v.read_value().numpy(), v2.read_value().numpy())
# Test convert_to_tensor
t = ops.convert_to_tensor(v)
self.assertAllEqual(t.numpy(), v.read_value().numpy())
# Test operations
self.assertAllEqual((v * 2).numpy(), (v + v).numpy())
def testContainerEager(self):
with context.eager_mode():
v1 = resource_variable_ops.ResourceVariable(initial_value=lambda: 1,
name="same")
with ops.container("different"):
v2 = resource_variable_ops.ResourceVariable(initial_value=lambda: 0,
name="same")
v2.assign(2)
self.assertEqual(1, v1.read_value().numpy())
self.assertEqual(2, v2.read_value().numpy())
def testDestruction(self):
with context.eager_mode():
var = resource_variable_ops.ResourceVariable(initial_value=1.0,
name="var8")
var_handle = var._handle
del var
with self.assertRaisesRegexp(errors.NotFoundError,
r"Resource .* does not exist."):
resource_variable_ops.destroy_resource_op(var_handle,
ignore_lookup_error=False)
def testScatterUpdate(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="update")
state_ops.scatter_update(v, [1], [3.0])
self.assertAllEqual([1.0, 3.0], v.numpy())
def testScatterAddStateOps(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="add")
state_ops.scatter_add(v, [1], [3])
self.assertAllEqual([1.0, 5.0], v.numpy())
def testScatterSubStateOps(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="sub")
state_ops.scatter_sub(v, [1], [3])
self.assertAllEqual([1.0, -1.0], v.numpy())
def testScatterUpdateVariant(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable([
list_ops.empty_tensor_list(
element_dtype=dtypes.float32, element_shape=[])
])
v.scatter_update(
ops.IndexedSlices(
list_ops.tensor_list_from_tensor([1., 2.], element_shape=[]), 0))
self.assertAllEqual(
list_ops.tensor_list_get_item(v[0], 0, element_dtype=dtypes.float32),
1.)
def testGroupDoesntForceRead(self):
with ops.Graph().as_default():
v = resource_variable_ops.ResourceVariable(1.0)
assign = v.assign_add(1.0)
g = control_flow_ops.group([assign])
self.assertEqual(g.control_inputs[0].type, "AssignAddVariableOp")
def testScatterNdAddStateOps(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(
[1, 2, 3, 4, 5, 6, 7, 8], dtype=dtypes.float32, name="add")
indices = constant_op.constant([[4], [3], [1], [7]], dtype=dtypes.int32)
updates = constant_op.constant([9, 10, 11, 12], dtype=dtypes.float32)
expected = np.array([1, 13, 3, 14, 14, 6, 7, 20])
state_ops.scatter_nd_add(v, indices, updates)
self.assertAllClose(expected, v.numpy())
@test_util.run_in_graph_and_eager_modes
def testUnreadVariableInsideFunction(self):
v = resource_variable_ops.ResourceVariable(1.0)
@def_function.function
def assign():
v.assign(1.0)
graph = assign.get_concrete_function().graph
self.assertTrue(all(x.type != "ReadVariableOp"
for x in graph.get_operations()))
def testScatterNdSubStateOps(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(
[1, 2, 3, 4, 5, 6, 7, 8], dtype=dtypes.float32, name="sub")
indices = constant_op.constant([[4], [3], [1], [7]], dtype=dtypes.int32)
updates = constant_op.constant([9, 10, 11, 12], dtype=dtypes.float32)
expected = np.array([1, -9, 3, -6, -4, 6, 7, -4])
state_ops.scatter_nd_sub(v, indices, updates)
self.assertAllClose(expected, v.numpy())
def testScatterUpdateCast(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="update")
state_ops.scatter_update(v, [1], [3])
self.assertAllEqual([1.0, 3.0], v.numpy())
@test_util.run_in_graph_and_eager_modes
def testScatterUpdateInvalidArgs(self):
v = resource_variable_ops.ResourceVariable([0, 1, 2, 3], name="update")
# The exact error and message differ between graph construction (where the
# error is realized during shape inference at graph construction time) and
# eager execution (where the error is realized during kernel execution).
with self.assertRaisesRegexp(Exception, r"shape.*2.*3"):
state_ops.scatter_update(v, [0, 1], [0, 1, 2])
@test_util.run_in_graph_and_eager_modes
def testAssignIncompatibleShape(self):
v = resource_variable_ops.ResourceVariable([0, 1, 2, 3])
self.evaluate(v.initializer)
pattern = re.compile("shapes must be equal", re.IGNORECASE)
with self.assertRaisesRegexp(Exception, pattern):
self.evaluate(v.assign_add(1))
@test_util.run_in_graph_and_eager_modes
@test_util.run_v1_only("b/120545219")
def testCopyToGraphUninitialized(self):
v = resource_variable_ops.ResourceVariable([0, 1, 2, 3])
copy_to_graph = ops.Graph()
with copy_to_graph.as_default(): # Intentionally testing v1 behavior
copied = resource_variable_ops.copy_to_graph_uninitialized(v)
self.assertEqual(v.name, copied.name)
self.assertIsNone(copied.initializer)
def create_variant_shape_and_type_data(self):
variant_shape_and_type_data = (
cpp_shape_inference_pb2.CppShapeInferenceResult.HandleData())
variant_shape_and_type_data.is_set = True
stored_shape = tensor_shape.TensorShape([None, 4]).as_proto()
stored_dtype = dtypes.float32.as_datatype_enum
# NOTE(ebrevdo): shape_and_type lacks append() in some versions of protobuf.
variant_shape_and_type_data.shape_and_type.extend([
cpp_shape_inference_pb2.CppShapeInferenceResult.HandleShapeAndType(
shape=stored_shape, dtype=stored_dtype)])
return variant_shape_and_type_data
@def_function.function
def create_constant_variant(self, value):
value = constant_op.constant(
tensor_pb2.TensorProto(
dtype=dtypes.variant.as_datatype_enum,
tensor_shape=tensor_shape.TensorShape([]).as_proto(),
variant_val=[
tensor_pb2.VariantTensorDataProto(
# Match registration in variant_op_registry.cc
type_name=b"int",
metadata=np.array(value, dtype=np.int32).tobytes())
]))
return value
# TODO(ebrevdo): Add run_in_graph_and_eager_modes once we can create
# EagerTensor constants with TensorProto inputs.
@test_util.run_in_graph_and_eager_modes()
def testVariantInitializer(self):
variant_shape_and_type_data = self.create_variant_shape_and_type_data()
value = self.create_constant_variant(3)
initializer = array_ops.fill([3], value)
resource_variable_ops._set_handle_shapes_and_types( # pylint: disable=protected-access
initializer, variant_shape_and_type_data,
graph_mode=not context.executing_eagerly())
v = resource_variable_ops.ResourceVariable(initializer)
read = array_ops.identity(v)
read_variant_shape_and_type = (
resource_variable_ops.get_eager_safe_handle_data(read))
self.assertEqual(
read_variant_shape_and_type, variant_shape_and_type_data)
gather = v.sparse_read([0])
gather_variant_shape_and_type = (
resource_variable_ops.get_eager_safe_handle_data(gather))
self.assertEqual(
gather_variant_shape_and_type, variant_shape_and_type_data)
# Make sure initializer runs.
if not context.executing_eagerly():
self.evaluate(v.initializer)
self.evaluate(read.op)
self.evaluate(gather.op)
@parameterized.parameters([
# batch_dims=0 (equivalent to tf.gather)
dict( # 2D indices
batch_dims=0,
params=[6, 7, 8, 9],
indices=[[2, 1], [0, 3]],
expected=[[8, 7], [6, 9]]),
dict( # 3D indices
batch_dims=0,
params=[6, 7, 8, 9],
indices=[[[3, 1], [2, 0]], [[0, 3], [2, 2]]],
expected=[[[9, 7], [8, 6]], [[6, 9], [8, 8]]]),
dict( # 4D indices
batch_dims=0,
params=[8, 9],
indices=[[[[0, 1], [1, 0]], [[0, 0], [1, 1]]],
[[[1, 1], [0, 0]], [[0, 1], [1, 0]]]],
expected=[[[[8, 9], [9, 8]], [[8, 8], [9, 9]]],
[[[9, 9], [8, 8]], [[8, 9], [9, 8]]]]),
# batch_dims=indices.shape.ndims - 1 (equivalent to
# tf.compat.v1.batch_gather)
dict( # 2D indices (1 batch dim)
batch_dims=1,
params=[[10, 11, 12, 13], [20, 21, 22, 23]],
indices=[[2, 1], [0, 3]],
expected=[[12, 11], [20, 23]]),
dict( # 3D indices (2 batch dims)
batch_dims=2,
params=[[[100, 101], [110, 111]], [[200, 201], [210, 211]]],
indices=[[[0, 1], [1, 0]], [[0, 0], [1, 1]]],
expected=[[[100, 101], [111, 110]], [[200, 200], [211, 211]]]),
dict( # 2D indices (1 batch dim)
batch_dims=1,
params=[[10, 11, 12, 13], [20, 21, 22, 23]],
indices=[[2, 1], [0, 3]],
expected=[[12, 11], [20, 23]]),
dict( # 3D indices (2 batch dims)
batch_dims=2,
params=[[[100, 101], [110, 111]], [[200, 201], [210, 211]]],
indices=[[[0, 1], [1, 0]], [[0, 0], [1, 1]]],
expected=[[[100, 101], [111, 110]], [[200, 200], [211, 211]]]),
# 0 < batch_dims < indices.shape.ndims - 1
dict( # 3D indices (1 batch dim)
batch_dims=1,
params=[[10, 11, 12, 13], [20, 21, 22, 23]],
indices=[[[3, 1], [2, 0]], [[0, 3], [2, 2]]],
expected=[[[13, 11], [12, 10]], [[20, 23], [22, 22]]]),
dict( # 4D indices (1 batch dim)
batch_dims=1,
params=[[6, 7], [8, 9]],
indices=[[[[0, 1], [1, 0]], [[0, 0], [1, 1]]],
[[[1, 1], [0, 0]], [[0, 1], [1, 0]]]],
expected=[[[[6, 7], [7, 6]], [[6, 6], [7, 7]]],
[[[9, 9], [8, 8]], [[8, 9], [9, 8]]]]),
dict( # 4D indices (2 batch dims)
batch_dims=2,
params=[[[2, 3], [4, 5]], [[6, 7], [8, 9]]],
indices=[[[[0, 1], [1, 0]], [[0, 0], [1, 1]]],
[[[1, 1], [0, 0]], [[0, 1], [1, 0]]]],
expected=[[[[2, 3], [3, 2]], [[4, 4], [5, 5]]],
[[[7, 7], [6, 6]], [[8, 9], [9, 8]]]]),
])
@test_util.run_in_graph_and_eager_modes
def testGatherWithBatchDims(self, params, indices, batch_dims, expected):
var = resource_variable_ops.ResourceVariable(params, name="var0")
with ops.control_dependencies([var.initializer]):
result = resource_variable_ops.resource_gather(
var.handle, indices, dtype=var.dtype, batch_dims=batch_dims)
self.assertAllEqual(expected, result)
@parameterized.parameters([
dict(
params_shape=[2, 3, 4, 5, 6, 7],
indices_shape=[2, 3, 8, 9, 10],
batch_dims=0,
output_shape=[2, 3, 8, 9, 10, 3, 4, 5, 6, 7]
# = indices.shape + params.shape[1:]
),
dict(
params_shape=[2, 3, 4, 5, 6, 7],
indices_shape=[2, 3, 8, 9, 10],
batch_dims=1,
output_shape=[2, 3, 8, 9, 10, 4, 5, 6, 7]
# = params.shape[:1] + indices.shape[1:] + params.shape[2:]
),
dict(
params_shape=[2, 3, 4, 5, 6, 7],
indices_shape=[2, 3, 8, 9, 10],
batch_dims=2,
output_shape=[2, 3, 8, 9, 10, 5, 6, 7]
# = params.shape[:2] + indices.shape[2:] + params.shape[3:]
),
dict(
params_shape=[2, 3, 4, 5, 6, 7],
indices_shape=[2, 3, 4, 9, 10],
batch_dims=3,
output_shape=[2, 3, 4, 9, 10, 6, 7]
# = params.shape[:3] + indices.shape[3:] + params.shape[4:]
),
dict(
params_shape=[2, 3, 4, 5, 6, 7],
indices_shape=[2, 3, 4, 5, 10],
batch_dims=4,
output_shape=[2, 3, 4, 5, 10, 7]
# = params.shape[:4] + indices.shape[4:] + params.shape[5:]
),
])
@test_util.run_in_graph_and_eager_modes
def testGatherWithBatchDimsMatchesTensor(self, params_shape, indices_shape,
batch_dims, output_shape):
"""Checks that gather with batch_dims returns the correct shape."""
# Generate a `params` tensor with the indicated shape.
params_size = np.prod(params_shape)
params = np.reshape(np.arange(params_size, dtype=np.int32), params_shape)
# Generate an `indices` tensor with the indicated shape, where each index
# is within the appropriate range.
indices_size = np.prod(indices_shape)
indices = np.reshape(np.arange(indices_size, dtype=np.int32), indices_shape)
indices = indices % params_shape[batch_dims]
var = resource_variable_ops.ResourceVariable(params, name="var0")
with ops.control_dependencies([var.initializer]):
expected = array_ops.gather(
var.read_value(), indices, batch_dims=batch_dims)
result = resource_variable_ops.resource_gather(
var.handle, indices, dtype=var.dtype, batch_dims=batch_dims)
self.assertAllEqual(output_shape, result.shape.as_list())
self.assertAllEqual(expected, result)
@parameterized.parameters([
dict(dtype=dtypes.bool),
dict(dtype=dtypes.int64),
dict(dtype=dtypes.half),
dict(dtype=dtypes.float32),
dict(dtype=dtypes.double),
])
@test_util.run_gpu_only
@test_util.run_in_graph_and_eager_modes
def testGatherWithDTypes(self, dtype):
if dtype == dtypes.bool:
params = constant_op.constant([False, True, False, True])
expected = constant_op.constant([[False, True], [False, True]])
else:
params = constant_op.constant([6, 7, 8, 9], dtype=dtype)
expected = constant_op.constant([[8, 7], [6, 9]], dtype=dtype)
indices = constant_op.constant([[2, 1], [0, 3]])
var = resource_variable_ops.ResourceVariable(params, name="var0")
with ops.control_dependencies([var.initializer]):
result = resource_variable_ops.resource_gather(
var.handle, indices, dtype=dtype)
self.assertAllEqual(expected, result)
if __name__ == "__main__":
test.main()
| apache-2.0 | -2,747,261,271,606,647,300 | 41.360897 | 91 | 0.642029 | false |
johncadigan/CategoryGenerator | db_hyponym_trees.py | 1 | 2540 | import os
CURRENT_DIR = os.path.dirname(__file__)
###Default Settings
DATA_DIR = 'data'
COUNTS_FILE = 'word-totals.txt'
WHITE_LIST = 'whitelist.csv'
DEFAULT_LIMIT = 50000
DEFAULT_DEPTH = 5
DEFAULT_SYNSETS = 3
##### DB Dependent variables
MYSQL_URL = 'mysql://user:password@host/database?charset=utf8'
from sqlalchemy import *
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, aliased
from tree import Tree
from hyponym_generator import HyponymGenerator
from model import *
class DBTree(Tree):
def write(self, child, parent): #Overriden function for db version
Session = sessionmaker(bind=create_engine(MYSQL_URL))
DBSession = Session()
parent_category = DBSession.query(Category).filter(Category.name==parent).first()
child_category = Category(name=child)
child_category.parent = parent_category
DBSession.add(child_category)
DBSession.commit()
queue = self[child].fpointer
if self[child].expanded:
for element in queue:
self.write(element, child_category.name) # recursive call
class DBGenerator(HyponymGenerator):
def __init__(self):
self.frequency_limit = DEFAULT_LIMIT
self.depth_limit = DEFAULT_DEPTH
self.synset_limit = DEFAULT_SYNSETS
#Add only relevant word frequencies
data_dir = os.path.join(CURRENT_DIR, DATA_DIR)
unigram_file = os.path.join(data_dir, COUNTS_FILE)
with open(unigram_file, "r") as unigrams:
unigrams = unigrams.readlines()
for unigram in unigrams:
word, frequency = unigram.split('\t')
frequency = int(frequency)
if frequency >= self.frequency_limit:
self.unigram_frequencies[word] = frequency
del unigrams
def set_tree(self): #Overriden function for db version
self.tree = DBTree()
def final_menu(self, word): #Overriden function for db version
Session = sessionmaker(bind=create_engine(MYSQL_URL))
DBSession = Session()
ppinput = "1"
while ppinput == "1":
pinput = raw_input("Please input the potential name of a grandparent in db to find parents\n")
parent = DBSession.query(Category).filter(Category.name == pinput).first()
descendants = DBSession.query(Category.name).filter(Category.left > parent.left).filter(Category.right < parent.right).all()
print "{0} \n \t {1}".format(parent.name, str(descendants))
ppinput = raw_input("Please input the name for tree's parent. Input 1 to look at other parts of database tree\n")
if ppinput != "1":
self.tree.write(child=word, parent=ppinput)
if __name__ == '__main__':
hg = DBGenerator()
hg.run_menus()
| mit | -2,648,622,299,959,671,000 | 30.75 | 127 | 0.720079 | false |
CTR-BFX/CambridgeHackathon | rRNA_MT_count.py | 1 | 4956 | #!/usr/bin/python
# Malwina Prater, [email protected], 2017, Copyright
# Centre for Trophoblast Research, University of Cambridge
#
# Script version: v01.
#
# Script to calculate the percent of transcripts mapping to rRNA
#
# INPUTS :
# 1. HTseq_counts file
# 2. Original reference transcriptome aligned to
#
# USAGE :
# For producing table(s) with rRNA and MT counts for each sample, use a command like this:
#
# ./rRNA_MT_count.py --gtf /Users/malwina/Documents/CTR-Data/genomes/Mus_musculus/mm10/Mus_musculus.GRCm38.84.gtf --htseq C17_3_S20_Aligned.out.srt.bam_htseq_combined_counts.txt
#
# import modules:
import os,sys
from optparse import OptionParser
import re
# parse in the user options:
parser = OptionParser(usage="%prog [-x Excel [-i imagefile] [-s squares]",
version="%prog 0.1")
parser.add_option("--htseq", dest="FileName", type="string", action="store")
parser.add_option("--gtf", dest="GTF", type="string", action="store")
(options, args) = parser.parse_args()
#files = sys.argv[]
HTSEQ_COUNTS = options.FileName
GTF = options.GTF
# check if files supplied exist:
try:
handle = open(GTF, "rU")
handle.close()
except:
print "\nError->\tGTF File: %s does not exist\n" % GTF
sys.exit()
try:
handle = open(HTSEQ_COUNTS, "rU")
handle.close()
except:
print "\nError->\tFile: %s does not exist\n" % HTSEQ_COUNTS
sys.exit()
#
# First job is to extract all the identifiers of genes/transcripts mapping to the rRNA and MT genes and store them in 2 dictionaries
#
rRNA_identifiers = {}
MT_identifiers = {}
with open(GTF, "rU") as handle:
#line = handle.readline()
for line in handle:
line.rstrip('\n')
if 'gene_biotype "rRNA"' in line:
identifier = line
identifier = re.sub('.*gene_id "', '', identifier)
identifier = re.sub('"; gene_version.*\n', '', identifier)
rRNA_identifiers[identifier] = 1
if 'MT' in line:
identifier = line
identifier = re.sub('.*gene_id "', '', identifier)
identifier = re.sub('"; gene_version.*\n', '', identifier)
MT_identifiers[identifier] = 1
handle.close()
#print("rRNA:")
#print(rRNA_identifiers.keys())
#print("MT:")
#print(MT_identifiers.keys())
#
# Second job is to go through the HTSEQ-couts and count reads matching the rRNA identifiers
#
Cummulative_rRNA_Count = 0
rRNA_genes = 0
ReadCount = 0
line_number = 0
MT_genes = 0;
Cummulative_MT_Count = 0;
with open(HTSEQ_COUNTS, "rU") as handle:
for line in handle:
line.rstrip('\n')
split_line = line.split("\t")
if line_number > 0:
if split_line[0] in rRNA_identifiers.keys(): # if rRNA_identifiers[gene_id]
rRNA_genes += 1
Cummulative_rRNA_Count += int(split_line[1])
if split_line[0] in MT_identifiers.keys():
MT_genes += 1
Cummulative_MT_Count += int(split_line[1])
ReadCount += int(split_line[1])
line_number += 1
handle.close()
#print(Cummulative_MT_Count)
#print(Cummulative_rRNA_Count)
#
# writing the output files:
#
out = HTSEQ_COUNTS + '_rRNAmtRNACounts.txt';
out = re.sub('.txt_', '_', out)
print "Summary output file: ", out, "\n"
OUT = open(out, "w")
OUT.write('HT-SEQ file name: \t' + HTSEQ_COUNTS + '\n\n')
OUT.write('GTF file name: \t\t' + GTF + '\n\n\n')
OUT.write('---------------------------------------------------------------------------------' + '\n')
OUT.write(' rRNA and MT identifiers\n')
OUT.write('---------------------------------------------------------------------------------' + '\n')
OUT.write('No. of rRNA identifiers: ' + str(len(rRNA_identifiers.keys())) + '\n') # PRINT size of this hash
OUT.write('No. of MT identifiers: ' + str(len(MT_identifiers.keys())) + '\n') # PRINT size of this hash
OUT.write('\n\n')
OUT.write('---------------------------------------------------------------------------------' + '\n')
OUT.write(' HTSEQ mapping summary\n')
OUT.write('---------------------------------------------------------------------------------' + '\n')
OUT.write('ReadCount: ' + str(ReadCount) + '\n\n')
#OUT.write(' Number of rRNA genes: ' + str(rRNA_genes) + '\n')
OUT.write('Total no. of rRNA transcripts: ' + str(Cummulative_rRNA_Count) + '\n')
perc_rRNA = 100*float(Cummulative_rRNA_Count)/float(ReadCount)
perc_rRNA = str(round(perc_rRNA, 3))
OUT.write('Percent rRNA mapped reads: ' + str(Cummulative_rRNA_Count) + ' / ' + str(ReadCount) + ' * 100 = ' + perc_rRNA + '%\n\n')
#OUT.write('\n Number of MT genes: ' + str(MT_genes) + '\n')
OUT.write('Total no. of MT transcripts: ' + str(Cummulative_MT_Count) + '\n')
perc_MT = 100*float(Cummulative_MT_Count)/float(ReadCount)
perc_MT = str(round(perc_MT, 3))
OUT.write('Percent MT mapped reads: ' + str(Cummulative_MT_Count) + ' / ' + str(ReadCount) + ' * 100 = ' + perc_MT + '%\n\n')
OUT.close()
| gpl-3.0 | -3,703,518,415,631,307,300 | 32.04 | 180 | 0.583535 | false |
codeofdusk/ProjectMagenta | src/update/update.py | 1 | 5042 | from logging import getLogger
logger = getLogger('update')
import contextlib
import io
import os
import platform
import requests
import tempfile
from wxUI import commonMessageDialogs
import widgetUtils
import webbrowser
try:
import czipfile as zipfile
except ImportError:
import zipfile
from platform_utils import paths
def perform_update(endpoint, current_version, app_name='', password=None, update_available_callback=None, progress_callback=None, update_complete_callback=None):
requests_session = create_requests_session(app_name=app_name, version=current_version)
available_update = find_update(endpoint, requests_session=requests_session)
if not available_update:
logger.debug("No update available")
return False
available_version = float(available_update['current_version'])
if not float(available_version) > float(current_version) or platform.system()+platform.architecture()[0][:2] not in available_update['downloads']:
logger.debug("No update for this architecture")
return False
available_description = available_update.get('description', None)
update_url = available_update ['downloads'][platform.system()+platform.architecture()[0][:2]]
logger.info("A new update is available. Version %s" % available_version)
donation()
if callable(update_available_callback) and not update_available_callback(version=available_version, description=available_description): #update_available_callback should return a falsy value to stop the process
logger.info("User canceled update.")
return
base_path = tempfile.mkdtemp()
download_path = os.path.join(base_path, 'update.zip')
update_path = os.path.join(base_path, 'update')
downloaded = download_update(update_url, download_path, requests_session=requests_session, progress_callback=progress_callback)
extracted = extract_update(downloaded, update_path, password=password)
bootstrap_path = move_bootstrap(extracted)
execute_bootstrap(bootstrap_path, extracted)
logger.info("Update prepared for installation.")
if callable(update_complete_callback):
update_complete_callback()
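# Illustrative call (a sketch only; the endpoint URL, version number and app name
# below are placeholders, not values defined by this module):
#
#   perform_update('https://example.com/app/update.json', current_version=0.85,
#                  app_name='MyApp',
#                  progress_callback=lambda downloaded, total: None)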
def create_requests_session(app_name=None, version=None):
user_agent = ''
session = requests.session()
if app_name:
user_agent = ' %s/%r' % (app_name, version)
session.headers['User-Agent'] = session.headers['User-Agent'] + user_agent
return session
def find_update(endpoint, requests_session):
response = requests_session.get(endpoint)
response.raise_for_status()
content = response.json()
return content
def download_update(update_url, update_destination, requests_session, progress_callback=None, chunk_size=io.DEFAULT_BUFFER_SIZE):
total_downloaded = total_size = 0
with io.open(update_destination, 'w+b') as outfile:
download = requests_session.get(update_url, stream=True)
total_size = int(download.headers.get('content-length', 0))
logger.debug("Total update size: %d" % total_size)
download.raise_for_status()
for chunk in download.iter_content(chunk_size):
outfile.write(chunk)
total_downloaded += len(chunk)
if callable(progress_callback):
call_callback(progress_callback, total_downloaded, total_size)
logger.debug("Update downloaded")
return update_destination
def extract_update(update_archive, destination, password=None):
"""Given an update archive, extracts it. Returns the directory to which it has been extracted"""
with contextlib.closing(zipfile.ZipFile(update_archive)) as archive:
if password:
archive.setpassword(password)
archive.extractall(path=destination)
logger.debug("Update extracted")
return destination
def move_bootstrap(extracted_path):
working_path = os.path.abspath(os.path.join(extracted_path, '..'))
if platform.system() == 'Darwin':
extracted_path = os.path.join(extracted_path, 'Contents', 'Resources')
downloaded_bootstrap = os.path.join(extracted_path, bootstrap_name())
new_bootstrap_path = os.path.join(working_path, bootstrap_name())
os.rename(downloaded_bootstrap, new_bootstrap_path)
return new_bootstrap_path
def execute_bootstrap(bootstrap_path, source_path):
arguments = r'"%s" "%s" "%s" "%s"' % (os.getpid(), source_path, paths.app_path(), paths.get_executable())
if platform.system() == 'Windows':
import win32api
win32api.ShellExecute(0, 'open', bootstrap_path, arguments, '', 5)
else:
import subprocess
make_executable(bootstrap_path)
subprocess.Popen(['%s %s' % (bootstrap_path, arguments)], shell=True)
logger.info("Bootstrap executed")
def bootstrap_name():
if platform.system() == 'Windows': return 'bootstrap.exe'
if platform.system() == 'Darwin': return 'bootstrap-mac.sh'
return 'bootstrap-lin.sh'
def make_executable(path):
import stat
st = os.stat(path)
os.chmod(path, st.st_mode | stat.S_IEXEC)
def call_callback(callback, *args, **kwargs):
# try:
callback(*args, **kwargs)
# except:
# logger.exception("Failed calling callback %r with args %r and kwargs %r" % (callback, args, kwargs))
def donation():
dlg = commonMessageDialogs.donation()
if dlg == widgetUtils.YES:
webbrowser.open_new_tab("http://twblue.es/?q=donate") | gpl-2.0 | 3,068,892,833,140,432,000 | 39.344 | 211 | 0.754859 | false |
txomon/SpockBot | spock/plugins/helpers/physics.py | 1 | 6091 | """
PhysicsPlugin is planned to provide vectors and tracking necessary to implement
SMP-compliant client-side physics for entities. Primarily this will be used to
keep the client position updated for gravity/knockback/water-flow etc. But it should
also eventually provide functions to track other entities affected by SMP
physics
Minecraft client/player physics is unfortunately very poorly documented. Most of
these values are based off experimental results and the contributions of a
handful of people (Thank you 0pteron!) to the Minecraft wiki talk page on
Entities and Transportation. Ideally someone will decompile the client with MCP
and document the totally correct values and behaviors.
"""
# Gravitational constants defined in blocks/(client tick)^2
PLAYER_ENTITY_GAV = 0.08
THROWN_ENTITY_GAV = 0.03
RIDING_ENTITY_GAV = 0.04
BLOCK_ENTITY_GAV = 0.04
ARROW_ENTITY_GAV = 0.05
# Air drag constants defined in 1/tick
PLAYER_ENTITY_DRG = 0.02
THROWN_ENTITY_DRG = 0.01
RIDING_ENTITY_DRG = 0.05
BLOCK_ENTITY_DRG = 0.02
ARROW_ENTITY_DRG = 0.01
# Player ground acceleration isn't actually linear, but we're going to pretend
# that it is. Max ground velocity for a walking client is 0.215 blocks/tick, it
# takes a dozen or so ticks to get close to max velocity. Sprint is 0.28, just
# apply more acceleration to reach a higher max ground velocity
PLAYER_WLK_ACC = 0.15
PLAYER_SPR_ACC = 0.20
PLAYER_GND_DRG = 0.41
# Seems about right, not based on anything
PLAYER_JMP_ACC = 0.45
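# Illustrative note on how these constants combine (mirrors check_collision and
# the drag helpers below): while airborne each tick does roughly
#   vel_y = (vel_y - PLAYER_ENTITY_GAV) * (1 - PLAYER_ENTITY_DRG)
# and horizontal speed decays each tick as vel_x *= (1 - PLAYER_GND_DRG).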
import logging
import math
from spock.mcmap import mapdata
from spock.plugins.base import PluginBase
from spock.utils import BoundingBox, Position, pl_announce
from spock.vector import Vector3
logger = logging.getLogger('spock')
class PhysicsCore(object):
def __init__(self, vec, pos):
self.vec = vec
self.pos = pos
def jump(self):
if self.pos.on_ground:
self.pos.on_ground = False
self.vec += Vector3(0, PLAYER_JMP_ACC, 0)
def walk(self, angle, radians=False):
if not radians:
angle = math.radians(angle)
z = math.cos(angle) * PLAYER_WLK_ACC
x = math.sin(angle) * PLAYER_WLK_ACC
self.vec += Vector3(x, 0, z)
def sprint(self, angle, radians=False):
if not radians:
angle = math.radians(angle)
z = math.cos(angle) * PLAYER_SPR_ACC
x = math.sin(angle) * PLAYER_SPR_ACC
self.vec += Vector3(x, 0, z)
@pl_announce('Physics')
class PhysicsPlugin(PluginBase):
requires = ('Event', 'ClientInfo', 'World')
events = {
'physics_tick': 'tick',
}
def __init__(self, ploader, settings):
super(PhysicsPlugin, self).__init__(ploader, settings)
self.vec = Vector3(0.0, 0.0, 0.0)
# wiki says 0.6 but I made it 0.8 to give a little wiggle room
self.playerbb = BoundingBox(0.8, 1.8)
self.pos = self.clientinfo.position
ploader.provides('Physics', PhysicsCore(self.vec, self.pos))
def tick(self, _, __):
self.check_collision()
self.apply_horizontal_drag()
self.apply_vector()
def check_collision(self):
cb = Position(math.floor(self.pos.x), math.floor(self.pos.y),
math.floor(self.pos.z))
if self.block_collision(cb, y=2): # we check +2 because above my head
self.vec.y = 0
if self.block_collision(cb, y=-1): # we check below feet
self.pos.on_ground = True
self.vec.y = 0
self.pos.y = cb.y
else:
self.pos.on_ground = False
self.vec -= Vector3(0, PLAYER_ENTITY_GAV, 0)
self.apply_vertical_drag()
# feet or head collide with x
if self.block_collision(cb, x=1) or \
self.block_collision(cb, x=-1) or \
self.block_collision(cb, y=1, x=1) or \
self.block_collision(cb, y=1, x=-1):
self.vec.x = 0
# replace with real info in event
self.event.emit("phy_collision", "x")
# feet or head collide with z
if self.block_collision(cb, z=1) or \
self.block_collision(cb, z=-1) or \
self.block_collision(cb, y=1, z=1) or \
self.block_collision(cb, y=1, z=-1):
self.vec.z = 0
# replace with real info in event
self.event.emit("phy_collision", "z")
def block_collision(self, cb, x=0, y=0, z=0):
block_id, meta = self.world.get_block(cb.x + x, cb.y + y, cb.z + z)
block = mapdata.get_block(block_id, meta)
if block is None:
return False
# possibly we want to use the centers of blocks as the starting
# points for bounding boxes instead of 0,0,0 this might make thinks
# easier when we get to more complex shapes that are in the center
# of a block aka fences but more complicated for the player uncenter
# the player position and bump it up a little down to prevent
# colliding in the floor
pos1 = Position(self.pos.x - self.playerbb.w / 2, self.pos.y - 0.2,
self.pos.z - self.playerbb.d / 2)
bb1 = self.playerbb
bb2 = block.bounding_box
if bb2 is not None:
pos2 = Position(cb.x + x + bb2.x, cb.y + y + bb2.y,
cb.z + z + bb2.z)
if ((pos1.x + bb1.w) >= (pos2.x) and (pos1.x) <= (
pos2.x + bb2.w)) and (
(pos1.y + bb1.h) >= (pos2.y) and (pos1.y) <= (
pos2.y + bb2.h)) and (
(pos1.z + bb1.d) >= (pos2.z) and (pos1.z) <= (
pos2.z + bb2.d)):
return True
return False
def apply_vertical_drag(self):
self.vec.y -= self.vec.y * PLAYER_ENTITY_DRG
def apply_horizontal_drag(self):
self.vec.x -= self.vec.x * PLAYER_GND_DRG
self.vec.z -= self.vec.z * PLAYER_GND_DRG
def apply_vector(self):
p = self.pos
p.x = p.x + self.vec.x
p.y = p.y + self.vec.y
p.z = p.z + self.vec.z
| mit | 4,147,684,726,026,790,000 | 35.915152 | 79 | 0.602036 | false |
why2pac/dp-tornado | example/model/tests/model_test/db_test/mysql.py | 1 | 6056 | # -*- coding: utf-8 -*-
from dp_tornado.engine.model import Model as dpModel
class MysqlModel(dpModel):
def index(self):
self.model.tests.schema_test.migrate()
@property
def parent_test_id(self):
return 100
def test(self):
datetime_of_birth = self.helper.datetime.now()
self.execute_test_tuple(datetime_of_birth)
assert self.scalar_test_tuple_name() == 'James'
assert self.scalar_test_tuple_birth_year() == 1988
dt_tuple_value = self.helper.datetime.tuple(datetime_of_birth)
dt_tuple_fetch = self.helper.datetime.tuple(self.scalar_test_tuple_birth_datetime())
assert dt_tuple_value[0:-1] == dt_tuple_fetch[0:-1]
datetime_of_birth = self.helper.datetime.date.now()
self.execute_test_dict(datetime_of_birth)
assert self.scalar_test_dict() == 1989
parent = self.row_test()
dt_tuple_value = self.helper.datetime.tuple(datetime_of_birth)
dt_tuple_fetch = self.helper.datetime.tuple(parent['datetime_of_birth'])
assert dt_tuple_value == dt_tuple_fetch
assert self.row_test()['parent_name'] == 'James'
assert self.rows_test()[0]['parent_name'] == 'James'
assert self.transaction_succ_test()
assert not self.transaction_fail_test()
def execute_test_tuple(self, datetime_of_birth):
return self.execute("""
INSERT INTO `parents`
(`parent_id`, `parent_name`, `parent_type`, `year_of_birth`, `datetime_of_birth`)
VALUES (%s, %s, %s, %s, %s)
ON DUPLICATE KEY UPDATE
`parent_name` = VALUES(`parent_name`),
`parent_type` = VALUES(`parent_type`),
`year_of_birth` = VALUES(`year_of_birth`),
`datetime_of_birth` = VALUES(`datetime_of_birth`)
""", (self.parent_test_id, 'James', 'FATHER', 1988, datetime_of_birth), 'tests.model_test/drv_mysql_test')
def execute_test_dict(self, datetime_of_birth):
params = {
'parent_id': self.parent_test_id,
'parent_name': 'James',
'parent_type': 'FATHER',
'year_of_birth': 1989,
'datetime_of_birth': datetime_of_birth}
return self.execute("""
INSERT INTO `parents`
(`parent_id`, `parent_name`, `parent_type`, `year_of_birth`, `datetime_of_birth`)
VALUES (%(parent_id)s, %(parent_name)s, %(parent_type)s, %(year_of_birth)s, %(datetime_of_birth)s)
ON DUPLICATE KEY UPDATE
`parent_name` = VALUES(`parent_name`),
`parent_type` = VALUES(`parent_type`),
`year_of_birth` = VALUES(`year_of_birth`),
`datetime_of_birth` = VALUES(`datetime_of_birth`)
""", params, 'tests.model_test/drv_mysql_test')
def scalar_test_tuple_name(self):
return self.scalar("""
SELECT
`parent_name`
FROM
`parents`
WHERE
`parent_id` = %s
""", self.parent_test_id, 'tests.model_test/drv_mysql_test')
def scalar_test_tuple_birth_year(self):
return self.scalar("""
SELECT
`year_of_birth`
FROM
`parents`
WHERE
`parent_id` = %s
""", self.parent_test_id, 'tests.model_test/drv_mysql_test')
def scalar_test_tuple_birth_datetime(self):
return self.scalar("""
SELECT
`datetime_of_birth`
FROM
`parents`
WHERE
`parent_id` = %s
""", self.parent_test_id, 'tests.model_test/drv_mysql_test')
def scalar_test_dict(self):
params = {
'parent_id': self.parent_test_id,
'parent_type': 'FATHER'
}
return self.scalar("""
SELECT
`year_of_birth`
FROM
`parents`
WHERE
`parent_id` = %(parent_id)s AND
`parent_type` = %(parent_type)s
""", params, 'tests.model_test/drv_mysql_test')
def row_test(self):
return self.row("""
SELECT
`parent_name`, `parent_type`, `year_of_birth`, `datetime_of_birth`
FROM
`parents`
WHERE
`parent_id` = %s
""", self.parent_test_id, 'tests.model_test/drv_mysql_test')
def rows_test(self):
return self.rows("""
SELECT
`parent_name`, `parent_type`
FROM
`parents`
WHERE
`parent_id` = %s
""", self.parent_test_id, 'tests.model_test/drv_mysql_test')
def transaction_succ_test(self):
tran = self.begin('tests.model_test/drv_mysql_test')
try:
tran.execute("""
INSERT INTO `parents`
(`parent_id`, `parent_name`, `parent_type`)
VALUES (%s, %s, %s)
ON DUPLICATE KEY UPDATE
`parent_name` = VALUES(`parent_name`),
`parent_type` = VALUES(`parent_type`)
""", (self.parent_test_id + 1, 'James', 'FATHER'))
tran.commit()
return True
except Exception as e:
tran.rollback()
return False
def transaction_fail_test(self):
tran = self.begin('tests.model_test/drv_mysql_test')
try:
tran.execute("""
INSERT INTO `childs`
(`parent_id`, `child_name`, `child_type`)
VALUES (%s, %s, %s)
""", (self.parent_test_id + 9999, 'Kim', 'MOTHER'))
tran.commit()
return True
except Exception as e:
tran.rollback()
return False
| mit | 3,780,059,883,511,069,700 | 32.458564 | 118 | 0.498184 | false |
jhpyle/docassemble | docassemble_base/docassemble/base/mako/doc/build/conf.py | 1 | 9486 | # -*- coding: utf-8 -*-
#
# Mako documentation build configuration file
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../..'))
sys.path.insert(0, os.path.abspath('.'))
import mako
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
#extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode',
# 'sphinx.ext.doctest', 'builder.builders']
extensions = ['sphinx.ext.autodoc','sphinx.ext.intersphinx',
'changelog', 'sphinx_paramlinks',
'builder.builders']
changelog_render_ticket = "https://bitbucket.org/zzzeek/mako/issue/%s/"
changelog_render_pullreq = {
"bitbucket": "https://bitbucket.org/zzzeek/mako/pull-request/%s",
"default": "https://bitbucket.org/zzzeek/mako/pull-request/%s",
"github": "https://github.com/zzzeek/mako/pull/%s",
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']
nitpicky = True
site_base = "http://www.makotemplates.org"
# The suffix of source filenames.
source_suffix = '.rst'
template_bridge = "builder.builders.MakoBridge"
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Mako'
copyright = u'the Mako authors and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = mako.__version__
# The full version, including alpha/beta/rc tags.
release = mako.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'default.css'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "%s %s Documentation" % (project, release)
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%m/%d/%Y %H:%M:%S'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_domain_indices = False
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Makodoc'
#autoclass_content = 'both'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'mako_%s.tex' % release.replace('.', '_'), r'Mako Documentation',
r'Mike Bayer', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
# sets TOC depth to 2.
latex_preamble = '\setcounter{tocdepth}{3}'
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
#latex_elements = {
# 'papersize': 'letterpaper',
# 'pointsize': '10pt',
#}
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'mako', u'Mako Documentation',
[u'Mako authors'], 1)
]
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'Mako'
epub_author = u'Mako authors'
epub_publisher = u'Mako authors'
epub_copyright = u'Mako authors'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
intersphinx_mapping = {
'dogpilecache':('http://dogpilecache.readthedocs.org/en/latest', None),
'beaker':('http://beaker.readthedocs.org/en/latest',None),
}
| mit | -6,735,364,583,275,619,000 | 31.047297 | 80 | 0.700822 | false |
Skeletrox/usb-backend-pinut | file_upload/fileupload/USBFinder.py | 1 | 5689 | import os, inspect, json, re #needed for os files
from django.conf import settings
from glob import glob #Needed for directories
import subprocess #Running lsusb
import getpass #used for getuser()
import time #temp fix; used to sleep
from stat import * #imports stats like ST_SIZE
import threading #Multithreading
from shutil import copy2 #Copies files
process = None
staticFileLocRoot = settings.CONTENT_ROOT
data_folder = settings.USB_DIR
extns = settings.ACCEPTED_EXTNS
def get_usb_name():
lsblk_out = subprocess.check_output("lsblk", shell=True)
lsblk_list = lsblk_out.split('\n')
media_dir = None
for line in lsblk_list:
if '/media/' in line:
media_loc = line.index('/media/')
media_dir = line[media_loc:].strip()
return media_dir
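# Illustrative note: the loop above keys on the mountpoint column of lsblk, so a
# matching line looks roughly like
#   sdb1    8:17   1  7.5G  0 part /media/usb0
# in which case get_usb_name() returns '/media/usb0'.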
def verify(device_mnt):
blkid_out = subprocess.check_output("blkid", shell=True)
blkid_list = blkid_out.split('\n')
for line in blkid_list:
if ("/dev/" + device_mnt) in line:
return check_if_line_usb(line)
def check_if_line_usb(line):
UUID_beg = line.index('UUID') + 5
UUID_end = line.find('\"', UUID_beg+1)
print str(UUID_end - UUID_beg)
if UUID_end - UUID_beg == 10:
return True
return False
def transfer_file(file):
print "file " + file + "staticFileLocRoot " + staticFileLocRoot
index=file.rfind('/')
file_name=file[index+1:]
print "file_name " + file_name + "staticFileLocRoot " + staticFileLocRoot
sendString = "cp " + file + " " + staticFileLocRoot + file_name
proc = subprocess.Popen (sendString, shell=True)
proc.communicate()[0]
return proc.returncode
def attemptMount():
lsblk_out = subprocess.check_output("lsblk", shell=True)
lsblk_list = lsblk_out.split('\n')
media_dir = None
devmnt_regex = r'([s][d][a-zA-Z][0-9]+)'
for line in lsblk_list:
if '/media/' in line:
media_loc = line.index('/media/')
media_dir = line[media_loc:].strip()
try:
media_mntpnt = re.findall(devmnt_regex, line)[0]
except:
return None
is_needed = verify(media_mntpnt)
if is_needed:
break
if media_dir is None:
return None
try:
os.chdir(media_dir + '/' + data_folder)
except:
return None
temps = [name for name in os.listdir(".")]
print 'Temporary files are ' + str(temps)
files = []
for root, subfolders, usb_files in os.walk("."):
for name in usb_files:
if (not os.path.isdir(name)):
if(name.endswith(tuple(extns))):
#if (not os.path.isdir(name)) and (name[-5:] == '.data' or name == 'content.json'):
files.append(os.path.join(root, name))
return files
def main():
#enableAutoMount()
df = subprocess.check_output("lsusb", stderr=subprocess.STDOUT) #subprocess prints to stderr for some reason, making it think stdout is stderr
oldDeviceList = df.split("\n") #gets list of previously connected usb devices
while True:
df = subprocess.check_output("lsusb", stderr=subprocess.STDOUT) #do it again
newDeviceList = df.split('\n') #store in a NEW list
if len(newDeviceList) > len(oldDeviceList): #new usb device inserted!
for line in newDeviceList:
if line not in oldDeviceList: #this points to the newer device we have attached
IDAnchor = line.index("ID")
line = line[IDAnchor:] #slice off unwanted line info [such as bus information]
print ("You have attached " + line) #debug purposes
time.sleep(3) #prevents python from attempting to access the files before the OS itself, might need to be increased
attemptMount() #attempt mounting the device
if len(newDeviceList) < len(oldDeviceList): #some USB device has been removed!
for line in oldDeviceList:
if line not in newDeviceList:
IDAnchor = line.index("ID")
line = line[IDAnchor:]
print ("You have removed " + line)
attemptRemoval()
oldDeviceList = list(newDeviceList) #allows for the loop to function properly
if __name__ == '__main__':
main()
| apache-2.0 | -8,428,731,860,846,621,000 | 49.794643 | 198 | 0.465108 | false |
pyfa-org/eos | eos/eve_obj/buff_template.py | 1 | 1741 | # ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from eos.util.repr import make_repr_str
class WarfareBuffTemplate:
def __init__(
self,
buff_id=None,
affectee_filter=None,
affectee_filter_extra_arg=None,
affectee_attr_id=None,
operator=None,
aggregate_mode=None
):
self.buff_id = buff_id
self.affectee_filter = affectee_filter
self.affectee_filter_extra_arg = affectee_filter_extra_arg
self.affectee_attr_id = affectee_attr_id
self.operator = operator
self.aggregate_mode = aggregate_mode
# Auxiliary methods
def __repr__(self):
spec = [
'buff_id',
'affectee_filter',
'affectee_filter_extra_arg',
'affectee_attr_id',
'operator',
'aggregate_mode']
return make_repr_str(self, spec)
| lgpl-3.0 | -2,537,342,386,661,229,600 | 32.480769 | 80 | 0.593912 | false |
SteveNguyen/poppy-software | poppytools/primitive/dance.py | 1 | 2812 | import numpy
import pypot.primitive
def sinus(ampl,t,freq=0.5, phase=0, offset=0):
pi = numpy.pi
return ampl * numpy.sin(freq * 2.0 * pi * t + phase * pi / 180.0 ) + offset
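# A quick illustration of the helper above: sinus(10, t, freq=1.0) oscillates
# between -10 and 10 with a period of 1 (in units of t); phase is interpreted in
# degrees and offset shifts the whole curve up or down.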
class SimpleBodyBeats(pypot.primitive.LoopPrimitive):
'''
Simple primitive to make Poppy shake its booty following a given beat rate in bpm.
'''
def __init__(self, poppy_robot, bpm, motion_amplitude=10):
pypot.primitive.LoopPrimitive.__init__(self, poppy_robot, 50)
self.poppy_robot = poppy_robot
self._bpm = bpm
self.amplitude = motion_amplitude
self.frequency = bpm / 60.0
self.pi = numpy.pi
for m in self.poppy_robot.motors:
m.moving_speed = 50.0
def update(self):
t = self.elapsed_time
amp = self._amplitude
freq = self.frequency
self.poppy_robot.head_y.goal_position = sinus(amp / 2.0, t, freq)
self.poppy_robot.head_z.goal_position = sinus(amp / 2.0, t, freq / 2.0)
self.poppy_robot.bust_x.goal_position = sinus(amp / 6.0, t, freq / 2.0) + sinus(amp / 6.0, t, freq / 4.0)
self.poppy_robot.abs_x.goal_position = - sinus(amp / 8.0, t, freq / 4.0) + sinus(amp / 6.0, t, freq / 4.0)
self.poppy_robot.l_shoulder_y.goal_position = sinus(amp / 3.0, t, freq / 2.0)
self.poppy_robot.r_shoulder_y.goal_position = - sinus(amp / 3.0, t, freq / 2.0)
self.poppy_robot.r_elbow_y.goal_position = sinus(amp / 2.0, t, freq, offset=-20)
self.poppy_robot.l_elbow_y.goal_position = sinus(amp / 2.0, t, freq / 2.0, offset=-20)
@property
def bpm(self):
return self._bpm
@bpm.setter
def bpm(self, new_bpm):
'''
        Allows changing the beat rate while the motion is playing
'''
self._bpm = new_bpm
self.frequency = self._bpm / 60.0
@property
def amplitude(self):
return self._amplitude
@amplitude.setter
def amplitude(self, new_amp):
self._amplitude = new_amp
if __name__ == '__main__':
import time
import pypot.robot
from poppytools.configuration.config import poppy_config
from poppytools.primitive.basic import StandPosition
# create the robot from the configuration file
poppy = pypot.robot.from_config(poppy_config)
poppy.start_sync()
# Init robot position
poppy.attach_primitive(StandPosition(poppy),'stand')
poppy.stand.start()
poppy.stand.wait_to_stop()
# Create dancing primitive
bpm = 100
poppy.attach_primitive(SimpleBodyBeats(poppy, bpm), 'beats' )
poppy.beats.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
poppy.beats.stop()
poppy.stand.start()
poppy.stand.wait_to_stop()
break
| gpl-3.0 | 6,003,110,220,526,650,000 | 27.989691 | 114 | 0.607041 | false |
fracpete/wekamooc | moredataminingwithweka/class-2.1.py | 1 | 2401 | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# More Data Mining with Weka - Class 2.1
# Copyright (C) 2014 Fracpete (fracpete at gmail dot com)
# Use the WEKAMOOC_DATA environment variable to set the location
# for the datasets
import os
data_dir = os.environ.get("WEKAMOOC_DATA")
if data_dir is None:
data_dir = "." + os.sep + "data"
import os
import weka.core.jvm as jvm
from weka.core.converters import Loader
from weka.core.classes import Random
from weka.filters import Filter
from weka.classifiers import Classifier, Evaluation
jvm.start()
# load ionosphere
fname = data_dir + os.sep + "ionosphere.arff"
print("\nLoading dataset: " + fname + "\n")
loader = Loader(classname="weka.core.converters.ArffLoader")
data = loader.load_file(fname)
data.class_is_last()
for equal in ["", "-F"]:
print("\nEqual frequency binning? " + str(equal == "-F") + "\n")
for bins in [0, 40, 10, 5, 2]:
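        # bins == 0 is the baseline run: the data is used undiscretized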
if bins > 0:
fltr = Filter(classname="weka.filters.unsupervised.attribute.Discretize", options=["-B", str(bins), equal])
fltr.inputformat(data)
filtered = fltr.filter(data)
else:
filtered = data
cls = Classifier(classname="weka.classifiers.trees.J48")
# cross-validate
evl = Evaluation(filtered)
evl.crossvalidate_model(cls, filtered, 10, Random(1))
# build classifier on full dataset
cls.build_classifier(filtered)
# get size of tree from model strings
lines = str(cls).split("\n")
nodes = "N/A"
for line in lines:
if line.find("Size of the tree :") > -1:
nodes = line.replace("Size of the tree :", "").strip()
# output stats
print("bins=%i accuracy=%0.1f nodes=%s" % (bins, evl.percent_correct, nodes))
jvm.stop()
| gpl-3.0 | 7,711,363,147,533,410,000 | 36.515625 | 119 | 0.669304 | false |
ntduong/ML | DecisionTree/treepredict.py | 1 | 6510 | '''
Created on Feb 21, 2013
@author: Administrator
'''
from collections import defaultdict
from math import log
def readDataFromFile(filename='decision_tree_example.txt'):
with open(filename, 'rt') as f:
data = []
for line in f:
data.append(line.strip().split('\t'))
return data
def uniquecounts(rows):
results = defaultdict(int)
for row in rows:
r = row[len(row)-1]
results[r] += 1
return results
def gini_impurity(rows):
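    # Gini impurity: the probability that two rows drawn at random carry
    # different labels, i.e. the sum of p_i * p_j over distinct label pairs.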
total = len(rows)
counts = uniquecounts(rows)
imp = 0
for k1 in counts:
p1 = float(counts[k1])/total
for k2 in counts:
if k1 == k2: continue
p2 = float(counts[k2])/total
imp += p1*p2
return imp
def entropy(rows):
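    # Shannon entropy of the label distribution: -sum(p * log2(p))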
log2 = lambda x: log(x)/log(2)
results = uniquecounts(rows)
ent = 0.0
total = len(rows)
for r in results:
p = float(results[r])/total
ent -= p*log2(p)
return ent
def divide_set(rows, col, value):
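    # Numeric columns are split on "value or greater", all other types on equality.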
split_function = None
if isinstance(value, int) or isinstance(value, float):
split_function = lambda row: row[col] >= value
else:
split_function = lambda row: row[col] == value
set1 = [row for row in rows if split_function(row)]
set2 = [row for row in rows if not split_function(row)]
return (set1, set2)
class treenode(object):
def __init__(self, col=-1, value=None, results=None, tb=None, fb=None):
self.col = col
self.value = value
self.results = results
self.fb = fb
self.tb = tb
def buildtree(rows, score_function=entropy):
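    # Greedy recursive partitioning (CART-style): try every (column, value) split,
    # keep the one with the largest impurity reduction and recurse on both halves;
    # when no split improves the score, return a leaf holding the class counts.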
if len(rows) == 0: return treenode()
current_score = score_function(rows)
best_gain = 0.0
best_criteria = None
best_sets = None
column_cnt = len(rows[0])-1 # excluding the last column
for col in range(0, column_cnt):
col_values = {}
for row in rows:
col_values[row[col]] = 1
for value in col_values:
(set1, set2) = divide_set(rows, col, value)
len1 = len(set1)
total = len(rows)
p = float(len1)/total
gain = current_score - p*score_function(set1) - (1-p)*score_function(set2)
if gain > best_gain and len(set1) > 0 and len(set2) > 0:
best_gain = gain
best_criteria = (col, value)
best_sets = (set1, set2)
if best_gain > 0:
trueBranch = buildtree(best_sets[0], score_function)
falseBranch = buildtree(best_sets[1], score_function)
return treenode(col=best_criteria[0], value=best_criteria[1], tb=trueBranch, fb=falseBranch)
else:
return treenode(results=uniquecounts(rows))
def print_tree(node, indent=''):
if node.results != None:
print str(node.results)
else:
print str(node.col) + ':' + str(node.value) + '?'
print indent + 'T->',
print_tree(node.tb, indent+' ')
print indent + 'F->',
print_tree(node.fb, indent+' ')
def getWidth(node):
if node.tb == None and node.fb == None:
return 1
return getWidth(node.fb) + getWidth(node.tb)
def getHeight(node):
if node.tb == None and node.fb == None:
return 1
return getHeight(node.tb) + getHeight(node.fb) + 1
from PIL import Image, ImageDraw
def drawNode(draw, node, x, y):
if node.results == None:
w1 = getWidth(node.fb)*100
w2 = getWidth(node.tb)*100
left = x-(w1+w2)/2
right = x+(w1+w2)/2
draw.text((x-20,y-10),str(node.col)+':'+str(node.value),(0,0,0))
draw.line((x, y, left+w1/2, y+100), fill=(255,0,0))
draw.line((x, y, right-w2/2, y+100), fill=(255,0,0))
drawNode(draw, node.fb, left+w1/2, y+100)
drawNode(draw, node.tb, right-w2/2, y+100)
else:
txt = ' \n'.join(['%s:%d' %v for v in node.results.items()])
draw.text((x-20,y), txt, (0,0,0))
def drawTree(node, jpeg='tree.jpg'):
w = getWidth(node)*100
h = getHeight(node)*100+120
img = Image.new('RGB', (w,h), (255,255,255))
draw = ImageDraw.Draw(img)
drawNode(draw, node, w/2, 20)
img.save(jpeg, 'JPEG')
def classify(observation, node):
if node.results != None:
return node.results
else:
v = observation[node.col]
branch = None
if isinstance(v,int) or isinstance(v,float):
if v >= node.value: branch = node.tb
else: branch = node.fb
else:
if v == node.value: branch = node.tb
else: branch = node.fb
return classify(observation, branch)
def prune(node, mingain):
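    # Bottom-up pruning: if merging two sibling leaves increases entropy by less
    # than mingain, collapse them back into their parent node.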
if node.tb.results == None:
prune(node.tb, mingain)
if node.fb.results == None:
prune(node.fb, mingain)
if node.tb.results != None and node.fb.results != None:
tb, fb = [], []
for v, c in node.tb.results.items():
tb.extend([[v]]*c)
for v, c in node.fb.results.items():
fb.extend([[v]]*c)
delta = entropy(tb+fb) - (entropy(tb) + entropy(fb))/2
if delta < mingain:
node.tb, node.fb = None, None
node.results = uniquecounts(tb+fb)
def missing_value_classify(observation, node):
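    # Like classify(), but when the tested attribute is missing both branches are
    # followed and their results are weighted by the relative size of each branch.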
if node.results != None:
return node.results
else:
v = observation[node.col]
if v == None:
tr, fr = missing_value_classify(observation, node.tb), missing_value_classify(observation, node.fb)
tcount = sum(tr.values())
fcount = sum(fr.values())
tw = float(tcount)/(tcount+fcount)
fw = 1-tw
result = {}
for k,v in tr.items():
result[k] = v*tw
for k,v in fr.items():
if k not in result: result[k] = 0
result[k] += v*fw
return result
else:
if isinstance(v, int) or isinstance(v, float):
if v >= node.value: branch = node.tb
else: branch = node.fb
else:
if v == node.value: branch = node.tb
else: branch = node.fb
return missing_value_classify(observation, branch)
if __name__ == '__main__':
data = readDataFromFile()
root = buildtree(data)
print missing_value_classify(['google',None,'yes',None], root) | mit | 9,111,104,916,089,302,000 | 29.85782 | 111 | 0.541782 | false |
futuresimple/triggear | tests/config/test_triggear_config.py | 1 | 3457 | import os
from typing import Dict
import pytest
import yaml
from mockito import when, expect
from app.config.triggear_config import TriggearConfig
from app.clients.jenkins_client import JenkinsInstanceConfig
pytestmark = pytest.mark.asyncio
@pytest.mark.usefixtures('unstub')
class TestTriggearConfig:
VALID_CREDS_DATA = {
'jenkins_instances': [{
'url': 'URL',
'user': 'USER',
'token': 'JENKINS_TOKEN',
}],
'github_token': 'GITHUB_TOKEN',
'triggear_token': 'TRIGGEAR_TOKEN'
}
async def test__when_creds_path_is_invalid__should_raise_file_not_found_error(self):
when(os).getenv('CREDS_PATH', 'creds.yml').thenReturn('does/not/exist')
with pytest.raises(FileNotFoundError) as file_not_found_error:
TriggearConfig().read_credentials_file()
assert str(file_not_found_error.value) == "[Errno 2] No such file or directory: 'does/not/exist'"
@pytest.mark.parametrize("yaml_data, missing_key", [
({}, 'jenkins_instances'),
({'jenkins_instances': []}, 'github_token'),
({'jenkins_instances': [], 'github_token': ''}, 'triggear_token')
])
async def test__when_any_key_is_missing_in_creds_file__should_raise_proper_key_error(self, yaml_data: Dict, missing_key: str):
when(os).getenv('CREDS_PATH', 'creds.yml').thenReturn('./tests/config/example_configs/creds.yaml')
when(yaml).load(any).thenReturn(yaml_data)
with pytest.raises(KeyError) as key_error:
TriggearConfig().read_credentials_file()
assert str(key_error.value) == f"'{missing_key}'"
async def test__when_yaml_files_are_valid__should_store_proper_values(self):
when(os).getenv('CREDS_PATH', 'creds.yml').thenReturn('./tests/config/example_configs/creds.yaml')
triggear_config = TriggearConfig()
assert triggear_config.triggear_token == 'TRIGGEAR_TOKEN'
assert triggear_config.github_token == 'GITHUB_TOKEN'
first_instance_url = "https://JENKINS_URL/"
second_instance_url = "https://ci.triggear.com/"
assert set(triggear_config.jenkins_instances.keys()) == {first_instance_url, second_instance_url}
first_instance: JenkinsInstanceConfig = triggear_config.jenkins_instances.get(first_instance_url)
second_instance: JenkinsInstanceConfig = triggear_config.jenkins_instances.get(second_instance_url)
assert first_instance.url == first_instance_url
assert first_instance.username == 'JENKINS_USER'
assert first_instance.token == 'JENKINS_USER_API_TOKEN'
assert second_instance.url == second_instance_url
assert second_instance.username == "other_user"
assert second_instance.token == "other_api_token"
async def test__when_properties_are_not_set__setter_is_called(self):
triggear_config = TriggearConfig()
expect(triggear_config).read_credentials_file().thenReturn(('gh_token', 'token', {}))
assert triggear_config.github_token == 'gh_token'
triggear_config = TriggearConfig()
expect(triggear_config).read_credentials_file().thenReturn(('token', 'triggear_token', {}))
assert triggear_config.jenkins_instances == {}
triggear_config = TriggearConfig()
expect(triggear_config).read_credentials_file().thenReturn(('token', 'triggear_token', {}))
assert triggear_config.triggear_token == 'triggear_token'
| mit | 4,146,779,010,753,406,000 | 43.320513 | 130 | 0.666763 | false |
eknowles/CV | app.py | 1 | 2096 | import os
from flask import Flask, render_template, send_from_directory
from calendar_parser import CalendarParser
# initialization
app = Flask(__name__)
app.config.update(
DEBUG=True,
)
events = {}
# settings
ics_url = "https://www.google.com/calendar/ical/88kil28s7t686h1p5aoem6ui24%40group.calendar.google.com/public/basic.ics"
class Event(object):
name = ''
location = ''
start_time = None
end_time = None
description = ''
clean_dates = ''
def tidytime(start, end):
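    # Collapse a start/end datetime pair into one human readable date range,
    # omitting the parts (day, month, year) that both dates share.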
output = ''
if start.day + 1 == end.day:
sameday = True
else:
sameday = False
if start.month == end.month:
samemonth = True
else:
samemonth = False
if start.year == end.year:
sameyear = True
else:
sameyear = False
if sameyear and samemonth and sameday:
output = start.strftime("%A, %d %B %Y")
elif sameyear and samemonth and not sameday:
output = start.strftime("%A, %d-") + end.strftime("%d %B %Y")
elif sameyear and not samemonth:
output = start.strftime("%d %B - ") + end.strftime("%d %B %Y")
    else:
        # fall back to a full date range when the years differ
        output = start.strftime("%d %B %Y - ") + end.strftime("%d %B %Y")
    return output
# controllers
@app.route('/favicon.ico')
def favicon():
return send_from_directory(os.path.join(app.root_path, 'static'), 'ico/favicon.ico')
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
@app.route("/")
def index():
return render_template('index.html')
@app.route("/update")
def reset_events():
global events
event_list = []
cal = CalendarParser(ics_url=ics_url)
for event in cal.parse_calendar():
NewEvent = Event()
NewEvent.name = event["name"]
NewEvent.location = event["location"]
NewEvent.start_time = event["start_time"]
NewEvent.end_time = event["end_time"]
NewEvent.description = event["description"]
NewEvent.clean_dates = tidytime(event["start_time"], event["end_time"])
event_list.append(NewEvent)
event_list.sort(key=lambda r: r.start_time)
k = 0
for event in event_list:
events[k] = event
k += 1
# print events
return render_template('reset.html', events=events)
# launch
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
app.run(host='127.0.0.1', port=port) | gpl-2.0 | -1,447,502,892,584,952,000 | 22.043956 | 120 | 0.68416 | false |
hzlf/openbroadcast | website/djangorestframework/tests/views.py | 1 | 1564 | from django.conf.urls.defaults import patterns, url
from django.test import TestCase
from django.test import Client
urlpatterns = patterns('djangorestframework.utils.staticviews',
url(r'^robots.txt$', 'deny_robots'),
url(r'^favicon.ico$', 'favicon'),
url(r'^accounts/login$', 'api_login'),
url(r'^accounts/logout$', 'api_logout'),
)
class ViewTests(TestCase):
"""Test the extra views djangorestframework provides"""
urls = 'djangorestframework.tests.views'
def test_robots_view(self):
"""Ensure the robots view exists"""
response = self.client.get('/robots.txt')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'text/plain')
def test_favicon_view(self):
"""Ensure the favicon view exists"""
response = self.client.get('/favicon.ico')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'image/vnd.microsoft.icon')
def test_login_view(self):
"""Ensure the login view exists"""
response = self.client.get('/accounts/login')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'].split(';')[0], 'text/html')
def test_logout_view(self):
"""Ensure the logout view exists"""
response = self.client.get('/accounts/logout')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'].split(';')[0], 'text/html')
# TODO: Add login/logout behaviour tests
| gpl-3.0 | 8,763,692,571,406,726,000 | 35.372093 | 78 | 0.658568 | false |
iLoop2/ResInsight | ThirdParty/Ert/devel/python/python/ert_gui/viewer/slice_viewer.py | 1 | 8469 | import os
from OpenGL.GL import *
from PyQt4.QtCore import Qt
from PyQt4.QtGui import QApplication, QMainWindow, QDockWidget
import sys
import traceback
from ert.ecl import EclTypeEnum, EclKW, EclGrid
from ert.ecl.faults import FaultCollection
from ert.geo.xyz_io import XYZIo
from ert_gui.viewer import Texture3D, Bounds, SliceViewer, SliceSettingsWidget, Texture1D
def loadGrid(path, load_actnum=True):
""" @rtype: EclGrid """
with open(path, "r") as f:
specgrid = EclKW.read_grdecl(f, "SPECGRID", ecl_type=EclTypeEnum.ECL_INT_TYPE, strict=False)
zcorn = EclKW.read_grdecl(f, "ZCORN")
coord = EclKW.read_grdecl(f, "COORD")
actnum = None
if load_actnum:
actnum = EclKW.read_grdecl(f, "ACTNUM", ecl_type=EclTypeEnum.ECL_INT_TYPE)
mapaxes = EclKW.read_grdecl(f, "MAPAXES")
grid = EclGrid.create(specgrid, zcorn, coord, actnum, mapaxes=mapaxes)
return grid
def loadKW(keyword, ecl_type, path):
""" @rtype: EclKW """
with open(path, "r") as f:
kw_data = EclKW.read_grdecl(f, keyword, ecl_type=ecl_type)
return kw_data
def loadGridData(path):
grid = loadGrid(path)
nx, ny, nz, nactive = grid.dims
print(nx, ny, nz)
bounds = Bounds()
grid_data = []
index = 0
for z in range(nz):
for y in range(ny):
for x in range(nx):
# x, y, z = grid.get_corner_xyz(0, global_index=index)
x, y, z = grid.get_xyz(global_index=index)
active = grid.active(global_index=index)
if active:
active = 1.0
else:
active = 0.0
bounds.addPoint(x, y, z)
grid_data.append(x)
grid_data.append(y)
grid_data.append(z)
grid_data.append(active)
index += 1
print(bounds)
return nx, ny, nz, grid_data, bounds,grid
def loadKWData(path, keyword, ecl_type=EclTypeEnum.ECL_FLOAT_TYPE):
kw_data = loadKW(keyword, ecl_type, path)
print(kw_data.min, kw_data.max)
min_value = kw_data.min
data_range = kw_data.max - kw_data.min
result = []
for value in kw_data:
value = float(value - min_value) / data_range
result.append(value)
return result, data_range
def rgb(r, g, b):
return [r / 255.0, g / 255.0, b / 255.0, 1.0]
def createColorBrewerScale():
color_list = [rgb(141,211,199),
rgb(255,255,179),
rgb(190,186,218),
rgb(251,128,114),
rgb(128,177,211),
rgb(253,180,98),
rgb(179,222,105),
rgb(252,205,229),
rgb(217,217,217),
rgb(188,128,189),
rgb(204,235,197),
rgb(255,237,111)]
colors = [component for color in color_list for component in color]
return Texture1D(len(colors) / 4, colors)
def createSeismicScale():
color_list = [rgb(0, 0, 255), rgb(255, 255, 255), rgb(255, 0, 0)]
colors = [component for color in color_list for component in color]
return Texture1D(len(colors) / 4, colors, wrap=GL_CLAMP_TO_EDGE)
def createLinearGreyScale():
color_list = [rgb(128, 128, 128), rgb(255, 255, 255)]
colors = [component for color in color_list for component in color]
return Texture1D(len(colors) / 4, colors, wrap=GL_CLAMP_TO_EDGE)
def createRainbowScale():
color_list = [rgb(200, 0, 255), rgb(0, 0, 255), rgb(0, 255, 0), rgb(255, 255, 0), rgb(255, 127, 0), rgb(255, 0, 0)]
colors = [component for color in color_list for component in color]
return Texture1D(len(colors) / 4, colors, wrap=GL_CLAMP_TO_EDGE, internal_format=GL_RGBA8)
def createColorScales():
return {
"region_colors": createColorBrewerScale(),
"seismic": createSeismicScale(),
"linear_grey": createLinearGreyScale(),
"rainbow": createRainbowScale()
}
def loadFaults(grid , fault_file):
faults = FaultCollection( grid )
faults.load( fault_file )
try:
faults.load( fault_file )
except Exception as e:
traceback.print_tb(e)
print("Loading from fault file:%s failed" % fault_file)
return faults
def createDataStructures(grid_path=None, grid_data_path=None , polyline_root_path = None):
if grid_path is not None:
nx, ny, nz, grid_data, bounds, grid = loadGridData(grid_path)
data, data_range = loadKWData(grid_data_path, "FLTBLCK", ecl_type=EclTypeEnum.ECL_INT_TYPE)
faults = loadFaults( grid , os.path.join(polyline_root_path , "faults.grdecl"))
else:
# nx, ny, nz, grid_data, bounds = loadGridData("/Volumes/Statoil/data/faultregion/grid.grdecl")
# data, data_range = loadKWData("/Volumes/Statoil/data/faultregion/fltblck.grdecl", "FLTBLCK", ecl_type=EclTypeEnum.ECL_INT_TYPE)
nx, ny, nz, grid_data, bounds, grid = loadGridData("/Volumes/Statoil/data/TestCase/eclipse/include/example_grid_sim.GRDECL")
data, data_range = loadKWData("/Volumes/Statoil/data/TestCase/eclipse/include/example_permx.GRDECL", "PERMX", ecl_type=EclTypeEnum.ECL_FLOAT_TYPE)
faults = loadFaults( grid , os.path.join("/Volumes/Statoil/data/TestCase/eclipse/include" , "example_faults_sim.GRDECL"))
grid_texture = Texture3D(nx, ny, nz, grid_data, GL_RGBA32F, GL_RGBA)
attribute_texture = Texture3D(nx, ny, nz, data)
textures = {"grid": grid_texture,
"grid_data": attribute_texture}
return textures, bounds, nx, ny, nz, data_range , faults
def readPolylines(root_path):
polyline_files = ["pol1.xyz",
"pol2.xyz",
"pol3.xyz",
"pol4.xyz",
"pol5.xyz",
"pol6.xyz",
"pol7.xyz",
"pol8.xyz",
"pol9.xyz",
"pol10.xyz",
"pol11.xyz"]
polylines = []
if root_path is not None and os.path.exists(root_path):
for polyline_file in polyline_files:
path = os.path.join(root_path, polyline_file)
polyline = XYZIo.readXYZFile(path)
polylines.append(polyline)
return polylines
if __name__ == '__main__':
grid_path = None
grid_data_path = None
polyline_root_path = None
if len(sys.argv) == 4:
grid_path = sys.argv[1]
grid_data_path = sys.argv[2]
polyline_root_path = sys.argv[3]
app = QApplication(["Slice Viewer"])
window = QMainWindow()
window.resize(1024, 768)
textures, bounds, nx, ny, nz, data_range , faults = createDataStructures(grid_path, grid_data_path , polyline_root_path)
polylines = readPolylines(root_path=polyline_root_path)
color_scales = createColorScales()
textures["color_scale"] = color_scales[color_scales.keys()[0]]
viewer = SliceViewer(textures=textures, volume_bounds=bounds, color_scales=color_scales, data_range=data_range, polylines=polylines , faults = faults)
viewer.setSliceSize(width=nx, height=ny)
slice_settings = SliceSettingsWidget(max_slice_count=nz, color_scales=color_scales.keys())
slice_settings.inactiveCellsHidden.connect(viewer.hideInactiveCells)
slice_settings.currentSliceChanged.connect(viewer.setCurrentSlice)
slice_settings.toggleOrthographicProjection.connect(viewer.useOrthographicProjection)
slice_settings.toggleLighting.connect(viewer.useLighting)
slice_settings.colorScalesChanged.connect(viewer.changeColorScale)
slice_settings.regionToggling.connect(viewer.useRegionScaling)
slice_settings.toggleInterpolation.connect(viewer.useInterpolationOnData)
slice_settings.mirrorX.connect(viewer.mirrorX)
slice_settings.mirrorY.connect(viewer.mirrorY)
slice_settings.mirrorZ.connect(viewer.mirrorZ)
slice_settings.toggleFlatPolylines.connect(viewer.toggleFlatPolylines)
dock_widget = QDockWidget("Settings")
dock_widget.setObjectName("SliceSettingsDock")
dock_widget.setWidget(slice_settings)
dock_widget.setAllowedAreas(Qt.AllDockWidgetAreas)
dock_widget.setFeatures(QDockWidget.NoDockWidgetFeatures)
window.addDockWidget(Qt.LeftDockWidgetArea, dock_widget)
window.setCentralWidget(viewer)
window.show()
window.activateWindow()
window.raise_()
app.exec_()
| gpl-3.0 | 4,388,381,655,475,250,700 | 33.149194 | 154 | 0.627465 | false |
blade2005/dosage | scripts/mklanguages.py | 1 | 1450 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2004-2005 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2016 Tobias Gruetzmacher
'''update languages.py from pycountry'''
from __future__ import absolute_import, division, print_function
import os
import sys
import codecs
basepath = os.path.dirname(os.path.dirname(__file__))
sys.path.insert(0, basepath)
from dosagelib.scraper import get_scrapers # noqa
def main():
"""Update language information in dosagelib/languages.py."""
fn = os.path.join(basepath, 'dosagelib', 'languages.py')
encoding = 'utf-8'
with codecs.open(fn, 'w', encoding) as f:
f.write('# -*- coding: %s -*-%s' % (encoding, os.linesep))
f.write('# ISO 693-1 language codes from pycountry%s' % os.linesep)
f.write('# This file is automatically generated, DO NOT EDIT!%s' % os.linesep)
lang = get_used_languages()
write_languages(f, lang)
def get_used_languages():
lang = {}
for scraperobj in get_scrapers():
l = scraperobj.lang
if l not in lang:
lang[l] = scraperobj.language()
return lang
def write_languages(f, l):
"""Write language information."""
f.write("Languages = {%s" % os.linesep)
for lang in sorted(l):
f.write(" %r: %r,%s" % (lang, l[lang], os.linesep))
f.write("}%s" % os.linesep)
if __name__ == '__main__':
main()
| mit | -1,067,473,444,864,903,400 | 28.591837 | 86 | 0.631034 | false |
Petraea/jsonbot | jsb/plugs/core/core.py | 1 | 17821 | # jsb/plugs/core/core.py
#
#
""" core bot commands. """
## jsb imports
from jsb.lib.aliases import setalias
from jsb.lib.config import getmainconfig
from jsb.utils.statdict import StatDict
from jsb.utils.log import setloglevel, getloglevel
from jsb.utils.timeutils import elapsedstring
from jsb.utils.exception import handle_exception
from jsb.lib.commands import cmnds
from jsb.lib.examples import examples
from jsb.lib.plugins import plugs
from jsb.lib.boot import plugin_packages, getpluginlist, boot, getcmndtable, whatcommands, getshorttable
from jsb.lib.persist import Persist
from jsb.lib.reboot import reboot, reboot_stateful
from jsb.lib.eventhandler import mainhandler
from jsb.lib.fleet import getfleet
from jsb.lib.partyline import partyline
from jsb.lib.exit import globalshutdown
from jsb.lib.runner import defaultrunner, cmndrunner, longrunner, apirunner, threadrunner
from jsb.lib.errors import NoSuchPlugin
from jsb.utils.statdict import StatDict
from jsb.contrib.natural import date
from jsb.lib.threadloop import threadloops
from jsb.lib.threads import getname
## basic imports
import time
import threading
import sys
import re
import os
import copy
import cgi
import logging
import functools
## defines
cpy = copy.deepcopy
## reboot command
def handle_reboot(bot, ievent):
""" no arguments - reboot the bot. """
ievent.reply("rebooting")
#time.sleep(3)
if ievent.rest == "cold": stateful = False
else: stateful = True
if stateful:
if bot.type == "tornado":
callback = functools.partial(reboot_stateful, bot, ievent, getfleet(), partyline)
bot.server.io_loop.add_callback(callback)
else: mainhandler.put(0, reboot_stateful, bot, ievent, getfleet(), partyline)
else:
getfleet().exit()
mainhandler.put(0, reboot)
cmnds.add("reboot", handle_reboot, "OPER")
examples.add("reboot", "reboot the bot.", "reboot")
## ping command
def handle_ping(bot, event):
event.reply("pong")
cmnds.add("ping", handle_ping, ["OPER", "USER"])
examples.add("ping", "ping/pong", "ping")
## quit command
def handle_quit(bot, ievent):
""" no arguments - disconnect from the server. """
ievent.reply("quiting")
bot.exit()
cmnds.add("quit", handle_quit, "OPER")
examples.add("quit", "quit the bot.", "quit")
## encoding command
def handle_encoding(bot, ievent):
""" not arguments - show default encoding. """
ievent.reply('default encoding is %s' % bot.encoding or sys.getdefaultencoding())
cmnds.add('encoding', handle_encoding, ['USER', 'GUEST'])
examples.add('encoding', 'show default encoding', 'encoding')
## uptime command
def handle_uptime(bot, ievent):
""" no arguments - show uptime. """
ievent.reply("<b>uptime is %s</b>" % elapsedstring(time.time()-bot.starttime))
cmnds.add('uptime', handle_uptime, ['USER', 'GUEST'])
examples.add('uptime', 'show uptime of the bot', 'uptime')
## list command
def handle_available(bot, ievent):
""" no arguments - show available plugins .. to enable use !plug-enable. """
if ievent.rest: ievent.reply("%s plugin has the following commands: " % ievent.rest, whatcommands(ievent.rest))
else: ievent.reply("available plugins: ", getpluginlist(), raw=True) ; return
cmnds.add('list', handle_available, ['USER', 'GUEST'])
examples.add('list', 'list available plugins', 'list')
## commands command
def handle_commands(bot, ievent):
""" arguments: [<plugname>] - show commands of plugin. """
try: plugin = ievent.args[0].lower()
except IndexError: plugin = ""
result = []
cmnds = getcmndtable()
for cmnd, plugname in cmnds.iteritems():
if plugname:
if not plugin or plugin in plugname: result.append(cmnd)
if result:
result.sort()
if not plugin: plugin = "JSONBOT"
ievent.reply('%s has the following commands: ' % plugin, result)
else: ievent.reply('no commands found for plugin %s' % plugin)
cmnds.add('commands', handle_commands, ['USER', 'GUEST'])
examples.add('commands', 'show commands of <plugin>', '1) commands core')
## perm command
def handle_perm(bot, ievent):
""" arguments: <cmnd> - get permission of command. """
try:cmnd = ievent.args[0]
except IndexError:
ievent.missing("<cmnd>")
return
try: perms = cmnds.perms(cmnd)
except KeyError:
ievent.reply("no %sw command registered")
return
if perms: ievent.reply("%s command needs %s permission" % (cmnd, perms))
else: ievent.reply("can't find perm for %s" % cmnd)
cmnds.add('perm', handle_perm, ['USER', 'GUEST'])
examples.add('perm', 'show permission of command', 'perm quit')
## version command
def handle_version(bot, ievent):
""" no arguments - show bot's version. """
from jsb.version import getversion
version = getversion(bot.type.upper())
cfg = getmainconfig()
    if cfg.dbenable: version += " " + cfg.dbtype.upper()
    version2 = version
    if ievent.rest and ievent.rest == "repo":
try:
from mercurial import context, hg, node, repo, ui
repository = hg.repository(ui.ui(), '.')
ctx = context.changectx(repository)
tip = str(ctx.rev())
except: tip = None
if tip: version2 = version + " HG " + tip
else: version2 = version
ievent.reply(version2)
cmnds.add('version', handle_version, ['USER', 'GUEST'])
examples.add('version', 'show version of the bot', 'version')
setalias('v', "version")
## short command
def handle_short(bot, ievent):
""" arguments: <cmnd> - give short possibilities for a command. """
try: cmnd = ievent.args[0]
except IndexError: ievent.missing('<cmnd>') ; return
try: ievent.reply("short commands for %s" % cmnd, getshorttable()[cmnd])
except KeyError: ievent.reply("no commands found for %s" % cmnd)
cmnds.add('short', handle_short, ['USER', 'GUEST'])
examples.add('short', 'find full commands', 'short admin-exceptions')
## whereis command
def handle_whereis(bot, ievent):
""" arguments: <cmnd> - locate a command. """
try: cmnd = ievent.args[0]
except IndexError:
ievent.missing('<cmnd>')
return
cmnds.reloadcheck(bot, ievent, cmnd)
plugin = cmnds.whereis(cmnd)
if plugin: ievent.reply("%s command is in: %s" % (cmnd, plugin))
else: ievent.reply("can't find " + cmnd)
cmnds.add('whereis', handle_whereis, ['USER', 'GUEST'])
examples.add('whereis', 'show in which plugins <what> is', 'whereis test')
## help-plug command
def handle_helpplug(bot, ievent):
""" arguments: <plugname> - how help on plugin/command or show basic help msg. """
try: what = ievent.args[0]
except (IndexError, TypeError):
ievent.reply("available plugins: ", getpluginlist())
ievent.reply("see !help <plugin> to get help on a plugin.")
return
ievent.untildone = True
cmnds.reloadcheck(bot, ievent, what)
plugin = None
modname = ""
perms = []
for package in plugin_packages:
try:
modname = "%s.%s" % (package, what)
try:
plugin = plugs.load_mod(modname)
if plugin: break
except NoSuchPlugin: continue
except(KeyError, ImportError): pass
if not plugin:
ievent.reply("no %s plugin loaded" % what)
return
try: phelp = plugin.__doc__
except (KeyError, AttributeError):
ievent.reply('no description of %s plugin available' % what)
return
cmndresult = []
if phelp:
counter = 1
for i, j in cmnds.iteritems():
if what == j.plugname:
try:
descr = j.func.__doc__
if not descr: descr = "no description provided"
try: cmndresult.append(u" <b>!%s</b> - <i>%s</i> - perms: %s" % (i, descr, j.perms))
except KeyError: pass
except AttributeError: pass
counter += 1
if cmndresult and phelp:
res = []
for r in cmndresult:
if bot.type in ['web', ]: res.append("%s<br>" % r)
elif bot.type in ['irc', ]: res.append(r.strip())
else: res.append(r)
res.sort()
what = what.upper()
ievent.reply('<br><b>plugin %s: </b><br>%s' % (what, phelp))
ievent.reply("<b>commands: </b>", res, dot="count")
else:
if perms: ievent.reply('no commands available for permissions: %s' % ", ".join(perms))
else: ievent.reply("can't find help on %s" % what)
ievent.done(silent=True)
cmnds.add('help-plug', handle_helpplug, ['USER', 'GUEST'], how="msg")
examples.add('help-plug', 'get help on <plugin>', '1) help-plug 2) help-plug misc')
## help-site command
def handle_helpsite(bot, event):
""" arguments: <cmnd> - help commands that gives a url to the docs. """
if event.rest:
target = cmnds.whereis(event.rest)
target = target or event.rest
where = bot.plugs.getmodule(target)
if where:
theplace = os.sep.join(where.split(".")[-2:])
event.reply("help for %s is at http://jsonbot.org/plugins/%s.html" % (event.rest.upper(), theplace))
else: event.reply("can't find a help url for %s" % event.rest)
else:
event.reply("documentation for jsonbot can be found at http://jsonbot.org or http://jsonbot.appspot.com/docs")
event.reply('see !list for loaded plugins and "!help plugin" for a url to the plugin docs.')
cmndhelp = cmnds.gethelp(event.rest)
if cmndhelp: event.reply("<br><b>%s docstring: </b><br>" % event.rest, cmndhelp.split("\n"))
cmnds.add("help-site", handle_helpsite, ["OPER", "USER", "GUEST"])
examples.add("help-site", "show url pointing to the docs", "1) help 2) help rss")
## help command
def handle_help(bot, event):
""" arguments: [<cmndname or plugname>] - show help. """
if not event.args:
event.reply("documentation for jsonbot can be found at http://jsonbot.org")
event.reply('see !list for loaded plugins and "!help plugin" for help on the plugin.')
return
t = event.args[0]
logging.warn("help target is %s" % t)
cmnds.reloadcheck(bot, event, t)
where = cmnds.whereis(t)
cmndhelp = cmnds.gethelp(t)
if not cmndhelp:
ex = examples.get(t)
if ex: cmndhelp = ex.descr
if not cmndhelp: handle_helpplug(bot, event) ; return
try: ex = examples[t].example
except KeyError: ex = None
p = cmnds.perms(t)
res = "permissions: %s - location: %s - examples: %s" % (", ".join(p), where, ex)
event.reply('<br><b>%s command:</b> %s' % (event.rest, cmndhelp), res.split(" - "), dot="<br>")
cmnds.add("help", handle_help, ["OPER", "USER", "GUEST"])
examples.add("help", "show help of a command", "help rss-list")
## apro command
def handle_apro(bot, ievent):
""" arguments: <searchtxt> - apropos (search) for commands. """
try: what = ievent.args[0]
except IndexError:
ievent.missing('<what>')
return
result = cmnds.apropos(what)
result.sort()
if result:
ievent.reply("commands matching %s: " % what, result)
else: ievent.reply('no matching commands found for %s' % what)
cmnds.add('apro', handle_apro, ['USER', 'GUEST'])
examples.add('apro', 'apro <what> .. search for commands that contain <what>', 'apro com')
## whatcommands command
def handle_whatcommands(bot, ievent):
""" arguments: <permission. - show all commands with permission. """
if not ievent.rest:
ievent.missing('<perm>')
return
result = cmnds
res = []
for cmnd in result.values():
if cmnd and cmnd.perms and ievent.rest in cmnd.perms:
res.append(cmnd.cmnd)
res.sort()
if not res: ievent.reply('no commands known for permission %s' % ievent.rest)
else: ievent.reply('commands known for permission %s: ' % ievent.rest, res)
cmnds.add('whatcommands', handle_whatcommands, ['USER', 'GUEST'])
examples.add('whatcommands', 'show commands with permission <perm>', 'whatcommands USER')
## versions command
def handle_versions(bot, ievent):
""" no arguments - show versions of all loaded modules (if available). """
versions = {}
allowed = ["jsb.", ]
for mod in copy.copy(sys.modules):
for allow in allowed:
if mod.startswith(allow):
try: versions[mod] = sys.modules[mod].__version__
except AttributeError, ex: pass
try: versions['python'] = sys.version
except AttributeError, ex: pass
ievent.reply("versions ==> %s" % unicode(versions))
cmnds.add('versions', handle_versions, ['USER', 'GUEST'])
examples.add('versions', 'show versions of all loaded modules', 'versions')
## loglevel command
def handle_loglevel(bot, event):
""" arguments: <loglevel> - change loglevel of the bot. loglevel is on of debug, info, warn or error. """
if not event.rest: event.reply("loglevel is %s" % getloglevel()) ; return
from jsb.lib.config import getmainconfig
mainconfig = getmainconfig()
mainconfig.loglevel = event.rest
mainconfig.save()
#mainhandler.put(4, setloglevel, event.rest)
setloglevel(event.rest)
event.done()
cmnds.add("loglevel", handle_loglevel, "OPER")
examples.add("loglevel", "set loglevel ot on of debug, info, warning or error", "loglevel warn")
## activity command
def handle_activity(bot, ievent):
""" no arguments - show running threads. """
try: import threading
except ImportError:
ievent.reply("threading is not enabled.")
return
result = {}
todo = threadloops
for thread in threadloops:
name = "%s_%s" % (getname(type(thread)), thread.name)
try: result[name] = date.duration(thread.lastiter, plain=True)
except Exception, ex: logging.warn("%s - %s" % (name, str(ex)))
for b in getfleet().bots:
try: result[b.cfg.name] = date.duration(b.lastiter, plain=True)
        except Exception, ex: logging.warn("%s - %s" % (b.cfg.name, str(ex)))
ievent.reply("last iterations: ", result)
cmnds.add('activity', handle_activity, ['USER', 'GUEST'])
examples.add('activity', 'show time past last thread iteration', 'activity')
## threads command
def handle_threads(bot, ievent):
""" no arguments - show running threads. """
try: import threading
except ImportError:
ievent.reply("threading is not enabled.")
return
stats = StatDict()
threadlist = threading.enumerate()
for thread in threadlist: stats.upitem(thread.getName())
result = []
for item in stats.top(): result.append("%s = %s" % (item[0], item[1]))
result.sort()
ievent.reply("threads running: ", result)
cmnds.add('threads', handle_threads, ['USER', 'GUEST'])
examples.add('threads', 'show running threads', 'threads')
## loaded command
def handle_loaded(bot, event):
""" no arguments - show plugins in cache. """
res = []
for plug in plugs.keys(): res.append(plug.split(".")[-1])
event.reply("loaded plugins (cache): ", res)
cmnds.add('loaded', handle_loaded, ['USER', 'GUEST'])
examples.add('loaded', 'show list of loaded plugins', 'loaded')
## statusline command
def handle_statusline(bot, event):
""" no arguments - show a status line. """
event.reply("<b>controlchars:</b> %s - <b>perms:</b> %s" % (event.chan.data.cc, ", ".join(event.user.data.perms)))
cmnds.add('statusline', handle_statusline, ['USER', 'GUEST'])
examples.add('statusline', 'show status line', 'statusline')
## topper command
def handle_topper(bot, event):
""" no arguments - show a 'topper' startus line. """
event.reply("<b>forwards:</b> %s - <b>watched:</b> %s - <b>feeds:</b> %s" % (", ".join(event.chan.data.forwards) or "none", ", ".join(event.chan.data.watched) or "none", ", ".join([unicode(x) for x in event.chan.data.feeds]) or "none"))
cmnds.add('topper', handle_topper, ['USER', 'GUEST'])
examples.add('topper', 'show topper line', 'topper')
## running command
def handle_running(bot, event):
""" no arguments - show running tasks. """
event.reply("<b>callbacks:</b> %s - <b>commands:</b> %s - <b>longrunning:</b> %s - <b>apirunner:</b> %s - <b>threadrunner:</b> %s" % (defaultrunner.running(), cmndrunner.running(), longrunner.running(), apirunner.running(), threadrunner.running()))
cmnds.add('running', handle_running, ['USER', 'GUEST'])
examples.add('running', "show running tasks", "running")
## descriptions command
def handle_descriptions(bot, event):
""" no arguments - show descriptions of all plugins. """
bot.plugs.loadall()
result = []
target = bot.plugs.keys()
target.sort()
for modname in target:
plug = bot.plugs.get(modname)
if plug.__doc__: txt = plug.__doc__.replace("\n", "<br>")
else: txt = "no docstring available"
result.append("* %s plugin (%s) - %s" % (modname.split(".")[-1], modname, txt))
event.reply("descriptions: <br>", result, dot="<br><br>")
cmnds.add('descriptions', handle_descriptions, ['USER', 'GUEST'])
examples.add('descriptions', "show descriptions of all plugins", "descriptions")
## stats command
def handle_stats(bot, event):
totalstats = StatDict()
counter = 0
for modname in sys.modules:
if modname.startswith("jsb"):
plugname = modname.split(".")[-1]
if event.args and plugname not in event.args: continue
try: modstats = getattr(sys.modules[modname], "stats")
except AttributeError: continue
totalstats += modstats
counter += 1
event.reply("stats results from %s modules: " % counter, totalstats)
cmnds.add("stats", handle_stats, ["OPER", "USER"])
examples.add("stats", "show stats for all JSONBOT modules/plugins or specify a module/plugin", "1) stats 2) stats rss")
| mit | 1,083,935,464,955,793,700 | 35.148073 | 252 | 0.63818 | false |
imeteora/cocos2d-x-3.x-Qt | tools/jenkins-scripts/watchdog.py | 1 | 1716 | import jenkinsapi
from jenkinsapi.jenkins import Jenkins
import sys
import time
import os
import traceback
#check & kill dead build
def build_time(_job,_threshold):
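    #stop the job's last build if it has been running for more than _threshold
    #minutes (minute-of-hour arithmetic, so wrapping past the hour is handled)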
#get jenkins-job-watchdog-threshold
#Get last build running
build = _job.get_last_build()
running = build.is_running()
print 'build_job:',_job,'running:',running
if not running:
return False
#Get numerical ID of the last build.
buildnu = _job.get_last_buildnumber()
print "buildnumber:#",buildnu
#get nowtime
nowtime = time.strftime('%M',time.localtime(time.time()))
#print 'nowtime:',nowtime
#get build start time
timeb = build.get_timestamp()
#print 'buildtime:',str(timeb)[14:16]
buildtime = int(str(timeb)[14:16])
subtime = 0
if int(nowtime) >= buildtime:
subtime = int(nowtime)-buildtime
else:
subtime = 60-buildtime+int(nowtime)
if subtime > _threshold:
#print 'subtime',subtime
        #kill dead build
build.stop()
def main():
username = os.environ['JENKINS_ADMIN']
password = os.environ['JENKINS_ADMIN_PW']
J = Jenkins('http://115.28.134.83:8000',username,password)
#get all jenkins jobs
for key,job in J.iteritems():
threshold = 0
if(os.environ.has_key(key+'-threshold')):
threshold = int(os.environ[key+'-threshold'])
else:
threshold = int(os.environ['jenkins-job-watchdog-threshold'])
build_time(job,threshold)
return(0)
# -------------- main --------------
if __name__ == '__main__':
sys_ret = 0
try:
sys_ret = main()
except:
traceback.print_exc()
sys_ret = 1
finally:
sys.exit(sys_ret)
| gpl-2.0 | 4,595,123,461,525,334,500 | 26.677419 | 73 | 0.598485 | false |
gviejo/ThalamusPhysio | python/main_pop_corr_nucleus.py | 1 | 7266 |
import numpy as np
import pandas as pd
# from matplotlib.pyplot import plot,show,draw
import scipy.io
from functions import *
import _pickle as cPickle
import time
import os, sys
import ipyparallel
import neuroseries as nts
import scipy.stats
from pylab import *
from multiprocessing import Pool
data_directory = '/mnt/DataGuillaume/MergedData/'
datasets = np.loadtxt(data_directory+'datasets_ThalHpc.list', delimiter = '\n', dtype = str, comments = '#')
mappings = pd.read_hdf("/mnt/DataGuillaume/MergedData/MAPPING_NUCLEUS.h5")
nucleus = np.unique(mappings['nucleus'])
sessions = np.unique([n.split("_")[0] for n in mappings.index])
# determining number of neurons per nucleus et per sessions
count = pd.DataFrame(index=sessions, columns = nucleus,data=0)
for s in count.index:
for n in nucleus:
count.loc[s,n] = (mappings[mappings.index.str.contains(s)]['nucleus'] == n).sum()
nucleus_session = {n:count.index.values[count[n]>5] for n in nucleus}
# sys.exit()
# make directory for each nucleus
for n in nucleus:
try:
os.mkdir("/mnt/DataGuillaume/corr_pop_nucleus/"+n)
except:
pass
def compute_population_correlation(nuc, session):
start_time = time.clock()
print(session)
store = pd.HDFStore("/mnt/DataGuillaume/population_activity/"+session+".h5")
rip_pop = store['rip']
rem_pop = store['rem']
wak_pop = store['wake']
store.close()
# WHICH columns to keep
mappings = pd.read_hdf("/mnt/DataGuillaume/MergedData/MAPPING_NUCLEUS.h5")
tmp = mappings[mappings.index.str.contains(session)]['nucleus'] == nuc
neurons = tmp.index.values[np.where(tmp)[0]]
idx = np.array([int(n.split("_")[1]) for n in neurons])
rip_pop = rip_pop[idx]
rem_pop = rem_pop[idx]
wak_pop = wak_pop[idx]
###############################################################################################################
# POPULATION CORRELATION FOR EACH RIPPLES
###############################################################################################################
#matrix of distance between ripples in second
interval_mat = np.vstack(nts.TsdFrame(rip_pop).as_units('s').index.values) - nts.TsdFrame(rip_pop).as_units('s').index.values
rip_corr = np.ones(interval_mat.shape)*np.nan
# doing the upper part of the diagonal
# rip_corr = np.eye(interval_mat.shape[0])
# bad
tmp = np.zeros_like(rip_corr)
tmp[np.triu_indices(interval_mat.shape[0], 1)] += 1
tmp[np.tril_indices(interval_mat.shape[0], 300)] += 1
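	# keep only pairs above the diagonal that are at most 300 ripples apart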
index = np.where(tmp == 2)
for i, j in zip(index[0], index[1]):
rip_corr[i,j] = scipy.stats.pearsonr(rip_pop.iloc[i].values, rip_pop.iloc[j].values)[0]
rip_corr[j,i] = rip_corr[i,j]
# print(rip_corr[i,j])
allrip_corr = pd.DataFrame(index = interval_mat[index], data = rip_corr[index])
rip_corr = pd.DataFrame(index = rip_pop.index.values, data = rip_corr, columns = rip_pop.index.values)
np.fill_diagonal(rip_corr.values, 1.0)
rip_corr = rip_corr.fillna(0)
###############################################################################################################
# POPULATION CORRELATION FOR EACH THETA CYCLE OF REM
###############################################################################################################
# compute all time interval for each ep of theta
interval_mat = np.vstack(nts.TsdFrame(rem_pop).as_units('s').index.values) - nts.TsdFrame(rem_pop).as_units('s').index.values
rem_corr = np.ones(interval_mat.shape)*np.nan
# index = np.where(np.logical_and(interval_mat < 3.0, interval_mat >= 0.0))
# rem_corr = np.eye(interval_mat.shape[0])
# bad
tmp = np.zeros_like(rem_corr)
tmp[np.triu_indices(interval_mat.shape[0], 1)] += 1
tmp[np.tril_indices(interval_mat.shape[0], 300)] += 1
index = np.where(tmp == 2)
for i, j in zip(index[0], index[1]):
rem_corr[i,j] = scipy.stats.pearsonr(rem_pop.iloc[i].values, rem_pop.iloc[j].values)[0]
rem_corr[j,i] = rem_corr[i,j]
allrem_corr = pd.DataFrame(index = interval_mat[index], data = rem_corr[index])
rem_corr = pd.DataFrame(index = rem_pop.index.values, data = rem_corr, columns = rem_pop.index.values)
np.fill_diagonal(rem_corr.values, 1.0)
rem_corr = rem_corr.fillna(0)
###############################################################################################################
# POPULATION CORRELATION FOR EACH THETA CYCLE OF WAKE
###############################################################################################################
# compute all time interval for each ep of theta
interval_mat = np.vstack(nts.TsdFrame(wak_pop).as_units('s').index.values) - nts.TsdFrame(wak_pop).as_units('s').index.values
wak_corr = np.ones(interval_mat.shape)*np.nan
# index = np.where(np.logical_and(interval_mat < 3.0, interval_mat >= 0.0))
# wak_corr = np.eye(interval_mat.shape[0])
# bad
tmp = np.zeros_like(wak_corr)
tmp[np.triu_indices(interval_mat.shape[0], 1)] += 1
tmp[np.tril_indices(interval_mat.shape[0], 300)] += 1
index = np.where(tmp == 2)
for i, j in zip(index[0], index[1]):
wak_corr[i,j] = scipy.stats.pearsonr(wak_pop.iloc[i].values, wak_pop.iloc[j].values)[0]
wak_corr[j,i] = wak_corr[i,j]
allwak_corr = pd.DataFrame(index = interval_mat[index], data = wak_corr[index])
wak_corr = pd.DataFrame(index = wak_pop.index.values, data = wak_corr, columns = wak_pop.index.values)
np.fill_diagonal(wak_corr.values, 1.0)
wak_corr = wak_corr.fillna(0)
###############################################################################################################
# STORING
###############################################################################################################
store = pd.HDFStore("/mnt/DataGuillaume/corr_pop_nucleus/"+nuc+"/"+session+".h5")
store.put('rip_corr', rip_corr)
store.put('allrip_corr', allrip_corr)
store.put('wak_corr', wak_corr)
store.put('allwak_corr', allwak_corr)
store.put('rem_corr', rem_corr)
store.put('allrem_corr', allrem_corr)
store.close()
print(time.clock() - start_time, "seconds")
return time.clock() - start_time
dview = Pool(8)
for n in nucleus:
print(n)
a = dview.starmap_async(compute_population_correlation, zip([n]*len(nucleus_session[n]),nucleus_session[n])).get()
# a = compute_population_correlation('AD', nucleus_session['AD'][0])
# ###############################################################################################################
# # PLOT
# ###############################################################################################################
# last = np.max([np.max(allrip_corr[:,0]),np.max(alltheta_corr[:,0])])
# bins = np.arange(0.0, last, 0.2)
# # average rip corr
# index_rip = np.digitize(allrip_corr[:,0], bins)
# mean_ripcorr = np.array([np.mean(allrip_corr[index_rip == i,1]) for i in np.unique(index_rip)[0:30]])
# # average theta corr
# index_theta = np.digitize(alltheta_corr[:,0], bins)
# mean_thetacorr = np.array([np.mean(alltheta_corr[index_theta == i,1]) for i in np.unique(index_theta)[0:30]])
# xt = list(bins[0:30][::-1]*-1.0)+list(bins[0:30])
# ytheta = list(mean_thetacorr[0:30][::-1])+list(mean_thetacorr[0:30])
# yrip = list(mean_ripcorr[0:30][::-1])+list(mean_ripcorr[0:30])
# plot(xt, ytheta, 'o-', label = 'theta')
# plot(xt, yrip, 'o-', label = 'ripple')
# legend()
# xlabel('s')
# ylabel('r')
# show()
| gpl-3.0 | 3,322,972,831,613,027,300 | 39.366667 | 126 | 0.579136 | false |
timlau/FedoraReview | src/FedoraReview/helpers_mixin.py | 1 | 5574 | # -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# (C) 2011 - Tim Lauridsen <[email protected]>
'''
Tools for helping Fedora package reviewers
'''
import logging
import os.path
import re
import urllib
from subprocess import Popen, PIPE
import hashlib
from settings import Settings
from review_error import ReviewError
class DownloadError(ReviewError):
''' Error in urlretrieve(). '''
def __init__(self, code, url):
ReviewError.__init__(
self, "Error %s downloading %s" % (code, url))
class HelpersMixin(object):
''' Miscellaneous library support mixin class. '''
def __init__(self):
try:
self.log = Settings.get_logger()
except AttributeError:
pass
def _run_cmd(self, cmd, header='Run command'):
''' Run a command using using subprocess, return output. '''
self.log.debug(header + ': ' + cmd)
cmd = cmd.split(' ')
proc = Popen(cmd, stdout=PIPE, stderr=PIPE)
output, error = '', 'undefined'
try:
output, error = proc.communicate()
except OSError, e:
self.log.debug("OS error, stderr: " + error, exc_info=True)
self.log.error("OS error running " + ' '.join(cmd), str(e))
return output
@staticmethod
def _checksum(path):
''' get the checksum for a path using algorithm set by configuration
(default: md5)
:arg path: the path to get the the checksum for
:return: checksum
'''
ck = hashlib.new(Settings.checksum)
with open(path, 'rb') as f:
for chunk in iter(lambda: f.read(8192), ''):
ck.update(chunk)
return ck.hexdigest()
@staticmethod
def urlretrieve(url, path):
''' Similar to urllib.urlretrieve, raises DownloadError. '''
try:
# we need to timeout eventually if there are problems
import socket
socket.setdefaulttimeout(30)
istream = urllib.FancyURLopener().open(url)
if istream.getcode() and istream.getcode() != 200:
raise DownloadError(istream.getcode(), url)
with open(path, 'w') as ostream:
octets = istream.read(32767)
while octets != '':
ostream.write(octets)
octets = istream.read(32767)
except IOError as err:
raise DownloadError(str(err), url)
def _get_file(self, link, directory, logger=None):
''' Download a file in link to directory. '''
fname = link.rsplit('/', 1)[1]
path = os.path.join(directory, fname)
if os.path.exists(path) and Settings.cache:
if logger:
logger(True)
logging.debug('Using cached source: ' + fname)
return path
self.log.debug(" --> %s : %s" % (directory, link))
if logger:
logger(False)
self.urlretrieve(link, path)
return path
@staticmethod
def rpmdev_extract(archive, extract_dir):
"""
Unpack archive in extract_dir. Returns true if
from subprocess.call() returns 0
"""
cmd = 'rpmdev-extract -qC ' + extract_dir + ' ' + archive
cmd += ' &>/dev/null'
p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
stdout, stderr = p.communicate()
if p.returncode != 0:
log = Settings.get_logger()
log.debug("Cannot unpack " + archive)
log.debug("Status: %d, stdout: %s, stderr: %s.",
p.returncode, str(stdout), str(stderr))
return p.returncode == 0
@staticmethod
def check_rpmlint_errors(out, log):
""" Check the rpmlint output, return(ok, errmsg)
If ok, output is OK and there is 0 warnings/errors
If not ok, and errmsg!= None there is system errors,
reflected in errmsg. If not ok and msg == None parsing
is ok but there are warnings/errors"""
problems = re.compile(r'(\d+)\serrors\,\s(\d+)\swarnings')
lines = out.split('\n')[:-1]
err_lines = filter(lambda l: l.lower().find('error') != -1,
lines)
if len(err_lines) == 0:
Settings.get_logger().debug('Cannot parse rpmlint output: '
+ out)
return False, 'Cannot parse rpmlint output:'
res = problems.search(err_lines[-1])
if res and len(res.groups()) == 2:
errors, warnings = res.groups()
if errors == '0' and warnings == '0':
return True, None
else:
return False, None
else:
log.debug('Cannot parse rpmlint output: ' + out)
return False, 'Cannot parse rpmlint output:'
# vim: set expandtab ts=4 sw=4:
| gpl-2.0 | -8,841,794,272,914,089,000 | 34.503185 | 76 | 0.577861 | false |
cuauv/software | visualizer/configure.py | 1 | 1691 | #!/usr/bin/env python3
from build import ninja_common
build = ninja_common.Build("visualizer")
# Only build if all dependencies are present.
# TODO Create a better means of dependency checking.
import os
sources = ['gl_utils.cpp',
'graphics_engine.cpp',
'material.cpp',
'obj_builder.cpp',
'render_buffer.cpp',
'renderable.cpp',
'renderer.cpp',
'scene_object.cpp',
'shadow_map.cpp',
'skybox.cpp',
'stl_builder.cpp',
'stl_read.cpp']
build.build_shared(
'vis', [os.path.join('graphics_engine', source) for source in sources],
pkg_confs=['gl'], cflags=['-DGL_GLEXT_PROTOTYPES', '-Wno-misleading-indentation']
)
# We compile this separately and link at RUNTIME to avoid
# requiring OpenCV and Eigen for visualizer use.
build.build_shared('vision_link', ['vision_link.cpp'],
auv_deps=['auv-camera-message-framework', 'conf'], pkg_confs=['opencv4', 'eigen3'],
cflags=[]
)
# TODO we should not be compiling units like below.
build.build_shared('fishbowl_comm', ['fishbowl_comm.cpp', '../fishbowl/bits.cpp'],
auv_deps=['utils'])
build.build_shared('aslam_comm', ['aslam_comm.cpp'], auv_deps=['utils'])
build.build_cmd('auv-visualizer-nodisplay',
['visualizer.cpp', 'keyboard.cpp', 'point_manager.cpp',
'fishbowl_manager.cpp', 'async_manager.cpp'],
auv_deps=['shm', 'utils', 'vis',
'fishbowl_comm', 'math', 'quat', 'aslam_comm'],
pkg_confs=['gl', 'libconfig++', 'glfw3'], lflags=['-ldl'])
build.install('auv-visualizer', f='visualizer/visualizer.sh')
| bsd-3-clause | -7,491,380,834,800,671,000 | 33.510204 | 83 | 0.603193 | false |
Clarity-89/clarityv2 | src/clarityv2/portfolio/migrations/0001_initial.py | 1 | 1263 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-02-27 19:20
from __future__ import unicode_literals
import autoslug.fields
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Entry',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, verbose_name='name')),
('slug', autoslug.fields.AutoSlugField(editable=False, populate_from='name', unique=True, verbose_name='slug')),
('image', models.ImageField(blank=True, upload_to='portfolio', verbose_name='image')),
('description', models.TextField(blank=True, verbose_name='description')),
('order', models.PositiveIntegerField(default=0)),
('published', models.BooleanField(default=True, verbose_name='published')),
],
options={
'ordering': ['order'],
'verbose_name_plural': 'portfolio entries',
'verbose_name': 'portfolio entry',
},
),
]
| mit | -5,629,408,718,103,012,000 | 36.147059 | 128 | 0.578781 | false |
cwisecarver/osf.io | addons/osfstorage/tests/test_views.py | 1 | 34687 | # encoding: utf-8
from __future__ import unicode_literals
import mock
import datetime
import pytest
from nose.tools import * # noqa
from dateutil.parser import parse as parse_datetime
from addons.osfstorage.models import OsfStorageFileNode
from framework.auth.core import Auth
from addons.osfstorage.tests.utils import (
StorageTestCase, Delta, AssertDeltas,
recursively_create_file,
)
from addons.osfstorage.tests import factories
from framework.auth import signing
from website.util import rubeus
from osf.models import Tag
from osf.models import files as models
from addons.osfstorage.apps import osf_storage_root
from addons.osfstorage import utils
from addons.base.views import make_auth
from addons.osfstorage import settings as storage_settings
from tests.factories import ProjectFactory
def create_record_with_version(path, node_settings, **kwargs):
version = factories.FileVersionFactory(**kwargs)
node_settings.get_root().append_file(path)
record.versions.append(version)
record.save()
return record
@pytest.mark.django_db
class HookTestCase(StorageTestCase):
def send_hook(self, view_name, view_kwargs, payload, method='get', **kwargs):
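        # sign the payload with the default signer, the same way storage
        # callbacks authenticate themselves against the hook views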
method = getattr(self.app, method)
return method(
self.project.api_url_for(view_name, **view_kwargs),
signing.sign_data(signing.default_signer, payload),
**kwargs
)
@pytest.mark.django_db
class TestGetMetadataHook(HookTestCase):
def test_empty(self):
res = self.send_hook(
'osfstorage_get_children',
{'fid': self.node_settings.get_root()._id},
{},
)
assert_true(isinstance(res.json, list))
assert_equal(res.json, [])
    def test_file_metadata(self):
path = u'kind/of/magíc.mp3'
record = recursively_create_file(self.node_settings, path)
version = factories.FileVersionFactory()
record.versions.add(version)
record.save()
res = self.send_hook(
'osfstorage_get_metadata',
{'fid': record.parent._id},
{},
)
assert_true(isinstance(res.json, dict))
assert_equal(res.json, record.parent.serialize(True))
def test_children_metadata(self):
path = u'kind/of/magíc.mp3'
record = recursively_create_file(self.node_settings, path)
version = factories.FileVersionFactory()
record.versions.add(version)
record.save()
res = self.send_hook(
'osfstorage_get_children',
{'fid': record.parent._id},
{},
)
assert_equal(len(res.json), 1)
res_data = res.json[0]
expected_data = record.serialize()
# Datetimes in response might not be exactly the same as in record.serialize
# because of the way Postgres serializes dates. For example,
# '2017-06-05T17:32:20.964950+00:00' will be
# serialized as '2017-06-05T17:32:20.96495+00:00' by postgres
# Therefore, we parse the dates then compare them
expected_date_modified = parse_datetime(expected_data.pop('modified'))
expected_date_created = parse_datetime(expected_data.pop('created'))
res_date_modified = parse_datetime(res_data.pop('modified'))
res_date_created = parse_datetime(res_data.pop('created'))
assert_equal(res_date_modified, expected_date_modified)
assert_equal(res_date_created, expected_date_created)
assert_equal(res_data, expected_data)
def test_osf_storage_root(self):
auth = Auth(self.project.creator)
result = osf_storage_root(self.node_settings.config, self.node_settings, auth)
node = self.project
expected = rubeus.build_addon_root(
node_settings=self.node_settings,
name='',
permissions=auth,
user=auth.user,
nodeUrl=node.url,
nodeApiUrl=node.api_url,
)
root = result[0]
assert_equal(root, expected)
def test_root_default(self):
res = self.send_hook('osfstorage_get_metadata', {}, {})
assert_equal(res.json['fullPath'], '/')
assert_equal(res.json['id'], self.node_settings.get_root()._id)
def test_metadata_not_found(self):
res = self.send_hook(
'osfstorage_get_metadata',
{'fid': 'somebogusid'}, {},
expect_errors=True,
)
assert_equal(res.status_code, 404)
def test_metadata_not_found_lots_of_slashes(self):
res = self.send_hook(
'osfstorage_get_metadata',
{'fid': '/not/fo/u/nd/'}, {},
expect_errors=True,
)
assert_equal(res.status_code, 404)
@pytest.mark.django_db
class TestUploadFileHook(HookTestCase):
def setUp(self):
super(TestUploadFileHook, self).setUp()
self.name = 'pízza.png'
self.record = recursively_create_file(self.node_settings, self.name)
self.auth = make_auth(self.user)
def send_upload_hook(self, parent, payload=None, **kwargs):
return self.send_hook(
'osfstorage_create_child',
{'fid': parent._id},
payload=payload or {},
method='post_json',
**kwargs
)
def make_payload(self, **kwargs):
payload = {
'user': self.user._id,
'name': self.name,
'hashes': {'base64': '=='},
'worker': {
'uname': 'testmachine'
},
'settings': {
'provider': 'filesystem',
storage_settings.WATERBUTLER_RESOURCE: 'blah',
},
'metadata': {
'size': 123,
'name': 'file',
'provider': 'filesystem',
'modified': 'Mon, 16 Feb 2015 18:45:34 GMT'
},
}
payload.update(kwargs)
return payload
def test_upload_create(self):
name = 'slightly-mad'
res = self.send_upload_hook(self.node_settings.get_root(), self.make_payload(name=name))
assert_equal(res.status_code, 201)
assert_equal(res.json['status'], 'success')
record = self.node_settings.get_root().find_child_by_name(name)
version = models.FileVersion.load(res.json['version'])
assert_equal(version.size, 123)
assert_equal(version.location_hash, 'file')
assert_equal(version.location, {
'object': 'file',
'uname': 'testmachine',
'service': 'filesystem',
'provider': 'filesystem',
storage_settings.WATERBUTLER_RESOURCE: 'blah',
})
assert_equal(version.metadata, {
'size': 123,
'name': 'file',
'base64': '==',
'provider': 'filesystem',
'modified': 'Mon, 16 Feb 2015 18:45:34 GMT'
})
assert_is_not(version, None)
assert_equal([version], list(record.versions.all()))
assert_not_in(version, self.record.versions.all())
assert_equal(record.serialize(), res.json['data'])
assert_equal(res.json['data']['downloads'], self.record.get_download_count())
def test_upload_update(self):
delta = Delta(lambda: self.record.versions.count(), lambda value: value + 1)
with AssertDeltas(delta):
res = self.send_upload_hook(self.node_settings.get_root(), self.make_payload())
self.record.reload()
assert_equal(res.status_code, 200)
assert_equal(res.json['status'], 'success')
version = models.FileVersion.load(res.json['version'])
assert_is_not(version, None)
assert_in(version, self.record.versions.all())
def test_upload_duplicate(self):
location = {
'service': 'cloud',
storage_settings.WATERBUTLER_RESOURCE: 'osf',
'object': 'file',
}
version = self.record.create_version(self.user, location)
with AssertDeltas(Delta(lambda: self.record.versions.count())):
res = self.send_upload_hook(self.node_settings.get_root(), self.make_payload())
self.record.reload()
assert_equal(res.status_code, 200)
assert_equal(res.json['status'], 'success')
version = models.FileVersion.load(res.json['version'])
assert_is_not(version, None)
assert_in(version, self.record.versions.all())
def test_upload_create_child(self):
name = 'ლ(ಠ益ಠლ).unicode'
parent = self.node_settings.get_root().append_folder('cheesey')
res = self.send_upload_hook(parent, self.make_payload(name=name))
assert_equal(res.status_code, 201)
assert_equal(res.json['status'], 'success')
assert_equal(res.json['data']['downloads'], self.record.get_download_count())
version = models.FileVersion.load(res.json['version'])
assert_is_not(version, None)
assert_not_in(version, self.record.versions.all())
record = parent.find_child_by_name(name)
assert_in(version, record.versions.all())
assert_equals(record.name, name)
assert_equals(record.parent, parent)
def test_upload_create_child_with_same_name(self):
name = 'ლ(ಠ益ಠლ).unicode'
self.node_settings.get_root().append_file(name)
parent = self.node_settings.get_root().append_folder('cheesey')
res = self.send_upload_hook(parent, self.make_payload(name=name))
assert_equal(res.status_code, 201)
assert_equal(res.json['status'], 'success')
assert_equal(res.json['data']['downloads'], self.record.get_download_count())
version = models.FileVersion.load(res.json['version'])
assert_is_not(version, None)
assert_not_in(version, self.record.versions.all())
record = parent.find_child_by_name(name)
assert_in(version, record.versions.all())
assert_equals(record.name, name)
assert_equals(record.parent, parent)
def test_upload_fail_to_create_version_due_to_checkout(self):
user = factories.AuthUserFactory()
name = 'Gunter\'s noise.mp3'
self.node_settings.get_root().append_file(name)
root = self.node_settings.get_root()
file = root.find_child_by_name(name)
file.checkout = user
file.save()
res = self.send_upload_hook(root, self.make_payload(name=name), expect_errors=True)
assert_equal(res.status_code, 403)
def test_update_nested_child(self):
name = 'ლ(ಠ益ಠლ).unicode'
parent = self.node_settings.get_root().append_folder('cheesey')
old_node = parent.append_file(name)
res = self.send_upload_hook(parent, self.make_payload(name=name))
old_node.reload()
new_node = parent.find_child_by_name(name)
assert_equal(res.status_code, 200)
assert_equal(res.json['status'], 'success')
assert_equal(res.json['data']['downloads'], new_node.get_download_count())
assert_equal(old_node, new_node)
version = models.FileVersion.load(res.json['version'])
assert_is_not(version, None)
assert_in(version, new_node.versions.all())
assert_in(version, new_node.versions.all())
assert_equals(new_node.name, name)
assert_equals(new_node.parent, parent)
def test_upload_weird_name(self):
name = 'another/dir/carpe.png'
parent = self.node_settings.get_root().append_folder('cheesey')
res = self.send_upload_hook(parent, self.make_payload(name=name), expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(len(parent.children), 0)
def test_upload_to_file(self):
name = 'carpe.png'
parent = self.node_settings.get_root().append_file('cheesey')
res = self.send_upload_hook(parent, self.make_payload(name=name), expect_errors=True)
assert_true(parent.is_file)
assert_equal(res.status_code, 400)
def test_upload_no_data(self):
res = self.send_upload_hook(self.node_settings.get_root(), expect_errors=True)
assert_equal(res.status_code, 400)
def test_archive(self):
name = 'ლ(ಠ益ಠლ).unicode'
parent = self.node_settings.get_root().append_folder('cheesey')
res = self.send_upload_hook(parent, self.make_payload(name=name, hashes={'sha256': 'foo'}))
assert_equal(res.status_code, 201)
assert_equal(res.json['status'], 'success')
assert_is(res.json['archive'], True)
res = self.send_hook(
'osfstorage_update_metadata',
{},
payload={'metadata': {
'vault': 'Vault 101',
'archive': '101 tluaV',
}, 'version': res.json['version']},
method='put_json',
)
res = self.send_upload_hook(parent, self.make_payload(
name=name,
hashes={'sha256': 'foo'},
metadata={
'name': 'lakdjf',
'provider': 'testing',
}))
assert_equal(res.status_code, 200)
assert_equal(res.json['status'], 'success')
assert_is(res.json['archive'], False)
# def test_upload_update_deleted(self):
# pass
@pytest.mark.django_db
class TestUpdateMetadataHook(HookTestCase):
def setUp(self):
super(TestUpdateMetadataHook, self).setUp()
self.path = 'greasy/pízza.png'
self.record = recursively_create_file(self.node_settings, self.path)
self.version = factories.FileVersionFactory()
self.record.versions = [self.version]
self.record.save()
self.payload = {
'metadata': {
'size': 123,
'modified': 'Mon, 16 Feb 2015 18:45:34 GMT',
'md5': 'askjasdlk;jsadlkjsadf',
'sha256': 'sahduashduahdushaushda',
},
'version': self.version._id,
'size': 321, # Just to make sure the field is ignored
}
def send_metadata_hook(self, payload=None, **kwargs):
return self.send_hook(
'osfstorage_update_metadata',
{},
payload=payload or self.payload,
method='put_json',
**kwargs
)
def test_callback(self):
self.version.date_modified = None
self.version.save()
self.send_metadata_hook()
self.version.reload()
#Test fields are added
assert_equal(self.version.metadata['size'], 123)
assert_equal(self.version.metadata['md5'], 'askjasdlk;jsadlkjsadf')
assert_equal(self.version.metadata['modified'], 'Mon, 16 Feb 2015 18:45:34 GMT')
#Test attributes are populated
assert_equal(self.version.size, 123)
assert_true(isinstance(self.version.date_modified, datetime.datetime))
def test_archived(self):
self.send_metadata_hook({
'version': self.version._id,
'metadata': {
'vault': 'osf_storage_prod',
'archive': 'Some really long glacier object id here'
}
})
self.version.reload()
assert_equal(self.version.metadata['vault'], 'osf_storage_prod')
assert_equal(self.version.metadata['archive'], 'Some really long glacier object id here')
def test_archived_record_not_found(self):
res = self.send_metadata_hook(
payload={
'metadata': {'archive': 'glacier'},
'version': self.version._id[::-1],
'size': 123,
'modified': 'Mon, 16 Feb 2015 18:45:34 GMT'
},
expect_errors=True,
)
assert_equal(res.status_code, 404)
self.version.reload()
assert_not_in('archive', self.version.metadata)
@pytest.mark.django_db
class TestGetRevisions(StorageTestCase):
def setUp(self):
super(TestGetRevisions, self).setUp()
self.path = 'tie/your/mother/down.mp3'
self.record = recursively_create_file(self.node_settings, self.path)
self.record.versions = [factories.FileVersionFactory() for __ in range(15)]
self.record.save()
def get_revisions(self, fid=None, **kwargs):
return self.app.get(
self.project.api_url_for(
'osfstorage_get_revisions',
fid=fid or self.record._id,
**signing.sign_data(signing.default_signer, {})
),
auth=self.user.auth,
**kwargs
)
def test_get_revisions(self):
res = self.get_revisions()
expected = [
utils.serialize_revision(
self.project,
self.record,
version,
index=self.record.versions.count() - 1 - idx
)
for idx, version in enumerate(reversed(self.record.versions.all()))
]
assert_equal(len(res.json['revisions']), 15)
assert_equal(res.json['revisions'], [x for x in expected])
assert_equal(res.json['revisions'][0]['index'], 15)
assert_equal(res.json['revisions'][-1]['index'], 1)
def test_get_revisions_path_not_found(self):
res = self.get_revisions(fid='missing', expect_errors=True)
assert_equal(res.status_code, 404)
@pytest.mark.django_db
class TestCreateFolder(HookTestCase):
def setUp(self):
super(TestCreateFolder, self).setUp()
self.root_node = self.node_settings.get_root()
def create_folder(self, name, parent=None, **kwargs):
parent = parent or self.node_settings.get_root()
return self.send_hook(
'osfstorage_create_child',
{'fid': parent._id},
payload={
'name': name,
'user': self.user._id,
'kind': 'folder'
},
method='post_json',
**kwargs
)
def test_create_folder(self):
resp = self.create_folder('name')
self.root_node.reload()
assert_equal(resp.status_code, 201)
assert_equal(len(self.root_node.children), 1)
assert_equal(self.root_node.children[0].serialize(), resp.json['data'])
def test_no_data(self):
resp = self.send_hook(
'osfstorage_create_child',
{'fid': self.root_node._id},
payload={},
method='post_json',
expect_errors=True
)
assert_equal(resp.status_code, 400)
def test_create_with_parent(self):
resp = self.create_folder('name')
assert_equal(resp.status_code, 201)
assert_equal(self.root_node.children.count(), 1)
assert_equal(self.root_node.children.all()[0].serialize(), resp.json['data'])
resp = self.create_folder('name', parent=OsfStorageFileNode.load(resp.json['data']['id']))
assert_equal(resp.status_code, 201)
assert_equal(self.root_node.children.count(), 1)
assert_false(self.root_node.children.all()[0].is_file)
assert_equal(self.root_node.children.all()[0].children.count(), 1)
assert_false(self.root_node.children.all()[0].children.all()[0].is_file)
assert_equal(self.root_node.children.all()[0].children.all()[0].serialize(), resp.json['data'])
@pytest.mark.django_db
class TestDeleteHook(HookTestCase):
def setUp(self):
super(TestDeleteHook, self).setUp()
self.root_node = self.node_settings.get_root()
def send_hook(self, view_name, view_kwargs, payload, method='get', **kwargs):
method = getattr(self.app, method)
return method(
'{url}?payload={payload}&signature={signature}'.format(
url=self.project.api_url_for(view_name, **view_kwargs),
**signing.sign_data(signing.default_signer, payload)
),
**kwargs
)
def delete(self, file_node, **kwargs):
return self.send_hook(
'osfstorage_delete',
{'fid': file_node._id},
payload={
'user': self.user._id
},
method='delete',
**kwargs
)
def test_delete(self):
file = self.root_node.append_file('Newfile')
resp = self.delete(file)
assert_equal(resp.status_code, 200)
assert_equal(resp.json, {'status': 'success'})
fid = file._id
del file
# models.StoredFileNode._clear_object_cache()
assert_is(OsfStorageFileNode.load(fid), None)
assert_true(models.TrashedFileNode.load(fid))
def test_delete_deleted(self):
file = self.root_node.append_file('Newfile')
file.delete()
resp = self.delete(file, expect_errors=True)
assert_equal(resp.status_code, 404)
def test_cannot_delete_root(self):
resp = self.delete(self.root_node, expect_errors=True)
assert_equal(resp.status_code, 400)
def test_attempt_delete_rented_file(self):
user = factories.AuthUserFactory()
file_checked = self.root_node.append_file('Newfile')
file_checked.checkout = user
file_checked.save()
res = self.delete(file_checked, expect_errors=True)
assert_equal(res.status_code, 403)
def test_attempt_delete_while_preprint(self):
file = self.root_node.append_file('Nights')
self.node.preprint_file = file
self.node.save()
res = self.delete(file, expect_errors=True)
assert_equal(res.status_code, 403)
def test_attempt_delete_folder_with_preprint(self):
folder = self.root_node.append_folder('Fishes')
file = folder.append_file('Fish')
self.node.preprint_file = file
self.node.save()
res = self.delete(folder, expect_errors=True)
assert_equal(res.status_code, 403)
def test_delete_folder_while_preprint(self):
folder = self.root_node.append_folder('Mr. Yuck')
preprint_file = self.root_node.append_file('Thyme Out')
self.node.preprint_file = preprint_file
self.node.save()
res = self.delete(folder)
assert_equal(res.status_code, 200)
def test_delete_folder_on_preprint_with_non_preprint_file_inside(self):
folder = self.root_node.append_folder('Herbal Crooners')
file = folder.append_file('Frank Cilantro')
# project having a preprint should not block other moves
preprint_file = self.root_node.append_file('Thyme Out')
self.node.preprint_file = preprint_file
self.node.save()
res = self.delete(folder)
assert_equal(res.status_code, 200)
def test_attempt_delete_folder_with_rented_file(self):
folder = self.root_node.append_folder('Hotel Events')
user = factories.AuthUserFactory()
file_checked = folder.append_file('Checkout time')
file_checked.checkout = user
file_checked.save()
res = self.delete(folder, expect_errors=True)
assert_equal(res.status_code, 403)
def test_attempt_delete_double_nested_folder_rented_file(self):
folder = self.root_node.append_folder('One is not enough')
folder_two = folder.append_folder('Two might be doe')
user = factories.AuthUserFactory()
file_checked = folder_two.append_file('We shall see')
file_checked.checkout = user
file_checked.save()
res = self.delete(folder, expect_errors=True)
assert_equal(res.status_code, 403)
@pytest.mark.django_db
class TestMoveHook(HookTestCase):
def setUp(self):
super(TestMoveHook, self).setUp()
self.root_node = self.node_settings.get_root()
def test_move_hook(self):
file = self.root_node.append_file('Ain\'t_got_no,_I_got_life')
folder = self.root_node.append_folder('Nina Simone')
res = self.send_hook(
'osfstorage_move_hook',
{'nid': self.root_node.node._id},
payload={
'source': file._id,
'node': self.root_node._id,
'user': self.user._id,
'destination': {
'parent': folder._id,
'node': folder.node._id,
'name': folder.name,
}
},
method='post_json',)
assert_equal(res.status_code, 200)
def test_move_checkedout_file(self):
file = self.root_node.append_file('Ain\'t_got_no,_I_got_life')
file.checkout = self.user
file.save()
folder = self.root_node.append_folder('Nina Simone')
res = self.send_hook(
'osfstorage_move_hook',
{'nid': self.root_node.node._id},
payload={
'source': file._id,
'node': self.root_node._id,
'user': self.user._id,
'destination': {
'parent': folder._id,
'node': folder.node._id,
'name': folder.name,
}
},
method='post_json',
expect_errors=True,
)
assert_equal(res.status_code, 405)
def test_move_checkedout_file_in_folder(self):
folder = self.root_node.append_folder('From Here')
file = folder.append_file('No I don\'t wanna go')
file.checkout = self.user
file.save()
folder_two = self.root_node.append_folder('To There')
res = self.send_hook(
'osfstorage_move_hook',
{'nid': self.root_node.node._id},
payload={
'source': folder._id,
'node': self.root_node._id,
'user': self.user._id,
'destination': {
'parent': folder_two._id,
'node': folder_two.node._id,
'name': folder_two.name,
}
},
method='post_json',
expect_errors=True,
)
assert_equal(res.status_code, 405)
def test_move_checkedout_file_two_deep_in_folder(self):
folder = self.root_node.append_folder('From Here')
folder_nested = folder.append_folder('Inbetween')
file = folder_nested.append_file('No I don\'t wanna go')
file.checkout = self.user
file.save()
folder_two = self.root_node.append_folder('To There')
res = self.send_hook(
'osfstorage_move_hook',
{'nid': self.root_node.node._id},
payload={
'source': folder._id,
'node': self.root_node._id,
'user': self.user._id,
'destination': {
'parent': folder_two._id,
'node': folder_two.node._id,
'name': folder_two.name,
}
},
method='post_json',
expect_errors=True,
)
assert_equal(res.status_code, 405)
def test_move_preprint_file_out_of_node(self):
folder = self.root_node.append_folder('From Here')
file = folder.append_file('No I don\'t wanna go')
self.node.preprint_file = file
self.node.save()
project = ProjectFactory(creator=self.user)
project_settings = project.get_addon('osfstorage')
project_root_node = project_settings.get_root()
folder_two = project_root_node.append_folder('To There')
res = self.send_hook(
'osfstorage_move_hook',
{'nid': self.root_node.node._id},
payload={
'source': folder._id,
'node': self.root_node._id,
'user': self.user._id,
'destination': {
'parent': folder_two._id,
'node': folder_two.node._id,
'name': folder_two.name,
}
},
method='post_json',
expect_errors=True,
)
assert_equal(res.status_code, 403)
def test_move_file_out_of_node(self):
folder = self.root_node.append_folder('A long time ago')
file = folder.append_file('in a galaxy')
# project having a preprint should not block other moves
preprint_file = self.root_node.append_file('far')
self.node.preprint_file = preprint_file
self.node.save()
project = ProjectFactory(creator=self.user)
project_settings = project.get_addon('osfstorage')
project_root_node = project_settings.get_root()
folder_two = project_root_node.append_folder('far away')
res = self.send_hook(
'osfstorage_move_hook',
{'nid': self.root_node.node._id},
payload={
'source': folder._id,
'node': self.root_node._id,
'user': self.user._id,
'destination': {
'parent': folder_two._id,
'node': folder_two.node._id,
'name': folder_two.name,
}
},
method='post_json',
expect_errors=True,
)
assert_equal(res.status_code, 200)
def test_within_node_move_while_preprint(self):
file = self.root_node.append_file('Self Control')
self.node.preprint_file = file
self.node.save()
folder = self.root_node.append_folder('Frank Ocean')
res = self.send_hook(
'osfstorage_move_hook',
{'nid': self.root_node.node._id},
payload={
'source': file._id,
'node': self.root_node._id,
'user': self.user._id,
'destination': {
'parent': folder._id,
'node': folder.node._id,
'name': folder.name,
}
},
method='post_json',
expect_errors=True,
)
assert_equal(res.status_code, 200)
@pytest.mark.django_db
class TestFileTags(StorageTestCase):
def test_file_add_tag(self):
file = self.node_settings.get_root().append_file('Good Morning.mp3')
assert_not_in('Kanye_West', file.tags.values_list('name', flat=True))
url = self.project.api_url_for('osfstorage_add_tag', fid=file._id)
self.app.post_json(url, {'tag': 'Kanye_West'}, auth=self.user.auth)
file.reload()
assert_in('Kanye_West', file.tags.values_list('name', flat=True))
def test_file_add_non_ascii_tag(self):
file = self.node_settings.get_root().append_file('JapaneseCharacters.txt')
assert_not_in('コンサート', file.tags.values_list('name', flat=True))
url = self.project.api_url_for('osfstorage_add_tag', fid=file._id)
self.app.post_json(url, {'tag': 'コンサート'}, auth=self.user.auth)
file.reload()
assert_in('コンサート', file.tags.values_list('name', flat=True))
def test_file_remove_tag(self):
file = self.node_settings.get_root().append_file('Champion.mp3')
tag = Tag(name='Graduation')
tag.save()
file.tags.add(tag)
file.save()
assert_in('Graduation', file.tags.values_list('name', flat=True))
url = self.project.api_url_for('osfstorage_remove_tag', fid=file._id)
self.app.delete_json(url, {'tag': 'Graduation'}, auth=self.user.auth)
file.reload()
assert_not_in('Graduation', file.tags.values_list('name', flat=True))
def test_tag_the_same_tag(self):
file = self.node_settings.get_root().append_file('Lie,Cheat,Steal.mp3')
tag = Tag(name='Run_the_Jewels')
tag.save()
file.tags.add(tag)
file.save()
assert_in('Run_the_Jewels', file.tags.values_list('name', flat=True))
url = self.project.api_url_for('osfstorage_add_tag', fid=file._id)
res = self.app.post_json(url, {'tag': 'Run_the_Jewels'}, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['status'], 'failure')
def test_remove_nonexistent_tag(self):
file = self.node_settings.get_root().append_file('WonderfulEveryday.mp3')
assert_not_in('Chance', file.tags.values_list('name', flat=True))
url = self.project.api_url_for('osfstorage_remove_tag', fid=file._id)
res = self.app.delete_json(url, {'tag': 'Chance'}, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['status'], 'failure')
def test_file_add_tag_creates_log(self):
file = self.node_settings.get_root().append_file('Yeezy Season 3.mp4')
url = self.project.api_url_for('osfstorage_add_tag', fid=file._id)
res = self.app.post_json(url, {'tag': 'Kanye_West'}, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.node.reload()
assert_equal(self.node.logs.latest().action, 'file_tag_added')
@mock.patch('addons.osfstorage.models.OsfStorageFile.add_tag_log')
def test_file_add_tag_fail_doesnt_create_log(self, mock_log):
file = self.node_settings.get_root().append_file('UltraLightBeam.mp3')
tag = Tag(name='The Life of Pablo')
tag.save()
file.tags.add(tag)
file.save()
url = self.project.api_url_for('osfstorage_add_tag', fid=file._id)
res = self.app.post_json(url, {'tag': 'The Life of Pablo'}, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
mock_log.assert_not_called()
def test_file_remove_tag_creates_log(self):
file = self.node_settings.get_root().append_file('Formation.flac')
tag = Tag(name='You that when you cause all this conversation')
tag.save()
file.tags.add(tag)
file.save()
url = self.project.api_url_for('osfstorage_remove_tag', fid=file._id)
res = self.app.delete_json(url, {'tag': 'You that when you cause all this conversation'}, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.node.reload()
assert_equal(self.node.logs.latest().action, 'file_tag_removed')
@mock.patch('addons.osfstorage.models.OsfStorageFile.add_tag_log')
def test_file_remove_tag_fail_doesnt_create_log(self, mock_log):
file = self.node_settings.get_root().append_file('For-once-in-my-life.mp3')
url = self.project.api_url_for('osfstorage_remove_tag', fid=file._id)
res = self.app.delete_json(url, {'tag': 'wonder'}, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
mock_log.assert_not_called()
| apache-2.0 | -2,196,461,070,738,581,800 | 35.055208 | 118 | 0.581053 | false |
reedessick/pedagogy | coherentLikelihood/coherentLikelihood.py | 1 | 23070 | #!/usr/bin/python
usage = "coherentLikelihood.py [--options]"
description = "builds figures to demonstrate a heuristic burst search"
author = "Reed Essick ([email protected])"
#-------------------------------------------------
import waveforms
import numpy as np
import subprocess as sp
import matplotlib
matplotlib.use("Agg")
from matplotlib import pyplot as plt
from optparse import OptionParser
#-------------------------------------------------
parser = OptionParser(usage=usage, description=description)
parser.add_option('-v', '--verbose', default=False, action='store_true')
parser.add_option('-T', '--duration', default=10.0, type='float', help='duration of the experiment')
parser.add_option('-s', '--sampling-rate', default=1024, type='int', help='sampling rate of the experiment, should be a power of 2')
parser.add_option('-S', '--SNR', default=15.0, type='float', help='requested SNR for the injection')
parser.add_option('', '--theta', default=45, type='float', help='the polar angle for triangulation. WARNING: the plot shows "theta" but that is measured from the zenith!')
parser.add_option('', '--D-over-c', default=3, type='float', help='the triangulation baseline')
parser.add_option('-f', '--freq', default=10.0, type='float', help='central frequency of the chirpedSineGaussian')
parser.add_option('-F', '--freqDot', default=20, type='float', help='frequency derivative of the chirpedSineGaussian')
parser.add_option('-t', '--tau', default=0.25, type='float', help='time constnat of the chirpedSineGaussian')
parser.add_option('', '--frames-per-sec', default=30, type='int', help='the number of frames per second of the movie')
parser.add_option('', '--num-frames', default=200, type='int', help='the total number of frames in the movie')
parser.add_option('', '--hide-signal', default=False, action='store_true', help='do not show signal in fame*png figures')
parser.add_option('', '--hide-noisy-reconstruction', default=False, action='store_true', help='do not show the reconstructed signal which contains noise')
parser.add_option('', '--hide-noiseless-reconstruction', default=False, action='store_true', help='do not show the reconstructed signal which contains only injections')
parser.add_option('', '--tag', default='', type='string' )
parser.add_option('', '--dpi', default=200, type='int' )
parser.add_option('', '--movie-type', default=[], action='append', type='string')
parser.add_option('', '--sanity-check', default=False, action='store_true', help='stop after making sanity check plots')
opts, args = parser.parse_args()
if opts.tag:
opts.tag = "_%s"%opts.tag
N = opts.duration*opts.sampling_rate
if N%2:
raise ValueError("must have an even number of sample points! %.3f*%.3f=%.3f"%(opts.duration, opts.sampling_rate, N))
if not opts.movie_type:
opts.movie_type.append( 'mpg' )
#-------------------------------------------------
if opts.verbose:
print "generating white noise (in the freq domain)"
(freqs, wFreqDom1), (times, wTimeDom1) = waveforms.whiteNoise( opts.duration, opts.sampling_rate )
(freqs, wFreqDom2), (times, wTimeDom2) = waveforms.whiteNoise( opts.duration, opts.sampling_rate )
#-------------------------------------------------
dt = opts.D_over_c * np.cos( opts.theta*np.pi/180 )
to = opts.duration/2
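### worked example (added for clarity), using the option defaults: D/c = 3 s and
### theta = 45 deg give a light-travel delay dt = 3*cos(45*pi/180) ~ 2.12 s,
### which is split as +/- dt/2 around the central time "to" below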
if opts.verbose:
print "generating injection with to=%.3f"%(to)
hTimeDom1 = waveforms.chirpSineGaussianT( times, 1.0, opts.freq, opts.freqDot, opts.tau, to+dt/2 )
hFreqDom1 = waveforms.chirpSineGaussianF( freqs, 1.0, opts.freq, opts.freqDot, opts.tau, to+dt/2 )
hTimeDom2 = waveforms.chirpSineGaussianT( times, 1.0, opts.freq, opts.freqDot, opts.tau, to-dt/2 )
hFreqDom2 = waveforms.chirpSineGaussianF( freqs, 1.0, opts.freq, opts.freqDot, opts.tau, to-dt/2 )
#-------------------------------------------------
if opts.verbose:
print "computing optimal SNR and scaling injection"
### for white-gaussian noise with unit-variance in the frequency domain
snr = ( 2 * np.sum( hFreqDom1.real**2 + hFreqDom1.imag**2 + hFreqDom2.real**2 + hFreqDom2.imag**2 ) / opts.duration )**0.5
scaling = opts.SNR/snr
hTimeDom1 *= scaling
hFreqDom1 *= scaling
hTimeDom2 *= scaling
hFreqDom2 *= scaling
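### note (added for clarity): the optimal SNR scales linearly with the signal
### amplitude, so rescaling the injection by opts.SNR/snr makes the injected
### network SNR equal to the requested --SNR by construction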
#-------------------------------------------------
if opts.verbose:
print "compute logBSN as a function of theta"
dataF1 = wFreqDom1 + hFreqDom1
dataT1 = wTimeDom1 + hTimeDom1
dataF2 = wFreqDom2 + hFreqDom2
dataT2 = wTimeDom2 + hTimeDom2
ylim = 1.1*max(np.max(np.abs(dataT2)), np.max(np.abs(dataT1)))
ylim = (-ylim, ylim)
#snr = 2 * np.sum( dataF1.real**2 + dataF1.imag**2 + dataF2.real**2 + dataF2.imag**2 ) / opts.duration + np.fft.ifft( 2 * np.fft.ifftshift( dataF1 * np.conj(dataF2) ) ).real * opts.sampling_rate ### ifft normalizes the sum by 1/n = 1/(s*T) and we want to normalize by 1/T to approximate the integral
#SNR = snr**0.5 ### this is the "coherent snr"
SNR = np.fft.ifft( 2 * np.fft.ifftshift( dataF1 * np.conj(dataF2) ) ).real * opts.sampling_rate ### ifft normalizes the sum by 1/n = 1/(s*T) and we want to normalize by 1/T to approximate the integral
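### normalisation note (added for clarity): np.fft.ifft divides the frequency-bin
### sum by n = sampling_rate*duration, so multiplying by the sampling rate leaves
### an overall 1/duration = df; the sum then approximates the continuous integral
### int 2*d1(f)*conj(d2(f))*exp(2*pi*i*f*tau) df, whose real part is the
### correlated energy plotted as a function of the time lag tau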
#-------------------------------------------------
if opts.verbose:
print "plotting sanity check of injection and noise"
fig = plt.figure(figsize=(15,10))
### IFO1 raw data
ax = plt.subplot(2,3,1)
ax.plot( times, dataT1, 'm-', linewidth=1, alpha=0.75, label='$\mathrm{noise_1+signal_1}$' )
ax.plot( times-dt/2, dataT1, 'b-', linewidth=1, alpha=0.90, label='$\mathrm{shifted\ noise_1+signal_1}$' )
if not opts.hide_signal:
ax.plot( times-dt/2, hTimeDom1, 'k-', linewidth=1, alpha=0.5, label='$\mathrm{shifted\ signal_1}$' )
ax.legend(loc='best')
ax.xaxis.tick_top()
ax.xaxis.set_label_position('top')
ax.set_xlabel('$\mathrm{time}$')
ax.set_ylabel('$d_1(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
ax.set_ylim(ylim)
### IFO1 strain data
ax = plt.subplot(2,3,2)
ax.plot( times, hTimeDom1, 'm-', linewidth=1, alpha=0.75, label='$\mathrm{signal_1}$' )
ax.plot( times-dt/2, hTimeDom1, 'b-', linewidth=1, alpha=0.90, label='$\mathrm{shifted\ signal_1}$' )
ax.set_ylim(ylim)
#ax.legend(loc='best')
ax.xaxis.tick_top()
ax.xaxis.set_label_position('top')
ax.set_xlabel('$\mathrm{time}$')
ax.yaxis.tick_right()
ax.yaxis.set_label_position('right')
plt.setp(ax.get_yticklabels(), visible=False)
ax.set_ylabel('$h_1(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
plt.annotate(s='', xy=(to+dt/2,np.min(hTimeDom1)), xytext=(to,np.min(hTimeDom1)), arrowprops=dict(arrowstyle='<-'))
#plt.annotate(s='$\\tau$', xy=(to+dt/4,np.min(hTimeDom1)*1.1), xytext=(to+dt/4,np.min(hTimeDom1)*1.1) )
ax.plot( [to]*2, ylim, 'k--', alpha=0.5, linewidth=1 )
ax.set_ylim(ylim)
### IFO2 raw data
ax = plt.subplot(2,3,4)
ax.plot( times, dataT2, 'c-', linewidth=1, alpha=0.75, label='$\mathrm{noise_2+signal_2}$' )
ax.plot( times+dt/2, dataT2, 'r-', linewidth=1, alpha=0.90, label='$\mathrm{shifted\ noise_2+signal_2}$' )
if not opts.hide_signal:
ax.plot( times+dt/2, hTimeDom2, 'k-', linewidth=1, alpha=0.5, label='$\mathrm{shifted\ signal_2}$' )
ax.legend(loc='best')
ax.set_xlabel('$\mathrm{time}$')
ax.set_ylabel('$d_2(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
ax.set_ylim(ylim)
### IFO2 strain data
ax = plt.subplot(2,3,5)
ax.plot( times, hTimeDom2, 'c-', linewidth=1, alpha=0.75, label='$\mathrm{signal_2}$' )
ax.plot( times+dt/2, hTimeDom2, 'r-', linewidth=1, alpha=0.90, label='$\mathrm{shifted\ signal_2}$' )
ax.set_ylim(ylim)
#ax.legend(loc='best')
ax.set_xlabel('$\mathrm{time}$')
ax.yaxis.tick_right()
ax.yaxis.set_label_position('right')
plt.setp(ax.get_yticklabels(), visible=False)
ax.set_ylabel('$h_2(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
plt.annotate(s='', xy=(to-dt/2,np.max(hTimeDom2)), xytext=(to,np.max(hTimeDom2)), arrowprops=dict(arrowstyle='<-'))
#plt.annotate(s='$\\tau$', xy=(to-dt/4,np.max(hTimeDom2)*1.1), xytext=(to-dt/4,np.max(hTimeDom2)*1.1) )
ax.plot( [to]*2, ylim, 'k--', alpha=0.5, linewidth=1 )
ax.set_ylim(ylim)
### ray-plot
ax = plt.subplot(3,3,6)
truth = times<=opts.D_over_c
ax.plot( times[truth], SNR[truth], 'g-', linewidth=1, alpha=0.5, label='$\mathrm{freq-domain}$\n$\mathrm{computation}$' )
truth = times[-1]-times < opts.D_over_c
ax.plot( times[truth]-times[-1], SNR[truth], 'g-', linewidth=1, alpha=0.5, label='$\mathrm{freq-domain}$\n$\mathrm{computation}$' )
ylim_ray = ax.get_ylim()
ax.plot( [dt]*2, ylim_ray, 'k--', linewidth=1, alpha=0.5 )
ax.set_ylim(ylim_ray)
#ax.legend(loc='best')
ax.set_xlabel('$\\tau$')
ax.yaxis.tick_right()
ax.yaxis.set_label_position('right')
#ax.set_ylabel('$\\rho(\\tau)$')
ax.set_ylabel('$\mathrm{correlated\ Energy}\sim\\rho^2$')
ax.set_xlim(xmin=0, xmax=opts.D_over_c)
ax = ax.twiny()
thetas = [-90, -45, -30, -15, 0, 15, 30, 45, 90]
ax.set_xticks([opts.D_over_c*np.sin(theta*np.pi/180) for theta in thetas])
ax.set_xticklabels(["$%d^\circ$"%theta for theta in thetas])
ax.set_xlim(xmin=0, xmax=opts.D_over_c)
ax.xaxis.tick_top()
ax.xaxis.set_label_position('top')
ax.set_xlabel('$\\theta$')
plt.subplots_adjust(hspace=0.1, wspace=0.1)
figname = "sanityCheck%s.png"%(opts.tag)
if opts.verbose:
print " %s"%figname
fig.savefig( figname, dpi=opts.dpi )
plt.close( fig )
if opts.sanity_check:
import sys
sys.exit(0)
#-------------------------------------------------
if opts.verbose:
print "making movie frames"
shifts = np.arange(0, opts.D_over_c, 1.0/opts.sampling_rate)
N = len(shifts)
frame_step = int( 1.0*N / opts.num_frames )
frameNo = 0
### plot an openning frame
fig = plt.figure(figsize=(15,10))
### IFO1 raw data
ax = plt.subplot(2,3,1)
ax.plot( times, dataT1, 'm-', linewidth=1, alpha=0.50, label='$\mathrm{noise_1+signal_1}$' )
ax.plot( times, dataT1, 'b-', linewidth=1, alpha=0.90, label='$\mathrm{shifted\ noise_1+signal_1}$' )
if not opts.hide_signal:
ax.plot( times, hTimeDom1, 'k-', linewidth=1, alpha=0.5, label='$\mathrm{shifted\ signal_1}$' )
ax.legend(loc='upper left')
ax.xaxis.tick_top()
ax.xaxis.set_label_position('top')
ax.set_xlabel('$\mathrm{time}$')
ax.set_ylabel('$d_1(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
ax.set_ylim(ylim)
### IFO1 strain data
ax = plt.subplot(2,3,2)
if not opts.hide_noisy_reconstruction:
ax.plot( times, 0.5*(dataT1 + dataT2), 'm-', linewidth=1, alpha=0.75, label='$\mathrm{reconstructed\ signal_1}$')
if not opts.hide_noiseless_reconstruction:
ax.plot( times, 0.5*(hTimeDom1 + hTimeDom2), 'b-', linewidth=1, alpha=0.90, label='$\mathrm{zero\ noise}$\n$\mathrm{reconstructed\ signal_1}$')
if not opts.hide_signal:
ax.plot( times, hTimeDom1, 'k-', linewidth=1, alpha=0.5, label='$\mathrm{signal_1}$' )
ax.set_ylim(ylim)
ax.legend(loc='upper right')
ax.xaxis.tick_top()
ax.xaxis.set_label_position('top')
ax.set_xlabel('$\mathrm{time}$')
ax.yaxis.tick_right()
ax.yaxis.set_label_position('right')
plt.setp(ax.get_yticklabels(), visible=False)
ax.set_ylabel('$h_1(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
### IFO2 raw data
ax = plt.subplot(2,3,4)
ax.plot( times, dataT2, 'c-', linewidth=1, alpha=0.50, label='$\mathrm{noise_2+signal_2}$' )
ax.plot( times, dataT2, 'r-', linewidth=1, alpha=0.90, label='$\mathrm{shifted\ noise_2+signal_2}$' )
if not opts.hide_signal:
ax.plot( times, hTimeDom2, 'k-', linewidth=1, alpha=0.5, label='$\mathrm{shifted\ signal_2}$' )
ax.legend(loc='lower left')
ax.set_xlabel('$\mathrm{time}$')
ax.set_ylabel('$d_2(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
ax.set_ylim(ylim)
### IFO2 strain data
ax = plt.subplot(2,3,5)
if not opts.hide_noisy_reconstruction:
ax.plot( times, 0.5*(dataT1 + dataT2), 'c-', linewidth=1, alpha=0.75, label='$\mathrm{reconstructed\ signal_2}$')
if not opts.hide_noiseless_reconstruction:
ax.plot( times, 0.5*(hTimeDom1 + hTimeDom2), 'r-', linewidth=1, alpha=0.90, label='$\mathrm{zero\ noise}$\n$\mathrm{reconstructed\ signal_2}$')
if not opts.hide_signal:
ax.plot( times, hTimeDom2, 'k-', linewidth=1, alpha=0.5, label='$\mathrm{signal_2}$' )
ax.set_ylim(ylim)
ax.legend(loc='lower right')
ax.set_xlabel('$\mathrm{time}$')
ax.yaxis.tick_right()
ax.yaxis.set_label_position('right')
plt.setp(ax.get_yticklabels(), visible=False)
ax.set_ylabel('$h_2(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
### ray-plot
ax = plt.subplot(3,3,6)
#ax.legend(loc='best')
ax.set_xlabel('$\\tau$')
ax.yaxis.tick_right()
ax.yaxis.set_label_position('right')
#ax.set_ylabel('$\\rho(\\tau)$')
ax.set_ylabel('$\mathrm{correlated\ Energy}\sim\\rho^2$')
ax.set_xlim(xmin=0, xmax=opts.D_over_c)
ax.set_ylim(ymin=1.1*np.min(SNR), ymax=1.1*np.max(SNR))
ax = ax.twiny()
thetas = [-90, -45, -30, -15, 0, 15, 30, 45, 90]
ax.set_xticks([opts.D_over_c*np.sin(theta*np.pi/180) for theta in thetas])
ax.set_xticklabels(["$%d^\circ$"%theta for theta in thetas])
ax.set_xlim(xmin=0, xmax=opts.D_over_c)
ax.xaxis.tick_top()
ax.xaxis.set_label_position('top')
ax.set_xlabel('$\\theta$')
plt.subplots_adjust(hspace=0.1, wspace=0.1)
figname = "frame%s-%04d.png"%(opts.tag, frameNo)
if opts.verbose:
print " %s"%figname
fig.savefig( figname, dpi=opts.dpi )
plt.close(fig)
frameNo += 1
### plot the rest of the frames
ind = 0
while ind < N:
shift = shifts[ind]
fig = plt.figure(figsize=(15,10))
### IFO1 raw data
ax = plt.subplot(2,3,1)
ax.plot( times, dataT1, 'm-', linewidth=1, alpha=0.50, label='$\mathrm{noise_1+signal_1}$' )
ax.plot( times-shift/2, dataT1, 'b-', linewidth=1, alpha=0.90, label='$\mathrm{shifted\ noise_1+signal_1}$' )
if not opts.hide_signal:
ax.plot( times-shift/2, hTimeDom1, 'k-', linewidth=1, alpha=0.5, label='$\mathrm{shifted\ signal_1}$' )
if shift:
plt.annotate(s='', xy=(to+dt/2,np.min(dataT1)), xytext=(to+dt/2-shift/2,np.min(dataT1)), arrowprops=dict(arrowstyle='<-'))
ax.legend(loc='upper left')
ax.xaxis.tick_top()
ax.xaxis.set_label_position('top')
ax.set_xlabel('$\mathrm{time}$')
ax.set_ylabel('$d_1(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
ax.set_ylim(ylim)
### IFO1 strain data
ax = plt.subplot(2,3,2)
if not opts.hide_noisy_reconstruction:
ax.plot( times[ind:], 0.5*(dataT1[ind:] + dataT2[:len(hTimeDom1)-ind]), 'm-', linewidth=1, alpha=0.75, label='$\mathrm{reconstructed\ signal_1}$')
if not opts.hide_noiseless_reconstruction:
ax.plot( times[ind:], 0.5*(hTimeDom1[ind:] + hTimeDom2[:len(hTimeDom1)-ind]), 'b-', linewidth=1, alpha=0.90, label='$\mathrm{zero\ noise}$\n$\mathrm{reconstructed\ signal_1}$')
if not opts.hide_signal:
ax.plot( times, hTimeDom1, 'k-', linewidth=1, alpha=0.5, label='$\mathrm{signal_1}$' )
ax.set_ylim(ylim)
ax.legend(loc='upper right')
ax.xaxis.tick_top()
ax.xaxis.set_label_position('top')
ax.set_xlabel('$\mathrm{time}$')
ax.yaxis.tick_right()
ax.yaxis.set_label_position('right')
plt.setp(ax.get_yticklabels(), visible=False)
ax.set_ylabel('$h_1(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
### IFO2 raw data
ax = plt.subplot(2,3,4)
ax.plot( times, dataT2, 'c-', linewidth=1, alpha=0.50, label='$\mathrm{noise_2+signal_2}$' )
ax.plot( times+shift/2, dataT2, 'r-', linewidth=1, alpha=0.90, label='$\mathrm{shifted\ noise_2+signal_2}$' )
if not opts.hide_signal:
ax.plot( times+shift/2, hTimeDom2, 'k-', linewidth=1, alpha=0.5, label='$\mathrm{shifted\ signal_2}$' )
if shift:
plt.annotate(s='', xy=(to-dt/2,np.max(dataT2)), xytext=(to-dt/2+shift/2,np.max(dataT2)), arrowprops=dict(arrowstyle='<-'))
ax.legend(loc='lower left')
ax.set_xlabel('$\mathrm{time}$')
ax.set_ylabel('$d_2(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
ax.set_ylim(ylim)
### IFO2 strain data
ax = plt.subplot(2,3,5)
if not opts.hide_noisy_reconstruction:
ax.plot( times[:len(hTimeDom2)-ind], 0.5*(dataT1[ind:] + dataT2[:len(hTimeDom2)-ind]), 'c-', linewidth=1, alpha=0.75, label='$\mathrm{reconstructed\ signal_2}$')
if not opts.hide_noiseless_reconstruction:
ax.plot( times[:len(hTimeDom2)-ind], 0.5*(hTimeDom1[ind:] + hTimeDom2[:len(hTimeDom2)-ind]), 'r-', linewidth=1, alpha=0.90, label='$\mathrm{zero\ noise}$\n$\mathrm{reconstructed\ signal_2}$')
if not opts.hide_signal:
ax.plot( times, hTimeDom2, 'k-', linewidth=1, alpha=0.5, label='$\mathrm{signal_2}$' )
ax.set_ylim(ylim)
ax.legend(loc='lower right')
ax.set_xlabel('$\mathrm{time}$')
ax.yaxis.tick_right()
ax.yaxis.set_label_position('right')
plt.setp(ax.get_yticklabels(), visible=False)
ax.set_ylabel('$h_2(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
### ray-plot
ax = plt.subplot(3,3,6)
truth = times <= shift
ax.plot( times[truth], SNR[truth], 'g-', linewidth=1, alpha=0.5, label='$\mathrm{freq-domain}$\n$\mathrm{computation}$' )
# truth = times[-1]-times < shift
# ax.plot( times[truth]-times[-1], SNR[truth], 'g-', linewidth=1, alpha=0.5, label='$\mathrm{freq-domain}$\n$\mathrm{computation}$' )
# ax.legend(loc='best')
ax.set_xlabel('$\\tau$')
ax.yaxis.tick_right()
ax.yaxis.set_label_position('right')
# ax.set_ylabel('$\\rho(\\tau)$')
ax.set_ylabel('$\mathrm{correlated\ Energy}\sim\\rho^2$')
# ax.set_xlim(xmin=-opts.D_over_c, xmax=opts.D_over_c)
ax.set_xlim(xmin=0, xmax=opts.D_over_c)
ax.set_ylim(ymin=1.1*np.min(SNR), ymax=1.1*np.max(SNR))
ax = ax.twiny()
thetas = [-90, -45, -30, -15, 0, 15, 30, 45, 90]
ax.set_xticks([opts.D_over_c*np.sin(theta*np.pi/180) for theta in thetas])
ax.set_xticklabels(["$%d^\circ$"%theta for theta in thetas])
ax.set_xlim(xmin=0, xmax=opts.D_over_c)
ax.xaxis.tick_top()
ax.xaxis.set_label_position('top')
ax.set_xlabel('$\\theta$')
plt.subplots_adjust(hspace=0.1, wspace=0.1)
figname = "frame%s-%04d.png"%(opts.tag, frameNo)
if opts.verbose:
print " %s"%figname
fig.savefig( figname, dpi=opts.dpi )
plt.close(fig)
frameNo += 1
ind += frame_step
### plot the final frame
shift = opts.D_over_c
ind = N
fig = plt.figure(figsize=(15,10))
### IFO1 raw data
ax = plt.subplot(2,3,1)
ax.plot( times, dataT1, 'm-', linewidth=1, alpha=0.50, label='$\mathrm{noise_1+signal_1}$' )
ax.plot( times-shift/2, dataT1, 'b-', linewidth=1, alpha=0.90, label='$\mathrm{shifted\ noise_1+signal_1}$' )
if not opts.hide_signal:
ax.plot( times-shift/2, hTimeDom1, 'k-', linewidth=1, alpha=0.5, label='$\mathrm{shifted\ signal_1}$' )
plt.annotate(s='', xy=(to+dt/2,np.min(dataT1)), xytext=(to+dt/2-shift/2,np.min(dataT1)), arrowprops=dict(arrowstyle='<-'))
ax.legend(loc='upper left')
ax.xaxis.tick_top()
ax.xaxis.set_label_position('top')
ax.set_xlabel('$\mathrm{time}$')
ax.set_ylabel('$d_1(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
ax.set_ylim(ylim)
### IFO1 strain data
ax = plt.subplot(2,3,2)
if not opts.hide_noisy_reconstruction:
ax.plot( times[ind:], 0.5*(dataT1[ind:] + dataT2[:len(hTimeDom2)-ind]), 'm-', linewidth=1, alpha=0.75, label='$\mathrm{reconstructed\ signal_1}$')
if not opts.hide_noiseless_reconstruction:
ax.plot( times[ind:], 0.5*(hTimeDom1[ind:] + hTimeDom2[:len(hTimeDom2)-ind]), 'b-', linewidth=1, alpha=0.90, label='$\mathrm{zero\ noise}$\n$\mathrm{reconstructed\ signal_1}$')
if not opts.hide_signal:
ax.plot( times, hTimeDom1, 'k-', linewidth=1, alpha=0.5, label='$\mathrm{signal_1}$' )
ax.set_ylim(ylim)
ax.legend(loc='upper right')
ax.xaxis.tick_top()
ax.xaxis.set_label_position('top')
ax.set_xlabel('$\mathrm{time}$')
ax.yaxis.tick_right()
ax.yaxis.set_label_position('right')
plt.setp(ax.get_yticklabels(), visible=False)
ax.set_ylabel('$h_1(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
### IFO2 raw data
ax = plt.subplot(2,3,4)
ax.plot( times, dataT2, 'c-', linewidth=1, alpha=0.50, label='$\mathrm{noise_2+signal_2}$' )
ax.plot( times+shift/2, dataT2, 'r-', linewidth=1, alpha=0.90, label='$\mathrm{shifted\ noise_2+signal_2}$' )
if not opts.hide_signal:
ax.plot( times+shift/2, hTimeDom2, 'k-', linewidth=1, alpha=0.5, label='$\mathrm{shifted\ signal_2}$' )
plt.annotate(s='', xy=(to-dt/2,np.max(dataT2)), xytext=(to-dt/2+shift/2,np.max(dataT2)), arrowprops=dict(arrowstyle='<-'))
ax.legend(loc='lower left')
ax.set_xlabel('$\mathrm{time}$')
ax.set_ylabel('$d_2(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
ax.set_ylim(ylim)
### IFO2 strain data
ax = plt.subplot(2,3,5)
if not opts.hide_noisy_reconstruction:
ax.plot( times[:len(hTimeDom2)-ind], 0.5*(dataT1[ind:] + dataT2[:len(hTimeDom2)-ind]), 'c-', linewidth=1, alpha=0.75, label='$\mathrm{reconstructed\ signal_2}$')
if not opts.hide_noiseless_reconstruction:
ax.plot( times[:len(hTimeDom2)-ind], 0.5*(hTimeDom1[ind:] + hTimeDom2[:len(hTimeDom2)-ind]), 'r-', linewidth=1, alpha=0.90, label='$\mathrm{zero\ noise}$\n$\mathrm{reconstructed\ signal_2}$')
if not opts.hide_signal:
ax.plot( times, hTimeDom2, 'k-', linewidth=1, alpha=0.5, label='$\mathrm{signal_2}$' )
ax.set_ylim(ylim)
ax.legend(loc='lower right')
ax.set_xlabel('$\mathrm{time}$')
ax.yaxis.tick_right()
ax.yaxis.set_label_position('right')
plt.setp(ax.get_yticklabels(), visible=False)
ax.set_ylabel('$h_2(t)$')
ax.set_xlim(xmin=opts.D_over_c, xmax=opts.duration-opts.D_over_c)
### ray-plot
ax = plt.subplot(3,3,6)
truth = times <= shift
ax.plot( times[truth], SNR[truth], 'g-', linewidth=1, alpha=0.5, label='$\mathrm{freq-domain}$\n$\mathrm{computation}$' )
#truth = times[-1]-times < shift
#ax.plot( times[truth]-times[-1], SNR[truth], 'g-', linewidth=1, alpha=0.5, label='$\mathrm{freq-domain}$\n$\mathrm{computation}$' )
#ax.legend(loc='best')
ax.set_xlabel('$\\tau$')
ax.yaxis.tick_right()
ax.yaxis.set_label_position('right')
#ax.set_ylabel('$\\rho(\\tau)$')
ax.set_ylabel('$\mathrm{correlated\ Energy}\sim\\rho^2$')
#ax.set_xlim(xmin=-opts.D_over_c, xmax=opts.D_over_c)
ax.set_xlim(xmin=0, xmax=opts.D_over_c)
ax.set_ylim(ymin=1.1*np.min(SNR), ymax=1.1*np.max(SNR))
ax = ax.twiny()
thetas = [-90, -45, -30, -15, 0, 15, 30, 45, 90]
ax.set_xticks([opts.D_over_c*np.sin(theta*np.pi/180) for theta in thetas])
ax.set_xticklabels(["$%d^\circ$"%theta for theta in thetas])
ax.set_xlim(xmin=0, xmax=opts.D_over_c)
ax.xaxis.tick_top()
ax.xaxis.set_label_position('top')
ax.set_xlabel('$\\theta$')
plt.subplots_adjust(hspace=0.1, wspace=0.1)
figname = "frame%s-%04d.png"%(opts.tag, frameNo)
if opts.verbose:
print " %s"%figname
fig.savefig( figname, dpi=opts.dpi )
plt.close(fig)
#-------------------------------------------------
for movie_type in opts.movie_type:
cmd = "ffmpeg -r %d -i frame%s-%s04d.png coherentLikelihood%s.%s"%(opts.frames_per_sec, opts.tag, "%", opts.tag, movie_type)
if opts.verbose:
print "wrapping into a movie:\n\t%s"%(cmd)
sp.Popen(cmd.split()).wait()
| mit | -2,642,448,682,214,172,000 | 35.103286 | 299 | 0.650975 | false |
Gorbagzog/StageIAP | HorizonPhotometricNumpy.py | 1 | 82759 | #!/usr/bin/env python3
# -*-coding:Utf-8 -*
"""H-AGN LightCone photometric Catalog.
Load catalog and make a match with the true lightcone catalog.
"""
import numpy as np
import matplotlib.pyplot as plt
import pyfits
# from scipy.spatial import cKDTree
# from timeit import default_timer as timer
import numpy.lib.recfunctions as rfn
# import matplotlib.mlab as mlab
import matplotlib as mpl
from scipy.optimize import curve_fit
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import matplotlib.ticker as ticker
from matplotlib.gridspec import GridSpec
"""Load true galdata from the H-AGN Lightcone"""
zbins_Cone = np.array([0, 1, 2, 3, 6])
numzbin = np.size(zbins_Cone)-1
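# note (added for clarity): zbins_Cone = [0, 1, 2, 3, 6] gives numzbin = 4 redshift
# bins (0-1, 1-2, 2-3, 3-6); the photometric lightcone catalogue only covers
# z = 0-3, which is why several loops below run over range(numzbin-1) = 3 bins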
galdata = []
for i in range(np.size(zbins_Cone)-1):
hdulist = pyfits.open('../Data/HorizonAGNLaigleCatalogs/Galaxies_' +
str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])+'.fits')
galdata.append(hdulist[1].data)
hdulist.close()
# cols = hdulist[1].columns
# cols.info()
"""It looks like the good catalogs to use are the Haloes and not the Halos"""
halodata = []
for i in range(np.size(zbins_Cone)-1):
hdulist2 = pyfits.open('../Data/HorizonAGNLaigleCatalogs/Haloes_' +
str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])+'.fits')
halodata.append(hdulist2[1].data)
hdulist2.close()
"""Load Horizon-AGN Lightcone Photometric catalog."""
col_names = ['Id', 'Ra', 'Dec', 'zphot', 'zphot_err', 'Mass', 'Mass_err', 'mag_u', 'magerr_u',
'mag_B', 'magerr_B', 'mag_V', 'magerr_V', 'mag_r', 'magerr_r', 'mag_i', 'magerr_i',
'mag_z', 'magerr_z', 'mag_Y', 'magerr_Y', 'mag_J', 'magerr_J', 'mag_H', 'magerr_H',
'mag_K', 'magerr_K', 'SFR']
galphot = np.genfromtxt(
'../Data/HorizonAGNLightconePhotometric/Salp_0.0-3.0_dust_v15c.in_Init_Small',
names=col_names)
"""Load catalog matching halos to their central galaxies"""
# Contains the IDs (starts at 1) of the central galaxy of each halo
hal_centgal = [[] for x in range(numzbin)]
for i in range(numzbin-1):
hal_centgal[i] = np.loadtxt('../Data/HorizonAGNLaigleCatalogs/Cat_' +
str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])+'_Hal_CentralGal_newb.txt',
dtype='i4')
# np.loadtxt('../Data/HorizonAGNLaigleCatalogs/Cat_' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])+'_Hal_CentralGal_new.txt',
# dtype='i4'))
# New match with Yohan algorithm to find galaxies by decreasing spheres.
"""Load halos environment.
Header is #dens dfil dnod1 dnod2.
"dens" is an estimate of the local density (based on the Delaunay tessellation)
smoothed over 3 Mpc, "dfil" is the distance to the nearest filament, "dnod1" is the
distance to the nearest node, and "dnod2" the distance to the nearest node following
the filament. Distances are in Mpc.
To start with, the haloes can be separated according to their distance
to the filament and to the node, e.g.:
Nodes: dnod1 < 5 Mpc
Filaments: dfil < 2 Mpc
Walls/voids: the rest of the galaxies """
haloes_env = []
for i in range(3):
haloes_env.append(
np.loadtxt('../Data/HorizonAGNLaigleCatalogs/Haloes_' +
str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])+'_env.txt'))
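# Sketch (added for illustration, not used below): split the haloes by environment
# with the thresholds quoted above, assuming the column order of the header
# "#dens dfil dnod1 dnod2" (dfil = column 1, dnod1 = column 2) and giving nodes
# priority over filaments.
node_mask = [env[:, 2] < 5 for env in haloes_env]
fil_mask = [np.logical_and(env[:, 1] < 2, env[:, 2] >= 5) for env in haloes_env]
wall_mask = [np.logical_and(env[:, 1] >= 2, env[:, 2] >= 5) for env in haloes_env]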
"""Algorithm to find nearest value using a KDTree.
We make a match between nearest galaxies in projection on the sky.
Maybe we should also take into account the third dimension, to have
a better match. But it will give more importance to the error in redshift
in the observed catalog."""
# galdata_allz = np.concatenate((galdata[0], galdata[1], galdata[2]))
# start = timer()
# kdtree = cKDTree(np.transpose([galdata_allz['Ra'], galdata_allz['Dec']]))
# obstotrue = np.apply_along_axis(kdtree.query, 0, [galphot['Ra'], galphot['Dec']])
# obstotrue[1][:] = obstotrue[1][:].astype('int')
# # add index of true gal corresponding to each observed gal
# galphot = rfn.append_fields(galphot, ['Distance', 'True_gal_idx'], obstotrue, usemask=False)
# # add index of observed gal to each true gal
# truetoobs = np.empty(galdata_allz.shape)
# truetoobs[:] = np.nan
# for idx_obs in range(len(obstotrue[0])):
# truetoobs[obstotrue[1][idx_obs].astype('int')] = idx_obs
# galdata_allz = rfn.append_fields(galdata_allz, 'Obs_gal_idx', truetoobs, usemask=False)
# end = timer()
# print('Positional match took :' + str(end - start))
"""Use the match Catalog of Clotilde"""
galdata_allz = np.concatenate((galdata[0], galdata[1], galdata[2]))
# Load the 2 columns matching catalog, first column is the ID of the galaxy in the Photo catalog,
# the second is the ID in the original catalog, concatenated in one big catalog
# Galaxies_0-1.fits, Galaxies_1-2.fits, Galaxies_2-3.fits.
obstotrue = np.loadtxt('../Data/HorizonAGNLightconePhotometric/Match.dat')
# I prefer to work with index (starts at 0) than with ID (starts at 1), and the first column is
# useless because it is just the position in the array.
# galdata_allz[obstotrue[i]] = original galaxy corresponding to galphot[i]
obstotrue = obstotrue[:, 1] - 1
# add index of observed gal to each true gal
truetoobs = np.empty(galdata_allz.shape)
truetoobs[:] = -1
for idx_obs in range(len(obstotrue)):
truetoobs[obstotrue[idx_obs].astype('int')] = idx_obs
galdata_allz = rfn.append_fields(galdata_allz, 'Obs_gal_idx', truetoobs, usemask=False)
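# note (added for clarity): truetoobs maps an index in the original (true) catalogue
# to the corresponding index in the photometric catalogue, with -1 meaning "no
# photometric counterpart"; the loop above is equivalent to the vectorised assignment
# truetoobs[obstotrue.astype('int')] = np.arange(len(obstotrue))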
"""Plot MsObserved vs MsTrue"""
plt.figure()
x = np.arange(5, 13)
plt.hist2d(
np.log10(galdata_allz['Mass'][galdata_allz['Obs_gal_idx'] > 0]*10**11),
galphot['Mass'][galdata_allz[galdata_allz['Obs_gal_idx'] > 0]['Obs_gal_idx'].astype('int')],
cmin=1, bins=100, range=[[9, 12], [9, 12]], norm=mpl.colors.LogNorm(), cmap='jet'
)
plt.colorbar()
# plt.plot(x, x, label='y=x')
plt.xlabel('Original Mass', size=12)
plt.ylabel('Photometric mass', size=12)
plt.title('H-AGN, stellar photometric mass dispersion')
"""Compute median, average and percentiles for masses."""
# For true catalog
stellarmassbins = np.linspace(9, 12, num=100)
avHMperSM = np.full([numzbin, np.size(stellarmassbins)-1], np.nan)
medHMperSM = np.full([numzbin, np.size(stellarmassbins)-1], np.nan)
stdHMperSM = np.full([numzbin, np.size(stellarmassbins)-1], np.nan)
for i in range(numzbin - 1):
for j in range(np.size(stellarmassbins)-1):
m1 = stellarmassbins[j]
m2 = stellarmassbins[j+1]
# select indices of central galaxies with a mass
# between m1 and m2 :
indices = np.where(
np.logical_and(
np.logical_and(
np.log10(galdata[i]['Mass'][hal_centgal[i]-1]*10**11) > m1,
np.log10(galdata[i]['Mass'][hal_centgal[i]-1]*10**11) <= m2
),
hal_centgal[i] > 0
)
)
if np.size(indices) > 2:
avHMperSM[i, j] = np.average(np.log10(halodata[i]['Mass'][indices] * 10**11))
medHMperSM[i, j] = np.median(np.log10(halodata[i]['Mass'][indices] * 10**11))
stdHMperSM[i, j] = np.std(np.log10(halodata[i]['Mass'][indices] * 10**11))
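# note (added for clarity): stdHMperSM is the scatter of log10(Mh) at fixed stellar
# mass, i.e. an estimate of sigma_log(Mh) as a function of Ms for the
# stellar-to-halo mass relation of the true catalogue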
# For photometric catalog
stellarmassbins = np.linspace(9, 12, num=100)
avHMperSMPhot = np.full([numzbin, np.size(stellarmassbins)-1], np.nan)
medHMperSMPhot = np.full([numzbin, np.size(stellarmassbins)-1], np.nan)
stdHMperSMPhot = np.full([numzbin, np.size(stellarmassbins)-1], np.nan)
for i in range(numzbin-1):
for j in range(np.size(stellarmassbins)-1):
m1 = stellarmassbins[j]
m2 = stellarmassbins[j+1]
# select indices of central galaxies with a mass
# between m1 and m2 :
indices = np.where(
np.logical_and(
np.logical_and(
hal_centgal[i] > 0,
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
] > 0
),
np.logical_and(
galphot['Mass'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 +
sum(len(galdata[j]) for j in range(i))
].astype('int')
] > m1,
galphot['Mass'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 +
sum(len(galdata[j]) for j in range(i))
].astype('int')
] <= m2
),
)
)
if np.size(indices) > 2:
# print(np.size(indices))
avHMperSMPhot[i, j] = np.average(np.log10(halodata[i]['Mass'][indices] * 10**11))
medHMperSMPhot[i, j] = np.median(np.log10(halodata[i]['Mass'][indices] * 10**11))
stdHMperSMPhot[i, j] = np.std(np.log10(halodata[i]['Mass'][indices] * 10**11))
# stellarmassbins = np.linspace(8.1, 12, num=100)
# first_per = np.zeros([numzbin, np.size(stellarmassbins)-1])
# last_per = np.zeros([numzbin, np.size(stellarmassbins)-1])
# for i in range(numzbin):
# for j in range(np.size(stellarmassbins)-1):
# m1 = stellarmassbins[j]
# m2 = stellarmassbins[j+1]
# # select indices of central galaxies with a mass
# # between m1 and m2 :
# indices = np.where(
# np.logical_and(
# np.logical_and(
# np.log10(galdata[i]['Mass'][hal_centgal[i]-1]*10**11) > m1,
# np.log10(galdata[i]['Mass'][hal_centgal[i]-1]*10**11) <= m2
# ),
# hal_centgal[i] > 0
# )
# )
# if indices[0].size : #check if the array is not empty
# first_per[i,j] = np.percentile(np.log10(
# halodata[i]['Mass'][gal_mainhaloes[i][centGalInCentHalo[i][indices]]-1]*10**11), 10)
# last_per[i,j] = np.percentile(np.log10(
# halodata[i]['Mass'][gal_mainhaloes[i][centGalInCentHalo[i][indices]]-1]*10**11), 90)
# else:
# first_per[i,j] = numpy.nan
# last_per[i,j] = numpy.nan
"""Compute average and median Ms for a given Mh"""
massbins = np.linspace(10, 15, num=100)
avSMperHM = np.zeros([numzbin, np.size(massbins)-1])
medSMperHM = np.zeros([numzbin, np.size(massbins)-1])
for i in range(numzbin-1):
for j in range(np.size(massbins)-1):
m1 = massbins[j]
m2 = massbins[j+1]
# select indices of galaxies contained in the haloes with a mass
# between m1 and m2 :
indices = np.where(np.logical_and(
np.log10(halodata[i]['Mass']*10**11) > m1,
np.log10(halodata[i]['Mass']*10**11) <= m2))[0]
# indices_cent = np.intersect1d(indices, halodata[i]['level'] == 1)
if len(indices) > 0:
avSMperHM[i, j] = np.average(
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11))
medSMperHM[i, j] = np.median(
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11))
else:
avSMperHM[i, j] = np.nan
medSMperHM[i, j] = np.nan
"""Fit the Behroozi 2010 relation on Mh(Ms)"""
def boo_MhMs(Ms, M1, Ms0, beta, delta, gamma):
"""Behroozi et al. 2010 Mh(Ms) relation
All masses are in logscale"""
return M1+beta*(Ms-Ms0)+10**(delta*(Ms-Ms0))/(1+10**(-gamma*(Ms-Ms0)))-0.5
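# Quick sanity check of the functional form (illustrative parameter values, not fitted
# ones): at Ms = Ms0 the relation reduces to M1 + 1/2 - 1/2 = M1, whatever beta,
# delta and gamma are.
assert np.isclose(boo_MhMs(10.5, 12.0, 10.5, 0.4, 0.3, 1.5), 12.0)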
boo_fit_true = np.empty([numzbin-1, 5])
boo_cov_true = np.empty([numzbin-1, 5, 5])
for i in range(numzbin-1):
print(i)
indices = np.where(
np.logical_and(
np.log10(galdata[i]['Mass'][hal_centgal[i]-1]*10**11) > 9,
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1)
)
)
boo_fit_true[i], boo_cov_true[i] = curve_fit(
boo_MhMs,
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11),
np.log10(halodata[i]['Mass'][indices]*10**11),
bounds=[[10, 8, 0, 0, 0], [13, 11, 5, 5, 5]],
method='trf')
print(boo_fit_true)
boo_fit_phot = np.empty([numzbin-1, 5])
boo_cov_phot = np.empty([numzbin-1, 5, 5])
for i in range(numzbin-1):
print(i)
indices = np.where(
np.logical_and(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
np.logical_and(
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
] > 0,
galphot['Mass'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')
] > 9)
)
)
boo_fit_phot[i], boo_cov_phot[i] = curve_fit(
boo_MhMs,
galphot['Mass'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][indices] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')
],
np.log10(halodata[i]['Mass'][indices]*10**11),
bounds=[[10, 8, 0, 0, 0], [13, 11, 5, 5, 5]],
method='trf')
print(boo_fit_phot)
"""Plot Ms(Mh) for true galaxies and level 1 halos"""
boofitsSMbins = np.linspace(9, 12, num=100)
for i in range(numzbin-1):
plt.figure()
indices = np.where(np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1))
# verification that all galaxies selected are central
# print(galdata[i]['level'][hal_centgal[i][indices]-1].min())
plt.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11),
bins=100, cmin=1)
plt.colorbar()
# plt.scatter((massbins[:-1]+massbins[1:])/2, avSMperHM[i][:], color='red',
# label='Average SM for a given HM')
# plt.scatter((massbins[:-1]+massbins[1:])/2, medSMperHM[i][:],
# color='green', label='Median SM for a given HM')
plt.scatter(avHMperSM[i][:], (stellarmassbins[:-1]+stellarmassbins[1:])/2,
color='black', label='Average HM for a given SM')
plt.scatter(medHMperSM[i][:], (stellarmassbins[:-1]+stellarmassbins[1:])/2,
color='pink', label='Median HM for a given SM')
# Plot Behroozi fit
# plt.plot(boo_MhMs(boofitsSMbins, *boo_fit_true[i]), boofitsSMbins,
# label=str('True Behroozi function fit'), c='r')
plt.legend()
plt.xlabel('Log($M_{h}$) [Log($M_{\odot}$)]', size=12)
plt.ylabel('Log($M_{*}$) [Log($M_{\odot}$)]', size=12)
plt.title('HorizonAGN, Central galz='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.savefig('../Plots/HAGN_Matching/ClotMatchBis/TrueMass_HaloMass_Boofit' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
"""Plot Ms(Mh) on the same figure"""
fig, ax = plt.subplots(2, 2)
# fig.suptitle('Horizon AGN CONE, WARNING !! colorbars not homogeneous')
for i in range(3):
indices = np.where(np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1))
ax1 = ax[i//2, i % 2]
if i == 0:
counts, xedges, yedges, im = ax1.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11),
bins=100, cmin=1)
else:
_, _, _, im = ax1.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11),
bins=(xedges, yedges), cmin=1)
# Put the colorbar inside of the axes.
axins1 = inset_axes(
ax1,
width="5%", # width = 10% of parent_bbox width
height="35%", # height : 50%
loc=8,
bbox_to_anchor=[0.5, 0, 0.5, 1],
bbox_transform=ax1.transAxes
# bbox_transform=ax1.transAxes,
# bbox_to_anchor=(1, 1)
)
cbar = fig.colorbar(im, cax=axins1)
ax1.set_xlabel('Log($M_{h}/M_{\odot}$)', size=12)
ax1.set_ylabel('Log($M_{*}/M_{\odot}$)', size=12)
cbar.ax.tick_params(labelsize=9)
tick_locator = ticker.MaxNLocator(nbins=5)
cbar.locator = tick_locator
cbar.update_ticks()
ax1.plot(boo_MhMs(boofitsSMbins, *boo_fit_true[i]), boofitsSMbins,
label=str('Behroozi function fit'), c='r')
plt.text(0.1, 0.8, str(zbins_Cone[i])+'<z<'+str(zbins_Cone[i+1]),
size=12, transform=ax1.transAxes, bbox=dict(boxstyle='round', facecolor='white'))
fig.tight_layout()
# plt.subplots_adjust(top=0.95)
plt.show()
"""Plot Ms_observed(Mh) and level 1 halos"""
boofitsSMbins = np.linspace(9, 12, num=100)
for i in range(numzbin-1):
plt.figure()
indices = np.where(
np.logical_and(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
] > 0
)
)
plt.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
galphot['Mass'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][indices] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')
],
bins=100, cmin=1, range=[[10, 14], [9, 12]])
plt.colorbar()
# plt.errorbar(avHMperSMPhot[i][:], (stellarmassbins[:-1]+stellarmassbins[1:])/2,
# xerr=stdHMperSMPhot[i],
# color='red', label='Average HM for a given SM')
# plt.scatter(medHMperSMPhot[i][:], (stellarmassbins[:-1]+stellarmassbins[1:])/2,
# color='pink', label='Median HM for a given SM')
# Plot Behroozi fit
plt.plot(boo_MhMs(boofitsSMbins, *boo_fit_phot[i]), boofitsSMbins,
label=str('Behroozi function fit'), c='r')
plt.legend()
plt.xlabel('Log($M_{h}$) [Log($M_{\odot}$)]', size=12)
plt.ylabel('Log($M_{*}$) Photometric [Log($M_{\odot}$)]', size=12)
plt.title('HorizonAGN photo, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.savefig('../Plots/HAGN_Matching/ClotMatchBis/PhotoMass_HaloMass_Boofit' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
"""Plot Ms/Mh vs Mh for true and photometric catalogs"""
plt.figure()
# cmap = ['blue', 'green', 'red']
# marker = ['v', '>', '^']
for i in range(numzbin-1):
# plt.scatter(
# medHMperSM[i],
# (stellarmassbins[:-1]+stellarmassbins[1:]) / 2 - medHMperSM[i],
# label='True catalog, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]),
# edgecolors=cmap[i], facecolors='none'
# )
plt.plot(boo_MhMs(boofitsSMbins, *boo_fit_true[i]),
boofitsSMbins - boo_MhMs(boofitsSMbins, *boo_fit_true[i]),
label='z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.scatter(
# medHMperSMPhot[i],
# (stellarmassbins[:-1]+stellarmassbins[1:]) / 2 - medHMperSMPhot[i],
# label='Phot catalog, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]),
# edgecolors=cmap[i], facecolors=cmap[i]
# )
plt.legend()
plt.xlabel('Log($M_{h} / M_{\odot}$)', size=15)
plt.ylabel('Log($M_{s}/M_{h}$)', size=15)
"""Plot Ms/Mh histogram for true catalog"""
for i in range(numzbin-1):
plt.figure()
indices = np.where(np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1))
# verification that all galaxies selected are central
# print(galdata[i]['level'][hal_centgal[i][indices]-1].min())
plt.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]/halodata[i]['Mass'][indices]),
bins=100, cmin=1)
plt.colorbar()
plt.scatter(
medHMperSM[i],
(stellarmassbins[:-1]+stellarmassbins[1:]) / 2 - medHMperSM[i],
        label='True catalog, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]),
facecolors='none', color='red'
)
# Plot Behroozi fit
plt.plot(boo_MhMs(boofitsSMbins, *boo_fit_true[i]),
boofitsSMbins - boo_MhMs(boofitsSMbins, *boo_fit_true[i]),
label=str('Behroozi function fit'), c='r')
plt.legend()
plt.xlabel('Log($M_{h}$) [Log($M_{\odot}$)]', size=12)
    plt.ylabel('Log($M_{*}/M_{h}$)', size=12)
plt.title('HorizonAGN, Central galz='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
"""Plot Ms/Mh for photometric catalog and with median found with Ms(Mh)"""
for i in range(numzbin-1):
plt.figure()
indices = np.where(
np.logical_and(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
np.logical_and(
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
] > 0,
galphot['Mass'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')
] > 9
)
)
)
plt.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
galphot['Mass'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][indices] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')
] - np.log10(halodata[i]['Mass'][indices]*10**11),
bins=100, cmin=1, range=[[10, 14], [-2, 1]]
)
# plt.plot(
# medHMperSMPhot[i],
# (stellarmassbins[:-1]+stellarmassbins[1:]) / 2 - medHMperSMPhot[i],
# label='Phot catalog, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]),
# color='red'
# )
# plt.plot(
# boo_MhMs(boofitsSMbins, *boo_fit_phot[i]),
# boofitsSMbins - boo_MhMs(boofitsSMbins, *boo_fit_phot[i]),
# label=str('phot Behroozi function fit'), c='black')
plt.legend()
plt.xlabel('Log($M_{h}$) [Log($M_{\odot}$)]', size=15)
plt.ylabel('Log($M_{s}/M_{h}$)', size=15)
plt.title('H-AGN, Central gal and level 1 halos')
""""With gas mass"""
# plt.hist2d(
# np.log10(halodata[i]['Mass'][indices]*10**11),
# np.log10(10**galphot['Mass'][
# galdata_allz['Obs_gal_idx'][
# hal_centgal[i][indices] - 1 + sum(len(galdata[j]) for j in range(i))
# ].astype('int')
# ] + gas_mass[galdata_allz['Obs_gal_idx'][
# hal_centgal[i][indices] - 1 + sum(len(galdata[j]) for j in range(i))
# ].astype('int')]) - np.log10(halodata[i]['Mass'][indices]*10**11),
# bins=100, cmin=1, range=[[10, 14], [-2, 1]]
# )
"""Fit the Yang relation on the M*/Mh relation"""
def mstar_over_mh_yang(x, A, m1, beta, gamma):
"""Yang et al. 2012 function, see Moster et al. 2010."""
return 2.0 * A * ((x / m1)**(-beta) + (x / m1)**gamma)**(-1)
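# Quick sanity check of the functional form (illustrative parameter values, not fitted
# ones): at x = m1 the relation reduces to 2A / (1 + 1) = A, whatever beta and gamma are.
assert np.isclose(mstar_over_mh_yang(1e12, 0.02, 1e12, 0.5, 0.6), 0.02)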
# yang_fit_true = np.empty([numzbin, 4])
# yang_cov_true = np.empty([numzbin, 4, 4])
# for i in range(numzbin-1):
# yang_fit_true[i], yang_cov_true[i] = curve_fit(
# mstar_over_mh_yang,
# 10**medHMperSM[i][~np.isnan(medHMperSM[i])],
# 10**(((stellarmassbins[:-1]+stellarmassbins[1:]) / 2)[~np.isnan(medHMperSM[i])] -
# medHMperSM[i][~np.isnan(medHMperSM[i])]),
# sigma=stdHMperSM[i][~np.isnan(medHMperSM[i])],
# p0=[0.01, 10**12, 0.1, 0.1],
# bounds=[[0, 10**9, 0, 0], [0.5, 10**14, 5, 5]], method='trf')
# yang_fit_phot = np.empty([numzbin-1, 4])
# yang_cov_phot = np.empty([numzbin-1, 4, 4])
# for i in range(numzbin-1):
# yang_fit_phot[i], yang_cov_phot[i] = curve_fit(
# mstar_over_mh_yang,
# 10**medHMperSMPhot[i][~np.isnan(medHMperSMPhot[i])],
# 10**(((stellarmassbins[:-1]+stellarmassbins[1:]) / 2)[~np.isnan(medHMperSMPhot[i])] -
# medHMperSMPhot[i][~np.isnan(medHMperSMPhot[i])]),
# sigma=stdHMperSMPhot[i][~np.isnan(medHMperSMPhot[i])],
# p0=[0.01, 10**12, 0.5, 0.1],
# bounds=[[0, 10**10, 0, 0], [0.5, 10**13, 5, 5]], method='trf')
# print(yang_fit_phot)
yang_fit_true = np.empty([numzbin-1, 4])
yang_cov_true = np.empty([numzbin-1, 4, 4])
for i in range(numzbin-1):
print(i)
indices = np.where(
np.logical_and(
np.logical_and(
np.log10(galdata[i]['Mass'][hal_centgal[i]-1]*10**11) > 9,
np.log10(halodata[i]['Mass']*10**11) > 10.8),
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1)
)
)
yang_fit_true[i], yang_cov_true[i] = curve_fit(
mstar_over_mh_yang,
halodata[i]['Mass'][indices]*10**11,
galdata[i]['Mass'][hal_centgal[i][indices]-1] / halodata[i]['Mass'][indices],
p0=[0.01, 10**12, 0.1, 0.1],
bounds=[[0, 10**9, 0, 0], [0.5, 10**14, 5, 5]], method='trf')
print(yang_fit_true)
yang_fit_phot = np.empty([numzbin-1, 4])
yang_cov_phot = np.empty([numzbin-1, 4, 4])
for i in range(numzbin-1):
yang_fit_phot[i], yang_cov_phot[i] = curve_fit(
mstar_over_mh_yang,
10**medHMperSMPhot[i][~np.isnan(medHMperSMPhot[i])],
10**(((stellarmassbins[:-1]+stellarmassbins[1:]) / 2)[~np.isnan(medHMperSMPhot[i])] -
medHMperSMPhot[i][~np.isnan(medHMperSMPhot[i])]),
sigma=stdHMperSMPhot[i][~np.isnan(medHMperSMPhot[i])],
p0=[0.01, 10**12, 0.5, 0.1],
bounds=[[0, 10**10, 0, 0], [0.5, 10**13, 5, 5]], method='trf')
print(yang_fit_phot)
"""Plot Yang fit"""
x = np.logspace(10, 14, num=1000)
for i in range(numzbin-1):
plt.figure()
indices = np.where(
np.logical_and(
np.log10(galdata[i]['Mass'][hal_centgal[i]-1]*10**11) > 9,
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1)
)
)
plt.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1] / halodata[i]['Mass'][indices]),
bins=100, cmin=1, range=[[10.3, 13], [-2.5, -0.5]]
)
p = plt.plot(
np.log10(x), np.log10(mstar_over_mh_yang(x, *yang_fit_true[i])),
label=str('Moster et al. fit'), c='b')
plt.plot(boo_MhMs(boofitsSMbins, *boo_fit_true[i]),
boofitsSMbins - boo_MhMs(boofitsSMbins, *boo_fit_true[i]),
label=str('Behroozi et al. fit'), c='r')
plt.xlabel('Log($M_{h} / M_{\odot}$)', size=15)
plt.ylabel('Log($M_{s}/M_{h}$)', size=15)
plt.legend()
    plt.text(0.1, 0.1, str(zbins_Cone[i])+'<z<'+str(zbins_Cone[i+1]),
             size=12, transform=plt.gca().transAxes, bbox=dict(boxstyle='round', facecolor='white'))
plt.tight_layout()
plt.savefig('../Plots/HAGN_Matching/ClotMatchBis/True_MsonMH_fits' +
str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
x = np.logspace(10, 14, num=1000)
plt.figure()
for i in range(numzbin-1):
# p = plt.plot(np.log10(x), np.log10(mstar_over_mh_yang(x, *yang_fit_true[i])))
# plt.plot(np.log10(x), np.log10(mstar_over_mh_yang(x, *yang_fit_phot[i])),
# color=p[0].get_color())
# plt.scatter(
# medHMperSM[i],
# (stellarmassbins[:-1]+stellarmassbins[1:]) / 2 - medHMperSM[i],
# facecolors='none', edgecolors=p[0].get_color(),
# label='True catalog, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])
# )
# plt.scatter(
# medHMperSM[i],
# (stellarmassbins[:-1]+stellarmassbins[1:]) / 2 - medHMperSMPhot[i],
# facecolors='none', edgecolors=p[0].get_color(),
# label='Photo catalog, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])
# )
# Plot Behroozi fit
p = plt.plot(
boo_MhMs(boofitsSMbins, *boo_fit_true[i]),
boofitsSMbins - boo_MhMs(boofitsSMbins, *boo_fit_true[i]),
label='z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
plt.plot(boo_MhMs(boofitsSMbins, *boo_fit_phot[i]),
boofitsSMbins - boo_MhMs(boofitsSMbins, *boo_fit_phot[i]),
linestyle ='--', color=p[0].get_color())
plt.legend()
plt.xlabel('Log($M_{h} / M_{\odot}$)', size=15)
plt.ylabel('Log($M_{s}/M_{h}$)', size=15)
plt.show()
"""Find MhPeak with the Yanf fit"""
MhaloPeak_true = np.zeros(numzbin-1)
for i in range(numzbin-1):
MhaloPeak_true[i] = np.log10(x[np.argmax(mstar_over_mh_yang(x, *yang_fit_true[i]))])
MhaloPeak_phot = np.zeros(numzbin-1)
for i in range(numzbin-1):
MhaloPeak_phot[i] = np.log10(x[np.argmax(mstar_over_mh_yang(x, *yang_fit_phot[i]))])
"""Find MhPeak with Behroozi fit"""
MhaloPeak_true_boo = np.zeros(numzbin-1)
for i in range(numzbin-1):
idx_max = np.argmax(boofitsSMbins - boo_MhMs(boofitsSMbins, *boo_fit_true[i]))
MhaloPeak_true_boo[i] = boo_MhMs(boofitsSMbins[idx_max], *boo_fit_true[i])
# MhaloPeak_phot_boo = np.zeros(numzbin-1)
# for i in range(numzbin-1):
# MhaloPeak_phot_boo[i] = np.log10(x[np.argmax(mstar_over_mh_yang(x, *yang_fit_phot[i]))])
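# Sketch of the photometric analogue (assumption: same procedure as MhaloPeak_true_boo,
# applied to the Behroozi fit of the photometric catalogue, boo_fit_phot).
MhaloPeak_phot_boo = np.zeros(numzbin-1)
for i in range(numzbin-1):
    idx_max = np.argmax(boofitsSMbins - boo_MhMs(boofitsSMbins, *boo_fit_phot[i]))
    MhaloPeak_phot_boo[i] = boo_MhMs(boofitsSMbins[idx_max], *boo_fit_phot[i])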
"""Plot MhaloPeak versus z"""
# Leauthaud et al. use a different cosmology with H0=72
redshiftLeauthaud = np.array([(0.22 + 0.48) / 2, (0.48 + 0.74) / 2, (0.74 + 1) / 2])
MhaloPeakLeauthaud = np.log10(np.array([9.5 * 10**11, 1.45 * 10**12, 1.4 * 10**12]))
MhaloSigmaLeauthaud = np.log10(np.array(
[1.05 * 10**12, 1.55 * 10**12, 1.5 * 10**12])) - MhaloPeakLeauthaud
# Load Coupon+17 draft Peak values
# We use PeakPosMCMCMean and PeakPosMCMCstd
# Values are given in Log10(Mh*h^-1 Msun)
redshiftCoupon17 = np.array([0.34, 0.52, 0.70, 0.90, 1.17, 1.50,
1.77, 2.15, 2.75, 3.37, 3.96, 4.83])
MhaloPeakCoupon17 = np.zeros([np.size(redshiftCoupon17)])
MhaloSigmaCoupon17 = np.zeros([np.size(redshiftCoupon17)])
for i in range(len(redshiftCoupon17)):
MhaloPeakCoupon17[i], MhaloSigmaCoupon17[i] = np.loadtxt(
'../Data/Coupon17/peak/peak_{:1.2f}.ascii'.format(redshiftCoupon17[i]),
usecols=(2, 3))
plt.figure()
# plt.plot((zbins_Cone[1:-1]+zbins_Cone[:-2])/2, MhaloPeak_true, 'd',
# label='Original Catalog')
# plt.plot((zbins_Cone[1:-1]+zbins_Cone[:-2])/2, MhaloPeak_phot, 'd',
# label='Photometric Catalog')
# Coming From AM__COSMOSIari_BolshoiPlanc.py
plt.errorbar((redshifts[1:] + redshifts[:-1]) / 2, MhaloPeak + np.log10(67.74/70),
yerr=np.transpose(MhaloPeakSigma),
fmt='o', color='red', capsize=5, label='Cosmos AM')
plt.errorbar(
(zbins_Cone[1:-1]+zbins_Cone[:-2])/2, MhaloPeak_true_boo,
yerr=0.1, fmt='o', capsize=5, c='g',
label='Horizon-AGN Lightcone')
plt.errorbar(redshiftCoupon17, MhaloPeakCoupon17 - np.log10(0.7),
yerr=MhaloSigmaCoupon17, c='b',
fmt='o', capsize=5, label='Coupon et al. 2017 Draft')
plt.errorbar(redshiftLeauthaud, MhaloPeakLeauthaud + np.log10(72/70),
yerr=MhaloSigmaLeauthaud, c='black',
fmt='o', capsize=5, label='Leauthaud et al. 2011')
plt.ylabel('Log($M_{halo}^{peak}/ M_{\odot}$)', size=15)
plt.xlabel('Redshift', size=15)
plt.legend(loc=2)
# plt.title('Horizon-AGN, MhaloPeak')
plt.tight_layout()
"""Plot sSFR vs Mh for true catalogs"""
for i in range(numzbin-1):
plt.figure()
indices = np.where(np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1))
# verification that all galaxies selected are central
# print(galdata[i]['level'][hal_centgal[i][indices]-1].min())
plt.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
# np.log10(galdata[i]['SFRCorr'][hal_centgal[i][indices]-1] /
# (galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11)),
        np.log10(galdata[i]['SFRcorr'][hal_centgal[i][indices]-1]),
bins=100, cmin=1, range=[[10, 14], [-2, 2]])
plt.colorbar()
plt.xlabel('Log($M_{h}$) [Log($M_{\odot}$)]', size=12)
plt.ylabel('Log(SFR) [Log($yr^{-1}$)]', size=12)
plt.title('HorizonAGN, Central galz='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.savefig('../Plots/HAGN_Matching/ClotMatchBis/TrueSpecificSFR_HaloMass' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
# TODO : compute median sSFR for true and photo galaxies
"""Plot SFR vs Mh for photo catalogs"""
# TODO plot only for Ms > 10**9
for i in range(numzbin-1):
plt.figure()
indices = np.where(
np.logical_and(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
] > 0
)
)
plt.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
        np.log10(galphot['SFR'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][indices] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')
]),
bins=100, cmin=1)
plt.colorbar()
plt.legend()
plt.xlabel('Log($M_{h}$) [Log($M_{\odot}$)]', size=12)
plt.ylabel('Log(SFR) Photometric [Log($M_{\odot}/yr$)]', size=12)
plt.title('HorizonAGN, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.savefig('../Plots/HAGN_Matching/ClotMatch/PhotoSFR_HaloMass' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
"""PLot sSFR vs Mh for photo cat"""
for i in range(numzbin-1):
plt.figure()
indices = np.where(
np.logical_and(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
np.logical_and(
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
] > 0,
galphot['Mass'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')] > 9
)
)
)
plt.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
np.log10(galphot['SFR'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][indices] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')
] / 10**(galphot['Mass'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][indices] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')])
),
bins=100, cmin=1, range=[[10, 14], [-13.5, -6.5]])
plt.colorbar()
plt.legend()
plt.xlabel('Log($M_{h}$) [Log($M_{\odot}$)]', size=12)
plt.ylabel('Log(sSFR) Photometric [Log($yr^{-1}$)]', size=12)
plt.title('HorizonAGN, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
plt.savefig('../Plots/HAGN_Matching/ClotMatchBis/PhotoSpecificSFR_HaloMass' +
str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
"""Gas Met vs Mh for photo catalog"""
# Load gas mass and gas met
gas_mass, gas_met = np.loadtxt('../Data/HorizonAGNLightconePhotometric/GasInfo.dat', unpack=True)
# Add a column with gas mass and metalicity in galphot catalog
galphot = rfn.append_fields(galphot, 'Gas_mass', gas_mass, usemask=False)
galphot = rfn.append_fields(galphot, 'Gas_met', gas_met, usemask=False)
def boost(z):
"""Boost the metalicity of gas and stars because of the low resolution of H-AGN."""
return 4.08430 - 0.213574 * z - 0.111197 * z**2
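# Quick check, straight from the polynomial above: at z = 0 the boost factor is 4.08430.
assert np.isclose(boost(0.), 4.08430)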
# Compute boosted Metalicity for photometric catalog
gas_met_boost = np.empty(gas_met.shape)
for idx_phot in range(len(gas_met_boost)):
gas_met_boost[idx_phot] = gas_met[idx_phot] * boost(
galdata_allz['z'][obstotrue[idx_phot].astype('int')])
# Add a column on gal_phot
galphot = rfn.append_fields(galphot, 'Gas_met_boost', gas_met_boost, usemask=False)
plt.close('all')
"""Compute Median Metalicity per halo mass and 68% interval."""
massbins = np.linspace(10, 15, num=100)
medMetperHMPhot = np.zeros([numzbin, np.size(massbins)-1])
avMetperHMPhot = np.zeros([numzbin, np.size(massbins)-1])
stdMetperHMPhot = np.zeros([numzbin, np.size(massbins)-1])
# supMetperHM = np.zeros([numzbin, np.size(massbins)-1])
# infMetperHM = np.zeros([numzbin, np.size(massbins)-1])
for i in range(numzbin-1):
indices_selec = np.where(
np.logical_and(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
np.logical_and(
np.logical_and(
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
] > 0,
galphot['Gas_met_boost'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')
]
),
galphot['Mass'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')
] > 9
)
)
)
gal_gasmet = galphot['Gas_met_boost'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')]
for j in range(np.size(massbins)-1):
m1 = massbins[j]
m2 = massbins[j+1]
indices = np.where(np.logical_and(
np.log10(halodata[i]['Mass']*10**11) > m1,
np.log10(halodata[i]['Mass']*10**11) <= m2))[0]
indices = np.intersect1d(indices_selec, indices)
if len(indices) > 4:
avMetperHMPhot[i, j] = np.average(gal_gasmet[indices])
medMetperHMPhot[i, j] = np.median(gal_gasmet[indices])
stdMetperHMPhot[i, j] = np.std(gal_gasmet[indices])
else:
avMetperHMPhot[i, j] = np.nan
medMetperHMPhot[i, j] = np.nan
stdMetperHMPhot[i, j] = np.nan
"""Plot Gas metalicity vs Mh for photo galaxies"""
# TODO: problem with certain galaxies having a gas metalicity of 0
for i in range(numzbin-1):
plt.figure()
indices = np.where(
np.logical_and(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
np.logical_and(
np.logical_and(
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
] > 0,
galphot['Gas_met_boost'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')
]
),
galphot['Mass'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')
] > 9
)
)
)
plt.hist2d(
#np.log10(halodata[i]['Mass'][indices]*10**11),
np.log10(haloes_env[i][indices, 0][0]),
galphot['Gas_met_boost'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][indices] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')
],
bins=100, cmin=1)
plt.colorbar()
# plt.plot((massbins[:-1]+massbins[1:])/2, avMetperHMPhot[i],
# color='red', label='Average Metalicity for a given HM, $\pm 1\sigma$')
# plt.plot((massbins[:-1]+massbins[1:])/2, avMetperHMPhot[i] + stdMetperHMPhot[i],
# color='red', linestyle='--')
# plt.plot((massbins[:-1]+massbins[1:])/2, avMetperHMPhot[i] - stdMetperHMPhot[i],
# color='red', linestyle='--')
# plt.errorbar((massbins[:-1]+massbins[1:])/2, avMetperHMPhot[i][:],
# color='red', yerr=stdMetperHMPhot[i],
# label='Average Metalicity for a given HM')
# plt.scatter((massbins[:-1]+massbins[1:])/2, medMetperHMPhot[i][:],
# color='green', label='Median Metalicity for a given HM')
plt.legend()
    plt.xlabel('Halo local density smoothed at 3Mpc (log)', size=12)
plt.ylabel('Gas Metalicity', size=12)
plt.title('Photometric HorizonAGN, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.savefig('../Plots/HAGN_Matching/ClotMatchBis/GasMet/gasmet_' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
"""Evolution of photometric Gas metalicity with redshift"""
plt.figure()
for i in range(numzbin-1):
plt.plot((massbins[:-1]+massbins[1:])/2, avMetperHMPhot[i],
label='z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
plt.fill_between(
(massbins[:-1]+massbins[1:])/2, avMetperHMPhot[i] + stdMetperHMPhot[i],
avMetperHMPhot[i] - stdMetperHMPhot[i], alpha=0.3,
linestyle='--')
plt.legend()
plt.xlabel('Log($M_{h}$) [Log($M_{\odot}$)]', size=12)
plt.ylabel('Gas Metalicity', size=12)
plt.title('Photometric HorizonAGN Gas metalicity')
plt.tight_layout()
"""Boost stellar metalicity in True catalog"""
stellar_met_boost = np.empty(galdata_allz['met'].shape)
for idx_true in range(len(stellar_met_boost)):
stellar_met_boost[idx_true] = galdata_allz['met'][idx_true] * boost(
galdata_allz['z'][idx_true])
galdata_allz = rfn.append_fields(galdata_allz, 'Stellar_met_boost',
stellar_met_boost, usemask=False)
"""Compute average of stellar metalicity and standard deviation"""
massbins = np.linspace(10, 15, num=100)
medMetperHMtrue = np.zeros([numzbin, np.size(massbins)-1])
avMetperHMtrue = np.zeros([numzbin, np.size(massbins)-1])
stdMetperHMtrue = np.zeros([numzbin, np.size(massbins)-1])
# supMetperHM = np.zeros([numzbin, np.size(massbins)-1])
# infMetperHM = np.zeros([numzbin, np.size(massbins)-1])
for i in range(numzbin-1):
indices_selec = np.where(np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1))
gal_stemet = galdata_allz['Stellar_met_boost'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))]
for j in range(np.size(massbins)-1):
m1 = massbins[j]
m2 = massbins[j+1]
indices = np.where(np.logical_and(
np.log10(halodata[i]['Mass']*10**11) > m1,
np.log10(halodata[i]['Mass']*10**11) <= m2))[0]
indices = np.intersect1d(indices_selec, indices)
if len(indices) > 0:
avMetperHMtrue[i, j] = np.average(gal_stemet[indices])
medMetperHMtrue[i, j] = np.median(gal_stemet[indices])
stdMetperHMtrue[i, j] = np.std(gal_stemet[indices])
else:
avMetperHMtrue[i, j] = np.nan
medMetperHMtrue[i, j] = np.nan
stdMetperHMtrue[i, j] = np.nan
"""Plot Stellar Met vs Mh for photo catalogs"""
for i in range(numzbin-1):
plt.figure()
indices = np.where(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
)
plt.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
galdata_allz['Stellar_met_boost'][
hal_centgal[i][indices] - 1 + sum(len(galdata[j]) for j in range(i))],
bins=100, cmin=1
)
plt.colorbar()
plt.plot((massbins[:-1]+massbins[1:])/2, avMetperHMtrue[i],
color='red', label='Average Metalicity for a given HM, $\pm 1\sigma$')
plt.plot((massbins[:-1]+massbins[1:])/2, avMetperHMtrue[i] + stdMetperHMtrue[i],
color='red', linestyle='--')
plt.plot((massbins[:-1]+massbins[1:])/2, avMetperHMtrue[i] - stdMetperHMtrue[i],
color='red', linestyle='--')
plt.legend()
plt.xlabel('Log($M_{h}$) [Log($M_{\odot}$)]', size=12)
plt.ylabel('Stellar Metalicity', size=12)
plt.title('Original HorizonAGN, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.savefig('../Plots/HAGN_Matching/ClotMatch/StellarMet/stellarmet_' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
"""Evolution of stellar metalicity with redshift"""
plt.figure()
for i in range(numzbin-1):
plt.plot((massbins[:-1]+massbins[1:])/2, avMetperHMtrue[i],
label='z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
plt.fill_between(
(massbins[:-1]+massbins[1:])/2, avMetperHMtrue[i] + stdMetperHMtrue[i],
avMetperHMtrue[i] - stdMetperHMtrue[i], alpha=0.3,
linestyle='--')
plt.legend()
plt.xlabel('Log($M_{h}$) [Log($M_{\odot}$)]', size=12)
plt.ylabel('Stellar Metalicity', size=12)
plt.title('Original HorizonAGN Stellar metalicity')
plt.tight_layout()
"""Compare Photometric Gas Metalicity and Original Stellar Metalicity"""
for i in range(numzbin-1):
plt.figure()
plt.plot((massbins[:-1]+massbins[1:])/2, avMetperHMPhot[i],
color='green', label='Photometric Gas Metalicity $\pm 1\sigma$')
plt.fill_between(
(massbins[:-1]+massbins[1:])/2, avMetperHMPhot[i] + stdMetperHMPhot[i],
avMetperHMPhot[i] - stdMetperHMPhot[i], alpha=0.3,
color='green', linestyle='--')
plt.plot((massbins[:-1]+massbins[1:])/2, avMetperHMtrue[i],
color='red', label='True Stellar Metalicity $\pm 1\sigma$')
plt.fill_between(
(massbins[:-1]+massbins[1:])/2, avMetperHMtrue[i] + stdMetperHMtrue[i],
avMetperHMtrue[i] - stdMetperHMtrue[i], alpha=0.3,
color='red', linestyle='--')
plt.legend()
plt.xlabel('Log($M_{h}$) [Log($M_{\odot}$)]', size=12)
plt.ylabel('Metalicity', size=12)
plt.title('HorizonAGN, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.savefig('../Plots/HAGN_Matching/ClotMatch/Gas+StellarMet/gas+stellarmet_' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
plt.close('all')
"""Compute average stellar met for a given halo local density"""
densbins = np.linspace(-2.5, 1, num=100)
medMetperHDtrue = np.zeros([numzbin, np.size(densbins)-1])
avMetperHDtrue = np.zeros([numzbin, np.size(densbins)-1])
stdMetperHDtrue = np.zeros([numzbin, np.size(densbins)-1])
# supMetperHM = np.zeros([numzbin, np.size(massbins)-1])
# infMetperHM = np.zeros([numzbin, np.size(massbins)-1])
for i in range(numzbin-1):
indices_selec = np.where(np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1))
gal_stemet = galdata_allz['Stellar_met_boost'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))]
for j in range(np.size(densbins)-1):
d1 = densbins[j]
d2 = densbins[j+1]
indices = np.where(np.logical_and(
np.log10(haloes_env[i][:, 0]) > d1,
np.log10(haloes_env[i][:, 0]) <= d2))[0]
indices = np.intersect1d(indices_selec, indices)
if len(indices) > 0:
avMetperHDtrue[i, j] = np.average(gal_stemet[indices])
medMetperHDtrue[i, j] = np.median(gal_stemet[indices])
stdMetperHDtrue[i, j] = np.std(gal_stemet[indices])
else:
avMetperHDtrue[i, j] = np.nan
medMetperHDtrue[i, j] = np.nan
stdMetperHDtrue[i, j] = np.nan
"""Evolution of stellar metalicity with environment density"""
for i in range(numzbin-1):
plt.figure()
indices = np.where(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
)
plt.hist2d(
np.log10(haloes_env[i][indices, 0][0]),
galdata_allz['Stellar_met_boost'][
hal_centgal[i][indices] - 1 + sum(len(galdata[j]) for j in range(i))],
bins=100, cmin=1
)
plt.colorbar()
plt.plot((densbins[:-1]+densbins[1:])/2, avMetperHDtrue[i],
color='red', label='Average Original Stellar Metalicity $\pm 1\sigma$')
plt.fill_between(
(densbins[:-1]+densbins[1:])/2, avMetperHDtrue[i] + stdMetperHDtrue[i],
avMetperHDtrue[i] - stdMetperHDtrue[i], alpha=0.3,
color='red', linestyle='--')
plt.legend()
plt.xlabel('Halo local density smoothed at 3Mpc (log)', size=12)
plt.ylabel('Stellar Metalicity', size=12)
plt.title('Original HorizonAGN, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
plt.tight_layout()
# plt.savefig('../Plots/HAGN_Matching/ClotMatch/StellarMet/Stellarmet_Density_' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
"""Density of haloes versus halo mass"""
for i in range(numzbin-1):
plt.figure()
# Comment this if you want to plot all haloes and not only central haloes with central galaxies
indices = np.where(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
)
plt.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
np.log10(haloes_env[i][indices, 0][0]),
bins=100, cmin=1
)
plt.colorbar()
plt.legend()
plt.xlabel('Halo Mass', size=12)
plt.ylabel('Halo local density', size=12)
plt.title('Original HorizonAGN, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
plt.tight_layout()
"""Original Ms/Mh versus density"""
# compute average and std deviation
densbins = np.linspace(-2.5, 1, num=100)
medMSMHperHDtrue = np.zeros([numzbin, np.size(densbins)-1])
avMSMHperHDtrue = np.zeros([numzbin, np.size(densbins)-1])
stdMSMHperHDtrue = np.zeros([numzbin, np.size(densbins)-1])
# supMetperHM = np.zeros([numzbin, np.size(massbins)-1])
# infMetperHM = np.zeros([numzbin, np.size(massbins)-1])
for i in range(numzbin-1):
indices_selec = np.where(np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1))
for j in range(np.size(densbins)-1):
d1 = densbins[j]
d2 = densbins[j+1]
indices = np.where(np.logical_and(
np.log10(haloes_env[i][:, 0]) > d1,
np.log10(haloes_env[i][:, 0]) <= d2))[0]
indices = np.intersect1d(indices_selec, indices)
if len(indices) > 0:
avMSMHperHDtrue[i, j] = np.average(
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1] /
halodata[i]['Mass'][indices]))
medMSMHperHDtrue[i, j] = np.median(
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1] /
halodata[i]['Mass'][indices]))
stdMSMHperHDtrue[i, j] = np.std(
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1] /
halodata[i]['Mass'][indices]))
else:
avMSMHperHDtrue[i, j] = np.nan
medMSMHperHDtrue[i, j] = np.nan
stdMSMHperHDtrue[i, j] = np.nan
"""Plot Original Ms/Mh versus density"""
for i in range(numzbin-1):
plt.figure()
indices = np.where(np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1))
# indices = np.where(hal_centgal[i] > 0)
plt.hist2d(
np.log10(haloes_env[i][indices, 0][0]),
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1] /
halodata[i]['Mass'][indices]),
bins=100, cmin=1)
plt.colorbar()
plt.plot((densbins[:-1]+densbins[1:])/2, avMSMHperHDtrue[i],
color='red', label='Average $\pm 1\sigma$')
plt.fill_between(
(densbins[:-1]+densbins[1:])/2, avMSMHperHDtrue[i] + stdMSMHperHDtrue[i],
avMSMHperHDtrue[i] - stdMSMHperHDtrue[i], alpha=0.3,
color='red', linestyle='--')
plt.legend()
plt.xlabel('Log(Halo density)', size=12)
plt.ylabel('Log($M_{*}/M_{h}$)', size=12)
plt.title('Original HorizonAGN, Central gal, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.savefig('../Plots/HAGN_Matching/ClotMatch/Density/dens_msmh' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
"""Plot Hexbins of everything for original lightcone catalog"""
# Trace a line for node distance vs halo mass
# x = np.linspace(10, 14)
# y = 0.375*x - 4.75
for i in range(numzbin-1):
plt.figure()
indices = np.where(np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1))
# indices = np.where(np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] > 1))
# indices = np.where(hal_centgal[i] > 0)
plt.hexbin(
# np.log10(halodata[i]['mass'][indices]*10**11),
# np.log10(galdata[i]['mass'][hal_centgal[i][indices]-1]*10**11),
np.log10(halodata[i]['Mass'][indices]*10**11),
# np.log10(halodata[i]['mvir'][indices]*10**11),
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11),
# np.log10(haloes_env[i][indices, 1][0]),
C=np.log10(galdata[i]['SFRcorr'][hal_centgal[i][indices]-1] /
(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11)),
# C=np.log10(galdata[i]['spin'][hal_centgal[i][indices]-1]),
# C=np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]/halodata[i]['Mass'][indices]),
# C=np.log10(haloes_env[i][indices, 1][0]),
# C=np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]),
gridsize=60, mincnt=50, cmap='jet', extent=[10, 14, 8, 12]
)
cb = plt.colorbar()
cb.set_label('Log(sSFR)', size=12)
plt.xlabel('Log(Halo Mass)', size=12)
plt.ylabel('Log(Stellar Mass)', size=12)
plt.title('Original HorizonAGN, Central haloes, z=' +
str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.savefig('../Plots/HorizonAGN/Hexbins/NodesFilaments/HM_Fil_MsMh_' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
"""Plot Hexbins on one fig"""
boofitsSMbins = np.linspace(8, 12, num=100)
fig = plt.figure(figsize=(12, 4))
gs = GridSpec(1, 3, width_ratios=[1, 1, 1])
for i in range(3):
ax1 = plt.subplot(gs[i])
indices = np.where(np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1))
# indices = np.where(np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] > 1))
# indices = np.where(hal_centgal[i] > 0)
im = ax1.hexbin(
halodata[i]['z'][indices],
        np.log10(halodata[i]['Mass'][indices]*10**11),
# np.log10(galdata[i]['mass'][hal_centgal[i][indices]-1]*10**11),
# np.log10(halodata[i]['Mass'][indices]*10**11),
# np.log10(halodata[i]['mvir'][indices]*10**11),
# np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11),
# np.log10(haloes_env[i][indices, 0][0]),
C=np.log10(galdata[i]['SFRcorr'][hal_centgal[i][indices]-1] /
(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11)),
# C=np.log10(galdata[i]['SFRcorr'][hal_centgal[i][indices]-1]),
# C=np.log10(galdata[i]['spin'][hal_centgal[i][indices]-1]),
# C=np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]/halodata[i]['Mass'][indices]),
# C=np.log10(haloes_env[i][indices, 1][0]),
# C=np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]),
# gridsize=60, mincnt=50, cmap='jet', extent=[10, 14, 8, 12]
gridsize=60, mincnt=50, cmap='jet', # vmin=-10.3, vmax=-8.7,
extent=[zbins_Cone[i], zbins_Cone[i+1], 10, 12.5]
)
# ax1.plot(boo_MhMs(boofitsSMbins, *boo_fit_true[i]),
# boofitsSMbins,
# label=str('Behroozi function fit'), c='black')
# ax1.axvline(x=MhaloPeak_true_boo[i], linestyle='--')
axins1 = inset_axes(
ax1,
width="10%", # width = 10% of parent_bbox width
height="35%", # height : 50%
loc=8,
bbox_to_anchor=[0.5, 0, 0.5, 1],
bbox_transform=ax1.transAxes
)
cbar = fig.colorbar(im, cax=axins1)
# cbar.set_label('Log(sSFR)', size=10)
# ax1.set_xlabel('Log($M_{h}/M_{\odot}$)', size=12)
# ax1.set_ylabel('Log($M_{*}/M_{\odot}$)', size=12)
# cbar.set_label('Log(Ms)', size=10)
    cbar.set_label('Log(sSFR)', size=10)
# ax1.set_xlabel('Log($M_{h}/M_{\odot}$)', size=12)
ax1.set_xlabel('Redshift', size=12)
# ax1.set_ylabel('Log($d_{node}/Mpc$)', size=12)
# ax1.set_ylabel('Log($n(halo)/Mpc^{-3}$)', size=12)
ax1.set_ylabel('Log($M_{h}/M_{\odot}$)', size=12)
cbar.ax.tick_params(labelsize=9)
tick_locator = ticker.MaxNLocator(nbins=5)
cbar.locator = tick_locator
cbar.update_ticks()
plt.text(0.7, 0.9, str(zbins_Cone[i])+'<z<'+str(zbins_Cone[i+1]),
size=12, transform=ax1.transAxes, bbox=dict(boxstyle='round', facecolor='white'))
fig.tight_layout()
"""Plot sSFR hexbins for photo catalog"""
fig = plt.figure(figsize=(12, 4))
gs = GridSpec(1, 3, width_ratios=[1, 1, 1])
for i in range(3):
ax1 = plt.subplot(gs[i])
indices = np.where(
np.logical_and(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
np.logical_and(
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
] > 0,
galphot['Mass'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')] > 9
)
)
)
# indices = np.where(np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] > 1))
# indices = np.where(hal_centgal[i] > 0)
im = ax1.hexbin(
np.log10(halodata[i]['Mass'][indices]*10**11),
galphot['Mass'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][indices] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')
],
C=np.log10(galphot['SFR'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][indices] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')
] / 10**(galphot['Mass'][
galdata_allz['Obs_gal_idx'][
hal_centgal[i][indices] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')])
),
gridsize=60, mincnt=50, cmap='jet', extent=[10, 14, 8, 12],
)
axins1 = inset_axes(
ax1,
width="10%", # width = 10% of parent_bbox width
height="35%", # height : 50%
loc=8,
bbox_to_anchor=[0.5, 0, 0.5, 1],
bbox_transform=ax1.transAxes
)
cbar = fig.colorbar(im, cax=axins1)
# cbar.set_label('Log(sSFR)', size=10)
ax1.set_xlabel('Log($M_{h}/M_{\odot}$)', size=12)
ax1.set_ylabel('Log($M_{*}/M_{\odot}$)', size=12)
cbar.ax.tick_params(labelsize=9)
tick_locator = ticker.MaxNLocator(nbins=5)
cbar.locator = tick_locator
cbar.update_ticks()
plt.text(0.1, 0.9, str(zbins_Cone[i])+'<z<'+str(zbins_Cone[i+1]),
size=12, transform=ax1.transAxes, bbox=dict(boxstyle='round', facecolor='white'))
fig.tight_layout()
"""Plot sSFR versus Halo mass"""
for i in range(numzbin-1):
plt.figure()
indices = np.where(
np.logical_and(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
np.log10(halodata[i]['Mass']*10**11) > 0
)
)
plt.hist2d(
np.log10(galdata[i]['SFRcorr'][hal_centgal[i][indices]-1] /
(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11)),
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1] / halodata[i]['Mass'][indices]),
range=[[-12, -8], [-4, 0]], bins=100, cmin=1
)
    plt.xlabel('Log(sSFR)', size=20)
    plt.ylabel('Log($M_{*}/M_{h}$)', size=20)
"""Plot sSFR vs SM/HM"""
for i in range(numzbin):
plt.figure()
indices = np.where(np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1))
plt.hist2d(
np.log10(galdata[i]['SFRcorr'][hal_centgal[i][indices]-1]),
galdata[i]['Mass'][hal_centgal[i][indices]-1]/halodata[i]['Mass'][indices],
range=[[-2, 2], [-4, 0]], bins=100, cmin=20
)
plt.colorbar()
plt.xlabel('Log(SFR)', size=20)
plt.ylabel('Log(SM/HM)', size=20)
plt.title('Original HorizonAGN, Central gal, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
plt.tight_layout()
"""Select galaxies with distance to node < 10**-0.5"""
d = 10**-0.5
for i in range(numzbin-1):
plt.figure()
# plot histogram for halos with distance to node > 10**-0.5 Mpc
indices = np.where(
np.logical_and(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
haloes_env[i][:, 2] > d
)
)
plt.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11),
bins=100, cmin=1)
plt.colorbar()
    # add a scatter for haloes with d(Node) < 10**-0.5 Mpc
indices = np.where(
np.logical_and(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
haloes_env[i][:, 2] < d
)
)
print('N haloes close to nodes : ' + str(len(indices[0])))
plt.scatter(
np.log10(halodata[i]['Mass'][indices]*10**11),
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11),
c='red', label=('Haloes with d(Node)<10**-0.5 Mpc'))
plt.legend()
plt.xlabel('Log(Halo Mass)', size=12)
plt.ylabel('Log(Stellar Mass)', size=12)
plt.title('Original HorizonAGN, Central gal, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.savefig('../Plots/HorizonAGN/Hexbins/NodesFilaments/Ms_Mh_distanceSeparation' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
"""Plot Hexbins for the photometric catalog"""
# selection of relevant galaxies (central with level 1 halo and matched)
indices_allz = []
galphotselec = []
for i in range(numzbin-1):
indices_allz.append(np.where(
np.logical_and(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))] > 0
)
))
galphotselec.append(galphot[
galdata_allz['Obs_gal_idx'][
hal_centgal[i][:] - 1 + sum(len(galdata[j]) for j in range(i))
].astype('int')
])
for i in range(numzbin-1):
plt.figure()
indices = np.intersect1d(indices_allz[i], np.where(galphotselec[i]['Mass'] > 9))
plt.hexbin(
galphotselec[i]['Mass'][indices],
# galphotselec[i]['mag_u'][indices],
galphotselec[i]['mag_J'][indices],
C=galphotselec[i]['Mass'][indices] - np.log10(halodata[i]['Mass'][indices]*10**11),
# np.log10(haloes_env[i][indices, 2][0]),
# galphotselec[i]['Mass'][indices],
# C=np.log10(galphotselec[i]['SFR'][indices]/(galphotselec[i]['Mass'][indices]*10**11)),
# C=np.log10(galphotselec[i]['SFR'][indices]),
# C=np.log10(haloes_env[i][indices, 2][0]),
# galphotselec[i]['mag_K'][indices],
# C=galphotselec[i]['mag_J'][indices]-galphotselec[i]['mag_u'][indices],
gridsize=60, mincnt=20, cmap='jet', extent=[9, 12, 20, 30]
)
cb = plt.colorbar()
cb.set_label('Log(Ms/Mh)', size=12)
plt.xlabel('Stellar mass', size=12)
plt.ylabel('Mag J', size=12)
plt.title('Photometric HorizonAGN, Central gal, z=' +
str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.savefig('../Plots/HAGN_Matching/ClotMatch/Hexbins/Colors/J_U_MsMH_' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
"""Plot gas mass vs Halo mass"""
for i in range(numzbin-1):
plt.figure()
indices = indices_allz[i]
plt.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
np.log10(galphotselec[i]['Gas_mass'][indices] / (halodata[i]['Mass'][indices]*10**11)),
bins=50, cmin=20, range=[[10, 12], [-1.5, -0.5]]
)
plt.xlabel('Log(Halo mass)', size=12)
    plt.ylabel('Log(Gas mass / Halo mass)', size=12)
    plt.title('Photometric HorizonAGN, Central gal, z=' +
              str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
""" Compute average gas mass per halo mass"""
def averageperHM(data, data_name, indices_selec, numzbin, massbins):
"""Retun average, median and standard eviation of the data per halo mass.
Routine to compute useful info on the data.
Warning : it is full of particular cases, as for instance for gas mass I take only
positiove masses, and I compute them on a logscale.
"""
medperHM = np.zeros([numzbin, np.size(massbins)-1])
avperHM = np.zeros([numzbin, np.size(massbins)-1])
stdperHM = np.zeros([numzbin, np.size(massbins)-1])
for i in range(numzbin):
for j in range(np.size(massbins)-1):
m1 = massbins[j]
m2 = massbins[j+1]
indices = np.where(np.logical_and(
np.logical_and(
np.log10(halodata[i]['Mass']*10**11) > m1,
np.log10(halodata[i]['Mass']*10**11) <= m2),
data[i][data_name] > 0
))[0]
indices = np.intersect1d(indices_selec[i], indices)
if len(indices) > 0:
avperHM[i, j] = np.average(np.log10(data[i][data_name][indices]))
medperHM[i, j] = np.median(np.log10(data[i][data_name][indices]))
stdperHM[i, j] = np.std(np.log10(data[i][data_name][indices]))
else:
avperHM[i, j] = np.nan
medperHM[i, j] = np.nan
stdperHM[i, j] = np.nan
return avperHM, medperHM, stdperHM
massbins = np.linspace(10, 13, num=20)
avGMperHM, medGMperHM, stdGMperHM = averageperHM(galphotselec, 'Gas_mass',
indices_allz, 3, massbins)
"""Plot Gas_mass versus Halo_mass"""
for i in range(numzbin-1):
plt.figure()
indices = indices_allz[i]
plt.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
np.log10(galphotselec[i]['Gas_mass'][indices]) / np.log10(
halodata[i]['Mass'][indices]*10**11),
bins=100, cmin=1, range=[[10, 13], [0.6, 1.1]]
)
plt.colorbar()
# plt.errorbar(
# (massbins[:-1]+massbins[1:])/2, avGMperHM[i],
# yerr=stdGMperHM[i], color='red'
# )
    plt.xlabel('Log(Halo mass)', size=12)
    plt.ylabel('Log(Gas mass)/Log(Halo mass)', size=12)
    plt.title('Photometric HorizonAGN, Central gal, z=' +
              str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.savefig('../Plots/HAGN_Matching/ClotMatch/Hexbins/GasMass/logGMonlogHVM_' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
for i in range(numzbin-1):
plt.figure()
indices = np.intersect1d(indices_allz[i], np.where(galphotselec[i]['Mass'] > 0))
plt.hexbin(
np.log10(halodata[i]['mvir'][indices]*10**11),
np.log10(galphotselec[i]['Gas_mass'][indices]) / np.log10(
halodata[i]['mvir'][indices]*10**11),
# C=galphotselec[i]['Mass'][indices] - np.log10(
# halodata[i]['Mass'][indices]*10**11) ,
gridsize=60, mincnt=10, cmap='jet', extent=[10, 13, 0.6, 1.1]
)
cb = plt.colorbar()
    cb.set_label('Counts', size=12)
plt.xlabel('Log(Halo mass)', size=12)
plt.ylabel('Log(Gas mass)/Log(Halo Mass)', size=12)
    plt.title('Photometric HorizonAGN, Central gal, z=' +
              str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.savefig('../Plots/HAGN_Matching/ClotMatch/Hexbins/GasMass/logGMonlogHM_' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
cut = 0.85
for i in range(numzbin-1):
plt.figure()
    # plot histogram for all matched central haloes with Ms > 9
indices = indices_allz[i]
indices = np.intersect1d(indices, np.where(galphotselec[i]['Mass'] > 9))
plt.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
galphotselec[i]['Mass'][indices],
bins=100, cmin=1)
plt.colorbar()
    # add a scatter for haloes below the gas-mass cut
    indices = np.intersect1d(indices,
                             np.where(np.log10(galphotselec[i]['Gas_mass'][:]) / np.log10(
                                 halodata[i]['mvir'][:]*10**11) < cut)
                             )
    print('N haloes below the cut : ' + str(len(indices)))
    plt.scatter(
        np.log10(halodata[i]['Mass'][indices]*10**11),
        galphotselec[i]['Mass'][indices],
        c='red', label=('Haloes with log(Mg)/log(Mvir) < ' + str(cut)))
plt.legend()
plt.xlabel('Log(Halo Mass)', size=12)
plt.ylabel('Log(Stellar Mass)', size=12)
plt.title('Original HorizonAGN, Central gal, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.savefig('../Plots/HorizonAGN/Hexbins/NodesFilaments/Ms_Mh_distanceSeparation' +
"""Plot colors"""
for i in range(numzbin-1):
plt.figure()
indices = np.intersect1d(indices_allz[i], np.where(galphotselec[i]['Mass'] > 9))
plt.hist2d(
# galphotselec[i]['Mass'][indices],
np.log10(halodata[i]['Mass'][indices]*10**11),
galphotselec[i]['mag_u'][indices],
cmin=1, bins=50
)
"""Test de faire des corner plot"""
from getdist import plots, MCSamples
i = 0
indices = indices_allz[i]
indices = np.intersect1d(indices_allz[i], np.where(galphotselec[i]['Mass'] > 9))
indices = np.intersect1d(indices, np.where(galphotselec[i]['Gas_mass'] > 0) )
# names = ['Ms', 'Mh', 'Ms/Mh', 'J-U', 'U-R']
# data = [
# galphotselec[i]['Mass'][indices],
# np.log10(halodata[i]['Mass'][indices]*10**11),
# galphotselec[i]['Mass'][indices] - np.log10(halodata[i]['Mass'][indices]*10**11),
# galphotselec[i]['mag_J'][indices] - galphotselec[i]['mag_u'][indices],
# galphotselec[i]['mag_u'][indices] - galphotselec[i]['mag_r'][indices],
# ]
names = ['Ms', 'Mh', 'Mg', 'log(Mg)/log(Mh)']
data = [
galphotselec[i]['Mass'][indices],
np.log10(halodata[i]['Mass'][indices]*10**11),
np.log10(galphotselec[i]['Gas_mass'][indices]),
np.log10(galphotselec[i]['Gas_mass'][indices])/np.log10(halodata[i]['Mass'][indices]*10**11),
]
samples = MCSamples(samples=data, names=names)
# To change the confidence regions of the plots;
# by default they are the 68% and 95% confidence regions
samples.contours = np.array([0.68, 0.95, 0.99])
samples.updateBaseStatistics()
g = plots.getSubplotPlotter()
g.settings.num_plot_contours = 3
g.triangle_plot(samples, filled=True, contours=0.2)
#g.export('statistiques')
#plt.close('all')
"""Try to do Principal component analysis on the data"""
i=2
indices = np.intersect1d(indices_allz[i], np.where(galphotselec[i]['Mass'] > 9))
indices = np.intersect1d(indices, np.where(galphotselec[i]['Gas_mass'] > 0) )
data = np.transpose(np.array([
galphotselec[i]['Mass'][indices],
np.log10(halodata[i]['Mass'][indices]*10**11),
np.log10(galphotselec[i]['Gas_mass'][indices]),
]))
# result = mlab.PCA(data)
# from mpl_toolkits.mplot3d import Axes3D
# x = []
# y = []
# z = []
# for item in result.Y:
# x.append(item[0])
# y.append(item[1])
# z.append(item[2])
# plt.close('all') # close all latent plotting windows
# fig1 = plt.figure() # Make a plotting figure
# ax = Axes3D(fig1) # use the plotting figure to create a Axis3D object.
# pltData = [x,y,z]
# ax.scatter(pltData[0], pltData[1], pltData[2], 'bo') # make a scatter plot of blue dots from the
# data
# # make simple, bare axis lines through space:
# xAxisLine = ((min(pltData[0]), max(pltData[0])), (0, 0), (0,0)) # 2 points make the x-axis line
# at the data extrema along x-axis
# ax.plot(xAxisLine[0], xAxisLine[1], xAxisLine[2], 'r') # make a red line for the x-axis.
# yAxisLine = ((0, 0), (min(pltData[1]), max(pltData[1])), (0,0)) # 2 points make the y-axis line
# at the data extrema along y-axis
# ax.plot(yAxisLine[0], yAxisLine[1], yAxisLine[2], 'r') # make a red line for the y-axis.
# zAxisLine = ((0, 0), (0,0), (min(pltData[2]), max(pltData[2]))) # 2 points make the z-axis line
# at the data extrema along z-axis
# ax.plot(zAxisLine[0], zAxisLine[1], zAxisLine[2], 'r') # make a red line for the z-axis.
# # label the axes
# ax.set_xlabel("x-axis label")
# ax.set_ylabel("y-axis label")
# ax.set_zlabel("y-axis label")
# ax.set_title("The title of the plot")
# plt.show() # show the plot
from sklearn.decomposition import PCA
sk_pca = PCA(n_components=2)
sklearn_result = sk_pca.fit_transform(data)
plt.plot(sklearn_result[:, 0], sklearn_result[:, 1], '.')
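# Optional check (uses the sk_pca object fitted above): fraction of the variance carried
# by each principal component, to see how planar the (Ms, Mh, Mg) distribution is.
print(sk_pca.explained_variance_ratio_)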
""""PLot Ms/Mh separating halos with environment"""
# Fit Boo on it :
boo_fit_hd = np.empty([numzbin-1, 5])
boo_cov_hd = np.empty([numzbin-1, 5, 5])
for i in range(numzbin-1):
print(i)
indices = np.where(
np.logical_and(
np.logical_and(
np.log10(galdata[i]['Mass'][hal_centgal[i]-1]*10**11) > 9,
np.log10(haloes_env[i][:, 0]) > -0.5
),
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1)
)
)
boo_fit_hd[i], boo_cov_hd[i] = curve_fit(
boo_MhMs,
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11),
np.log10(halodata[i]['Mass'][indices]*10**11),
bounds=[[10, 8, 0, 0, 0], [13, 11, 5, 5, 5]],
method='trf')
print(boo_fit_hd)
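# boo_fit_ld is used in the comparison plot below but is not computed in this section.
# Minimal sketch (assumption): the low-density fit mirrors boo_fit_hd with the density
# cut reversed, i.e. log10(local density) <= -0.5.
boo_fit_ld = np.empty([numzbin-1, 5])
boo_cov_ld = np.empty([numzbin-1, 5, 5])
for i in range(numzbin-1):
    indices = np.where(
        np.logical_and(
            np.logical_and(
                np.log10(galdata[i]['Mass'][hal_centgal[i]-1]*10**11) > 9,
                np.log10(haloes_env[i][:, 0]) <= -0.5
            ),
            np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1)
        )
    )
    boo_fit_ld[i], boo_cov_ld[i] = curve_fit(
        boo_MhMs,
        np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11),
        np.log10(halodata[i]['Mass'][indices]*10**11),
        bounds=[[10, 8, 0, 0, 0], [13, 11, 5, 5, 5]],
        method='trf')
print(boo_fit_ld)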
boofitsSMbins = np.linspace(9, 12, num=100)
for i in range(numzbin-1):
plt.figure()
indices = np.where(
np.logical_and(
np.logical_and(hal_centgal[i] > 0, halodata[i]['level'] == 1),
np.log10(haloes_env[i][:, 0]) > 0
)
)
plt.hist2d(
np.log10(halodata[i]['Mass'][indices]*10**11),
np.log10(galdata[i]['Mass'][hal_centgal[i][indices]-1]*10**11),
bins=100, cmin=1)
plt.colorbar()
# Plot Behroozi fit
# plt.plot(boo_MhMs(boofitsSMbins, *boo_fit_true[i]), boofitsSMbins,
# label=str('True Behroozi function fit'), c='r')
plt.legend()
plt.xlabel('Log($M_{h}$) [Log($M_{\odot}$)]', size=12)
plt.ylabel('Log($M_{*}$) [Log($M_{\odot}$)]', size=12)
plt.title('HorizonAGN, Central galz='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.savefig('../Plots/HAGN_Matching/ClotMatchBis/TrueMass_HaloMass_Boofit' +
# str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) + '.pdf')
plt.figure()
for i in range(3):
p = plt.plot(
boo_MhMs(boofitsSMbins, *boo_fit_true[i]),
boofitsSMbins-boo_MhMs(boofitsSMbins, *boo_fit_true[i]),
label=str('All '+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])))
plt.plot(
boo_MhMs(boofitsSMbins, *boo_fit_hd[i]),
boofitsSMbins-boo_MhMs(boofitsSMbins, *boo_fit_hd[i]),
label=str('HD '+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])),
color=p[0].get_color(), linestyle='--')
plt.plot(
boo_MhMs(boofitsSMbins, *boo_fit_ld[i]),
boofitsSMbins-boo_MhMs(boofitsSMbins, *boo_fit_ld[i]),
label=str('LD '+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])),
color=p[0].get_color(), linestyle=':')
plt.legend()
plt.xlabel('Log($M_{h}/M_{\odot}$)', size=12)
plt.ylabel('Log($M_{*}/M_{h}$)', size=12)
"""Load data to compute number of galaxies per halo"""
# Main halos
gal_mainhaloes = []
mainHaloMass = []
for i in range(np.size(zbins_Cone)-2):
gal_mainhaloes.append(
np.loadtxt('../Data/HorizonAGNLaigleCatalogs/Cat_' +
str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])+'_Gal_MainHaloes_newb.txt',
dtype='i4'))
# Sub halos
gal_subhaloes = []
subHaloMass = []
for i in range(np.size(zbins_Cone)-2):
gal_subhaloes.append(
np.loadtxt('../Data/HorizonAGNLaigleCatalogs/Cat_' +
str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])+'_Gal_SubHaloes_newb.txt',
dtype='i4'))
"""Number of galaxies per halo"""
# Number of halo with minimal mass
# minimum = min(10**11*halodata['Mass'])
# indices = [i for i, v in enumerate(10**11*halodata['Mass']) if v == minimum]
# np.size(indices)
# gal_subhaloes gives the index of the closest halo -> not relevant
# nbgalaxiesperhalos = []
# for i in range(numzbin-1):
# # index j of nbgalaxiesperhalos gives the number of galaxies in the halo of
# # ID = j+1
# nbgalaxiesperhalos.append(np.zeros(np.size(halodata[i]['Mass'])))
# for j in gal_subhaloes[i].astype(int):
# nbgalaxiesperhalos[i][j-1] += 1
nbgalaxiesperhalos_main = []
for i in range(numzbin-1):
# index j of nbgalaxiesperhalos gives the number of galaxies in the halo of
# ID = j+1
nbgalaxiesperhalos_main.append(np.zeros(np.size(halodata[i]['Mass'])))
indices = np.where(gal_mainhaloes[i] > 0)
for j in gal_mainhaloes[i][indices].astype(int):
nbgalaxiesperhalos_main[i][j-1] += 1
# nb_centralgalaxies_per_mainhalo = []
# for i in range(numzbin-1):
# print(i)
# nb_centralgalaxies_per_mainhalo.append(np.zeros(np.size(halodata[i]['Mass'])))
# indices = np.where(np.logical_and(
# galdata[i]['level'] == 1,
# gal_mainhaloes[i] > 0))
# for j in gal_mainhaloes[i][indices]:
# nb_centralgalaxies_per_mainhalo[i][j-1] += 1
# WARNING : central galaxies are to be associated using hal_centgal, where only one gal is associated
# to each halo
nb_centralgalaxies_per_mainhalo = []
for i in range(numzbin-1):
nb_centralgalaxies_per_mainhalo.append(np.zeros(np.size(halodata[i]['Mass'])))
nb_centralgalaxies_per_mainhalo[i][(hal_centgal[i] > 0) & (halodata[i]['level'] == 1)] = 1
nb_levelMore1_galaxies_per_mainhalo = []
for i in range(numzbin-1):
print(i)
nb_levelMore1_galaxies_per_mainhalo.append(np.zeros(np.size(halodata[i]['Mass'])))
indices = np.where(np.logical_and(
galdata[i]['level'] >= 1,
gal_mainhaloes[i] > 0))
for j in gal_mainhaloes[i][indices]:
nb_levelMore1_galaxies_per_mainhalo[i][j-1] += 1
nb_level1galaxies_per_mainhalo = []
for i in range(numzbin-1):
print(i)
nb_level1galaxies_per_mainhalo.append(np.zeros(np.size(halodata[i]['Mass'])))
indices = np.where(np.logical_and(
galdata[i]['level'] == 1,
gal_mainhaloes[i] > 0))
for j in gal_mainhaloes[i][indices]:
nb_level1galaxies_per_mainhalo[i][j-1] += 1
nb_level1galaxies_per_mainhalo = []
for i in range(numzbin-1):
    print(i)
    nb_level1galaxies_per_mainhalo.append(np.zeros(np.size(halodata[i]['Mass'])))
    # galaxies matched to a main halo but which are not the central galaxy of any halo
    # (NB: this overwrites the level == 1 selection defined just above)
    indices = np.setdiff1d(np.where(gal_mainhaloes[i] > 0)[0], hal_centgal[i] - 1)
    for j in gal_mainhaloes[i][indices]:
        nb_level1galaxies_per_mainhalo[i][j-1] += 1
"""Plot"""
# for i in range(4):
# plt.hist(nbgalaxiesperhalos[i], bins=range(nbgalaxiesperhalos[i].max().astype(int)))
# plt.yscale('log')
# plt.show()
"""Number galaxy per halo versus Halo Mass"""
# Compute Average mass of halos for a given number of galaxies in the halo
# averageHaloMassPerNgal = []
# for i in range(numzbin-1):
# averageHaloMassPerNgal.append(np.empty(nbgalaxiesperhalos_main[i].astype(int).max()+1))
# for j in range(nbgalaxiesperhalos_main[i].astype(int).max()+1):
# averageHaloMassPerNgal[i][j] = np.mean(
# halodata[i]['Mass'][nbgalaxiesperhalos_main[i] == j])
# Compute average number of galaxies in halos given a halo mass interval
massbins = np.linspace(10, 15, num=100)
averageNgalperHaloMass = np.zeros([numzbin-1, np.size(massbins)-1])
av_centralgalaxies_per_mainhalo = np.zeros([numzbin-1, np.size(massbins)-1])
av_levelMore1_galaxies_per_mainhalo = np.zeros([numzbin-1, np.size(massbins)-1])
av_level1galaxies_per_mainhalo = np.zeros([numzbin-1, np.size(massbins)-1])
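# For every redshift bin and every halo-mass bin [m1, m2), average the per-halo galaxy
# counts over all main haloes whose log10 mass falls in the bin (halo masses are
# stored in units of 1e11 solar masses).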
for i in range(numzbin-1):
for j in range(np.size(massbins)-1):
m1 = massbins[j]
m2 = massbins[j+1]
averageNgalperHaloMass[i][j] = np.average(
nbgalaxiesperhalos_main[i][
np.logical_and(
np.log10(halodata[i]['Mass']*10**11) > m1,
np.log10(halodata[i]['Mass']*10**11) < m2)
])
av_centralgalaxies_per_mainhalo[i][j] = np.average(
nb_centralgalaxies_per_mainhalo[i][
np.logical_and(
np.log10(halodata[i]['Mass']*10**11) > m1,
np.log10(halodata[i]['Mass']*10**11) < m2)
])
av_levelMore1_galaxies_per_mainhalo[i][j] = np.average(
nb_levelMore1_galaxies_per_mainhalo[i][
np.logical_and(
np.log10(halodata[i]['Mass']*10**11) > m1,
np.log10(halodata[i]['Mass']*10**11) < m2)
])
av_level1galaxies_per_mainhalo[i][j] = np.average(
nb_level1galaxies_per_mainhalo[i][
np.logical_and(
np.log10(halodata[i]['Mass']*10**11) > m1,
np.log10(halodata[i]['Mass']*10**11) < m2)
])
# massbins = np.linspace(10, 15, num=100)
# averageNgalperSubHaloMass = np.zeros([numzbin, np.size(massbins)-1])
# for i in range(numzbin-1):
# for j in range(np.size(massbins)-1):
# m1 = massbins[j]
# m2 = massbins[j+1]
# averageNgalperSubHaloMass[i][j] = np.average(
# nbgalaxiesperhalos[i][
# np.logical_and(
# np.log10(halodata[i]['Mass']*10**11) > m1,
# np.log10(halodata[i]['Mass']*10**11) < m2)
# ])
"""Plot"""
# plt.hist2d(np.log10(halodata[0]['Mass'][nbgalaxiesperhalos_main[0]>0]*10**11),
# nbgalaxiesperhalos_main[0][nbgalaxiesperhalos_main[0]>0], bins=100, cmin=1)
# for i in range(4):
# fig = plt.figure()
# plt.yscale('log')
# plt.scatter(np.log10(halodata[i]['Mass'][nbgalaxiesperhalos_main[i]>0]*10**11),
# nbgalaxiesperhalos_main[i][nbgalaxiesperhalos_main[i]>0],
# marker='.')
# # plt.scatter(np.log10(averageHaloMassPerNgal[i][1:]*10**11),
# # np.arange(1, nbgalaxiesperhalos_main[i].astype(int).max()+1), label='Average Mass')
# plt.title('HorizonAGN, z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]) +
# ', match gal-Mainhalo')
# plt.xlabel('Log($M_{h}$) [Log($M_{\odot}$)]')
# plt.ylabel('Number of galaxies in the halo')
# plt.legend()
# plt.show()
# for i in range(numzbin-1):
# plt.scatter(
# (massbins[:-1]+massbins[1:])/2, av_levelMore1_galaxies_per_mainhalo[i][:],
# label='z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1]))
# plt.yscale('log')
# plt.ylabel('Average number of galaxies per halo', size=15)
# plt.xlabel('Log($M_{h}$) [Log($M_{\odot}$)]', size=15)
fig = plt.figure(figsize=(12, 4))
gs = GridSpec(1, 3, width_ratios=[1, 1, 1])
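# One panel per redshift bin: mean number of galaxies per halo as a function of halo
# mass, comparing all galaxies, centrals, level>1 galaxies and level=1 galaxies.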
for i in range(numzbin-1):
ax1 = plt.subplot(gs[i])
ax1.scatter((massbins[:-1]+massbins[1:])/2, averageNgalperHaloMass[i][:],
label='z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])+', all', marker='d')
ax1.scatter((massbins[:-1]+massbins[1:])/2, av_centralgalaxies_per_mainhalo[i][:],
label='z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])+', central', marker='.')
ax1.scatter((massbins[:-1]+massbins[1:])/2, av_levelMore1_galaxies_per_mainhalo[i][:],
label='z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])+', level>1', marker='+')
ax1.scatter((massbins[:-1]+massbins[1:])/2, av_level1galaxies_per_mainhalo[i][:],
label='z='+str(zbins_Cone[i])+'-'+str(zbins_Cone[i+1])+', level=1', marker='*')
ax1.set_yscale('log')
ax1.legend()
ax1.set_ylabel('Average number of galaxies per halo')
    ax1.set_xlabel(r'Log($M_{h}/M_{\odot}$)')
plt.tight_layout()
| gpl-3.0 | -2,726,468,782,197,244,400 | 39.112942 | 99 | 0.574976 | false |
gdietz/OpenMEE | imputation/ui_mice_parameters_page.py | 1 | 5170 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mice_parameters_page.ui'
#
# Created: Fri Mar 7 09:30:08 2014
# by: PyQt4 UI code generator 4.10.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
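# Generated wizard page for the mice imputation settings: spin boxes for the number
# of imputations and of iterations, plus combo boxes selecting the default imputation
# method for numeric, two-level categorical and >2-level categorical covariates.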
class Ui_WizardPage(object):
def setupUi(self, WizardPage):
WizardPage.setObjectName(_fromUtf8("WizardPage"))
WizardPage.resize(391, 288)
self.verticalLayout = QtGui.QVBoxLayout(WizardPage)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.label = QtGui.QLabel(WizardPage)
self.label.setObjectName(_fromUtf8("label"))
self.horizontalLayout.addWidget(self.label)
self.m_spinBox = QtGui.QSpinBox(WizardPage)
self.m_spinBox.setMinimum(1)
self.m_spinBox.setMaximum(20)
self.m_spinBox.setProperty("value", 5)
self.m_spinBox.setObjectName(_fromUtf8("m_spinBox"))
self.horizontalLayout.addWidget(self.m_spinBox)
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.verticalLayout.addLayout(self.horizontalLayout)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.label_5 = QtGui.QLabel(WizardPage)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.horizontalLayout_2.addWidget(self.label_5)
self.maxit_spinBox = QtGui.QSpinBox(WizardPage)
self.maxit_spinBox.setMinimum(1)
self.maxit_spinBox.setMaximum(20)
self.maxit_spinBox.setProperty("value", 5)
self.maxit_spinBox.setObjectName(_fromUtf8("maxit_spinBox"))
self.horizontalLayout_2.addWidget(self.maxit_spinBox)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem1)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.defaultMethod_groupBox = QtGui.QGroupBox(WizardPage)
self.defaultMethod_groupBox.setObjectName(_fromUtf8("defaultMethod_groupBox"))
self.gridLayout = QtGui.QGridLayout(self.defaultMethod_groupBox)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.label_2 = QtGui.QLabel(self.defaultMethod_groupBox)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.gridLayout.addWidget(self.label_2, 0, 0, 1, 1)
self.numeric_comboBox = QtGui.QComboBox(self.defaultMethod_groupBox)
self.numeric_comboBox.setObjectName(_fromUtf8("numeric_comboBox"))
self.gridLayout.addWidget(self.numeric_comboBox, 0, 1, 1, 1)
self.label_3 = QtGui.QLabel(self.defaultMethod_groupBox)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.gridLayout.addWidget(self.label_3, 1, 0, 1, 1)
self.factor_2_levels_comboBox = QtGui.QComboBox(self.defaultMethod_groupBox)
self.factor_2_levels_comboBox.setObjectName(_fromUtf8("factor_2_levels_comboBox"))
self.gridLayout.addWidget(self.factor_2_levels_comboBox, 1, 1, 1, 1)
self.label_4 = QtGui.QLabel(self.defaultMethod_groupBox)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.gridLayout.addWidget(self.label_4, 2, 0, 1, 1)
self.factor_gt_2_levels_comboBox = QtGui.QComboBox(self.defaultMethod_groupBox)
self.factor_gt_2_levels_comboBox.setObjectName(_fromUtf8("factor_gt_2_levels_comboBox"))
self.gridLayout.addWidget(self.factor_gt_2_levels_comboBox, 2, 1, 1, 1)
self.verticalLayout.addWidget(self.defaultMethod_groupBox)
spacerItem2 = QtGui.QSpacerItem(20, 50, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem2)
self.retranslateUi(WizardPage)
QtCore.QMetaObject.connectSlotsByName(WizardPage)
def retranslateUi(self, WizardPage):
WizardPage.setWindowTitle(_translate("WizardPage", "WizardPage", None))
self.label.setText(_translate("WizardPage", "# of multiple imputations:", None))
self.label_5.setText(_translate("WizardPage", "# of iterations:", None))
self.defaultMethod_groupBox.setTitle(_translate("WizardPage", "Imputation Methods", None))
self.label_2.setText(_translate("WizardPage", "numeric covariates:", None))
self.label_3.setText(_translate("WizardPage", "categorical with 2 levels:", None))
self.label_4.setText(_translate("WizardPage", "categorical with \n"
"more than 2 levels:", None))
| gpl-3.0 | 7,559,098,870,301,518,000 | 51.755102 | 103 | 0.706576 | false |
RenolY2/battalion-tools | bw_archive/bw_archive_base.py | 1 | 3311 | import io
import struct
from array import array
from rxet.helper import read_uint32
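# On-disk layout assumed by this module: an archive is a sequence of resources, each
# stored as a 4-byte name/ID, a 32-bit size, then `size` bytes of payload; a payload
# may itself contain nested resources (see BWSection and read_section below).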
class BWResource(object):
def __init__(self, name, size, memview):
self.name = name
self._size = size
self._data = memview
self._fileobj = io.BytesIO(self._data)
@property
def fileobj(self):
return self._fileobj
# File object and data object should be kept up to date together when
# one of them is changed.
@fileobj.setter
def fileobj(self, fobj):
self._fileobj.close()
self._fileobj = fobj
self._data = fobj.getbuffer()
@property
def data(self):
return self._data
@data.setter
def data(self, data):
self._fileobj.close()
self._data = data
self._fileobj = io.BytesIO(self._data)
def pack(self):
#data = self.fileobj.read()
data = self._data#self.fileobj.getbuffer()
#print(self.name, len(data))
return self.name, len(data), data
# Interpret a data entry as a section. If cls is given, an instance of that will be returned.
# When using cls, offset is unused.
def as_section(self, offset=0, cls=None):
if cls is None:
return BWSection(self.name, self._size, self._data, section_offset=offset)
else:
return cls(self.name, self._size, self._data)
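# A section is a resource whose payload consists of an optional header
# (the first `section_offset` bytes) followed by a list of nested resources.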
class BWSection(BWResource):
def __init__(self, name, size, memview, section_offset=0):
super().__init__(name, size, memview)
self.entries = []
self._header = self._data[0:section_offset]
self._fileobj.seek(section_offset)
while self._fileobj.tell() < self._size:
name, size, entry_memview = read_section(self._fileobj, memview)
res_obj = BWResource(name, size, entry_memview)
self.entries.append(res_obj)
def pack(self):
packed = io.BytesIO()
packed.write(self._header)
section_size = len(self._header)
for entry in self.entries:
name, size, data = entry.pack()
packed.write(name)
assert size == len(data)
packed.write(struct.pack("I", size))
packed.write(data)
# 4 bytes for the ID, 4 bytes for the length, and the rest is from the data
section_size += 4 + 4 + len(data)
packed_data = packed.getvalue()
packed.close()
return self.name, section_size, packed_data
def as_section(self, offset=0):
return self
class BWArchiveBase(BWSection):
# f should be a file open in binary mode
def __init__(self, f):
# We read the content of the file into memory and put it in a bytearray,
# which is necessary so the content can be modified.
file_content = bytearray(f.read())
#file_content = array("B", f.read())
super().__init__(name=None, size=len(file_content), memview=file_content)
def write(self, f):
unused, size, data = self.pack()
f.write(data)
def read_section(f, memview):
name = f.read(4)
size = read_uint32(f)
offset = f.tell()
data = memoryview(memview[offset:(offset+size)])#f.read(data_len)
f.seek(size, io.SEEK_CUR)
#print(len(memview), len(f.getbuffer()))
return name, size, data
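# Usage sketch (hypothetical file name), reading an archive and listing its
# top-level entries:
#   with open("archive.bw", "rb") as f:
#       archive = BWArchiveBase(f)
#   for entry in archive.entries:
#       print(entry.name, len(entry.data))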
| mit | 4,123,272,925,999,109,000 | 26.363636 | 97 | 0.595892 | false |