ext (stringclasses, 9 values) | sha (stringlengths, 40) | content (stringlengths, 3–1.04M) |
---|---|---|
py | 1a50e1c0db449ef5cd915cfceefa91e122dbc4cf | import pytest
from app import database, main, models
from passlib.hash import bcrypt
from starlette.testclient import TestClient
@pytest.fixture(scope="session")
def db():
yield database.SessionLocal()
@pytest.fixture(scope="module")
def client():
with TestClient(main.app) as test_client:
yield test_client
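# Session-scoped, autouse fixture below: seed one test user before the run, then delete it and close the session afterwards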
@pytest.fixture(scope="session", autouse=True)
def create_test_data(db):
test_user = models.User(username="test", password=bcrypt.hash("test"))
db.add(test_user)
db.commit()
yield
db.query(models.User).delete()
db.commit()
db.close()
|
py | 1a50e2d95a1e5df27bc1f8c0b4fd2c5b713f50c2 | import os
import requests # pip install requests
# Please NOTE: this sample assumes the Cloud API server is hosted at "https://localhost".
# If it is not, replace BASE_URL below with your hosting URL.
# The authentication key (API Key).
# Get your own by registering at https://app.pdf.co/documentation/api
API_KEY = "**************************************"
# Base URL for PDF.co Web API requests
BASE_URL = "https://localhost"
def main(args = None):
fillPDFForm()
def fillPDFForm():
"""Fillable PDF form using PDF.co Web API"""
# Prepare requests params as JSON
# See documentation: https://apidocs.pdf.co
payload = "{\n \"async\": false,\n \"encrypt\": true,\n \"name\": \"newDocument\",\n \"url\": \"https://bytescout-com.s3-us-west-2.amazonaws.com/files/demo-files/cloud-api/pdf-edit/sample.pdf\",\n \"annotations\":[ \n {\n \"text\":\"sample prefilled text\",\n \"x\": 10,\n \"y\": 30,\n \"size\": 12,\n \"pages\": \"0-\",\n \"type\": \"TextField\",\n \"id\": \"textfield1\"\n },\n {\n \"x\": 100,\n \"y\": 150,\n \"size\": 12,\n \"pages\": \"0-\",\n \"type\": \"Checkbox\",\n \"id\": \"checkbox2\"\n },\n {\n \"x\": 100,\n \"y\": 170,\n \"size\": 12,\n \"pages\": \"0-\",\n \"link\": \"https://bytescout-com.s3-us-west-2.amazonaws.com/files/demo-files/cloud-api/pdf-edit/logo.png\",\n \"type\": \"CheckboxChecked\",\n \"id\":\"checkbox3\"\n } \n \n ],\n \n \"images\": [\n {\n \"url\": \"bytescout-com.s3-us-west-2.amazonaws.com/files/demo-files/cloud-api/pdf-edit/logo.png\",\n \"x\": 200,\n \"y\": 250,\n \"pages\": \"0\",\n \"link\": \"www.pdf.co\"\n }\n \n ]\n}"
# Prepare URL for 'Fillable PDF' API request
url = "{}/pdf/edit/add".format(BASE_URL)
# Execute request and get response as JSON
response = requests.post(url, data=payload, headers={"x-api-key": API_KEY, 'Content-Type': 'application/json'})
if (response.status_code == 200):
json = response.json()
if json["error"] == False:
# Get URL of result file
resultFileUrl = json["url"]
# Download result file
r = requests.get(resultFileUrl, stream=True)
if (r.status_code == 200):
with open(destinationFile, 'wb') as file:
for chunk in r:
file.write(chunk)
print(f"Result file saved as \"{destinationFile}\" file.")
else:
print(f"Request error: {response.status_code} {response.reason}")
else:
# Show service reported error
print(json["message"])
else:
print(f"Request error: {response.status_code} {response.reason}")
if __name__ == '__main__':
main() |
py | 1a50e3c3cc4bcf3c3054145df39d73731bb5b675 | #!/bin/python
# -*- coding: UTF-8 -*-
# ******************************************************
# DESC :
# AUTHOR : Alex Stocks
# VERSION : 1.0
# LICENCE : LGPL V3
# EMAIL : [email protected]
# MOD : 2016-04-11 20:15
# FILE : template.py
# ******************************************************
import os, sys
master_run_script_fmt="""#!/usr/bin/env bash
# ******************************************************
# DESC : redis-cluster:redis-master devops script
# AUTHOR : Alex Stocks
# VERSION : 1.0
# LICENCE : LGPL V3
# EMAIL : [email protected]
# MOD : 2016-03-30 15:29
# FILE : master-load.sh
# ******************************************************
# master-script-name {start|stop} master-ip master-port
# user-param: {start|stop} master-ip master-port
sh bin/master-run.sh $1 %s $2 $3"""
slave_run_script_fmt="""#!/usr/bin/env bash
# ******************************************************
# DESC : redis-cluster:redis-slave devops script
# AUTHOR : Alex Stocks
# VERSION : 1.0
# LICENCE : LGPL V3
# EMAIL : [email protected]
# MOD : 2016-03-30 15:29
# FILE : slave-load.sh
# ******************************************************
# slave-script-name {start|stop} index slave-ip slave-port master-ip master-port
# user-param: {start|stop} slave-ip slave-port master-ip master-port
sh bin/slave-run.sh $1 %s $2 $3 $4 $5"""
meta_master_run_script_fmt="""#!/usr/bin/env bash
# ******************************************************
# DESC : redis-cluster:meta-master devops script
# AUTHOR : Alex Stocks
# VERSION : 1.0
# LICENCE : LGPL V3
# EMAIL : [email protected]
# MOD : 2016-03-30 15:29
# FILE : meta-master-load.sh
# ******************************************************
# meta-master-script-name {start|stop} meta-master-ip meta-master-port
# user-param: {start|stop} meta-master-ip meta-master-port
sh bin/meta-master-run.sh $1 $2 $3"""
meta_slave_run_script_fmt="""#!/usr/bin/env bash
# ******************************************************
# DESC : redis-cluster:meta-slave devops script
# AUTHOR : Alex Stocks
# VERSION : 1.0
# LICENCE : LGPL V3
# EMAIL : [email protected]
# MOD : 2016-03-30 15:29
# FILE : meta-slave-load.sh
# ******************************************************
# meta-slave-script-name {start|stop} meta-slave-ip meta-slave-port meta-master-ip meta-master-port
# user-param: {start|stop} meta-slave-ip meta-slave-port meta-master-ip meta-master-port
sh bin/meta-slave-run.sh $1 $2 $3 $4 $5"""
sentinel_run_script_fmt="""#!/usr/bin/env bash
# ******************************************************
# DESC : redis-cluster:redis-sentinel devops script
# AUTHOR : Alex Stocks
# VERSION : 1.0
# LICENCE : LGPL V3
# EMAIL : [email protected]
# MOD : 2016-03-31 11:38
# FILE : sentinel-load.sh
# ******************************************************
# sentinel-script-name {start|stop} index sentinel-ip sentinel-port notify-script instance-set
# user-param: {start|stop} sentinel-ip sentinel-port notify-script instance-script
sh bin/sentinel-run.sh $1 %s $2 $3 $4 $5"""
"""print help"""
def printHelp():
""" print help prompt
"""
print 'usage:'
print ' example: ./template.py {master|slave} index memory-size'
print ' example: ./template.py {meta-master|meta-slave} memory-size'
print ' example: ./template.py sentinel index'
def saveFile(filename, contents):
fh = open(filename, 'w+')
fh.write(contents)
fh.close()
def genMaster(index, memory_size):
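# Render master-load.sh for this index, then assemble a master<index>/ directory: bin/ gets a redis-server copy and master-run.sh with maxmemory patched, conf/ gets the redis.conf template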
content = (master_run_script_fmt % (index))
saveFile('master-load.sh', content)
dir = 'master%s' % index
cmd = ("mkdir -p %s && cd %s && mv ../master-load.sh ./ && "
"mkdir -p bin && cp ../template/redis-server ./bin/redis-server%s && "
"cp ../template/master-run.sh ./bin/ && "
"sed -i \"s/maxmemory=1G/maxmemory=%s/g\" bin/master-run.sh &&"
"mkdir -p conf && cp ../template/redis.conf.template conf/"
% (dir, dir, index, memory_size))
# print cmd
os.system(cmd)
def genSlave(index, memory_size):
content = (slave_run_script_fmt % (index))
saveFile('slave-load.sh', content)
dir = 'slave%s' % index
cmd = ("mkdir -p %s && cd %s && mv ../slave-load.sh ./ && "
"mkdir -p bin && cp ../template/redis-server ./bin/redis-slave%s && "
"cp ../template/slave-run.sh ./bin/ && "
"sed -i \"s/maxmemory=1G/maxmemory=%s/g\" bin/slave-run.sh &&"
"mkdir -p conf && cp ../template/redis.conf.template conf/"
% (dir, dir, index, memory_size))
# print cmd
os.system(cmd)
def genMetaMaster(memory_size):
content = (meta_master_run_script_fmt)
saveFile('meta-master-load.sh', content)
dir = 'meta_master'
cmd = ("mkdir -p %s && cd %s && mv ../meta-master-load.sh ./ && "
"mkdir -p bin && cp ../template/redis-server ./bin/meta-redis-server && "
"cp ../template/meta-master-run.sh ./bin/ && "
"sed -i \"s/maxmemory=1G/maxmemory=%s/g\" bin/meta-master-run.sh &&"
"mkdir -p conf && cp ../template/redis.conf.template conf/"
% (dir, dir, memory_size))
# print cmd
os.system(cmd)
def genMetaSlave(memory_size):
content = (meta_slave_run_script_fmt)
saveFile('meta-slave-load.sh', content)
dir = 'meta_slave'
cmd = ("mkdir -p %s && cd %s && mv ../meta-slave-load.sh ./ && "
"mkdir -p bin && cp ../template/redis-server ./bin/meta-redis-slave && "
"cp ../template/meta-slave-run.sh ./bin/ && "
"sed -i \"s/maxmemory=1G/maxmemory=%s/g\" bin/meta-slave-run.sh &&"
"mkdir -p conf && cp ../template/redis.conf.template conf/"
% (dir, dir, memory_size))
# print cmd
os.system(cmd)
def genSentinel(index):
content = (sentinel_run_script_fmt % (index))
saveFile('sentinel-load.sh', content)
dir = 'sentinel%s' % index
cmd = ("mkdir -p %s && cd %s && mv ../sentinel-load.sh ./ && "
"mkdir -p bin && cp ../template/redis-server ./bin/redis-sentinel%s && "
"cp ../template/sentinel-run.sh ./bin/ && "
"mkdir -p conf && cp ../template/sentinel.conf.template conf/"
% (dir, dir, index))
# print cmd
os.system(cmd)
if __name__ == '__main__':
if len(sys.argv) < 3:
printHelp()
sys.exit(1)
role = sys.argv[1]
index = sys.argv[2]
if role == 'master':
memory_size = sys.argv[3]
genMaster(index, memory_size)
elif role == 'slave':
memory_size = sys.argv[3]
genSlave(index, memory_size)
elif role == 'meta-master':
memory_size = sys.argv[2]
genMetaMaster(memory_size)
elif role == 'meta-slave':
memory_size = sys.argv[2]
genMetaSlave(memory_size)
elif role == 'sentinel':
genSentinel(index)
else:
printHelp()
sys.exit(1)
|
py | 1a50e3caf8ea5c3e820ca0f2c7d36b8ff11a7f87 | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import hashlib
import os
from oslo_concurrency import lockutils
from oslo_log import log as logging
import yaml
from neutron.tests.tempest.common import cred_provider
from neutron.tests.tempest import config
from neutron.tests.tempest import exceptions
CONF = config.CONF
LOG = logging.getLogger(__name__)
def read_accounts_yaml(path):
yaml_file = open(path, 'r')
accounts = yaml.load(yaml_file)
return accounts
class Accounts(cred_provider.CredentialProvider):
def __init__(self, name):
super(Accounts, self).__init__(name)
self.name = name
if os.path.isfile(CONF.auth.test_accounts_file):
accounts = read_accounts_yaml(CONF.auth.test_accounts_file)
self.use_default_creds = False
else:
accounts = {}
self.use_default_creds = True
self.hash_dict = self.get_hash_dict(accounts)
# FIXME(dhellmann): The configuration option is not part of
# the API of the library, because if we change the option name
# or group it will break this use. Tempest needs to set this
# value somewhere that it owns, and then use
# lockutils.set_defaults() to tell oslo.concurrency what value
# to use.
self.accounts_dir = os.path.join(CONF.oslo_concurrency.lock_path,
'test_accounts')
self.isolated_creds = {}
@classmethod
def _append_role(cls, role, account_hash, hash_dict):
if role in hash_dict['roles']:
hash_dict['roles'][role].append(account_hash)
else:
hash_dict['roles'][role] = [account_hash]
return hash_dict
@classmethod
def get_hash_dict(cls, accounts):
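# Build {'creds': {md5(account): account}, 'roles': {role: [account hashes]}} from the parsed accounts file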
hash_dict = {'roles': {}, 'creds': {}}
# Loop over the accounts read from the yaml file
for account in accounts:
roles = []
types = []
if 'roles' in account:
roles = account.pop('roles')
if 'types' in account:
types = account.pop('types')
temp_hash = hashlib.md5()
temp_hash.update(str(account))
temp_hash_key = temp_hash.hexdigest()
hash_dict['creds'][temp_hash_key] = account
for role in roles:
hash_dict = cls._append_role(role, temp_hash_key,
hash_dict)
# If types are set for the account append the matching role
# subdict with the hash
for type in types:
if type == 'admin':
hash_dict = cls._append_role(CONF.identity.admin_role,
temp_hash_key, hash_dict)
elif type == 'operator':
hash_dict = cls._append_role(
CONF.object_storage.operator_role, temp_hash_key,
hash_dict)
elif type == 'reseller_admin':
hash_dict = cls._append_role(
CONF.object_storage.reseller_admin_role,
temp_hash_key,
hash_dict)
return hash_dict
def is_multi_user(self):
# Default credentials are not a valid option with the locking Accounts provider
if self.use_default_creds:
raise exceptions.InvalidConfiguration(
"Account file %s doesn't exist" % CONF.auth.test_accounts_file)
else:
return len(self.hash_dict['creds']) > 1
def is_multi_tenant(self):
return self.is_multi_user()
def _create_hash_file(self, hash_string):
path = os.path.join(os.path.join(self.accounts_dir, hash_string))
if not os.path.isfile(path):
with open(path, 'w') as fd:
fd.write(self.name)
return True
return False
@lockutils.synchronized('test_accounts_io', external=True)
def _get_free_hash(self, hashes):
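# Claim an unused account by creating a lock file named after its hash; raise if every candidate hash is already locked by another process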
# Cast as a list because in some edge cases a set will be passed in
hashes = list(hashes)
if not os.path.isdir(self.accounts_dir):
os.mkdir(self.accounts_dir)
# Create File from first hash (since none are in use)
self._create_hash_file(hashes[0])
return hashes[0]
names = []
for _hash in hashes:
res = self._create_hash_file(_hash)
if res:
return _hash
else:
path = os.path.join(os.path.join(self.accounts_dir,
_hash))
with open(path, 'r') as fd:
names.append(fd.read())
msg = ('Insufficient number of users provided. %s have allocated all '
'the credentials for this allocation request' % ','.join(names))
raise exceptions.InvalidConfiguration(msg)
def _get_match_hash_list(self, roles=None):
hashes = []
if roles:
# Loop over all the creds for each role in the subdict and generate
# a list of cred lists for each role
for role in roles:
temp_hashes = self.hash_dict['roles'].get(role, None)
if not temp_hashes:
raise exceptions.InvalidConfiguration(
"No credentials with role: %s specified in the "
"accounts ""file" % role)
hashes.append(temp_hashes)
# Take the list of lists and do a boolean and between each list to
# find the creds which fall under all the specified roles
temp_list = set(hashes[0])
for hash_list in hashes[1:]:
temp_list = temp_list & set(hash_list)
hashes = temp_list
else:
hashes = self.hash_dict['creds'].keys()
# NOTE(mtreinish): admin is a special case because of the increased
# privilege set which could potentially cause issues on tests where that
# is not expected. So unless the admin role isn't specified do not
# allocate admin.
admin_hashes = self.hash_dict['roles'].get(CONF.identity.admin_role,
None)
if ((not roles or CONF.identity.admin_role not in roles) and
admin_hashes):
useable_hashes = [x for x in hashes if x not in admin_hashes]
else:
useable_hashes = hashes
return useable_hashes
def _get_creds(self, roles=None):
if self.use_default_creds:
raise exceptions.InvalidConfiguration(
"Account file %s doesn't exist" % CONF.auth.test_accounts_file)
useable_hashes = self._get_match_hash_list(roles)
free_hash = self._get_free_hash(useable_hashes)
return self.hash_dict['creds'][free_hash]
@lockutils.synchronized('test_accounts_io', external=True)
def remove_hash(self, hash_string):
hash_path = os.path.join(self.accounts_dir, hash_string)
if not os.path.isfile(hash_path):
LOG.warning('Expected an account lock file %s to remove, but '
'one did not exist' % hash_path)
else:
os.remove(hash_path)
if not os.listdir(self.accounts_dir):
os.rmdir(self.accounts_dir)
def get_hash(self, creds):
for _hash in self.hash_dict['creds']:
# Comparing on the attributes that are expected in the YAML
if all([getattr(creds, k) == self.hash_dict['creds'][_hash][k] for
k in creds.get_init_attributes()]):
return _hash
raise AttributeError('Invalid credentials %s' % creds)
def remove_credentials(self, creds):
_hash = self.get_hash(creds)
self.remove_hash(_hash)
def get_primary_creds(self):
if self.isolated_creds.get('primary'):
return self.isolated_creds.get('primary')
creds = self._get_creds()
primary_credential = cred_provider.get_credentials(**creds)
self.isolated_creds['primary'] = primary_credential
return primary_credential
def get_alt_creds(self):
if self.isolated_creds.get('alt'):
return self.isolated_creds.get('alt')
creds = self._get_creds()
alt_credential = cred_provider.get_credentials(**creds)
self.isolated_creds['alt'] = alt_credential
return alt_credential
def get_creds_by_roles(self, roles, force_new=False):
roles = list(set(roles))
exist_creds = self.isolated_creds.get(str(roles), None)
# The force kwarg is used to allocate an additional set of creds with
# the same role list. The index used for the previous allocation
# in the isolated_creds dict will be moved.
if exist_creds and not force_new:
return exist_creds
elif exist_creds and force_new:
new_index = str(roles) + '-' + str(len(self.isolated_creds))
self.isolated_creds[new_index] = exist_creds
creds = self._get_creds(roles=roles)
role_credential = cred_provider.get_credentials(**creds)
self.isolated_creds[str(roles)] = role_credential
return role_credential
def clear_isolated_creds(self):
for creds in self.isolated_creds.values():
self.remove_credentials(creds)
def get_admin_creds(self):
return self.get_creds_by_roles([CONF.identity.admin_role])
def is_role_available(self, role):
if self.use_default_creds:
return False
else:
if self.hash_dict['roles'].get(role):
return True
return False
def admin_available(self):
return self.is_role_available(CONF.identity.admin_role)
class NotLockingAccounts(Accounts):
"""Credentials provider which always returns the first and second
configured accounts as primary and alt users.
This credential provider can be used in case of serial test execution
to preserve the current behaviour of the serial tempest run.
"""
def _unique_creds(self, cred_arg=None):
"""Verify that the configured credentials are valid and distinct """
if self.use_default_creds:
try:
user = self.get_primary_creds()
alt_user = self.get_alt_creds()
return getattr(user, cred_arg) != getattr(alt_user, cred_arg)
except exceptions.InvalidCredentials as ic:
msg = "At least one of the configured credentials is " \
"not valid: %s" % ic
raise exceptions.InvalidConfiguration(msg)
else:
# TODO(andreaf) Add a uniqueness check here
return len(self.hash_dict['creds']) > 1
def is_multi_user(self):
return self._unique_creds('username')
def is_multi_tenant(self):
return self._unique_creds('tenant_id')
def get_creds(self, id, roles=None):
try:
hashes = self._get_match_hash_list(roles)
# No need to sort the dict as within the same python process
# the HASH seed won't change, so subsequent calls to keys()
# will return the same result
_hash = hashes[id]
except IndexError:
msg = 'Insufficient number of users provided'
raise exceptions.InvalidConfiguration(msg)
return self.hash_dict['creds'][_hash]
def get_primary_creds(self):
if self.isolated_creds.get('primary'):
return self.isolated_creds.get('primary')
if not self.use_default_creds:
creds = self.get_creds(0)
primary_credential = cred_provider.get_credentials(**creds)
else:
primary_credential = cred_provider.get_configured_credentials(
'user')
self.isolated_creds['primary'] = primary_credential
return primary_credential
def get_alt_creds(self):
if self.isolated_creds.get('alt'):
return self.isolated_creds.get('alt')
if not self.use_default_creds:
creds = self.get_creds(1)
alt_credential = cred_provider.get_credentials(**creds)
else:
alt_credential = cred_provider.get_configured_credentials(
'alt_user')
self.isolated_creds['alt'] = alt_credential
return alt_credential
def clear_isolated_creds(self):
self.isolated_creds = {}
def get_admin_creds(self):
if not self.use_default_creds:
return self.get_creds_by_roles([CONF.identity.admin_role])
else:
creds = cred_provider.get_configured_credentials(
"identity_admin", fill_in=False)
self.isolated_creds['admin'] = creds
return creds
def get_creds_by_roles(self, roles, force_new=False):
roles = list(set(roles))
exist_creds = self.isolated_creds.get(str(roles), None)
index = 0
if exist_creds and not force_new:
return exist_creds
elif exist_creds and force_new:
new_index = str(roles) + '-' + str(len(self.isolated_creds))
self.isolated_creds[new_index] = exist_creds
# Figure out how many existing creds for this roles set are present
# use this as the index into the returned hash list to ensure separate
# creds are returned with force_new being True
for creds_names in self.isolated_creds:
if str(roles) in creds_names:
index = index + 1
if not self.use_default_creds:
creds = self.get_creds(index, roles=roles)
role_credential = cred_provider.get_credentials(**creds)
self.isolated_creds[str(roles)] = role_credential
else:
msg = "Default credentials can not be used with specifying "\
"credentials by roles"
raise exceptions.InvalidConfiguration(msg)
return role_credential
|
py | 1a50e6f75fd2680fe96b728d4d9b7204e1957649 | from .models import Agent
from model_bakery.recipe import Recipe, seq
from model_bakery import baker
from itertools import cycle
from django.utils import timezone as djangotime
agent = Recipe(
Agent,
client="Default",
site="Default",
hostname=seq("TestHostname"),
monitoring_type=cycle(["workstation", "server"]),
)
server_agent = agent.extend(
monitoring_type="server",
)
workstation_agent = agent.extend(
monitoring_type="workstation",
)
online_agent = agent.extend(last_seen=djangotime.now())
overdue_agent = agent.extend(
last_seen=djangotime.now() - djangotime.timedelta(minutes=6)
) |
py | 1a50e829204d6b9f44fbdc1454253505f206d8ac | import csv
from typing import Optional
from daos.csv.csv_dao_utils import DATA_DIR
from daos.protocols.player_dao import PlayerDao
from models.player import Player
PLAYERS_CSV = DATA_DIR + 'players.csv'
class PlayerCsvDao(PlayerDao):
def find_player_by_id(self, player_id: int) -> Optional[Player]:
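# Linear scan over the players CSV; return the first matching Player, or None if no row matches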
with open(PLAYERS_CSV) as f:
reader = csv.reader(f)
for row in reader:
player = Player(int(row[0]), row[1])
if player.player_id == player_id:
return player
else:
return None
|
py | 1a50e86fd22a19fe4c1f27a174a37d05c06ccebc |
import shutil,os
def compare(x, y):
# Python 2 cmp-style comparator: order files by modification time (the original mixed st_mtime and st_ctime)
stat_x = os.stat(newPath + "/" + x)
stat_y = os.stat(newPath + "/" + y)
if stat_x.st_mtime < stat_y.st_mtime:
return -1
elif stat_x.st_mtime > stat_y.st_mtime:
return 1
else:
return 0
path = 'e:\\tee\\'
path = 'E:\\Tee\\Top\\'
i = 1
j = 1
for DirIndex in range(1,26):
newPath = path+str(DirIndex)+'\\';
items = os.listdir(newPath)
items.sort(compare)
for file in items:
if os.path.isfile(os.path.join(newPath,file))==True:
new_name=file.replace(file,"1_%04d_T%d_1.png"%(j,i))
os.rename(os.path.join(newPath,file),os.path.join(newPath,new_name))
# shutil.copy(os.path.join(newPath,file),os.path.join(newPath,new_name))
i += 1
if i%4 == 0:
j+=1
i=1
|
py | 1a50e8948573d3b84b478de13f69021fd3bad280 | from .login import *
__all__ = ("LoginStartPacket",)
|
py | 1a50e93d2d86ee7be7c6a3e2edd050e4f5040162 | """Plot of the values obtained from the serial CPU implementation"""
import matplotlib.pyplot as plt
import numpy as np
import csv
path = "Data/"
if __name__ == "__main__":
size = []
time = []
with open(path + 'serial_CPU.csv', mode='r') as csv_file:
csv_reader = csv.DictReader(csv_file)
line_count = 0
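# For each row, record the world size in cells and the time per iteration in seconds (the CSV stores milliseconds)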
for row in csv_reader:
size += [ int(row['width']) * int(row['height']) ]
tmp_time = float(row['time']) * 0.001
time += [tmp_time / float(row['iter']) ]
print(size[-1], time[-1])
n_size = np.array(size)
n_time = np.array(time)
print(n_size)
print(n_time)
n_eval = n_size / n_time / 1000000
print(n_eval)
fig, ax = plt.subplots(figsize=(10,7))
ax.set_xscale('log')
ax.plot(n_size, n_eval, 'r-o')
ax.set(xlabel='Tamaño del mundo [Células]', ylabel='Células evaluadas por segundo [Millones]',
title='Células evaluadas por segundo para distintos tamaños de mundo\ncon implementación secuencial en CPU ')
ax.grid()
fig.savefig("images/serial_CPU.png")
plt.show() |
py | 1a50eb881d2578b80e8cec49b27e7eeac7fdc232 | """
Functions to write atomic coordinates in common chemical formats.
"""
import os
def write_pdb(file_name, atoms, coordinates, header='mol'):
""" Write given atomic coordinates to file in pdb format """
with open(file_name, 'w') as pdb_file:
pdb_file.write('HEADER ' + header + '\n')
format = 'HETATM%5d%3s MOL 1 %8.3f%8.3f%8.3f 1.00 0.00 %2s\n'
for atom_index, (atom_name, atom_coor) in enumerate(zip(atoms, coordinates), start=1):
x, y, z = atom_coor
pdb_file.write(format % (atom_index, atom_name, x, y, z, atom_name.rjust(2)))
pdb_file.write('END\n')
def write_xyz(file_name, atoms, coordinates, header='mol'):
""" Write given atomic coordinates to file in xyz format """
with open(file_name, 'w') as xyz_file:
xyz_file.write(str(len(coordinates)) + '\n')
xyz_file.write(header + '\n')
format = '%s %.4f %.4f %.4f\n'
for atom, coor in zip(atoms, coordinates):
xyz_file.write(format % (atom, coor[0], coor[1], coor[2]))
def write_cif(file_name, atoms, coordinates, header='mol', cell=[1, 1, 1, 90, 90, 90]):
""" Write given atomic coordinates to file in cif format """
with open(file_name, 'w') as cif_file:
cif_file.write('data_%s\n' % header)
cif_file.write('_cell_length_a %7.4f\n' % cell[0])
cif_file.write('_cell_length_b %7.4f\n' % cell[1])
cif_file.write('_cell_length_c %7.4f\n' % cell[2])
cif_file.write('_cell_angle_alpha %7.4f\n' % cell[3])
cif_file.write('_cell_angle_beta %7.4f\n' % cell[4])
cif_file.write('_cell_angle_gamma %7.4f\n' % cell[5])
cif_file.write('loop_\n')
cif_file.write('_atom_site_label\n')
cif_file.write('_atom_site_type_symbol\n')
cif_file.write('_atom_site_fract_x\n')
cif_file.write('_atom_site_fract_y\n')
cif_file.write('_atom_site_fract_z\n')
cif_format = '%s%-4i %2s %7.4f %7.4f %7.4f\n'
for i, (atom, coor) in enumerate(zip(atoms, coordinates)):
cif_file.write(cif_format % (atom, i, atom, coor[0], coor[1], coor[2]))
|
py | 1a50eb8cd36fe18ec33cc9c79c9b6456b947fe77 | from django.urls import path
from .views import EmailView
urlpatterns = [path("<uuid:pk>", EmailView.as_view(), name="email")]
app_name = "enhanced_emails"
|
py | 1a50ebaaa9fb6287e7999663ce1e04517ade32d4 | from SPARQLWrapper import SPARQLWrapper
import modules.misc
import logging
class Build:
def __init__(self, filter_set_edges=[], filter_set_vertices=[]):
self.name = "Build class"
self.filter_set_edges = filter_set_edges
self.filter_set_vertices = filter_set_vertices
def fetch_node_id(self, page):
# Strip the DBpedia category/resource prefixes to get a bare node identifier
output = page.replace("http://dbpedia.org/resource/Category:", "")
output = output.replace("http://dbpedia.org/resource/", "")
return output
def filter_query_pred_gen(self):
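# Assemble a SPARQL FILTER clause that ORs regex(?pred£, ...) tests for every configured edge filter; £ is substituted with the hop number later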
filter_query_pred = ""
for i in range(len(self.filter_set_edges)):
if len(self.filter_set_edges) == 0:
break
elif len(self.filter_set_edges) == 1:
string = "FILTER(regex(?pred£, ££))"
filter_query_pred = string.replace("££", str(self.filter_set_edges[0]))
elif i == 0:
filter_query_pred = "FILTER("
string = "regex(?pred£, ££)"
filter_query_pred = filter_query_pred + string.replace("££", str(self.filter_set_edges[i]))
elif i < len(self.filter_set_edges) - 1:
string = "||regex(?pred£, ££)"
filter_query_pred = filter_query_pred + string.replace("££", str(self.filter_set_edges[i]))
elif i == len(self.filter_set_edges) - 1:
string = "||regex(?pred£, ££))"
filter_query_pred = filter_query_pred + string.replace("££", str(self.filter_set_edges[i]))
return filter_query_pred
def filter_query_pred_inv_gen(self):
filter_query_pred_inv = ""
for i in range(len(self.filter_set_edges)):
if len(self.filter_set_edges) == 0:
break
elif len(self.filter_set_edges) == 1:
string = "FILTER(regex(?pred_inv£, ££))"
filter_query_pred_inv = string.replace("££", str(self.filter_set_edges[0]))
elif i == 0:
filter_query_pred_inv = "FILTER("
string = "regex(?pred_inv£, ££)"
filter_query_pred_inv = filter_query_pred_inv + string.replace("££", str(self.filter_set_edges[i]))
elif i < len(self.filter_set_edges) - 1:
string = "||regex(?pred_inv£, ££)"
filter_query_pred_inv = filter_query_pred_inv + string.replace("££", str(self.filter_set_edges[i]))
elif i == len(self.filter_set_edges) - 1:
string = "||regex(?pred_inv£, ££))"
filter_query_pred_inv = filter_query_pred_inv + string.replace("££", str(self.filter_set_edges[i]))
return filter_query_pred_inv
def filter_query_vertex_gen(self):
filter_query_vertex = ""
for i in range(len(self.filter_set_vertices)):
if len(self.filter_set_vertices) == 0:
break
elif len(self.filter_set_vertices) == 1:
string = "FILTER(regex(?n£, ££))"
filter_query_vertex = string.replace("££", str(self.filter_set_vertices[0]))
elif i == 0:
filter_query_vertex = "FILTER("
string = "regex(?n£, ££)"
filter_query_vertex = filter_query_vertex + string.replace("££", str(self.filter_set_vertices[i]))
elif i < len(self.filter_set_vertices) - 1:
string = "||regex(?n£, ££)"
filter_query_vertex = filter_query_vertex + string.replace("££", str(self.filter_set_vertices[i]))
elif i == len(self.filter_set_vertices) - 1:
string = "||regex(?n£, ££))"
filter_query_vertex = filter_query_vertex + string.replace("££", str(self.filter_set_vertices[i]))
return filter_query_vertex
def cypher_url_gen(self, sparql_query):
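# Execute the SPARQL query against the public DBpedia endpoint and return the URL of the CSV result, later consumed by Neo4j's LOAD CSV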
wrapper = SPARQLWrapper("http://dbpedia.org/sparql")
wrapper.setQuery(sparql_query)
wrapper.setReturnFormat("csv")
query_result = wrapper.query()
url = query_result.geturl()
return url
def run(self, depth):
sparql_query = self.sparql_query_gen(depth)
url = self.cypher_url_gen(sparql_query)
cypher_query = self.cypher_query_gen(depth, url)
modules.misc.commit_cypher_query(cypher_query)
cypher_query_combine_nodes = """
MATCH (n1),(n2)
WHERE n1.iri = n2.iri and id(n1) < id(n2)
CALL apoc.refactor.mergeNodes([n1, n2]) YIELD node
RETURN n1, n2
"""
modules.misc.commit_cypher_query(cypher_query_combine_nodes)
cypher_query_combine_edges = """
MATCH (n1)-[r]->(n2), (n1)-[s]->(n2)
WHERE r.iri = s.iri and id(r) < id(s)
DELETE s
"""
modules.misc.commit_cypher_query(cypher_query_combine_edges)
class Pairwise(Build):
def __init__(self, start_page, end_page, filter_set_edges=[], filter_set_vertices=[]):
self.name = "Pairwise build between " + start_page + " and " + end_page
self.start_page = start_page
self.end_page = end_page
self.filter_set_edges = filter_set_edges
self.filter_set_vertices = filter_set_vertices
def sparql_query_gen(self, depth):
query_part1 = "\nSELECT "
for i in range(depth - 1):
string = "?pred£ ?pred_inv£ ?n£ "
query_part1 = query_part1 + string.replace("£", str(i + 1))
final_string = "?pred£ ?pred_inv£\n"
query_part1 = query_part1 + final_string.replace("£", str(depth))
filter_query_pred = self.filter_query_pred_gen()
filter_query_pred_inv = self.filter_query_pred_inv_gen()
filter_query_vertex = self.filter_query_vertex_gen()
filter_query_vertex_mid = filter_query_vertex + filter_query_vertex.replace("£", "££")
filter_query_vertex_mid = filter_query_vertex_mid.replace(")FILTER(", ")&&(").replace("FILTER(", "FILTER((") + ")"
query_part2_open = """
WHERE {
"""
query_part2_a = """
{ {
<""" + self.start_page + """> ?pred1 ?n1
} UNION {
?n1 ?pred_inv1 <""" + self.start_page + """>
} } .
"""
query_part2_b = """
{ {
""" + filter_query_pred.replace("£", "1") + """
<""" + self.start_page + """> ?pred1 ?n1
} UNION {
""" + filter_query_pred_inv.replace("£", "1") + """
?n1 ?pred_inv1 <""" + self.start_page + """>
} } .
"""
query_part2_c = """
{ {
""" + filter_query_vertex.replace("£", "1") + """
<""" + self.start_page + """> ?pred1 ?n1
} UNION {
""" + filter_query_vertex.replace("£", "1") + """
?n1 ?pred_inv1 <""" + self.start_page + """>
} } .
"""
query_part2_d = """
{ {
""" + filter_query_pred.replace("£", "1") + filter_query_vertex.replace("£", "1") + """
<""" + self.start_page + """> ?pred1 ?n1
} UNION {
""" + filter_query_pred_inv.replace("£", "1") + filter_query_vertex.replace("£", "1") + """
?n1 ?pred_inv1 <""" + self.start_page + """>
} } .
"""
for i in range(depth - 2):
block_a = """
{ {
?n£ ?pred££ ?n££
} UNION {
?n££ ?pred_inv££ ?n£
} } .
"""
block_b = """
{ {
""" + filter_query_pred.replace("£", str(i + 2)) + """
?n£ ?pred££ ?n££
} UNION {
""" + filter_query_pred_inv.replace("£", str(i + 2)) + """
?n££ ?pred_inv££ ?n£
} } .
"""
block_c = """
{ {
""" + filter_query_vertex_mid + """
?n£ ?pred££ ?n££
} UNION {
""" + filter_query_vertex_mid + """
?n££ ?pred_inv££ ?n£
} } .
"""
block_d = """
{ {
""" + filter_query_pred.replace("£", str(i + 2)) + filter_query_vertex_mid + """
?n£ ?pred££ ?n££
} UNION {
""" + filter_query_pred_inv.replace("£", str(i + 2)) + filter_query_vertex_mid + """
?n££ ?pred_inv££ ?n£
} } .
"""
query_part2_a = query_part2_a + block_a.replace("££", str(i + 2)).replace("£", str(i + 1))
query_part2_b = query_part2_b + block_b.replace("££", str(i + 2)).replace("£", str(i + 1))
query_part2_c = query_part2_c + block_c.replace("££", str(i + 2)).replace("£", str(i + 1))
query_part2_d = query_part2_d + block_d.replace("££", str(i + 2)).replace("£", str(i + 1))
final_block_a = """
{ {
""" + filter_query_pred.replace("£", str(depth)) + """
?n£ ?pred££ <""" + self.end_page + """>
} UNION {
""" + filter_query_pred_inv.replace("£", str(depth)) + """
<""" + self.end_page + """> ?pred_inv££ ?n£
} } .
"""
final_block_b = """
{ {
""" + filter_query_pred.replace("£", str(depth)) + """
?n£ ?pred££ <""" + self.end_page + """>
} UNION {
""" + filter_query_pred_inv.replace("£", str(depth)) + """
<""" + self.end_page + """> ?pred_inv££ ?n£
} } .
"""
final_block_c = """
{ {
""" + filter_query_vertex + """
?n£ ?pred££ <""" + self.end_page + """>
} UNION {
""" + filter_query_vertex + """
<""" + self.end_page + """> ?pred_inv££ ?n£
} } .
"""
final_block_d = """
{ {
""" + filter_query_pred.replace("£", str(depth)) + filter_query_vertex + """
?n£ ?pred££ <""" + self.end_page + """>
} UNION {
""" + filter_query_pred_inv.replace("£", str(depth)) + filter_query_vertex + """
<""" + self.end_page + """> ?pred_inv££ ?n£
} } .
"""
query_part2_a = query_part2_a + final_block_a.replace("££", str(depth)).replace("£", str(depth - 1))
query_part2_b = query_part2_b + final_block_b.replace("££", str(depth)).replace("£", str(depth - 1))
query_part2_c = query_part2_c + final_block_c.replace("££", str(depth)).replace("£", str(depth - 1))
query_part2_d = query_part2_d + final_block_d.replace("££", str(depth)).replace("£", str(depth - 1))
query_part2_close = """
}
"""
if len(self.filter_set_edges) == 0 and len(self.filter_set_vertices) == 0:
query_part2 = query_part2_open + query_part2_a + query_part2_close
elif len(self.filter_set_vertices) == 0:
query_part2 = query_part2_open + query_part2_b + query_part2_close
elif len(self.filter_set_edges) == 0:
query_part2 = query_part2_open + query_part2_c + query_part2_close
elif len(self.filter_set_edges) != 0 and len(self.filter_set_vertices) != 0:
query_part2 = query_part2_open + query_part2_d + query_part2_close
query = query_part1 + query_part2
logging.info(query)
return query
def cypher_query_gen(self, depth, url):
query_part1 = "WITH \"" + url + "\" AS url\n\nLOAD CSV WITH HEADERS FROM url AS row\n\n"
query_part2 = "MERGE (n0:depth_0 {iri: \"" + self.start_page + "\"})\n"
for i in range(depth - 1):
string = "MERGE (n£:depth_£ {iri: row.n£})\n"
query_part2 = query_part2 + string.replace("£", str(i + 1))
final_string = "MERGE (n£:depth_0 {iri: \"" + self.end_page + "\"})\n"
query_part2 = query_part2 + final_string.replace("£", str(depth))
query_part3 = ""
for i in range(depth):
block = """
FOREACH (x IN CASE WHEN row.pred££ IS NULL THEN [] ELSE [1] END | MERGE (n£)-[p:pred {iri: row.pred££}]->(n££))
FOREACH (x IN CASE WHEN row.pred_inv££ IS NULL THEN [] ELSE [1] END | MERGE (n£)<-[p:pred {iri: row.pred_inv££}]-(n££))
"""
query_part3 = query_part3 + block.replace("££", str(i + 1)).replace("£", str(i))
query = query_part1 + query_part2 + query_part3
logging.info(query)
return query
class Parent(Build):
def __init__(self, page, filter_set_edges=[], filter_set_vertices=[]):
self.name = "Parent build on " + page
self.page = page
self.filter_set_edges = filter_set_edges
self.filter_set_vertices = filter_set_vertices
def sparql_query_gen(self, depth):
query_part1 = "\nSELECT "
for i in range(depth):
string = "?pred£ ?n£ "
query_part1 = query_part1 + string.replace("£", str(i + 1))
filter_query_pred = self.filter_query_pred_gen()
filter_query_vertex = self.filter_query_vertex_gen()
filter_query_vertex_mid = filter_query_vertex + filter_query_vertex.replace("£", "££")
filter_query_vertex_mid = filter_query_vertex_mid.replace(")FILTER(", ")&&(").replace("FILTER(", "FILTER((") + ")"
query_part2_open = """
WHERE {
"""
query_part2_a = """
{
<""" + self.page + """> ?pred1 ?n1
} .
"""
query_part2_b = """
{
""" + filter_query_pred.replace("£", "1") + """
<""" + self.page + """> ?pred1 ?n1
} .
"""
query_part2_c = """
{
""" + filter_query_vertex.replace("£", "1") + """
<""" + self.page + """> ?pred1 ?n1
} .
"""
query_part2_d = """
{
""" + filter_query_pred.replace("£", "1") + filter_query_vertex.replace("£", "1") + """
<""" + self.page + """> ?pred1 ?n1
} .
"""
for i in range(depth - 1):
block_a = """
{
?n£ ?pred££ ?n££
} .
"""
block_b = """
{
""" + filter_query_pred.replace("£", "££") + """
?n£ ?pred££ ?n££
} .
"""
block_c = """
{
""" + filter_query_vertex_mid + """
?n£ ?pred££ ?n££
} .
"""
block_d = """
{
""" + filter_query_pred.replace("£", "££") + filter_query_vertex_mid + """
?n£ ?pred££ ?n££
} .
"""
query_part2_a = query_part2_a + block_a.replace("££", str(i + 2)).replace("£", str(i + 1))
query_part2_b = query_part2_b + block_b.replace("££", str(i + 2)).replace("£", str(i + 1))
query_part2_c = query_part2_c + block_c.replace("££", str(i + 2)).replace("£", str(i + 1))
query_part2_d = query_part2_d + block_d.replace("££", str(i + 2)).replace("£", str(i + 1))
query_part2_close = """
}
"""
if len(self.filter_set_edges) == 0 and len(self.filter_set_vertices) == 0:
query_part2 = query_part2_open + query_part2_a + query_part2_close
elif len(self.filter_set_vertices) == 0:
query_part2 = query_part2_open + query_part2_b + query_part2_close
elif len(self.filter_set_edges) == 0:
query_part2 = query_part2_open + query_part2_c + query_part2_close
elif len(self.filter_set_edges) != 0 and len(self.filter_set_vertices) != 0:
query_part2 = query_part2_open + query_part2_d + query_part2_close
query = query_part1 + query_part2
logging.info(query)
return query
def cypher_query_gen(self, depth, url):
query_part1 = "WITH \"" + url + "\" AS url\n\nLOAD CSV WITH HEADERS FROM url AS row\n\n"
node_id = self.fetch_node_id(self.page)
query_part2 = "MERGE (n0:depth_0:" + node_id + " {iri: \"" + self.page + "\"})\n"
for i in range(depth):
string = "MERGE (n£:depth_£:" + node_id + " {iri: row.n£})\n"
query_part2 = query_part2 + string.replace("£", str(i + 1))
query_part3 = ""
for i in range(depth):
block = """
FOREACH (x IN CASE WHEN row.pred££ IS NULL THEN [] ELSE [1] END | MERGE (n£)-[p:pred {iri: row.pred££}]->(n££))
"""
query_part3 = query_part3 + block.replace("££", str(i + 1)).replace("£", str(i))
query = query_part1 + query_part2 + query_part3
logging.info(query)
return query
class FiniteParent(Build):
def __init__(self, page, filter_set_edges=[], filter_set_vertices=[]):
self.name = "FiniteParent build on " + page
self.page = page
self.filter_set_edges = filter_set_edges
self.filter_set_vertices = filter_set_vertices
def sparql_query_gen(self, depth):
query_part1 = "\nSELECT "
for i in range(depth):
string = "?pred£ ?n£ "
query_part1 = query_part1 + string.replace("£", str(i + 1))
filter_query_pred = self.filter_query_pred_gen()
filter_query_vertex = self.filter_query_vertex_gen()
filter_query_vertex_mid = filter_query_vertex + filter_query_vertex.replace("£", "££")
filter_query_vertex_mid = filter_query_vertex_mid.replace(")FILTER(", ")&&(").replace("FILTER(", "FILTER((") + ")"
query_part2_open = """
WHERE {
"""
query_part2_a = """
{
<""" + self.page + """> ?pred1 ?n1
} .
"""
query_part2_b = """
{
""" + filter_query_pred.replace("£", "1") + """
<""" + self.page + """> ?pred1 ?n1
} .
"""
query_part2_c = """
{
""" + filter_query_vertex.replace("£", "1") + """
<""" + self.page + """> ?pred1 ?n1
} .
"""
query_part2_d = """
{
""" + filter_query_pred.replace("£", "1") + filter_query_vertex.replace("£", "1") + """
<""" + self.page + """> ?pred1 ?n1
} .
"""
temp_a = ""
temp_b = ""
temp_c = ""
temp_d = ""
final_a = ""
final_b = ""
final_c = ""
final_d = ""
for i in range(depth - 1):
block_a = """
{
?n£ ?pred££ ?n££
} ."""
block_b = """
{
""" + filter_query_pred.replace("£", "££") + """
?n£ ?pred££ ?n££
} ."""
block_c = """
{
""" + filter_query_vertex_mid + """
?n£ ?pred££ ?n££
} ."""
block_d = """
{
""" + filter_query_pred.replace("£", "££") + filter_query_vertex_mid + """
?n£ ?pred££ ?n££
} ."""
temp_a = temp_a + block_a.replace("££", str(i + 2)).replace("£", str(i + 1))
temp_b = temp_b + block_b.replace("££", str(i + 2)).replace("£", str(i + 1))
temp_c = temp_c + block_c.replace("££", str(i + 2)).replace("£", str(i + 1))
temp_d = temp_d + block_d.replace("££", str(i + 2)).replace("£", str(i + 1))
final_a = final_a + """
OPTIONAL {""" + temp_a + """
} .
"""
final_b = final_b + """
OPTIONAL {""" + temp_b + """
} .
"""
final_c = final_c + """
OPTIONAL {""" + temp_c + """
} .
"""
final_d = final_d + """
OPTIONAL {""" + temp_d + """
} .
"""
query_part2_close = """
}
"""
if len(self.filter_set_edges) == 0 and len(self.filter_set_vertices) == 0:
query_part2 = query_part2_open + query_part2_a + final_a + query_part2_close
elif len(self.filter_set_vertices) == 0:
query_part2 = query_part2_open + query_part2_b + final_b + query_part2_close
elif len(self.filter_set_edges) == 0:
query_part2 = query_part2_open + query_part2_c + final_c + query_part2_close
elif len(self.filter_set_edges) != 0 and len(self.filter_set_vertices) != 0:
query_part2 = query_part2_open + query_part2_d + final_d + query_part2_close
query = query_part1 + query_part2
logging.info(query)
return query
def cypher_query_gen(self, depth, url):
query_part1 = "WITH \"" + url + "\" AS url\n\nLOAD CSV WITH HEADERS FROM url AS row\n\n"
node_id = self.fetch_node_id(self.page)
query_part2 = """
FOREACH (x IN CASE WHEN row.pred££ IS NULL THEN [] ELSE [1] END | MERGE (n0:depth_0:""" + node_id + """ {iri: \"""" + self.page + """\"}) MERGE (n££:depth_££:""" + node_id + """ {iri: row.n££}) MERGE (n£)-[p:pred {iri: row.pred££}]->(n££))
"""
query_part2 = query_part2.replace("££", str(0 + 1)).replace("£", str(0))
for i in range(depth - 1):
block = """
FOREACH (x IN CASE WHEN row.pred££ IS NULL THEN [] ELSE [1] END | MERGE (n£:depth_£:""" + node_id + """ {iri: row.n£}) MERGE (n££:depth_££:""" + node_id + """ {iri: row.n££}) MERGE (n£)-[p:pred {iri: row.pred££}]->(n££))
"""
query_part2 = query_part2 + block.replace("££", str(i + 2)).replace("£", str(i + 1))
query = query_part1 + query_part2
logging.info(query)
return query
class Populate(Build):
def __init__(self, page, filter_set_edges=[], filter_set_vertices=[]):
self.name = "Populate build on " + page
self.page = page
self.filter_set_edges = filter_set_edges
self.filter_set_vertices = filter_set_vertices
def sparql_query_gen(self, depth):
query_part1 = "\nSELECT "
for i in range(depth):
string = "?pred£ ?pred_inv£ ?n£ "
query_part1 = query_part1 + string.replace("£", str(i + 1))
filter_query_pred = self.filter_query_pred_gen()
filter_query_pred_inv = self.filter_query_pred_inv_gen()
filter_query_vertex = self.filter_query_vertex_gen()
filter_query_vertex_mid = filter_query_vertex + filter_query_vertex.replace("£", "££")
filter_query_vertex_mid = filter_query_vertex_mid.replace(")FILTER(", ")&&(").replace("FILTER(", "FILTER((") + ")"
query_part2_open = """
WHERE {
"""
query_part2_a = """
{ {
<""" + self.page + """> ?pred1 ?n1
} UNION {
?n1 ?pred_inv1 <""" + self.page + """>
} } .
"""
query_part2_b = """
{ {
""" + filter_query_pred.replace("£", "1") + """
<""" + self.page + """> ?pred1 ?n1
} UNION {
""" + filter_query_pred_inv.replace("£", "1") + """
?n1 ?pred_inv1 <""" + self.page + """>
} } .
"""
query_part2_c = """
{ {
""" + filter_query_vertex.replace("£", "1") + """
<""" + self.page + """> ?pred1 ?n1
} UNION {
""" + filter_query_vertex.replace("£", "1") + """
?n1 ?pred_inv1 <""" + self.page + """>
} } .
"""
query_part2_d = """
{ {
""" + filter_query_pred.replace("£", "1") + filter_query_vertex.replace("£", "1") + """
<""" + self.page + """> ?pred1 ?n1
} UNION {
""" + filter_query_pred_inv.replace("£", "1") + filter_query_vertex.replace("£", "1") + """
?n1 ?pred_inv1 <""" + self.page + """>
} } .
"""
for i in range(depth - 1):
block_a = """
{ {
?n£ ?pred££ ?n££
} UNION {
?n££ ?pred_inv££ ?n£
} } .
"""
block_b = """
{ {
""" + filter_query_pred.replace("£", str(i + 2)) + """
?n£ ?pred££ ?n££
} UNION {
""" + filter_query_pred_inv.replace("£", str(i + 2)) + """
?n££ ?pred_inv££ ?n£
} } .
"""
block_c = """
{ {
""" + filter_query_vertex_mid + """
?n£ ?pred££ ?n££
} UNION {
""" + filter_query_vertex_mid + """
?n££ ?pred_inv££ ?n£
} } .
"""
block_d = """
{ {
""" + filter_query_pred.replace("£", str(i + 2)) + filter_query_vertex_mid + """
?n£ ?pred££ ?n££
} UNION {
""" + filter_query_pred_inv.replace("£", str(i + 2)) + filter_query_vertex_mid + """
?n££ ?pred_inv££ ?n£
} } .
"""
query_part2_a = query_part2_a + block_a.replace("££", str(i + 2)).replace("£", str(i + 1))
query_part2_b = query_part2_b + block_b.replace("££", str(i + 2)).replace("£", str(i + 1))
query_part2_c = query_part2_c + block_c.replace("££", str(i + 2)).replace("£", str(i + 1))
query_part2_d = query_part2_d + block_d.replace("££", str(i + 2)).replace("£", str(i + 1))
query_part2_close = """
}
"""
if len(self.filter_set_edges) == 0 and len(self.filter_set_vertices) == 0:
query_part2 = query_part2_open + query_part2_a + query_part2_close
elif len(self.filter_set_vertices) == 0:
query_part2 = query_part2_open + query_part2_b + query_part2_close
elif len(self.filter_set_edges) == 0:
query_part2 = query_part2_open + query_part2_c + query_part2_close
elif len(self.filter_set_edges) != 0 and len(self.filter_set_vertices) != 0:
query_part2 = query_part2_open + query_part2_d + query_part2_close
query = query_part1 + query_part2
logging.info(query)
return query
def cypher_query_gen(self, depth, url):
query_part1 = "WITH \"" + url + "\" AS url\n\nLOAD CSV WITH HEADERS FROM url AS row\n\n"
node_id = self.fetch_node_id(self.page)
query_part2 = "MERGE (n0:depth_0:" + node_id + " {iri: \"" + self.page + "\"})\n"
for i in range(depth):
string = "MERGE (n£:depth_£:" + node_id + " {iri: row.n£})\n"
query_part2 = query_part2 + string.replace("£", str(i + 1))
query_part3 = ""
for i in range(depth):
block = """
FOREACH (x IN CASE WHEN row.pred££ IS NULL THEN [] ELSE [1] END | MERGE (n£)-[p:pred {iri: row.pred££}]->(n££))
FOREACH (x IN CASE WHEN row.pred_inv££ IS NULL THEN [] ELSE [1] END | MERGE (n£)<-[p:pred {iri: row.pred_inv££}]-(n££))
"""
query_part3 = query_part3 + block.replace("££", str(i + 1)).replace("£", str(i))
query = query_part1 + query_part2 + query_part3
logging.info(query)
return query
class Clean:
def __init__(self):
self.name = "Clean class"
class Leaf(Clean):
def __init__(self):
self.name = "Leaf clean"
def run(self, depth):
cypher_query = """
MATCH (x)
WITH x, size((x)--()) as degree
WHERE degree = 1
DETACH DELETE (x)
"""
cypher_query_set = []
for i in range(depth):
cypher_query_set.append(cypher_query)
modules.misc.commit_cypher_query_set(cypher_query_set)
class DisjointParent(Clean):
def __init__(self):
self.name = "DisjointParent clean"
def get_root_labels(self):
cypher_query = """
MATCH (x:depth_0)
RETURN DISTINCT labels(x)
"""
output = modules.misc.commit_cypher_query_numpy(cypher_query).tolist()
self.root_labels = []
for i in output:
i[0].remove("depth_0")
self.root_labels.append(i[0][0])
logging.info(self.root_labels)
def combinations(self, root_labels):
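# Enumerate unordered pairs of distinct root labels (the i < j check avoids self-pairs and duplicates)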
root_label_combinations = []
for i in root_labels:
for j in root_labels:
if i < j:
root_label_combinations.append([i, j])
self.root_label_combinations = root_label_combinations
def run(self, depth):
self.get_root_labels()
self.combinations(self.root_labels)
cypher_query_1_set = []
cypher_query_1a = """
MATCH (x:depth_0)
MATCH (y:root_1:root_2)
SET x.keep = 1, y.keep = 1
"""
cypher_query_1_set.append(cypher_query_1a)
match_a = "MATCH (x:depth_0)-->(n1)-->"
match_b = "(y:root_1:root_2)"
pattern_statement = ""
set_statement = "SET n1.keep = 1"
if depth >= 2:
cypher_query_1b = match_a + match_b + "\n" + set_statement + "\n"
cypher_query_1_set.append(cypher_query_1b)
for i in range(depth - 2):
pattern_statement = ""
match_a = match_a + "(n&)-->".replace("&", str(i + 2))
pattern_statement = pattern_statement + match_a + match_b
set_statement = set_statement + ", n" + str(i + 2) + ".keep = 1"
cypher_query_1c = pattern_statement + "\n" + set_statement
cypher_query_1_set.append(cypher_query_1c)
cypher_query_set = []
for i in self.root_label_combinations:
for j in cypher_query_1_set:
x = j.replace("root_1", i[0]).replace("root_2", i[1])
cypher_query_set.append(x)
cypher_query_2 = """
MATCH (x)
WHERE x.keep IS NULL
DETACH DELETE x
"""
cypher_query_3 = """
MATCH (x)
SET x.keep = NULL
"""
cypher_query_set = cypher_query_set + [cypher_query_2, cypher_query_3]
logging.info(cypher_query_set)
modules.misc.commit_cypher_query_set(cypher_query_set)
|
py | 1a50ec1d23adb725395a458acbfc7222f38e9bf5 | #!/usr/bin/env python3
# Copyright (c) 2015-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test multiple RPC users."""
from test_framework.test_framework import AgenorCoinTestFramework
from test_framework.util import str_to_b64str, assert_equal
import os
import http.client
import urllib.parse
class HTTPBasicsTest (AgenorCoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
def setup_chain(self):
super().setup_chain()
#Append rpcauth to agenor.conf before initialization
rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
rpcuser = "rpcuser=rpcuser💻"
rpcpassword = "rpcpassword=rpcpassword🔑"
with open(os.path.join(self.options.tmpdir+"/node0", "agenor.conf"), 'a', encoding='utf8') as f:
f.write(rpcauth+"\n")
f.write(rpcauth2+"\n")
with open(os.path.join(self.options.tmpdir+"/node1", "agenor.conf"), 'a', encoding='utf8') as f:
f.write(rpcuser+"\n")
f.write(rpcpassword+"\n")
def run_test(self):
##################################################
# Check correctness of the rpcauth config option #
##################################################
url = urllib.parse.urlparse(self.nodes[0].url)
#Old authpair
authpair = url.username + ':' + url.password
#New authpair generated via share/rpcuser tool
password = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM="
#Second authpair with different username
password2 = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI="
authpairnew = "rt:"+password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 200)
conn.close()
#Use new authpair to confirm both work
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 200)
conn.close()
#Wrong login name with rt's password
authpairnew = "rtwrong:"+password
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 401)
conn.close()
#Wrong password for rt
authpairnew = "rt:"+password+"wrong"
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 401)
conn.close()
#Correct for rt2
authpairnew = "rt2:"+password2
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 200)
conn.close()
#Wrong password for rt2
authpairnew = "rt2:"+password2+"wrong"
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 401)
conn.close()
###############################################################
# Check correctness of the rpcuser/rpcpassword config options #
###############################################################
url = urllib.parse.urlparse(self.nodes[1].url)
# rpcuser and rpcpassword authpair
rpcuserauthpair = "rpcuser💻:rpcpassword🔑"
headers = {"Authorization": "Basic " + str_to_b64str(rpcuserauthpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 200)
conn.close()
#Wrong login name with rpcuser's password
rpcuserauthpair = "rpcuserwrong:rpcpassword"
headers = {"Authorization": "Basic " + str_to_b64str(rpcuserauthpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 401)
conn.close()
#Wrong password for rpcuser
rpcuserauthpair = "rpcuser:rpcpasswordwrong"
headers = {"Authorization": "Basic " + str_to_b64str(rpcuserauthpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 401)
conn.close()
if __name__ == '__main__':
HTTPBasicsTest ().main ()
|
py | 1a50ec8695466d31d8509336e8ca43bf007f6274 | import random
import discord
import json
import requests
import io
from random import randint
from discord.ext import commands
from utils import lists, http, default, eapi, sfapi
processapi = eapi.processapi
processshowapi = eapi.processshowapi
search = sfapi.search
class ResultNotFound(Exception):
"""Used if ResultNotFound is triggered by e* API."""
pass
class InvalidHTTPResponse(Exception):
"""Used if non-200 HTTP Response got from server."""
pass
class Fun:
def __init__(self, bot):
self.bot = bot
self.config = default.get("config.json")
@commands.command(aliases=['8ball'])
async def eightball(self, ctx, *, question: commands.clean_content):
""" Consult 8ball to receive an answer """
answer = random.choice(lists.ballresponse)
await ctx.send(f"🎱 **Question:** {question}\n**Answer:** {answer}")
@staticmethod
async def randomimageapi(ctx, url, endpoint):
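# Fetch JSON from the given API and post the image found under the given endpoint key as a Discord embed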
try:
r = await http.get(url, res_method="json", no_cache=True)
except json.JSONDecodeError:
return await ctx.send("Couldn't find anything from the API")
embed = discord.Embed(colour=249742)
embed.set_image(url=r[endpoint])
await ctx.send(embed=embed)
@staticmethod
async def textapi(ctx, url, endpoint):
try:
r = await http.get(url, res_method="json", no_cache=True)
except json.JSONDecodeError:
return await ctx.send("Couldn't find anything from the API")
await ctx.send(f"{r[endpoint]}")
@staticmethod
async def factapi(ctx, url, endpoint):
try:
r = await http.get(url, res_method="json", no_cache=True)
except json.JSONDecodeError:
return await ctx.send("Couldn't find anything from the API")
await ctx.send(f'**Did you know?** 🤔\n\n{r[endpoint]}')
@staticmethod
async def asciitext(ctx, url):
try:
with requests.get(url) as f:
html = f.text
await ctx.send(f"```\n{html}\n```")
except InvalidHTTPResponse as e:
print(e)
@commands.command()
@commands.cooldown(rate=1, per=5.0, type=commands.BucketType.user)
async def cat(self, ctx):
""" Posts a random cat """
await self.randomimageapi(ctx, 'https://nekos.life/api/v2/img/meow', 'url')
@commands.command()
@commands.cooldown(rate=1, per=5.0, type=commands.BucketType.user)
async def dog(self, ctx):
""" Posts a random dog """ # https://dog.ceo/api/breeds/image/random Fetch!
await self.randomimageapi(ctx, 'https://random.dog/woof.json', 'url')
@commands.command()
@commands.cooldown(rate=1, per=5.0, type=commands.BucketType.user)
async def doggo(self, ctx):
""" Posts a random dog """
await self.randomimageapi(ctx, 'https://dog.ceo/api/breeds/image/random', 'message')
@commands.command()
@commands.cooldown(rate=1, per=5.0, type=commands.BucketType.user)
async def neko(self, ctx):
""" Posts a random neko """
await self.randomimageapi(ctx, 'https://nekos.life/api/v2/img/neko', 'url')
@commands.command()
@commands.cooldown(rate=1, per=5.0, type=commands.BucketType.user)
async def duck(self, ctx):
""" Posts a random duck """
await self.randomimageapi(ctx, 'https://random-d.uk/api/v1/random', 'url')
@commands.command()
@commands.cooldown(rate=1, per=5.0, type=commands.BucketType.user)
async def fox(self, ctx):
""" Posts a random fox girl """
await self.randomimageapi(ctx, 'https://nekos.life/api/v2/img/fox_girl', 'url')
@commands.command()
@commands.cooldown(rate=1, per=5.0, type=commands.BucketType.user)
async def rabbit(self, ctx):
""" Posts a random rabbit """
await self.randomimageapi(ctx, f'https://api.chewey-bot.ga/rabbit?auth={self.config.cheweyauth}', 'data')
@commands.command()
@commands.cooldown(rate=1, per=5.0, type=commands.BucketType.user)
async def snek(self, ctx):
""" Does a heckin snek image """
await self.randomimageapi(ctx, f'https://api.chewey-bot.ga/snake?auth={self.config.cheweyauth}', 'data')
@commands.command()
@commands.cooldown(rate=1, per=5.0, type=commands.BucketType.user)
async def otter(self, ctx):
""" Posts a random otter """
await self.randomimageapi(ctx, f'https://api.chewey-bot.ga/otter?auth={self.config.cheweyauth}', 'data')
@commands.command()
@commands.cooldown(rate=1, per=5.0, type=commands.BucketType.user)
async def birb(self, ctx):
""" Posts a random birb """
await self.randomimageapi(ctx, f'https://api.chewey-bot.ga/birb?auth={self.config.cheweyauth}', 'data')
@commands.command(aliases=['flip', 'coin'])
async def coinflip(self, ctx):
""" Coinflip! """
coinsides = ['Heads', 'Tails']
await ctx.send(f"**{ctx.author.name}** flipped a coin and got **{random.choice(coinsides)}**!")
@commands.command()
async def reverse(self, ctx, *, text: str):
""" !poow ,ffuts esreveR
Everything you type after reverse will of course, be reversed
"""
t_rev = text[::-1].replace("@", "@\u200B").replace("&", "&\u200B")
await ctx.send(f"🔁 {t_rev}")
@commands.command()
async def rate(self, ctx, *, thing: commands.clean_content):
""" Rates what you desire """
numbers = random.randint(0, 100)
decimals = random.randint(0, 9)
if numbers == 100:
decimals = 0
await ctx.send(f"I'd rate {thing} a **{numbers}.{decimals} / 100**")
@commands.command(aliases=['howhot', 'hot'])
async def hotcalc(self, ctx, user: discord.Member = None):
""" Returns a random percent for how hot is a discord user """
if user is None:
user = ctx.author
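        # seeding with the user ID makes the score deterministic per user, so
        # repeated calls for the same person always return the same percentage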
random.seed(user.id)
r = random.randint(1, 100)
hot = r / 1.17
emoji = "💔"
if hot > 25:
emoji = "❤"
if hot > 50:
emoji = "💖"
if hot > 75:
emoji = "💞"
await ctx.send(f"**{user.name}** is **{hot:.2f}%** hot {emoji}")
@commands.command()
async def e926(self, ctx, *args):
"""Searches e926 with given queries.
Arguments:
`*args` : list
The quer(y/ies)"""
msgtoedit = await ctx.send("Searching...")
args = ' '.join(args)
args = str(args)
netloc = "e926"
print("------")
print("Got command with args: " + args)
if "order:score_asc" in args:
await ctx.send("I'm not going to fall into that one, silly~")
return
if "score:" in args:
apilink = 'https://e926.net/post/index.json?tags=' + args + '&limit=320'
else:
apilink = 'https://e926.net/post/index.json?tags=' + args + ' score:>25&limit=320'
try:
await eapi.processapi(apilink)
except ResultNotFound:
await ctx.send("Result not found!")
return
except InvalidHTTPResponse:
await ctx.send("We're getting invalid response from the API, please try again later!")
return
msgtoedit = await ctx.channel.get_message(msgtoedit.id)
msgtosend = "Post link: `https://""" + netloc + """.net/post/show/""" + eapi.processapi.imgid + """/`\r\nArtist: `""" + eapi.processapi.imgartist + """`\r\nSource: `""" + eapi.processapi.imgsource + """`\r\nRating: """ + eapi.processapi.imgrating + """\r\nTags: `""" + eapi.processapi.imgtags + """` ...and more\r\nImage link: """ + eapi.processapi.file_link
await msgtoedit.edit(content=msgtosend)
@commands.command()
async def yell(self, ctx, *, text: str):
""" AAAAAAAAA!
Everything you type after yell will of course, be yelled
"""
t_upper = text.upper().replace("@", "@\u200B").replace("&", "&\u200B")
await ctx.send(f"⬆️ {t_upper}")
@commands.command()
async def whisper(self, ctx, *, text: str):
""" Shh
Be quiet..
"""
t_lower = text.lower().replace("@", "@\u200B").replace("&", "&\u200B")
await ctx.send(f"⬇️ {t_lower}")
@commands.command()
@commands.cooldown(rate=1, per=5.0, type=commands.BucketType.user)
async def headpat(self, ctx):
"""Posts a random headpat from headp.at"""
def url_to_bytes(url):
data = requests.get(url)
content = io.BytesIO(data.content)
filename = url.rsplit("/", 1)[-1]
return {"content": content, "filename": filename}
pats = requests.get("http://headp.at/js/pats.json").json()
pat = random.choice(pats)
file = url_to_bytes("http://headp.at/pats/{}".format(pat))
await ctx.send(file=discord.File(file["content"], file["filename"]))
@commands.command()
async def hug(self, ctx, user: discord.Member = None):
""" Hug a user! """
if user is None:
user = ctx.author
await ctx.send(f"💖 | **{ctx.author.name}** hugs **{user.name}**")
@commands.command()
async def cookie(self, ctx, user: discord.Member = None):
""" Hug a user! """
if user is None:
user = ctx.author
await ctx.send(f"🍪 | **{ctx.author.name}** gives **{user.name}** a cookie!")
@commands.command()
async def stab(self, ctx, user: discord.Member = None):
""" Ssstab a perssson! """
if user is None:
user = ctx.author
await ctx.send(f"🔪 | **{ctx.author.name}** stabbed **{user.name}** in the hand (How rude)!")
@commands.command()
async def pat(self, ctx, user: discord.Member = None):
""" Headpats for all! """
if user is None:
user = ctx.author
await ctx.send(f"<a:patkyutie:444890889513598986> | **{ctx.author.name}** pats **{user.name}** on the head!")
@commands.command()
async def nom(self, ctx, user: discord.Member = None):
""" Nom a user! """
if user is None:
user = ctx.author
await ctx.send(f"<a:WanTriggered:437201280918618112> | **{ctx.author.name}** nommed **{user.name}**'s arm!")
@commands.command()
@commands.cooldown(rate=1, per=5.0, type=commands.BucketType.user)
async def fact(self, ctx):
""" sends a random fact """
await self.factapi(ctx, 'https://nekos.life/api/v2/fact', 'fact')
@commands.command()
async def bamboozle(self, ctx):
""" You just got bamboozled! """
await ctx.send(f"**{ctx.author.name}** just got heckin' bamboozled!")
@commands.command(hidden=True)
async def highcontrastphotooffruitfloatingthreateninglyinthedark(self, ctx):
""" .. """
await ctx.send("https://i.imgur.com/gtm1VKQ.jpg")
@commands.command(hidden=True)
async def lighttheme(self, ctx):
""" E """
await ctx.send("Ew https://i.imgur.com/fbIE97N.png")
@commands.command()
@commands.guild_only()
async def ship(self, ctx, user: discord.User, *, user2: discord.User=None):
"""Checks the shiprate for 2 users"""
author = ctx.message.author
if not user2:
user2 = author
if not user:
await ctx.send("can't ship nothing y'know..")
elif user.id == user2.id:
await ctx.send("i-i can't ship the same person..")
elif user.id == author.id and user2.id == author.id:
await ctx.send(f"wow, you're in love with yourself, huh {ctx.author.name}?")
elif user == self.bot.user and user2 == author or user2 == self.bot.user and user == author:
blushes = ["m-me..? 0////0", "m-me..? >////<"]
return await ctx.send(random.choice(blushes))
else:
n = randint(1, 100)
if n == 100:
bar = "██████████"
heart = '💞'
elif n >= 90:
bar = "█████████."
heart = '💕'
elif n >= 80:
bar = "████████.."
heart = '😍'
elif n >= 70:
bar = "███████..."
heart = '💗'
elif n >= 60:
bar = "██████...."
heart = '❤'
elif n >= 50:
bar = '█████.....'
heart = '❤'
elif n >= 40:
bar = "████......"
heart = '💔'
elif n >= 30:
bar = "███......."
heart = '💔'
elif n >= 20:
bar = "██........"
heart = '💔'
elif n >= 10:
bar = "█........."
heart = '💔'
elif n < 10:
bar = ".........."
heart = '🖤'
else:
bar = ".........."
heart = '🖤'
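            # the ship name below is built from the first half of one username
            # and the second half of the other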
name1 = user.name.replace(" ", "")
name1 = name1[:int(len(name1) / 2):]
name2 = user2.name.replace(" ", "")
name2 = name2[int(len(name2) / 2)::]
ship = discord.Embed(description=f"**{n}%** **`{bar}`** {heart}", color=ctx.me.colour)
ship.title = f"{user.name} x {user2.name}"
ship.set_footer(text=f"Shipname: {str(name1 + name2).lower()}")
await ctx.send(embed=ship)
@commands.command(aliases=['👏'])
@commands.guild_only()
async def emojify(self, ctx, emote, *, text_to_clap: str):
""" 👏bottom👏text👏 """
clapped_text = text_to_clap.replace("@everyone", f"{emote}everyone").replace("@here", f"{emote}here").replace(" ", f"{emote}")
clapped_text = f"{emote}{clapped_text}{emote}"
await ctx.send(clapped_text)
@commands.command()
async def owo(self, ctx):
"""Sends a random owo face"""
owo = random.choice(lists.owos)
await ctx.send(f"{owo} whats this~?")
@commands.command()
async def choose(self, ctx, *args):
"""Choose one of a lot arguments (Split with |) """
args = ' '.join(args)
args = str(args)
choices = args.split('|')
if len(choices) < 2:
await ctx.send("You need to send at least 2 argument!")
return
await ctx.send(random.choice(choices))
@commands.command()
async def jpeg(self, ctx, urltojpeg: str):
""" Does what it says on the can """
if "http" not in urltojpeg:
return ctx.send("Include a url you donk!")
await self.randomimageapi(ctx, f'https://nekobot.xyz/api/imagegen?type=jpeg&url={urltojpeg}', 'message')
@commands.command()
async def deepfry(self, ctx, urltojpeg: str):
""" Deepfries an image """
if "http" not in urltojpeg:
return ctx.send("Include a url you donk!")
await self.randomimageapi(ctx, f'https://nekobot.xyz/api/imagegen?type=deepfry&image={urltojpeg}', 'message')
@commands.command()
async def clyde(self, ctx, clydetext: str):
""" Makes Clyde say something """
if clydetext is None:
return ctx.send("Include some text you donk!")
await self.randomimageapi(ctx, f'https://nekobot.xyz/api/imagegen?type=clyde&text={clydetext}', 'message')
@commands.command()
async def magik(self, ctx, intensity: str, imgtomagik: str):
""" why don'T WE JUST RELAX AND TURn on THe rADIO? wOuLd You LIKE AM OR FM """
if imgtomagik is None:
return ctx.send("Include some text you donk!")
if intensity not in ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10']:
return ctx.send("Include an intensity to magik (1-10)")
await self.randomimageapi(ctx, f'https://nekobot.xyz/api/imagegen?type=magik&image={imgtomagik}&intensity={intensity}', 'message')
@commands.command(aliases=['ascii'])
async def asciify(self, ctx, *, text: str):
""" Test """
texttoascii = text.replace(" ", "%20")
await self.asciitext(ctx, f"http://artii.herokuapp.com/make?text={texttoascii}")
def setup(bot):
bot.add_cog(Fun(bot))
|
py | 1a50ec9959a204638b1e44dbd1f2c6e16301c982 | import os
import json
import pandas
from flask import Flask, jsonify, redirect, render_template, request
from google.cloud import secretmanager
from alpha_vantage.timeseries import TimeSeries
app = Flask(__name__)
PROJECT_ID = os.environ.get("PROJECTID")
secrets = secretmanager.SecretManagerServiceClient()
ALPHA_VANTAGE_KEY = secrets.access_secret_version(request={"name": "projects/"+PROJECT_ID+"/secrets/alpha-vantage-key/versions/1"}).payload.data.decode("utf-8")
ts = TimeSeries(key=ALPHA_VANTAGE_KEY)
@app.route("/")
def hello():
return "Hello World!!!"
@app.route('/api/v1/symbol', methods=['POST'])
def get_time_series():
if request.method == 'POST':
symbol = request.args['symbol']
data, metadata = ts.get_intraday(
symbol, interval='15min', outputsize="25")
return jsonify(data=data)
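
# Example request (sketch, not part of the original file): the symbol is read from the
# query string, as in the handler above.
#   curl -X POST "http://localhost:5000/api/v1/symbol?symbol=GOOG"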
if __name__ == "__main__":
    app.debug = True
app.run() |
py | 1a50ec9af6243c7df0906dcbdba305436a17aeca | # Copyright 2020 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import os
import torch
import torch.distributed as dist
from monai.data import (
CacheDataset,
DataLoader,
load_decathlon_datalist,
partition_dataset,
)
from monai.engines import SupervisedEvaluator, SupervisedTrainer
from monai.handlers import (
CheckpointSaver,
LrScheduleHandler,
MeanDice,
StatsHandler,
TensorBoardStatsHandler,
ValidationHandler,
)
from monai.inferers import SimpleInferer, SlidingWindowInferer
from monai.losses import DiceLoss
from monai.networks.layers import Norm
from monai.networks.nets import UNet
from monai.transforms import (
Activationsd,
AsDiscreted,
Compose,
CropForegroundd,
EnsureChannelFirstd,
LoadImaged,
Orientationd,
RandCropByPosNegLabeld,
ScaleIntensityRanged,
Spacingd,
ToTensord,
)
from torch.nn.parallel import DistributedDataParallel
from monai.handlers import from_engine
class TrainConfiger:
"""
This class is used to config the necessary components of train and evaluate engines
for MONAI trainer.
Please check the implementation of `SupervisedEvaluator` and `SupervisedTrainer`
from `monai.engines` and determine which components can be used.
Args:
config_root: root folder path of config files.
wf_config_file_name: json file name of the workflow config file.
"""
def __init__(
self,
config_root: str,
wf_config_file_name: str,
local_rank: int = 0,
):
with open(os.path.join(config_root, wf_config_file_name)) as file:
wf_config = json.load(file)
self.wf_config = wf_config
"""
config Args:
max_epochs: the total epoch number for trainer to run.
learning_rate: the learning rate for optimizer.
data_list_base_dir: the directory containing the data list json file.
data_list_json_file: the data list json file.
val_interval: the interval (number of epochs) to do validation.
ckpt_dir: the directory to save the checkpoint.
amp: whether to enable auto-mixed-precision training.
use_gpu: whether to use GPU in training.
multi_gpu: whether to use multiple GPUs for distributed training.
"""
self.max_epochs = wf_config["max_epochs"]
self.learning_rate = wf_config["learning_rate"]
self.data_list_base_dir = wf_config["data_list_base_dir"]
self.data_list_json_file = wf_config["data_list_json_file"]
self.val_interval = wf_config["val_interval"]
self.ckpt_dir = wf_config["ckpt_dir"]
self.amp = wf_config["amp"]
self.use_gpu = wf_config["use_gpu"]
self.multi_gpu = wf_config["multi_gpu"]
self.local_rank = local_rank
def set_device(self):
if self.multi_gpu:
# initialize distributed training
dist.init_process_group(backend="nccl", init_method="env://")
device = torch.device(f"cuda:{self.local_rank}")
torch.cuda.set_device(device)
else:
device = torch.device("cuda" if self.use_gpu else "cpu")
self.device = device
def configure(self):
self.set_device()
network = UNet(
dimensions=3,
in_channels=1,
out_channels=2,
channels=(16, 32, 64, 128, 256),
strides=(2, 2, 2, 2),
num_res_units=2,
norm=Norm.BATCH,
).to(self.device)
if self.multi_gpu:
network = DistributedDataParallel(
module=network,
device_ids=[self.device],
find_unused_parameters=False,
)
train_transforms = Compose(
[
LoadImaged(keys=("image", "label")),
EnsureChannelFirstd(keys=("image", "label")),
Spacingd(
keys=["image", "label"],
pixdim=(1.5, 1.5, 2.0),
mode=("bilinear", "nearest"),
),
Orientationd(keys=["image", "label"], axcodes="RAS"),
ScaleIntensityRanged(
keys="image",
a_min=-57,
a_max=164,
b_min=0.0,
b_max=1.0,
clip=True,
),
CropForegroundd(keys=("image", "label"), source_key="image"),
RandCropByPosNegLabeld(
keys=("image", "label"),
label_key="label",
spatial_size=(64, 64, 64),
pos=1,
neg=1,
num_samples=4,
image_key="image",
image_threshold=0,
),
ToTensord(keys=("image", "label")),
]
)
# set datalist
train_datalist = load_decathlon_datalist(
os.path.join(self.data_list_base_dir, self.data_list_json_file),
is_segmentation=True,
data_list_key="training",
base_dir=self.data_list_base_dir,
)
val_datalist = load_decathlon_datalist(
os.path.join(self.data_list_base_dir, self.data_list_json_file),
is_segmentation=True,
data_list_key="validation",
base_dir=self.data_list_base_dir,
)
if self.multi_gpu:
train_datalist = partition_dataset(
data=train_datalist,
shuffle=True,
num_partitions=dist.get_world_size(),
even_divisible=True,
)[dist.get_rank()]
train_ds = CacheDataset(
data=train_datalist,
transform=train_transforms,
cache_rate=1.0,
num_workers=4,
)
train_data_loader = DataLoader(
train_ds,
batch_size=2,
shuffle=True,
num_workers=4,
)
val_transforms = Compose(
[
LoadImaged(keys=("image", "label")),
EnsureChannelFirstd(keys=("image", "label")),
Spacingd(
keys=["image", "label"],
pixdim=(1.5, 1.5, 2.0),
mode=("bilinear", "nearest"),
),
Orientationd(keys=["image", "label"], axcodes="RAS"),
ScaleIntensityRanged(
keys="image",
a_min=-57,
a_max=164,
b_min=0.0,
b_max=1.0,
clip=True,
),
CropForegroundd(keys=("image", "label"), source_key="image"),
ToTensord(keys=("image", "label")),
]
)
val_ds = CacheDataset(
data=val_datalist, transform=val_transforms, cache_rate=0.0, num_workers=4
)
val_data_loader = DataLoader(
val_ds,
batch_size=1,
shuffle=False,
num_workers=4,
)
post_transform = Compose(
[
Activationsd(keys="pred", softmax=True),
AsDiscreted(
keys=["pred", "label"],
argmax=[True, False],
to_onehot=True,
num_classes=2,
),
]
)
# metric
key_val_metric = {
"val_mean_dice": MeanDice(
include_background=False,
output_transform=from_engine(["pred", "label"]),
#device=self.device,
)
}
val_handlers = [
StatsHandler(output_transform=lambda x: None),
CheckpointSaver(
save_dir=self.ckpt_dir,
save_dict={"model": network},
save_key_metric=True,
),
TensorBoardStatsHandler(
log_dir=self.ckpt_dir, output_transform=lambda x: None
),
]
self.eval_engine = SupervisedEvaluator(
device=self.device,
val_data_loader=val_data_loader,
network=network,
inferer=SlidingWindowInferer(
roi_size=[160, 160, 160],
sw_batch_size=4,
overlap=0.5,
),
postprocessing=post_transform,
key_val_metric=key_val_metric,
val_handlers=val_handlers,
amp=self.amp,
)
optimizer = torch.optim.Adam(network.parameters(), self.learning_rate)
loss_function = DiceLoss(to_onehot_y=True, softmax=True)
lr_scheduler = torch.optim.lr_scheduler.StepLR(
optimizer, step_size=5000, gamma=0.1
)
train_handlers = [
LrScheduleHandler(lr_scheduler=lr_scheduler, print_lr=True),
ValidationHandler(
validator=self.eval_engine, interval=self.val_interval, epoch_level=True
),
StatsHandler(tag_name="train_loss", output_transform=from_engine("loss", first=True)),
TensorBoardStatsHandler(
log_dir=self.ckpt_dir,
tag_name="train_loss",
output_transform=from_engine("loss", first=True),
),
]
self.train_engine = SupervisedTrainer(
device=self.device,
max_epochs=self.max_epochs,
train_data_loader=train_data_loader,
network=network,
optimizer=optimizer,
loss_function=loss_function,
inferer=SimpleInferer(),
postprocessing=post_transform,
key_train_metric=None,
train_handlers=train_handlers,
amp=self.amp,
)
if self.local_rank > 0:
self.train_engine.logger.setLevel(logging.WARNING)
self.eval_engine.logger.setLevel(logging.WARNING)
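
# Example usage (sketch, not part of the original file; file names are hypothetical):
#   configer = TrainConfiger(config_root="./config", wf_config_file_name="config_train.json", local_rank=0)
#   configer.configure()
#   configer.train_engine.run()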
|
py | 1a50ecbb6b418a94ce32afde29696d109941f724 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
#-----------------------------------------------------------------------
# Author: delimitry
#-----------------------------------------------------------------------
import os
import time
import math
import datetime
from asciicanvas import AsciiCanvas
x_scale_ratio = 1.75
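# terminal cells are taller than they are wide, so x coordinates are stretched by
# this ratio to keep the clock face roughly circular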
def draw_second_hand(ascii_canvas, seconds, length, fill_char):
"""
Draw second hand
"""
x0 = int(math.ceil(ascii_canvas.cols / 2.0))
y0 = int(math.ceil(ascii_canvas.lines / 2.0))
x1 = x0 + int(math.cos((seconds + 45) * 6 * math.pi / 180) * length * x_scale_ratio)
y1 = y0 + int(math.sin((seconds + 45) * 6 * math.pi / 180) * length)
ascii_canvas.add_line(int(x0), int(y0), int(x1), int(y1), fill_char=fill_char)
def draw_minute_hand(ascii_canvas, minutes, length, fill_char):
"""
Draw minute hand
"""
x0 = int(math.ceil(ascii_canvas.cols / 2.0))
y0 = int(math.ceil(ascii_canvas.lines / 2.0))
x1 = x0 + int(math.cos((minutes + 45) * 6 * math.pi / 180) * length * x_scale_ratio)
y1 = y0 + int(math.sin((minutes + 45) * 6 * math.pi / 180) * length)
ascii_canvas.add_line(int(x0), int(y0), int(x1), int(y1), fill_char=fill_char)
def draw_hour_hand(ascii_canvas, hours, minutes, length, fill_char):
"""
Draw hour hand
"""
x0 = int(math.ceil(ascii_canvas.cols / 2.0))
y0 = int(math.ceil(ascii_canvas.lines / 2.0))
total_hours = hours + minutes / 60.0
x1 = x0 + int(math.cos((total_hours + 45) * 30 * math.pi / 180) * length * x_scale_ratio)
y1 = y0 + int(math.sin((total_hours + 45) * 30 * math.pi / 180) * length)
ascii_canvas.add_line(int(x0), int(y0), int(x1), int(y1), fill_char=fill_char)
def draw_clock_face(ascii_canvas, radius, mark_char):
"""
Draw clock face with hour and minute marks
"""
x0 = ascii_canvas.cols // 2
y0 = ascii_canvas.lines // 2
# draw marks first
for mark in range(1, 12 * 5 + 1):
x1 = x0 + int(math.cos((mark + 45) * 6 * math.pi / 180) * radius * x_scale_ratio)
y1 = y0 + int(math.sin((mark + 45) * 6 * math.pi / 180) * radius)
if mark % 5 != 0:
ascii_canvas.add_text(x1, y1, mark_char)
# start from 1 because at 0 index - 12 hour
for mark in range(1, 12 + 1):
x1 = x0 + int(math.cos((mark + 45) * 30 * math.pi / 180) * radius * x_scale_ratio)
y1 = y0 + int(math.sin((mark + 45) * 30 * math.pi / 180) * radius)
ascii_canvas.add_text(x1, y1, '%s' % mark)
def draw_clock(cols, lines):
"""
Draw clock
"""
if cols < 25 or lines < 25:
print('Too little columns/lines for print out the clock!')
exit()
# prepare chars
single_line_border_chars = ('.', '-', '.', '|', ' ', '|', '`', '-', "'")
second_hand_char = '.'
minute_hand_char = 'o'
hour_hand_char = 'O'
mark_char = '`'
if os.name == 'nt':
single_line_border_chars = ('.', '-', '.', '|', ' ', '|', '`', '-', "'") # ('\xDA', '\xC4', '\xBF', '\xB3', '\x20', '\xB3', '\xC0', '\xC4', '\xD9')
second_hand_char = '.' # '\xFA'
minute_hand_char = 'o' # '\xF9'
hour_hand_char = 'O' # 'o'
mark_char = '`' # '\xF9'
# create ascii canvas for clock and eval vars
ascii_canvas = AsciiCanvas(cols, lines)
center_x = int(math.ceil(cols / 2.0))
center_y = int(math.ceil(lines / 2.0))
radius = center_y - 5
second_hand_length = int(radius / 1.17)
minute_hand_length = int(radius / 1.25)
hour_hand_length = int(radius / 1.95)
# add clock region and clock face
ascii_canvas.add_rect(5, 3, int(math.floor(cols / 2.0)) * 2 - 9, int(math.floor(lines / 2.0)) * 2 - 5)
draw_clock_face(ascii_canvas, radius, mark_char)
now = datetime.datetime.now()
# add regions with weekday and day if possible
if center_x > 25:
left_pos = int(radius * x_scale_ratio) / 2 - 4
ascii_canvas.add_nine_patch_rect(int(center_x + left_pos), int(center_y - 1), 5, 3, single_line_border_chars)
ascii_canvas.add_text(int(center_x + left_pos + 1), int(center_y), now.strftime('%a'))
ascii_canvas.add_nine_patch_rect(int(center_x + left_pos + 5), int(center_y - 1), 4, 3, single_line_border_chars)
ascii_canvas.add_text(int(center_x + left_pos + 1 + 5), int(center_y), now.strftime('%d'))
# add clock hands
draw_second_hand(ascii_canvas, now.second, second_hand_length, fill_char=second_hand_char)
draw_minute_hand(ascii_canvas, now.minute, minute_hand_length, fill_char=minute_hand_char)
draw_hour_hand(ascii_canvas, now.hour, now.minute, hour_hand_length, fill_char=hour_hand_char)
# print out canvas
ascii_canvas.print_out()
def main():
lines = 40
cols = int(lines * x_scale_ratio)
# set console window size and screen buffer size
if os.name == 'nt':
os.system('mode con: cols=%s lines=%s' % (cols + 1, lines + 1))
while True:
os.system('cls' if os.name == 'nt' else 'clear')
draw_clock(cols, lines)
time.sleep(0.2)
if __name__ == '__main__':
main()
|
py | 1a50ed0ae34fb9b33a8224b77a652a4d1b36741b | # coding: utf-8
import socketserver
import os
# Copyright 2013 Abram Hindle, Eddie Antonio Santos
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Furthermore it is derived from the Python documentation examples thus
# some of the code is Copyright © 2001-2013 Python Software
# Foundation; All Rights Reserved
#
# http://docs.python.org/2/library/socketserver.html
#
# run: python freetests.py
# try: curl -v -X GET http://127.0.0.1:8080/
def getFileContents(path):
fileText = ""
with open(path, "r") as fin:
fileText = fin.read()
return fileText
class MyWebServer(socketserver.BaseRequestHandler):
def handle(self):
self.data = self.request.recv(1024).strip()
requestParams = self.data.decode().split(' ')
requestedFile = requestParams[1]
print("Got a request of: %s\n" % self.data)
if requestParams[0] == "GET":
if "../" not in requestedFile and os.path.exists("./www"+requestedFile):
self.index("./www"+requestedFile)
else:
self.pageNotFound()
else:
self.methodNotAllowed()
def pageNotFound(self):
self.request.sendall(bytearray("HTTP/1.1 404 Not Found\r\n", "utf-8"))
def methodNotAllowed(self):
self.request.sendall(
bytearray("HTTP/1.1 405 Method Not Allowed\r\n", "utf-8"))
    def movedPermanently(self, location):
host = self.server.server_address[0]
port = self.server.server_address[1]
baseUrl = "http://%s:%s" % (host, port)
self.request.sendall(
bytearray("HTTP/1.1 301 Moved Permanently\n", "utf-8"))
self.request.sendall(
bytearray("Location:" + baseUrl + location, "utf-8"))
def serveFile(self, fileText, fileType, httpHeader):
self.request.sendall(bytearray(httpHeader, "utf-8"))
self.request.sendall(
bytearray("Content-Type:" + fileType + "\n\n", "utf-8"))
self.request.sendall(bytearray(fileText, "utf-8"))
def index(self, path):
httpHeader = "HTTP/1.1 200 OK\n"
if os.path.isdir(path):
if path[-1] != "/":
path += "/"
location = path[5:]
            self.movedPermanently(location)
return
# httpHeader = "HTTP/1.1 302 Found\n"
path += "index.html"
fileText = getFileContents(path)
fileType = "text/html"
if path[-3:] == "css":
fileType = "text/css"
self.serveFile(fileText, fileType, httpHeader)
if __name__ == "__main__":
HOST, PORT = "localhost", 8080
socketserver.TCPServer.allow_reuse_address = True
# Create the server, binding to localhost on port 8080
server = socketserver.TCPServer((HOST, PORT), MyWebServer)
# Activate the server; this will keep running until you
# interrupt the program with Ctrl-C
server.serve_forever()
|
py | 1a50ed206dc3e23411c1a8005678847423da07fc | #!/usr/bin/python
import sys
import socket
import argparse
parser = argparse.ArgumentParser(
prog='find_my_name',
prefix_chars='-'
)
parser.add_argument('-I','--ipaddress', type=str, help="The IP Address to reverse resolve")
parser.add_argument('-v','--verbose', action="store_true", help="Print additional output")
args = parser.parse_args()
# ReverseLookup
# gethostbyaddr returns (fqdn, aliases, net_ip); a single call is enough
fqdn, aliases, net_ip = socket.gethostbyaddr(args.ipaddress)
fqdn_only = socket.getfqdn(args.ipaddress)
#if args.verbose:
if args.ipaddress:
print "IPADDRESS Entered: ", args.ipaddress
print "REVERSE Lookup Results"
print "FQDN: ", fqdn
print "ALIASES: ", aliases
print "Network IP: ", net_ip
print "FQDN_ONLY: ", fqdn_only
|
py | 1a50ed6d37734913dde15573ce55a0c53fdd5abd | # Copyright 2018 Xu Chen All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import tensorflow as tf
import numpy as np
import os
from input_data.cifar10 import load_cifar10_data
def _dream_cropping(image, label, specs, cropped_size):
if cropped_size < specs['image_size']:
image = tf.image.resize_image_with_crop_or_pad(
image, cropped_size, cropped_size)
# convert from 0 ~ 255 to 0. ~ 1.
image = tf.cast(image, tf.float32) * (1. / 255.)
# transpose image into (CHW)
image = tf.transpose(image, [2, 0, 1]) # (CHW)
feature = {
'image': image,
'label': tf.one_hot(label, 10)
}
return feature
def _dream_process(feature):
batched_features = {
'images': feature['image'],
'labels': feature['label']
}
return batched_features
def _dream_sample_pairs(split, data_dir, max_epochs, n_repeats,
total_batch_size=1):
"""
We do the following steps to produce the dataset:
1. sample one (image, label) pair in one class;
2. repeat pair in 1. {n_repeats} times;
3. go back to do 1. unless we finish one iteration
(after a {num_classes} time loop). And we consider
this as one epoch.
4. go back to do 1. again to finish {max_epochs} loop.
So there will be {max_epochs} number of unique pairs selected for
each class.
Args:
split: 'train' or 'test', which split of dataset to read from;
data_dir: path to the mnist data directory;
max_epochs: maximum epochs to go through the model;
n_repeats: number of computed gradients;
batch_size: total number of images per batch.
Returns:
processed images, labels and specs
"""
"""Dataset specs"""
specs = {
'split': split,
'max_epochs': max_epochs,
'steps_per_epoch': n_repeats,
'batch_size': total_batch_size,
'image_size': 32,
'depth': 3,
'num_classes': 10
}
"""Load data from mat files"""
images, labels = load_cifar10_data.load_cifar10(data_dir, split)
assert images.shape[0] == labels.shape[0]
specs['total_size'] = int(images.shape[0])
"""Process np array"""
# sort by labels to get the index permutations
# classes: 0, 1, 2, 3, 4, 5, 6, 7, 8, 9
indices = [specs['total_size'] // specs['num_classes'] * i
for i in range(specs['num_classes'])]
indices.append(specs['total_size'])
perm = labels.argsort()
images = images[perm]
labels = labels[perm]
sampled_idc_lists = []
for start in indices[:-1]:
sampled_idc_lists.append(
np.arange(start, start + max_epochs).tolist())
sampled_idc_mat = np.array(sampled_idc_lists)
sampled_idc_mat = np.transpose(sampled_idc_mat, [1, 0])
sampled_idc_lists = sampled_idc_mat.flatten().tolist()
assert len(sampled_idc_lists) == max_epochs * specs['num_classes']
# we let n_repeats = steps_per_epoch = number of computed gradients
list_of_images = []
list_of_labels = []
for idx in sampled_idc_lists:
for _ in range(n_repeats):
list_of_images.append(images[idx])
list_of_labels.append(labels[idx])
res_images = np.stack(list_of_images, axis=0)
res_labels = np.array(list_of_labels)
assert res_images.shape == (max_epochs*specs['num_classes']*n_repeats, specs['image_size'], specs['image_size'], specs['depth'])
assert res_labels.shape == (max_epochs*specs['num_classes']*n_repeats,)
specs['total_size'] = res_labels.shape[0]
return (res_images, res_labels), specs
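# e.g. with max_epochs=2, n_repeats=3 and 10 classes, the arrays above hold
# 2 * 10 * 3 = 60 entries: 2 unique images per class, each repeated 3 times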
def inputs(split, data_dir, max_epochs, n_repeats, cropped_size,
total_batch_size=1):
"""Construct fashion mnist inputs for dream experiment.
Args:
split: 'train' or 'test' split to read from dataset;
data_dir: path to mnist data directory;
max_epochs: maximum epochs to go through the model;
n_repeats: number of computed gradients / number of the same input to repeat;
cropped_size: image size after cropping;
total_batch_size: total number of images per batch.
Returns:
batched_features: a dictionary of the input data features.
"""
assert split == 'train' or split == 'test'
"""Load sampled images and labels"""
(images, labels), specs = _dream_sample_pairs(
split, data_dir, max_epochs, n_repeats, total_batch_size)
if cropped_size == None:
cropped_size = specs['image_size']
assert cropped_size <= specs['image_size']
"""Process dataset object"""
dataset = tf.data.Dataset.from_tensor_slices((images, labels))
dataset = dataset.prefetch(1)
dataset = dataset.map(
lambda image, label: _dream_cropping(image, label, specs, cropped_size),
num_parallel_calls=3)
specs['image_size'] = cropped_size
batched_dataset = dataset.batch(specs['batch_size'])
batched_dataset = batched_dataset.map(_dream_process, num_parallel_calls=3)
batched_dataset = batched_dataset.prefetch(1)
return batched_dataset, specs |
py | 1a50ee32482a1a800885a478cbc06b8cd56a4df4 | import hashlib
from MyLibs import configure, db
def check(session):
if 'logged_in' in session:
username, passwordH = session['logged_in'].split("=")
login = db.check(username, passwordH)
if login == "Wrong":
return(False)
else:
return(True)
def login(username, password):
login = db.check(username, hashlib.sha256(bytes(password, "utf8")).hexdigest())
return(login)
def create(session, username, password):
session['logged_in'] = username + "=" + hashlib.sha256(bytes(password, "utf8")).hexdigest()
return(session)
|
py | 1a50ef494460817d588cbf8fb38f5f04d46b8ab5 | # Copyright 2020 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The IBMQ device class for PennyLane-Orquestra.
"""
import os
import warnings
from pennylane_orquestra.orquestra_device import OrquestraDevice
class QeIBMQDevice(OrquestraDevice):
"""The Orquestra IBMQ device.
Args:
wires (int, Iterable[Number, str]]): Number of subsystems represented
by the device, or iterable that contains unique labels for the
subsystems as numbers (i.e., ``[-1, 0, 2]``) or strings (``['ancilla',
'q1', 'q2']``). Default 1 if not specified.
shots (int or list[int]): Number of circuit evaluations/random samples used to estimate
expectation values of observables. If ``None``, the device calculates
probability, expectation values, and variances analytically. If an integer,
it specifies the number of samples to estimate these quantities.
If a list of integers is passed, the circuit evaluations are batched over the list of shots.
backend (str): the name of the Qiskit backend to use supported by
Orquestra, e.g., ``"ibmq_qasm_simulator"`` or the name of real hardware
devices
Keyword Args:
ibmqx_token=None (str): the authentication token needed to run a job on
IBMQ
"""
short_name = "orquestra.ibmq"
qe_component = "qe-qiskit"
qe_module_name = "qeqiskit.backend"
qe_function_name = "QiskitBackend"
def __init__(self, wires, shots=8192, backend="ibmq_qasm_simulator", **kwargs):
self._token = kwargs.get("ibmqx_token", None) or os.getenv("IBMQX_TOKEN")
if self._token is None:
raise ValueError(
"Please pass a valid IBMQX token to the device using the "
"'ibmqx_token' argument or by specifying the IBMQX_TOKEN "
"environment variable."
)
if shots is None:
# Raise a warning if the analytic attribute was set to True
warnings.warn(
f"The {self.short_name} device cannot be used in analytic "
"mode. Setting shots to 8192. Results are based on "
"sampling."
)
shots = 8192
super().__init__(wires, backend=backend, shots=shots, **kwargs)
def create_backend_specs(self):
backend_dict = super().create_backend_specs()
# Plug in the IBMQ token
backend_dict["api_token"] = self._token
return backend_dict
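
# Example device creation (sketch, not part of the original file): assumes PennyLane is
# installed and a valid token is available via the IBMQX_TOKEN environment variable.
#   import pennylane as qml
#   dev = qml.device("orquestra.ibmq", wires=2, shots=8192, backend="ibmq_qasm_simulator")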
|
py | 1a50ef7af62a6707a6396d42ff539b9639d51f92 | # -*- coding: utf-8 -*-
from __future__ import print_function
# tag::mcts_go_cnn_preprocessing[]
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
np.random.seed(123)
X = np.load('../generated_games/features-200.npy')
Y = np.load('../generated_games/labels-200.npy')
samples = X.shape[0]
size = 9
input_shape = (size, size, 1)
X = X.reshape(samples, size, size, 1)
train_samples = 10000
X_train, X_test = X[:train_samples], X[train_samples:]
Y_train, Y_test = Y[:train_samples], Y[train_samples:]
# end::mcts_go_cnn_preprocessing[]
# tag::mcts_go_cnn_model[]
model = Sequential()
"""
model.add(Conv2D(32, kernel_size=(3, 3),
activation='relu',
input_shape=input_shape))
"""
# filter = 48
# select a 3x3 convolution kernel
# normally the output of a convolution is smaller than the input;
# adding padding='same' makes Keras zero-pad the matrix around the edges, so the output keeps the same dimensions as the input
model.add(Conv2D(filters=48, # <1>
kernel_size=(3, 3), # <2>
activation='sigmoid',
padding='same',
input_shape=input_shape))
model.add(Dropout(rate=0.6))
model.add(Conv2D(64, (3, 3), activation='relu'))
# max pooling
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(rate=0.6))
# flatten
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(rate=0.6))
# softmax
model.add(Dense(size * size, activation='softmax'))
model.summary()
model.compile(loss='categorical_crossentropy',
optimizer='sgd',
metrics=['accuracy'])
# end::mcts_go_cnn_model[]
# tag::mcts_go_cnn_eval[]
model.fit(X_train, Y_train,
batch_size=64,
epochs=5,
verbose=1,
validation_data=(X_test, Y_test))
score = model.evaluate(X_test, Y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
# end::mcts_go_cnn_eval[]
|
py | 1a50f0315f2564ed94cba82519ce8d067068b118 | def is_triangle(a, b, c):
    if a + b > c and a + c > b and b + c > a:
        return True
    return False
|
py | 1a50f124df3ce87659e88872c9bdea2f2fad9fb8 | # # ===============================================================================
# # Copyright 2014 Jake Ross
# #
# # Licensed under the Apache License, Version 2.0 (the "License");
# # you may not use this file except in compliance with the License.
# # You may obtain a copy of the License at
# #
# # http://www.apache.org/licenses/LICENSE-2.0
# #
# # Unless required by applicable law or agreed to in writing, software
# # distributed under the License is distributed on an "AS IS" BASIS,
# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# # See the License for the specific language governing permissions and
# # limitations under the License.
# # ===============================================================================
#
# # ============= enthought library imports =======================
# from __future__ import absolute_import
# from pyface.file_dialog import FileDialog
# # ============= standard library imports ========================
# import os
# # ============= local library imports ==========================
# from pychron.database.core.defaults import load_irradiation_map
# from pychron.loggable import Loggable
# from pychron.paths import paths
#
#
# class IrradiationHolderLoader(Loggable):
# def do_import(self, db):
# path = self._get_holder_path()
# if path:
# self.debug('doing irradiation holder import')
# overwrite_geometry = True
# name = os.path.splitext(os.path.basename(path))[0]
# msg='Added'
# dbh = db.get_irradiation_holder(name)
# if dbh:
# msg = 'Updated'
# load_irradiation_map(db, path, name, overwrite_geometry)
#
# self.information_dialog('{} Irradiation Holder "{}"'.format(msg, name))
#
# def _get_holder_path(self):
# dlg=FileDialog(default_directory=paths.irradiation_tray_maps_dir,
# wildcard='*.txt')
# if dlg.open():
# if dlg.path:
# return dlg.path
# # ============= EOF =============================================
#
#
#
|
py | 1a50f168d79e913ab3d369c67a7a08493ca3ed5b | from pyunity import Behaviour, SceneManager, GameObject, Vector3, MeshRenderer, Mesh, Material, RGB, ShowInInspector
class Rotator(Behaviour):
def Update(self, dt):
self.transform.eulerAngles += Vector3(0, 90, 135) * dt
def main():
scene = SceneManager.AddScene("Scene")
scene.mainCamera.transform.localPosition = Vector3(0, 0, -10)
cube = GameObject("Cube")
renderer = cube.AddComponent(MeshRenderer)
renderer.mesh = Mesh.cube(2)
renderer.mat = Material(RGB(255, 0, 0))
cube.AddComponent(Rotator)
scene.Add(cube)
scene.List()
SceneManager.LoadScene(scene)
if __name__ == "__main__":
main()
|
py | 1a50f182480424bddece85845a2071389528a44b | # Generated by Django 3.0.5 on 2020-04-27 00:15
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('tutor', '0026_auto_20200426_1442'),
]
operations = [
migrations.RenameField(
model_name='profile',
old_name='acceptedjobs',
new_name='jobinteractions',
),
migrations.RemoveField(
model_name='profile',
name='requestedjobs',
),
]
|
py | 1a50f245a57a7dc8f245cc754d41302762328746 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Johnny Chan
# https://github.com/jchanvfx/NodeGraphQt
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name Johnny Chan nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
**NodeGraphQt** is a node graph framework that can be implemented and re purposed
into applications that supports **PySide2**.
project: https://github.com/jchanvfx/NodeGraphQt
example code:
.. code-block:: python
:linenos:
import sys
    from NodeGraphQt import QtWidgets, NodeGraph, BaseNode, BackdropNode
class MyNode(BaseNode):
__identifier__ = 'com.chantasticvfx'
NODE_NAME = 'My Node'
def __init__(self):
super(MyNode, self).__init__()
self.add_input('foo', color=(180, 80, 0))
self.add_output('bar')
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
graph = NodeGraph()
        graph.register_node(MyNode)
graph.register_node(BackdropNode)
backdrop = graph.create_node('nodeGraphQt.nodes.Backdrop', name='Backdrop')
node_a = graph.create_node('com.chantasticvfx.MyNode', name='Node A')
node_b = graph.create_node('com.chantasticvfx.MyNode', name='Node B', color='#5b162f')
node_a.set_input(0, node_b.output(0))
viewer = graph.viewer()
viewer.show()
app.exec_()
"""
try:
from Qt import QtWidgets, QtGui, QtCore, QtCompat, QtOpenGL
except ImportError as ie:
from .vendor.Qt import __version__ as qtpy_ver
    from .vendor.Qt import QtWidgets, QtGui, QtCore, QtCompat, QtOpenGL
print('Cannot import "Qt.py" module falling back on '
'"NodeGraphQt.vendor.Qt ({})"'.format(qtpy_ver))
from .base.graph import NodeGraph
from .base.menu import NodesMenu, NodeGraphMenu, NodeGraphCommand
from .base.node import NodeObject, BaseNode, BackdropNode, SubGraph
from .base.port import Port
from .pkg_info import __version__ as VERSION
from .pkg_info import __license__ as LICENSE
# functions
from .base.utils import setup_context_menu, \
topological_sort_by_up, topological_sort_by_down, \
update_node_down_stream, update_node_up_stream,\
update_nodes_by_up, update_nodes_by_down
# widgets
from .widgets.node_tree import NodeTreeWidget
from .widgets.properties_bin import PropertiesBinWidget
from .widgets.node_publish_widget import NodePublishWidget
__version__ = VERSION
__all__ = [
'BackdropNode',
'BaseNode',
'LICENSE',
'NodeGraph',
'NodeGraphCommand',
'NodeGraphMenu',
'NodeObject',
'NodeTreeWidget',
'NodesMenu',
'Port',
'PropertiesBinWidget',
'VERSION',
'constants',
'setup_context_menu',
'NodePublishWidget',
'SubGraph',
'topological_sort_by_up',
'topological_sort_by_down',
'update_node_up_stream',
'update_node_down_stream',
'update_nodes_by_up',
'update_nodes_by_down',
]
|
py | 1a50f26619aeb2f861a596dcdcbbae5677e27fcd | from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import *
import logging
import emission.core.wrapper.wrapperbase as ecwb
import emission.core.wrapper.motionactivity as ecwm
import emission.core.wrapper.modeprediction as ecwmp
# Used for various metrics such as count, distance, mean speed calorie consumption,
# median speed calorie consumption
# Should come later: carbon footprint
# Optimal doesn't look like it fits this, because it is not per mode
class ModeStatTimeSummary(ecwb.WrapperBase):
# We will end up with props like
# {
# MotionTypes.IN_VEHICLE: ecwb.WrapperBase.Access.WORM
# }
# Each distance will have
#
#
# Make this only predicted mode, or remove completely depending on what we
# do for mode stuff
props = dict([(t.name, ecwb.WrapperBase.Access.WORM) for t in ecwm.MotionTypes])
props.update(dict([(t.name, ecwb.WrapperBase.Access.WORM) for t in ecwmp.PredictedModeTypes]))
props.update(
{'ts': ecwb.WrapperBase.Access.WORM, # YYYY-MM-DD
'local_dt': ecwb.WrapperBase.Access.WORM,
'fmt_time': ecwb.WrapperBase.Access.WORM,
'nUsers': ecwb.WrapperBase.Access.WORM} # Relevant in the
# aggregate case, when we want to see how many users we have
# aggregated data from so that we can compute avg, etc
)
enums = {}
geojson = []
nullable = []
local_dates = ['local_dt']
def _populateDependencies(self):
pass
|
py | 1a50f27b375d9b335ad143292efedc05e1093a1d | import os, sys
# pylint: disable-msg=F0401
from setuptools import setup, find_packages
here = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.normpath(os.path.join(here,
'openmdao',
'examples',
'metamodel_tutorial')))
import releaseinfo
version = releaseinfo.__version__
setup(name='openmdao.examples.metamodel_tutorial',
version=version,
description="OpenMDAO examples - Metamodel Tutorial",
long_description="""\
""",
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering',
],
keywords='optimization multidisciplinary multi-disciplinary analysis',
author='',
author_email='',
url='http://openmdao.org',
license='Apache License, Version 2.0',
namespace_packages=["openmdao", "openmdao.examples"],
packages=find_packages(), #['openmdao','openmdao.examples'],
include_package_data=True,
test_suite='nose.collector',
zip_safe=False,
install_requires=[
'setuptools',
'openmdao.lib',
],
entry_points="""
# -*- Entry points: -*-
"""
)
|
py | 1a50f28d013f8cf1f8254ad89d83282b236abd40 | # -*- coding: utf-8 -*-
"""
GoPro Encoding
==============
Defines the *GoPro* *Protune* encoding:
- :func:`colour.models.log_encoding_Protune`
- :func:`colour.models.log_decoding_Protune`
See Also
--------
`RGB Colourspaces Jupyter Notebook
<http://nbviewer.jupyter.org/github/colour-science/colour-notebooks/\
blob/master/notebooks/models/rgb.ipynb>`_
References
----------
- :cite:`GoPro2016a` : GoPro, Duiker, H.-P., & Mansencal, T. (2016).
gopro.py. Retrieved April 12, 2017, from
https://github.com/hpd/OpenColorIO-Configs/blob/master/aces_1.0.3/python/\
aces_ocio/colorspaces/gopro.py
"""
from __future__ import division, unicode_literals
import numpy as np
from colour.utilities import from_range_1, to_domain_1
__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2013-2019 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = '[email protected]'
__status__ = 'Production'
__all__ = ['log_encoding_Protune', 'log_decoding_Protune']
def log_encoding_Protune(x):
"""
Defines the *Protune* log encoding curve / opto-electronic transfer
function.
Parameters
----------
x : numeric or array_like
Linear data :math:`x`.
Returns
-------
numeric or ndarray
Non-linear data :math:`y`.
Notes
-----
+------------+-----------------------+---------------+
| **Domain** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``x`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
+------------+-----------------------+---------------+
| **Range** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``y`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
References
----------
:cite:`GoPro2016a`
Examples
--------
>>> log_encoding_Protune(0.18) # doctest: +ELLIPSIS
0.6456234...
"""
x = to_domain_1(x)
y = np.log(x * 112 + 1) / np.log(113)
return from_range_1(y)
def log_decoding_Protune(y):
"""
Defines the *Protune* log decoding curve / electro-optical transfer
function.
Parameters
----------
y : numeric or array_like
Non-linear data :math:`y`.
Returns
-------
numeric or ndarray
Linear data :math:`x`.
Notes
-----
+------------+-----------------------+---------------+
| **Domain** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``y`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
+------------+-----------------------+---------------+
| **Range** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``x`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
References
----------
:cite:`GoPro2016a`
Examples
--------
>>> log_decoding_Protune(0.645623486803636) # doctest: +ELLIPSIS
0.1...
"""
y = to_domain_1(y)
x = (113 ** y - 1) / 112
return from_range_1(x)
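
# Round-trip sanity check (sketch, not part of the original module): decoding the
# encoded value recovers the input up to floating point error, e.g.
#   log_decoding_Protune(log_encoding_Protune(0.18))  # ~0.18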
|
py | 1a50f2d50576c1655104881991824db93285e515 | import datetime
import re
import smtplib
import time
import urllib
from typing import Any, List, Optional, Sequence
from unittest.mock import MagicMock, patch
from urllib.parse import urlencode
import orjson
from django.conf import settings
from django.contrib.auth.views import PasswordResetConfirmView
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.http import HttpResponse
from django.test import override_settings
from django.urls import reverse
from django.utils.timezone import now as timezone_now
from confirmation import settings as confirmation_settings
from confirmation.models import (
Confirmation,
ConfirmationKeyException,
MultiuseInvite,
confirmation_url,
create_confirmation_link,
generate_key,
get_object_from_key,
one_click_unsubscribe_link,
)
from zerver.context_processors import common_context
from zerver.decorator import do_two_factor_login
from zerver.forms import HomepageForm, check_subdomain_available
from zerver.lib.actions import (
add_new_user_history,
change_user_is_active,
do_add_default_stream,
do_change_full_name,
do_change_realm_subdomain,
do_change_user_role,
do_create_default_stream_group,
do_create_realm,
do_create_user,
do_deactivate_realm,
do_deactivate_user,
do_get_user_invites,
do_invite_users,
do_set_realm_property,
get_default_streams_for_realm,
get_stream,
)
from zerver.lib.email_notifications import enqueue_welcome_emails, followup_day2_email_delay
from zerver.lib.initial_password import initial_password
from zerver.lib.mobile_auth_otp import (
ascii_to_hex,
hex_to_ascii,
is_valid_otp,
otp_decrypt_api_key,
otp_encrypt_api_key,
xor_hex_strings,
)
from zerver.lib.name_restrictions import is_disposable_domain
from zerver.lib.rate_limiter import add_ratelimit_rule, remove_ratelimit_rule
from zerver.lib.send_email import FromAddress, deliver_email, send_future_email
from zerver.lib.stream_subscription import get_stream_subscriptions_for_user
from zerver.lib.streams import create_stream_if_needed
from zerver.lib.subdomains import is_root_domain_available
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import (
avatar_disk_path,
cache_tries_captured,
find_key_by_email,
get_test_image_file,
load_subdomain_token,
message_stream_count,
most_recent_message,
most_recent_usermessage,
queries_captured,
reset_emails_in_zulip_realm,
)
from zerver.models import (
CustomProfileField,
CustomProfileFieldValue,
DefaultStream,
Message,
PreregistrationUser,
Realm,
Recipient,
ScheduledEmail,
Stream,
Subscription,
UserMessage,
UserProfile,
flush_per_request_caches,
get_realm,
get_system_bot,
get_user,
get_user_by_delivery_email,
)
from zerver.views.auth import redirect_and_log_into_subdomain, start_two_factor_auth
from zerver.views.development.registration import confirmation_key
from zerver.views.invite import get_invitee_emails_set
from zproject.backends import ExternalAuthDataDict, ExternalAuthResult
class RedirectAndLogIntoSubdomainTestCase(ZulipTestCase):
def test_data(self) -> None:
realm = get_realm("zulip")
user_profile = self.example_user("hamlet")
name = user_profile.full_name
email = user_profile.delivery_email
response = redirect_and_log_into_subdomain(ExternalAuthResult(user_profile=user_profile))
data = load_subdomain_token(response)
self.assertDictEqual(
data,
{"full_name": name, "email": email, "subdomain": realm.subdomain, "is_signup": False},
)
data_dict = ExternalAuthDataDict(is_signup=True, multiuse_object_key="key")
response = redirect_and_log_into_subdomain(
ExternalAuthResult(user_profile=user_profile, data_dict=data_dict)
)
data = load_subdomain_token(response)
self.assertDictEqual(
data,
{
"full_name": name,
"email": email,
"subdomain": realm.subdomain,
# the email has an account at the subdomain,
# so is_signup get overridden to False:
"is_signup": False,
"multiuse_object_key": "key",
},
)
data_dict = ExternalAuthDataDict(
email=self.nonreg_email("alice"),
full_name="Alice",
subdomain=realm.subdomain,
is_signup=True,
full_name_validated=True,
multiuse_object_key="key",
)
response = redirect_and_log_into_subdomain(ExternalAuthResult(data_dict=data_dict))
data = load_subdomain_token(response)
self.assertDictEqual(
data,
{
"full_name": "Alice",
"email": self.nonreg_email("alice"),
"full_name_validated": True,
"subdomain": realm.subdomain,
"is_signup": True,
"multiuse_object_key": "key",
},
)
class DeactivationNoticeTestCase(ZulipTestCase):
def test_redirection_for_deactivated_realm(self) -> None:
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
for url in ("/register/", "/login/"):
result = self.client_get(url)
self.assertEqual(result.status_code, 302)
self.assertIn("deactivated", result.url)
def test_redirection_for_active_realm(self) -> None:
for url in ("/register/", "/login/"):
result = self.client_get(url)
self.assertEqual(result.status_code, 200)
def test_deactivation_notice_when_realm_is_active(self) -> None:
result = self.client_get("/accounts/deactivated/")
self.assertEqual(result.status_code, 302)
self.assertIn("login", result.url)
def test_deactivation_notice_when_deactivated(self) -> None:
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
result = self.client_get("/accounts/deactivated/")
self.assertIn("Zulip Dev, has been deactivated.", result.content.decode())
self.assertNotIn("It has moved to", result.content.decode())
def test_deactivation_notice_when_deactivated_and_deactivated_redirect_is_set(self) -> None:
realm = get_realm("zulip")
realm.deactivated = True
realm.deactivated_redirect = "http://example.zulipchat.com"
realm.save(update_fields=["deactivated", "deactivated_redirect"])
result = self.client_get("/accounts/deactivated/")
self.assertIn(
'It has moved to <a href="http://example.zulipchat.com">http://example.zulipchat.com</a>.',
result.content.decode(),
)
def test_deactivation_notice_when_realm_subdomain_is_changed(self) -> None:
realm = get_realm("zulip")
do_change_realm_subdomain(realm, "new-subdomain-name", acting_user=None)
result = self.client_get("/accounts/deactivated/")
self.assertIn(
'It has moved to <a href="http://new-subdomain-name.testserver">http://new-subdomain-name.testserver</a>.',
result.content.decode(),
)
def test_deactivated_redirect_field_of_placeholder_realms_are_modified_on_changing_subdomain_multiple_times(
self,
) -> None:
realm = get_realm("zulip")
do_change_realm_subdomain(realm, "new-name-1", acting_user=None)
result = self.client_get("/accounts/deactivated/")
self.assertIn(
'It has moved to <a href="http://new-name-1.testserver">http://new-name-1.testserver</a>.',
result.content.decode(),
)
realm = get_realm("new-name-1")
do_change_realm_subdomain(realm, "new-name-2", acting_user=None)
result = self.client_get("/accounts/deactivated/")
self.assertIn(
'It has moved to <a href="http://new-name-2.testserver">http://new-name-2.testserver</a>.',
result.content.decode(),
)
class AddNewUserHistoryTest(ZulipTestCase):
def test_add_new_user_history_race(self) -> None:
"""Sends a message during user creation"""
# Create a user who hasn't had historical messages added
realm = get_realm("zulip")
stream = Stream.objects.get(realm=realm, name="Denmark")
DefaultStream.objects.create(stream=stream, realm=realm)
# Make sure at least 3 messages are sent to Denmark and it's a default stream.
message_id = self.send_stream_message(self.example_user("hamlet"), stream.name, "test 1")
self.send_stream_message(self.example_user("hamlet"), stream.name, "test 2")
self.send_stream_message(self.example_user("hamlet"), stream.name, "test 3")
with patch("zerver.lib.actions.add_new_user_history"):
self.register(self.nonreg_email("test"), "test")
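# add_new_user_history is patched out above so that we can invoke it
# manually below, after sending a message that races against it.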
user_profile = self.nonreg_user("test")
subs = Subscription.objects.select_related("recipient").filter(
user_profile=user_profile, recipient__type=Recipient.STREAM
)
streams = Stream.objects.filter(id__in=[sub.recipient.type_id for sub in subs])
# Send a message afterwards to trigger a race between message
# sending and `add_new_user_history`.
race_message_id = self.send_stream_message(
self.example_user("hamlet"), streams[0].name, "test"
)
# Overwrite ONBOARDING_UNREAD_MESSAGES to 2
ONBOARDING_UNREAD_MESSAGES = 2
with patch("zerver.lib.actions.ONBOARDING_UNREAD_MESSAGES", ONBOARDING_UNREAD_MESSAGES):
add_new_user_history(user_profile, streams)
# Our first message is in the user's history
self.assertTrue(
UserMessage.objects.filter(user_profile=user_profile, message_id=message_id).exists()
)
# The race message is in the user's history and marked unread.
self.assertTrue(
UserMessage.objects.filter(
user_profile=user_profile, message_id=race_message_id
).exists()
)
self.assertFalse(
UserMessage.objects.get(
user_profile=user_profile, message_id=race_message_id
).flags.read.is_set
)
# Verify that the ONBOARDING_UNREAD_MESSAGES latest messages
# that weren't the race message are marked as unread.
latest_messages = (
UserMessage.objects.filter(
user_profile=user_profile,
message__recipient__type=Recipient.STREAM,
)
.exclude(message_id=race_message_id)
.order_by("-message_id")[0:ONBOARDING_UNREAD_MESSAGES]
)
self.assertEqual(len(latest_messages), 2)
for msg in latest_messages:
self.assertFalse(msg.flags.read.is_set)
# Verify that older messages are correctly marked as read.
older_messages = (
UserMessage.objects.filter(
user_profile=user_profile,
message__recipient__type=Recipient.STREAM,
)
.exclude(message_id=race_message_id)
.order_by("-message_id")[ONBOARDING_UNREAD_MESSAGES : ONBOARDING_UNREAD_MESSAGES + 1]
)
self.assertTrue(len(older_messages) > 0)
for msg in older_messages:
self.assertTrue(msg.flags.read.is_set)
def test_auto_subbed_to_personals(self) -> None:
"""
Newly created users are automatically subscribed to receive
personal messages (personals).
"""
test_email = self.nonreg_email("test")
self.register(test_email, "test")
user_profile = self.nonreg_user("test")
old_messages_count = message_stream_count(user_profile)
self.send_personal_message(user_profile, user_profile)
new_messages_count = message_stream_count(user_profile)
self.assertEqual(new_messages_count, old_messages_count + 1)
recipient = Recipient.objects.get(type_id=user_profile.id, type=Recipient.PERSONAL)
message = most_recent_message(user_profile)
self.assertEqual(message.recipient, recipient)
with patch("zerver.models.get_display_recipient", return_value="recip"):
self.assertEqual(
str(message),
"<Message: recip / / "
"<UserProfile: {} {}>>".format(user_profile.email, user_profile.realm),
)
user_message = most_recent_usermessage(user_profile)
self.assertEqual(
str(user_message),
f"<UserMessage: recip / {user_profile.email} ([])>",
)
class InitialPasswordTest(ZulipTestCase):
def test_none_initial_password_salt(self) -> None:
with self.settings(INITIAL_PASSWORD_SALT=None):
self.assertIsNone(initial_password("[email protected]"))
class PasswordResetTest(ZulipTestCase):
"""
Log in, reset password, log out, log in with new password.
"""
def get_reset_mail_body(self, subdomain: str = "zulip") -> str:
from django.core.mail import outbox
[message] = outbox
self.assertEqual(self.email_envelope_from(message), settings.NOREPLY_EMAIL_ADDRESS)
self.assertRegex(
self.email_display_from(message),
fr"^Zulip Account Security <{self.TOKENIZED_NOREPLY_REGEX}>\Z",
)
self.assertIn(f"{subdomain}.testserver", message.extra_headers["List-Id"])
return message.body
def test_password_reset(self) -> None:
user = self.example_user("hamlet")
email = user.delivery_email
old_password = initial_password(email)
assert old_password is not None
self.login_user(user)
# test password reset template
result = self.client_get("/accounts/password/reset/")
self.assert_in_response("Reset your password", result)
# start the password reset process by supplying an email address
result = self.client_post("/accounts/password/reset/", {"email": email})
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
# Check that the password reset email is from a noreply address.
body = self.get_reset_mail_body()
self.assertIn("reset your password", body)
# Visit the password reset link.
password_reset_url = self.get_confirmation_url_from_outbox(
email, url_pattern=settings.EXTERNAL_HOST + r"(\S\S+)"
)
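# Django's PasswordResetConfirmView redirects to a URL with a placeholder
# token and stores the real token in the session (so it isn't leaked via
# the Referer header), hence the extra redirect checked below.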
result = self.client_get(password_reset_url)
self.assertEqual(result.status_code, 302)
self.assertTrue(result.url.endswith(f"/{PasswordResetConfirmView.reset_url_token}/"))
final_reset_url = result.url
result = self.client_get(final_reset_url)
self.assertEqual(result.status_code, 200)
# Reset your password
with self.settings(PASSWORD_MIN_LENGTH=3, PASSWORD_MIN_GUESSES=1000):
# Verify weak passwords don't work.
result = self.client_post(
final_reset_url, {"new_password1": "easy", "new_password2": "easy"}
)
self.assert_in_response("The password is too weak.", result)
result = self.client_post(
final_reset_url, {"new_password1": "f657gdGGk9", "new_password2": "f657gdGGk9"}
)
# password reset succeeded
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/password/done/"))
# log back in with new password
self.login_by_email(email, password="f657gdGGk9")
user_profile = self.example_user("hamlet")
self.assert_logged_in_user_id(user_profile.id)
# make sure old password no longer works
self.assert_login_failure(email, password=old_password)
def test_password_reset_for_non_existent_user(self) -> None:
email = "[email protected]"
# start the password reset process by supplying an email address
result = self.client_post("/accounts/password/reset/", {"email": email})
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
# Check that the password reset email is from a noreply address.
body = self.get_reset_mail_body()
self.assertIn("Somebody (possibly you) requested a new password", body)
self.assertIn("You do not have an account", body)
self.assertIn("safely ignore", body)
self.assertNotIn("reset your password", body)
self.assertNotIn("deactivated", body)
def test_password_reset_for_deactivated_user(self) -> None:
user_profile = self.example_user("hamlet")
email = user_profile.delivery_email
do_deactivate_user(user_profile, acting_user=None)
# start the password reset process by supplying an email address
result = self.client_post("/accounts/password/reset/", {"email": email})
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
# Check that the password reset email is from a noreply address.
body = self.get_reset_mail_body()
self.assertIn("Somebody (possibly you) requested a new password", body)
self.assertIn("has been deactivated", body)
self.assertIn("safely ignore", body)
self.assertNotIn("reset your password", body)
self.assertNotIn("not have an account", body)
def test_password_reset_with_deactivated_realm(self) -> None:
user_profile = self.example_user("hamlet")
email = user_profile.delivery_email
do_deactivate_realm(user_profile.realm, acting_user=None)
# start the password reset process by supplying an email address
with self.assertLogs(level="INFO") as m:
result = self.client_post("/accounts/password/reset/", {"email": email})
self.assertEqual(m.output, ["INFO:root:Realm is deactivated"])
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
# Check that the password reset email is from a noreply address.
from django.core.mail import outbox
self.assertEqual(len(outbox), 0)
@override_settings(RATE_LIMITING=True)
def test_rate_limiting(self) -> None:
user_profile = self.example_user("hamlet")
email = user_profile.delivery_email
from django.core.mail import outbox
add_ratelimit_rule(10, 2, domain="password_reset_form_by_email")
start_time = time.time()
with patch("time.time", return_value=start_time):
self.client_post("/accounts/password/reset/", {"email": email})
self.client_post("/accounts/password/reset/", {"email": email})
self.assert_length(outbox, 2)
# Too many password reset emails sent to the address, we won't send more.
with self.assertLogs(level="INFO") as info_logs:
self.client_post("/accounts/password/reset/", {"email": email})
self.assertEqual(
info_logs.output,
["INFO:root:Too many password reset attempts for email [email protected]"],
)
self.assert_length(outbox, 2)
# Resetting for a different address works though.
self.client_post("/accounts/password/reset/", {"email": self.example_email("othello")})
self.assert_length(outbox, 3)
self.client_post("/accounts/password/reset/", {"email": self.example_email("othello")})
self.assert_length(outbox, 4)
# After time, password reset emails can be sent again.
with patch("time.time", return_value=start_time + 11):
self.client_post("/accounts/password/reset/", {"email": email})
self.client_post("/accounts/password/reset/", {"email": email})
self.assert_length(outbox, 6)
remove_ratelimit_rule(10, 2, domain="password_reset_form_by_email")
def test_wrong_subdomain(self) -> None:
email = self.example_email("hamlet")
# start the password reset process by supplying an email address
result = self.client_post("/accounts/password/reset/", {"email": email}, subdomain="zephyr")
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
body = self.get_reset_mail_body("zephyr")
self.assertIn("Somebody (possibly you) requested a new password", body)
self.assertIn("You do not have an account", body)
self.assertIn(
"active accounts in the following organization(s).\nhttp://zulip.testserver", body
)
self.assertIn("safely ignore", body)
self.assertNotIn("reset your password", body)
self.assertNotIn("deactivated", body)
def test_invalid_subdomain(self) -> None:
email = self.example_email("hamlet")
# start the password reset process by supplying an email address
result = self.client_post(
"/accounts/password/reset/", {"email": email}, subdomain="invalid"
)
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 404)
self.assert_in_response("There is no Zulip organization hosted at this subdomain.", result)
from django.core.mail import outbox
self.assertEqual(len(outbox), 0)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_auth_only(self) -> None:
"""If the email auth backend is not enabled, password reset should do nothing"""
email = self.example_email("hamlet")
with self.assertLogs(level="INFO") as m:
result = self.client_post("/accounts/password/reset/", {"email": email})
self.assertEqual(
m.output,
[
"INFO:root:Password reset attempted for [email protected] even though password auth is disabled."
],
)
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email in a few minutes to finish the process.", result)
from django.core.mail import outbox
self.assertEqual(len(outbox), 0)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_and_email_auth(self) -> None:
"""If both email and LDAP auth backends are enabled, limit password
reset to users outside the LDAP domain"""
# If the domain matches, we don't generate an email
with self.settings(LDAP_APPEND_DOMAIN="zulip.com"):
email = self.example_email("hamlet")
with self.assertLogs(level="INFO") as m:
result = self.client_post("/accounts/password/reset/", {"email": email})
self.assertEqual(
m.output, ["INFO:root:Password reset not allowed for user in LDAP domain"]
)
from django.core.mail import outbox
self.assertEqual(len(outbox), 0)
# If the domain doesn't match, we do generate an email
with self.settings(LDAP_APPEND_DOMAIN="example.com"):
email = self.example_email("hamlet")
result = self.client_post("/accounts/password/reset/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/accounts/password/reset/done/"))
result = self.client_get(result["Location"])
body = self.get_reset_mail_body()
self.assertIn("reset your password", body)
def test_redirect_endpoints(self) -> None:
"""
These tests are mostly designed to give us 100% URL coverage
in our URL coverage reports. Our mechanism for finding URL
coverage doesn't handle redirects, so we just have a few quick
tests here.
"""
result = self.client_get("/accounts/password/reset/done/")
self.assert_in_success_response(["Check your email"], result)
result = self.client_get("/accounts/password/done/")
self.assert_in_success_response(["We've reset your password!"], result)
result = self.client_get("/accounts/send_confirm/[email protected]")
self.assert_in_success_response(["/accounts/home/"], result)
result = self.client_get("/accounts/new/send_confirm/[email protected]")
self.assert_in_success_response(["/new/"], result)
class LoginTest(ZulipTestCase):
"""
Logging in, registration, and logging out.
"""
def test_login(self) -> None:
self.login("hamlet")
user_profile = self.example_user("hamlet")
self.assert_logged_in_user_id(user_profile.id)
def test_login_deactivated_user(self) -> None:
user_profile = self.example_user("hamlet")
do_deactivate_user(user_profile, acting_user=None)
result = self.login_with_return(self.example_email("hamlet"), "xxx")
self.assertEqual(result.status_code, 200)
self.assert_in_response("Your account is no longer active.", result)
self.assert_logged_in_user_id(None)
def test_login_bad_password(self) -> None:
user = self.example_user("hamlet")
password: Optional[str] = "wrongpassword"
result = self.login_with_return(user.delivery_email, password=password)
self.assert_in_success_response([user.delivery_email], result)
self.assert_logged_in_user_id(None)
# Parallel test to confirm that the right password works using the
# same login code, which verifies our failing test isn't broken
# for some other reason.
password = initial_password(user.delivery_email)
result = self.login_with_return(user.delivery_email, password=password)
self.assertEqual(result.status_code, 302)
self.assert_logged_in_user_id(user.id)
@override_settings(RATE_LIMITING_AUTHENTICATE=True)
def test_login_bad_password_rate_limiter(self) -> None:
user_profile = self.example_user("hamlet")
email = user_profile.delivery_email
add_ratelimit_rule(10, 2, domain="authenticate_by_username")
start_time = time.time()
with patch("time.time", return_value=start_time):
self.login_with_return(email, password="wrongpassword")
self.assert_logged_in_user_id(None)
self.login_with_return(email, password="wrongpassword")
self.assert_logged_in_user_id(None)
# We're over the allowed limit, so the next attempt, even with the correct
# password, will get blocked.
result = self.login_with_return(email)
self.assert_in_success_response(["Try again in 10 seconds"], result)
# After time passes, we should be able to log in.
with patch("time.time", return_value=start_time + 11):
self.login_with_return(email)
self.assert_logged_in_user_id(user_profile.id)
remove_ratelimit_rule(10, 2, domain="authenticate_by_username")
def test_login_nonexist_user(self) -> None:
result = self.login_with_return("[email protected]", "xxx")
self.assertEqual(result.status_code, 200)
self.assert_in_response("Please enter a correct email and password", result)
self.assert_logged_in_user_id(None)
def test_login_wrong_subdomain(self) -> None:
with self.assertLogs(level="WARNING") as m:
result = self.login_with_return(self.mit_email("sipbtest"), "xxx")
self.assertEqual(
m.output,
[
"WARNING:root:User [email protected] attempted password login to wrong subdomain zulip"
],
)
self.assertEqual(result.status_code, 200)
self.assert_in_response(
"Your Zulip account is not a member of the "
"organization associated with this subdomain.",
result,
)
self.assert_logged_in_user_id(None)
def test_login_invalid_subdomain(self) -> None:
result = self.login_with_return(self.example_email("hamlet"), "xxx", subdomain="invalid")
self.assertEqual(result.status_code, 404)
self.assert_in_response("There is no Zulip organization hosted at this subdomain.", result)
self.assert_logged_in_user_id(None)
def test_register(self) -> None:
reset_emails_in_zulip_realm()
realm = get_realm("zulip")
stream_names = [f"stream_{i}" for i in range(40)]
for stream_name in stream_names:
stream = self.make_stream(stream_name, realm=realm)
DefaultStream.objects.create(stream=stream, realm=realm)
# Clear all the caches.
flush_per_request_caches()
ContentType.objects.clear_cache()
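# queries_captured() and cache_tries_captured() record every database
# query and cache lookup made while registering the new user, so we can
# assert the totals don't grow with the number of default streams.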
with queries_captured() as queries, cache_tries_captured() as cache_tries:
self.register(self.nonreg_email("test"), "test")
# Ensure the number of queries we make is not O(streams)
self.assertEqual(len(queries), 70)
# We can probably avoid a couple cache hits here, but there doesn't
# seem to be any O(N) behavior. Some of the cache hits are related
# to sending messages, such as getting the welcome bot, looking up
# the alert words for a realm, etc.
self.assertEqual(len(cache_tries), 15)
user_profile = self.nonreg_user("test")
self.assert_logged_in_user_id(user_profile.id)
self.assertFalse(user_profile.enable_stream_desktop_notifications)
def test_register_deactivated(self) -> None:
"""
If you try to register for a deactivated realm, you get a clear error
page.
"""
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
result = self.client_post(
"/accounts/home/", {"email": self.nonreg_email("test")}, subdomain="zulip"
)
self.assertEqual(result.status_code, 302)
self.assertEqual("/accounts/deactivated/", result.url)
with self.assertRaises(UserProfile.DoesNotExist):
self.nonreg_user("test")
def test_register_with_invalid_email(self) -> None:
"""
If you try to register with invalid email, you get an invalid email
page
"""
invalid_email = "foo\x00bar"
result = self.client_post("/accounts/home/", {"email": invalid_email}, subdomain="zulip")
self.assertEqual(result.status_code, 200)
self.assertContains(result, "Enter a valid email address")
def test_register_deactivated_partway_through(self) -> None:
"""
If you try to register for a deactivated realm, you get a clear error
page.
"""
email = self.nonreg_email("test")
result = self.client_post("/accounts/home/", {"email": email}, subdomain="zulip")
self.assertEqual(result.status_code, 302)
self.assertNotIn("deactivated", result.url)
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
result = self.submit_reg_form_for_user(email, "abcd1234", subdomain="zulip")
self.assertEqual(result.status_code, 302)
self.assertEqual("/accounts/deactivated/", result.url)
with self.assertRaises(UserProfile.DoesNotExist):
self.nonreg_user("test")
def test_login_deactivated_realm(self) -> None:
"""
If you try to log in to a deactivated realm, you get a clear error page.
"""
realm = get_realm("zulip")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
result = self.login_with_return(self.example_email("hamlet"), subdomain="zulip")
self.assertEqual(result.status_code, 302)
self.assertEqual("/accounts/deactivated/", result.url)
def test_logout(self) -> None:
self.login("hamlet")
# We use the logout API, not self.logout, to make sure we test
# the actual logout code path.
self.client_post("/accounts/logout/")
self.assert_logged_in_user_id(None)
def test_non_ascii_login(self) -> None:
"""
You can log in even if your password contain non-ASCII characters.
"""
email = self.nonreg_email("test")
password = "hümbüǵ"
# Registering succeeds.
self.register(email, password)
user_profile = self.nonreg_user("test")
self.assert_logged_in_user_id(user_profile.id)
self.logout()
self.assert_logged_in_user_id(None)
# Logging in succeeds.
self.logout()
self.login_by_email(email, password)
self.assert_logged_in_user_id(user_profile.id)
@override_settings(TWO_FACTOR_AUTHENTICATION_ENABLED=False)
def test_login_page_redirects_logged_in_user(self) -> None:
"""You will be redirected to the app's main page if you land on the
login page when already logged in.
"""
self.login("cordelia")
response = self.client_get("/login/")
self.assertEqual(response["Location"], "http://zulip.testserver")
def test_options_request_to_login_page(self) -> None:
response = self.client_options("/login/")
self.assertEqual(response.status_code, 200)
@override_settings(TWO_FACTOR_AUTHENTICATION_ENABLED=True)
def test_login_page_redirects_logged_in_user_under_2fa(self) -> None:
"""You will be redirected to the app's main page if you land on the
login page when already logged in.
"""
user_profile = self.example_user("cordelia")
self.create_default_device(user_profile)
self.login("cordelia")
self.login_2fa(user_profile)
response = self.client_get("/login/")
self.assertEqual(response["Location"], "http://zulip.testserver")
def test_start_two_factor_auth(self) -> None:
request = MagicMock(POST={})
with patch("zerver.views.auth.TwoFactorLoginView") as mock_view:
mock_view.as_view.return_value = lambda *a, **k: HttpResponse()
response = start_two_factor_auth(request)
self.assertTrue(isinstance(response, HttpResponse))
def test_do_two_factor_login(self) -> None:
user_profile = self.example_user("hamlet")
self.create_default_device(user_profile)
request = MagicMock()
with patch("zerver.decorator.django_otp.login") as mock_login:
do_two_factor_login(request, user_profile)
mock_login.assert_called_once()
def test_zulip_default_context_does_not_load_inline_previews(self) -> None:
realm = get_realm("zulip")
description = "https://www.google.com/images/srpr/logo4w.png"
realm.description = description
realm.save(update_fields=["description"])
response = self.client_get("/login/")
expected_response = """<p><a href="https://www.google.com/images/srpr/logo4w.png">\
https://www.google.com/images/srpr/logo4w.png</a></p>"""
self.assertEqual(response.context_data["realm_description"], expected_response)
self.assertEqual(response.status_code, 200)
class InviteUserBase(ZulipTestCase):
def check_sent_emails(
self, correct_recipients: List[str], custom_from_name: Optional[str] = None
) -> None:
from django.core.mail import outbox
self.assertEqual(len(outbox), len(correct_recipients))
email_recipients = [email.recipients()[0] for email in outbox]
self.assertEqual(sorted(email_recipients), sorted(correct_recipients))
if len(outbox) == 0:
return
if custom_from_name is not None:
self.assertIn(custom_from_name, self.email_display_from(outbox[0]))
self.assertEqual(self.email_envelope_from(outbox[0]), settings.NOREPLY_EMAIL_ADDRESS)
self.assertRegex(
self.email_display_from(outbox[0]), fr" <{self.TOKENIZED_NOREPLY_REGEX}>\Z"
)
self.assertEqual(outbox[0].extra_headers["List-Id"], "Zulip Dev <zulip.testserver>")
def invite(
self,
invitee_emails: str,
stream_names: Sequence[str],
body: str = "",
invite_as: int = PreregistrationUser.INVITE_AS["MEMBER"],
) -> HttpResponse:
"""
Invites the specified users to Zulip with the specified streams.
invitee_emails should be a string containing the email addresses to
invite, comma or newline separated.
stream_names should be a list of stream name strings.
"""
stream_ids = []
for stream_name in stream_names:
stream_ids.append(self.get_stream_id(stream_name))
return self.client_post(
"/json/invites",
{
"invitee_emails": invitee_emails,
"stream_ids": orjson.dumps(stream_ids).decode(),
"invite_as": invite_as,
},
)
class InviteUserTest(InviteUserBase):
def test_successful_invite_user(self) -> None:
"""
A call to /json/invites with valid parameters causes an invitation
email to be sent.
"""
self.login("hamlet")
invitee = "[email protected]"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(invitee))
self.check_sent_emails([invitee], custom_from_name="Hamlet")
def test_newbie_restrictions(self) -> None:
user_profile = self.example_user("hamlet")
invitee = "[email protected]"
stream_name = "Denmark"
self.login_user(user_profile)
result = self.invite(invitee, [stream_name])
self.assert_json_success(result)
user_profile.date_joined = timezone_now() - datetime.timedelta(days=10)
user_profile.save()
with self.settings(INVITES_MIN_USER_AGE_DAYS=5):
result = self.invite(invitee, [stream_name])
self.assert_json_success(result)
with self.settings(INVITES_MIN_USER_AGE_DAYS=15):
result = self.invite(invitee, [stream_name])
self.assert_json_error_contains(result, "Your account is too new")
def test_invite_limits(self) -> None:
user_profile = self.example_user("hamlet")
realm = user_profile.realm
stream_name = "Denmark"
# These constants only need to be in descending order
# for this test to trigger an InvitationError based
# on max daily counts.
site_max = 50
realm_max = 40
num_invitees = 30
max_daily_count = 20
daily_counts = [(1, max_daily_count)]
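# daily_counts feeds INVITES_NEW_REALM_LIMIT_DAYS below; each
# (days, max_invites) tuple is understood here to cap aggregate
# invitations for realms newer than `days` days.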
invite_emails = [f"foo-{i:02}@zulip.com" for i in range(num_invitees)]
invitees = ",".join(invite_emails)
self.login_user(user_profile)
realm.max_invites = realm_max
realm.date_created = timezone_now()
realm.save()
def try_invite() -> HttpResponse:
with self.settings(
OPEN_REALM_CREATION=True,
INVITES_DEFAULT_REALM_DAILY_MAX=site_max,
INVITES_NEW_REALM_LIMIT_DAYS=daily_counts,
):
result = self.invite(invitees, [stream_name])
return result
result = try_invite()
self.assert_json_error_contains(result, "enough remaining invites")
# Next show that aggregate limits expire once the realm is old
# enough.
realm.date_created = timezone_now() - datetime.timedelta(days=8)
realm.save()
with queries_captured() as queries:
with cache_tries_captured() as cache_tries:
result = try_invite()
self.assert_json_success(result)
# TODO: Fix large query count here.
#
# TODO: There is some test OTHER than this one
# that is leaking some kind of state change
# that throws off the query count here. It
# is hard to investigate currently (due to
# the large number of queries), so I just
# use an approximate equality check.
actual_count = len(queries)
expected_count = 251
if abs(actual_count - expected_count) > 1:
raise AssertionError(
f"""
Unexpected number of queries:
expected query count: {expected_count}
actual: {actual_count}
"""
)
# Almost all of these cache hits are to re-fetch each one of the
# invitees. These happen inside our queue processor for sending
# confirmation emails, so they are somewhat difficult to avoid.
#
# TODO: Mock the call to queue_json_publish, so we can measure the
# queue impact separately from the user-perceived impact.
self.assert_length(cache_tries, 32)
# Next get line coverage on bumping a realm's max_invites.
realm.date_created = timezone_now()
realm.max_invites = site_max + 10
realm.save()
result = try_invite()
self.assert_json_success(result)
# Finally get coverage on the case that OPEN_REALM_CREATION is False.
with self.settings(OPEN_REALM_CREATION=False):
result = self.invite(invitees, [stream_name])
self.assert_json_success(result)
def test_cross_realm_bot(self) -> None:
inviter = self.example_user("hamlet")
self.login_user(inviter)
cross_realm_bot_email = "[email protected]"
legit_new_email = "[email protected]"
invitee_emails = ",".join([cross_realm_bot_email, legit_new_email])
result = self.invite(invitee_emails, ["Denmark"])
self.assert_json_error(
result,
"Some of those addresses are already using Zulip,"
+ " so we didn't send them an invitation."
+ " We did send invitations to everyone else!",
)
def test_invite_mirror_dummy_user(self) -> None:
"""
A mirror dummy account is a temporary account
that we keep in our system if we are mirroring
data from something like Zephyr or IRC.
We want users to eventually just sign up or
register for Zulip, in which case we will just
fully "activate" the account.
Here we test that you can invite a person who
has a mirror dummy account.
"""
inviter = self.example_user("hamlet")
self.login_user(inviter)
mirror_user = self.example_user("cordelia")
mirror_user.is_mirror_dummy = True
mirror_user.save()
change_user_is_active(mirror_user, False)
self.assertEqual(
PreregistrationUser.objects.filter(email=mirror_user.email).count(),
0,
)
result = self.invite(mirror_user.email, ["Denmark"])
self.assert_json_success(result)
prereg_user = PreregistrationUser.objects.get(email=mirror_user.email)
self.assertEqual(
prereg_user.referred_by.email,
inviter.email,
)
def test_successful_invite_user_as_owner_from_owner_account(self) -> None:
self.login("desdemona")
invitee = self.nonreg_email("alice")
result = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_OWNER"]
)
self.assert_json_success(result)
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
self.assertTrue(invitee_profile.is_realm_owner)
self.assertFalse(invitee_profile.is_guest)
def test_invite_user_as_owner_from_admin_account(self) -> None:
self.login("iago")
invitee = self.nonreg_email("alice")
response = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_OWNER"]
)
self.assert_json_error(response, "Must be an organization owner")
def test_successful_invite_user_as_admin_from_admin_account(self) -> None:
self.login("iago")
invitee = self.nonreg_email("alice")
result = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_ADMIN"]
)
self.assert_json_success(result)
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
self.assertTrue(invitee_profile.is_realm_admin)
self.assertFalse(invitee_profile.is_realm_owner)
self.assertFalse(invitee_profile.is_guest)
def test_invite_user_as_admin_from_normal_account(self) -> None:
self.login("hamlet")
invitee = self.nonreg_email("alice")
response = self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_ADMIN"]
)
self.assert_json_error(response, "Must be an organization administrator")
def test_invite_user_as_invalid_type(self) -> None:
"""
Test inviting a user with an invalid type, i.e. an invite_as value
that is not in PreregistrationUser.INVITE_AS.
"""
self.login("iago")
invitee = self.nonreg_email("alice")
response = self.invite(invitee, ["Denmark"], invite_as=10)
self.assert_json_error(response, "Must be invited as an valid type of user")
def test_successful_invite_user_as_guest_from_normal_account(self) -> None:
self.login("hamlet")
invitee = self.nonreg_email("alice")
self.assert_json_success(
self.invite(invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["GUEST_USER"])
)
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
self.assertFalse(invitee_profile.is_realm_admin)
self.assertTrue(invitee_profile.is_guest)
def test_successful_invite_user_as_guest_from_admin_account(self) -> None:
self.login("iago")
invitee = self.nonreg_email("alice")
self.assert_json_success(
self.invite(invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["GUEST_USER"])
)
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
self.assertFalse(invitee_profile.is_realm_admin)
self.assertTrue(invitee_profile.is_guest)
def test_successful_invite_user_with_name(self) -> None:
"""
A call to /json/invites with valid parameters causes an invitation
email to be sent.
"""
self.login("hamlet")
email = "[email protected]"
invitee = f"Alice Test <{email}>"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.check_sent_emails([email], custom_from_name="Hamlet")
def test_successful_invite_user_with_name_and_normal_one(self) -> None:
"""
A call to /json/invites with valid parameters causes an invitation
email to be sent.
"""
self.login("hamlet")
email = "[email protected]"
email2 = "[email protected]"
invitee = f"Alice Test <{email}>, {email2}"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.assertTrue(find_key_by_email(email2))
self.check_sent_emails([email, email2], custom_from_name="Hamlet")
def test_require_realm_admin(self) -> None:
"""
The invite_by_admins_only realm setting works properly.
"""
realm = get_realm("zulip")
realm.invite_by_admins_only = True
realm.save()
self.login("hamlet")
email = "[email protected]"
email2 = "[email protected]"
invitee = f"Alice Test <{email}>, {email2}"
self.assert_json_error(
self.invite(invitee, ["Denmark"]), "Must be an organization administrator"
)
# Now verify an administrator can do it
self.login("iago")
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(email))
self.assertTrue(find_key_by_email(email2))
self.check_sent_emails([email, email2])
def test_invite_user_signup_initial_history(self) -> None:
"""
Test that a new user invited to a stream receives some initial
history but only from public streams.
"""
self.login("hamlet")
user_profile = self.example_user("hamlet")
private_stream_name = "Secret"
self.make_stream(private_stream_name, invite_only=True)
self.subscribe(user_profile, private_stream_name)
public_msg_id = self.send_stream_message(
self.example_user("hamlet"),
"Denmark",
topic_name="Public topic",
content="Public message",
)
secret_msg_id = self.send_stream_message(
self.example_user("hamlet"),
private_stream_name,
topic_name="Secret topic",
content="Secret message",
)
invitee = self.nonreg_email("alice")
self.assert_json_success(self.invite(invitee, [private_stream_name, "Denmark"]))
self.assertTrue(find_key_by_email(invitee))
self.submit_reg_form_for_user(invitee, "password")
invitee_profile = self.nonreg_user("alice")
invitee_msg_ids = [
um.message_id for um in UserMessage.objects.filter(user_profile=invitee_profile)
]
self.assertTrue(public_msg_id in invitee_msg_ids)
self.assertFalse(secret_msg_id in invitee_msg_ids)
self.assertFalse(invitee_profile.is_realm_admin)
# Test that exactly 2 new Zulip messages were sent, both notifications.
last_3_messages = list(reversed(list(Message.objects.all().order_by("-id")[0:3])))
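# last_3_messages is in chronological order: the secret stream message
# sent above, followed by the two notification messages generated by the
# new user's signup.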
first_msg = last_3_messages[0]
self.assertEqual(first_msg.id, secret_msg_id)
# The first notification is from notification-bot to the user who invited the new user.
second_msg = last_3_messages[1]
self.assertEqual(second_msg.sender.email, "[email protected]")
self.assertTrue(
second_msg.content.startswith(
f"alice_zulip.com <`{invitee_profile.email}`> accepted your",
)
)
# The second notification is from welcome-bot to the newly invited user.
third_msg = last_3_messages[2]
self.assertEqual(third_msg.sender.email, "[email protected]")
self.assertTrue(third_msg.content.startswith("Hello, and welcome to Zulip!"))
def test_multi_user_invite(self) -> None:
"""
Invites multiple users with a variety of delimiters.
"""
self.login("hamlet")
# Intentionally use a weird string.
self.assert_json_success(
self.invite(
"""[email protected], [email protected],
[email protected]
[email protected]""",
["Denmark"],
)
)
for user in ("bob", "carol", "dave", "earl"):
self.assertTrue(find_key_by_email(f"{user}[email protected]"))
self.check_sent_emails(
[
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
]
)
def test_max_invites_model(self) -> None:
realm = get_realm("zulip")
self.assertEqual(realm.max_invites, settings.INVITES_DEFAULT_REALM_DAILY_MAX)
realm.max_invites = 3
realm.save()
self.assertEqual(get_realm("zulip").max_invites, 3)
realm.max_invites = settings.INVITES_DEFAULT_REALM_DAILY_MAX
realm.save()
def test_invite_too_many_users(self) -> None:
# Only a light test of this pathway; e.g. doesn't test that
# the limit gets reset after 24 hours
self.login("iago")
invitee_emails = "[email protected], [email protected]"
self.invite(invitee_emails, ["Denmark"])
invitee_emails = ", ".join(str(i) for i in range(get_realm("zulip").max_invites - 1))
self.assert_json_error(
self.invite(invitee_emails, ["Denmark"]),
"You do not have enough remaining invites for today. "
"Please contact [email protected] to have your limit raised. "
"No invitations were sent.",
)
def test_missing_or_invalid_params(self) -> None:
"""
Tests inviting with various missing or invalid parameters.
"""
realm = get_realm("zulip")
do_set_realm_property(realm, "emails_restricted_to_domains", True, acting_user=None)
self.login("hamlet")
invitee_emails = "[email protected]"
self.assert_json_error(
self.invite(invitee_emails, []),
"You must specify at least one stream for invitees to join.",
)
for address in ("noatsign.com", "[email protected]"):
self.assert_json_error(
self.invite(address, ["Denmark"]),
"Some emails did not validate, so we didn't send any invitations.",
)
self.check_sent_emails([])
self.assert_json_error(
self.invite("", ["Denmark"]), "You must specify at least one email address."
)
self.check_sent_emails([])
def test_guest_user_invitation(self) -> None:
"""
Guest user can't invite new users
"""
self.login("polonius")
invitee = "[email protected]"
self.assert_json_error(self.invite(invitee, ["Denmark"]), "Not allowed for guest users")
self.assertEqual(find_key_by_email(invitee), None)
self.check_sent_emails([])
def test_invalid_stream(self) -> None:
"""
Tests inviting to a non-existent stream.
"""
self.login("hamlet")
self.assert_json_error(
self.invite("[email protected]", ["NotARealStream"]),
f"Stream does not exist with id: {self.INVALID_STREAM_ID}. No invites were sent.",
)
self.check_sent_emails([])
def test_invite_existing_user(self) -> None:
"""
If you invite an address already using Zulip, no invitation is sent.
"""
self.login("hamlet")
hamlet_email = "[email protected]"
result = self.invite(hamlet_email, ["Denmark"])
self.assert_json_error(result, "We weren't able to invite anyone.")
self.assertFalse(
PreregistrationUser.objects.filter(email__iexact=hamlet_email).exists(),
)
self.check_sent_emails([])
def normalize_string(self, s: str) -> str:
s = s.strip()
return re.sub(r"\s+", " ", s)
def test_invite_links_in_name(self) -> None:
"""
URLs in the inviter's full name must be neutralized in invitation
emails (the name is kept inside the mailto: link), so invitations
can't be abused to send linkified spam.
"""
hamlet = self.example_user("hamlet")
self.login_user(hamlet)
# Test we properly handle links in user full names
do_change_full_name(hamlet, "</a> https://www.google.com", hamlet)
result = self.invite("[email protected]", ["Denmark"])
self.assert_json_success(result)
self.check_sent_emails(["[email protected]"])
from django.core.mail import outbox
body = self.normalize_string(outbox[0].alternatives[0][0])
# Verify that one can't get Zulip to send invitation emails
# that third-party products will linkify using the full_name
# field, because we've included that field inside the mailto:
# link for the sender.
self.assertIn(
'<a href="mailto:[email protected]" style="color:#46aa8f; text-decoration:underline"></a> https://www.google.com ([email protected])</a> wants',
body,
)
# TODO: Ideally, this test would also test the Invitation
# Reminder email generated, but the test setup for that is
# annoying.
def test_invite_some_existing_some_new(self) -> None:
"""
If you invite a mix of already existing and new users, invitations are
only sent to the new users.
"""
self.login("hamlet")
existing = [self.example_email("hamlet"), "[email protected]"]
new = ["[email protected]", "[email protected]"]
invitee_emails = "\n".join(existing + new)
self.assert_json_error(
self.invite(invitee_emails, ["Denmark"]),
"Some of those addresses are already using Zulip, \
so we didn't send them an invitation. We did send invitations to everyone else!",
)
# We only created accounts for the new users.
for email in existing:
self.assertRaises(
PreregistrationUser.DoesNotExist,
lambda: PreregistrationUser.objects.get(email=email),
)
for email in new:
self.assertTrue(PreregistrationUser.objects.get(email=email))
# We only sent emails to the new users.
self.check_sent_emails(new)
prereg_user = PreregistrationUser.objects.get(email="[email protected]")
self.assertEqual(prereg_user.email, "[email protected]")
def test_invite_outside_domain_in_closed_realm(self) -> None:
"""
In a realm with `emails_restricted_to_domains = True`, you can't invite people
with a different domain from that of the realm or your e-mail address.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = True
zulip_realm.save()
self.login("hamlet")
external_address = "[email protected]"
self.assert_json_error(
self.invite(external_address, ["Denmark"]),
"Some emails did not validate, so we didn't send any invitations.",
)
def test_invite_using_disposable_email(self) -> None:
"""
In a realm with `disallow_disposable_email_addresses = True`, you can't invite
people with a disposable domain.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = False
zulip_realm.disallow_disposable_email_addresses = True
zulip_realm.save()
self.login("hamlet")
external_address = "[email protected]"
self.assert_json_error(
self.invite(external_address, ["Denmark"]),
"Some emails did not validate, so we didn't send any invitations.",
)
def test_invite_outside_domain_in_open_realm(self) -> None:
"""
In a realm with `emails_restricted_to_domains = False`, you can invite people
with a different domain from that of the realm or your e-mail address.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = False
zulip_realm.save()
self.login("hamlet")
external_address = "[email protected]"
self.assert_json_success(self.invite(external_address, ["Denmark"]))
self.check_sent_emails([external_address])
def test_invite_outside_domain_before_closing(self) -> None:
"""
If you invite someone with a different domain from that of the realm
when `emails_restricted_to_domains = False`, but `emails_restricted_to_domains` later
changes to true, the invitation should succeed but the invitee's signup
attempt should fail.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = False
zulip_realm.save()
self.login("hamlet")
external_address = "[email protected]"
self.assert_json_success(self.invite(external_address, ["Denmark"]))
self.check_sent_emails([external_address])
zulip_realm.emails_restricted_to_domains = True
zulip_realm.save()
result = self.submit_reg_form_for_user("[email protected]", "password")
self.assertEqual(result.status_code, 200)
self.assert_in_response("only allows users with email addresses", result)
def test_disposable_emails_before_closing(self) -> None:
"""
If you invite someone with a disposable email address while
`disallow_disposable_email_addresses = False`, but the setting later
changes to True, the invitation should succeed but the invitee's
signup attempt should fail.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = False
zulip_realm.disallow_disposable_email_addresses = False
zulip_realm.save()
self.login("hamlet")
external_address = "[email protected]"
self.assert_json_success(self.invite(external_address, ["Denmark"]))
self.check_sent_emails([external_address])
zulip_realm.disallow_disposable_email_addresses = True
zulip_realm.save()
result = self.submit_reg_form_for_user("[email protected]", "password")
self.assertEqual(result.status_code, 200)
self.assert_in_response("Please sign up using a real email address.", result)
def test_invite_with_email_containing_plus_before_closing(self) -> None:
"""
If you invite someone whose email contains a plus sign while
`emails_restricted_to_domains = False`, but later change
`emails_restricted_to_domains = True`, the invitation should
succeed but the invitee's signup attempt should fail, since
users are not allowed to sign up with an email containing +
when the realm is restricted to a domain.
"""
zulip_realm = get_realm("zulip")
zulip_realm.emails_restricted_to_domains = False
zulip_realm.save()
self.login("hamlet")
external_address = "[email protected]"
self.assert_json_success(self.invite(external_address, ["Denmark"]))
self.check_sent_emails([external_address])
zulip_realm.emails_restricted_to_domains = True
zulip_realm.save()
result = self.submit_reg_form_for_user(external_address, "password")
self.assertEqual(result.status_code, 200)
self.assert_in_response(
"Zulip Dev, does not allow signups using emails\n that contains +", result
)
def test_invalid_email_check_after_confirming_email(self) -> None:
self.login("hamlet")
email = "[email protected]"
self.assert_json_success(self.invite(email, ["Denmark"]))
obj = Confirmation.objects.get(confirmation_key=find_key_by_email(email))
prereg_user = obj.content_object
prereg_user.email = "invalid.email"
prereg_user.save()
result = self.submit_reg_form_for_user(email, "password")
self.assertEqual(result.status_code, 200)
self.assert_in_response(
"The email address you are trying to sign up with is not valid", result
)
def test_invite_with_non_ascii_streams(self) -> None:
"""
Inviting someone to streams with non-ASCII characters succeeds.
"""
self.login("hamlet")
invitee = "[email protected]"
stream_name = "hümbüǵ"
# Make sure we're subscribed before inviting someone.
self.subscribe(self.example_user("hamlet"), stream_name)
self.assert_json_success(self.invite(invitee, [stream_name]))
def test_invitation_reminder_email(self) -> None:
from django.core.mail import outbox
# All users belong to zulip realm
referrer_name = "hamlet"
current_user = self.example_user(referrer_name)
self.login_user(current_user)
invitee_email = self.nonreg_email("alice")
self.assert_json_success(self.invite(invitee_email, ["Denmark"]))
self.assertTrue(find_key_by_email(invitee_email))
self.check_sent_emails([invitee_email])
data = {"email": invitee_email, "referrer_email": current_user.email}
invitee = PreregistrationUser.objects.get(email=data["email"])
referrer = self.example_user(referrer_name)
link = create_confirmation_link(invitee, Confirmation.INVITATION)
context = common_context(referrer)
context.update(
activate_url=link,
referrer_name=referrer.full_name,
referrer_email=referrer.email,
referrer_realm_name=referrer.realm.name,
)
with self.settings(EMAIL_BACKEND="django.core.mail.backends.console.EmailBackend"):
email = data["email"]
send_future_email(
"zerver/emails/invitation_reminder",
referrer.realm,
to_emails=[email],
from_address=FromAddress.no_reply_placeholder,
context=context,
)
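# send_future_email schedules the email as a ScheduledEmail row rather
# than sending it immediately; delivering the job below is what actually
# puts a message into the outbox.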
email_jobs_to_deliver = ScheduledEmail.objects.filter(
scheduled_timestamp__lte=timezone_now()
)
self.assertEqual(len(email_jobs_to_deliver), 1)
email_count = len(outbox)
for job in email_jobs_to_deliver:
deliver_email(job)
self.assertEqual(len(outbox), email_count + 1)
self.assertEqual(self.email_envelope_from(outbox[-1]), settings.NOREPLY_EMAIL_ADDRESS)
self.assertIn(FromAddress.NOREPLY, self.email_display_from(outbox[-1]))
# Now verify that signing up clears invite_reminder emails
with self.settings(EMAIL_BACKEND="django.core.mail.backends.console.EmailBackend"):
email = data["email"]
send_future_email(
"zerver/emails/invitation_reminder",
referrer.realm,
to_emails=[email],
from_address=FromAddress.no_reply_placeholder,
context=context,
)
email_jobs_to_deliver = ScheduledEmail.objects.filter(
scheduled_timestamp__lte=timezone_now(), type=ScheduledEmail.INVITATION_REMINDER
)
self.assertEqual(len(email_jobs_to_deliver), 1)
self.register(invitee_email, "test")
email_jobs_to_deliver = ScheduledEmail.objects.filter(
scheduled_timestamp__lte=timezone_now(), type=ScheduledEmail.INVITATION_REMINDER
)
self.assertEqual(len(email_jobs_to_deliver), 0)
def test_no_invitation_reminder_when_link_expires_quickly(self) -> None:
self.login("hamlet")
# Check invitation reminder email is scheduled with 4 day link expiry
with self.settings(INVITATION_LINK_VALIDITY_DAYS=4):
self.invite("[email protected]", ["Denmark"])
self.assertEqual(
ScheduledEmail.objects.filter(type=ScheduledEmail.INVITATION_REMINDER).count(), 1
)
# Check invitation reminder email is not scheduled with 3 day link expiry
with self.settings(INVITATION_LINK_VALIDITY_DAYS=3):
self.invite("[email protected]", ["Denmark"])
self.assertEqual(
ScheduledEmail.objects.filter(type=ScheduledEmail.INVITATION_REMINDER).count(), 1
)
# make sure users can't take a valid confirmation key from another
# pathway and use it with the invitation URL route
def test_confirmation_key_of_wrong_type(self) -> None:
email = self.nonreg_email("alice")
realm = get_realm("zulip")
inviter = self.example_user("iago")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
url = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = url.split("/")[-1]
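# The confirmation key is the final path component of the generated link.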
# Mainly a test of get_object_from_key, rather than of the invitation pathway
with self.assertRaises(ConfirmationKeyException) as cm:
get_object_from_key(registration_key, Confirmation.INVITATION)
self.assertEqual(cm.exception.error_type, ConfirmationKeyException.DOES_NOT_EXIST)
# Verify that using the wrong type doesn't work in the main confirm code path
email_change_url = create_confirmation_link(prereg_user, Confirmation.EMAIL_CHANGE)
email_change_key = email_change_url.split("/")[-1]
url = "/accounts/do_confirm/" + email_change_key
result = self.client_get(url)
self.assert_in_success_response(
["Whoops. We couldn't find your confirmation link in the system."], result
)
def test_confirmation_expired(self) -> None:
email = self.nonreg_email("alice")
realm = get_realm("zulip")
inviter = self.example_user("iago")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
url = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = url.split("/")[-1]
conf = Confirmation.objects.filter(confirmation_key=registration_key).first()
conf.date_sent -= datetime.timedelta(weeks=3)
conf.save()
target_url = "/" + url.split("/", 3)[3]
result = self.client_get(target_url)
self.assert_in_success_response(
["Whoops. The confirmation link has expired or been deactivated."], result
)
def test_send_more_than_one_invite_to_same_user(self) -> None:
self.user_profile = self.example_user("iago")
streams = []
for stream_name in ["Denmark", "Scotland"]:
streams.append(get_stream(stream_name, self.user_profile.realm))
do_invite_users(self.user_profile, ["[email protected]"], streams, False)
prereg_user = PreregistrationUser.objects.get(email="[email protected]")
do_invite_users(self.user_profile, ["[email protected]"], streams, False)
do_invite_users(self.user_profile, ["[email protected]"], streams, False)
# Also send an invite from a different realm.
lear = get_realm("lear")
lear_user = self.lear_user("cordelia")
do_invite_users(lear_user, ["[email protected]"], [], False)
invites = PreregistrationUser.objects.filter(email__iexact="[email protected]")
self.assertEqual(len(invites), 4)
do_create_user(
"[email protected]",
"password",
self.user_profile.realm,
"full name",
prereg_user=prereg_user,
acting_user=None,
)
accepted_invite = PreregistrationUser.objects.filter(
email__iexact="[email protected]", status=confirmation_settings.STATUS_ACTIVE
)
revoked_invites = PreregistrationUser.objects.filter(
email__iexact="[email protected]", status=confirmation_settings.STATUS_REVOKED
)
# If a user was invited more than once, then once they accept one invite
# and register, the other invitations must be revoked.
self.assertEqual(len(accepted_invite), 1)
self.assertEqual(accepted_invite[0].id, prereg_user.id)
expected_revoked_invites = set(invites.exclude(id=prereg_user.id).exclude(realm=lear))
self.assertEqual(set(revoked_invites), expected_revoked_invites)
self.assertEqual(
PreregistrationUser.objects.get(email__iexact="[email protected]", realm=lear).status, 0
)
def test_confirmation_obj_not_exist_error(self) -> None:
"""Since the key is a param input by the user to the registration endpoint,
if it inserts an invalid value, the confirmation object won't be found. This
tests if, in that scenario, we handle the exception by redirecting the user to
the confirmation_link_expired_error page.
"""
email = self.nonreg_email("alice")
password = "password"
realm = get_realm("zulip")
inviter = self.example_user("iago")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = "invalid_confirmation_key"
url = "/accounts/register/"
response = self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_nme": "alice"}
)
self.assertEqual(response.status_code, 200)
self.assert_in_success_response(
["The registration link has expired or is not valid."], response
)
registration_key = confirmation_link.split("/")[-1]
response = self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_nme": "alice"}
)
self.assert_in_success_response(["We just need you to do one last thing."], response)
response = self.submit_reg_form_for_user(email, password, key=registration_key)
self.assertEqual(response.status_code, 302)
def test_validate_email_not_already_in_realm(self) -> None:
email = self.nonreg_email("alice")
password = "password"
realm = get_realm("zulip")
inviter = self.example_user("iago")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = confirmation_link.split("/")[-1]
url = "/accounts/register/"
self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_name": "alice"}
)
self.submit_reg_form_for_user(email, password, key=registration_key)
url = "/accounts/register/"
response = self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_name": "alice"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(
response.url,
reverse("login") + "?" + urlencode({"email": email, "already_registered": 1}),
)
class InvitationsTestCase(InviteUserBase):
def test_do_get_user_invites(self) -> None:
self.login("iago")
user_profile = self.example_user("iago")
hamlet = self.example_user("hamlet")
othello = self.example_user("othello")
prereg_user_one = PreregistrationUser(email="[email protected]", referred_by=user_profile)
prereg_user_one.save()
prereg_user_two = PreregistrationUser(email="[email protected]", referred_by=user_profile)
prereg_user_two.save()
prereg_user_three = PreregistrationUser(email="[email protected]", referred_by=hamlet)
prereg_user_three.save()
prereg_user_four = PreregistrationUser(email="[email protected]", referred_by=othello)
prereg_user_four.save()
prereg_user_other_realm = PreregistrationUser(
email="[email protected]", referred_by=self.mit_user("sipbtest")
)
prereg_user_other_realm.save()
multiuse_invite = MultiuseInvite.objects.create(
referred_by=user_profile, realm=user_profile.realm
)
create_confirmation_link(multiuse_invite, Confirmation.MULTIUSE_INVITE)
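        # Per the assertions below, admins see every invitation in their realm, while members see only their own.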
self.assertEqual(len(do_get_user_invites(user_profile)), 5)
self.assertEqual(len(do_get_user_invites(hamlet)), 1)
self.assertEqual(len(do_get_user_invites(othello)), 1)
def test_successful_get_open_invitations(self) -> None:
"""
A GET call to /json/invites returns all unexpired invitations.
"""
realm = get_realm("zulip")
days_to_activate = getattr(settings, "INVITATION_LINK_VALIDITY_DAYS", "Wrong")
active_value = getattr(confirmation_settings, "STATUS_ACTIVE", "Wrong")
self.assertNotEqual(days_to_activate, "Wrong")
self.assertNotEqual(active_value, "Wrong")
self.login("iago")
user_profile = self.example_user("iago")
prereg_user_one = PreregistrationUser(email="[email protected]", referred_by=user_profile)
prereg_user_one.save()
expired_datetime = timezone_now() - datetime.timedelta(days=(days_to_activate + 1))
prereg_user_two = PreregistrationUser(email="[email protected]", referred_by=user_profile)
prereg_user_two.save()
PreregistrationUser.objects.filter(id=prereg_user_two.id).update(
invited_at=expired_datetime
)
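        # The backdated invite above has expired, so it should be excluded from the results.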
prereg_user_three = PreregistrationUser(
email="[email protected]", referred_by=user_profile, status=active_value
)
prereg_user_three.save()
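        # An invitation that was already accepted (STATUS_ACTIVE) is excluded as well.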
hamlet = self.example_user("hamlet")
othello = self.example_user("othello")
multiuse_invite_one = MultiuseInvite.objects.create(referred_by=hamlet, realm=realm)
create_confirmation_link(multiuse_invite_one, Confirmation.MULTIUSE_INVITE)
multiuse_invite_two = MultiuseInvite.objects.create(referred_by=othello, realm=realm)
create_confirmation_link(multiuse_invite_two, Confirmation.MULTIUSE_INVITE)
confirmation = Confirmation.objects.last()
confirmation.date_sent = expired_datetime
confirmation.save()
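        # Expiring othello's multiuse confirmation leaves only hamlet's multiuse invite open.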
result = self.client_get("/json/invites")
self.assertEqual(result.status_code, 200)
invites = orjson.loads(result.content)["invites"]
self.assertEqual(len(invites), 2)
self.assertFalse(invites[0]["is_multiuse"])
self.assertEqual(invites[0]["email"], "[email protected]")
self.assertTrue(invites[1]["is_multiuse"])
self.assertEqual(invites[1]["invited_by_user_id"], hamlet.id)
def test_successful_delete_invitation(self) -> None:
"""
A DELETE call to /json/invites/<ID> should delete the invite and
any scheduled invitation reminder emails.
"""
self.login("iago")
invitee = "[email protected]"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
prereg_user = PreregistrationUser.objects.get(email=invitee)
# Verify that the scheduled email exists.
ScheduledEmail.objects.get(address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER)
result = self.client_delete("/json/invites/" + str(prereg_user.id))
self.assertEqual(result.status_code, 200)
error_result = self.client_delete("/json/invites/" + str(prereg_user.id))
self.assert_json_error(error_result, "No such invitation")
self.assertRaises(
ScheduledEmail.DoesNotExist,
lambda: ScheduledEmail.objects.get(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
),
)
def test_successful_member_delete_invitation(self) -> None:
"""
        A DELETE call from a member account to /json/invites/<ID> should delete the invite and
any scheduled invitation reminder emails.
"""
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
invitee = "[email protected]"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
# Verify that the scheduled email exists.
prereg_user = PreregistrationUser.objects.get(email=invitee, referred_by=user_profile)
ScheduledEmail.objects.get(address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER)
# Verify another non-admin can't delete
result = self.api_delete(
self.example_user("othello"), "/api/v1/invites/" + str(prereg_user.id)
)
self.assert_json_error(result, "Must be an organization administrator")
# Verify that the scheduled email still exists.
prereg_user = PreregistrationUser.objects.get(email=invitee, referred_by=user_profile)
ScheduledEmail.objects.get(address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER)
# Verify deletion works.
result = self.api_delete(user_profile, "/api/v1/invites/" + str(prereg_user.id))
self.assertEqual(result.status_code, 200)
result = self.api_delete(user_profile, "/api/v1/invites/" + str(prereg_user.id))
self.assert_json_error(result, "No such invitation")
self.assertRaises(
ScheduledEmail.DoesNotExist,
lambda: ScheduledEmail.objects.get(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
),
)
def test_delete_owner_invitation(self) -> None:
self.login("desdemona")
owner = self.example_user("desdemona")
invitee = "[email protected]"
self.assert_json_success(
self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_OWNER"]
)
)
prereg_user = PreregistrationUser.objects.get(email=invitee)
result = self.api_delete(
self.example_user("iago"), "/api/v1/invites/" + str(prereg_user.id)
)
self.assert_json_error(result, "Must be an organization owner")
result = self.api_delete(owner, "/api/v1/invites/" + str(prereg_user.id))
self.assert_json_success(result)
result = self.api_delete(owner, "/api/v1/invites/" + str(prereg_user.id))
self.assert_json_error(result, "No such invitation")
self.assertRaises(
ScheduledEmail.DoesNotExist,
lambda: ScheduledEmail.objects.get(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
),
)
def test_delete_multiuse_invite(self) -> None:
"""
A DELETE call to /json/invites/multiuse<ID> should delete the
multiuse_invite.
"""
self.login("iago")
zulip_realm = get_realm("zulip")
multiuse_invite = MultiuseInvite.objects.create(
referred_by=self.example_user("hamlet"), realm=zulip_realm
)
create_confirmation_link(multiuse_invite, Confirmation.MULTIUSE_INVITE)
result = self.client_delete("/json/invites/multiuse/" + str(multiuse_invite.id))
self.assertEqual(result.status_code, 200)
self.assertIsNone(MultiuseInvite.objects.filter(id=multiuse_invite.id).first())
# Test that trying to double-delete fails
error_result = self.client_delete("/json/invites/multiuse/" + str(multiuse_invite.id))
self.assert_json_error(error_result, "No such invitation")
        # Test deleting an owner multiuse_invite.
multiuse_invite = MultiuseInvite.objects.create(
referred_by=self.example_user("desdemona"),
realm=zulip_realm,
invited_as=PreregistrationUser.INVITE_AS["REALM_OWNER"],
)
create_confirmation_link(multiuse_invite, Confirmation.MULTIUSE_INVITE)
error_result = self.client_delete("/json/invites/multiuse/" + str(multiuse_invite.id))
self.assert_json_error(error_result, "Must be an organization owner")
self.login("desdemona")
result = self.client_delete("/json/invites/multiuse/" + str(multiuse_invite.id))
self.assert_json_success(result)
self.assertIsNone(MultiuseInvite.objects.filter(id=multiuse_invite.id).first())
# Test deleting multiuse invite from another realm
mit_realm = get_realm("zephyr")
multiuse_invite_in_mit = MultiuseInvite.objects.create(
referred_by=self.mit_user("sipbtest"), realm=mit_realm
)
create_confirmation_link(multiuse_invite_in_mit, Confirmation.MULTIUSE_INVITE)
error_result = self.client_delete(
"/json/invites/multiuse/" + str(multiuse_invite_in_mit.id)
)
self.assert_json_error(error_result, "No such invitation")
def test_successful_resend_invitation(self) -> None:
"""
A POST call to /json/invites/<ID>/resend should send an invitation reminder email
and delete any scheduled invitation reminder email.
"""
self.login("iago")
invitee = "[email protected]"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
prereg_user = PreregistrationUser.objects.get(email=invitee)
        # Verify the original invite email was sent, then clear it from the outbox.
self.check_sent_emails([invitee], custom_from_name="Zulip")
from django.core.mail import outbox
outbox.pop()
# Verify that the scheduled email exists.
scheduledemail_filter = ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
)
self.assertEqual(scheduledemail_filter.count(), 1)
original_timestamp = scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
# Resend invite
result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assertEqual(
ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
).count(),
1,
)
# Check that we have exactly one scheduled email, and that it is different
self.assertEqual(scheduledemail_filter.count(), 1)
self.assertNotEqual(
original_timestamp, scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
)
self.assertEqual(result.status_code, 200)
error_result = self.client_post("/json/invites/" + str(9999) + "/resend")
self.assert_json_error(error_result, "No such invitation")
self.check_sent_emails([invitee], custom_from_name="Zulip")
def test_successful_member_resend_invitation(self) -> None:
"""A POST call from member a account to /json/invites/<ID>/resend
should send an invitation reminder email and delete any
scheduled invitation reminder email if they send the invite.
"""
self.login("hamlet")
user_profile = self.example_user("hamlet")
invitee = "[email protected]"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
        # Verify hamlet has only one invitation (a member can only resend invitations they sent themselves).
invitation = PreregistrationUser.objects.filter(referred_by=user_profile)
self.assertEqual(len(invitation), 1)
prereg_user = PreregistrationUser.objects.get(email=invitee)
        # Verify the original invite email was sent, then clear it from the outbox.
self.check_sent_emails([invitee], custom_from_name="Zulip")
from django.core.mail import outbox
outbox.pop()
# Verify that the scheduled email exists.
scheduledemail_filter = ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
)
self.assertEqual(scheduledemail_filter.count(), 1)
original_timestamp = scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
# Resend invite
result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assertEqual(
ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
).count(),
1,
)
# Check that we have exactly one scheduled email, and that it is different
self.assertEqual(scheduledemail_filter.count(), 1)
self.assertNotEqual(
original_timestamp, scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
)
self.assertEqual(result.status_code, 200)
error_result = self.client_post("/json/invites/" + str(9999) + "/resend")
self.assert_json_error(error_result, "No such invitation")
self.check_sent_emails([invitee], custom_from_name="Zulip")
self.logout()
self.login("othello")
invitee = "[email protected]"
prereg_user_one = PreregistrationUser(email=invitee, referred_by=user_profile)
prereg_user_one.save()
prereg_user = PreregistrationUser.objects.get(email=invitee)
error_result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assert_json_error(error_result, "Must be an organization administrator")
def test_resend_owner_invitation(self) -> None:
self.login("desdemona")
invitee = "[email protected]"
self.assert_json_success(
self.invite(
invitee, ["Denmark"], invite_as=PreregistrationUser.INVITE_AS["REALM_OWNER"]
)
)
self.check_sent_emails([invitee], custom_from_name="Zulip")
scheduledemail_filter = ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
)
self.assertEqual(scheduledemail_filter.count(), 1)
original_timestamp = scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
# Test only organization owners can resend owner invitation.
self.login("iago")
prereg_user = PreregistrationUser.objects.get(email=invitee)
error_result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assert_json_error(error_result, "Must be an organization owner")
self.login("desdemona")
result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assert_json_success(result)
self.assertEqual(
ScheduledEmail.objects.filter(
address__iexact=invitee, type=ScheduledEmail.INVITATION_REMINDER
).count(),
1,
)
# Check that we have exactly one scheduled email, and that it is different
self.assertEqual(scheduledemail_filter.count(), 1)
self.assertNotEqual(
original_timestamp, scheduledemail_filter.values_list("scheduled_timestamp", flat=True)
)
def test_accessing_invites_in_another_realm(self) -> None:
inviter = UserProfile.objects.exclude(realm=get_realm("zulip")).first()
prereg_user = PreregistrationUser.objects.create(
email="email", referred_by=inviter, realm=inviter.realm
)
self.login("iago")
error_result = self.client_post("/json/invites/" + str(prereg_user.id) + "/resend")
self.assert_json_error(error_result, "No such invitation")
error_result = self.client_delete("/json/invites/" + str(prereg_user.id))
self.assert_json_error(error_result, "No such invitation")
def test_prereg_user_status(self) -> None:
email = self.nonreg_email("alice")
password = "password"
realm = get_realm("zulip")
inviter = UserProfile.objects.filter(realm=realm).first()
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = confirmation_link.split("/")[-1]
result = self.client_post(
"/accounts/register/",
{"key": registration_key, "from_confirmation": "1", "full_name": "alice"},
)
self.assertEqual(result.status_code, 200)
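        # Loading the registration form must not consume the invite; its status stays 0 until signup completes.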
confirmation = Confirmation.objects.get(confirmation_key=registration_key)
prereg_user = confirmation.content_object
self.assertEqual(prereg_user.status, 0)
result = self.submit_reg_form_for_user(email, password, key=registration_key)
self.assertEqual(result.status_code, 302)
prereg_user = PreregistrationUser.objects.get(email=email, referred_by=inviter, realm=realm)
self.assertEqual(prereg_user.status, confirmation_settings.STATUS_ACTIVE)
user = get_user_by_delivery_email(email, realm)
self.assertIsNotNone(user)
self.assertEqual(user.delivery_email, email)
class InviteeEmailsParserTests(ZulipTestCase):
def setUp(self) -> None:
super().setUp()
self.email1 = "[email protected]"
self.email2 = "[email protected]"
self.email3 = "[email protected]"
def test_if_emails_separated_by_commas_are_parsed_and_striped_correctly(self) -> None:
emails_raw = f"{self.email1} ,{self.email2}, {self.email3}"
expected_set = {self.email1, self.email2, self.email3}
self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
def test_if_emails_separated_by_newlines_are_parsed_and_striped_correctly(self) -> None:
emails_raw = f"{self.email1}\n {self.email2}\n {self.email3} "
expected_set = {self.email1, self.email2, self.email3}
self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
def test_if_emails_from_email_client_separated_by_newlines_are_parsed_correctly(self) -> None:
emails_raw = (
f"Email One <{self.email1}>\nEmailTwo<{self.email2}>\nEmail Three<{self.email3}>"
)
expected_set = {self.email1, self.email2, self.email3}
self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
def test_if_emails_in_mixed_style_are_parsed_correctly(self) -> None:
emails_raw = f"Email One <{self.email1}>,EmailTwo<{self.email2}>\n{self.email3}"
expected_set = {self.email1, self.email2, self.email3}
self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
class MultiuseInviteTest(ZulipTestCase):
def setUp(self) -> None:
super().setUp()
self.realm = get_realm("zulip")
self.realm.invite_required = True
self.realm.save()
def generate_multiuse_invite_link(
self, streams: Optional[List[Stream]] = None, date_sent: Optional[datetime.datetime] = None
) -> str:
invite = MultiuseInvite(realm=self.realm, referred_by=self.example_user("iago"))
invite.save()
if streams is not None:
invite.streams.set(streams)
if date_sent is None:
date_sent = timezone_now()
key = generate_key()
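        # Create the Confirmation directly so tests can backdate date_sent to exercise link expiry.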
Confirmation.objects.create(
content_object=invite,
date_sent=date_sent,
confirmation_key=key,
type=Confirmation.MULTIUSE_INVITE,
)
return confirmation_url(key, self.realm, Confirmation.MULTIUSE_INVITE)
def check_user_able_to_register(self, email: str, invite_link: str) -> None:
password = "password"
result = self.client_post(invite_link, {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(email, password)
self.assertEqual(result.status_code, 302)
from django.core.mail import outbox
outbox.pop()
def test_valid_multiuse_link(self) -> None:
email1 = self.nonreg_email("test")
email2 = self.nonreg_email("test1")
email3 = self.nonreg_email("alice")
date_sent = timezone_now() - datetime.timedelta(
days=settings.INVITATION_LINK_VALIDITY_DAYS - 1
)
invite_link = self.generate_multiuse_invite_link(date_sent=date_sent)
self.check_user_able_to_register(email1, invite_link)
self.check_user_able_to_register(email2, invite_link)
self.check_user_able_to_register(email3, invite_link)
def test_expired_multiuse_link(self) -> None:
email = self.nonreg_email("newuser")
date_sent = timezone_now() - datetime.timedelta(days=settings.INVITATION_LINK_VALIDITY_DAYS)
invite_link = self.generate_multiuse_invite_link(date_sent=date_sent)
result = self.client_post(invite_link, {"email": email})
self.assertEqual(result.status_code, 200)
self.assert_in_response("The confirmation link has expired or been deactivated.", result)
def test_invalid_multiuse_link(self) -> None:
email = self.nonreg_email("newuser")
invite_link = "/join/invalid_key/"
result = self.client_post(invite_link, {"email": email})
self.assertEqual(result.status_code, 200)
self.assert_in_response("Whoops. The confirmation link is malformed.", result)
def test_invalid_multiuse_link_in_open_realm(self) -> None:
self.realm.invite_required = False
self.realm.save()
email = self.nonreg_email("newuser")
invite_link = "/join/invalid_key/"
with patch("zerver.views.registration.get_realm_from_request", return_value=self.realm):
with patch("zerver.views.registration.get_realm", return_value=self.realm):
self.check_user_able_to_register(email, invite_link)
def test_multiuse_link_with_specified_streams(self) -> None:
name1 = "newuser"
name2 = "bob"
email1 = self.nonreg_email(name1)
email2 = self.nonreg_email(name2)
stream_names = ["Rome", "Scotland", "Venice"]
streams = [get_stream(stream_name, self.realm) for stream_name in stream_names]
invite_link = self.generate_multiuse_invite_link(streams=streams)
self.check_user_able_to_register(email1, invite_link)
self.check_user_subscribed_only_to_streams(name1, streams)
stream_names = ["Rome", "Verona"]
streams = [get_stream(stream_name, self.realm) for stream_name in stream_names]
invite_link = self.generate_multiuse_invite_link(streams=streams)
self.check_user_able_to_register(email2, invite_link)
self.check_user_subscribed_only_to_streams(name2, streams)
def test_create_multiuse_link_api_call(self) -> None:
self.login("iago")
result = self.client_post("/json/invites/multiuse")
self.assert_json_success(result)
invite_link = result.json()["invite_link"]
self.check_user_able_to_register(self.nonreg_email("test"), invite_link)
def test_create_multiuse_link_with_specified_streams_api_call(self) -> None:
self.login("iago")
stream_names = ["Rome", "Scotland", "Venice"]
streams = [get_stream(stream_name, self.realm) for stream_name in stream_names]
stream_ids = [stream.id for stream in streams]
result = self.client_post(
"/json/invites/multiuse", {"stream_ids": orjson.dumps(stream_ids).decode()}
)
self.assert_json_success(result)
invite_link = result.json()["invite_link"]
self.check_user_able_to_register(self.nonreg_email("test"), invite_link)
self.check_user_subscribed_only_to_streams("test", streams)
def test_only_admin_can_create_multiuse_link_api_call(self) -> None:
self.login("iago")
# Only admins should be able to create multiuse invites even if
# invite_by_admins_only is set to False.
self.realm.invite_by_admins_only = False
self.realm.save()
result = self.client_post("/json/invites/multiuse")
self.assert_json_success(result)
invite_link = result.json()["invite_link"]
self.check_user_able_to_register(self.nonreg_email("test"), invite_link)
self.login("hamlet")
result = self.client_post("/json/invites/multiuse")
self.assert_json_error(result, "Must be an organization administrator")
def test_multiuse_link_for_inviting_as_owner(self) -> None:
self.login("iago")
result = self.client_post(
"/json/invites/multiuse",
{"invite_as": orjson.dumps(PreregistrationUser.INVITE_AS["REALM_OWNER"]).decode()},
)
self.assert_json_error(result, "Must be an organization owner")
self.login("desdemona")
result = self.client_post(
"/json/invites/multiuse",
{"invite_as": orjson.dumps(PreregistrationUser.INVITE_AS["REALM_OWNER"]).decode()},
)
self.assert_json_success(result)
invite_link = result.json()["invite_link"]
self.check_user_able_to_register(self.nonreg_email("test"), invite_link)
def test_create_multiuse_link_invalid_stream_api_call(self) -> None:
self.login("iago")
result = self.client_post(
"/json/invites/multiuse", {"stream_ids": orjson.dumps([54321]).decode()}
)
self.assert_json_error(result, "Invalid stream id 54321. No invites were sent.")
class EmailUnsubscribeTests(ZulipTestCase):
def test_error_unsubscribe(self) -> None:
# An invalid unsubscribe token "test123" produces an error.
result = self.client_get("/accounts/unsubscribe/missed_messages/test123")
self.assert_in_response("Unknown email unsubscribe request", result)
# An unknown message type "fake" produces an error.
user_profile = self.example_user("hamlet")
unsubscribe_link = one_click_unsubscribe_link(user_profile, "fake")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
self.assert_in_response("Unknown email unsubscribe request", result)
def test_missedmessage_unsubscribe(self) -> None:
"""
We provide one-click unsubscribe links in missed message
e-mails that you can click even when logged out to update your
email notification settings.
"""
user_profile = self.example_user("hamlet")
user_profile.enable_offline_email_notifications = True
user_profile.save()
unsubscribe_link = one_click_unsubscribe_link(user_profile, "missed_messages")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
self.assertEqual(result.status_code, 200)
user_profile.refresh_from_db()
self.assertFalse(user_profile.enable_offline_email_notifications)
def test_welcome_unsubscribe(self) -> None:
"""
We provide one-click unsubscribe links in welcome e-mails that you can
click even when logged out to stop receiving them.
"""
user_profile = self.example_user("hamlet")
# Simulate a new user signing up, which enqueues 2 welcome e-mails.
enqueue_welcome_emails(user_profile)
self.assertEqual(2, ScheduledEmail.objects.filter(users=user_profile).count())
# Simulate unsubscribing from the welcome e-mails.
unsubscribe_link = one_click_unsubscribe_link(user_profile, "welcome")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
# The welcome email jobs are no longer scheduled.
self.assertEqual(result.status_code, 200)
self.assertEqual(0, ScheduledEmail.objects.filter(users=user_profile).count())
def test_digest_unsubscribe(self) -> None:
"""
We provide one-click unsubscribe links in digest e-mails that you can
click even when logged out to stop receiving them.
Unsubscribing from these emails also dequeues any digest email jobs that
have been queued.
"""
user_profile = self.example_user("hamlet")
self.assertTrue(user_profile.enable_digest_emails)
# Enqueue a fake digest email.
context = {
"name": "",
"realm_uri": "",
"unread_pms": [],
"hot_conversations": [],
"new_users": [],
"new_streams": {"plain": []},
"unsubscribe_link": "",
}
send_future_email(
"zerver/emails/digest",
user_profile.realm,
to_user_ids=[user_profile.id],
context=context,
)
self.assertEqual(1, ScheduledEmail.objects.filter(users=user_profile).count())
# Simulate unsubscribing from digest e-mails.
unsubscribe_link = one_click_unsubscribe_link(user_profile, "digest")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
# The setting is toggled off, and scheduled jobs have been removed.
self.assertEqual(result.status_code, 200)
# Circumvent user_profile caching.
user_profile.refresh_from_db()
self.assertFalse(user_profile.enable_digest_emails)
self.assertEqual(0, ScheduledEmail.objects.filter(users=user_profile).count())
def test_login_unsubscribe(self) -> None:
"""
We provide one-click unsubscribe links in login
e-mails that you can click even when logged out to update your
email notification settings.
"""
user_profile = self.example_user("hamlet")
user_profile.enable_login_emails = True
user_profile.save()
unsubscribe_link = one_click_unsubscribe_link(user_profile, "login")
result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
self.assertEqual(result.status_code, 200)
user_profile.refresh_from_db()
self.assertFalse(user_profile.enable_login_emails)
class RealmCreationTest(ZulipTestCase):
@override_settings(OPEN_REALM_CREATION=True)
def check_able_to_create_realm(self, email: str, password: str = "test") -> None:
notification_bot = get_system_bot(settings.NOTIFICATION_BOT)
signups_stream, _ = create_stream_if_needed(notification_bot.realm, "signups")
string_id = "zuliptest"
# Make sure the realm does not exist
with self.assertRaises(Realm.DoesNotExist):
get_realm(string_id)
# Create new realm with the email
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/new/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(email, password, realm_subdomain=string_id)
self.assertEqual(result.status_code, 302)
self.assertTrue(
result["Location"].startswith("http://zuliptest.testserver/accounts/login/subdomain/")
)
# Make sure the realm is created
realm = get_realm(string_id)
self.assertEqual(realm.string_id, string_id)
user = get_user(email, realm)
self.assertEqual(user.realm, realm)
# Check that user is the owner.
self.assertEqual(user.role, UserProfile.ROLE_REALM_OWNER)
# Check defaults
self.assertEqual(realm.org_type, Realm.CORPORATE)
self.assertEqual(realm.emails_restricted_to_domains, False)
self.assertEqual(realm.invite_required, True)
# Check welcome messages
for stream_name, text, message_count in [
(Realm.DEFAULT_NOTIFICATION_STREAM_NAME, "with the topic", 3),
(Realm.INITIAL_PRIVATE_STREAM_NAME, "private stream", 1),
]:
stream = get_stream(stream_name, realm)
recipient = stream.recipient
messages = Message.objects.filter(recipient=recipient).order_by("date_sent")
self.assertEqual(len(messages), message_count)
self.assertIn(text, messages[0].content)
# Check signup messages
recipient = signups_stream.recipient
messages = Message.objects.filter(recipient=recipient).order_by("id")
self.assertEqual(len(messages), 2)
self.assertIn("Signups enabled", messages[0].content)
self.assertIn("signed up", messages[1].content)
self.assertEqual("zuliptest", messages[1].topic_name())
# Piggyback a little check for how we handle
# empty string_ids.
realm.string_id = ""
self.assertEqual(realm.display_subdomain, ".")
def test_create_realm_non_existing_email(self) -> None:
self.check_able_to_create_realm("[email protected]")
def test_create_realm_existing_email(self) -> None:
self.check_able_to_create_realm("[email protected]")
@override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
def test_create_realm_ldap_email(self) -> None:
self.init_default_ldap_database()
with self.settings(LDAP_EMAIL_ATTR="mail"):
self.check_able_to_create_realm(
"[email protected]", self.ldap_password("newuser_with_email")
)
def test_create_realm_as_system_bot(self) -> None:
result = self.client_post("/new/", {"email": "[email protected]"})
self.assertEqual(result.status_code, 200)
self.assert_in_response("[email protected] is reserved for system bots", result)
def test_create_realm_no_creation_key(self) -> None:
"""
Trying to create a realm without a creation_key should fail when
OPEN_REALM_CREATION is false.
"""
email = "[email protected]"
with self.settings(OPEN_REALM_CREATION=False):
# Create new realm with the email, but no creation key.
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 200)
self.assert_in_response("New organization creation disabled", result)
@override_settings(OPEN_REALM_CREATION=True)
def test_create_realm_with_subdomain(self) -> None:
password = "test"
string_id = "zuliptest"
email = "[email protected]"
realm_name = "Test"
# Make sure the realm does not exist
with self.assertRaises(Realm.DoesNotExist):
get_realm(string_id)
# Create new realm with the email
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/new/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email, password, realm_subdomain=string_id, realm_name=realm_name
)
self.assertEqual(result.status_code, 302)
result = self.client_get(result.url, subdomain=string_id)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "http://zuliptest.testserver")
# Make sure the realm is created
realm = get_realm(string_id)
self.assertEqual(realm.string_id, string_id)
self.assertEqual(get_user(email, realm).realm, realm)
self.assertEqual(realm.name, realm_name)
self.assertEqual(realm.subdomain, string_id)
@override_settings(OPEN_REALM_CREATION=True, FREE_TRIAL_DAYS=30)
def test_create_realm_during_free_trial(self) -> None:
password = "test"
string_id = "zuliptest"
email = "[email protected]"
realm_name = "Test"
with self.assertRaises(Realm.DoesNotExist):
get_realm(string_id)
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/new/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email, password, realm_subdomain=string_id, realm_name=realm_name
)
self.assertEqual(result.status_code, 302)
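        # With FREE_TRIAL_DAYS set, the post-registration redirect should land on the billing upgrade page.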
result = self.client_get(result.url, subdomain=string_id)
self.assertEqual(result.url, "http://zuliptest.testserver/upgrade/?onboarding=true")
result = self.client_get(result.url, subdomain=string_id)
self.assert_in_success_response(["Not ready to start your trial?"], result)
realm = get_realm(string_id)
self.assertEqual(realm.string_id, string_id)
self.assertEqual(get_user(email, realm).realm, realm)
self.assertEqual(realm.name, realm_name)
self.assertEqual(realm.subdomain, string_id)
@override_settings(OPEN_REALM_CREATION=True)
def test_create_two_realms(self) -> None:
"""
Verify correct behavior and PreregistrationUser handling when using
two pre-generated realm creation links to create two different realms.
"""
password = "test"
first_string_id = "zuliptest"
second_string_id = "zuliptest2"
email = "[email protected]"
first_realm_name = "Test"
second_realm_name = "Test"
# Make sure the realms do not exist
with self.assertRaises(Realm.DoesNotExist):
get_realm(first_string_id)
with self.assertRaises(Realm.DoesNotExist):
get_realm(second_string_id)
# Now we pre-generate two realm creation links
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/new/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
first_confirmation_url = self.get_confirmation_url_from_outbox(email)
self.assertEqual(PreregistrationUser.objects.filter(email=email, status=0).count(), 1)
# Get a second realm creation link.
result = self.client_post("/new/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/new/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
second_confirmation_url = self.get_confirmation_url_from_outbox(email)
self.assertNotEqual(first_confirmation_url, second_confirmation_url)
self.assertEqual(PreregistrationUser.objects.filter(email=email, status=0).count(), 2)
# Create and verify the first realm
result = self.client_get(first_confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
realm_subdomain=first_string_id,
realm_name=first_realm_name,
key=first_confirmation_url.split("/")[-1],
)
self.assertEqual(result.status_code, 302)
# Make sure the realm is created
realm = get_realm(first_string_id)
self.assertEqual(realm.string_id, first_string_id)
self.assertEqual(realm.name, first_realm_name)
# One of the PreregistrationUsers should have been used up:
self.assertEqual(PreregistrationUser.objects.filter(email=email, status=0).count(), 1)
# Create and verify the second realm
result = self.client_get(second_confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
realm_subdomain=second_string_id,
realm_name=second_realm_name,
key=second_confirmation_url.split("/")[-1],
)
self.assertEqual(result.status_code, 302)
# Make sure the realm is created
realm = get_realm(second_string_id)
self.assertEqual(realm.string_id, second_string_id)
self.assertEqual(realm.name, second_realm_name)
# The remaining PreregistrationUser should have been used up:
self.assertEqual(PreregistrationUser.objects.filter(email=email, status=0).count(), 0)
@override_settings(OPEN_REALM_CREATION=True)
def test_mailinator_signup(self) -> None:
result = self.client_post("/new/", {"email": "[email protected]"})
self.assert_in_response("Please use your real email address.", result)
@override_settings(OPEN_REALM_CREATION=True)
def test_subdomain_restrictions(self) -> None:
password = "test"
email = "[email protected]"
realm_name = "Test"
result = self.client_post("/new/", {"email": email})
self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
self.client_get(confirmation_url)
errors = {
"id": "length 3 or greater",
"-id": "cannot start or end with a",
"string-ID": "lowercase letters",
"string_id": "lowercase letters",
"stream": "unavailable",
"streams": "unavailable",
"about": "unavailable",
"abouts": "unavailable",
"zephyr": "unavailable",
}
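        # Each invalid subdomain should be rejected with the matching validation message.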
for string_id, error_msg in errors.items():
result = self.submit_reg_form_for_user(
email, password, realm_subdomain=string_id, realm_name=realm_name
)
self.assert_in_response(error_msg, result)
# test valid subdomain
result = self.submit_reg_form_for_user(
email, password, realm_subdomain="a-0", realm_name=realm_name
)
self.assertEqual(result.status_code, 302)
self.assertTrue(result.url.startswith("http://a-0.testserver/accounts/login/subdomain/"))
@override_settings(OPEN_REALM_CREATION=True)
def test_create_realm_using_old_subdomain_of_a_realm(self) -> None:
realm = get_realm("zulip")
do_change_realm_subdomain(realm, "new-name", acting_user=None)
password = "test"
email = "[email protected]"
realm_name = "Test"
result = self.client_post("/new/", {"email": email})
self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
self.client_get(confirmation_url)
result = self.submit_reg_form_for_user(
email, password, realm_subdomain="zulip", realm_name=realm_name
)
self.assert_in_response("Subdomain unavailable. Please choose a different one.", result)
@override_settings(OPEN_REALM_CREATION=True)
def test_subdomain_restrictions_root_domain(self) -> None:
password = "test"
email = "[email protected]"
realm_name = "Test"
result = self.client_post("/new/", {"email": email})
self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
self.client_get(confirmation_url)
        # Test that registering on the root domain fails when ROOT_DOMAIN_LANDING_PAGE is set.
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
result = self.submit_reg_form_for_user(
email, password, realm_subdomain="", realm_name=realm_name
)
self.assert_in_response("unavailable", result)
# test valid use of root domain
result = self.submit_reg_form_for_user(
email, password, realm_subdomain="", realm_name=realm_name
)
self.assertEqual(result.status_code, 302)
self.assertTrue(result.url.startswith("http://testserver/accounts/login/subdomain/"))
@override_settings(OPEN_REALM_CREATION=True)
def test_subdomain_restrictions_root_domain_option(self) -> None:
password = "test"
email = "[email protected]"
realm_name = "Test"
result = self.client_post("/new/", {"email": email})
self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
self.client_get(confirmation_url)
        # Test that registering on the root domain fails when ROOT_DOMAIN_LANDING_PAGE is set.
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
result = self.submit_reg_form_for_user(
email,
password,
realm_subdomain="abcdef",
realm_in_root_domain="true",
realm_name=realm_name,
)
self.assert_in_response("unavailable", result)
# test valid use of root domain
result = self.submit_reg_form_for_user(
email,
password,
realm_subdomain="abcdef",
realm_in_root_domain="true",
realm_name=realm_name,
)
self.assertEqual(result.status_code, 302)
self.assertTrue(result.url.startswith("http://testserver/accounts/login/subdomain/"))
def test_is_root_domain_available(self) -> None:
self.assertTrue(is_root_domain_available())
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
self.assertFalse(is_root_domain_available())
realm = get_realm("zulip")
realm.string_id = Realm.SUBDOMAIN_FOR_ROOT_DOMAIN
realm.save()
self.assertFalse(is_root_domain_available())
def test_subdomain_check_api(self) -> None:
result = self.client_get("/json/realm/subdomain/zulip")
self.assert_in_success_response(
["Subdomain unavailable. Please choose a different one."], result
)
result = self.client_get("/json/realm/subdomain/zu_lip")
self.assert_in_success_response(
["Subdomain can only have lowercase letters, numbers, and '-'s."], result
)
result = self.client_get("/json/realm/subdomain/hufflepuff")
self.assert_in_success_response(["available"], result)
self.assert_not_in_success_response(["unavailable"], result)
def test_subdomain_check_management_command(self) -> None:
# Short names should not work, even with the flag
with self.assertRaises(ValidationError):
check_subdomain_available("aa")
with self.assertRaises(ValidationError):
check_subdomain_available("aa", allow_reserved_subdomain=True)
# Malformed names should never work
with self.assertRaises(ValidationError):
check_subdomain_available("-ba_d-")
with self.assertRaises(ValidationError):
check_subdomain_available("-ba_d-", allow_reserved_subdomain=True)
with patch("zerver.lib.name_restrictions.is_reserved_subdomain", return_value=False):
# Existing realms should never work even if they are not reserved keywords
with self.assertRaises(ValidationError):
check_subdomain_available("zulip")
with self.assertRaises(ValidationError):
check_subdomain_available("zulip", allow_reserved_subdomain=True)
# Reserved ones should only work with the flag
with self.assertRaises(ValidationError):
check_subdomain_available("stream")
check_subdomain_available("stream", allow_reserved_subdomain=True)
class UserSignUpTest(InviteUserBase):
def _assert_redirected_to(self, result: HttpResponse, url: str) -> None:
self.assertEqual(result.status_code, 302)
self.assertEqual(result["LOCATION"], url)
def test_bad_email_configuration_for_accounts_home(self) -> None:
"""
Make sure we redirect for SMTP errors.
"""
email = self.nonreg_email("newguy")
smtp_mock = patch(
"zerver.views.registration.send_confirm_registration_email",
side_effect=smtplib.SMTPException("uh oh"),
)
with smtp_mock, self.assertLogs(level="ERROR") as m:
result = self.client_post("/accounts/home/", {"email": email})
self._assert_redirected_to(result, "/config-error/smtp")
self.assertEqual(m.output, ["ERROR:root:Error in accounts_home: uh oh"])
def test_bad_email_configuration_for_create_realm(self) -> None:
"""
Make sure we redirect for SMTP errors.
"""
email = self.nonreg_email("newguy")
smtp_mock = patch(
"zerver.views.registration.send_confirm_registration_email",
side_effect=smtplib.SMTPException("uh oh"),
)
with smtp_mock, self.assertLogs(level="ERROR") as m:
result = self.client_post("/new/", {"email": email})
self._assert_redirected_to(result, "/config-error/smtp")
self.assertEqual(m.output, ["ERROR:root:Error in create_realm: uh oh"])
def test_user_default_language_and_timezone(self) -> None:
"""
Check if the default language of new user is the default language
of the realm.
"""
email = self.nonreg_email("newguy")
password = "newpassword"
timezone = "US/Mountain"
realm = get_realm("zulip")
do_set_realm_property(realm, "default_language", "de", acting_user=None)
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
# Pick a password and agree to the ToS.
result = self.submit_reg_form_for_user(email, password, timezone=timezone)
self.assertEqual(result.status_code, 302)
user_profile = self.nonreg_user("newguy")
self.assertEqual(user_profile.default_language, realm.default_language)
self.assertEqual(user_profile.timezone, timezone)
from django.core.mail import outbox
outbox.pop()
def test_default_twenty_four_hour_time(self) -> None:
"""
Check if the default twenty_four_hour_time setting of new user
is the default twenty_four_hour_time of the realm.
"""
email = self.nonreg_email("newguy")
password = "newpassword"
realm = get_realm("zulip")
do_set_realm_property(realm, "default_twenty_four_hour_time", True, acting_user=None)
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(email, password)
self.assertEqual(result.status_code, 302)
user_profile = self.nonreg_user("newguy")
self.assertEqual(user_profile.twenty_four_hour_time, realm.default_twenty_four_hour_time)
def test_signup_already_active(self) -> None:
"""
Check if signing up with an active email redirects to a login page.
"""
email = self.example_email("hamlet")
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertIn("login", result["Location"])
result = self.client_get(result.url)
self.assert_in_response("You've already registered", result)
def test_signup_system_bot(self) -> None:
email = "[email protected]"
result = self.client_post("/accounts/home/", {"email": email}, subdomain="lear")
self.assertEqual(result.status_code, 302)
self.assertIn("login", result["Location"])
result = self.client_get(result.url)
# This is not really the right error message, but at least it's an error.
self.assert_in_response("You've already registered", result)
def test_signup_existing_email(self) -> None:
"""
Check if signing up with an email used in another realm succeeds.
"""
email = self.example_email("hamlet")
password = "newpassword"
realm = get_realm("lear")
result = self.client_post("/accounts/home/", {"email": email}, subdomain="lear")
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"], subdomain="lear")
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url, subdomain="lear")
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(email, password, subdomain="lear")
self.assertEqual(result.status_code, 302)
get_user(email, realm)
self.assertEqual(UserProfile.objects.filter(delivery_email=email).count(), 2)
def test_signup_invalid_name(self) -> None:
"""
Check if an invalid name during signup is handled properly.
"""
email = "[email protected]"
password = "newpassword"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
# Pick a password and agree to the ToS.
result = self.submit_reg_form_for_user(email, password, full_name="<invalid>")
self.assert_in_success_response(["Invalid characters in name!"], result)
# Verify that the user is asked for name and password
self.assert_in_success_response(["id_password", "id_full_name"], result)
def test_signup_without_password(self) -> None:
"""
Check if signing up without a password works properly when
password_auth_enabled is False.
"""
email = self.nonreg_email("newuser")
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
with patch("zerver.views.registration.password_auth_enabled", return_value=False):
result = self.client_post(
"/accounts/register/",
{"full_name": "New User", "key": find_key_by_email(email), "terms": True},
)
# User should now be logged in.
self.assertEqual(result.status_code, 302)
user_profile = self.nonreg_user("newuser")
self.assert_logged_in_user_id(user_profile.id)
def test_signup_without_full_name(self) -> None:
"""
Check if signing up without a full name redirects to a registration
form.
"""
email = "[email protected]"
password = "newpassword"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.client_post(
"/accounts/register/",
{
"password": password,
"key": find_key_by_email(email),
"terms": True,
"from_confirmation": "1",
},
)
self.assert_in_success_response(["We just need you to do one last thing."], result)
# Verify that the user is asked for name and password
self.assert_in_success_response(["id_password", "id_full_name"], result)
def test_signup_email_message_contains_org_header(self) -> None:
email = "[email protected]"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
from django.core.mail import outbox
self.assertEqual(outbox[0].extra_headers["List-Id"], "Zulip Dev <zulip.testserver>")
def test_signup_with_full_name(self) -> None:
"""
        Check that providing a full name during signup proceeds to the final
        registration confirmation step.
"""
email = "[email protected]"
password = "newpassword"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.client_post(
"/accounts/register/",
{
"password": password,
"key": find_key_by_email(email),
"terms": True,
"full_name": "New Guy",
"from_confirmation": "1",
},
)
self.assert_in_success_response(["We just need you to do one last thing."], result)
def test_signup_with_weak_password(self) -> None:
"""
        Check that signing up with a password that is too weak is rejected and
        no account is created.
"""
email = "[email protected]"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
with self.settings(PASSWORD_MIN_LENGTH=6, PASSWORD_MIN_GUESSES=1000):
result = self.client_post(
"/accounts/register/",
{
"password": "easy",
"key": find_key_by_email(email),
"terms": True,
"full_name": "New Guy",
"from_confirmation": "1",
},
)
self.assert_in_success_response(["We just need you to do one last thing."], result)
result = self.submit_reg_form_for_user(email, "easy", full_name="New Guy")
self.assert_in_success_response(["The password is too weak."], result)
with self.assertRaises(UserProfile.DoesNotExist):
# Account wasn't created.
get_user(email, get_realm("zulip"))
def test_signup_with_default_stream_group(self) -> None:
        # Check that the user is subscribed to the streams of the chosen default
        # stream group as well as to the default streams.
email = self.nonreg_email("newguy")
password = "newpassword"
realm = get_realm("zulip")
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
default_streams = []
for stream_name in ["venice", "verona"]:
stream = get_stream(stream_name, realm)
do_add_default_stream(stream)
default_streams.append(stream)
group1_streams = []
for stream_name in ["scotland", "denmark"]:
stream = get_stream(stream_name, realm)
group1_streams.append(stream)
do_create_default_stream_group(realm, "group 1", "group 1 description", group1_streams)
result = self.submit_reg_form_for_user(email, password, default_stream_groups=["group 1"])
self.check_user_subscribed_only_to_streams("newguy", default_streams + group1_streams)
def test_signup_two_confirmation_links(self) -> None:
email = self.nonreg_email("newguy")
password = "newpassword"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"])
first_confirmation_url = self.get_confirmation_url_from_outbox(email)
first_confirmation_key = find_key_by_email(email)
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"])
second_confirmation_url = self.get_confirmation_url_from_outbox(email)
# Sanity check:
self.assertNotEqual(first_confirmation_url, second_confirmation_url)
# Register the account (this will use the second confirmation url):
result = self.submit_reg_form_for_user(
email, password, full_name="New Guy", from_confirmation="1"
)
self.assert_in_success_response(
["We just need you to do one last thing.", "New Guy", email], result
)
result = self.submit_reg_form_for_user(email, password, full_name="New Guy")
user_profile = UserProfile.objects.get(delivery_email=email)
self.assertEqual(user_profile.delivery_email, email)
        # Now try to register using the first confirmation URL:
result = self.client_get(first_confirmation_url)
self.assertEqual(result.status_code, 200)
result = self.client_post(
"/accounts/register/",
{
"password": password,
"key": first_confirmation_key,
"terms": True,
"full_name": "New Guy",
"from_confirmation": "1",
},
)
# Error page should be displayed
self.assert_in_success_response(
["The registration link has expired or is not valid."], result
)
self.assertEqual(result.status_code, 200)
def test_signup_with_multiple_default_stream_groups(self) -> None:
        # Check that the user is subscribed to the streams of the chosen default
        # stream groups as well as to the default streams.
email = self.nonreg_email("newguy")
password = "newpassword"
realm = get_realm("zulip")
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"])
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
default_streams = []
for stream_name in ["venice", "verona"]:
stream = get_stream(stream_name, realm)
do_add_default_stream(stream)
default_streams.append(stream)
group1_streams = []
for stream_name in ["scotland", "denmark"]:
stream = get_stream(stream_name, realm)
group1_streams.append(stream)
do_create_default_stream_group(realm, "group 1", "group 1 description", group1_streams)
group2_streams = []
for stream_name in ["scotland", "rome"]:
stream = get_stream(stream_name, realm)
group2_streams.append(stream)
do_create_default_stream_group(realm, "group 2", "group 2 description", group2_streams)
result = self.submit_reg_form_for_user(
email, password, default_stream_groups=["group 1", "group 2"]
)
self.check_user_subscribed_only_to_streams(
"newguy", list(set(default_streams + group1_streams + group2_streams))
)
def test_signup_without_user_settings_from_another_realm(self) -> None:
hamlet_in_zulip = self.example_user("hamlet")
email = hamlet_in_zulip.delivery_email
password = "newpassword"
subdomain = "lear"
realm = get_realm("lear")
        # Customize hamlet's settings in the Zulip realm; these should not be copied to the new lear account.
hamlet_in_zulip.left_side_userlist = True
hamlet_in_zulip.default_language = "de"
hamlet_in_zulip.emojiset = "twitter"
hamlet_in_zulip.high_contrast_mode = True
hamlet_in_zulip.enter_sends = True
hamlet_in_zulip.tutorial_status = UserProfile.TUTORIAL_FINISHED
hamlet_in_zulip.save()
result = self.client_post("/accounts/home/", {"email": email}, subdomain=subdomain)
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"], subdomain=subdomain)
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url, subdomain=subdomain)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email, password, source_realm="on", HTTP_HOST=subdomain + ".testserver"
)
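        # Nothing should be imported, so the new lear account gets the default settings asserted below.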
hamlet = get_user(self.example_email("hamlet"), realm)
self.assertEqual(hamlet.left_side_userlist, False)
self.assertEqual(hamlet.default_language, "en")
self.assertEqual(hamlet.emojiset, "google-blob")
self.assertEqual(hamlet.high_contrast_mode, False)
self.assertEqual(hamlet.enable_stream_audible_notifications, False)
self.assertEqual(hamlet.enter_sends, False)
self.assertEqual(hamlet.tutorial_status, UserProfile.TUTORIAL_WAITING)
def test_signup_with_user_settings_from_another_realm(self) -> None:
hamlet_in_zulip = self.example_user("hamlet")
email = hamlet_in_zulip.delivery_email
password = "newpassword"
subdomain = "lear"
lear_realm = get_realm("lear")
self.login("hamlet")
with get_test_image_file("img.png") as image_file:
self.client_post("/json/users/me/avatar", {"file": image_file})
hamlet_in_zulip.refresh_from_db()
hamlet_in_zulip.left_side_userlist = True
hamlet_in_zulip.default_language = "de"
hamlet_in_zulip.emojiset = "twitter"
hamlet_in_zulip.high_contrast_mode = True
hamlet_in_zulip.enter_sends = True
hamlet_in_zulip.tutorial_status = UserProfile.TUTORIAL_FINISHED
hamlet_in_zulip.save()
result = self.client_post("/accounts/home/", {"email": email}, subdomain=subdomain)
self.assertEqual(result.status_code, 302)
result = self.client_get(result["Location"], subdomain=subdomain)
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url, subdomain=subdomain)
self.assertEqual(result.status_code, 200)
result = self.client_post(
"/accounts/register/",
{"password": password, "key": find_key_by_email(email), "from_confirmation": "1"},
subdomain=subdomain,
)
self.assert_in_success_response(
[
"Import settings from existing Zulip account",
"selected >\n Zulip Dev",
"We just need you to do one last thing.",
],
result,
)
result = self.submit_reg_form_for_user(
email, password, source_realm="zulip", HTTP_HOST=subdomain + ".testserver"
)
hamlet_in_lear = get_user(email, lear_realm)
self.assertEqual(hamlet_in_lear.left_side_userlist, True)
self.assertEqual(hamlet_in_lear.default_language, "de")
self.assertEqual(hamlet_in_lear.emojiset, "twitter")
self.assertEqual(hamlet_in_lear.high_contrast_mode, True)
self.assertEqual(hamlet_in_lear.enter_sends, True)
self.assertEqual(hamlet_in_lear.enable_stream_audible_notifications, False)
self.assertEqual(hamlet_in_lear.tutorial_status, UserProfile.TUTORIAL_FINISHED)
zulip_path_id = avatar_disk_path(hamlet_in_zulip)
lear_path_id = avatar_disk_path(hamlet_in_lear)
with open(zulip_path_id, "rb") as f:
zulip_avatar_bits = f.read()
with open(lear_path_id, "rb") as f:
lear_avatar_bits = f.read()
self.assertTrue(len(zulip_avatar_bits) > 500)
self.assertEqual(zulip_avatar_bits, lear_avatar_bits)
def test_signup_invalid_subdomain(self) -> None:
"""
Check if attempting to authenticate to the wrong subdomain logs an
error and redirects.
"""
email = "[email protected]"
password = "newpassword"
result = self.client_post("/accounts/home/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
confirmation_url = self.get_confirmation_url_from_outbox(email)
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
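# Fake authenticate() implementation: it flags a subdomain mismatch through the caller's return_data dict and authenticates nobody.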
def invalid_subdomain(**kwargs: Any) -> Any:
return_data = kwargs.get("return_data", {})
return_data["invalid_subdomain"] = True
with patch("zerver.views.registration.authenticate", side_effect=invalid_subdomain):
with self.assertLogs(level="ERROR") as m:
result = self.client_post(
"/accounts/register/",
{
"password": password,
"full_name": "New User",
"key": find_key_by_email(email),
"terms": True,
},
)
self.assertEqual(
m.output,
["ERROR:root:Subdomain mismatch in registration zulip: [email protected]"],
)
self.assertEqual(result.status_code, 302)
def test_replace_subdomain_in_confirmation_link(self) -> None:
"""
Check that manually changing the subdomain in a registration
confirmation link doesn't allow you to register to a different realm.
"""
email = "[email protected]"
self.client_post("/accounts/home/", {"email": email})
result = self.client_post(
"/accounts/register/",
{
"password": "password",
"key": find_key_by_email(email),
"terms": True,
"full_name": "New User",
"from_confirmation": "1",
},
subdomain="zephyr",
)
self.assert_in_success_response(["We couldn't find your confirmation link"], result)
def test_failed_signup_due_to_restricted_domain(self) -> None:
realm = get_realm("zulip")
do_set_realm_property(realm, "invite_required", False, acting_user=None)
do_set_realm_property(realm, "emails_restricted_to_domains", True, acting_user=None)
email = "[email protected]"
form = HomepageForm({"email": email}, realm=realm)
self.assertIn(
f"Your email address, {email}, is not in one of the domains", form.errors["email"][0]
)
def test_failed_signup_due_to_disposable_email(self) -> None:
realm = get_realm("zulip")
realm.emails_restricted_to_domains = False
realm.disallow_disposable_email_addresses = True
realm.save()
email = "[email protected]"
form = HomepageForm({"email": email}, realm=realm)
self.assertIn("Please use your real email address", form.errors["email"][0])
def test_failed_signup_due_to_email_containing_plus(self) -> None:
realm = get_realm("zulip")
realm.emails_restricted_to_domains = True
realm.save()
email = "[email protected]"
form = HomepageForm({"email": email}, realm=realm)
self.assertIn(
"Email addresses containing + are not allowed in this organization.",
form.errors["email"][0],
)
def test_failed_signup_due_to_invite_required(self) -> None:
realm = get_realm("zulip")
realm.invite_required = True
realm.save()
email = "[email protected]"
form = HomepageForm({"email": email}, realm=realm)
self.assertIn(f"Please request an invite for {email} from", form.errors["email"][0])
def test_failed_signup_due_to_nonexistent_realm(self) -> None:
email = "[email protected]"
form = HomepageForm({"email": email}, realm=None)
self.assertIn(
f"organization you are trying to join using {email} does not exist",
form.errors["email"][0],
)
def test_access_signup_page_in_root_domain_without_realm(self) -> None:
result = self.client_get("/register", subdomain="", follow=True)
self.assert_in_success_response(["Find your Zulip accounts"], result)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_registration_from_confirmation(self) -> None:
password = self.ldap_password("newuser")
email = "[email protected]"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
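# Sync the LDAP "cn" attribute into the Zulip full_name field during registration.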
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
from django.core.mail import outbox
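# Walk the outbox newest-first to find the message addressed to this user and extract its confirmation URL.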
for message in reversed(outbox):
if email in message.to:
match = re.search(settings.EXTERNAL_HOST + r"(\S+)>", message.body)
assert match is not None
[confirmation_url] = match.groups()
break
else:
raise AssertionError("Couldn't find a confirmation email.")
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
# Full name should be set from LDAP
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assert_in_success_response(
[
"We just need you to do one last thing.",
"New LDAP fullname",
"[email protected]",
],
result,
)
# Verify that the user is asked for name
self.assert_in_success_response(["id_full_name"], result)
# Verify that the user is asked for their LDAP/Active Directory password.
self.assert_in_success_response(
["Enter your LDAP/Active Directory password.", "ldap-password"], result
)
self.assert_not_in_success_response(["id_password"], result)
# Test the TypeError exception handler
with patch(
"zproject.backends.ZulipLDAPAuthBackendBase.get_mapped_name", side_effect=TypeError
):
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assert_in_success_response(
["We just need you to do one last thing.", "[email protected]"], result
)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.EmailAuthBackend",
"zproject.backends.ZulipLDAPUserPopulator",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_populate_only_registration_from_confirmation(self) -> None:
password = self.ldap_password("newuser")
email = "[email protected]"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
from django.core.mail import outbox
for message in reversed(outbox):
if email in message.to:
match = re.search(settings.EXTERNAL_HOST + r"(\S+)>", message.body)
assert match is not None
[confirmation_url] = match.groups()
break
else:
raise AssertionError("Couldn't find a confirmation email.")
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_BIND_PASSWORD="",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
AUTH_LDAP_USER_DN_TEMPLATE="uid=%(user)s,ou=users,dc=zulip,dc=com",
):
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
# Full name should be set from LDAP
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assert_in_success_response(
[
"We just need you to do one last thing.",
"New LDAP fullname",
"[email protected]",
],
result,
)
# Verify that the user is asked for name
self.assert_in_success_response(["id_full_name"], result)
# Verify that the user is NOT asked for their LDAP/Active Directory password.
# LDAP is not configured for authentication in this test.
self.assert_not_in_success_response(
["Enter your LDAP/Active Directory password.", "ldap-password"], result
)
# If we were using e.g. the SAML auth backend, there
# shouldn't be a password prompt, but since it uses the
# EmailAuthBackend, there should be password field here.
self.assert_in_success_response(["id_password"], result)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_registration_end_to_end(self) -> None:
password = self.ldap_password("newuser")
email = "[email protected]"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
full_name = "New LDAP fullname"
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
# Click confirmation link
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
# Full name should be set from LDAP
self.assert_in_success_response(
["We just need you to do one last thing.", full_name, "[email protected]"], result
)
# Submit the final form with the wrong password.
result = self.submit_reg_form_for_user(
email,
"wrongpassword",
full_name=full_name,
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
# Didn't create an account
with self.assertRaises(UserProfile.DoesNotExist):
user_profile = UserProfile.objects.get(delivery_email=email)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/login/?email=newuser%40zulip.com")
# Submit the final form with the correct password.
result = self.submit_reg_form_for_user(
email,
password,
full_name=full_name,
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from form which was set by LDAP.
self.assertEqual(user_profile.full_name, full_name)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_split_full_name_mapping(self) -> None:
self.init_default_ldap_database()
ldap_user_attr_map = {"first_name": "sn", "last_name": "cn"}
subdomain = "zulip"
email = "[email protected]"
password = self.ldap_password("newuser_splitname")
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
# Click confirmation link
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
# Test split name mapping.
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from form which was set by LDAP.
self.assertEqual(user_profile.full_name, "First Last")
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_auto_registration_on_login(self) -> None:
"""The most common way for LDAP authentication to be used is with a
server that doesn't have a terms-of-service required, in which
case we offer a complete single-sign-on experience (where the
user just enters their LDAP username and password, and their
account is created if it doesn't already exist).
This test verifies that flow.
"""
password = self.ldap_password("newuser")
email = "[email protected]"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {
"full_name": "cn",
"custom_profile_field__phone_number": "homePhone",
}
full_name = "New LDAP fullname"
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
self.login_with_return(email, password, HTTP_HOST=subdomain + ".testserver")
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from form which was set by LDAP.
self.assertEqual(user_profile.full_name, full_name)
# Test custom profile fields are properly synced.
phone_number_field = CustomProfileField.objects.get(
realm=user_profile.realm, name="Phone number"
)
phone_number_field_value = CustomProfileFieldValue.objects.get(
user_profile=user_profile, field=phone_number_field
)
self.assertEqual(phone_number_field_value.value, "a-new-number")
@override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
def test_ldap_registration_multiple_realms(self) -> None:
password = self.ldap_password("newuser")
email = "[email protected]"
self.init_default_ldap_database()
ldap_user_attr_map = {
"full_name": "cn",
}
do_create_realm("test", "test", emails_restricted_to_domains=False)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
subdomain = "zulip"
self.login_with_return(email, password, HTTP_HOST=subdomain + ".testserver")
user_profile = UserProfile.objects.get(delivery_email=email, realm=get_realm("zulip"))
self.logout()
# Test registration in another realm works.
subdomain = "test"
self.login_with_return(email, password, HTTP_HOST=subdomain + ".testserver")
user_profile = UserProfile.objects.get(delivery_email=email, realm=get_realm("test"))
self.assertEqual(user_profile.delivery_email, email)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_ldap_registration_when_names_changes_are_disabled(self) -> None:
password = self.ldap_password("newuser")
email = "[email protected]"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
# Click confirmation link. This will set the 'authenticated_full_name'
# session variable, which will be used to set the full name of
# the user.
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
with patch("zerver.views.registration.name_changes_disabled", return_value=True):
result = self.submit_reg_form_for_user(
email,
password,
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from LDAP session.
self.assertEqual(user_profile.full_name, "New LDAP fullname")
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_signup_with_ldap_and_email_enabled_using_email_with_ldap_append_domain(self) -> None:
password = "nonldappassword"
email = "[email protected]"
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# If the user's email is inside the LDAP directory and we just
# have a wrong password, then we refuse to create an account
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
full_name="Non-LDAP Full Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 302)
# We get redirected back to the login page because password was wrong
self.assertEqual(result.url, "/accounts/login/?email=newuser%40zulip.com")
self.assertFalse(UserProfile.objects.filter(delivery_email=email).exists())
# For the rest of the test we delete the user from LDAP.
del self.mock_ldap.directory["uid=newuser,ou=users,dc=zulip,dc=com"]
# If the user's email is not in the LDAP directory, but fits LDAP_APPEND_DOMAIN,
# we refuse to create the account.
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
), self.assertLogs("zulip.ldap", "DEBUG") as debug_log:
result = self.submit_reg_form_for_user(
email,
password,
full_name="Non-LDAP Full Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 302)
# We get redirected back to the login page because emails matching LDAP_APPEND_DOMAIN
# aren't allowed to create non-LDAP accounts.
self.assertEqual(result.url, "/accounts/login/?email=newuser%40zulip.com")
self.assertFalse(UserProfile.objects.filter(delivery_email=email).exists())
self.assertEqual(
debug_log.output,
[
"DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: newuser. Input username: [email protected]"
],
)
# If the email is outside of LDAP_APPEND_DOMAIN, we successfully create a non-LDAP account,
# with the password managed in the Zulip database.
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="example.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
with self.assertLogs(level="WARNING") as m:
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
self.assertEqual(
m.output,
["WARNING:root:New account email [email protected] could not be found in LDAP"],
)
with self.assertLogs("zulip.ldap", "DEBUG") as debug_log:
result = self.submit_reg_form_for_user(
email,
password,
full_name="Non-LDAP Full Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(
debug_log.output,
[
"DEBUG:zulip.ldap:ZulipLDAPAuthBackend: Email [email protected] does not match LDAP domain example.com."
],
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "http://zulip.testserver/")
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from the POST request, not LDAP
self.assertEqual(user_profile.full_name, "Non-LDAP Full Name")
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
"zproject.backends.ZulipDummyBackend",
)
)
def test_signup_with_ldap_and_email_enabled_using_email_with_ldap_email_search(self) -> None:
# If the user's email is inside the LDAP directory and we just
# have a wrong password, then we refuse to create an account
password = "nonldappassword"
email = "[email protected]" # belongs to user uid=newuser_with_email in the test directory
subdomain = "zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_EMAIL_ATTR="mail",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
full_name="Non-LDAP Full Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 302)
# We get redirected back to the login page because password was wrong
self.assertEqual(result.url, "/accounts/login/?email=newuser_email%40zulip.com")
self.assertFalse(UserProfile.objects.filter(delivery_email=email).exists())
# If the user's email is not in the LDAP directory, though, we
# successfully create an account with a password in the Zulip
# database.
password = "nonldappassword"
email = "[email protected]"
subdomain = "zulip"
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_EMAIL_ATTR="mail",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
with self.assertLogs(level="WARNING") as m:
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
self.assertEqual(
m.output,
[
"WARNING:root:New account email [email protected] could not be found in LDAP"
],
)
with self.assertLogs("zulip.ldap", "DEBUG") as debug_log:
result = self.submit_reg_form_for_user(
email,
password,
full_name="Non-LDAP Full Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(
debug_log.output,
[
"DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: [email protected]. Input username: [email protected]"
],
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "http://zulip.testserver/")
user_profile = UserProfile.objects.get(delivery_email=email)
# Name comes from the POST request, not LDAP
self.assertEqual(user_profile.full_name, "Non-LDAP Full Name")
def ldap_invite_and_signup_as(
self, invite_as: int, streams: Sequence[str] = ["Denmark"]
) -> None:
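# Helper: as Iago, invite "[email protected]" with the given role and streams, then complete the two-step LDAP-backed registration for that user.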
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
subdomain = "zulip"
email = "[email protected]"
password = self.ldap_password("newuser")
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
):
with self.assertLogs("zulip.ldap", "DEBUG") as debug_log:
# Invite user.
self.login("iago")
self.assertEqual(
debug_log.output,
[
"DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: iago. Input username: [email protected]"
],
)
response = self.invite(
invitee_emails="[email protected]", stream_names=streams, invite_as=invite_as
)
self.assert_json_success(response)
self.logout()
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
full_name="Ignore",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 302)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
)
)
def test_ldap_invite_user_as_admin(self) -> None:
self.ldap_invite_and_signup_as(PreregistrationUser.INVITE_AS["REALM_ADMIN"])
user_profile = UserProfile.objects.get(delivery_email=self.nonreg_email("newuser"))
self.assertTrue(user_profile.is_realm_admin)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
)
)
def test_ldap_invite_user_as_guest(self) -> None:
self.ldap_invite_and_signup_as(PreregistrationUser.INVITE_AS["GUEST_USER"])
user_profile = UserProfile.objects.get(delivery_email=self.nonreg_email("newuser"))
self.assertTrue(user_profile.is_guest)
@override_settings(
AUTHENTICATION_BACKENDS=(
"zproject.backends.ZulipLDAPAuthBackend",
"zproject.backends.EmailAuthBackend",
)
)
def test_ldap_invite_streams(self) -> None:
stream_name = "Rome"
realm = get_realm("zulip")
stream = get_stream(stream_name, realm)
default_streams = get_default_streams_for_realm(realm)
default_streams_name = [stream.name for stream in default_streams]
self.assertNotIn(stream_name, default_streams_name)
# Invite user.
self.ldap_invite_and_signup_as(
PreregistrationUser.INVITE_AS["REALM_ADMIN"], streams=[stream_name]
)
user_profile = UserProfile.objects.get(delivery_email=self.nonreg_email("newuser"))
self.assertTrue(user_profile.is_realm_admin)
sub = get_stream_subscriptions_for_user(user_profile).filter(recipient__type_id=stream.id)
self.assertEqual(len(sub), 1)
def test_registration_when_name_changes_are_disabled(self) -> None:
"""
Test `name_changes_disabled` when we are not running under LDAP.
"""
password = self.ldap_password("newuser")
email = "[email protected]"
subdomain = "zulip"
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
with patch("zerver.views.registration.name_changes_disabled", return_value=True):
result = self.submit_reg_form_for_user(
email,
password,
full_name="New Name",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
user_profile = UserProfile.objects.get(delivery_email=email)
# 'New Name' comes from POST data; not from LDAP session.
self.assertEqual(user_profile.full_name, "New Name")
def test_realm_creation_through_ldap(self) -> None:
password = self.ldap_password("newuser")
email = "[email protected]"
subdomain = "zulip"
realm_name = "Zulip"
self.init_default_ldap_database()
ldap_user_attr_map = {"full_name": "cn"}
with patch("zerver.views.registration.get_subdomain", return_value=subdomain):
result = self.client_post("/register/", {"email": email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
from django.core.mail import outbox
for message in reversed(outbox):
if email in message.to:
match = re.search(settings.EXTERNAL_HOST + r"(\S+)>", message.body)
assert match is not None
[confirmation_url] = match.groups()
break
else:
raise AssertionError("Couldn't find a confirmation email.")
with self.settings(
POPULATE_PROFILE_VIA_LDAP=True,
LDAP_APPEND_DOMAIN="zulip.com",
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",),
TERMS_OF_SERVICE=False,
):
result = self.client_get(confirmation_url)
self.assertEqual(result.status_code, 200)
key = find_key_by_email(email)
confirmation = Confirmation.objects.get(confirmation_key=key)
prereg_user = confirmation.content_object
prereg_user.realm_creation = True
prereg_user.save()
result = self.submit_reg_form_for_user(
email,
password,
realm_name=realm_name,
realm_subdomain=subdomain,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assert_in_success_response(
["We just need you to do one last thing.", "[email protected]"], result
)
@patch(
"DNS.dnslookup",
return_value=[["sipbtest:*:20922:101:Fred Sipb,,,:/mit/sipbtest:/bin/athena/tcsh"]],
)
def test_registration_of_mirror_dummy_user(self, ignored: Any) -> None:
password = "test"
subdomain = "zephyr"
user_profile = self.mit_user("sipbtest")
email = user_profile.delivery_email
user_profile.is_mirror_dummy = True
user_profile.save()
change_user_is_active(user_profile, False)
result = self.client_post("/register/", {"email": email}, subdomain="zephyr")
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(f"/accounts/send_confirm/{email}"))
result = self.client_get(result["Location"], subdomain="zephyr")
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
from django.core.mail import outbox
for message in reversed(outbox):
if email in message.to:
match = re.search(settings.EXTERNAL_HOST + r"(\S+)>", message.body)
assert match is not None
[confirmation_url] = match.groups()
break
else:
raise AssertionError("Couldn't find a confirmation email.")
result = self.client_get(confirmation_url, subdomain="zephyr")
self.assertEqual(result.status_code, 200)
# If the mirror dummy user is already active, attempting to
# submit the registration form should raise an AssertionError
# (this is an invalid state, so it's a bug if we got here):
change_user_is_active(user_profile, True)
with self.assertRaisesRegex(
AssertionError, "Mirror dummy user is already active!"
), self.assertLogs("django.request", "ERROR") as error_log:
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertTrue(
"ERROR:django.request:Internal Server Error: /accounts/register/" in error_log.output[0]
)
self.assertTrue(
'raise AssertionError("Mirror dummy user is already active!' in error_log.output[0]
)
self.assertTrue(
"AssertionError: Mirror dummy user is already active!" in error_log.output[0]
)
change_user_is_active(user_profile, False)
result = self.submit_reg_form_for_user(
email,
password,
from_confirmation="1",
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 200)
result = self.submit_reg_form_for_user(
email,
password,
# Pass HTTP_HOST for the target subdomain
HTTP_HOST=subdomain + ".testserver",
)
self.assertEqual(result.status_code, 302)
self.assert_logged_in_user_id(user_profile.id)
def test_registration_of_active_mirror_dummy_user(self) -> None:
"""
Trying to activate an already-active mirror dummy user should
raise an AssertionError.
"""
user_profile = self.mit_user("sipbtest")
email = user_profile.delivery_email
user_profile.is_mirror_dummy = True
user_profile.save()
change_user_is_active(user_profile, True)
with self.assertRaisesRegex(
AssertionError, "Mirror dummy user is already active!"
), self.assertLogs("django.request", "ERROR") as error_log:
self.client_post("/register/", {"email": email}, subdomain="zephyr")
self.assertTrue(
"ERROR:django.request:Internal Server Error: /register/" in error_log.output[0]
)
self.assertTrue(
'raise AssertionError("Mirror dummy user is already active!' in error_log.output[0]
)
self.assertTrue(
"AssertionError: Mirror dummy user is already active!" in error_log.output[0]
)
@override_settings(TERMS_OF_SERVICE=False)
def test_dev_user_registration(self) -> None:
"""Verify that /devtools/register_user creates a new user, logs them
in, and redirects to the logged-in app."""
count = UserProfile.objects.count()
email = f"user-{count}@zulip.com"
result = self.client_post("/devtools/register_user/")
user_profile = UserProfile.objects.all().order_by("id").last()
self.assertEqual(result.status_code, 302)
self.assertEqual(user_profile.delivery_email, email)
self.assertEqual(result["Location"], "http://zulip.testserver/")
self.assert_logged_in_user_id(user_profile.id)
@override_settings(TERMS_OF_SERVICE=False)
def test_dev_user_registration_create_realm(self) -> None:
count = UserProfile.objects.count()
string_id = f"realm-{count}"
result = self.client_post("/devtools/register_realm/")
self.assertEqual(result.status_code, 302)
self.assertTrue(
result["Location"].startswith(f"http://{string_id}.testserver/accounts/login/subdomain")
)
result = self.client_get(result["Location"], subdomain=string_id)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], f"http://{string_id}.testserver")
user_profile = UserProfile.objects.all().order_by("id").last()
self.assert_logged_in_user_id(user_profile.id)
class DeactivateUserTest(ZulipTestCase):
def test_deactivate_user(self) -> None:
user = self.example_user("hamlet")
email = user.email
self.login_user(user)
self.assertTrue(user.is_active)
result = self.client_delete("/json/users/me")
self.assert_json_success(result)
user = self.example_user("hamlet")
self.assertFalse(user.is_active)
password = initial_password(email)
assert password is not None
self.assert_login_failure(email, password=password)
def test_do_not_deactivate_final_owner(self) -> None:
user = self.example_user("desdemona")
user_2 = self.example_user("iago")
self.login_user(user)
self.assertTrue(user.is_active)
result = self.client_delete("/json/users/me")
self.assert_json_error(result, "Cannot deactivate the only organization owner.")
user = self.example_user("desdemona")
self.assertTrue(user.is_active)
self.assertTrue(user.is_realm_owner)
do_change_user_role(user_2, UserProfile.ROLE_REALM_OWNER, acting_user=None)
self.assertTrue(user_2.is_realm_owner)
result = self.client_delete("/json/users/me")
self.assert_json_success(result)
do_change_user_role(user, UserProfile.ROLE_REALM_OWNER, acting_user=None)
def test_do_not_deactivate_final_user(self) -> None:
realm = get_realm("zulip")
for user_profile in UserProfile.objects.filter(realm=realm).exclude(
role=UserProfile.ROLE_REALM_OWNER
):
do_deactivate_user(user_profile, acting_user=None)
user = self.example_user("desdemona")
self.login_user(user)
result = self.client_delete("/json/users/me")
self.assert_json_error(result, "Cannot deactivate the only user.")
class TestLoginPage(ZulipTestCase):
@patch("django.http.HttpRequest.get_host")
def test_login_page_redirects_for_root_alias(self, mock_get_host: MagicMock) -> None:
mock_get_host.return_value = "www.testserver"
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
result = self.client_get("/en/login/")
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/")
result = self.client_get("/en/login/", {"next": "/upgrade/"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/?next=%2Fupgrade%2F")
@patch("django.http.HttpRequest.get_host")
def test_login_page_redirects_for_root_domain(self, mock_get_host: MagicMock) -> None:
mock_get_host.return_value = "testserver"
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
result = self.client_get("/en/login/")
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/")
result = self.client_get("/en/login/", {"next": "/upgrade/"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/?next=%2Fupgrade%2F")
mock_get_host.return_value = "www.testserver.com"
with self.settings(
ROOT_DOMAIN_LANDING_PAGE=True,
EXTERNAL_HOST="www.testserver.com",
ROOT_SUBDOMAIN_ALIASES=["test"],
):
result = self.client_get("/en/login/")
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/")
result = self.client_get("/en/login/", {"next": "/upgrade/"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/go/?next=%2Fupgrade%2F")
@patch("django.http.HttpRequest.get_host")
def test_login_page_works_without_subdomains(self, mock_get_host: MagicMock) -> None:
mock_get_host.return_value = "www.testserver"
with self.settings(ROOT_SUBDOMAIN_ALIASES=["www"]):
result = self.client_get("/en/login/")
self.assertEqual(result.status_code, 200)
mock_get_host.return_value = "testserver"
with self.settings(ROOT_SUBDOMAIN_ALIASES=["www"]):
result = self.client_get("/en/login/")
self.assertEqual(result.status_code, 200)
def test_login_page_registration_hint(self) -> None:
response = self.client_get("/login/")
self.assert_not_in_success_response(
["Don't have an account yet? You need to be invited to join this organization."],
response,
)
realm = get_realm("zulip")
realm.invite_required = True
realm.save(update_fields=["invite_required"])
response = self.client_get("/login/")
self.assert_in_success_response(
["Don't have an account yet? You need to be invited to join this organization."],
response,
)
class TestFindMyTeam(ZulipTestCase):
def test_template(self) -> None:
result = self.client_get("/accounts/find/")
self.assertIn("Find your Zulip accounts", result.content.decode("utf8"))
def test_result(self) -> None:
# We capitalize a letter in cordelia's email to test that the search is case-insensitive.
result = self.client_post(
"/accounts/find/", dict(emails="[email protected],[email protected]")
)
self.assertEqual(result.status_code, 302)
self.assertEqual(
result.url, "/accounts/find/?emails=iago%40zulip.com%2CcordeliA%40zulip.com"
)
result = self.client_get(result.url)
content = result.content.decode("utf8")
self.assertIn("Emails sent! You will only receive emails", content)
self.assertIn("[email protected]", content)
self.assertIn("[email protected]", content)
from django.core.mail import outbox
# 3 = 1 + 2 -- Cordelia gets an email each for the "zulip" and "lear" realms.
self.assertEqual(len(outbox), 3)
def test_find_team_ignore_invalid_email(self) -> None:
result = self.client_post(
"/accounts/find/", dict(emails="[email protected],[email protected]")
)
self.assertEqual(result.status_code, 302)
self.assertEqual(
result.url, "/accounts/find/?emails=iago%40zulip.com%2Cinvalid_email%40zulip.com"
)
result = self.client_get(result.url)
content = result.content.decode("utf8")
self.assertIn("Emails sent! You will only receive emails", content)
self.assertIn(self.example_email("iago"), content)
self.assertIn("invalid_email@", content)
from django.core.mail import outbox
self.assertEqual(len(outbox), 1)
def test_find_team_reject_invalid_email(self) -> None:
result = self.client_post("/accounts/find/", dict(emails="invalid_string"))
self.assertEqual(result.status_code, 200)
self.assertIn(b"Enter a valid email", result.content)
from django.core.mail import outbox
self.assertEqual(len(outbox), 0)
# Just for coverage on perhaps-unnecessary validation code.
result = self.client_get("/accounts/find/", {"emails": "invalid"})
self.assertEqual(result.status_code, 200)
def test_find_team_zero_emails(self) -> None:
data = {"emails": ""}
result = self.client_post("/accounts/find/", data)
self.assertIn("This field is required", result.content.decode("utf8"))
self.assertEqual(result.status_code, 200)
from django.core.mail import outbox
self.assertEqual(len(outbox), 0)
def test_find_team_one_email(self) -> None:
data = {"emails": self.example_email("hamlet")}
result = self.client_post("/accounts/find/", data)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/find/?emails=hamlet%40zulip.com")
from django.core.mail import outbox
self.assertEqual(len(outbox), 1)
def test_find_team_deactivated_user(self) -> None:
do_deactivate_user(self.example_user("hamlet"), acting_user=None)
data = {"emails": self.example_email("hamlet")}
result = self.client_post("/accounts/find/", data)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/find/?emails=hamlet%40zulip.com")
from django.core.mail import outbox
self.assertEqual(len(outbox), 0)
def test_find_team_deactivated_realm(self) -> None:
do_deactivate_realm(get_realm("zulip"), acting_user=None)
data = {"emails": self.example_email("hamlet")}
result = self.client_post("/accounts/find/", data)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/find/?emails=hamlet%40zulip.com")
from django.core.mail import outbox
self.assertEqual(len(outbox), 0)
def test_find_team_bot_email(self) -> None:
data = {"emails": self.example_email("webhook_bot")}
result = self.client_post("/accounts/find/", data)
self.assertEqual(result.status_code, 302)
self.assertEqual(result.url, "/accounts/find/?emails=webhook-bot%40zulip.com")
from django.core.mail import outbox
self.assertEqual(len(outbox), 0)
def test_find_team_more_than_ten_emails(self) -> None:
data = {"emails": ",".join(f"hamlet-{i}@zulip.com" for i in range(11))}
result = self.client_post("/accounts/find/", data)
self.assertEqual(result.status_code, 200)
self.assertIn("Please enter at most 10", result.content.decode("utf8"))
from django.core.mail import outbox
self.assertEqual(len(outbox), 0)
class ConfirmationKeyTest(ZulipTestCase):
def test_confirmation_key(self) -> None:
request = MagicMock()
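# A MagicMock stands in for the HttpRequest; the session entry below supplies the key the view echoes back.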
request.session = {
"confirmation_key": {"confirmation_key": "xyzzy"},
}
result = confirmation_key(request)
self.assert_json_success(result)
self.assert_in_response("xyzzy", result)
class MobileAuthOTPTest(ZulipTestCase):
def test_xor_hex_strings(self) -> None:
self.assertEqual(xor_hex_strings("1237c81ab", "18989fd12"), "0aaf57cb9")
with self.assertRaises(AssertionError):
xor_hex_strings("1", "31")
def test_is_valid_otp(self) -> None:
self.assertEqual(is_valid_otp("1234"), False)
self.assertEqual(is_valid_otp("1234abcd" * 8), True)
self.assertEqual(is_valid_otp("1234abcZ" * 8), False)
def test_ascii_to_hex(self) -> None:
self.assertEqual(ascii_to_hex("ZcdR1234"), "5a63645231323334")
self.assertEqual(hex_to_ascii("5a63645231323334"), "ZcdR1234")
def test_otp_encrypt_api_key(self) -> None:
api_key = "12ac" * 8
otp = "7be38894" * 8
result = otp_encrypt_api_key(api_key, otp)
self.assertEqual(result, "4ad1e9f7" * 8)
decrypted = otp_decrypt_api_key(result, otp)
self.assertEqual(decrypted, api_key)
class FollowupEmailTest(ZulipTestCase):
def test_followup_day2_email(self) -> None:
user_profile = self.example_user("hamlet")
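# The day-2 follow-up email is delayed roughly two days, shifted so it never lands on a
# weekend (hence only 1 day from a Thursday join and 3 days from a Friday join).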
# Test date_joined == Sunday
user_profile.date_joined = datetime.datetime(
2018, 1, 7, 1, 0, 0, 0, tzinfo=datetime.timezone.utc
)
self.assertEqual(
followup_day2_email_delay(user_profile), datetime.timedelta(days=2, hours=-1)
)
# Test date_joined == Tuesday
user_profile.date_joined = datetime.datetime(
2018, 1, 2, 1, 0, 0, 0, tzinfo=datetime.timezone.utc
)
self.assertEqual(
followup_day2_email_delay(user_profile), datetime.timedelta(days=2, hours=-1)
)
# Test date_joined == Thursday
user_profile.date_joined = datetime.datetime(
2018, 1, 4, 1, 0, 0, 0, tzinfo=datetime.timezone.utc
)
self.assertEqual(
followup_day2_email_delay(user_profile), datetime.timedelta(days=1, hours=-1)
)
# Test date_joined == Friday
user_profile.date_joined = datetime.datetime(
2018, 1, 5, 1, 0, 0, 0, tzinfo=datetime.timezone.utc
)
self.assertEqual(
followup_day2_email_delay(user_profile), datetime.timedelta(days=3, hours=-1)
)
# Time offset of America/Phoenix is -07:00
user_profile.timezone = "America/Phoenix"
# Test date_joined == Friday in UTC, but Thursday in the user's timezone
user_profile.date_joined = datetime.datetime(
2018, 1, 5, 1, 0, 0, 0, tzinfo=datetime.timezone.utc
)
self.assertEqual(
followup_day2_email_delay(user_profile), datetime.timedelta(days=1, hours=-1)
)
class NoReplyEmailTest(ZulipTestCase):
def test_noreply_email_address(self) -> None:
self.assertTrue(
re.search(self.TOKENIZED_NOREPLY_REGEX, FromAddress.tokenized_no_reply_address())
)
with self.settings(ADD_TOKENS_TO_NOREPLY_ADDRESS=False):
self.assertEqual(FromAddress.tokenized_no_reply_address(), "noreply@testserver")
class TwoFactorAuthTest(ZulipTestCase):
@patch("two_factor.models.totp")
def test_two_factor_login(self, mock_totp: MagicMock) -> None:
token = 123456
email = self.example_email("hamlet")
password = self.ldap_password("hamlet")
user_profile = self.example_user("hamlet")
user_profile.set_password(password)
user_profile.save()
self.create_default_device(user_profile)
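# Patch the TOTP generator so the fake SMS gateway always delivers a predictable token.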
def totp(*args: Any, **kwargs: Any) -> int:
return token
mock_totp.side_effect = totp
with self.settings(
AUTHENTICATION_BACKENDS=("zproject.backends.EmailAuthBackend",),
TWO_FACTOR_CALL_GATEWAY="two_factor.gateways.fake.Fake",
TWO_FACTOR_SMS_GATEWAY="two_factor.gateways.fake.Fake",
TWO_FACTOR_AUTHENTICATION_ENABLED=True,
):
first_step_data = {
"username": email,
"password": password,
"two_factor_login_view-current_step": "auth",
}
with self.assertLogs("two_factor.gateways.fake", "INFO") as info_logs:
result = self.client_post("/accounts/login/", first_step_data)
self.assertEqual(
info_logs.output,
['INFO:two_factor.gateways.fake:Fake SMS to +12125550100: "Your token is: 123456"'],
)
self.assertEqual(result.status_code, 200)
second_step_data = {
"token-otp_token": str(token),
"two_factor_login_view-current_step": "token",
}
result = self.client_post("/accounts/login/", second_step_data)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "http://zulip.testserver")
# Going to login page should redirect to '/' if user is already
# logged in.
result = self.client_get("/accounts/login/")
self.assertEqual(result["Location"], "http://zulip.testserver")
class NameRestrictionsTest(ZulipTestCase):
def test_whitelisted_disposable_domains(self) -> None:
self.assertFalse(is_disposable_domain("OPayQ.com"))
class RealmRedirectTest(ZulipTestCase):
def test_realm_redirect_without_next_param(self) -> None:
result = self.client_get("/accounts/go/")
self.assert_in_success_response(["Enter your organization's Zulip URL"], result)
result = self.client_post("/accounts/go/", {"subdomain": "zephyr"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "http://zephyr.testserver")
result = self.client_post("/accounts/go/", {"subdomain": "invalid"})
self.assert_in_success_response(["We couldn't find that Zulip organization."], result)
def test_realm_redirect_with_next_param(self) -> None:
result = self.client_get("/accounts/go/", {"next": "billing"})
self.assert_in_success_response(
["Enter your organization's Zulip URL", 'action="/accounts/go/?next=billing"'], result
)
result = self.client_post("/accounts/go/?next=billing", {"subdomain": "lear"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "http://lear.testserver/billing")
|
py | 1a50f2e95575fd8e7733e885d558f37ed7316963 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.mgmt.core import ARMPipelineClient
from msrest import Deserializer, Serializer
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Optional
from azure.core.credentials import TokenCredential
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from ._configuration import ServiceFabricManagementClientConfiguration
from .operations import ClustersOperations
from .operations import ClusterVersionsOperations
from .operations import Operations
from .operations import ApplicationTypesOperations
from .operations import ApplicationTypeVersionsOperations
from .operations import ApplicationsOperations
from .operations import ServicesOperations
from . import models
class ServiceFabricManagementClient(object):
"""Service Fabric Management Client.
:ivar clusters: ClustersOperations operations
:vartype clusters: azure.mgmt.servicefabric.operations.ClustersOperations
:ivar cluster_versions: ClusterVersionsOperations operations
:vartype cluster_versions: azure.mgmt.servicefabric.operations.ClusterVersionsOperations
:ivar operations: Operations operations
:vartype operations: azure.mgmt.servicefabric.operations.Operations
:ivar application_types: ApplicationTypesOperations operations
:vartype application_types: azure.mgmt.servicefabric.operations.ApplicationTypesOperations
:ivar application_type_versions: ApplicationTypeVersionsOperations operations
:vartype application_type_versions: azure.mgmt.servicefabric.operations.ApplicationTypeVersionsOperations
:ivar applications: ApplicationsOperations operations
:vartype applications: azure.mgmt.servicefabric.operations.ApplicationsOperations
:ivar services: ServicesOperations operations
:vartype services: azure.mgmt.servicefabric.operations.ServicesOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The customer subscription identifier.
:type subscription_id: str
:param str base_url: Service URL
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
"""
def __init__(
self,
credential, # type: "TokenCredential"
subscription_id, # type: str
base_url=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> None
if not base_url:
base_url = 'https://management.azure.com'
self._config = ServiceFabricManagementClientConfiguration(credential, subscription_id, **kwargs)
self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.clusters = ClustersOperations(
self._client, self._config, self._serialize, self._deserialize)
self.cluster_versions = ClusterVersionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.operations = Operations(
self._client, self._config, self._serialize, self._deserialize)
self.application_types = ApplicationTypesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.application_type_versions = ApplicationTypeVersionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.applications = ApplicationsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.services = ServicesOperations(
self._client, self._config, self._serialize, self._deserialize)
def _send_request(self, http_request, **kwargs):
# type: (HttpRequest, Any) -> HttpResponse
"""Runs the network request through the client's chained policies.
:param http_request: The network request you want to make. Required.
:type http_request: ~azure.core.pipeline.transport.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to True.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.pipeline.transport.HttpResponse
"""
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
http_request.url = self._client.format_url(http_request.url, **path_format_arguments)
stream = kwargs.pop("stream", True)
pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs)
return pipeline_response.http_response
def close(self):
# type: () -> None
self._client.close()
def __enter__(self):
# type: () -> ServiceFabricManagementClient
self._client.__enter__()
return self
def __exit__(self, *exc_details):
# type: (Any) -> None
self._client.__exit__(*exc_details)
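# Minimal usage sketch (illustrative only; assumes the azure-identity package and a real
# subscription id — the placeholder below is not a working value):
#
#     from azure.identity import DefaultAzureCredential
#
#     with ServiceFabricManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
#         clusters = client.clusters  # operation group for cluster management calls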
|
py | 1a50f32845b77d163ab3fa327a9582ad17a28acd | import pandas as pd
from jnius import autoclass
from awesome_data import DataSet
from marspy.convert.molecule import *
class Archive:
def __init__(self, filepath):
self.filepath = filepath
self.name = self.filepath.split('/')[-1]
self.File = autoclass('java.io.File')
self.yamaFile = self.File(self.filepath)
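# pyjnius autoclass bridges to the Java-side MARS classes; the archive path is wrapped in a java.io.File before the subclass loads it.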
def get_molecule_by_uid(self, uid):
raise NotImplementedError
def get_molecules_by_tags(self, tags):
raise NotImplementedError
def validate_params(self):
pass
class SingleMoleculeArchive(Archive):
instances = []
def __init__(self, filepath, accept_tag, label=dict()):
Archive.__init__(self, filepath)
self.instances.append(self)
self.Archive = autoclass('de.mpg.biochem.mars.molecule.SingleMoleculeArchive')
self.archive_link = self.Archive(self.yamaFile)
self.metadata_uids = list(self.archive_link.getMetadataUIDs())
self.label = label
# nucleotide
# check if all metadata parameters match & raise warning if conditions in one archive are not identical
if len({self.archive_link.getMetadata(metadata_uid).getStringParameter('nucleotide')
for metadata_uid in self.metadata_uids}) > 1:
raise MarsPyWarning()
# if StringParameter is not set, getStringParameter returns empty string ''
if len(self.archive_link.getMetadata(self.metadata_uids[0]).getStringParameter('nucleotide')) == 0:
# default n/a
self.nucleotide = 'n/a'
print(f'nucleotide not found. Setting default to {self.nucleotide}')
# parameter properly set
else:
self.nucleotide = self.archive_link.getMetadata(self.metadata_uids[0]).getStringParameter('nucleotide')
# highsalt_wash
# check if all metadata parameters match & raise warning if conditions in one archive are not identical
if len({self.archive_link.getMetadata(metadata_uid).getParameter('highsalt_wash')
for metadata_uid in self.metadata_uids}) > 1:
raise MarsPyWarning()
# if Parameter is not set, getParameter returns np.nan
if np.isnan(self.archive_link.getMetadata(self.metadata_uids[0]).getParameter('highsalt_wash')):
# default False
self.highsalt_wash = False
print(f'highsalt_wash not found. Setting default to {self.highsalt_wash}')
else:
self.highsalt_wash = \
self.archive_link.getMetadata(self.metadata_uids[0]).getParameter('highsalt_wash') == 1
# cdc6
        # check if all metadata parameters match & raise a warning if conditions in one archive are not identical
if len({self.archive_link.getMetadata(metadata_uid).getStringParameter('cdc6')
for metadata_uid in self.metadata_uids}) > 1:
raise MarsPyWarning()
# if StringParameter is not set, getStringParameter returns empty string ''
if len(self.archive_link.getMetadata(self.metadata_uids[0]).getStringParameter('cdc6')) == 0:
# default n/a
self.cdc6 = 'n/a'
print(f'cdc6 not found. Setting default to {self.cdc6}')
# parameter properly set
else:
self.cdc6 = self.archive_link.getMetadata(self.metadata_uids[0]).getStringParameter('cdc6')
self.protein = list(self.label.keys())[0]
# instantiate a new SingleMolecule for each uid and store instances as list
self.molecules = [SingleMolecule(uid, self.protein, archive=self.archive_link) for uid in
self.archive_link.getMoleculeUIDs() if self.archive_link.get(uid).hasTag(accept_tag)]
self.tags = set()
for molecule in self.molecules:
self.tags.update(molecule.tags)
def get_molecule_by_uid(self, uid):
"""
Returns molecule object with provided UID.
"""
return list(filter(lambda molecule: molecule.uid == uid, self.molecules))[0]
def get_molecules_by_tags(self, tags):
"""
Provide tags as list.
Returns filter of all molecules which have all the specified tags
"""
return filter(lambda molecule: set(tags).issubset(set(molecule.tags)), self.molecules)
def __len__(self):
return len(self.molecules)
class DnaMoleculeArchive(Archive):
instances = []
def __init__(self, filepath, accept_tag, labels=dict()):
Archive.__init__(self, filepath)
self.instances.append(self)
self.Archive = autoclass('de.mpg.biochem.mars.molecule.DnaMoleculeArchive')
self.archive_link = self.Archive(self.yamaFile)
self.metadata_uids = list(self.archive_link.getMetadataUIDs())
self.dna_molecule_count = 0
for metadata in self.metadata_uids:
self.dna_molecule_count += dict(sc.to_python(self.archive_link.getMetadata(metadata).getParameters()))[
'DnaMoleculeCount']
# subtract # of reject_dna tags
self.dna_molecule_count -= len(list(filter(lambda uid:
self.archive_link.get(uid).hasTag('reject_dna'),
self.archive_link.moleculeUIDs)))
self.labels = labels
# nucleotide
        # check if all metadata parameters match & raise a warning if conditions in one archive are not identical
if len({self.archive_link.getMetadata(metadata_uid).getStringParameter('nucleotide')
for metadata_uid in self.metadata_uids}) > 1:
raise MarsPyWarning()
# if StringParameter is not set, getStringParameter returns empty string ''
if len(self.archive_link.getMetadata(self.metadata_uids[0]).getStringParameter('nucleotide')) == 0:
# default n/a
self.nucleotide = 'n/a'
print(f'nucleotide not found. Setting default to {self.nucleotide}')
# parameter properly set
else:
self.nucleotide = self.archive_link.getMetadata(self.metadata_uids[0]).getStringParameter('nucleotide')
# highsalt_wash
        # check if all metadata parameters match & raise a warning if conditions in one archive are not identical
if len({self.archive_link.getMetadata(metadata_uid).getParameter('highsalt_wash')
for metadata_uid in self.metadata_uids}) > 1:
raise MarsPyWarning()
# if Parameter is not set, getParameter returns np.nan
if np.isnan(self.archive_link.getMetadata(self.metadata_uids[0]).getParameter('highsalt_wash')):
# default False
self.highsalt_wash = False
print(f'highsalt_wash not found. Setting default to {self.highsalt_wash}')
else:
self.highsalt_wash = \
self.archive_link.getMetadata(self.metadata_uids[0]).getParameter('highsalt_wash') == 1
# dna_count_valid: data was fully analyzed - ALL DNA molecules fitted
        # check if all metadata parameters match & raise a warning if conditions in one archive are not identical
if len({self.archive_link.getMetadata(metadata_uid).getParameter('dna_count_valid')
for metadata_uid in self.metadata_uids}) > 1:
raise MarsPyWarning()
# if Parameter is not set, getParameter returns np.nan
if np.isnan(self.archive_link.getMetadata(self.metadata_uids[0]).getParameter('dna_count_valid')):
# default True
self.dna_count_valid = True
print(f'dna_count_valid not found. Setting default to {self.dna_count_valid}')
else:
self.dna_count_valid = \
self.archive_link.getMetadata(self.metadata_uids[0]).getParameter('dna_count_valid') == 1
# t7_terminator
        # check if all metadata parameters match & raise a warning if conditions in one archive are not identical
if len({self.archive_link.getMetadata(metadata_uid).getParameter('t7_terminator')
for metadata_uid in self.metadata_uids}) > 1:
raise MarsPyWarning()
# if Parameter is not set, getParameter returns np.nan
if np.isnan(self.archive_link.getMetadata(self.metadata_uids[0]).getParameter('t7_terminator')):
# default False
self.t7_terminator = False
print(f't7_terminator not found. Setting default to {self.t7_terminator}')
else:
self.t7_terminator = \
self.archive_link.getMetadata(self.metadata_uids[0]).getParameter('t7_terminator') == 1
# chromatin
        # check if all metadata parameters match & raise a warning if conditions in one archive are not identical
if len({self.archive_link.getMetadata(metadata_uid).getStringParameter('chromatin')
for metadata_uid in self.metadata_uids}) > 1:
raise MarsPyWarning()
# if StringParameter is not set, getStringParameter returns empty string ''
if len(self.archive_link.getMetadata(self.metadata_uids[0]).getStringParameter('chromatin')) == 0:
# default n/a
self.chromatin = 'n/a'
print(f'chromatin not found. Setting default to {self.chromatin}')
# parameter properly set
else:
self.chromatin = self.archive_link.getMetadata(self.metadata_uids[0]).getStringParameter('chromatin')
# cdc6
        # check if all metadata parameters match & raise a warning if conditions in one archive are not identical
if len({self.archive_link.getMetadata(metadata_uid).getStringParameter('cdc6')
for metadata_uid in self.metadata_uids}) > 1:
raise MarsPyWarning()
# if StringParameter is not set, getStringParameter returns empty string ''
if len(self.archive_link.getMetadata(self.metadata_uids[0]).getStringParameter('cdc6')) == 0:
# default n/a
self.cdc6 = 'n/a'
print(f'cdc6 not found. Setting default to {self.cdc6}')
# parameter properly set
else:
self.cdc6 = self.archive_link.getMetadata(self.metadata_uids[0]).getStringParameter('cdc6')
self.proteins = set()
# will get all columns in DataTable with 'Protein_n_Position_on_Dna'
        for match in re.findall(r'\w+_Position_on_DNA', '$'.join(set(sc.to_python(
self.archive_link.properties().getColumnSet())))):
self.proteins.add(match.split('_')[0])
# instantiate a new DnaMolecule for each uid and store instances as list
self.molecules = [DnaMolecule(uid, self.proteins, archive=self.archive_link) for uid in
self.archive_link.getMoleculeUIDs()
if self.archive_link.get(uid).hasTag(accept_tag)]
        # define archive tags as the union of all molecule tags
# define archive prefixes as union of all molecule prefixes (will be used for top level columns in big df later)
self.tags = set()
self.prefixes = set()
for molecule in self.molecules:
self.tags.update(molecule.tags)
self.prefixes.update(molecule.prefixes)
def validate_params(self):
"""
Integrity check of passed Archive.
"""
        # compare the number of proteins in params vs the actual one (retrieved from metadata)
for molecule in self.molecules:
            # take the global protein set to confirm the dict was passed correctly
for protein in self.proteins:
if not (molecule.proteins[protein] == molecule.params['Number_' + protein]):
err_message = f"Conflict in molecule {molecule.uid}!\n\
Number of {protein} retrieved from metadata: {molecule.proteins[protein]}\n\
Number of {protein} based on Parameter: {molecule.params['Number_' + protein]}"
raise MarsPyException(err_message)
return 'passed'
def add_segments_tables(self):
"""
        Attach all segment tables to molecule records (stored as a list in molecule.seg_dfs)
"""
# Do we have collisions in the archive?
coll_exp = False
for tag in self.tags:
if re.match('coll', tag):
coll_exp = True
for molecule in self.molecules:
molecule.seg_dfs = list()
# all segmentTableNames
for x, y, region in (sc.to_python(self.archive_link.get(molecule.uid).getSegmentsTableNames())):
# internal control that all seg_dfs are valid
_assigned = False
# all proteins on molecule
for prefix in molecule.prefixes:
if re.match(prefix, x) and re.match(prefix, y):
molecule.seg_dfs.append(SegmentsTable(molecule=molecule, prefix=prefix,
col_x=x, col_y=y, region=region, coll_exp=coll_exp))
_assigned = True
break
if not _assigned:
err_message = f"Conflict in molecule {molecule.uid}!\nSegmentTable {x} {y} {region} not assigned!"
raise MarsPyException(err_message)
def filter_segments(self, b_min=0, sigma_b_max=0):
"""
Filter all segments for all molecules in archive based on SegmentsTable type.
Also see filter_segments() in SegmentsTable object:
Mode 1: SegmentTable type: 'bleaching' -
Reject all steps with increase in fluorescence intensity (initial quenching).
If increase in fluorescence is detected, remove segments with same intensity (double counts).
Mode 2: SegmentsTable type: 'rate' -
Reject all segments with B value (velocity) < b_min and sigma_B > sigma_b_max (poor fits)
"""
for molecule in self.molecules:
for prefix in molecule.prefixes:
# check if one protein molecule has only one seg_df with type 'bleaching'
if len(list(filter(lambda df:
prefix == df.prefix and df.type == 'bleaching', molecule.seg_dfs))) > 1:
err_message = f"Conflict in molecule {molecule.uid}!\nMore than one SegmentTable for {prefix}."
raise MarsPyException(err_message)
# apply filter to all seg_dfs
for seg_df in molecule.seg_dfs:
seg_df.filter_segments(b_min=b_min, sigma_b_max=sigma_b_max)
# in case seg_df is empty after filtering, delete object
remove_seg_dfs = set()
for seg_df in molecule.seg_dfs:
if len(seg_df.df) == 0:
remove_seg_dfs.add(seg_df)
for seg_df in remove_seg_dfs:
molecule.seg_dfs.remove(seg_df)
def calc_bleaching_steps(self):
"""
Calculate bleaching steps for all proteins of all molecules in archive.
Also see calc_bleaching_steps() in SegmentsTable object:
Calculate number of bleaching steps based off of segment table rows.
Returns: number of bleaching steps (integer value)
No return value here; bleaching steps are stored as attribute dict with prefixes as key.
"""
for molecule in self.molecules:
molecule.bleaching_steps = dict()
for prefix in molecule.prefixes:
# check if one protein molecule has only one seg_df with type 'bleaching'
if len(list(filter(lambda seg_df:
prefix == seg_df.prefix and seg_df.type == 'bleaching', molecule.seg_dfs))) > 1:
err_message = f"Conflict in molecule {molecule.uid}!\nMore than one SegmentTable for {prefix}."
raise MarsPyException(err_message)
# only molecules with proper bleaching (not rejected)
if 'reject_bleach_' + prefix in molecule.tags:
continue
molecule.bleaching_steps[prefix] = \
list(filter(lambda seg_df: prefix == seg_df.prefix and seg_df.type == 'bleaching',
molecule.seg_dfs))[0].calc_bleaching_steps()
def detect_pauses(self, thresh=3, global_thresh=False, col='B'):
"""
Detect pauses in translocation for all SegmentTables of all molecules in archive.
Also see detect_pauses() in SegmentsTable object:
        Detects pauses in the SegmentsTable (only for type = 'rate'; others are skipped)
global_thresh: Set to True if a fixed threshold for all molecules should be used
thresh: threshold to detect pauses.
If global_thresh is False, a molecule-specific threshold is calculated with thresh^-1 * np.mean(col)
col: column evaluated for pauses
"""
for molecule in self.molecules:
for seg_df in molecule.seg_dfs:
seg_df.detect_pauses(thresh=thresh, global_thresh=global_thresh, col=col)
def get_molecule_by_uid(self, uid):
"""
Returns molecule object with provided UID.
"""
return list(filter(lambda molecule: molecule.uid == uid, self.molecules))[0]
def get_molecules_by_tags(self, tags):
"""
Provide tags as list.
Returns filter of all molecules which have all the specified tags
"""
return filter(lambda molecule: set(tags).issubset(set(molecule.tags)), self.molecules)
def __len__(self):
return len(self.molecules)
def instantiate_archive(name, datasets):
"""
Instantiates passed archive from underlying dataset
"""
# check if we have the right data type
for data in datasets:
if not isinstance(data, DataSet):
raise MarsPyException('Dataset contains non-compatible data type.')
data = list(filter(lambda dataset: dataset.name == name, datasets))[0]
if data.archive_type == 'DnaMoleculeArchive':
DnaMoleculeArchive(filepath=data.filepath + data.name, accept_tag=data.accept_tag, labels=data.labels)
elif data.archive_type == 'SingleMoleculeArchive':
SingleMoleculeArchive(filepath=data.filepath + data.name, accept_tag=data.accept_tag, label=data.labels)
else:
raise MarsPyException(f'Failed to instantiate Archive {data.name}.')
def describe_archives(archives):
"""
    Describes the passed archives by returning a pandas DataFrame. Pass archives as an iterable object.
"""
df = pd.DataFrame(columns=['# of datasets', '# of molecules', 'labeled proteins', 'nucleotide', 'HS challenge?',
'chromatin', 'terminator?', 'archive validation'])
for archive in archives:
_temp_df = pd.DataFrame(index=[archive.name.split('.')[0]],
data=[[len(archive.metadata_uids), len(archive),
'; '.join([label + '-' + protein for protein, label in archive.labels.items()]),
archive.nucleotide, archive.highsalt_wash, archive.chromatin,
archive.t7_terminator, archive.validate_params()]],
columns=['# of datasets', '# of molecules', 'labeled proteins', 'nucleotide',
'HS challenge?', 'chromatin', 'terminator?', 'archive validation'])
df = pd.concat([df, _temp_df])
df = df.infer_objects()
return df
|
py | 1a50f448d1b9c837907c54c39681c4dd5ab93716 | from setuptools import setup
setup(name='gatzzt',
version='0.1',
description='tools for data science',
url='http://github.com/GatzZ/gatzzt',
author='GatzZ',
author_email='[email protected]',
license='MIT',
packages=['gatzzt'],
zip_safe=False) |
py | 1a50f5d7bb0c459a3f48ed14e7103ccf29b1e4a3 | class Solution:
    def beautifulArray(self, N: int) -> List[int]:
        res = [1]  # divide & conquer: the odds, then the evens, of a beautiful array stay beautiful
        while len(res) < N:
            res = [2 * x - 1 for x in res] + [2 * x for x in res]
        return [x for x in res if x <= N]
|
py | 1a50f621c98f04071597edc3b22ad8bd83de788a | import numpy as np
from gridlod import util
def build_randomcheckerboard(Nepsilon, NFine, alpha, beta, p):
# builds a random checkerboard coefficient with spectral bounds alpha and beta,
# piece-wise constant on mesh with Nepsilon blocks
# returns a fine coefficient on mesh with NFine blocks
Ntepsilon = np.prod(Nepsilon)
c = np.random.binomial(1,p,Ntepsilon)
values = alpha + (beta-alpha) * c
def randomcheckerboard(x):
index = (x*Nepsilon).astype(int)
d = np.shape(index)[1]
        if d == 1:
            flatindex = index[:]
        elif d == 2:
            flatindex = index[:,1]*Nepsilon[0]+index[:,0]
        elif d == 3:
            flatindex = index[:,2]*(Nepsilon[0]*Nepsilon[1]) + index[:,1]*Nepsilon[0] + index[:,0]
        else:
            raise NotImplementedError('other dimensions not available')
return values[flatindex]
xFine = util.tCoordinates(NFine)
return randomcheckerboard(xFine).flatten()
def build_checkerboardbasis(NPatch, NepsilonElement, NFineElement, alpha, beta):
    # builds a list of coefficients from which any checkerboard coefficient can be combined
# input: NPatch is number of coarse elements, NepsilonElement and NFineElement the number of cells (per dimension)
# per coarse element for the epsilon and the fine mesh, respectively; alpha and beta are the spectral bounds of the coefficient
Nepsilon = NPatch * NepsilonElement
Ntepsilon = np.prod(Nepsilon)
NFine = NPatch*NFineElement
NtFine = np.prod(NFine)
def checkerboardI(ii):
coeff = alpha * np.ones(NtFine)
#find out which indices on fine grid correspond to element ii on epsilon grid
elementIndex = util.convertpLinearIndexToCoordIndex(Nepsilon-1, ii)[:]
indices = util.extractElementFine(Nepsilon, NFineElement//NepsilonElement, elementIndex)
coeff[indices] = beta
return coeff
checkerboardbasis = list(map(checkerboardI, range(Ntepsilon)))
checkerboardbasis.append(alpha*np.ones(NtFine))
return checkerboardbasis
def build_inclusions_defect_2d(NFine, Nepsilon, bg, val, incl_bl, incl_tr, p_defect, def_val=None):
# builds a fine coefficient which is periodic with periodicity length 1/epsilon.
# On the unit cell, the coefficient takes the value val inside a rectangle described by incl_bl (bottom left) and
# incl_tr (top right), otherwise the value is bg
# with a probability of p_defect the inclusion 'vanishes', i.e. the value is set to def_val (default: bg)
    assert(np.all(incl_bl >= 0.))
    assert(np.all(incl_tr <= 1.))
assert(p_defect < 1.)
if def_val is None:
def_val = bg
#probability of defect is p_defect
c = np.random.binomial(1, p_defect, np.prod(Nepsilon))
aBaseSquare = bg*np.ones(NFine)
flatidx = 0
for ii in range(Nepsilon[0]):
for jj in range(Nepsilon[1]):
startindexcols = int((ii + incl_bl[0]) * (NFine/Nepsilon)[0])
stopindexcols = int((ii + incl_tr[0]) * (NFine/Nepsilon)[0])
startindexrows = int((jj + incl_bl[1]) * (NFine/Nepsilon)[1])
stopindexrows = int((jj + incl_tr[1]) * (NFine/Nepsilon)[1])
if c[flatidx] == 0:
aBaseSquare[startindexrows:stopindexrows, startindexcols:stopindexcols] = val
else:
aBaseSquare[startindexrows:stopindexrows, startindexcols:stopindexcols] = def_val
flatidx += 1
return aBaseSquare.flatten()
def build_inclusions_change_2d(NFine, Nepsilon, bg, val, incl_bl, incl_tr, p_defect, model):
# builds a fine coefficient which is periodic with periodicity length 1/epsilon.
# On the unit cell, the coefficient takes the value val inside a rectangle described by incl_bl (bottom left) and
# incl_tr (top right), otherwise the value is bg
# with a probability of p_defect the inclusion 'changes', where three models are implemented:
# -filling the whole scaled unit cell (fill)
# -shifting the inclusion to def_bl, def_br
# - L-shape, i.e. erasing only the area def_bl to def_br
    assert(np.all(incl_bl >= 0.))
    assert(np.all(incl_tr <= 1.))
assert(p_defect < 1.)
assert(model['name'] in ['inclfill', 'inclshift', 'inclLshape'])
#probability of defect is p_defect
c = np.random.binomial(1, p_defect, np.prod(Nepsilon))
aBaseSquare = bg*np.ones(NFine)
flatidx = 0
for ii in range(Nepsilon[0]):
for jj in range(Nepsilon[1]):
startindexcols = int((ii + incl_bl[0]) * (NFine/Nepsilon)[0])
stopindexcols = int((ii + incl_tr[0]) * (NFine/Nepsilon)[0])
startindexrows = int((jj + incl_bl[1]) * (NFine/Nepsilon)[1])
stopindexrows = int((jj + incl_tr[1]) * (NFine/Nepsilon)[1])
if c[flatidx] == 0: # no defect
aBaseSquare[startindexrows:stopindexrows, startindexcols:stopindexcols] = val
else:
if model['name'] == 'inclfill':
startdefindexcols = int((ii) * (NFine / Nepsilon)[0])
stopdefindexcols = int((ii + 1) * (NFine / Nepsilon)[0])
startdefindexrows = int((jj) * (NFine / Nepsilon)[1])
stopdefindexrows = int((jj + 1) * (NFine / Nepsilon)[1])
aBaseSquare[startdefindexrows:stopdefindexrows, startdefindexcols:stopdefindexcols] = val
if model['name'] == 'inclshift':
def_bl = model['def_bl']
def_tr = model['def_tr']
startdefindexcols = int((ii + def_bl[0]) * (NFine / Nepsilon)[0])
stopdefindexcols = int((ii + def_tr[0]) * (NFine / Nepsilon)[0])
startdefindexrows = int((jj + def_bl[1]) * (NFine / Nepsilon)[1])
stopdefindexrows = int((jj + def_tr[1]) * (NFine / Nepsilon)[1])
aBaseSquare[startdefindexrows:stopdefindexrows, startdefindexcols:stopdefindexcols] = val
if model['name'] == 'inclLshape':
#first, put a normal inclusion
aBaseSquare[startindexrows:stopindexrows, startindexcols:stopindexcols] = val
# erase now the complement of the Lshape in the inclusion
def_bl = model['def_bl']
def_tr = model['def_tr']
startdefindexcols = int((ii + def_bl[0]) * (NFine / Nepsilon)[0])
stopdefindexcols = int((ii + def_tr[0]) * (NFine / Nepsilon)[0])
startdefindexrows = int((jj + def_bl[1]) * (NFine / Nepsilon)[1])
stopdefindexrows = int((jj + def_tr[1]) * (NFine / Nepsilon)[1])
aBaseSquare[startdefindexrows:stopdefindexrows, startdefindexcols:stopdefindexcols] = bg
flatidx += 1
return aBaseSquare.flatten()
def build_inclusionbasis_2d(NPatch, NEpsilonElement, NFineElement, bg, val, incl_bl, incl_tr, defval=None):
Nepsilon = NPatch * NEpsilonElement
NFine = NPatch * NFineElement
if defval is None:
defval = bg
    assert (np.all(incl_bl >= 0.))
    assert (np.all(incl_tr <= 1.))
aBaseSquare = bg * np.ones(NFine)
for ii in range(Nepsilon[0]):
for jj in range(Nepsilon[1]):
startindexcols = int((ii + incl_bl[0]) * (NFine / Nepsilon)[0])
stopindexcols = int((ii + incl_tr[0]) * (NFine / Nepsilon)[0])
startindexrows = int((jj + incl_bl[1]) * (NFine / Nepsilon)[1])
stopindexrows = int((jj + incl_tr[1]) * (NFine / Nepsilon)[1])
aBaseSquare[startindexrows:stopindexrows, startindexcols:stopindexcols] = val
#aBase = aBaseSquare.flatten()
def inclusion_defectI(ii):
aSquare = np.copy(aBaseSquare)
tmp_indx = np.array([ii % Nepsilon[1], ii // Nepsilon[1]])
startindexcols = int((tmp_indx[0] + incl_bl[0]) * (NFine / Nepsilon)[0])
stopindexcols = int((tmp_indx[0] + incl_tr[0]) * (NFine / Nepsilon)[0])
startindexrows = int((tmp_indx[1] + incl_bl[1]) * (NFine / Nepsilon)[1])
stopindexrows = int((tmp_indx[1] + incl_tr[1]) * (NFine / Nepsilon)[1])
aSquare[startindexrows:stopindexrows, startindexcols:stopindexcols] = defval
return aSquare.flatten()
coeffList = list(map(inclusion_defectI, range(np.prod(Nepsilon))))
coeffList.append(aBaseSquare.flatten())
return coeffList
def build_inclusionbasis_change_2d(NPatch, NEpsilonElement, NFineElement, bg, val, incl_bl, incl_tr, model):
Nepsilon = NPatch * NEpsilonElement
NFine = NPatch * NFineElement
    assert (np.all(incl_bl >= 0.))
    assert (np.all(incl_tr <= 1.))
assert(model['name'] in ['inclfill', 'inclshift', 'inclLshape'])
aBaseSquare = bg * np.ones(NFine)
for ii in range(Nepsilon[0]):
for jj in range(Nepsilon[1]):
startindexcols = int((ii + incl_bl[0]) * (NFine / Nepsilon)[0])
stopindexcols = int((ii + incl_tr[0]) * (NFine / Nepsilon)[0])
startindexrows = int((jj + incl_bl[1]) * (NFine / Nepsilon)[1])
stopindexrows = int((jj + incl_tr[1]) * (NFine / Nepsilon)[1])
aBaseSquare[startindexrows:stopindexrows, startindexcols:stopindexcols] = val
#aBase = aBaseSquare.flatten()
def inclusion_defectI(ii):
aSquare = np.copy(aBaseSquare)
tmp_indx = np.array([ii % Nepsilon[1], ii // Nepsilon[1]])
if model['name'] == 'inclfill':
startdefindexcols = int((tmp_indx[0]) * (NFine / Nepsilon)[0])
stopdefindexcols = int((tmp_indx[0] + 1) * (NFine / Nepsilon)[0])
startdefindexrows = int((tmp_indx[1]) * (NFine / Nepsilon)[1])
stopdefindexrows = int((tmp_indx[1] + 1) * (NFine / Nepsilon)[1])
aSquare[startdefindexrows:stopdefindexrows, startdefindexcols:stopdefindexcols] = val
if model['name'] == 'inclshift':
def_bl = model['def_bl']
def_tr = model['def_tr']
#first erase the inclusion
startindexcols = int((tmp_indx[0] + incl_bl[0]) * (NFine / Nepsilon)[0])
stopindexcols = int((tmp_indx[0] + incl_tr[0]) * (NFine / Nepsilon)[0])
startindexrows = int((tmp_indx[1] + incl_bl[1]) * (NFine / Nepsilon)[1])
stopindexrows = int((tmp_indx[1] + incl_tr[1]) * (NFine / Nepsilon)[1])
aSquare[startindexrows:stopindexrows, startindexcols:stopindexcols] = bg
#now put the inclusion at the new place
startdefindexcols = int((tmp_indx[0] + def_bl[0]) * (NFine / Nepsilon)[0])
stopdefindexcols = int((tmp_indx[0] + def_tr[0]) * (NFine / Nepsilon)[0])
startdefindexrows = int((tmp_indx[1] + def_bl[1]) * (NFine / Nepsilon)[1])
stopdefindexrows = int((tmp_indx[1] + def_tr[1]) * (NFine / Nepsilon)[1])
aSquare[startdefindexrows:stopdefindexrows, startdefindexcols:stopdefindexcols] = val
if model['name'] == 'inclLshape': # erase the complement of the Lshape in the inclusion
def_bl = model['def_bl']
def_tr = model['def_tr']
startdefindexcols = int((tmp_indx[0] + def_bl[0]) * (NFine / Nepsilon)[0])
stopdefindexcols = int((tmp_indx[0] + def_tr[0]) * (NFine / Nepsilon)[0])
startdefindexrows = int((tmp_indx[1] + def_bl[1]) * (NFine / Nepsilon)[1])
stopdefindexrows = int((tmp_indx[1] + def_tr[1]) * (NFine / Nepsilon)[1])
aSquare[startdefindexrows:stopdefindexrows, startdefindexcols:stopdefindexcols] = bg
return aSquare.flatten()
coeffList = list(map(inclusion_defectI, range(np.prod(Nepsilon))))
coeffList.append(aBaseSquare.flatten())
return coeffList |
py | 1a50f646360cdada15647a3a4b1ceef3fa063dbb | import numpy as np
from sys import argv
import sys
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
import seaborn as sns
import matplotlib.ticker as ticker
from collections import Counter
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
from EpiModel import *
import cv19
#-------------------------------------------------------------
# This is a development implementation of the SEIR model,
# meant to later replace the functions of the cv19 module
#
# in the original version:
# there is no delay, infection and symptoms are instantaneous
# it is not stochastic
# the whole population reacts to the virus in the same way
# it is closed: S+I+R=N
#
# in this version:
# there is a statistical delay
# it is not stochastic
# it is closed: S+I+R=N
#
# Improvements to make:
# - incorporate imported cases: simply add a Poisson draw to I
# - incorporate the stochastic nature: simply sample a
#   random variable instead of doing the deterministic calculation
# - incorporate quarantines: make beta a function of time
#
# With that the model would have everything
#
#--------------------------------------------------------------
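# For reference, the standard deterministic SEIR rate equations that the update
# steps below discretize are (per age group; N is the total population):
#   dS/dt = -beta * S * I / N
#   dE/dt = +beta * S * I / N - sigma * E
#   dI/dt = +sigma * E - gamma * I
#   dR/dt = +gamma * I
# (the loop below experiments with total vs. per-age S and I in the dS and dE terms)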
def random_gen(A, n=1):
from random import random
res = []
rep = [0]*len(A)
for _ in range(n):
u = random()
#msg = 'testing if %f is in interval (%f %f) / index: %i'
        j = -1  # sentinel: stays -1 if u does not fall inside any interval
        y_old = 0.
for i, y_new in enumerate(A):
if u > y_old and u < y_new:
j = i
#print(msg % (u, y_old, y_new, i))
break
else:
y_old = y_new
if j<0 or j>= len(A):
print(j, len(A))
res.append(int(j))
rep[j] = rep[j] + 1
return(res, rep)
#___________________________________
# Load settings
conf = cv19.parser()
conf.check_file(argv)
conf.read_config_file()
conf.load_filenames()
conf.load_parameters()
#___________________________________
# parameters
R_0 = 2.2
beta = 0.7
sigma = 0.05
gamma = beta / R_0
population = 100000
N_init = 10
t_max = 200
#----------------------------------------------- SIMULATED MODEL
#{{{
c = cv19.InfectionCurve()
p = conf.p
g = cv19.Graph_nd()
# at the start, everyone is in S with the population distribution:
# 1. make up an arbitrary PDF for the population distribution:
# in this example there are Nages=3 ranges: young, adult, elderly
pdf = np.array([5, 3, 2])
Nages = len(pdf)
pdf = pdf / float(pdf.sum())
r, rep = random_gen(pdf.cumsum(), population)
rep = np.array([.8,.2,.1])*population
pop_by_age = np.c_[rep]
# Population has a given age distribution
#---------------------------------------------------
S_init_by_age = np.c_[[[3],[20],[1]]]
# Initialize graph:
#---------------------------------------------------
I0 = S_init_by_age
S0 = pop_by_age - I0
E0 = np.zeros([Nages,1])
R0 = np.zeros([Nages,1])
S, E, I, R = S0, E0, I0, R0
zs = np.zeros([Nages,1])
pops = np.c_[[[population],[population],[population]]]
# transition probabilities may depend on the age:
#----------------------------------------------------
R_0 = 2.2
beta = 0.7
betas = np.c_[[[beta],[beta],[beta]]]
sigma = 0.05
sigmas = np.c_[[[sigma],[sigma],[sigma]]]
gamma = beta/R_0
gammas = np.c_[[[gamma],[gamma],[gamma]]]
#----------------------------------------------------
ts = [0.] # time series
nms = ['prob','lag']
p_dt = 1.
t = 0.
time_steps = 0
t_max = 140
while t < t_max:
time_steps = time_steps + 1
t_prev = t
t = t + p.dt
ts.append(t)
    # (( S )) when updating S, use I by age group and the total S.
    Sf = S[:,-1].reshape(3,1) # distribution on the previous day
    St = np.c_[([S[:,-1].sum()]*3)] # total S on the previous day
If = I[:,-1].reshape(3,1)
dS = - St * If / population * betas
#dS = - Sf * If / pop_by_age * betas
n_S = np.maximum(Sf + dS, zs)
# (( E ))
It = np.c_[([I[:,-1].sum()]*3)]
Ef = E[:,-1].reshape(3,1)
    #dE = St * It / population * betas - sigmas * Ef
    dE = Sf * It / pop_by_age * betas - sigmas * Ef
n_E = np.minimum(Ef + dE, pop_by_age)
# (( I ))
dI = sigmas*Ef - gammas * If
n_I = np.minimum(If + dI, pops)
# (( R ))
Rf = R[:,-1].reshape(3,1)
dR = gammas * If
n_R = np.minimum(Rf + dR, pop_by_age)
S = np.insert(S, [time_steps], n_S, axis=1)
E = np.insert(E, [time_steps], n_E, axis=1)
I = np.insert(I, [time_steps], n_I, axis=1)
R = np.insert(R, [time_steps], n_R, axis=1)
##}}}
#
# for the lag:
# replace I[:,-1] with I[:,-l:] and weight by the distribution
# of delay times.
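# A minimal sketch of that idea, assuming a normalized delay distribution `lag_pdf`
# of length l (hypothetical name; entry k is the weight for a delay of k days):
#   If_lagged = (I[:, -l:] * lag_pdf[::-1]).sum(axis=1).reshape(-1, 1)
# If_lagged would then stand in for I[:, -1] in the update steps above.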
##------------------------------------------------------- PLOT
##{{{
#
ics = [S[0], S[1], S[2], E[0], E[1], E[2], I[0], I[1], I[2], R[0], R[1], R[2]]
labels = ['S', 'E', 'I', 'R']
labels = ['S[0]', 'S[1]', 'S[2]', 'E[0]', 'E[1]', 'E[2]', 'I[0]',
'I[1]', 'I[2]', 'R[0]', 'R[1]', 'R[2]']
clrs = ['red']*3 + ['blue']*3 + ['green']*3 + ['orange']*3
t = ts
plt.rcParams['savefig.facecolor'] = "0.8"
fig, ax = plt.subplots(1, 3, figsize=(20, 10))
#--- SIMU linear
for i, ic in enumerate(ics):
if i%3!=0: continue
sns.lineplot(x=t, y=ic, sort=False, linewidth=1, ax=ax[0],
label=labels[i], color=clrs[i])
#sns.scatterplot(t, ic, ax=ax[0])
ax[0].set_xlabel('Time [days]', fontsize=22)
ax[0].set_ylabel('Number infected', fontsize=22)
ax[0].legend()
ax[0].grid()
ax[0].set_title('Simulation')
#---
for i, ic in enumerate(ics):
if i%3!=1: continue
sns.lineplot(x=t, y=ic, sort=False, linewidth=1, ax=ax[1],
label=labels[i], color=clrs[i])
#sns.scatterplot(t, ic, ax=ax[0])
ax[1].set_xlabel('Time [days]', fontsize=22)
ax[1].set_ylabel('Number infected', fontsize=22)
ax[1].legend()
ax[1].grid()
ax[1].set_title('Simulation')
#---
for i, ic in enumerate(ics):
if i%3!=2: continue
sns.lineplot(x=t, y=ic, sort=False, linewidth=1, ax=ax[2],
label=labels[i], color=clrs[i])
#sns.scatterplot(t, ic, ax=ax[0])
ax[2].set_xlabel('Time [days]', fontsize=22)
ax[2].set_ylabel('Number infected', fontsize=22)
ax[2].legend()
ax[2].grid()
ax[2].set_title('Simulation')
#--- plt
plt.xticks(rotation=0, fontsize=22)
plt.yticks(rotation=90, fontsize=22)
plt.tight_layout()
fig.savefig('../plt/plot_sim_dists.png')
plt.close()
#}}}
|
py | 1a50f81d932843336b3e69b5e740b3960191e4df | import pandas as pd
import numpy as np
from typing import List
user_ID = 78
query = 'userID == ' + str(user_ID)
# head = my_join()
head = ['Action', 'Adventure', 'Animation', 'Children', 'Comedy', 'Crime', 'Documentary', 'Drama', 'Fantasy', 'Film-Noir', 'Horror', 'IMAX', 'Musical', 'Mystery', 'Romance', 'Sci-Fi', 'Short', 'Thriller', 'War', 'Western']
joined = pd.read_csv('/home/kwitnoncy/Documents/politechnika/wti/wtiproj03/data/joined.dat', sep='\t')
joined = joined.query(query).to_numpy()
print([np.nanmean([(row[2] * row[genre + 9]) if row[genre + 9] != 0 else np.nan for row in joined]) for genre in range(len(head))])
|
py | 1a50f86e090f553b5304ac9355982ded6d25e94d | #!/usr/bin/env python3
"""A script for running Robot Framework's own acceptance tests.
Usage: atest/run.py [--interpreter interpreter] [options] [data]
`data` is path (or paths) of the file or directory under the `atest/robot`
folder to execute. If `data` is not given, all tests except for tests tagged
with `no-ci` are executed.
Available `options` are the same that can be used with Robot Framework.
See its help (e.g. `robot --help`) for more information.
By default uses the same Python interpreter for running tests that is used
for running this script. That can be changed by using the `--interpreter` (`-I`)
option. It can be the name of the interpreter (e.g. `pypy3`) or a path to the
selected interpreter (e.g. `/usr/bin/python39`). If the interpreter itself needs
arguments, the interpreter and its arguments need to be quoted (e.g. `"py -3"`).
Examples:
$ atest/run.py
$ atest/run.py --exclude no-ci atest/robot/standard_libraries
$ atest/run.py --interpreter pypy3
The results of the test execution are written into an interpreter specific
directory under the `atest/results` directory. Temporary outputs created
during the execution are created under the system temporary directory.
"""
import argparse
import os
from pathlib import Path
import shutil
import signal
import subprocess
import sys
import tempfile
from interpreter import Interpreter
CURDIR = Path(__file__).parent
ARGUMENTS = '''
--doc Robot Framework acceptance tests
--metadata interpreter:{interpreter}
--variablefile {variable_file};{interpreter.path};{interpreter.name};{interpreter.version}
--pythonpath {pythonpath}
--outputdir {outputdir}
--splitlog
--console dotted
--consolewidth 100
--SuiteStatLevel 3
'''.strip()
def atests(interpreter, arguments):
try:
interpreter = Interpreter(interpreter)
except ValueError as err:
sys.exit(err)
outputdir, tempdir = _get_directories(interpreter)
arguments = list(_get_arguments(interpreter, outputdir)) + list(arguments)
return _run(arguments, tempdir, interpreter)
def _get_directories(interpreter):
name = interpreter.output_name
outputdir = CURDIR / 'results' / name
tempdir = Path(tempfile.gettempdir()) / 'robotatest' / name
if outputdir.exists():
shutil.rmtree(outputdir)
if tempdir.exists():
shutil.rmtree(tempdir)
os.makedirs(tempdir)
return outputdir, tempdir
def _get_arguments(interpreter, outputdir):
arguments = ARGUMENTS.format(interpreter=interpreter,
variable_file=CURDIR / 'interpreter.py',
pythonpath=CURDIR / 'resources',
outputdir=outputdir)
for line in arguments.splitlines():
yield from line.split(' ', 1)
for exclude in interpreter.excludes:
yield '--exclude'
yield exclude
def _run(args, tempdir, interpreter):
command = [sys.executable, str(CURDIR.parent / 'src/robot/run.py')] + args
environ = dict(os.environ,
TEMPDIR=str(tempdir),
PYTHONCASEOK='True',
PYTHONIOENCODING='')
print('%s\n%s\n' % (interpreter, '-' * len(str(interpreter))))
print('Running command:\n%s\n' % ' '.join(command))
sys.stdout.flush()
signal.signal(signal.SIGINT, signal.SIG_IGN)
return subprocess.call(command, env=environ)
if __name__ == '__main__':
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('-I', '--interpreter', default=sys.executable)
parser.add_argument('-h', '--help', action='store_true')
options, robot_args = parser.parse_known_args()
if not robot_args or not Path(robot_args[-1]).exists():
robot_args += ['--exclude', 'no-ci', str(CURDIR/'robot')]
if options.help:
print(__doc__)
rc = 251
else:
rc = atests(options.interpreter, robot_args)
sys.exit(rc)
|
py | 1a50f96c98f24977ffc72faf9538ab6f1e87d99a | import requests
import json
session = requests.Session()
jar = requests.cookies.RequestsCookieJar()
baseurl = "https://general.direction.com:8443/wsg/api/public/v6_1/" #replace "general.direction.com" with either the host name or IP of a member of the cluster
# Written with 3.6.2 in mind
#http://docs.ruckuswireless.com/smartzone/3.6.2/sz100-public-api-reference-guide-3-6-2.html API documentation
szusername = "" #Enter a username with read privages to everything you want to access
szpassword = "" #Password for the above account
headers_template = {'Content-Type': "application/json;charset=UTF-8"}
loginpayload = '{ "username": "' + szusername + '",\r\n "password": "' + szpassword + '"}'
def ruckus_post(url,data,headers = headers_template,check_cert = False):
output = session.post(baseurl + url, data=data, headers=headers, verify=check_cert, cookies=jar)
return output
get_login_session_cookie = ruckus_post("session",loginpayload) #This uses the ruckus_post above to get a session valid session cookie into the cookie jar
def ruckus_get(url,headers = headers_template,check_cert = False):
output = session.get(baseurl + url, headers=headers, verify=check_cert, cookies=jar)
return output
jsonzones = ruckus_get("rkszones") #Get the JSON data for the zones confiured on the cluster
#The below function ruckus_list is used for stripping out the "list" dictionary from the returned JSON
def ruckus_list(jsondata):
output = {}
output = json.loads(jsondata.text)
output = output["list"]
return output
zones = ruckus_list(jsonzones)
def clean_ruckus_list(dictdata,dict_parent_name = "NONE",dict_parent_id = "NONE",names="name",ids="id"):
output = []
for row in dictdata:
output_name = ""
output_id = ""
for key,val in row.items():
if key == ids:
output_id = row[key]
elif key == names:
output_name = row[key]
        if dict_parent_name == "NONE" or dict_parent_id == "NONE": #Produce a list without useless data; also catches the case where both parent arguments weren't passed
output.append([output_name,output_id])
else:
output.append([dict_parent_name,dict_parent_id,output_name,output_id])
return output
cleaned_zones = clean_ruckus_list(zones)
print("\n")
print("-" * 50)
print("\n")
print("The AP zones configured on this szcluster are:")
print("\n")
for row in cleaned_zones:
print("Name: {} and ID: {}".format(row[0],row[1]))
print("-" * 5)
print("\n")
print("-" * 50)
|
py | 1a50fc5c52effe40e12d65a592df2ac846f4bb51 | from django.conf import settings
from datetime import datetime
OUTPUT_FOLDER = settings.MEDIA_ROOT
METRICS = {'R' : 'Pearson\'s r',
'p_R' : 'Pearson\'s r p-value',
'rho' : 'Spearman\'s rho',
'p_rho' : 'Spearman\'s rho p-value',
'RMSD' : 'Root-mean-square deviation',
'BIAS' : 'Bias (difference of means)',
'n_obs' : '# observations',
'urmsd' : 'Unbiased root-mean-square deviation',
'RSS' : 'Residual sum of squares',
'mse' : 'Mean square error',
'mse_corr' : 'Mean square error correlation',
'mse_bias' : 'Mean square error bias',
'mse_var' : 'Mean square error variance',}
METRIC_TEMPLATE = ["overview_{id_ref}-{ds_ref}_and_{id_sat}-{ds_sat}_",
"{metric}"]
TC_METRICS = {'snr': 'TC: Signal-to-noise ratio',
'err_std': 'TC: Error standard deviation',
'beta': 'TC: Scaling coefficient',}
TC_METRIC_TEMPLATE = ["overview_{id_ref}-{ds_ref}_and_{id_sat}-{ds_sat}_and_{id_sat2}-{ds_sat2}",
"_{metric}",
"_for_{id_met}-{ds_met}"]
C3S = 'C3S'
ISMN = 'ISMN'
GLDAS = 'GLDAS'
SMAP = 'SMAP'
ASCAT = 'ASCAT'
CCI = 'ESA_CCI_SM_combined'
CCIA = 'ESA_CCI_SM_active'
CCIP = 'ESA_CCI_SM_passive'
SMOS = 'SMOS'
ERA5 = 'ERA5'
ERA5_LAND = 'ERA5_LAND'
## dataset versions
C3S_V201706 = 'C3S_V201706'
C3S_V201812 = 'C3S_V201812'
C3S_V201912 = 'C3S_V201912'
ISMN_V20180712_MINI = 'ISMN_V20180712_MINI'
ISMN_V20191211 = 'ISMN_V20191211'
SMAP_V5_PM = 'SMAP_V5_PM'
SMAP_V6_PM = 'SMAP_V6_PM'
SMOS_105_ASC = 'SMOS_105_ASC'
GLDAS_NOAH025_3H_2_1 = 'GLDAS_NOAH025_3H_2_1'
ASCAT_H113 = 'ASCAT_H113'
ERA5_20190613 = 'ERA5_20190613'
ERA5_Land_V20190904 = 'ERA5_LAND_V20190904'
ESA_CCI_SM_A_V04_4 = 'ESA_CCI_SM_A_V04_4'
ESA_CCI_SM_P_V04_4 = 'ESA_CCI_SM_P_V04_4'
ESA_CCI_SM_C_V04_4 = 'ESA_CCI_SM_C_V04_4'
ESA_CCI_SM_A_V04_5 = 'ESA_CCI_SM_A_V04_5'
ESA_CCI_SM_P_V04_5 = 'ESA_CCI_SM_P_V04_5'
ESA_CCI_SM_C_V04_5 = 'ESA_CCI_SM_C_V04_5'
ESA_CCI_SM_C_V04_7 = 'ESA_CCI_SM_C_V04_7'
ESA_CCI_SM_A_V05_2 = 'ESA_CCI_SM_A_V05_2'
ESA_CCI_SM_P_V05_2 = 'ESA_CCI_SM_P_V05_2'
ESA_CCI_SM_C_V05_2 = 'ESA_CCI_SM_C_V05_2'
## dataset data variables
C3S_sm = 'C3S_sm'
SMAP_soil_moisture = 'SMAP_soil_moisture'
SMOS_sm = 'SMOS_sm'
ASCAT_sm = 'ASCAT_sm'
ISMN_soil_moisture = 'ISMN_soil_moisture'
GLDAS_SoilMoi0_10cm_inst = 'GLDAS_SoilMoi0_10cm_inst'
GLDAS_SoilMoi10_40cm_inst = 'GLDAS_SoilMoi10_40cm_inst'
GLDAS_SoilMoi40_100cm_inst = 'GLDAS_SoilMoi40_100cm_inst'
GLDAS_SoilMoi100_200cm_inst = 'GLDAS_SoilMoi100_200cm_inst'
ERA5_sm = 'ERA5_sm'
ERA5_LAND_sm = 'ERA5_LAND_sm'
ESA_CCI_SM_P_sm = 'ESA_CCI_SM_P_sm'
ESA_CCI_SM_A_sm = 'ESA_CCI_SM_A_sm'
ESA_CCI_SM_C_sm = 'ESA_CCI_SM_C_sm'
NOT_AS_REFERENCE = [SMAP, SMOS, ASCAT]
IRREGULAR_GRIDS = {'SMAP' : 0.35,
'SMOS' : 0.25,
'ASCAT' : 0.1}
START_TIME = datetime(1978, 1, 1).strftime('%Y-%m-%d')
END_TIME = datetime.now().strftime('%Y-%m-%d')
|
py | 1a50fd35406fe0a005a7dc367e0b64072e649352 | #!/usr/bin/env python
"""
Script to run benchmarks (used by regression tests)
"""
import os
import os.path
import sys
import csv
from LogManager import LoggingManager
def printf(format, *args):
sys.stdout.write(format % args)
_log = LoggingManager.get_logger(__name__)
def isexec (fpath):
if fpath == None: return False
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
def which(program):
fpath, fname = os.path.split(program)
if fpath:
if isexec (program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
exe_file = os.path.join(path, program)
if isexec (exe_file):
return exe_file
return None
def parseArgs (argv):
import argparse as a
p = a.ArgumentParser (description='Benchmark Runner')
p.add_argument ('--cpu', metavar='CPU',
type=int, help='CPU limit', default=60)
p.add_argument ('--mem', metavar='MEM',
type=int, help='Memory limit (MB)', default=512)
p.add_argument ('file', nargs='+',
help='Benchmark files')
p.add_argument ('--prefix', default='BRUNCH_STAT',
help='Prefix for stats')
p.add_argument ('--format', required=True, help='Fields')
p.add_argument ('--out', metavar='DIR',
default="out", help='Output directory')
if '-h' in argv or '--help' in argv:
p.print_help ()
p.exit (0)
try:
k = argv.index ('--')
except ValueError:
p.error ("No '--' argument")
args = p.parse_args (argv[:k])
args.tool_args = argv[k+1:]
# include date in output directory
# import datetime as dt
# dt = dt.datetime.now ().strftime ('%d_%m_%Y-t%H-%M-%S')
# args.out = '{out}.{dt}'.format (out=args.out, dt=dt)
return args
def collectStats (stats, file):
f = open (file, 'r')
for line in f:
if not line.startswith ('BRUNCH_STAT'): continue
fld = line.split (' ')
stats [fld[1]] = fld[2].strip ()
f.close ()
return stats
def statsHeader (stats_file, flds):
with open (stats_file, 'w') as sf:
writer = csv.writer (sf)
writer.writerow (flds)
def statsLine (stats_file, fmt, stats):
line = list()
for fld in fmt:
if fld in stats: line.append (str (stats [fld]))
else: line.append (None)
with open (stats_file, 'a') as sf:
writer = csv.writer (sf)
writer.writerow (line)
cpuTotal = 0.0
def runTool (tool_args, f, out, cpu, mem, fmt):
global cpuTotal
import resource as r
def set_limits ():
if mem > 0:
mem_bytes = mem * 1024 * 1024
r.setrlimit (r.RLIMIT_AS, [mem_bytes, mem_bytes])
if cpu > 0:
r.setrlimit (r.RLIMIT_CPU, [cpu, cpu])
fmt_tool_args = [v.format(f=f) for v in tool_args]
fmt_tool_args[0] = which (fmt_tool_args[0])
fmt_tool_args.append(f)
base = os.path.basename (f)
#outfile = os.path.join (out, base + '.stdout')
errfile = os.path.join (out, base + '.stderr')
import subprocess as sub
_log.info(base)
p = sub.Popen (fmt_tool_args,
shell=False, stdout=sub.PIPE, stderr=sub.STDOUT,
preexec_fn=set_limits)
#stdout=open(outfile, 'w'), stderr=open(errfile, 'w'),
    result, _ = p.communicate ()
    result = result.decode (errors='replace')  # communicate() returns bytes on Python 3
cpuUsage = r.getrusage (r.RUSAGE_CHILDREN).ru_utime
stats = dict()
stats['File'] = f
stats['base'] = base
stats['Status'] = p.returncode
stats['Cpu'] = '{0:.3f}'.format (cpuUsage - cpuTotal)
if "UNSAT" in result:
stats['Result'] = "UNSAT"
elif "SAT" in result:
stats['Result'] = "SAT"
elif "UNKNOWN" in result:
stats['Result'] = "UNKNOWN"
else:
_log.error(base)
        with open(errfile, "w") as f:
            f.write(result)
stats['Result'] = "ERR"
cpuTotal = cpuUsage
#stats = collectStats (stats, outfile)
#stats = collectStats (stats, errfile)
statsLine (os.path.join (out, 'stats'), fmt, stats)
def main (argv):
args = parseArgs (argv[1:])
if not os.path.exists (args.out):
os.mkdir (args.out)
fmt = args.format.split (':')
statsHeader (os.path.join (args.out, 'stats'), fmt)
global cpuTotal
import resource as r
cpuTotal = r.getrusage (r.RUSAGE_CHILDREN).ru_utime
for f in args.file:
runTool (args.tool_args, f, args.out,
cpu=args.cpu,
mem=args.mem,
fmt=fmt)
return 0
if __name__ == '__main__':
sys.exit (main (sys.argv))
|
py | 1a50fd43ed33e03df006aa6ffe3e189bbb62331b | # Python
import unittest
# Ats
from pyats.topology import Device
# Genie package
from genie.ops.base import Base
from genie.ops.base.maker import Maker
from unittest.mock import Mock
# genie.libs
from genie.libs.ops.static_routing.iosxe.static_routing import StaticRouting
from genie.libs.ops.static_routing.iosxe.tests.static_routing_output import StaticRouteOutput
from genie.libs.parser.iosxe.show_vrf import ShowVrfDetail
outputs = {}
outputs['show ip static route'] = StaticRouteOutput.showIpv4StaticRoute_default
outputs['show ip static route vrf VRF1'] = StaticRouteOutput.showIpv4StaticRoute_vrf1
outputs['show ipv6 static detail'] = StaticRouteOutput.showIpv6StaticRoute_default
outputs['show ipv6 static vrf VRF1 detail'] = StaticRouteOutput.showIpv6StaticRoute_vrf1
def mapper(key):
return outputs[key]
class test_static_route_all(unittest.TestCase):
def setUp(self):
self.device = Device(name='aDevice')
self.device.os = 'iosxe'
self.device.custom['abstraction'] = {'order':['os']}
self.device.mapping = {}
self.device.mapping['cli'] = 'cli'
self.device.connectionmgr.connections['cli'] = self.device
def test_full_static_route(self):
f = StaticRouting(device=self.device)
f.maker.outputs[ShowVrfDetail] = {'': StaticRouteOutput.ShowVrfDetail}
# Get 'show ip static route' output
self.device.execute = Mock()
self.device.execute.side_effect = mapper
# Learn the feature
f.learn()
self.maxDiff = None
self.assertEqual(f.info, StaticRouteOutput.staticRouteOpsOutput)
def test_selective_attribute_static_route(self):
f = StaticRouting(device=self.device)
f.maker.outputs[ShowVrfDetail] = {'': StaticRouteOutput.ShowVrfDetail}
# Get 'show ip static route' output
self.device.execute = Mock()
self.device.execute.side_effect = mapper
# Learn the feature
f.learn()
# Check match
self.assertEqual('GigabitEthernet0/2', f.info['vrf']['VRF1']['address_family']['ipv4']['routes']\
['10.36.3.3/32']['next_hop']['outgoing_interface']['GigabitEthernet0/2']['outgoing_interface'])
# Check does not match
self.assertNotEqual('GigabitEthernet0/0', f.info['vrf']['VRF1']['address_family']['ipv4']['routes']\
['10.36.3.3/32']['next_hop']['outgoing_interface']['GigabitEthernet0/2']['outgoing_interface'])
def test_missing_attributes_static_route(self):
f = StaticRouting(device=self.device)
f.maker.outputs[ShowVrfDetail] = {'': StaticRouteOutput.ShowVrfDetail}
# Get 'show ip static route' output
self.device.execute = Mock()
self.device.execute.side_effect = mapper
# Learn the feature
f.learn()
with self.assertRaises(KeyError):
interfaces = f.info['vrf']['VRF1']['address_family']['ipv4']['routes']\
['10.36.3.3/32']['next_hop']['interface']
def test_empty_output_static_route(self):
self.maxDiff = None
f = StaticRouting(device=self.device)
# Get outputs
f.maker.outputs[ShowVrfDetail] = {'': {}}
outputs['show ip static route'] = ''
outputs['show ip static route vrf VRF1'] = ''
outputs['show ipv6 static detail'] = ''
outputs['show ipv6 static vrf VRF1 detail'] = ''
# Return outputs above as inputs to parser when called
self.device.execute = Mock()
self.device.execute.side_effect = mapper
# Learn the feature
f.learn()
# revert back
outputs['show ip static route'] = StaticRouteOutput.showIpv4StaticRoute_default
outputs['show ip static route vrf VRF1'] = StaticRouteOutput.showIpv4StaticRoute_vrf1
outputs['show ipv6 static detail'] = StaticRouteOutput.showIpv6StaticRoute_default
outputs['show ipv6 static vrf VRF1 detail'] = StaticRouteOutput.showIpv6StaticRoute_vrf1
# Check no attribute not found
with self.assertRaises(AttributeError):
f.info['vrf']
if __name__ == '__main__':
unittest.main()
|
py | 1a50fe4bd853b852c7ad62dccdb10ff315c82a00 | """
Copyright 2019 Team Mark
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# services/file_system/project/tests/test_db.py
import json
import unittest
from project import db
from project.api.models import BlacklistToken
from project.tests.base import BaseTestCase
class TestReadingBlacklistTokens(BaseTestCase):
"""Tests to make sure we can read BlasklistTokens created from users service"""
def test_valid_blacklisted_token_logout(self):
"""Test for logout after valid token gets blacklisted"""
blacklist_token = BlacklistToken('lolol')
db.session.add(blacklist_token)
db.session.commit()
self.assertTrue(BlacklistToken.query.filter_by(token='lolol').first())
|
py | 1a50ff6263e56c08e21c13f1f50b71839c258fd5 | # __init__.py
#!/usr/bin/python
|
py | 1a50ff92e94c0c0b33f62e3b2fcf7077914dbccd | # -*- coding: utf-8 -*-
from tkinter import *
import os
import sys
import subprocess
class Application:
def __init__(self, master=None):
self.fontePadrao = ("Arial", "10")
self.primeiroContainer = Frame(master)
self.primeiroContainer["padx"] = 50
self.primeiroContainer.pack()
self.segundoContainer = Frame(master)
self.segundoContainer["padx"] = 50
self.segundoContainer.pack()
self.terceiroContainer = Frame(master)
self.terceiroContainer["padx"] = 50
self.terceiroContainer.pack()
self.quartoContainer = Frame(master)
self.quartoContainer["padx"] = 50
self.quartoContainer.pack()
self.titulo = Label(self.primeiroContainer, text="Instalador STC")
self.titulo["font"] = ("Arial", "16", "bold")
self.titulo.pack(side=RIGHT)
self.pergunta = Label(self.segundoContainer, text="Instalar STC?")
self.pergunta["font"] = ("Arial", "10")
self.pergunta.pack()
self.botaoInstalar = Button(self.terceiroContainer)
self.botaoInstalar["text"] = "Sim"
self.botaoInstalar["font"] = ("Calibri", "8")
self.botaoInstalar["width"] = 12
self.botaoInstalar["command"] = self.instalarSTC
self.botaoInstalar.pack(side=LEFT)
self.botaoInstalar = Button(self.terceiroContainer)
self.botaoInstalar["text"] = "Nao"
self.botaoInstalar["font"] = ("Calibri", "8")
self.botaoInstalar["width"] = 12
self.botaoInstalar["command"] = self.sairInstalador
self.botaoInstalar.pack()
self.mensagem = Label(self.quartoContainer, text="", font=self.fontePadrao)
self.mensagem.pack(side=LEFT)
    # Method that runs the STC installation
def instalarSTC(self):
print('Validando STC...')
        ######## killing the STC processes
os.system('tskill STCLauncher')
os.system('tskill STCDIS')
os.system('tskill STCPanel')
os.system('tskill STCPlayback')
os.system('tskill STCInterfaceExterna')
os.system('tskill STCMQ')
os.system('tskill STCMQMonitor')
os.system('tskill HybridOBCSimulator')
os.system('tskill Decryptor')
os.system('tskill Encryptor')
        ######## stopping the STC services
os.system('net stop ABR')
os.system('net stop STC.Router.Service')
os.system('net stop STCGlobal')
        ######## delete STC_old
os.system('rd c:\STC_old /s/q')
        ######## move the old STC to STC_old
os.system('move c:\STC c:\STC_old')
        ########## copy the new STC folder to c:\
os.system('mkdir c:\STC')
dirname = os.path.dirname(os.path.realpath(sys.argv[0]))
caminho = ('xcopy {}\STC\*.* c:\STC /E '.format(dirname))
os.system(caminho)
#######Validar.pasta_stc()
start = "C:\\STC\\Client"
erro = "S"
for dirpath, dirnames, filenames in os.walk(start):
for filename in filenames:
if filename == "ConfigSTC.ini":
erro = "N"
filename = os.path.join(dirpath, filename)
print(filename)
print(dirpath)
if erro == "S":
print('Erro - "c:\STC\Client\ConfigSTC.ini" nao encontrado!!!')
os.system("pause")
sys.exit()
start = "C:\\STC\\Server"
for dirpath, dirnames, filenames in os.walk(start):
for filename in filenames:
if filename == "ConfigSTC.ini":
erro = "N"
filename = os.path.join(dirpath, filename)
print(filename)
print(dirpath)
if erro == "S":
print('Erro - "c:\STC\Server\ConfigSTC.ini" nao encontrado!!!')
os.system("pause")
sys.exit()
#############################################
        ########## validate ORACLE ##################
#############################################
        ###### Validating that tnsping works
proc = subprocess.Popen(["tnsping", "ORCL"], stdout=subprocess.PIPE, shell=True)
(out, err) = proc.communicate()
pos_ini = out.find(b'C:\\')
pos_fin = out.find(b'sqlnet.ora')
pos_falha = out.find(b'Falha')
        if pos_ini < 0 or pos_fin < 0: ##### markers not found in the output -> Oracle client not installed
print('Oracle não instalado, por favor verifique!!!')
os.system("pause")
sys.exit()
else:
# caminho = " "
            ############# caminho = (out[pos_ini:pos_fin]) >>>> the path is in here
            #######>>>>>> remove later
            caminho = 'C:\\app\\bruno.uthman\\product\\11.2.0\\client_1\\network\\admin'
            #######>>>>>> remove these comments later
if pos_falha > 0:
print('configurar o tnsname ORCL em: {}'.format(caminho))
os.system('{}\\tnsnames.ora'.format(caminho))
os.system("pause")
sys.exit()
else:
print('Oracle ok')
        ######## configuring the ORCL tnsname
        ########## >>>>> still needs to be done later
print('ENDDDDD!!!!!!')
os.system("pause")
sys.exit()
def sairInstalador(self):
sys.exit()
root = Tk()
root.geometry('{}x{}'.format(500, 150))
Application(root)
root.mainloop() |
py | 1a5100123a5d0a77660cfc6bb381ff02c12a95db | """Support for Voice mailboxes."""
from __future__ import annotations
import asyncio
from contextlib import suppress
from datetime import timedelta
from http import HTTPStatus
import logging
from aiohttp import web
from aiohttp.web_exceptions import HTTPNotFound
import async_timeout
from homeassistant.components.http import HomeAssistantView
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_per_platform, discovery
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.setup import async_prepare_setup_platform
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
DOMAIN = "mailbox"
EVENT = "mailbox_updated"
CONTENT_TYPE_MPEG = "audio/mpeg"
CONTENT_TYPE_NONE = "none"
SCAN_INTERVAL = timedelta(seconds=30)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Track states and offer events for mailboxes."""
mailboxes: list[Mailbox] = []
hass.components.frontend.async_register_built_in_panel(
"mailbox", "mailbox", "mdi:mailbox"
)
hass.http.register_view(MailboxPlatformsView(mailboxes))
hass.http.register_view(MailboxMessageView(mailboxes))
hass.http.register_view(MailboxMediaView(mailboxes))
hass.http.register_view(MailboxDeleteView(mailboxes))
async def async_setup_platform(
p_type: str,
p_config: ConfigType | None = None,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up a mailbox platform."""
if p_config is None:
p_config = {}
if discovery_info is None:
discovery_info = {}
platform = await async_prepare_setup_platform(hass, config, DOMAIN, p_type)
if platform is None:
_LOGGER.error("Unknown mailbox platform specified")
return
_LOGGER.info("Setting up %s.%s", DOMAIN, p_type)
mailbox = None
try:
if hasattr(platform, "async_get_handler"):
mailbox = await platform.async_get_handler(
hass, p_config, discovery_info
)
elif hasattr(platform, "get_handler"):
mailbox = await hass.async_add_executor_job(
platform.get_handler, hass, p_config, discovery_info
)
else:
raise HomeAssistantError("Invalid mailbox platform.")
if mailbox is None:
_LOGGER.error("Failed to initialize mailbox platform %s", p_type)
return
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Error setting up platform %s", p_type)
return
mailboxes.append(mailbox)
mailbox_entity = MailboxEntity(mailbox)
component = EntityComponent(
logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL
)
await component.async_add_entities([mailbox_entity])
setup_tasks = [
asyncio.create_task(async_setup_platform(p_type, p_config))
for p_type, p_config in config_per_platform(config, DOMAIN)
if p_type is not None
]
if setup_tasks:
await asyncio.wait(setup_tasks)
async def async_platform_discovered(platform, info):
"""Handle for discovered platform."""
await async_setup_platform(platform, discovery_info=info)
discovery.async_listen_platform(hass, DOMAIN, async_platform_discovered)
return True
class MailboxEntity(Entity):
"""Entity for each mailbox platform to provide a badge display."""
def __init__(self, mailbox: Mailbox) -> None:
"""Initialize mailbox entity."""
self.mailbox = mailbox
self.message_count = 0
async def async_added_to_hass(self):
"""Complete entity initialization."""
@callback
def _mailbox_updated(event):
self.async_schedule_update_ha_state(True)
self.hass.bus.async_listen(EVENT, _mailbox_updated)
self.async_schedule_update_ha_state(True)
@property
def state(self):
"""Return the state of the binary sensor."""
return str(self.message_count)
@property
def name(self):
"""Return the name of the entity."""
return self.mailbox.name
async def async_update(self):
"""Retrieve messages from platform."""
messages = await self.mailbox.async_get_messages()
self.message_count = len(messages)
class Mailbox:
"""Represent a mailbox device."""
def __init__(self, hass, name):
"""Initialize mailbox object."""
self.hass = hass
self.name = name
@callback
def async_update(self):
"""Send event notification of updated mailbox."""
self.hass.bus.async_fire(EVENT)
@property
def media_type(self):
"""Return the supported media type."""
raise NotImplementedError()
@property
def can_delete(self):
"""Return if messages can be deleted."""
return False
@property
def has_media(self):
"""Return if messages have attached media files."""
return False
async def async_get_media(self, msgid):
"""Return the media blob for the msgid."""
raise NotImplementedError()
async def async_get_messages(self):
"""Return a list of the current messages."""
raise NotImplementedError()
async def async_delete(self, msgid):
"""Delete the specified messages."""
raise NotImplementedError()
class StreamError(Exception):
"""Media streaming exception."""
class MailboxView(HomeAssistantView):
"""Base mailbox view."""
def __init__(self, mailboxes: list[Mailbox]) -> None:
"""Initialize a basic mailbox view."""
self.mailboxes = mailboxes
def get_mailbox(self, platform):
"""Retrieve the specified mailbox."""
for mailbox in self.mailboxes:
if mailbox.name == platform:
return mailbox
raise HTTPNotFound
class MailboxPlatformsView(MailboxView):
"""View to return the list of mailbox platforms."""
url = "/api/mailbox/platforms"
name = "api:mailbox:platforms"
async def get(self, request: web.Request) -> web.Response:
"""Retrieve list of platforms."""
platforms = []
for mailbox in self.mailboxes:
platforms.append(
{
"name": mailbox.name,
"has_media": mailbox.has_media,
"can_delete": mailbox.can_delete,
}
)
return self.json(platforms)
class MailboxMessageView(MailboxView):
"""View to return the list of messages."""
url = "/api/mailbox/messages/{platform}"
name = "api:mailbox:messages"
async def get(self, request, platform):
"""Retrieve messages."""
mailbox = self.get_mailbox(platform)
messages = await mailbox.async_get_messages()
return self.json(messages)
class MailboxDeleteView(MailboxView):
"""View to delete selected messages."""
url = "/api/mailbox/delete/{platform}/{msgid}"
name = "api:mailbox:delete"
async def delete(self, request, platform, msgid):
"""Delete items."""
mailbox = self.get_mailbox(platform)
await mailbox.async_delete(msgid)
class MailboxMediaView(MailboxView):
"""View to return a media file."""
url = r"/api/mailbox/media/{platform}/{msgid}"
name = "api:asteriskmbox:media"
async def get(self, request, platform, msgid):
"""Retrieve media."""
mailbox = self.get_mailbox(platform)
with suppress(asyncio.CancelledError, asyncio.TimeoutError):
async with async_timeout.timeout(10):
try:
stream = await mailbox.async_get_media(msgid)
except StreamError as err:
_LOGGER.error("Error getting media: %s", err)
return web.Response(status=HTTPStatus.INTERNAL_SERVER_ERROR)
if stream:
return web.Response(body=stream, content_type=mailbox.media_type)
return web.Response(status=HTTPStatus.INTERNAL_SERVER_ERROR)
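# --- Illustrative sketch, not part of the original component ---------------
# A mailbox platform module is expected to expose ``async_get_handler`` (or
# ``get_handler``) returning a ``Mailbox`` subclass, as probed in
# ``async_setup_platform`` above. ``DemoMailbox`` and its message payload are
# hypothetical and only show how the abstract interface is typically filled in.
class DemoMailbox(Mailbox):
    """Minimal in-memory mailbox used only for demonstration."""
    @property
    def media_type(self):
        """Demo messages carry no audio media."""
        return CONTENT_TYPE_NONE
    async def async_get_messages(self):
        """Return a single hard-coded message."""
        return [
            {
                "info": {"origtime": 0, "callerid": "demo", "duration": 1},
                "text": "demo message",
                "sha": "0" * 40,
            }
        ]
# A hypothetical platform module would then provide:
#
#     async def async_get_handler(hass, config, discovery_info=None):
#         return DemoMailbox(hass, "demo")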
|
py | 1a5100cd0cf109687219e7c30a4acfb52d2e5b57 | # Generated by Django 2.2 on 2019-07-26 16:30
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Partner',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('partner_name', models.CharField(max_length=250)),
('partner_bio', models.TextField(help_text='A short bio about partner.')),
('partner_logo', models.ImageField(upload_to='partner_pics')),
('partner_website_url', models.CharField(default='#', max_length=250)),
('partner_twitter_url', models.CharField(default='#', max_length=250)),
('created_on', models.DateTimeField(auto_now_add=True)),
('updated_on', models.DateTimeField(auto_now=True)),
],
),
]
|
py | 1a51036b6f7df27251053252c9d4e007b9f83dd5 | from django.contrib.auth import get_user_model, authenticate
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
class UserSerializer(serializers.ModelSerializer):
"""
Serializer for the users object
"""
class Meta:
model = get_user_model()
fields = ('email', 'password', 'name')
extra_kwargs = {'password': {'write_only': True, 'min_length': 5}}
def create(self, validated_data):
"""
        Create a new user with encrypted password
"""
return get_user_model().objects.create_user(**validated_data)
    def update(self, instance, validated_data):
        """
        Update a user, setting the password correctly, and return it
        """
        password = validated_data.pop('password', None)
        user = super().update(instance, validated_data)
        if password:
            user.set_password(password)
            user.save()
        return user
class AuthTokenSerializer(serializers.Serializer):
"""Serializer for the user authentication object"""
email = serializers.CharField()
password = serializers.CharField(
style={'input_type': 'password'},
trim_whitespace=False
)
def validate(self, attrs):
"""Validate and authenticate the user"""
email = attrs.get('email')
password = attrs.get('password')
user = authenticate(
request=self.context.get('request'),
username=email,
password=password
)
if not user:
msg = _('Unable to authenticate with provided credentials')
raise serializers.ValidationError(msg, code='authorization')
attrs['user'] = user
return attrs
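# Illustrative usage sketch (assumes a configured Django project using the
# custom user model targeted by ``get_user_model()``; not part of the
# original module):
#
#     serializer = UserSerializer(data={
#         "email": "[email protected]", "password": "secret123", "name": "Test"
#     })
#     serializer.is_valid(raise_exception=True)
#     user = serializer.save()          # calls create() and hashes the password
#
#     token_serializer = AuthTokenSerializer(
#         data={"email": "[email protected]", "password": "secret123"},
#         context={"request": request},  # ``request`` comes from the view
#     )
#     token_serializer.is_valid(raise_exception=True)
#     user = token_serializer.validated_data["user"]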
|
py | 1a5103ba0c3f49a51bc9a9ccbb76ce819d25d5e5 | from __future__ import absolute_import
import unittest
from flexmock import flexmock
from pony.pony import StandupPonyPlugin
class BaseTest(unittest.TestCase):
def setUp(self):
self.bot = StandupPonyPlugin(
plugin_config={
'db_file': ''
},
slack_client=flexmock(server=flexmock())
)
self.slack = flexmock()
|
py | 1a510401ab60a2a92ac8a2e614fcbb5974d0503c | #!/usr/bin/python
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from pre_ingestion import csv_tranform,csv_iso_collection,geo_helper,par,validate_csv
dirname = os.path.dirname(__file__).replace("test","test_data")
# 1. input: updated csv files - modified metadata
# 2. write to: work directory => _temp.tif.xml
# 3. transform to: a) ISO19139 directory, b) source data directory
# 4. depends on arcpy; in a local directory (without ArcPy) no ISO19139 files are generated
def main():
try:
process_path = os.path.join(dirname,"raster_data","raster_output")
valid_updated_csv = validate_csv.ValidateCSV(process_path)
if valid_updated_csv.updated_csv_files_existed():
if valid_updated_csv.updated_csv_files_valid():
updated_csv_files = geo_helper.GeoHelper.dest_csv_files_updated(process_path)
csv_collection = csv_iso_collection.CsvIsoCollection(updated_csv_files).csv_collection()
csvtransform = csv_tranform.CsvTransform(csv_collection,process_path)
csvtransform.transform_iso19139()
        if os.name == "nt":
            # arcpy is only importable inside an ArcGIS Python install, hence
            # the import at the point of use.
            import arcpy
            arcpy.RefreshCatalog(output_iso19139_dir)
except Exception, e:
txt = "Code exception: {0} ; {1}".format(__file__,str(e))
geo_helper.GeoHelper.arcgis_message(txt)
if __name__ == '__main__':
main()
|
py | 1a51046de5cc324ba6adc4ef8ee15ac43019f9bd | from typing import Union, Tuple, Optional
from torch_geometric.typing import (OptPairTensor, Adj, Size, NoneType,
OptTensor)
import torch
from torch import Tensor
import torch.nn.functional as F
from torch.nn import Parameter, Linear
from torch_sparse import SparseTensor, set_diag
from torch_geometric.nn.conv import MessagePassing
from torch_geometric.utils import remove_self_loops, add_self_loops, softmax
from ..inits import glorot, zeros
class GATConvGI(MessagePassing): # gat with gpool
r"""The graph attentional operator from the `"Graph Attention Networks"
<https://arxiv.org/abs/1710.10903>`_ paper
.. math::
\mathbf{x}^{\prime}_i = \alpha_{i,i}\mathbf{\Theta}\mathbf{x}_{i} +
\sum_{j \in \mathcal{N}(i)} \alpha_{i,j}\mathbf{\Theta}\mathbf{x}_{j},
where the attention coefficients :math:`\alpha_{i,j}` are computed as
.. math::
\alpha_{i,j} =
\frac{
\exp\left(\mathrm{LeakyReLU}\left(\mathbf{a}^{\top}
[\mathbf{\Theta}\mathbf{x}_i \, \Vert \, \mathbf{\Theta}\mathbf{x}_j]
\right)\right)}
{\sum_{k \in \mathcal{N}(i) \cup \{ i \}}
\exp\left(\mathrm{LeakyReLU}\left(\mathbf{a}^{\top}
[\mathbf{\Theta}\mathbf{x}_i \, \Vert \, \mathbf{\Theta}\mathbf{x}_k]
\right)\right)}.
Args:
in_channels (int or tuple): Size of each input sample. A tuple
corresponds to the sizes of source and target dimensionalities.
out_channels (int): Size of each output sample.
heads (int, optional): Number of multi-head-attentions.
(default: :obj:`1`)
concat (bool, optional): If set to :obj:`False`, the multi-head
attentions are averaged instead of concatenated.
(default: :obj:`True`)
negative_slope (float, optional): LeakyReLU angle of the negative
slope. (default: :obj:`0.2`)
dropout (float, optional): Dropout probability of the normalized
attention coefficients which exposes each node to a stochastically
sampled neighborhood during training. (default: :obj:`0`)
add_self_loops (bool, optional): If set to :obj:`False`, will not add
self-loops to the input graph. (default: :obj:`True`)
bias (bool, optional): If set to :obj:`False`, the layer will not learn
an additive bias. (default: :obj:`True`)
**kwargs (optional): Additional arguments of
:class:`torch_geometric.nn.conv.MessagePassing`.
"""
_alpha: OptTensor
def __init__(self, in_channels: Union[int, Tuple[int, int]],
out_channels: int, heads: int = 1, concat: bool = True,
negative_slope: float = 0.2, dropout: float = 0.,
add_self_loops: bool = True, bias: bool = True, **kwargs):
kwargs.setdefault('aggr', 'add')
        super(GATConvGI, self).__init__(node_dim=0, **kwargs)
self.in_channels = in_channels
self.out_channels = out_channels
# print('out_channels in gat', out_channels)
# hid in net init 4
        # out_channels in gat 4  # 4 is hid1
        # out_channels in gat 7  # 7 is num_classes
self.heads = heads
self.concat = concat
self.negative_slope = negative_slope
self.dropout = dropout
self.add_self_loops = add_self_loops
if isinstance(in_channels, int):
self.lin_l = Linear(in_channels, heads * out_channels, bias=False)
self.lin_r = self.lin_l
else:
self.lin_l = Linear(in_channels[0], heads * out_channels, False)
self.lin_r = Linear(in_channels[1], heads * out_channels, False)
self.att_l = Parameter(torch.Tensor(1, heads, out_channels))
self.att_r = Parameter(torch.Tensor(1, heads, out_channels))
if bias and concat:
self.bias = Parameter(torch.Tensor(heads * out_channels))
elif bias and not concat:
self.bias = Parameter(torch.Tensor(out_channels))
else:
self.register_parameter('bias', None)
self._alpha = None
self.reset_parameters()
def reset_parameters(self):
glorot(self.lin_l.weight)
glorot(self.lin_r.weight)
glorot(self.att_l)
glorot(self.att_r)
zeros(self.bias)
def forward(self, x: Union[Tensor, OptPairTensor], edge_index: Adj,
size: Size = None, return_attention_weights=None):
# type: (Union[Tensor, OptPairTensor], Tensor, Size, NoneType) -> Tensor # noqa
# type: (Union[Tensor, OptPairTensor], SparseTensor, Size, NoneType) -> Tensor # noqa
# type: (Union[Tensor, OptPairTensor], Tensor, Size, bool) -> Tuple[Tensor, Tuple[Tensor, Tensor]] # noqa
# type: (Union[Tensor, OptPairTensor], SparseTensor, Size, bool) -> Tuple[Tensor, SparseTensor] # noqa
r"""
Args:
return_attention_weights (bool, optional): If set to :obj:`True`,
will additionally return the tuple
:obj:`(edge_index, attention_weights)`, holding the computed
attention weights for each edge. (default: :obj:`None`)
"""
H, C = self.heads, self.out_channels
x_l: OptTensor = None
x_r: OptTensor = None
alpha_l: OptTensor = None
alpha_r: OptTensor = None
if isinstance(x, Tensor):
assert x.dim() == 2, 'Static graphs not supported in `GATConv`.'
x_l = x_r = self.lin_l(x).view(-1, H, C)
alpha_l = (x_l * self.att_l).sum(dim=-1)
alpha_r = (x_r * self.att_r).sum(dim=-1)
else:
x_l, x_r = x[0], x[1]
assert x[0].dim() == 2, 'Static graphs not supported in `GATConv`.'
x_l = self.lin_l(x_l).view(-1, H, C)
alpha_l = (x_l * self.att_l).sum(dim=-1)
if x_r is not None:
x_r = self.lin_r(x_r).view(-1, H, C)
alpha_r = (x_r * self.att_r).sum(dim=-1)
assert x_l is not None
assert alpha_l is not None
if self.add_self_loops:
if isinstance(edge_index, Tensor):
num_nodes = x_l.size(0)
if x_r is not None:
num_nodes = min(num_nodes, x_r.size(0))
if size is not None:
num_nodes = min(size[0], size[1])
edge_index, _ = remove_self_loops(edge_index)
edge_index, _ = add_self_loops(edge_index, num_nodes=num_nodes)
elif isinstance(edge_index, SparseTensor):
edge_index = set_diag(edge_index)
# propagate_type: (x: OptPairTensor, alpha: OptPairTensor)
out = self.propagate(edge_index, x=(x_l, x_r),
                             alpha=(alpha_l, alpha_r), size=size)  # the base class's propagate() calls message()
alpha = self._alpha
self._alpha = None
if self.concat:
out = out.view(-1, self.heads * self.out_channels)
else:
out = out.mean(dim=1)
if self.bias is not None:
out += self.bias
if isinstance(return_attention_weights, bool):
assert alpha is not None
if isinstance(edge_index, Tensor):
return out, (edge_index, alpha)
elif isinstance(edge_index, SparseTensor):
return out, edge_index.set_value(alpha, layout='coo')
else:
return out
def message(self, x_j: Tensor, alpha_j: Tensor, alpha_i: OptTensor,
index: Tensor, ptr: OptTensor,
size_i: Optional[int]) -> Tensor:
alpha = alpha_j if alpha_i is None else alpha_j + alpha_i
alpha = F.leaky_relu(alpha, self.negative_slope)
alpha = softmax(alpha, index, ptr, size_i)
        # 8 and 1 are both num_heads; the sizes show up twice because this is
        # called once during training and once during evaluation in each epoch
# print alpha
# print('alpha size: ', alpha.size())
# alpha size: torch.Size([13264, 8]) conv1
# alpha size: torch.Size([13264, 1]) conv2
# alpha size: torch.Size([13264, 8])
# alpha size: torch.Size([13264, 1])
self._alpha = alpha
alpha = F.dropout(alpha, p=self.dropout, training=self.training)
return x_j * alpha.unsqueeze(-1)
def __repr__(self):
return '{}({}, {}, heads={})'.format(self.__class__.__name__,
self.in_channels,
self.out_channels, self.heads)
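# Illustrative usage sketch (tensors below are made up for demonstration; a
# real graph would come from a torch_geometric ``Data`` object):
#
#     x = torch.randn(4, 16)                      # 4 nodes, 16 features each
#     edge_index = torch.tensor([[0, 1, 2, 3],
#                                [1, 0, 3, 2]])   # 4 directed edges
#     conv = GATConvGI(16, 8, heads=2, concat=True)
#     out = conv(x, edge_index)                   # shape [4, 2 * 8]
#     out, (ei, att) = conv(x, edge_index, return_attention_weights=True)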
|
py | 1a510525cc65e19aa1e0f7aa93ee13d61a45b398 | # -*-coding:Utf-8 -*
# Copyright (c) 2014 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant la commande 'point'."""
from primaires.interpreteur.commande.commande import Commande
from primaires.vehicule.vecteur import get_direction
from secondaires.navigation.constantes import get_longitude_latitude
class CmdPoint(Commande):
"""Commande 'point'"""
def __init__(self):
"""Constructeur de la commande"""
Commande.__init__(self, "point", "bearing")
self.nom_categorie = "navire"
self.aide_courte = "fait le point"
self.aide_longue = \
"Cette commande permet de faire le point sur un navire. " \
"Vous aurez besoin d'avoir un sextant équipé. Faire le " \
"point prend un peu de temps, nécessite un ciel dégagé de " \
"nuages et est affecté par la qualité des instruments " \
"utilisés."
def interpreter(self, personnage, dic_masques):
"""Méthode d'interprétation de commande"""
personnage.agir("fairepoint")
salle = personnage.salle
if not hasattr(salle, "navire") or salle.navire is None or \
salle.navire.etendue is None:
personnage << "|err|Vous n'êtes pas sur un navire.|ff|"
return
navire = salle.navire
sextant = None
for objet in personnage.equipement.equipes:
if objet.est_de_type("sextant"):
sextant = objet
break
if not sextant:
personnage << "|err|Vous n'avez pas de sextant équipé.|ff|"
return
if salle.interieur:
personnage << "|err|Vous ne pouvez faire le point d'ici.|ff|"
return
else:
perturbation = importeur.meteo.get_perturbation(salle)
if perturbation is not None and perturbation.est_opaque():
personnage << "|err|Vous ne voyez pas le ciel.|ff|"
return
personnage << "Vous étudiez le ciel en utilisant {}.".format(
sextant.get_nom())
personnage.salle.envoyer("{{}} étudie le ciel grâce à {}.".format(
sextant.get_nom()), personnage)
personnage.etats.ajouter("faire_point")
yield 60
if "faire_point" not in personnage.etats:
return
personnage.etats.retirer("faire_point")
x = salle.coords.x
y = salle.coords.y
personnage << "Après calcul, vous obtenez " + get_longitude_latitude(
x, y, sextant.precision) + "."
personnage.salle.envoyer("{{}} baisse {}".format(sextant.get_nom()),
personnage)
|
py | 1a5105613fe77e1ec5c351ae9a2db8fcbe2b6f6d | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import numpy as np
import os
import sys
from observations.util import maybe_download_and_extract
def nile(path):
"""Flow of the River Nile
Measurements of the annual flow of the river Nile at Aswan (formerly
`Assuan`), 1871–1970, in *10^8 m^3*, “with apparent changepoint near
1898” (Cobb(1978), Table 1, p.249).
A time series of length 100.
Durbin, J. and Koopman, S. J. (2001) *Time Series Analysis by State
Space Methods.* Oxford University Press.
http://www.ssfpack.com/DKbook.html
Args:
path: str.
Path to directory which either stores file or otherwise file will
be downloaded and extracted there.
Filename is `nile.csv`.
Returns:
Tuple of np.ndarray `x_train` with 100 rows and 2 columns and
dictionary `metadata` of column headers (feature names).
"""
import pandas as pd
path = os.path.expanduser(path)
filename = 'nile.csv'
if not os.path.exists(os.path.join(path, filename)):
url = 'http://dustintran.com/data/r/datasets/Nile.csv'
maybe_download_and_extract(path, url,
save_file_name='nile.csv',
resume=False)
data = pd.read_csv(os.path.join(path, filename), index_col=0,
parse_dates=True)
x_train = data.values
metadata = {'columns': data.columns}
return x_train, metadata
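# Illustrative usage sketch (the path is an example; the CSV is downloaded on
# the first call):
#
#     x_train, metadata = nile("~/data")
#     print(x_train.shape)         # (100, 2), as described in the docstring
#     print(metadata["columns"])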
|
py | 1a5106c302681eedd333b738be53c0b59f3a8594 | import logging
import os
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
import torch
import torch.distributed as dist
from monai.config import print_config
from monai.handlers import (
CheckpointSaver,
LrScheduleHandler,
MeanDice,
StatsHandler,
ValidationHandler,
)
from monai.inferers import SimpleInferer, SlidingWindowInferer
from monai.losses import DiceCELoss
from monai.utils import set_determinism
from torch.nn.parallel import DistributedDataParallel
from create_dataset import get_data
from create_network import get_network
from evaluator import DynUNetEvaluator
from task_params import data_loader_params, patch_size
from trainer import DynUNetTrainer
def validation(args):
# load hyper parameters
task_id = args.task_id
sw_batch_size = args.sw_batch_size
tta_val = args.tta_val
window_mode = args.window_mode
eval_overlap = args.eval_overlap
multi_gpu_flag = args.multi_gpu
local_rank = args.local_rank
amp = args.amp
# produce the network
checkpoint = args.checkpoint
val_output_dir = "./runs_{}_fold{}_{}/".format(task_id, args.fold, args.expr_name)
if multi_gpu_flag:
dist.init_process_group(backend="nccl", init_method="env://")
device = torch.device(f"cuda:{local_rank}")
torch.cuda.set_device(device)
else:
device = torch.device("cuda")
properties, val_loader = get_data(args, mode="validation")
net = get_network(properties, task_id, val_output_dir, checkpoint)
net = net.to(device)
if multi_gpu_flag:
net = DistributedDataParallel(
module=net, device_ids=[device], find_unused_parameters=True
)
n_classes = len(properties["labels"])
net.eval()
evaluator = DynUNetEvaluator(
device=device,
val_data_loader=val_loader,
network=net,
n_classes=n_classes,
inferer=SlidingWindowInferer(
roi_size=patch_size[task_id],
sw_batch_size=sw_batch_size,
overlap=eval_overlap,
mode=window_mode,
),
post_transform=None,
key_val_metric={
"val_mean_dice": MeanDice(
include_background=False,
output_transform=lambda x: (x["pred"], x["label"]),
)
},
additional_metrics=None,
amp=amp,
tta_val=tta_val,
)
evaluator.run()
if local_rank == 0:
print(evaluator.state.metrics)
results = evaluator.state.metric_details["val_mean_dice"]
if n_classes > 2:
for i in range(n_classes - 1):
print(
"mean dice for label {} is {}".format(i + 1, results[:, i].mean())
)
def train(args):
# load hyper parameters
task_id = args.task_id
fold = args.fold
val_output_dir = "./runs_{}_fold{}_{}/".format(task_id, fold, args.expr_name)
log_filename = "nnunet_task{}_fold{}.log".format(task_id, fold)
log_filename = os.path.join(val_output_dir, log_filename)
interval = args.interval
learning_rate = args.learning_rate
max_epochs = args.max_epochs
multi_gpu_flag = args.multi_gpu
amp_flag = args.amp
lr_decay_flag = args.lr_decay
sw_batch_size = args.sw_batch_size
tta_val = args.tta_val
batch_dice = args.batch_dice
window_mode = args.window_mode
eval_overlap = args.eval_overlap
local_rank = args.local_rank
determinism_flag = args.determinism_flag
determinism_seed = args.determinism_seed
if determinism_flag:
set_determinism(seed=determinism_seed)
if local_rank == 0:
print("Using deterministic training.")
# transforms
train_batch_size = data_loader_params[task_id]["batch_size"]
if multi_gpu_flag:
dist.init_process_group(backend="nccl", init_method="env://")
device = torch.device(f"cuda:{local_rank}")
torch.cuda.set_device(device)
else:
device = torch.device("cuda")
properties, val_loader = get_data(args, mode="validation")
_, train_loader = get_data(args, batch_size=train_batch_size, mode="train")
# produce the network
checkpoint = args.checkpoint
net = get_network(properties, task_id, val_output_dir, checkpoint)
net = net.to(device)
if multi_gpu_flag:
net = DistributedDataParallel(
module=net, device_ids=[device], find_unused_parameters=True
)
optimizer = torch.optim.SGD(
net.parameters(),
lr=learning_rate,
momentum=0.99,
weight_decay=3e-5,
nesterov=True,
)
scheduler = torch.optim.lr_scheduler.LambdaLR(
optimizer, lr_lambda=lambda epoch: (1 - epoch / max_epochs) ** 0.9
)
# produce evaluator
val_handlers = [
StatsHandler(output_transform=lambda x: None),
CheckpointSaver(
save_dir=val_output_dir, save_dict={"net": net}, save_key_metric=True
),
]
evaluator = DynUNetEvaluator(
device=device,
val_data_loader=val_loader,
network=net,
n_classes=len(properties["labels"]),
inferer=SlidingWindowInferer(
roi_size=patch_size[task_id],
sw_batch_size=sw_batch_size,
overlap=eval_overlap,
mode=window_mode,
),
post_transform=None,
key_val_metric={
"val_mean_dice": MeanDice(
include_background=False,
output_transform=lambda x: (x["pred"], x["label"]),
)
},
val_handlers=val_handlers,
amp=amp_flag,
tta_val=tta_val,
)
# produce trainer
loss = DiceCELoss(to_onehot_y=True, softmax=True, batch=batch_dice)
train_handlers = []
if lr_decay_flag:
train_handlers += [LrScheduleHandler(lr_scheduler=scheduler, print_lr=True)]
train_handlers += [
ValidationHandler(validator=evaluator, interval=interval, epoch_level=True),
StatsHandler(tag_name="train_loss", output_transform=lambda x: x["loss"]),
]
trainer = DynUNetTrainer(
device=device,
max_epochs=max_epochs,
train_data_loader=train_loader,
network=net,
optimizer=optimizer,
loss_function=loss,
inferer=SimpleInferer(),
post_transform=None,
key_train_metric=None,
train_handlers=train_handlers,
amp=amp_flag,
)
if local_rank > 0:
evaluator.logger.setLevel(logging.WARNING)
trainer.logger.setLevel(logging.WARNING)
logger = logging.getLogger()
formatter = logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
# Setup file handler
fhandler = logging.FileHandler(log_filename)
fhandler.setLevel(logging.INFO)
fhandler.setFormatter(formatter)
logger.addHandler(fhandler)
if not multi_gpu_flag:
chandler = logging.StreamHandler()
chandler.setLevel(logging.INFO)
chandler.setFormatter(formatter)
logger.addHandler(chandler)
logger.setLevel(logging.INFO)
trainer.run()
if __name__ == "__main__":
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument("-fold", "--fold", type=int, default=0, help="0-5")
parser.add_argument(
"-task_id", "--task_id", type=str, default="04", help="task 01 to 10"
)
parser.add_argument(
"-root_dir",
"--root_dir",
type=str,
default="/workspace/data/medical/",
help="dataset path",
)
parser.add_argument(
"-expr_name",
"--expr_name",
type=str,
default="expr",
help="the suffix of the experiment's folder",
)
parser.add_argument(
"-datalist_path",
"--datalist_path",
type=str,
default="config/",
)
parser.add_argument(
"-train_num_workers",
"--train_num_workers",
type=int,
default=4,
help="the num_workers parameter of training dataloader.",
)
parser.add_argument(
"-val_num_workers",
"--val_num_workers",
type=int,
default=1,
help="the num_workers parameter of validation dataloader.",
)
parser.add_argument(
"-interval",
"--interval",
type=int,
default=5,
help="the validation interval under epoch level.",
)
parser.add_argument(
"-eval_overlap",
"--eval_overlap",
type=float,
default=0.5,
help="the overlap parameter of SlidingWindowInferer.",
)
parser.add_argument(
"-sw_batch_size",
"--sw_batch_size",
type=int,
default=4,
help="the sw_batch_size parameter of SlidingWindowInferer.",
)
parser.add_argument(
"-window_mode",
"--window_mode",
type=str,
default="gaussian",
choices=["constant", "gaussian"],
help="the mode parameter for SlidingWindowInferer.",
)
parser.add_argument(
"-num_samples",
"--num_samples",
type=int,
default=3,
help="the num_samples parameter of RandCropByPosNegLabeld.",
)
parser.add_argument(
"-pos_sample_num",
"--pos_sample_num",
type=int,
default=1,
help="the pos parameter of RandCropByPosNegLabeld.",
)
parser.add_argument(
"-neg_sample_num",
"--neg_sample_num",
type=int,
default=1,
help="the neg parameter of RandCropByPosNegLabeld.",
)
parser.add_argument(
"-cache_rate",
"--cache_rate",
type=float,
default=1.0,
help="the cache_rate parameter of CacheDataset.",
)
parser.add_argument("-learning_rate", "--learning_rate", type=float, default=1e-2)
parser.add_argument(
"-max_epochs",
"--max_epochs",
type=int,
default=1000,
help="number of epochs of training.",
)
parser.add_argument(
"-mode", "--mode", type=str, default="train", choices=["train", "val"]
)
parser.add_argument(
"-checkpoint",
"--checkpoint",
type=str,
default=None,
help="the filename of weights.",
)
parser.add_argument(
"-amp",
"--amp",
type=bool,
default=False,
help="whether to use automatic mixed precision.",
)
parser.add_argument(
"-lr_decay",
"--lr_decay",
type=bool,
default=False,
help="whether to use learning rate decay.",
)
parser.add_argument(
"-tta_val",
"--tta_val",
type=bool,
default=False,
help="whether to use test time augmentation.",
)
parser.add_argument(
"-batch_dice",
"--batch_dice",
type=bool,
default=False,
help="the batch parameter of DiceCELoss.",
)
parser.add_argument(
"-determinism_flag", "--determinism_flag", type=bool, default=False
)
parser.add_argument(
"-determinism_seed",
"--determinism_seed",
type=int,
default=0,
help="the seed used in deterministic training",
)
parser.add_argument(
"-multi_gpu",
"--multi_gpu",
type=bool,
default=False,
help="whether to use multiple GPUs for training.",
)
parser.add_argument("-local_rank", "--local_rank", type=int, default=0)
args = parser.parse_args()
if args.local_rank == 0:
print_config()
if args.mode == "train":
train(args)
elif args.mode == "val":
validation(args)
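# Illustrative invocations (assuming this script is saved as ``train.py``;
# paths, fold, and checkpoint name are examples only):
#
#   python train.py -fold 0 -task_id 04 -root_dir /workspace/data/medical/ \
#       -expr_name baseline -mode train -max_epochs 1000 -lr_decay True
#
#   python train.py -fold 0 -task_id 04 -root_dir /workspace/data/medical/ \
#       -expr_name baseline -mode val -checkpoint checkpoint_key_metric=0.9.pt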
|
py | 1a5107377a47c34feb64d4a82d4ef030076be6cb | import asyncio
import pickle
from time import time
from typing import FrozenSet, Optional
import aiomcache
import morcilla
from sqlalchemy import and_, select
from athenian.api.cache import cached, middle_term_exptime, short_term_exptime
from athenian.api.models.metadata.github import Bot
from athenian.api.models.state.models import Team
class Bots:
"""Lazy loader of the set of bot logins."""
def __init__(self):
"""Initialize a new instance of the Bots class."""
self._bots = None # type: Optional[FrozenSet[str]]
self._timestamp = time()
self._lock = None # type: Optional[asyncio.Lock]
async def _fetch(self, mdb: morcilla.Database) -> None:
self._bots = frozenset(r[0] for r in await mdb.fetch_all(select([Bot.login])))
self._timestamp = time()
@cached(
exptime=short_term_exptime,
serialize=pickle.dumps,
deserialize=pickle.loads,
key=lambda account, **_: (account,),
)
async def __call__(self,
account: int,
mdb: morcilla.Database,
sdb: morcilla.Database,
cache: Optional[aiomcache.Client],
) -> FrozenSet[str]:
"""
Return the bot logins.
There are two parts: global bots in mdb and local bots in the Bots team in sdb.
"""
if self._bots is None or time() - self._timestamp >= middle_term_exptime:
if self._lock is None:
# we don't run multi-threaded
self._lock = asyncio.Lock()
async with self._lock:
if self._bots is None or time() - self._timestamp >= middle_term_exptime:
await self._fetch(mdb)
extra = await sdb.fetch_val(select([Team.members]).where(and_(
Team.owner_id == account,
Team.name == Team.BOTS,
)))
if extra is None:
return self._bots
return self._bots.union(u.rsplit("/", 1)[1] for u in extra)
bots = Bots()
del Bots # yes, don't allow it to be used directly
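# Illustrative usage sketch (``account_id``, ``request`` and ``prs`` are
# hypothetical; the databases and cache come from the surrounding request
# context):
#
#     logins = await bots(account_id, request.mdb, request.sdb, request.cache)
#     human_prs = [pr for pr in prs if pr.user_login not in logins]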
|
py | 1a510805023c4b941e7645d14e710f81d5d95dea | from datetime import timedelta
from django.test import TestCase
from django.utils import timezone
from ..forms import InvoiceForm
class InvoiceFormTestCase(TestCase):
def setUp(self):
self.form_class = InvoiceForm
def test_clean_due_date_valid_data(self):
now = timezone.now()
data = {"issue_date": now, "due_date": now + timedelta(days=1)}
form = self.form_class(data)
form.is_valid()
self.assertNotIn("due_date", form.errors)
def test_clean_due_date_invalid_data_same_issue_date(self):
now = timezone.now()
data = {"issue_date": now, "due_date": now}
form = self.form_class(data)
form.is_valid()
self.assertIn("due_date", form.errors)
def test_clean_due_date_invalid_data_due_date_is_earlier_than_issue_date(self):
now = timezone.now()
data = {"issue_date": now, "due_date": now - timedelta(days=1)}
form = self.form_class(data)
form.is_valid()
self.assertIn("due_date", form.errors)
|
py | 1a510857529d792f3d4367a341c527461fdd65dc | ##########################################################################
#
# Copyright (c) 2018, Alex Fuller. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import IECore
import Gaffer
import GafferCycles
def __visibilitySummary( plug ) :
info = []
for childName in ( "camera", "diffuse", "glossy", "transmission", "shadow", "scatter" ) :
if plug[childName + "Visibility"]["enabled"].getValue() :
info.append( IECore.CamelCase.toSpaced( childName ) + ( " On" if plug[childName + "Visibility"]["value"].getValue() else " Off" ) )
return ", ".join( info )
def __renderingSummary( plug ) :
info = []
for childName in ( "useHoldout", "isShadowCatcher", "color", "dupliGenerated", "dupliUV", "lightGroup" ) :
if plug[childName]["enabled"].getValue() :
info.append( IECore.CamelCase.toSpaced( childName ) + ( " On" if plug[childName]["value"].getValue() else " Off" ) )
return ", ".join( info )
def __subdivisionSummary( plug ) :
info = []
for childName in ( "maxLevel", "dicingScale" ) :
if plug[childName]["enabled"].getValue() :
info.append( IECore.CamelCase.toSpaced( childName ) + ( " On" if plug[childName]["value"].getValue() else " Off" ) )
return ", ".join( info )
def __volumeSummary( plug ) :
info = []
for childName in ( "volumeClipping", "volumeStepSize", "volumeObjectSpace" ) :
if plug[childName]["enabled"].getValue() :
info.append( IECore.CamelCase.toSpaced( childName ) + ( " On" if plug[childName]["value"].getValue() else " Off" ) )
return ", ".join( info )
def __objectSummary( plug ) :
info = []
if plug["assetName"]["enabled"].getValue() :
info.append( IECore.CamelCase.toSpaced( "assetName" ) + ( " On" if plug["assetName"]["value"].getValue() else " Off" ) )
return ", ".join( info )
def __shaderSummary( plug ) :
info = []
for childName in ( "useMis", "useTransparentShadow", "heterogeneousVolume", "volumeSamplingMethod", "volumeInterpolationMethod", "volumeStepRate", "displacementMethod" ) :
if plug[childName]["enabled"].getValue() :
info.append( IECore.CamelCase.toSpaced( childName ) + ( " On" if plug[childName]["value"].getValue() else " Off" ) )
return ", ".join( info )
Gaffer.Metadata.registerNode(
GafferCycles.CyclesAttributes,
"description",
"""
Applies Cycles attributes to objects in the scene.
""",
plugs = {
# Sections
"attributes" : [
"layout:section:Visibility:summary", __visibilitySummary,
"layout:section:Rendering:summary", __renderingSummary,
"layout:section:Subdivision:summary", __subdivisionSummary,
"layout:section:Volume:summary", __volumeSummary,
"layout:section:Object:summary", __objectSummary,
"layout:section:Shader:summary", __shaderSummary,
],
# Visibility
"attributes.cameraVisibility" : [
"description",
"""
Whether or not the object is visible to camera
rays. To hide an object completely, use the
visibility settings on the StandardAttributes
node instead.
""",
"layout:section", "Visibility",
"label", "Camera",
],
"attributes.diffuseVisibility" : [
"description",
"""
Whether or not the object is visible to diffuse
rays.
""",
"layout:section", "Visibility",
"label", "Diffuse",
],
"attributes.glossyVisibility" : [
"description",
"""
Whether or not the object is visible in
glossy rays.
""",
"layout:section", "Visibility",
"label", "Glossy",
],
"attributes.transmissionVisibility" : [
"description",
"""
Whether or not the object is visible in
transmission.
""",
"layout:section", "Visibility",
"label", "Transmission",
],
"attributes.shadowVisibility" : [
"description",
"""
Whether or not the object is visible to shadow
rays - whether it casts shadows or not.
""",
"layout:section", "Visibility",
"label", "Shadow",
],
"attributes.scatterVisibility" : [
"description",
"""
Whether or not the object is visible to
scatter rays.
""",
"layout:section", "Visibility",
"label", "Scatter",
],
# Rendering
"attributes.useHoldout" : [
"description",
"""
Turns the object into a holdout matte.
This only affects primary (camera) rays.
""",
"layout:section", "Rendering",
],
"attributes.isShadowCatcher" : [
"description",
"""
Turns the object into a shadow catcher.
""",
"layout:section", "Rendering",
],
"attributes.shadowTerminatorShadingOffset" : [
"description",
"""
Push the shadow terminator towards the light to hide artifacts on low poly geometry.
""",
"layout:section", "Rendering",
],
"attributes.shadowTerminatorGeometryOffset" : [
"description",
"""
Offset rays from the surface to reduce shadow terminator artifact on low poly geometry. Only affects triangles at grazing angles to light.
""",
"layout:section", "Rendering",
],
"attributes.color" : [
"description",
"""
Set a unique color per-object. This is intended for setting
a unique constant color that can be accessed from an object_info
shader, even if the object is being instanced.
""",
"layout:section", "Rendering",
],
"attributes.dupliGenerated" : [
"description",
"""
Set a unique position offset. Accessible from a texture_coordinate
via the generated output plug and from_dupli enabled.
""",
"layout:section", "Rendering",
],
"attributes.dupliUV" : [
"description",
"""
Set a unique UV offset. Accessible from either a texture_coordinate
or uv_map node via the UV output plug and from_dupli enabled.
""",
"layout:section", "Rendering",
],
"attributes.lightGroup" : [
"description",
"""
Set the lightgroup of an object with emission.
""",
"layout:section", "Rendering",
],
# Subdivision
"attributes.maxLevel" : [
"description",
"""
The max level of subdivision that can be
applied.
""",
"layout:section", "Subdivision",
],
"attributes.dicingScale" : [
"description",
"""
Multiplier for scene dicing rate.
""",
"layout:section", "Subdivision",
],
# Volume
"attributes.volumeClipping" : [
"description",
"""
Value under which voxels are considered empty space to
optimize rendering.
""",
"layout:section", "Volume",
],
"attributes.volumeStepSize" : [
"description",
"""
Distance between volume samples. When zero it is automatically
estimated based on the voxel size.
""",
"layout:section", "Volume",
],
"attributes.volumeObjectSpace" : [
"description",
"""
Specify volume density and step size in object or world space.
By default object space is used, so that the volume opacity and
detail remains the same regardless of object scale.
""",
"layout:section", "Volume",
],
"attributes.assetName" : [
"description",
"""
Asset name for cryptomatte.
""",
"layout:section", "Object",
],
# Shader
"attributes.useMis" : [
"description",
"""
Use multiple importance sampling for this material,
disabling may reduce overall noise for large
objects that emit little light compared to other light sources.
""",
"layout:section", "Shader",
],
"attributes.useTransparentShadow" : [
"description",
"""
Use transparent shadows for this material if it contains a Transparent BSDF,
disabling will render faster but not give accurate shadows.
""",
"layout:section", "Shader",
],
"attributes.heterogeneousVolume" : [
"description",
"""
Disabling this when using volume rendering, assume volume has the same density
everywhere (not using any textures), for faster rendering.
""",
"layout:section", "Shader",
],
"attributes.volumeSamplingMethod" : [
"description",
"""
Sampling method to use for volumes.
""",
"layout:section", "Shader",
],
"attributes.volumeSamplingMethod.value" : [
"preset:Distance", "distance",
"preset:Equiangular", "equiangular",
"preset:Multiple-Importance", "multiple_importance",
"plugValueWidget:type", "GafferUI.PresetsPlugValueWidget",
],
"attributes.volumeInterpolationMethod" : [
"description",
"""
Interpolation method to use for volumes.
""",
"layout:section", "Shader",
],
"attributes.volumeInterpolationMethod.value" : [
"preset:Linear", "linear",
"preset:Cubic", "cubic",
"plugValueWidget:type", "GafferUI.PresetsPlugValueWidget",
],
"attributes.volumeStepRate" : [
"description",
"""
Scale the distance between volume shader samples when rendering the volume
(lower values give more accurate and detailed results, but also increased render time).
""",
"layout:section", "Shader",
],
"attributes.displacementMethod" : [
"description",
"""
Method to use for the displacement.
""",
"layout:section", "Shader",
],
"attributes.displacementMethod.value" : [
"preset:Bump", "bump",
"preset:True", "true",
"preset:Both", "both",
"plugValueWidget:type", "GafferUI.PresetsPlugValueWidget",
],
}
)
if not GafferCycles.withLightGroups :
Gaffer.Metadata.registerValue( GafferCycles.CyclesOptions, "attributes.lightGroup", "plugValueWidget:type", "" )
|
py | 1a51090c05310c088d3df9d0eaeba7f71b8f7dc6 | # BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/master/LICENSE
from __future__ import absolute_import
import sys
import pytest
import numpy
import awkward1
def test_array_slice():
array = awkward1.Array([0.0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9], check_valid=True)
assert awkward1.to_list(array[[5, 2, 2, 3, 9, 0, 1]]) == [5.5, 2.2, 2.2, 3.3, 9.9, 0.0, 1.1]
assert awkward1.to_list(array[numpy.array([5, 2, 2, 3, 9, 0, 1])]) == [5.5, 2.2, 2.2, 3.3, 9.9, 0.0, 1.1]
assert awkward1.to_list(array[awkward1.layout.NumpyArray(numpy.array([5, 2, 2, 3, 9, 0, 1], dtype=numpy.int32))]) == [5.5, 2.2, 2.2, 3.3, 9.9, 0.0, 1.1]
assert awkward1.to_list(array[awkward1.Array(numpy.array([5, 2, 2, 3, 9, 0, 1], dtype=numpy.int32), check_valid=True)]) == [5.5, 2.2, 2.2, 3.3, 9.9, 0.0, 1.1]
assert awkward1.to_list(array[awkward1.Array([5, 2, 2, 3, 9, 0, 1], check_valid=True)]) == [5.5, 2.2, 2.2, 3.3, 9.9, 0.0, 1.1]
assert awkward1.to_list(array[awkward1.layout.NumpyArray(numpy.array([False, False, False, False, False, True, False, True, False, True]))]) == [5.5, 7.7, 9.9]
content = awkward1.layout.NumpyArray(numpy.array([1, 0, 9, 3, 2, 2, 5], dtype=numpy.int64))
index = awkward1.layout.Index64(numpy.array([6, 5, 4, 3, 2, 1, 0], dtype=numpy.int64))
indexedarray = awkward1.layout.IndexedArray64(index, content)
assert awkward1.to_list(array[indexedarray]) == [5.5, 2.2, 2.2, 3.3, 9.9, 0.0, 1.1]
assert awkward1.to_list(array[awkward1.Array(indexedarray, check_valid=True)]) == [5.5, 2.2, 2.2, 3.3, 9.9, 0.0, 1.1]
assert awkward1.to_list(array[awkward1.layout.EmptyArray()]) == []
content0 = awkward1.layout.NumpyArray(numpy.array([5, 2, 2]))
content1 = awkward1.layout.NumpyArray(numpy.array([3, 9, 0, 1]))
tags = awkward1.layout.Index8(numpy.array([0, 0, 0, 1, 1, 1, 1], dtype=numpy.int8))
index2 = awkward1.layout.Index64(numpy.array([0, 1, 2, 0, 1, 2, 3], dtype=numpy.int64))
unionarray = awkward1.layout.UnionArray8_64(tags, index2, [content0, content1])
assert awkward1.to_list(array[unionarray]) == [5.5, 2.2, 2.2, 3.3, 9.9, 0.0, 1.1]
assert awkward1.to_list(array[awkward1.Array(unionarray, check_valid=True)]) == [5.5, 2.2, 2.2, 3.3, 9.9, 0.0, 1.1]
array = awkward1.Array(numpy.array([[0.0, 1.1, 2.2, 3.3, 4.4], [5.5, 6.6, 7.7, 8.8, 9.9]]), check_valid=True)
assert awkward1.to_list(array[awkward1.layout.NumpyArray(numpy.array([[0, 1], [1, 0]])), awkward1.layout.NumpyArray(numpy.array([[2, 4], [3, 3]]))]) == [[2.2, 9.9], [8.8, 3.3]]
assert awkward1.to_list(array[awkward1.layout.NumpyArray(numpy.array([[0, 1], [1, 0]]))]) == [[[0.0, 1.1, 2.2, 3.3, 4.4], [5.5, 6.6, 7.7, 8.8, 9.9]], [[5.5, 6.6, 7.7, 8.8, 9.9], [0.0, 1.1, 2.2, 3.3, 4.4]]]
array = awkward1.Array([{"x": 1, "y": 1.1, "z": [1]}, {"x": 2, "y": 2.2, "z": [2, 2]}, {"x": 3, "y": 3.3, "z": [3, 3, 3]}, {"x": 4, "y": 4.4, "z": [4, 4, 4, 4]}, {"x": 5, "y": 5.5, "z": [5, 5, 5, 5, 5]}], check_valid=True)
    assert awkward1.to_list(array[awkward1.from_iter(["y", "x"], highlevel=False)]) == [{"y": 1.1, "x": 1}, {"y": 2.2, "x": 2}, {"y": 3.3, "x": 3}, {"y": 4.4, "x": 4}, {"y": 5.5, "x": 5}]
def test_new_slices():
content = awkward1.layout.NumpyArray(numpy.array([1, 0, 9, 3, 2, 2, 5], dtype=numpy.int64))
index = awkward1.layout.Index64(numpy.array([6, 5, -1, 3, 2, -1, 0], dtype=numpy.int64))
indexedarray = awkward1.layout.IndexedOptionArray64(index, content)
assert awkward1.to_list(indexedarray) == [5, 2, None, 3, 9, None, 1]
assert awkward1._ext._slice_tostring(indexedarray) == "[missing([0, 1, -1, ..., 3, -1, 4], array([5, 2, 3, 9, 1]))]"
offsets = awkward1.layout.Index64(numpy.array([0, 4, 4, 7], dtype=numpy.int64))
listoffsetarray = awkward1.layout.ListOffsetArray64(offsets, content)
assert awkward1.to_list(listoffsetarray) == [[1, 0, 9, 3], [], [2, 2, 5]]
assert awkward1._ext._slice_tostring(listoffsetarray) == "[jagged([0, 4, 4, 7], array([1, 0, 9, ..., 2, 2, 5]))]"
offsets = awkward1.layout.Index64(numpy.array([1, 4, 4, 6], dtype=numpy.int64))
listoffsetarray = awkward1.layout.ListOffsetArray64(offsets, content)
assert awkward1.to_list(listoffsetarray) == [[0, 9, 3], [], [2, 2]]
assert awkward1._ext._slice_tostring(listoffsetarray) == "[jagged([0, 3, 3, 5], array([0, 9, 3, 2, 2]))]"
starts = awkward1.layout.Index64(numpy.array([1, 99, 5], dtype=numpy.int64))
stops = awkward1.layout.Index64(numpy.array([4, 99, 7], dtype=numpy.int64))
listarray = awkward1.layout.ListArray64(starts, stops, content)
assert awkward1.to_list(listarray) == [[0, 9, 3], [], [2, 5]]
assert awkward1._ext._slice_tostring(listarray) == "[jagged([0, 3, 3, 5], array([0, 9, 3, 2, 5]))]"
def test_missing():
array = awkward1.Array([0.0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9], check_valid=True)
assert awkward1.to_list(array[awkward1.Array([3, 6, None, None, -2, 6], check_valid=True)]) == [3.3, 6.6, None, None, 8.8, 6.6]
content = awkward1.layout.NumpyArray(numpy.array([0.0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9, 10.0, 11.1, 999]))
regulararray = awkward1.layout.RegularArray(content, 4)
assert awkward1.to_list(regulararray) == [[0.0, 1.1, 2.2, 3.3], [4.4, 5.5, 6.6, 7.7], [8.8, 9.9, 10.0, 11.1]]
assert awkward1.to_list(regulararray[awkward1.Array([2, 1, 1, None, -1], check_valid=True)]) == [[8.8, 9.9, 10.0, 11.1], [4.4, 5.5, 6.6, 7.7], [4.4, 5.5, 6.6, 7.7], None, [8.8, 9.9, 10.0, 11.1]]
assert awkward1.to_list(regulararray[:, awkward1.Array([2, 1, 1, None, -1], check_valid=True)]) == [[2.2, 1.1, 1.1, None, 3.3], [6.6, 5.5, 5.5, None, 7.7], [10.0, 9.9, 9.9, None, 11.1]]
assert awkward1.to_list(regulararray[1:, awkward1.Array([2, 1, 1, None, -1], check_valid=True)]) == [[6.6, 5.5, 5.5, None, 7.7], [10.0, 9.9, 9.9, None, 11.1]]
assert awkward1.to_list(regulararray[numpy.ma.MaskedArray([2, 1, 1, 999, -1], [False, False, False, True, False])]) == [[8.8, 9.9, 10.0, 11.1], [4.4, 5.5, 6.6, 7.7], [4.4, 5.5, 6.6, 7.7], None, [8.8, 9.9, 10.0, 11.1]]
assert awkward1.to_list(regulararray[:, numpy.ma.MaskedArray([2, 1, 1, 999, -1], [False, False, False, True, False])]) == [[2.2, 1.1, 1.1, None, 3.3], [6.6, 5.5, 5.5, None, 7.7], [10.0, 9.9, 9.9, None, 11.1]]
assert awkward1.to_list(regulararray[1:, numpy.ma.MaskedArray([2, 1, 1, 999, -1], [False, False, False, True, False])]) == [[6.6, 5.5, 5.5, None, 7.7], [10.0, 9.9, 9.9, None, 11.1]]
content = awkward1.layout.NumpyArray(numpy.array([[0.0, 1.1, 2.2, 3.3], [4.4, 5.5, 6.6, 7.7], [8.8, 9.9, 10.0, 11.1]]))
assert awkward1.to_list(content[awkward1.Array([2, 1, 1, None, -1], check_valid=True)]) == [[8.8, 9.9, 10.0, 11.1], [4.4, 5.5, 6.6, 7.7], [4.4, 5.5, 6.6, 7.7], None, [8.8, 9.9, 10.0, 11.1]]
assert awkward1.to_list(content[:, awkward1.Array([2, 1, 1, None, -1], check_valid=True)]) == [[2.2, 1.1, 1.1, None, 3.3], [6.6, 5.5, 5.5, None, 7.7], [10.0, 9.9, 9.9, None, 11.1]]
assert awkward1.to_list(content[1:, awkward1.Array([2, 1, 1, None, -1], check_valid=True)]) == [[6.6, 5.5, 5.5, None, 7.7], [10.0, 9.9, 9.9, None, 11.1]]
assert awkward1.to_list(content[numpy.ma.MaskedArray([2, 1, 1, 999, -1], [False, False, False, True, False])]) == [[8.8, 9.9, 10.0, 11.1], [4.4, 5.5, 6.6, 7.7], [4.4, 5.5, 6.6, 7.7], None, [8.8, 9.9, 10.0, 11.1]]
assert awkward1.to_list(content[:, numpy.ma.MaskedArray([2, 1, 1, 999, -1], [False, False, False, True, False])]) == [[2.2, 1.1, 1.1, None, 3.3], [6.6, 5.5, 5.5, None, 7.7], [10.0, 9.9, 9.9, None, 11.1]]
assert awkward1.to_list(content[1:, numpy.ma.MaskedArray([2, 1, 1, 999, -1], [False, False, False, True, False])]) == [[6.6, 5.5, 5.5, None, 7.7], [10.0, 9.9, 9.9, None, 11.1]]
content = awkward1.layout.NumpyArray(numpy.array([0.0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9, 10.0, 11.1, 999]))
offsets = awkward1.layout.Index64(numpy.array([0, 4, 8, 12], dtype=numpy.int64))
listoffsetarray = awkward1.layout.ListOffsetArray64(offsets, content)
assert awkward1.to_list(listoffsetarray) == [[0.0, 1.1, 2.2, 3.3], [4.4, 5.5, 6.6, 7.7], [8.8, 9.9, 10.0, 11.1]]
assert awkward1.to_list(listoffsetarray[:, awkward1.Array([2, 1, 1, None, -1], check_valid=True)]) == [[2.2, 1.1, 1.1, None, 3.3], [6.6, 5.5, 5.5, None, 7.7], [10.0, 9.9, 9.9, None, 11.1]]
assert awkward1.to_list(listoffsetarray[1:, awkward1.Array([2, 1, 1, None, -1], check_valid=True)]) == [[6.6, 5.5, 5.5, None, 7.7], [10.0, 9.9, 9.9, None, 11.1]]
assert awkward1.to_list(listoffsetarray[:, numpy.ma.MaskedArray([2, 1, 1, 999, -1], [False, False, False, True, False])]) == [[2.2, 1.1, 1.1, None, 3.3], [6.6, 5.5, 5.5, None, 7.7], [10.0, 9.9, 9.9, None, 11.1]]
assert awkward1.to_list(listoffsetarray[1:, numpy.ma.MaskedArray([2, 1, 1, 999, -1], [False, False, False, True, False])]) == [[6.6, 5.5, 5.5, None, 7.7], [10.0, 9.9, 9.9, None, 11.1]]
def test_bool_missing():
data = [1.1, 2.2, 3.3, 4.4, 5.5]
array = awkward1.layout.NumpyArray(numpy.array(data))
assert awkward1._ext._slice_tostring(awkward1.Array([True, False, None, True, False], check_valid=True)) == "[missing([0, -1, 1], array([0, 3]))]"
assert awkward1._ext._slice_tostring(awkward1.Array([None, None, None], check_valid=True)) == "[missing([-1, -1, -1], array([]))]"
for x1 in [True, False, None]:
for x2 in [True, False, None]:
for x3 in [True, False, None]:
for x4 in [True, False, None]:
for x5 in [True, False, None]:
mask = [x1, x2, x3, x4, x5]
expected = [m if m is None else x for x, m in zip(data, mask) if m is not False]
assert awkward1.to_list(array[awkward1.Array(mask, check_valid=True)]) == expected
def test_bool_missing2():
array = awkward1.Array([0.0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9], check_valid=True)
assert awkward1.to_list(array[awkward1.Array([3, 6, None, None, -2, 6], check_valid=True)]) == [3.3, 6.6, None, None, 8.8, 6.6]
content = awkward1.layout.NumpyArray(numpy.array([0.0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9, 10.0, 11.1, 999]))
regulararray = awkward1.layout.RegularArray(content, 4)
assert awkward1.to_list(regulararray) == [[0.0, 1.1, 2.2, 3.3], [4.4, 5.5, 6.6, 7.7], [8.8, 9.9, 10.0, 11.1]]
assert awkward1.to_list(regulararray[:, awkward1.Array([True, None, False, True], check_valid=True)]) == [[0.0, None, 3.3], [4.4, None, 7.7], [8.8, None, 11.1]]
assert awkward1.to_list(regulararray[1:, awkward1.Array([True, None, False, True], check_valid=True)]) == [[4.4, None, 7.7], [8.8, None, 11.1]]
content = awkward1.layout.NumpyArray(numpy.array([[0.0, 1.1, 2.2, 3.3], [4.4, 5.5, 6.6, 7.7], [8.8, 9.9, 10.0, 11.1]]))
assert awkward1.to_list(content[:, awkward1.Array([True, None, False, True], check_valid=True)]) == [[0.0, None, 3.3], [4.4, None, 7.7], [8.8, None, 11.1]]
assert awkward1.to_list(content[1:, awkward1.Array([True, None, False, True], check_valid=True)]) == [[4.4, None, 7.7], [8.8, None, 11.1]]
content = awkward1.layout.NumpyArray(numpy.array([0.0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9, 10.0, 11.1, 999]))
offsets = awkward1.layout.Index64(numpy.array([0, 4, 8, 12], dtype=numpy.int64))
listoffsetarray = awkward1.layout.ListOffsetArray64(offsets, content)
assert awkward1.to_list(listoffsetarray[:, awkward1.Array([True, None, False, True], check_valid=True)]) == [[0.0, None, 3.3], [4.4, None, 7.7], [8.8, None, 11.1]]
assert awkward1.to_list(listoffsetarray[1:, awkward1.Array([True, None, False, True], check_valid=True)]) == [[4.4, None, 7.7], [8.8, None, 11.1]]
def test_records_missing():
array = awkward1.Array([{"x": 0, "y": 0.0}, {"x": 1, "y": 1.1}, {"x": 2, "y": 2.2}, {"x": 3, "y": 3.3}, {"x": 4, "y": 4.4}, {"x": 5, "y": 5.5}, {"x": 6, "y": 6.6}, {"x": 7, "y": 7.7}, {"x": 8, "y": 8.8}, {"x": 9, "y": 9.9}], check_valid=True)
assert awkward1.to_list(array[awkward1.Array([3, 1, None, 1, 7], check_valid=True)]) == [{"x": 3, "y": 3.3}, {"x": 1, "y": 1.1}, None, {"x": 1, "y": 1.1}, {"x": 7, "y": 7.7}]
array = awkward1.Array([[{"x": 0, "y": 0.0}, {"x": 1, "y": 1.1}, {"x": 2, "y": 2.2}, {"x": 3, "y": 3.3}], [{"x": 4, "y": 4.4}, {"x": 5, "y": 5.5}, {"x": 6, "y": 6.6}, {"x": 7, "y": 7.7}, {"x": 8, "y": 8.8}, {"x": 9, "y": 9.9}]], check_valid=True)
assert awkward1.to_list(array[:, awkward1.Array([1, None, 2, -1], check_valid=True)]) == [[{"x": 1, "y": 1.1}, None, {"x": 2, "y": 2.2}, {"x": 3, "y": 3.3}], [{"x": 5, "y": 5.5}, None, {"x": 6, "y": 6.6}, {"x": 9, "y": 9.9}]]
array = awkward1.Array([{"x": [0, 1, 2, 3], "y": [0.0, 1.1, 2.2, 3.3]}, {"x": [4, 5, 6, 7], "y": [4.4, 5.5, 6.6, 7.7]}, {"x": [8, 9, 10, 11], "y": [8.8, 9.9, 10.0, 11.1]}], check_valid=True)
assert awkward1.to_list(array[:, awkward1.Array([1, None, 2, -1], check_valid=True)]) == [{"x": [1, None, 2, 3], "y": [1.1, None, 2.2, 3.3]}, {"x": [5, None, 6, 7], "y": [5.5, None, 6.6, 7.7]}, {"x": [9, None, 10, 11], "y": [9.9, None, 10.0, 11.1]}]
assert awkward1.to_list(array[1:, awkward1.Array([1, None, 2, -1], check_valid=True)]) == [{"x": [5, None, 6, 7], "y": [5.5, None, 6.6, 7.7]}, {"x": [9, None, 10, 11], "y": [9.9, None, 10.0, 11.1]}]
def test_jagged():
array = awkward1.Array([[1.1, 2.2, 3.3], [], [4.4, 5.5], [6.6], [7.7, 8.8, 9.9]], check_valid=True)
assert awkward1.to_list(array[awkward1.Array([[0, -1], [], [-1, 0], [-1], [1, 1, -2, 0]], check_valid=True)]) == [[1.1, 3.3], [], [5.5, 4.4], [6.6], [8.8, 8.8, 8.8, 7.7]]
def test_double_jagged():
array = awkward1.Array([[[0, 1, 2, 3], [4, 5]], [[6, 7, 8], [9, 10, 11, 12, 13]]], check_valid=True)
assert awkward1.to_list(array[awkward1.Array([[[2, 1, 0], [-1]], [[-1, -2, -3], [2, 1, 1, 3]]], check_valid=True)]) == [[[2, 1, 0], [5]], [[8, 7, 6], [11, 10, 10, 12]]]
content = awkward1.from_iter([[0, 1, 2, 3], [4, 5], [6, 7, 8], [9, 10, 11, 12, 13]], highlevel=False)
regulararray = awkward1.layout.RegularArray(content, 2)
assert awkward1.to_list(regulararray[:, awkward1.Array([[2, 1, 0], [-1]], check_valid=True)]) == [[[2, 1, 0], [5]], [[8, 7, 6], [13]]]
assert awkward1.to_list(regulararray[1:, awkward1.Array([[2, 1, 0], [-1]], check_valid=True)]) == [[[8, 7, 6], [13]]]
offsets = awkward1.layout.Index64(numpy.array([0, 2, 4], dtype=numpy.int64))
listoffsetarray = awkward1.layout.ListOffsetArray64(offsets, content)
assert awkward1.to_list(listoffsetarray[:, awkward1.Array([[2, 1, 0], [-1]], check_valid=True)]) == [[[2, 1, 0], [5]], [[8, 7, 6], [13]]]
assert awkward1.to_list(listoffsetarray[1:, awkward1.Array([[2, 1, 0], [-1]], check_valid=True)]) == [[[8, 7, 6], [13]]]
def test_masked_jagged():
array = awkward1.Array([[1.1, 2.2, 3.3], [], [4.4, 5.5], [6.6], [7.7, 8.8, 9.9]], check_valid=True)
assert awkward1.to_list(array[awkward1.Array([[-1, -2], None, [], None, [-2, 0]], check_valid=True)]) == [[3.3, 2.2], None, [], None, [8.8, 7.7]]
def test_jagged_masked():
array = awkward1.Array([[1.1, 2.2, 3.3], [], [4.4, 5.5], [6.6], [7.7, 8.8, 9.9]], check_valid=True)
assert awkward1.to_list(array[awkward1.Array([[-1, None], [], [None, 0], [None], [1]], check_valid=True)]) == [[3.3, None], [], [None, 4.4], [None], [8.8]]
def test_regular_regular():
content = awkward1.layout.NumpyArray(numpy.arange(2*3*5))
regulararray1 = awkward1.layout.RegularArray(content, 5)
regulararray2 = awkward1.layout.RegularArray(regulararray1, 3)
assert awkward1.to_list(regulararray2[awkward1.Array([[[2], [1, -2], [-1, 2, 0]], [[-3], [-4, 3], [-5, -3, 4]]], check_valid=True)]) == [[[2], [6, 8], [14, 12, 10]], [[17], [21, 23], [25, 27, 29]]]
assert awkward1.to_list(regulararray2[awkward1.Array([[[2], [1, -2], [-1, None, 0]], [[-3], [-4, 3], [-5, None, 4]]], check_valid=True)]) == [[[2], [6, 8], [14, None, 10]], [[17], [21, 23], [25, None, 29]]]
def test_masked_of_jagged_of_whatever():
content = awkward1.layout.NumpyArray(numpy.arange(2*3*5))
regulararray1 = awkward1.layout.RegularArray(content, 5)
regulararray2 = awkward1.layout.RegularArray(regulararray1, 3)
assert awkward1.to_list(regulararray2[awkward1.Array([[[2], None, [-1, 2, 0]], [[-3], None, [-5, -3, 4]]], check_valid=True)]) == [[[2], None, [14, 12, 10]], [[17], None, [25, 27, 29]]]
assert awkward1.to_list(regulararray2[awkward1.Array([[[2], None, [-1, None, 0]], [[-3], None, [-5, None, 4]]], check_valid=True)]) == [[[2], None, [14, None, 10]], [[17], None, [25, None, 29]]]
def test_emptyarray():
content = awkward1.layout.EmptyArray()
offsets = awkward1.layout.Index64(numpy.array([0, 0, 0, 0, 0], dtype=numpy.int64))
listoffsetarray = awkward1.layout.ListOffsetArray64(offsets, content)
assert awkward1.to_list(listoffsetarray) == [[], [], [], []]
assert awkward1.to_list(listoffsetarray[awkward1.Array([[], [], [], []], check_valid=True)]) == [[], [], [], []]
assert awkward1.to_list(listoffsetarray[awkward1.Array([[], [None], [], []], check_valid=True)]) == [[], [None], [], []]
assert awkward1.to_list(listoffsetarray[awkward1.Array([[], [], None, []], check_valid=True)]) == [[], [], None, []]
assert awkward1.to_list(listoffsetarray[awkward1.Array([[], [None], None, []], check_valid=True)]) == [[], [None], None, []]
with pytest.raises(ValueError):
listoffsetarray[awkward1.Array([[], [0], [], []], check_valid=True)]
def test_numpyarray():
array = awkward1.Array([[1.1, 2.2, 3.3], [], [4.4, 5.5]], check_valid=True)
with pytest.raises(ValueError):
array[awkward1.Array([[[], [], []], [], [[], []]], check_valid=True)]
def test_record():
array = awkward1.Array([{"x": [0, 1, 2], "y": [0.0, 1.1, 2.2, 3.3]}, {"x": [3, 4, 5, 6], "y": [4.4, 5.5]}, {"x": [7, 8], "y": [6.6, 7.7, 8.8, 9.9]}], check_valid=True)
assert awkward1.to_list(array[awkward1.Array([[-1, 1], [0, 0, 1], [-1, -2]], check_valid=True)]) == [{"x": [2, 1], "y": [3.3, 1.1]}, {"x": [3, 3, 4], "y": [4.4, 4.4, 5.5]}, {"x": [8, 7], "y": [9.9, 8.8]}]
assert awkward1.to_list(array[awkward1.Array([[-1, 1], [0, 0, None, 1], [-1, -2]], check_valid=True)]) == [{"x": [2, 1], "y": [3.3, 1.1]}, {"x": [3, 3, None, 4], "y": [4.4, 4.4, None, 5.5]}, {"x": [8, 7], "y": [9.9, 8.8]}]
assert awkward1.to_list(array[awkward1.Array([[-1, 1], None, [-1, -2]], check_valid=True)]) == [{"x": [2, 1], "y": [3.3, 1.1]}, None, {"x": [8, 7], "y": [9.9, 8.8]}]
def test_indexedarray():
array = awkward1.from_iter([[0.0, 1.1, 2.2], [3.3, 4.4], [5.5], [6.6, 7.7, 8.8, 9.9]], highlevel=False)
index = awkward1.layout.Index64(numpy.array([3, 2, 1, 0], dtype=numpy.int64))
indexedarray = awkward1.layout.IndexedArray64(index, array)
assert awkward1.to_list(indexedarray) == [[6.6, 7.7, 8.8, 9.9], [5.5], [3.3, 4.4], [0.0, 1.1, 2.2]]
assert awkward1.to_list(indexedarray[awkward1.Array([[0, -1], [0], [], [1, 1]], check_valid=True)]) == [[6.6, 9.9], [5.5], [], [1.1, 1.1]]
assert awkward1.to_list(indexedarray[awkward1.Array([[0, -1], [0], [None], [1, None, 1]], check_valid=True)]) == [[6.6, 9.9], [5.5], [None], [1.1, None, 1.1]]
assert awkward1.to_list(indexedarray[awkward1.Array([[0, -1], [0], None, [1, 1]], check_valid=True)]) == [[6.6, 9.9], [5.5], None, [1.1, 1.1]]
assert awkward1.to_list(indexedarray[awkward1.Array([[0, -1], [0], None, [None]], check_valid=True)]) == [[6.6, 9.9], [5.5], None, [None]]
index = awkward1.layout.Index64(numpy.array([3, 2, 1, 0], dtype=numpy.int64))
indexedarray = awkward1.layout.IndexedOptionArray64(index, array)
assert awkward1.to_list(indexedarray) == [[6.6, 7.7, 8.8, 9.9], [5.5], [3.3, 4.4], [0.0, 1.1, 2.2]]
assert awkward1.to_list(indexedarray[awkward1.Array([[0, -1], [0], [], [1, 1]], check_valid=True)]) == [[6.6, 9.9], [5.5], [], [1.1, 1.1]]
assert awkward1.to_list(indexedarray[awkward1.Array([[0, -1], [0], [None], [1, None, 1]], check_valid=True)]) == [[6.6, 9.9], [5.5], [None], [1.1, None, 1.1]]
assert awkward1.to_list(indexedarray[awkward1.Array([[0, -1], [0], None, []], check_valid=True)]) == [[6.6, 9.9], [5.5], None, []]
assert awkward1.to_list(indexedarray[awkward1.Array([[0, -1], [0], None, [1, None, 1]], check_valid=True)]) == [[6.6, 9.9], [5.5], None, [1.1, None, 1.1]]
def test_indexedarray2():
array = awkward1.from_iter([[0.0, 1.1, 2.2], [3.3, 4.4], [5.5], [6.6, 7.7, 8.8, 9.9]], highlevel=False)
index = awkward1.layout.Index64(numpy.array([3, 2, -1, 0], dtype=numpy.int64))
indexedarray = awkward1.layout.IndexedOptionArray64(index, array)
assert awkward1.to_list(indexedarray) == [[6.6, 7.7, 8.8, 9.9], [5.5], None, [0.0, 1.1, 2.2]]
assert awkward1.to_list(indexedarray[awkward1.Array([[0, -1], [0], None, [1, 1]])]) == [[6.6, 9.9], [5.5], None, [1.1, 1.1]]
def test_indexedarray2b():
array = awkward1.from_iter([[0.0, 1.1, 2.2], [3.3, 4.4], [5.5], [6.6, 7.7, 8.8, 9.9]], highlevel=False)
index = awkward1.layout.Index64(numpy.array([0, -1, 2, 3], dtype=numpy.int64))
indexedarray = awkward1.layout.IndexedOptionArray64(index, array)
assert awkward1.to_list(indexedarray) == [[0.0, 1.1, 2.2], None, [5.5], [6.6, 7.7, 8.8, 9.9]]
assert awkward1.to_list(indexedarray[awkward1.Array([[1, 1], None, [0], [0, -1]])]) == [[1.1, 1.1], None, [5.5], [6.6, 9.9]]
def test_bytemaskedarray2b():
array = awkward1.from_iter([[0.0, 1.1, 2.2], [3.3, 4.4], [5.5], [6.6, 7.7, 8.8, 9.9]], highlevel=False)
mask = awkward1.layout.Index8(numpy.array([0, 1, 0, 0], dtype=numpy.int8))
maskedarray = awkward1.layout.ByteMaskedArray(mask, array, valid_when=False)
assert awkward1.to_list(maskedarray) == [[0.0, 1.1, 2.2], None, [5.5], [6.6, 7.7, 8.8, 9.9]]
assert awkward1.to_list(maskedarray[awkward1.Array([[1, 1], None, [0], [0, -1]])]) == [[1.1, 1.1], None, [5.5], [6.6, 9.9]]
def test_bitmaskedarray2b():
array = awkward1.from_iter([[0.0, 1.1, 2.2], [3.3, 4.4], [5.5], [6.6, 7.7, 8.8, 9.9]], highlevel=False)
mask = awkward1.layout.IndexU8(numpy.array([66], dtype=numpy.uint8))
maskedarray = awkward1.layout.BitMaskedArray(mask, array, valid_when=False, length=4, lsb_order=True) # lsb_order is irrelevant in this example
assert awkward1.to_list(maskedarray) == [[0.0, 1.1, 2.2], None, [5.5], [6.6, 7.7, 8.8, 9.9]]
assert awkward1.to_list(maskedarray[awkward1.Array([[1, 1], None, [0], [0, -1]])]) == [[1.1, 1.1], None, [5.5], [6.6, 9.9]]
def test_indexedarray3():
array = awkward1.Array([0.0, 1.1, 2.2, None, 4.4, None, None, 7.7])
assert awkward1.to_list(array[awkward1.Array([4, 3, 2])]) == [4.4, None, 2.2]
assert awkward1.to_list(array[awkward1.Array([4, 3, 2, None, 1])]) == [4.4, None, 2.2, None, 1.1]
array = awkward1.Array([[0.0, 1.1, None, 2.2], [3.3, None, 4.4], [5.5]])
assert awkward1.to_list(array[awkward1.Array([[3, 2, 2, 1], [1, 2], []])]) == [[2.2, None, None, 1.1], [None, 4.4], []]
array = awkward1.Array([[0.0, 1.1, 2.2], [3.3, 4.4], None, [5.5]])
assert awkward1.to_list(array[awkward1.Array([3, 2, 1])]) == [[5.5], None, [3.3, 4.4]]
assert awkward1.to_list(array[awkward1.Array([3, 2, 1, None, 0])]) == [[5.5], None, [3.3, 4.4], None, [0.0, 1.1, 2.2]]
assert (awkward1.to_list(array[awkward1.Array([[2, 1, 1, 0], [1], None, [0]])])) == [[2.2, 1.1, 1.1, 0.0], [4.4], None, [5.5]]
assert awkward1.to_list(array[awkward1.Array([[2, 1, 1, 0], None, [1], [0]])]) == [[2.2, 1.1, 1.1, 0], None, None, [5.5]]
with pytest.raises(ValueError):
array[awkward1.Array([[2, 1, 1, 0], None, [1], [0], None])]
def test_sequential():
array = awkward1.Array(numpy.arange(2*3*5).reshape(2, 3, 5).tolist(), check_valid=True)
assert awkward1.to_list(array[awkward1.Array([[2, 1, 0], [2, 1, 0]], check_valid=True)]) == [[[10, 11, 12, 13, 14], [5, 6, 7, 8, 9], [0, 1, 2, 3, 4]], [[25, 26, 27, 28, 29], [20, 21, 22, 23, 24], [15, 16, 17, 18, 19]]]
assert awkward1.to_list(array[awkward1.Array([[2, 1, 0], [2, 1, 0]], check_valid=True), :2]) == [[[10, 11], [5, 6], [0, 1]], [[25, 26], [20, 21], [15, 16]]]
def test_union():
one = awkward1.from_iter([[1.1, 2.2, 3.3], [], [4.4, 5.5]], highlevel=False)
two = awkward1.from_iter([[6.6], [7.7, 8.8], [], [9.9, 10.0, 11.1, 12.2]], highlevel=False)
tags = awkward1.layout.Index8(numpy.array([0, 0, 0, 1, 1, 1, 1], dtype=numpy.int8))
index = awkward1.layout.Index64(numpy.array([0, 1, 2, 0, 1, 2, 3], dtype=numpy.int64))
unionarray = awkward1.layout.UnionArray8_64(tags, index, [one, two])
assert awkward1.to_list(unionarray) == [[1.1, 2.2, 3.3], [], [4.4, 5.5], [6.6], [7.7, 8.8], [], [9.9, 10.0, 11.1, 12.2]]
assert awkward1.to_list(unionarray[awkward1.Array([[0, -1], [], [1, 1], [], [-1], [], [1, -2, -1]], check_valid=True)]) == [[1.1, 3.3], [], [5.5, 5.5], [], [8.8], [], [10.0, 11.1, 12.2]]
def test_python_to_jaggedslice():
assert awkward1._ext._slice_tostring([[1, 2, 3], [], [4, 5]]) == "[jagged([0, 3, 3, 5], array([1, 2, 3, 4, 5]))]"
assert awkward1._ext._slice_tostring([[1, 2], [3, 4], [5, 6]]) == "[array([[1, 2], [3, 4], [5, 6]])]"
def test_jagged_mask():
array = awkward1.Array([[1.1, 2.2, 3.3], [], [4.4, 5.5], [6.6], [7.7, 8.8, 9.9]], check_valid=True)
assert awkward1.to_list(array[[[True, True, True], [], [True, True], [True], [True, True, True]]]) == [[1.1, 2.2, 3.3], [], [4.4, 5.5], [6.6], [7.7, 8.8, 9.9]]
assert awkward1.to_list(array[[[False, True, True], [], [True, True], [True], [True, True, True]]]) == [[2.2, 3.3], [], [4.4, 5.5], [6.6], [7.7, 8.8, 9.9]]
assert awkward1.to_list(array[[[True, False, True], [], [True, True], [True], [True, True, True]]]) == [[1.1, 3.3], [], [4.4, 5.5], [6.6], [7.7, 8.8, 9.9]]
assert awkward1.to_list(array[[[True, True, True], [], [False, True], [True], [True, True, True]]]) == [[1.1, 2.2, 3.3], [], [5.5], [6.6], [7.7, 8.8, 9.9]]
assert awkward1.to_list(array[[[True, True, True], [], [False, False], [True], [True, True, True]]]) == [[1.1, 2.2, 3.3], [], [], [6.6], [7.7, 8.8, 9.9]]
def test_jagged_missing_mask():
array = awkward1.Array([[1.1, 2.2, 3.3], [], [4.4, 5.5]], check_valid=True)
assert awkward1.to_list(array[[[True, True, True], [], [True, True]]]) == [[1.1, 2.2, 3.3], [], [4.4, 5.5]]
assert awkward1.to_list(array[[[True, False, True], [], [True, True]]]) == [[1.1, 3.3], [], [4.4, 5.5]]
assert awkward1.to_list(array[[[True, None, True], [], [True, True]]]) == [[1.1, None, 3.3], [], [4.4, 5.5]]
assert awkward1.to_list(array[[[True, None, False], [], [True, True]]]) == [[1.1, None], [], [4.4, 5.5]]
assert awkward1.to_list(array[[[False, None, True], [], [True, True]]]) == [[None, 3.3], [], [4.4, 5.5]]
assert awkward1.to_list(array[[[False, None, False], [], [True, True]]]) == [[None], [], [4.4, 5.5]]
assert awkward1.to_list(array[[[True, True, False], [], [False, True]]]) == [[1.1, 2.2], [], [5.5]]
assert awkward1.to_list(array[[[True, True, None], [], [False, True]]]) == [[1.1, 2.2, None], [], [5.5]]
assert awkward1.to_list(array[[[True, True, False], [None], [False, True]]]) == [[1.1, 2.2], [None], [5.5]]
assert awkward1.to_list(array[[[True, True, False], [], [None, True]]]) == [[1.1, 2.2], [], [None, 5.5]]
assert awkward1.to_list(array[[[True, True, False], [], [True, None]]]) == [[1.1, 2.2], [], [4.4, None]]
assert awkward1.to_list(array[[[True, True, False], [], [False, None]]]) == [[1.1, 2.2], [], [None]]
|
py | 1a510951db9aa714789b01478c9e2075818a31f5 | # Copyright (C) 2020 THL A29 Limited, a Tencent company.
# All rights reserved.
# Licensed under the BSD 3-Clause License (the "License"); you may
# not use this file except in compliance with the License. You may
# obtain a copy of the License at
# https://opensource.org/licenses/BSD-3-Clause
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
# See the AUTHORS file for names of contributors.
import torch
import transformers
import turbo_transformers
import enum
import time
import numpy
class LoadType(enum.Enum):
PYTORCH = "PYTORCH"
PRETRAINED = "PRETRAINED"
NPZ = "NPZ"
def test(loadtype: LoadType, use_cuda: bool):
cfg = transformers.AlbertConfig()
model = transformers.AlbertModel(cfg)
model.eval()
torch.set_grad_enabled(False)
test_device = torch.device('cuda:0') if use_cuda else \
torch.device('cpu:0')
cfg = model.config
# use 4 threads for computing
turbo_transformers.set_num_threads(4)
input_ids = torch.tensor(
([12166, 10699, 16752, 4454], [5342, 16471, 817, 16022]),
dtype=torch.long)
model.to(test_device)
start_time = time.time()
for _ in range(10):
torch_res = model(input_ids)
end_time = time.time()
print("\ntorch time consum: {}".format(end_time - start_time))
# there are three ways to load pretrained model.
if loadtype is LoadType.PYTORCH:
# 1, from a PyTorch model, which has loaded a pretrained model
tt_model = turbo_transformers.AlbertModel.from_torch(model)
else:
raise ("LoadType is not supported")
start_time = time.time()
for _ in range(10):
res = tt_model(input_ids) # sequence_output, pooled_output
end_time = time.time()
print("\nturbo time consum: {}".format(end_time - start_time))
assert (numpy.max(
numpy.abs(res[0].cpu().numpy() - torch_res[0].cpu().numpy())) < 0.1)
if __name__ == "__main__":
test(LoadType.PYTORCH, use_cuda=False)
|
py | 1a5109aa57ce2bb5a3bced5014cfef826e459d34 | # -*- coding: utf-8 -*-
#
# Sentry documentation build configuration file, created by
# sphinx-quickstart on Wed Oct 20 16:21:42 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import datetime
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
#extensions = ['sphinxtogithub']
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Raven'
copyright = u'%s, David Cramer' % datetime.datetime.today().year
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = __import__('pkg_resources').get_distribution('raven').version
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
intersphinx_mapping = {
'http://docs.python.org/2.7': None,
'django': ('http://docs.djangoproject.com/en/dev/', 'http://docs.djangoproject.com/en/dev/_objects/'),
'http://raven.readthedocs.org/en/latest': None
}
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'kr'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = "_static/logo.png"
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Ravendoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Raven.tex', u'Raven Documentation',
u'David Cramer', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'raven', u'Raven Documentation',
[u'David Cramer'], 1)
]
|
py | 1a5109e49f890e0747baa4028f2fb5b3890dbee0 | """
This file offers the methods to automatically retrieve the graph Dictyostelium purpureum.
The graph is automatically retrieved from the STRING repository.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 17:15:48.378633
The undirected graph Dictyostelium purpureum has 7408 nodes and 739904
weighted edges, of which none are self-loops. The graph is dense as it
has a density of 0.02697 and has 22 connected components, where the component
with most nodes has 7362 nodes and the component with the least nodes has
2 nodes. The graph median node degree is 120, the mean node degree is 199.76,
and the node degree mode is 1. The top 5 most central nodes are 5786.XP_003290740.1
(degree 2421), 5786.XP_003289922.1 (degree 1879), 5786.XP_003286088.1 (degree
1648), 5786.XP_003285647.1 (degree 1421) and 5786.XP_003294084.1 (degree
1420).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import DictyosteliumPurpureum
# Then load the graph
graph = DictyosteliumPurpureum()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
        # You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
        # Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def DictyosteliumPurpureum(
directed: bool = False,
verbose: int = 2,
cache_path: str = "graphs/string",
**additional_graph_kwargs: Dict
) -> EnsmallenGraph:
"""Return new instance of the Dictyostelium purpureum graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False,
        Whether to load the graph as directed or undirected.
By default false.
verbose: int = 2,
        Whether to show loading bars during the retrieval and building
of the graph.
cache_path: str = "graphs",
Where to store the downloaded graphs.
additional_graph_kwargs: Dict,
Additional graph kwargs.
Returns
-----------------------
    Instance of Dictyostelium purpureum graph.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 17:15:48.378633
The undirected graph Dictyostelium purpureum has 7408 nodes and 739904
weighted edges, of which none are self-loops. The graph is dense as it
has a density of 0.02697 and has 22 connected components, where the component
with most nodes has 7362 nodes and the component with the least nodes has
2 nodes. The graph median node degree is 120, the mean node degree is 199.76,
and the node degree mode is 1. The top 5 most central nodes are 5786.XP_003290740.1
(degree 2421), 5786.XP_003289922.1 (degree 1879), 5786.XP_003286088.1 (degree
1648), 5786.XP_003285647.1 (degree 1421) and 5786.XP_003294084.1 (degree
1420).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import DictyosteliumPurpureum
# Then load the graph
graph = DictyosteliumPurpureum()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
        # You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
        # Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
return AutomaticallyRetrievedGraph(
graph_name="DictyosteliumPurpureum",
dataset="string",
directed=directed,
verbose=verbose,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
|
py | 1a510a0691f7fdd621edbf0b52034a2b3e5d237e | import sys
import re
import PySimpleGUI as sg
import subprocess
import datetime
from bs4 import BeautifulSoup
import shutil
import openpyxl
def insert_Excel(translatedHtmlFile, checkedHtmlFile, resultsFile):
    # Prepare the Excel file that will hold the results
shutil.copyfile(xlsxTemplate, resultsFile)
    # Open and parse the translated HTML
with open(translatedHtmlFile, encoding='utf-8') as f:
translated = f.read()
soupTranslated = BeautifulSoup(translated, 'html.parser')
translatedList = []
for t in soupTranslated.find_all('tr'):
cols = t.find_all('td')
src = cols[0].get_text()
target = cols[1].get_text()
# print(src, target)
translatedList.append(src + '\t' + target)
    # Open and parse the checked HTML
with open(checkedHtmlFile, encoding='utf-8') as f:
checked = f.read()
soupChecked = BeautifulSoup(checked, 'html.parser')
checkedList = []
for t in soupChecked.find_all('tr'):
cols = t.find_all('td')
src = cols[0].get_text()
target = cols[1].get_text()
checkedList.append(src + '\t' + target)
    # Prepare the Excel workbook
wb = openpyxl.load_workbook(resultsFile)
ws = wb['Sheet1']
    # Fill in the translated text
countT = 2
for i in translatedList:
countStr = str(countT)
src, target =i.split('\t')
judge = '=IF(B'+countStr+'=E'+countStr+',"-","check!")'
srcA = 'A' + countStr
targetB = 'B' + countStr
judgeC = 'C' + countStr
ws[srcA].value = src
ws[targetB].value = target
ws[judgeC].value = judge
countT += 1
    # Fill in the checked text
countC = 2
for i in checkedList:
src, target =i.split('\t')
countStr = str(countC)
srcA = 'D' + countStr
targetB = 'E' + countStr
ws[srcA].value = src
ws[targetB].value = target
countC += 1
    # Close and save the Excel workbook
wb.close()
wb.save(resultsFile)
if __name__ == '__main__':
sg.theme('Dark Blue 3')
layout = [
[sg.Text('xlf file(before):', size=(20, 1)), sg.InputText('', enable_events=True,), sg.FilesBrowse('Add', key='-FILES-', file_types=(('xlf file', '*.xlf'),))],
[sg.Text('xlf file(after):', size=(20, 1)), sg.InputText('', enable_events=True,), sg.FilesBrowse('Add', key='-FILES-', file_types=(('xlf file', '*.xlf'),))],
[sg.Text('xsl file:', size=(20, 1)), sg.InputText('', enable_events=True,), sg.FilesBrowse('Add', key='-FILES-', file_types=(('xsl file', '*.xsl'),))],
[sg.Submit(button_text='Run')]
]
window = sg.Window('xlf2html-saxon', layout)
while True:
event, values = window.read()
        # Exit when the window's [x] button is pressed
if event is None:
break
if event == 'Run':
f_before = values[0]
f_after = values[1]
xsl = values[2]
            # Strip leading and trailing double quotes, if present
f_before = re.sub('^\"', '', f_before)
f_before = re.sub('\"$', '', f_before)
f_after = re.sub('^\"', '', f_after)
f_after = re.sub('\"$', '', f_after)
xsl = re.sub('^\"', '', xsl)
xsl = re.sub('\"$', '', xsl)
            # HTML files to output
f_before_html = re.sub('xlf$', 'html', f_before)
f_after_html = re.sub('xlf$', 'html', f_after)
            if f_before == '':
                sg.popup('Please specify an xlf (before) file.')
                continue
            elif f_after == '':
                sg.popup('Please specify an xlf (after) file.')
                continue
            elif xsl == '':
                sg.popup('Please specify an xsl file.')
                continue
cmd1 = 'java' + ' -jar' + ' D:\\tool\\saxonb9-1-0-8j\\saxon9.jar' + ' -s:' + f_before + ' -xsl:' + xsl + ' -o:' + f_before_html
cmd2 = 'java' + ' -jar' + ' D:\\tool\\saxonb9-1-0-8j\\saxon9.jar' + ' -s:' + f_after + ' -xsl:' + xsl + ' -o:' + f_after_html
res1 = subprocess.check_call(cmd1)
res2 = subprocess.check_call(cmd2)
print(res1)
print(res2)
xlsxTemplate = "xliff_diff.xlsx"
todaydetail = datetime.datetime.today()
            timestamp = todaydetail.strftime("%Y%m%d%H%M%S")
            resultsFile = timestamp + '_' + xlsxTemplate
insert_Excel(f_before_html, f_after_html, resultsFile)
sg.popup('Done!')
window.close()
sys.exit()
|
py | 1a510abc4e9ac64ec2ca00a08a22015206d44d62 | import csv
import json
input_file_name = "C:/Users/cch23/Desktop/창업 아이템/걸어서나눔나눔/파싱/in.csv"
output_file_name = "C:/Users/cch23/Desktop/창업 아이템/걸어서나눔나눔/파싱/in.json"
with open(input_file_name, "r", encoding="utf-8", newline="") as input_file, \
open(output_file_name, "w", encoding="utf-8", newline="") as output_file:
reader = csv.reader(input_file)
    # Read the first row as the col_names list
col_names = next(reader)
    # From the second row on, zip each row with col_names and dump it as JSON
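    # For illustration (assumed sample data, not from the original script): a CSV whose
    # first row is "name,age" and second row is "kim,30" would produce the JSON line
    # {"name": "kim", "age": "30"}.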
for cols in reader:
doc = {col_name: col for col_name, col in zip(col_names, cols)}
print(json.dumps(doc, ensure_ascii=False), file=output_file) |
py | 1a510b938fec1160c11e3049d261d36e905ca196 | """
Experiment Management
"""
from datetime import datetime
from os import pardir
from attrdict import AttrDict
import pathlib
import hashlib
import os
from rl_helper import envhelper
import yaml
class ExperimentManager(object):
def __init__(self,add_env_helper=True) -> None:
super().__init__()
self.saves_root=pathlib.Path("./runs/")
self.exp_time=datetime.now().strftime("%Y%m%d-%H%M%S")
self.env_helper=envhelper()
def init(self,model_name,exp_class,exp_target,comments,sub_id):
        assert len(exp_target) < 24, "exp_target too long (must be under 24 characters)"
assert " " not in exp_target, "exp target should contain no space"
self.model_name=model_name
self.exp_class=exp_class
self.exp_target=exp_target
self.comments=comments
self.sub_id=sub_id
self.config={"model_name":self.model_name,"exp_class":self.exp_class,"exp_target":self.exp_target, "comments":self.comments,"sub_id":sub_id}
self.paras=AttrDict()
@property
def health(self):
        a = [self.model_name, self.exp_class, self.exp_target, self.comments,
             self.sub_id, self.config, self.paras]
        for s in a:
            assert s is not None
return True
def load(self,pth):
pth=pathlib.Path(pth)
assert pth.is_dir(),pth
config_yaml=pth.joinpath("config.yaml")
paras_yaml=pth.joinpath('paras.yaml')
assert config_yaml.is_file()
assert paras_yaml.is_file()
with open(config_yaml, "r") as stream:
self.config=yaml.safe_load(stream)
with open(paras_yaml, "r") as stream:
self.paras=AttrDict(yaml.safe_load(stream))
for k in self.config.keys():
self.__setattr__(k,self.config[k])
assert self.health
def get_exp_hash(self):
hash_seed=self.model_name+self.exp_class+self.exp_target+self.comments+str(self.sub_id)
pkeys=[]
for k in self.paras.keys():
pkeys.append(k)
pkeys.sort()
pkeys_value=[self.paras[k] for k in pkeys]
hash_seed+=str(pkeys)
hash_seed+=str(pkeys_value)
return hashlib.sha1(hash_seed.encode('utf-8')).hexdigest()[:5]
@property
def exp_hash_dir_path(self):
return self.saves_root.joinpath(self.exp_class).joinpath(self.exp_target).joinpath(str(self.sub_id)).joinpath(self.get_exp_hash())
# @property
# def model_save_dir_path(self):
# dir_pth=
# pass
@property
def model_save_pth(self):
return self.exp_hash_dir_path.joinpath("model")
@property
def log_save_dir_pth(self):
return self.exp_hash_dir_path.joinpath("logs")
pass
@property
def paras_save_dir_pth(self):
return self.exp_hash_dir_path.joinpath("paras")
@property
def tensorbord_log_name(self):
return str(self.sub_id)
@property
def paras_dict(self):
d={}
for k in self.paras.keys():
d[k]=self.paras[k]
return d
def add_para(self,k,value):
assert self.paras.get(k,None) is None, "{} has existed in paras".format(k)
self.paras[k]=value
print("Set {} : {}".format(k,value))
def start(self,overwrite=False):
try:
os.makedirs(self.exp_hash_dir_path.__str__(),exist_ok=False)
except:
if not overwrite:
raise NotImplementedError("Error ! Fail to create, You already have this experiment : {}".format(self.exp_hash_dir_path))
os.makedirs(self.paras_save_dir_pth,exist_ok=True)
with open(self.exp_hash_dir_path.joinpath("paras.yaml"), 'w') as outfile:
yaml.dump(self.paras_dict, outfile, default_flow_style=False)
with open(self.exp_hash_dir_path.joinpath("config.yaml"), 'w') as outfile:
yaml.dump(self.config, outfile, default_flow_style=False)
for k in self.paras.keys():
ss="{}_{}".format(k,str(self.paras[k]))
with open(self.paras_save_dir_pth.joinpath(ss), 'w') as outfile:
pass
with open(self.exp_hash_dir_path.joinpath(""+str(self.exp_time)), 'w') as outfile:
pass
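    # Illustrative usage sketch (assumed workflow; not part of the original class):
    #
    #     em = ExperimentManager()
    #     em.init(model_name="dqn", exp_class="atari", exp_target="baseline",
    #             comments="first run", sub_id=0)
    #     em.add_para("lr", 1e-4)
    #     em.start()          # creates runs/<exp_class>/<exp_target>/<sub_id>/<hash>/
    #     em.save_gif(...)    # writes GIFs under the experiment's logs directory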
def save_gif(self,**kargs):
self.env_helper.save_gif(path=self.log_save_dir_pth,**kargs) |
py | 1a510bc8ceb4e69f1c9ea0cb1c517fe7f543275a | import unicodedata
from typing import Optional
from django.utils.translation import gettext as _
from zerver.lib.exceptions import JsonableError
from zerver.models import Stream
# There are 66 Unicode non-characters; see
# https://www.unicode.org/faq/private_use.html#nonchar4
unicode_non_chars = {
chr(x)
for x in list(range(0xFDD0, 0xFDF0)) # FDD0 through FDEF, inclusive
+ list(range(0xFFFE, 0x110000, 0x10000)) # 0xFFFE, 0x1FFFE, ... 0x10FFFE inclusive
+ list(range(0xFFFF, 0x110000, 0x10000)) # 0xFFFF, 0x1FFFF, ... 0x10FFFF inclusive
}
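# Sanity note (illustration, not in the original): 32 codepoints in U+FDD0..U+FDEF plus
# the last two codepoints of each of the 17 planes give 32 + 17 * 2 = 66 non-characters,
# matching the count cited above.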
def check_string_is_printable(var: str) -> Optional[int]:
# Return position (1-indexed!) of the character which is not
# printable, None if no such character is present.
for i, char in enumerate(var):
unicode_character = unicodedata.category(char)
if (unicode_character in ["Cc", "Cs"]) or char in unicode_non_chars:
return i + 1
return None
def check_stream_name(stream_name: str) -> None:
if stream_name.strip() == "":
raise JsonableError(_("Stream name can't be empty!"))
if len(stream_name) > Stream.MAX_NAME_LENGTH:
raise JsonableError(
_("Stream name too long (limit: {} characters).").format(Stream.MAX_NAME_LENGTH)
)
for i in stream_name:
if ord(i) == 0:
raise JsonableError(
_("Stream name '{}' contains NULL (0x00) characters.").format(stream_name)
)
def check_stream_topic(topic: str) -> None:
if topic.strip() == "":
raise JsonableError(_("Topic can't be empty!"))
invalid_character_pos = check_string_is_printable(topic)
if invalid_character_pos is not None:
raise JsonableError(
_("Invalid character in topic, at position {}!").format(invalid_character_pos)
)
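# Illustrative examples (added for clarity; not part of the original module):
#
#     check_string_is_printable("hello")        # -> None (all characters printable)
#     check_string_is_printable("bad\x00byte")  # -> 4, the 1-indexed position of NUL
#     check_string_is_printable("\ufdd0oops")   # -> 1, a Unicode non-character
#
#     check_stream_name("")            # raises JsonableError: stream name can't be empty
#     check_stream_topic("ok topic")   # passes silently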
|
py | 1a510c6e93f655bf396eca04c951b1c4b4dfd4ac | import tensorflow as tf
import numpy as np
# NOTE: If you want full control for model architecture. please take a look
# at the code and change whatever you want. Some hyper parameters are hardcoded.
# Default hyperparameters:
hparams = tf.contrib.training.HParams(
name="wavenet_vocoder",
# Convenient model builder
builder="wavenet",
# Presets known to work good.
# NOTE: If specified, override hyper parameters with preset
preset="",
presets={
},
# Input type:
# 1. raw [-1, 1]
# 2. mulaw [-1, 1]
# 3. mulaw-quantize [0, mu]
# If input_type is raw or mulaw, network assumes scalar input and
# discretized mixture of logistic distributions output, otherwise one-hot
# input and softmax output are assumed.
# **NOTE**: if you change the one of the two parameters below, you need to
# re-run preprocessing before training.
# **NOTE**: scaler input (raw or mulaw) is experimental. Use it your own risk.
input_type="mulaw-quantize",
quantize_channels=256, # 65536 or 256
# Audio:
sample_rate=24000,
# this is only valid for mulaw is True
silence_threshold=2,
num_mels=80,
fmin=125,
fmax=7600,
fft_size=1024,
# shift can be specified by either hop_size or frame_shift_ms
hop_size=256,
frame_shift_ms=None,
min_level_db=-100,
ref_level_db=20,
# whether to rescale waveform or not.
# Let x is an input waveform, rescaled waveform y is given by:
# y = x / np.abs(x).max() * rescaling_max
rescaling=True,
rescaling_max=0.999,
# mel-spectrogram is normalized to [0, 1] for each utterance and clipping may
# happen depends on min_level_db and ref_level_db, causing clipping noise.
# If False, assertion is added to ensure no clipping happens.
allow_clipping_in_normalization=False,
# Mixture of logistic distributions:
log_scale_min=float(np.log(1e-14)),
# Model:
# This should equal to `quantize_channels` if mu-law quantize enabled
# otherwise num_mixture * 3 (pi, mean, log_scale)
out_channels=256,
layers=30,
stacks=3,
residual_channels=512,
gate_channels=512, # split into 2 gropus internally for gated activation
skip_out_channels=256,
dropout=1 - 0.95,
kernel_size=3,
# If True, apply weight normalization as same as DeepVoice3
weight_normalization=True,
# Local conditioning (set negative value to disable))
cin_channels=80,
# If True, use transposed convolutions to upsample conditional features,
# otherwise repeat features to adjust time resolution
upsample_conditional_features=True,
# should np.prod(upsample_scales) == hop_size
upsample_scales=[4, 4, 4, 4],
# Freq axis kernel size for upsampling network
freq_axis_kernel_size=3,
# Global conditioning (set negative value to disable)
# currently limited for speaker embedding
# this should only be enabled for multi-speaker dataset
gin_channels=-1, # i.e., speaker embedding dim
n_speakers=7, # 7 for CMU ARCTIC
# Data loader
pin_memory=True,
num_workers=2,
# train/test
# test size can be specified as portion or num samples
test_size=0.0441, # 50 for CMU ARCTIC single speaker
test_num_samples=None,
random_state=1234,
# Loss
# Training:
batch_size=32,
adam_beta1=0.9,
adam_beta2=0.999,
adam_eps=1e-8,
initial_learning_rate=1e-3,
# see lrschedule.py for available lr_schedule
lr_schedule="noam_learning_rate_decay",
lr_schedule_kwargs={}, # {"anneal_rate": 0.5, "anneal_interval": 50000},
nepochs=2000,
weight_decay=0.0,
clip_thresh=-1,
# max time steps can either be specified as sec or steps
# This is needed for those who don't have huge GPU memory...
# if both are None, then full audio samples are used
max_time_sec=None,
max_time_steps=7680,
# Hold moving averaged parameters and use them for evaluation
exponential_moving_average=True,
# averaged = decay * averaged + (1 - decay) * x
ema_decay=0.9999,
# Save
# per-step intervals
checkpoint_interval=10000,
train_eval_interval=10000,
# per-epoch interval
test_eval_epoch_interval=5,
save_optimizer_state=True,
# Eval:
)
def hparams_debug_string():
values = hparams.values()
hp = [' %s: %s' % (name, values[name]) for name in sorted(values)]
return 'Hyperparameters:\n' + '\n'.join(hp)
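if __name__ == '__main__':
    # Example only (assumed typical usage, mirroring other WaveNet training scripts):
    # override a few values from a comma-separated string, then dump the effective
    # configuration for the run log.
    hparams.parse('batch_size=16,initial_learning_rate=5e-4')
    print(hparams_debug_string())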
|
py | 1a510d5ca335ecec11ba963ea69f3bbe0d15c946 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import os
import sys
import signal
import time
import ConfigParser
import glob
import json
import Mobigen.Common.Log as Log; Log.Init()
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
resultList = list()
resultSrc = ''
class Handler(FileSystemEventHandler) :
    #def on_any_event(event) : # fires on every event type
# __LOG__.Trace(event)
def on_modified(self, event) :
__LOG__.Trace(event)
        global resultList
        global resultSrc
        # watchdog events expose the affected path directly via src_path.
        strFile = event.src_path
        resultSrc = ''
        if os.path.isfile(strFile):
            # Keep only the directory part of the modified file's path.
            resultSrc = os.path.dirname(strFile)
            resultList.append(strFile)
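if __name__ == '__main__':
    # Minimal wiring sketch (assumed usage; the watched path is a placeholder, not from
    # the original file): attach the Handler to a watchdog Observer so that
    # on_modified() fires whenever a file under the path changes.
    observer = Observer()
    observer.schedule(Handler(), path='.', recursive=True)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()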
|
py | 1a510d92c5067079b7abcf7b7adbf59ff4042901 | # Generated by Django 4.0 on 2021-08-15 18:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0012_alter_user_first_name_max_length'),
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date_of_birth', models.DateField(blank=True, null=True)),
('photo', models.ImageField(blank=True, upload_to='users/%Y/%m/%d/')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='auth.user')),
],
),
]
|
py | 1a510eb14b80361c4e7a3d38042d0d57d962dad6 | from cffi import FFI
ffi = FFI()
ffi.cdef('''
typedef void * HANDLE;
typedef HANDLE HWND;
typedef int BOOL;
typedef unsigned int UINT;
typedef int SIZE_T;
typedef char * LPTSTR;
typedef HANDLE HGLOBAL;
typedef HANDLE LPVOID;
HWND GetConsoleWindow(void);
LPVOID GlobalLock( HGLOBAL hMem );
BOOL GlobalUnlock( HGLOBAL hMem );
HGLOBAL GlobalAlloc(UINT uFlags, SIZE_T dwBytes);
BOOL OpenClipboard(HWND hWndNewOwner);
BOOL CloseClipboard(void);
BOOL EmptyClipboard(void);
HANDLE SetClipboardData(UINT uFormat, HANDLE hMem);
#define CF_TEXT ...
#define GMEM_MOVEABLE ...
void * memcpy(void * s1, void * s2, int n);
''')
ffi.set_source('_winclipboard_cffi', '''
#include <windows.h>
''', libraries=["user32"])
if __name__ == '__main__':
ffi.compile()
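# Illustrative usage sketch (assumed; not part of the original build script). Once the
# extension is built, the Win32 calls declared above can copy text to the clipboard
# roughly like this:
#
#     from _winclipboard_cffi import ffi, lib
#
#     def copy_text(text):
#         data = text.encode('mbcs') + b'\x00'
#         handle = lib.GlobalAlloc(lib.GMEM_MOVEABLE, len(data))
#         buf = lib.GlobalLock(handle)
#         lib.memcpy(buf, data, len(data))
#         lib.GlobalUnlock(handle)
#         lib.OpenClipboard(ffi.NULL)
#         lib.EmptyClipboard()
#         lib.SetClipboardData(lib.CF_TEXT, handle)
#         lib.CloseClipboard()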
|
py | 1a510f25032949c1754cfefaa4f88d0a88b9caab | import os
import re
import sys
import signal
import subprocess
import pty
import fcntl
import struct
import termios
import datetime
import traceback
from base64 import b64decode, b64encode
INVALID_CHARS = re.compile(u'[\xe2\x80\x99]')
import pyte
TERM_W = 80
TERM_H = 24
def remove_invalid_char (value):
return INVALID_CHARS.sub('', value)
class Terminal:
def __init__(self):
self._proc = None
def start(self, app, home, width, height, tsid=None, onclose=None, screen=None):
signal.signal(signal.SIGCHLD, signal.SIG_IGN)
env = {}
env.update(os.environ)
env['TERM'] = 'linux'
env['COLUMNS'] = str(width)
env['LINES'] = str(height)
env['LC_ALL'] = 'en_US.UTF8'
sh = app
self.lines = height
self.cols = width
pid, master = pty.fork()
self.pid = pid
if pid == 0:
os.chdir(home)
p = subprocess.Popen(
sh,
shell=True,
close_fds=True,
env=env,
)
try:
p.wait()
except OSError:
pass
#exit typed
if onclose and tsid:
onclose(tsid)
sys.exit(0)
self._proc = PTYProtocol(pid, master, width, height)
self.resize(self.lines, self.cols)
def restart(self):
if self._proc is not None:
self._proc.kill()
self.start()
def dead(self):
return self._proc is None
def write(self, data):
self._proc.write(data)
#self._proc.write(b64decode(data))
def resize (self, lines, columns):
self.lines = lines
self.cols = columns
self._proc.resize(lines, columns)
class PTYProtocol():
def __init__(self, proc, stream, width, height):
self.data = ''
self.proc = proc
self.master = stream
fd = self.master
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
self.mstream = os.fdopen(self.master, 'r+')
self.term = pyte.HistoryScreen(width, height, 1000)
self.stream = pyte.ByteStream()
self.stream.escape["N"] = "next_page"
self.stream.escape["P"] = "prev_page"
self.stream.attach(self.term)
self.data = ''
self.unblock()
self.updated = None
self.lastx = None
self.lasty = None
def resize (self, lines, columns):
fd = self.master
self.term.resize(lines, columns)
s = struct.pack("HHHH", lines, columns, 0, 0)
fcntl.ioctl(fd, termios.TIOCSWINSZ, s)
self.term.reset()
def unblock(self):
fd = self.master
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
def block(self):
fd = self.master
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, fl - os.O_NONBLOCK)
def read(self):
for i in range(0, 45):
try:
d = self.mstream.read()
self.data += d
if len(self.data) > 0:
#u = unicode(remove_invalid_char(str(self.data)))
self.stream.feed(self.data)
self.data = ''
self.updated = datetime.datetime.now()
break
except IOError, e:
pass
except UnicodeDecodeError, e:
print 'UNICODE'
print e
import traceback
traceback.print_exc()
return self.format()
def history(self):
return self.format(full=True)
def format(self, full=False):
l = {}
if self.lastx != self.term.cursor.x or self.lasty != self.term.cursor.y:
self.lastx = self.term.cursor.x
self.lasty = self.term.cursor.y
self.updated = datetime.datetime.now()
self.term.dirty.add(self.term.cursor.y)
for k in self.term.dirty:
try:
l[k] = self.term[k]
except:
pass
self.term.dirty.clear()
r = {
'lines': self.term if full else l,
'cx': self.term.cursor.x,
'cy': self.term.cursor.y,
'cursor': not self.term.cursor.hidden,
}
return r
def write(self, data):
self.block()
self.mstream.write(data)
self.mstream.flush()
self.unblock()
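# Illustrative usage sketch (assumed; not part of the original module): a web-terminal
# backend would typically drive these classes roughly as follows.
#
#     term = Terminal()
#     term.start('/bin/bash', home='/home/user', width=80, height=24)
#     term.write('ls\n')           # forward keystrokes to the PTY
#     screen = term._proc.read()   # poll the emulated screen for dirty lines
#     term.resize(40, 120)         # lines, columns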
|
py | 1a510f977f5445712021803088397efddd6584a0 | # Generated by Django 2.1.4 on 2019-02-10 11:37
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('passwordmanager', '0006_auto_20190206_2234'),
]
operations = [
migrations.AlterModelOptions(
name='account',
options={'verbose_name_plural': ' Accounts, Usernames & Passwords for Sacema '},
),
]
|
py | 1a5111163053b39582b5330b1f4807bc839e0d3d | import networkx as nx
from ndex2 import create_nice_cx_from_networkx
from ndex2.client import Ndex2
class NDEx:
""" An interface to the NDEx network catalog. """
def __init__(self, account, password, uri="http://public.ndexbio.org"):
self.uri = uri
self.session = None
self.account = account
self.password = password
try:
self.session = Ndex2 (uri, account, password)
self.session.update_status()
networks = self.session.status.get("networkCount")
users = self.session.status.get("userCount")
groups = self.session.status.get("groupCount")
print(f"session: networks: {networks} users: {users} groups: {groups}")
except Exception as inst:
print(f"Could not access account {account}")
raise inst
def save_nx_graph (self, name, graph):
""" Save a networkx graph to NDEx. """
assert name, "A name for the network is required."
""" Serialize node and edge python objects. """
g = nx.MultiDiGraph()
nodes = { n.id : i for i, n in enumerate (graph.nodes()) }
for n in graph.nodes ():
g.add_node(n.id, attr_dict=n.n2json())
for e in graph.edges (data=True):
edge = e[2]['object']
g.add_edge (edge.source_id,
edge.target_id,
attr_dict=e[2]['object'].e2json())
""" Convert to CX network. """
nice_cx = create_nice_cx_from_networkx (g)
nice_cx.set_name (name)
print (f" connected: {nx.is_connected(graph.to_undirected())} edges: {len(graph.edges())} nodes: {len(graph.nodes())}")
print (nice_cx)
""" Upload to NDEx. """
upload_message = nice_cx.upload_to(self.uri, self.account, self.password)
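# Illustrative usage sketch (assumed account details and graph; not part of the original
# module):
#
#     ndex = NDEx(account="my_user", password="my_password")
#     ndex.save_nx_graph("my knowledge graph", graph)
#
# where `graph` is a networkx graph whose node objects expose .id and .n2json(), and
# whose edge data carry an 'object' with source_id, target_id and .e2json(), as
# assumed by save_nx_graph above.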
|
py | 1a5113381d6795c86912007367d2d56700015851 | # -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import json
import os
import psutil
import shutil
import signal
import subprocess
import sys
import time
import zlib
from datetime import datetime, timedelta, timezone
from multiprocessing import Process
from time import sleep
from unittest import skipIf
import redis.exceptions
import pytest
import mock
from mock import Mock
from tests import RQTestCase, slow
from tests.fixtures import (
access_self, create_file, create_file_after_timeout, create_file_after_timeout_and_setsid, div_by_zero, do_nothing,
kill_worker, long_running_job, modify_self, modify_self_and_error,
run_dummy_heroku_worker, save_key_ttl, say_hello, say_pid, raise_exc_mock,
launch_process_within_worker_and_store_pid
)
from rq import Queue, SimpleWorker, Worker, get_current_connection
from rq.compat import as_text, PY2
from rq.job import Job, JobStatus, Retry
from rq.registry import StartedJobRegistry, FailedJobRegistry, FinishedJobRegistry
from rq.suspension import resume, suspend
from rq.utils import utcnow
from rq.version import VERSION
from rq.worker import HerokuWorker, WorkerStatus
from rq.serializers import JSONSerializer
class CustomJob(Job):
pass
class CustomQueue(Queue):
pass
class TestWorker(RQTestCase):
def test_create_worker(self):
"""Worker creation using various inputs."""
# With single string argument
w = Worker('foo')
self.assertEqual(w.queues[0].name, 'foo')
# With list of strings
w = Worker(['foo', 'bar'])
self.assertEqual(w.queues[0].name, 'foo')
self.assertEqual(w.queues[1].name, 'bar')
self.assertEqual(w.queue_keys(), [w.queues[0].key, w.queues[1].key])
self.assertEqual(w.queue_names(), ['foo', 'bar'])
# With iterable of strings
w = Worker(iter(['foo', 'bar']))
self.assertEqual(w.queues[0].name, 'foo')
self.assertEqual(w.queues[1].name, 'bar')
# Also accept byte strings in Python 2
if PY2:
# With single byte string argument
w = Worker(b'foo')
self.assertEqual(w.queues[0].name, 'foo')
# With list of byte strings
w = Worker([b'foo', b'bar'])
self.assertEqual(w.queues[0].name, 'foo')
self.assertEqual(w.queues[1].name, 'bar')
# With iterable of byte strings
w = Worker(iter([b'foo', b'bar']))
self.assertEqual(w.queues[0].name, 'foo')
self.assertEqual(w.queues[1].name, 'bar')
# With single Queue
w = Worker(Queue('foo'))
self.assertEqual(w.queues[0].name, 'foo')
# With iterable of Queues
w = Worker(iter([Queue('foo'), Queue('bar')]))
self.assertEqual(w.queues[0].name, 'foo')
self.assertEqual(w.queues[1].name, 'bar')
# With list of Queues
w = Worker([Queue('foo'), Queue('bar')])
self.assertEqual(w.queues[0].name, 'foo')
self.assertEqual(w.queues[1].name, 'bar')
# With string and serializer
w = Worker('foo', serializer=json)
self.assertEqual(w.queues[0].name, 'foo')
# With queue having serializer
w = Worker(Queue('foo'), serializer=json)
self.assertEqual(w.queues[0].name, 'foo')
def test_work_and_quit(self):
"""Worker processes work, then quits."""
fooq, barq = Queue('foo'), Queue('bar')
w = Worker([fooq, barq])
self.assertEqual(
w.work(burst=True), False,
'Did not expect any work on the queue.'
)
fooq.enqueue(say_hello, name='Frank')
self.assertEqual(
w.work(burst=True), True,
'Expected at least some work done.'
)
def test_work_and_quit_custom_serializer(self):
"""Worker processes work, then quits."""
fooq, barq = Queue('foo', serializer=JSONSerializer), Queue('bar', serializer=JSONSerializer)
w = Worker([fooq, barq], serializer=JSONSerializer)
self.assertEqual(
w.work(burst=True), False,
'Did not expect any work on the queue.'
)
fooq.enqueue(say_hello, name='Frank')
self.assertEqual(
w.work(burst=True), True,
'Expected at least some work done.'
)
def test_worker_all(self):
"""Worker.all() works properly"""
foo_queue = Queue('foo')
bar_queue = Queue('bar')
w1 = Worker([foo_queue, bar_queue], name='w1')
w1.register_birth()
w2 = Worker([foo_queue], name='w2')
w2.register_birth()
self.assertEqual(
set(Worker.all(connection=foo_queue.connection)),
set([w1, w2])
)
self.assertEqual(set(Worker.all(queue=foo_queue)), set([w1, w2]))
self.assertEqual(set(Worker.all(queue=bar_queue)), set([w1]))
w1.register_death()
w2.register_death()
def test_find_by_key(self):
"""Worker.find_by_key restores queues, state and job_id."""
queues = [Queue('foo'), Queue('bar')]
w = Worker(queues)
w.register_death()
w.register_birth()
w.set_state(WorkerStatus.STARTED)
worker = Worker.find_by_key(w.key)
self.assertEqual(worker.queues, queues)
self.assertEqual(worker.get_state(), WorkerStatus.STARTED)
self.assertEqual(worker._job_id, None)
self.assertTrue(worker.key in Worker.all_keys(worker.connection))
self.assertEqual(worker.version, VERSION)
# If worker is gone, its keys should also be removed
worker.connection.delete(worker.key)
Worker.find_by_key(worker.key)
self.assertFalse(worker.key in Worker.all_keys(worker.connection))
self.assertRaises(ValueError, Worker.find_by_key, 'foo')
def test_worker_ttl(self):
"""Worker ttl."""
w = Worker([])
w.register_birth()
[worker_key] = self.testconn.smembers(Worker.redis_workers_keys)
self.assertIsNotNone(self.testconn.ttl(worker_key))
w.register_death()
def test_work_via_string_argument(self):
"""Worker processes work fed via string arguments."""
q = Queue('foo')
w = Worker([q])
job = q.enqueue('tests.fixtures.say_hello', name='Frank')
self.assertEqual(
w.work(burst=True), True,
'Expected at least some work done.'
)
self.assertEqual(job.result, 'Hi there, Frank!')
self.assertIsNone(job.worker_name)
def test_job_times(self):
"""job times are set correctly."""
q = Queue('foo')
w = Worker([q])
before = utcnow()
before = before.replace(microsecond=0)
job = q.enqueue(say_hello)
self.assertIsNotNone(job.enqueued_at)
self.assertIsNone(job.started_at)
self.assertIsNone(job.ended_at)
self.assertEqual(
w.work(burst=True), True,
'Expected at least some work done.'
)
self.assertEqual(job.result, 'Hi there, Stranger!')
after = utcnow()
job.refresh()
self.assertTrue(
before <= job.enqueued_at <= after,
'Not %s <= %s <= %s' % (before, job.enqueued_at, after)
)
self.assertTrue(
before <= job.started_at <= after,
'Not %s <= %s <= %s' % (before, job.started_at, after)
)
self.assertTrue(
before <= job.ended_at <= after,
'Not %s <= %s <= %s' % (before, job.ended_at, after)
)
def test_work_is_unreadable(self):
"""Unreadable jobs are put on the failed job registry."""
q = Queue()
self.assertEqual(q.count, 0)
# NOTE: We have to fake this enqueueing for this test case.
# What we're simulating here is a call to a function that is not
# importable from the worker process.
job = Job.create(func=div_by_zero, args=(3,), origin=q.name)
job.save()
job_data = job.data
invalid_data = job_data.replace(b'div_by_zero', b'nonexisting')
assert job_data != invalid_data
self.testconn.hset(job.key, 'data', zlib.compress(invalid_data))
# We use the low-level internal function to enqueue any data (bypassing
# validity checks)
q.push_job_id(job.id)
self.assertEqual(q.count, 1)
# All set, we're going to process it
w = Worker([q])
w.work(burst=True) # should silently pass
self.assertEqual(q.count, 0)
failed_job_registry = FailedJobRegistry(queue=q)
self.assertTrue(job in failed_job_registry)
def test_heartbeat(self):
"""Heartbeat saves last_heartbeat"""
q = Queue()
w = Worker([q])
w.register_birth()
self.assertEqual(str(w.pid), as_text(self.testconn.hget(w.key, 'pid')))
self.assertEqual(w.hostname,
as_text(self.testconn.hget(w.key, 'hostname')))
last_heartbeat = self.testconn.hget(w.key, 'last_heartbeat')
self.assertIsNotNone(self.testconn.hget(w.key, 'birth'))
self.assertTrue(last_heartbeat is not None)
w = Worker.find_by_key(w.key)
self.assertIsInstance(w.last_heartbeat, datetime)
# worker.refresh() shouldn't fail if last_heartbeat is None
# for compatibility reasons
self.testconn.hdel(w.key, 'last_heartbeat')
w.refresh()
# worker.refresh() shouldn't fail if birth is None
# for compatibility reasons
self.testconn.hdel(w.key, 'birth')
w.refresh()
@slow
def test_heartbeat_survives_lost_connection(self):
with mock.patch.object(Worker, 'heartbeat') as mocked:
# None -> Heartbeat is first called before the job loop
mocked.side_effect = [None, redis.exceptions.ConnectionError()]
q = Queue()
w = Worker([q])
w.work(burst=True)
# First call is prior to job loop, second raises the error,
# third is successful, after "recovery"
assert mocked.call_count == 3
@slow
def test_heartbeat_busy(self):
"""Periodic heartbeats while horse is busy with long jobs"""
q = Queue()
w = Worker([q], job_monitoring_interval=5)
for timeout, expected_heartbeats in [(2, 0), (7, 1), (12, 2)]:
job = q.enqueue(long_running_job,
args=(timeout,),
job_timeout=30,
result_ttl=-1)
with mock.patch.object(w, 'heartbeat', wraps=w.heartbeat) as mocked:
w.execute_job(job, q)
self.assertEqual(mocked.call_count, expected_heartbeats)
job = Job.fetch(job.id)
self.assertEqual(job.get_status(), JobStatus.FINISHED)
def test_work_fails(self):
"""Failing jobs are put on the failed queue."""
q = Queue()
self.assertEqual(q.count, 0)
# Action
job = q.enqueue(div_by_zero)
self.assertEqual(q.count, 1)
# keep for later
enqueued_at_date = str(job.enqueued_at)
w = Worker([q])
w.work(burst=True)
# Postconditions
self.assertEqual(q.count, 0)
failed_job_registry = FailedJobRegistry(queue=q)
self.assertTrue(job in failed_job_registry)
self.assertEqual(w.get_current_job_id(), None)
# Check the job
job = Job.fetch(job.id)
self.assertEqual(job.origin, q.name)
self.assertIsNone(job.worker_name) # Worker name is cleared after failures
# Should be the original enqueued_at date, not the date of enqueueing
# to the failed queue
self.assertEqual(str(job.enqueued_at), enqueued_at_date)
self.assertTrue(job.exc_info) # should contain exc_info
def test_horse_fails(self):
"""Tests that job status is set to FAILED even if horse unexpectedly fails"""
q = Queue()
self.assertEqual(q.count, 0)
# Action
job = q.enqueue(say_hello)
self.assertEqual(q.count, 1)
# keep for later
enqueued_at_date = str(job.enqueued_at)
w = Worker([q])
with mock.patch.object(w, 'perform_job', new_callable=raise_exc_mock):
w.work(burst=True) # should silently pass
# Postconditions
self.assertEqual(q.count, 0)
failed_job_registry = FailedJobRegistry(queue=q)
self.assertTrue(job in failed_job_registry)
self.assertEqual(w.get_current_job_id(), None)
# Check the job
job = Job.fetch(job.id)
self.assertEqual(job.origin, q.name)
# Should be the original enqueued_at date, not the date of enqueueing
# to the failed queue
self.assertEqual(str(job.enqueued_at), enqueued_at_date)
self.assertTrue(job.exc_info) # should contain exc_info
def test_statistics(self):
"""Successful and failed job counts are saved properly"""
queue = Queue()
job = queue.enqueue(div_by_zero)
worker = Worker([queue])
worker.register_birth()
self.assertEqual(worker.failed_job_count, 0)
self.assertEqual(worker.successful_job_count, 0)
self.assertEqual(worker.total_working_time, 0)
registry = StartedJobRegistry(connection=worker.connection)
job.started_at = utcnow()
job.ended_at = job.started_at + timedelta(seconds=0.75)
worker.handle_job_failure(job, queue)
worker.handle_job_success(job, queue, registry)
worker.refresh()
self.assertEqual(worker.failed_job_count, 1)
self.assertEqual(worker.successful_job_count, 1)
self.assertEqual(worker.total_working_time, 1.5) # 1.5 seconds
worker.handle_job_failure(job, queue)
worker.handle_job_success(job, queue, registry)
worker.refresh()
self.assertEqual(worker.failed_job_count, 2)
self.assertEqual(worker.successful_job_count, 2)
self.assertEqual(worker.total_working_time, 3.0)
def test_handle_retry(self):
"""handle_job_failure() handles retry properly"""
connection = self.testconn
queue = Queue(connection=connection)
retry = Retry(max=2)
job = queue.enqueue(div_by_zero, retry=retry)
registry = FailedJobRegistry(queue=queue)
worker = Worker([queue])
        # If the job is configured to retry, it will be put back in the queue
        # and not put in the FailedJobRegistry.
# This is the original execution
queue.empty()
worker.handle_job_failure(job, queue)
job.refresh()
self.assertEqual(job.retries_left, 1)
self.assertEqual([job.id], queue.job_ids)
self.assertFalse(job in registry)
# First retry
queue.empty()
worker.handle_job_failure(job, queue)
job.refresh()
self.assertEqual(job.retries_left, 0)
self.assertEqual([job.id], queue.job_ids)
# Second retry
queue.empty()
worker.handle_job_failure(job, queue)
job.refresh()
self.assertEqual(job.retries_left, 0)
self.assertEqual([], queue.job_ids)
        # If a job has no retries left, it's put in the FailedJobRegistry
self.assertTrue(job in registry)
def test_retry_interval(self):
"""Retries with intervals are scheduled"""
connection = self.testconn
queue = Queue(connection=connection)
retry = Retry(max=1, interval=5)
job = queue.enqueue(div_by_zero, retry=retry)
worker = Worker([queue])
registry = queue.scheduled_job_registry
        # If the job is configured to retry with an interval, it will be scheduled,
        # not directly put back in the queue
queue.empty()
worker.handle_job_failure(job, queue)
job.refresh()
self.assertEqual(job.get_status(), JobStatus.SCHEDULED)
self.assertEqual(job.retries_left, 0)
self.assertEqual(len(registry), 1)
self.assertEqual(queue.job_ids, [])
# Scheduled time is roughly 5 seconds from now
scheduled_time = registry.get_scheduled_time(job)
now = datetime.now(timezone.utc)
self.assertTrue(now + timedelta(seconds=4) < scheduled_time < now + timedelta(seconds=6))
def test_total_working_time(self):
"""worker.total_working_time is stored properly"""
queue = Queue()
job = queue.enqueue(long_running_job, 0.05)
worker = Worker([queue])
worker.register_birth()
worker.perform_job(job, queue)
worker.refresh()
# total_working_time should be a little bit more than 0.05 seconds
self.assertGreaterEqual(worker.total_working_time, 0.05)
        # in multi-user environments delays might be unpredictable;
        # adjust this magic limit accordingly if it takes even longer to run
self.assertLess(worker.total_working_time, 1)
def test_max_jobs(self):
"""Worker exits after number of jobs complete."""
queue = Queue()
job1 = queue.enqueue(do_nothing)
job2 = queue.enqueue(do_nothing)
worker = Worker([queue])
worker.work(max_jobs=1)
self.assertEqual(JobStatus.FINISHED, job1.get_status())
self.assertEqual(JobStatus.QUEUED, job2.get_status())
def test_disable_default_exception_handler(self):
"""
Job is not moved to FailedJobRegistry when default custom exception
handler is disabled.
"""
queue = Queue(name='default', connection=self.testconn)
job = queue.enqueue(div_by_zero)
worker = Worker([queue], disable_default_exception_handler=False)
worker.work(burst=True)
registry = FailedJobRegistry(queue=queue)
self.assertTrue(job in registry)
# Job is not added to FailedJobRegistry if
# disable_default_exception_handler is True
job = queue.enqueue(div_by_zero)
worker = Worker([queue], disable_default_exception_handler=True)
worker.work(burst=True)
self.assertFalse(job in registry)
def test_custom_exc_handling(self):
"""Custom exception handling."""
def first_handler(job, *exc_info):
job.meta = {'first_handler': True}
job.save_meta()
return True
def second_handler(job, *exc_info):
job.meta.update({'second_handler': True})
job.save_meta()
def black_hole(job, *exc_info):
# Don't fall through to default behaviour (moving to failed queue)
return False
q = Queue()
self.assertEqual(q.count, 0)
job = q.enqueue(div_by_zero)
w = Worker([q], exception_handlers=first_handler)
w.work(burst=True)
# Check the job
job.refresh()
self.assertEqual(job.is_failed, True)
self.assertTrue(job.meta['first_handler'])
job = q.enqueue(div_by_zero)
w = Worker([q], exception_handlers=[first_handler, second_handler])
w.work(burst=True)
# Both custom exception handlers are run
job.refresh()
self.assertEqual(job.is_failed, True)
self.assertTrue(job.meta['first_handler'])
self.assertTrue(job.meta['second_handler'])
job = q.enqueue(div_by_zero)
w = Worker([q], exception_handlers=[first_handler, black_hole,
second_handler])
w.work(burst=True)
# second_handler is not run since it's interrupted by black_hole
job.refresh()
self.assertEqual(job.is_failed, True)
self.assertTrue(job.meta['first_handler'])
self.assertEqual(job.meta.get('second_handler'), None)
def test_cancelled_jobs_arent_executed(self):
"""Cancelling jobs."""
SENTINEL_FILE = '/tmp/rq-tests.txt' # noqa
try:
# Remove the sentinel if it is leftover from a previous test run
os.remove(SENTINEL_FILE)
except OSError as e:
if e.errno != 2:
raise
q = Queue()
job = q.enqueue(create_file, SENTINEL_FILE)
        # Here, we cancel the job, so the sentinel file should not be created
self.testconn.delete(job.key)
w = Worker([q])
w.work(burst=True)
assert q.count == 0
# Should not have created evidence of execution
self.assertEqual(os.path.exists(SENTINEL_FILE), False)
@slow # noqa
def test_timeouts(self):
"""Worker kills jobs after timeout."""
sentinel_file = '/tmp/.rq_sentinel'
q = Queue()
w = Worker([q])
# Put it on the queue with a timeout value
res = q.enqueue(create_file_after_timeout,
args=(sentinel_file, 4),
job_timeout=1)
try:
os.unlink(sentinel_file)
except OSError as e:
if e.errno == 2:
pass
self.assertEqual(os.path.exists(sentinel_file), False)
w.work(burst=True)
self.assertEqual(os.path.exists(sentinel_file), False)
# TODO: Having to do the manual refresh() here is really ugly!
res.refresh()
self.assertIn('JobTimeoutException', as_text(res.exc_info))
def test_worker_sets_result_ttl(self):
"""Ensure that Worker properly sets result_ttl for individual jobs."""
q = Queue()
job = q.enqueue(say_hello, args=('Frank',), result_ttl=10)
w = Worker([q])
self.assertIn(job.get_id().encode(), self.testconn.lrange(q.key, 0, -1))
w.work(burst=True)
self.assertNotEqual(self.testconn.ttl(job.key), 0)
self.assertNotIn(job.get_id().encode(), self.testconn.lrange(q.key, 0, -1))
        # Jobs with result_ttl=-1 don't expire
job = q.enqueue(say_hello, args=('Frank',), result_ttl=-1)
w = Worker([q])
self.assertIn(job.get_id().encode(), self.testconn.lrange(q.key, 0, -1))
w.work(burst=True)
self.assertEqual(self.testconn.ttl(job.key), -1)
self.assertNotIn(job.get_id().encode(), self.testconn.lrange(q.key, 0, -1))
# Job with result_ttl = 0 gets deleted immediately
job = q.enqueue(say_hello, args=('Frank',), result_ttl=0)
w = Worker([q])
self.assertIn(job.get_id().encode(), self.testconn.lrange(q.key, 0, -1))
w.work(burst=True)
self.assertEqual(self.testconn.get(job.key), None)
self.assertNotIn(job.get_id().encode(), self.testconn.lrange(q.key, 0, -1))
def test_worker_sets_job_status(self):
"""Ensure that worker correctly sets job status."""
q = Queue()
w = Worker([q])
job = q.enqueue(say_hello)
self.assertEqual(job.get_status(), JobStatus.QUEUED)
self.assertEqual(job.is_queued, True)
self.assertEqual(job.is_finished, False)
self.assertEqual(job.is_failed, False)
w.work(burst=True)
job = Job.fetch(job.id)
self.assertEqual(job.get_status(), JobStatus.FINISHED)
self.assertEqual(job.is_queued, False)
self.assertEqual(job.is_finished, True)
self.assertEqual(job.is_failed, False)
# Failed jobs should set status to "failed"
job = q.enqueue(div_by_zero, args=(1,))
w.work(burst=True)
job = Job.fetch(job.id)
self.assertEqual(job.get_status(), JobStatus.FAILED)
self.assertEqual(job.is_queued, False)
self.assertEqual(job.is_finished, False)
self.assertEqual(job.is_failed, True)
def test_job_dependency(self):
"""Enqueue dependent jobs only if their parents don't fail"""
q = Queue()
w = Worker([q])
parent_job = q.enqueue(say_hello, result_ttl=0)
job = q.enqueue_call(say_hello, depends_on=parent_job)
w.work(burst=True)
job = Job.fetch(job.id)
self.assertEqual(job.get_status(), JobStatus.FINISHED)
parent_job = q.enqueue(div_by_zero)
job = q.enqueue_call(say_hello, depends_on=parent_job)
w.work(burst=True)
job = Job.fetch(job.id)
self.assertNotEqual(job.get_status(), JobStatus.FINISHED)
def test_get_current_job(self):
"""Ensure worker.get_current_job() works properly"""
q = Queue()
worker = Worker([q])
job = q.enqueue_call(say_hello)
self.assertEqual(self.testconn.hget(worker.key, 'current_job'), None)
worker.set_current_job_id(job.id)
self.assertEqual(
worker.get_current_job_id(),
as_text(self.testconn.hget(worker.key, 'current_job'))
)
self.assertEqual(worker.get_current_job(), job)
def test_custom_job_class(self):
"""Ensure Worker accepts custom job class."""
q = Queue()
worker = Worker([q], job_class=CustomJob)
self.assertEqual(worker.job_class, CustomJob)
def test_custom_queue_class(self):
"""Ensure Worker accepts custom queue class."""
q = CustomQueue()
worker = Worker([q], queue_class=CustomQueue)
self.assertEqual(worker.queue_class, CustomQueue)
def test_custom_queue_class_is_not_global(self):
"""Ensure Worker custom queue class is not global."""
q = CustomQueue()
worker_custom = Worker([q], queue_class=CustomQueue)
q_generic = Queue()
worker_generic = Worker([q_generic])
self.assertEqual(worker_custom.queue_class, CustomQueue)
self.assertEqual(worker_generic.queue_class, Queue)
self.assertEqual(Worker.queue_class, Queue)
def test_custom_job_class_is_not_global(self):
"""Ensure Worker custom job class is not global."""
q = Queue()
worker_custom = Worker([q], job_class=CustomJob)
q_generic = Queue()
worker_generic = Worker([q_generic])
self.assertEqual(worker_custom.job_class, CustomJob)
self.assertEqual(worker_generic.job_class, Job)
self.assertEqual(Worker.job_class, Job)
def test_work_via_simpleworker(self):
"""Worker processes work, with forking disabled,
then returns."""
fooq, barq = Queue('foo'), Queue('bar')
w = SimpleWorker([fooq, barq])
self.assertEqual(w.work(burst=True), False,
'Did not expect any work on the queue.')
job = fooq.enqueue(say_pid)
self.assertEqual(w.work(burst=True), True,
'Expected at least some work done.')
self.assertEqual(job.result, os.getpid(),
'PID mismatch, fork() is not supposed to happen here')
def test_simpleworker_heartbeat_ttl(self):
"""SimpleWorker's key must last longer than job.timeout when working"""
queue = Queue('foo')
worker = SimpleWorker([queue])
job_timeout = 300
job = queue.enqueue(save_key_ttl, worker.key, job_timeout=job_timeout)
worker.work(burst=True)
job.refresh()
self.assertGreater(job.meta['ttl'], job_timeout)
def test_prepare_job_execution(self):
"""Prepare job execution does the necessary bookkeeping."""
queue = Queue(connection=self.testconn)
job = queue.enqueue(say_hello)
worker = Worker([queue])
worker.prepare_job_execution(job)
# Updates working queue
registry = StartedJobRegistry(connection=self.testconn)
self.assertEqual(registry.get_job_ids(), [job.id])
# Updates worker statuses
self.assertEqual(worker.get_state(), 'busy')
self.assertEqual(worker.get_current_job_id(), job.id)
# job status is also updated
self.assertEqual(job._status, JobStatus.STARTED)
self.assertEqual(job.worker_name, worker.name)
def test_work_unicode_friendly(self):
"""Worker processes work with unicode description, then quits."""
q = Queue('foo')
w = Worker([q])
job = q.enqueue('tests.fixtures.say_hello', name='Adam',
description='你好 世界!')
self.assertEqual(w.work(burst=True), True,
'Expected at least some work done.')
self.assertEqual(job.result, 'Hi there, Adam!')
self.assertEqual(job.description, '你好 世界!')
def test_work_log_unicode_friendly(self):
"""Worker process work with unicode or str other than pure ascii content,
logging work properly"""
q = Queue("foo")
w = Worker([q])
job = q.enqueue('tests.fixtures.say_hello', name='阿达姆',
description='你好 世界!')
w.work(burst=True)
self.assertEqual(job.get_status(), JobStatus.FINISHED)
job = q.enqueue('tests.fixtures.say_hello_unicode', name='阿达姆',
description='你好 世界!')
w.work(burst=True)
self.assertEqual(job.get_status(), JobStatus.FINISHED)
def test_suspend_worker_execution(self):
"""Test Pause Worker Execution"""
SENTINEL_FILE = '/tmp/rq-tests.txt' # noqa
try:
# Remove the sentinel if it is leftover from a previous test run
os.remove(SENTINEL_FILE)
except OSError as e:
if e.errno != 2:
raise
q = Queue()
q.enqueue(create_file, SENTINEL_FILE)
w = Worker([q])
suspend(self.testconn)
w.work(burst=True)
assert q.count == 1
# Should not have created evidence of execution
self.assertEqual(os.path.exists(SENTINEL_FILE), False)
resume(self.testconn)
w.work(burst=True)
assert q.count == 0
self.assertEqual(os.path.exists(SENTINEL_FILE), True)
@slow
def test_suspend_with_duration(self):
q = Queue()
for _ in range(5):
q.enqueue(do_nothing)
w = Worker([q])
        # This suspends the worker from working for 2 seconds
suspend(self.testconn, 2)
# So when this burst of work happens the queue should remain at 5
w.work(burst=True)
assert q.count == 5
sleep(3)
# The suspension should be expired now, and a burst of work should now clear the queue
w.work(burst=True)
assert q.count == 0
def test_worker_hash_(self):
"""Workers are hashed by their .name attribute"""
q = Queue('foo')
w1 = Worker([q], name="worker1")
w2 = Worker([q], name="worker2")
w3 = Worker([q], name="worker1")
worker_set = set([w1, w2, w3])
self.assertEqual(len(worker_set), 2)
def test_worker_sets_birth(self):
"""Ensure worker correctly sets worker birth date."""
q = Queue()
w = Worker([q])
w.register_birth()
birth_date = w.birth_date
self.assertIsNotNone(birth_date)
self.assertEqual(type(birth_date).__name__, 'datetime')
def test_worker_sets_death(self):
"""Ensure worker correctly sets worker death date."""
q = Queue()
w = Worker([q])
w.register_death()
death_date = w.death_date
self.assertIsNotNone(death_date)
self.assertIsInstance(death_date, datetime)
def test_clean_queue_registries(self):
"""worker.clean_registries sets last_cleaned_at and cleans registries."""
foo_queue = Queue('foo', connection=self.testconn)
foo_registry = StartedJobRegistry('foo', connection=self.testconn)
self.testconn.zadd(foo_registry.key, {'foo': 1})
self.assertEqual(self.testconn.zcard(foo_registry.key), 1)
bar_queue = Queue('bar', connection=self.testconn)
bar_registry = StartedJobRegistry('bar', connection=self.testconn)
self.testconn.zadd(bar_registry.key, {'bar': 1})
self.assertEqual(self.testconn.zcard(bar_registry.key), 1)
worker = Worker([foo_queue, bar_queue])
self.assertEqual(worker.last_cleaned_at, None)
worker.clean_registries()
self.assertNotEqual(worker.last_cleaned_at, None)
self.assertEqual(self.testconn.zcard(foo_registry.key), 0)
self.assertEqual(self.testconn.zcard(bar_registry.key), 0)
# worker.clean_registries() only runs once every 15 minutes
# If we add another key, calling clean_registries() should do nothing
self.testconn.zadd(bar_registry.key, {'bar': 1})
worker.clean_registries()
self.assertEqual(self.testconn.zcard(bar_registry.key), 1)
def test_should_run_maintenance_tasks(self):
"""Workers should run maintenance tasks on startup and every hour."""
queue = Queue(connection=self.testconn)
worker = Worker(queue)
self.assertTrue(worker.should_run_maintenance_tasks)
worker.last_cleaned_at = utcnow()
self.assertFalse(worker.should_run_maintenance_tasks)
worker.last_cleaned_at = utcnow() - timedelta(seconds=3700)
self.assertTrue(worker.should_run_maintenance_tasks)
def test_worker_calls_clean_registries(self):
"""Worker calls clean_registries when run."""
queue = Queue(connection=self.testconn)
registry = StartedJobRegistry(connection=self.testconn)
self.testconn.zadd(registry.key, {'foo': 1})
worker = Worker(queue, connection=self.testconn)
worker.work(burst=True)
self.assertEqual(self.testconn.zcard(registry.key), 0)
def test_job_dependency_race_condition(self):
"""Dependencies added while the job gets finished shouldn't get lost."""
# This patches the enqueue_dependents to enqueue a new dependency AFTER
# the original code was executed.
orig_enqueue_dependents = Queue.enqueue_dependents
def new_enqueue_dependents(self, job, *args, **kwargs):
orig_enqueue_dependents(self, job, *args, **kwargs)
if hasattr(Queue, '_add_enqueue') and Queue._add_enqueue is not None and Queue._add_enqueue.id == job.id:
Queue._add_enqueue = None
Queue().enqueue_call(say_hello, depends_on=job)
Queue.enqueue_dependents = new_enqueue_dependents
q = Queue()
w = Worker([q])
with mock.patch.object(Worker, 'execute_job', wraps=w.execute_job) as mocked:
parent_job = q.enqueue(say_hello, result_ttl=0)
Queue._add_enqueue = parent_job
job = q.enqueue_call(say_hello, depends_on=parent_job)
w.work(burst=True)
job = Job.fetch(job.id)
self.assertEqual(job.get_status(), JobStatus.FINISHED)
# The created spy checks two issues:
        # * before the fix of #739, 2 of the 3 jobs were executed due
# to the race condition
# * during the development another issue was fixed:
# due to a missing pipeline usage in Queue.enqueue_job, the job
# which was enqueued before the "rollback" was executed twice.
# So before that fix the call count was 4 instead of 3
self.assertEqual(mocked.call_count, 3)
def test_self_modification_persistence(self):
"""Make sure that any meta modification done by
the job itself persists completely through the
queue/worker/job stack."""
q = Queue()
# Also make sure that previously existing metadata
# persists properly
job = q.enqueue(modify_self, meta={'foo': 'bar', 'baz': 42},
args=[{'baz': 10, 'newinfo': 'waka'}])
w = Worker([q])
w.work(burst=True)
job_check = Job.fetch(job.id)
self.assertEqual(job_check.meta['foo'], 'bar')
self.assertEqual(job_check.meta['baz'], 10)
self.assertEqual(job_check.meta['newinfo'], 'waka')
def test_self_modification_persistence_with_error(self):
"""Make sure that any meta modification done by
the job itself persists completely through the
queue/worker/job stack -- even if the job errored"""
q = Queue()
# Also make sure that previously existing metadata
# persists properly
job = q.enqueue(modify_self_and_error, meta={'foo': 'bar', 'baz': 42},
args=[{'baz': 10, 'newinfo': 'waka'}])
w = Worker([q])
w.work(burst=True)
# Postconditions
self.assertEqual(q.count, 0)
failed_job_registry = FailedJobRegistry(queue=q)
self.assertTrue(job in failed_job_registry)
self.assertEqual(w.get_current_job_id(), None)
job_check = Job.fetch(job.id)
self.assertEqual(job_check.meta['foo'], 'bar')
self.assertEqual(job_check.meta['baz'], 10)
self.assertEqual(job_check.meta['newinfo'], 'waka')
@mock.patch('rq.worker.logger.info')
def test_log_result_lifespan_true(self, mock_logger_info):
"""Check that log_result_lifespan True causes job lifespan to be logged."""
q = Queue()
w = Worker([q])
job = q.enqueue(say_hello, args=('Frank',), result_ttl=10)
w.perform_job(job, q)
mock_logger_info.assert_called_with('Result is kept for %s seconds', 10)
self.assertIn('Result is kept for %s seconds', [c[0][0] for c in mock_logger_info.call_args_list])
@mock.patch('rq.worker.logger.info')
def test_log_result_lifespan_false(self, mock_logger_info):
"""Check that log_result_lifespan False causes job lifespan to not be logged."""
q = Queue()
class TestWorker(Worker):
log_result_lifespan = False
w = TestWorker([q])
job = q.enqueue(say_hello, args=('Frank',), result_ttl=10)
w.perform_job(job, q)
self.assertNotIn('Result is kept for 10 seconds', [c[0][0] for c in mock_logger_info.call_args_list])
@mock.patch('rq.worker.logger.info')
def test_log_job_description_true(self, mock_logger_info):
"""Check that log_job_description True causes job lifespan to be logged."""
q = Queue()
w = Worker([q])
q.enqueue(say_hello, args=('Frank',), result_ttl=10)
w.dequeue_job_and_maintain_ttl(10)
self.assertIn("Frank", mock_logger_info.call_args[0][2])
@mock.patch('rq.worker.logger.info')
def test_log_job_description_false(self, mock_logger_info):
"""Check that log_job_description False causes job lifespan to not be logged."""
q = Queue()
w = Worker([q], log_job_description=False)
q.enqueue(say_hello, args=('Frank',), result_ttl=10)
w.dequeue_job_and_maintain_ttl(10)
self.assertNotIn("Frank", mock_logger_info.call_args[0][2])
def test_worker_version(self):
q = Queue()
w = Worker([q])
w.version = '0.0.0'
w.register_birth()
self.assertEqual(w.version, '0.0.0')
w.refresh()
self.assertEqual(w.version, '0.0.0')
# making sure that version is preserved when worker is retrieved by key
worker = Worker.find_by_key(w.key)
self.assertEqual(worker.version, '0.0.0')
def test_python_version(self):
python_version = sys.version
q = Queue()
w = Worker([q])
w.register_birth()
self.assertEqual(w.python_version, python_version)
# now patching version
python_version = 'X.Y.Z.final' # dummy version
self.assertNotEqual(python_version, sys.version) # otherwise tests are pointless
w2 = Worker([q])
w2.python_version = python_version
w2.register_birth()
self.assertEqual(w2.python_version, python_version)
# making sure that version is preserved when worker is retrieved by key
worker = Worker.find_by_key(w2.key)
self.assertEqual(worker.python_version, python_version)
def wait_and_kill_work_horse(pid, time_to_wait=0.0):
time.sleep(time_to_wait)
os.kill(pid, signal.SIGKILL)
class TimeoutTestCase:
def setUp(self):
        # we want tests to fail if signals are ignored and the worker remains
        # running, so set a signal to kill them after X seconds
self.killtimeout = 15
signal.signal(signal.SIGALRM, self._timeout)
signal.alarm(self.killtimeout)
def _timeout(self, signal, frame):
raise AssertionError(
"test still running after %i seconds, likely the worker wasn't shutdown correctly" % self.killtimeout
)
class WorkerShutdownTestCase(TimeoutTestCase, RQTestCase):
@slow
def test_idle_worker_warm_shutdown(self):
"""worker with no ongoing job receiving single SIGTERM signal and shutting down"""
w = Worker('foo')
self.assertFalse(w._stop_requested)
p = Process(target=kill_worker, args=(os.getpid(), False))
p.start()
w.work()
p.join(1)
self.assertFalse(w._stop_requested)
@slow
def test_working_worker_warm_shutdown(self):
"""worker with an ongoing job receiving single SIGTERM signal, allowing job to finish then shutting down"""
fooq = Queue('foo')
w = Worker(fooq)
sentinel_file = '/tmp/.rq_sentinel_warm'
fooq.enqueue(create_file_after_timeout, sentinel_file, 2)
self.assertFalse(w._stop_requested)
p = Process(target=kill_worker, args=(os.getpid(), False))
p.start()
w.work()
p.join(2)
self.assertFalse(p.is_alive())
self.assertTrue(w._stop_requested)
self.assertTrue(os.path.exists(sentinel_file))
self.assertIsNotNone(w.shutdown_requested_date)
self.assertEqual(type(w.shutdown_requested_date).__name__, 'datetime')
@slow
def test_working_worker_cold_shutdown(self):
"""Busy worker shuts down immediately on double SIGTERM signal"""
fooq = Queue('foo')
w = Worker(fooq)
sentinel_file = '/tmp/.rq_sentinel_cold'
fooq.enqueue(create_file_after_timeout, sentinel_file, 2)
self.assertFalse(w._stop_requested)
p = Process(target=kill_worker, args=(os.getpid(), True))
p.start()
self.assertRaises(SystemExit, w.work)
p.join(1)
self.assertTrue(w._stop_requested)
self.assertFalse(os.path.exists(sentinel_file))
shutdown_requested_date = w.shutdown_requested_date
self.assertIsNotNone(shutdown_requested_date)
self.assertEqual(type(shutdown_requested_date).__name__, 'datetime')
@slow
def test_work_horse_death_sets_job_failed(self):
"""worker with an ongoing job whose work horse dies unexpectadly (before
completing the job) should set the job's status to FAILED
"""
fooq = Queue('foo')
self.assertEqual(fooq.count, 0)
w = Worker(fooq)
sentinel_file = '/tmp/.rq_sentinel_work_horse_death'
if os.path.exists(sentinel_file):
os.remove(sentinel_file)
fooq.enqueue(create_file_after_timeout, sentinel_file, 100)
job, queue = w.dequeue_job_and_maintain_ttl(5)
w.fork_work_horse(job, queue)
p = Process(target=wait_and_kill_work_horse, args=(w._horse_pid, 0.5))
p.start()
w.monitor_work_horse(job, queue)
job_status = job.get_status()
p.join(1)
self.assertEqual(job_status, JobStatus.FAILED)
failed_job_registry = FailedJobRegistry(queue=fooq)
self.assertTrue(job in failed_job_registry)
self.assertEqual(fooq.count, 0)
@slow
def test_work_horse_force_death(self):
"""Simulate a frozen worker that doesn't observe the timeout properly.
Fake it by artificially setting the timeout of the parent process to
something much smaller after the process is already forked.
"""
fooq = Queue('foo')
self.assertEqual(fooq.count, 0)
w = Worker(fooq)
sentinel_file = '/tmp/.rq_sentinel_work_horse_death'
if os.path.exists(sentinel_file):
os.remove(sentinel_file)
fooq.enqueue(launch_process_within_worker_and_store_pid, sentinel_file, 100)
job, queue = w.dequeue_job_and_maintain_ttl(5)
w.fork_work_horse(job, queue)
job.timeout = 5
w.job_monitoring_interval = 1
now = utcnow()
time.sleep(1)
with open(sentinel_file) as f:
subprocess_pid = int(f.read().strip())
self.assertTrue(psutil.pid_exists(subprocess_pid))
w.monitor_work_horse(job, queue)
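        # The monitor should notice the expired job timeout and kill the horse
        # well before the 100-second job would have completed (bounded by total_time below).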
fudge_factor = 1
total_time = w.job_monitoring_interval + 65 + fudge_factor
self.assertTrue((utcnow() - now).total_seconds() < total_time)
self.assertEqual(job.get_status(), JobStatus.FAILED)
failed_job_registry = FailedJobRegistry(queue=fooq)
self.assertTrue(job in failed_job_registry)
self.assertEqual(fooq.count, 0)
self.assertFalse(psutil.pid_exists(subprocess_pid))
def schedule_access_self():
q = Queue('default', connection=get_current_connection())
q.enqueue(access_self)
@pytest.mark.skipif(sys.platform == 'darwin', reason='Fails on OS X')
class TestWorkerSubprocess(RQTestCase):
def setUp(self):
super(TestWorkerSubprocess, self).setUp()
db_num = self.testconn.connection_pool.connection_kwargs['db']
self.redis_url = 'redis://127.0.0.1:6379/%d' % db_num
def test_run_empty_queue(self):
"""Run the worker in its own process with an empty queue"""
subprocess.check_call(['rqworker', '-u', self.redis_url, '-b'])
def test_run_access_self(self):
"""Schedule a job, then run the worker as subprocess"""
q = Queue()
job = q.enqueue(access_self)
subprocess.check_call(['rqworker', '-u', self.redis_url, '-b'])
registry = FinishedJobRegistry(queue=q)
self.assertTrue(job in registry)
assert q.count == 0
@skipIf('pypy' in sys.version.lower(), 'often times out with pypy')
def test_run_scheduled_access_self(self):
"""Schedule a job that schedules a job, then run the worker as subprocess"""
q = Queue()
job = q.enqueue(schedule_access_self)
subprocess.check_call(['rqworker', '-u', self.redis_url, '-b'])
registry = FinishedJobRegistry(queue=q)
self.assertTrue(job in registry)
assert q.count == 0
@pytest.mark.skipif(sys.platform == 'darwin', reason='requires Linux signals')
@skipIf('pypy' in sys.version.lower(), 'these tests often fail on pypy')
class HerokuWorkerShutdownTestCase(TimeoutTestCase, RQTestCase):
def setUp(self):
super(HerokuWorkerShutdownTestCase, self).setUp()
self.sandbox = '/tmp/rq_shutdown/'
os.makedirs(self.sandbox)
def tearDown(self):
shutil.rmtree(self.sandbox, ignore_errors=True)
@slow
def test_immediate_shutdown(self):
"""Heroku work horse shutdown with immediate (0 second) kill"""
p = Process(target=run_dummy_heroku_worker, args=(self.sandbox, 0))
p.start()
time.sleep(0.5)
os.kill(p.pid, signal.SIGRTMIN)
p.join(2)
self.assertEqual(p.exitcode, 1)
self.assertTrue(os.path.exists(os.path.join(self.sandbox, 'started')))
self.assertFalse(os.path.exists(os.path.join(self.sandbox, 'finished')))
@slow
def test_1_sec_shutdown(self):
"""Heroku work horse shutdown with 1 second kill"""
p = Process(target=run_dummy_heroku_worker, args=(self.sandbox, 1))
p.start()
time.sleep(0.5)
os.kill(p.pid, signal.SIGRTMIN)
time.sleep(0.1)
self.assertEqual(p.exitcode, None)
p.join(2)
self.assertEqual(p.exitcode, 1)
self.assertTrue(os.path.exists(os.path.join(self.sandbox, 'started')))
self.assertFalse(os.path.exists(os.path.join(self.sandbox, 'finished')))
@slow
def test_shutdown_double_sigrtmin(self):
"""Heroku work horse shutdown with long delay but SIGRTMIN sent twice"""
p = Process(target=run_dummy_heroku_worker, args=(self.sandbox, 10))
p.start()
time.sleep(0.5)
os.kill(p.pid, signal.SIGRTMIN)
        # we have to wait a short while, otherwise the second signal won't be processed.
time.sleep(0.1)
os.kill(p.pid, signal.SIGRTMIN)
p.join(2)
self.assertEqual(p.exitcode, 1)
self.assertTrue(os.path.exists(os.path.join(self.sandbox, 'started')))
self.assertFalse(os.path.exists(os.path.join(self.sandbox, 'finished')))
@mock.patch('rq.worker.logger.info')
def test_handle_shutdown_request(self, mock_logger_info):
"""Mutate HerokuWorker so _horse_pid refers to an artificial process
and test handle_warm_shutdown_request"""
w = HerokuWorker('foo')
path = os.path.join(self.sandbox, 'shouldnt_exist')
p = Process(target=create_file_after_timeout_and_setsid, args=(path, 2))
p.start()
self.assertEqual(p.exitcode, None)
time.sleep(0.1)
w._horse_pid = p.pid
w.handle_warm_shutdown_request()
p.join(2)
# would expect p.exitcode to be -34
self.assertEqual(p.exitcode, -34)
self.assertFalse(os.path.exists(path))
mock_logger_info.assert_called_with('Killed horse pid %s', p.pid)
def test_handle_shutdown_request_no_horse(self):
"""Mutate HerokuWorker so _horse_pid refers to non existent process
and test handle_warm_shutdown_request"""
w = HerokuWorker('foo')
w._horse_pid = 19999
w.handle_warm_shutdown_request()
class TestExceptionHandlerMessageEncoding(RQTestCase):
def setUp(self):
super(TestExceptionHandlerMessageEncoding, self).setUp()
self.worker = Worker("foo")
self.worker._exc_handlers = []
# Mimic how exception info is actually passed forwards
try:
raise Exception(u"💪")
except Exception:
self.exc_info = sys.exc_info()
def test_handle_exception_handles_non_ascii_in_exception_message(self):
"""worker.handle_exception doesn't crash on non-ascii in exception message."""
self.worker.handle_exception(Mock(), *self.exc_info)
|
py | 1a5113b042987c07b95c02f3760f3492c4fede77 | import os
import requests
import codecs
import json
import hashlib
import io
from pathlib import Path
import pandas as pd
from bs4 import BeautifulSoup as bs
from bs4.element import Tag
from sklearn.model_selection import train_test_split
from finetune.datasets import Dataset
from finetune import SequenceLabeler
from finetune.utils import finetune_to_indico_sequence
from finetune.metrics import annotation_report
XML_PATH = os.path.join("Data", "Sequence", "reuters.xml")
DATA_PATH = os.path.join("Data", "Sequence", "reuters.json")
CHECKSUM = "a79cab99ed30b7932d46711ef8d662e0"
class Reuters(Dataset):
def __init__(self, filename=None, **kwargs):
super().__init__(filename=(filename or DATA_PATH), **kwargs)
@property
def md5(self):
return CHECKSUM
def download(self):
url = "https://raw.githubusercontent.com/dice-group/n3-collection/master/reuters.xml"
r = requests.get(url)
with open(XML_PATH, 'wb') as fd:
fd.write(r.content)
fd = open(XML_PATH)
soup = bs(fd, "html5lib")
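        # Each <document> element holds a token sequence; children tagged
        # <namedentityintext> are labelled "Named Entity", everything else "<PAD>".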
docs = []
docs_labels = []
for elem in soup.find_all("document"):
texts = []
labels = []
# Loop through each child of the element under "textwithnamedentities"
for c in elem.find("textwithnamedentities").children:
if type(c) == Tag:
if c.name == "namedentityintext":
label = "Named Entity" # part of a named entity
else:
label = "<PAD>" # irrelevant word
texts.append(c.text)
labels.append(label)
docs.append(texts)
docs_labels.append(labels)
fd.close()
os.remove(XML_PATH)
raw_texts = ["".join(doc) for doc in docs]
texts, annotations = finetune_to_indico_sequence(raw_texts, docs, docs_labels)
df = pd.DataFrame({'texts': texts, 'annotations': [json.dumps(annotation) for annotation in annotations]})
df.to_csv(DATA_PATH)
if __name__ == "__main__":
dataset = Reuters().dataframe
dataset['annotations'] = [json.loads(annotation) for annotation in dataset['annotations']]
trainX, testX, trainY, testY = train_test_split(
dataset.texts.values,
dataset.annotations.values,
test_size=0.3,
random_state=42
)
model = SequenceLabeler(batch_size=2, val_size=0.)
model.fit(trainX, trainY)
predictions = model.predict(testX)
print(annotation_report(testY, predictions))
|
py | 1a5113ba11843ac984977bc870429b0198698f44 | import shutil
from rest_framework import status
from rest_framework.reverse import reverse
from rest_framework.test import APITestCase
from restapi.tests.util import create_test_data
from users.models import User
class ListRetrieveModelTests(APITestCase):
"""
Simple tests for the basic list and retrieve endpoints. Note that when filtering the result, the list endpoints
should only show the objects that the user has permission for. If the user does not have permission for an object,
the retrieve endpoints should return an error. Permission is given through ownership in this case.
"""
@classmethod
def setUpTestData(cls):
credentials = {"email": "[email protected]", "password": "test"}
cls.user = User.objects.create_user(email=credentials["email"], password=credentials["password"])
create_test_data(cls.user)
def setUp(self):
self.client.force_authenticate(user=self.user)
@classmethod
def tearDownClass(cls):
super(ListRetrieveModelTests, cls).tearDownClass()
shutil.rmtree("test_datalake")
def test_list_organization_groups(self):
response = self.client.get(reverse("organizationgroup-list"))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "All")
self.assertContains(response, "Admin")
def test_list_object_permissions(self):
response = self.client.get(reverse("objectpermission-list"))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "connection")
self.assertContains(response, "dataset")
def test_list_connections(self):
"""Should only show the connections that the user has permission for."""
response = self.client.get(reverse("connection-list"))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "connection 1")
self.assertNotContains(response, "connection 2")
def test_list_postgres_datastores(self):
"""Should only show Postgres datastores related to the connections that the user has permission for."""
response = self.client.get(reverse("postgresdatastore-list"))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "postgres 1")
self.assertNotContains(response, "postgres 2")
def test_list_azure_datastores(self):
"""Should only show Azure datastores related to the connections that the user has permission for."""
response = self.client.get(reverse("azuredatastore-list"))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "azure 1")
self.assertNotContains(response, "azure 2")
def test_list_datasets(self):
"""Should only show datasets that the user has permission for."""
response = self.client.get(reverse("dataset-list"))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "dataset 1")
self.assertNotContains(response, "dataset 2")
def test_list_dataset_runs(self):
"""Should only show dataset runs related to the datasets that the user has permission for."""
response = self.client.get(reverse("datasetrun-list"))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "938c276c-b2ab-4410-9142-af7d1054bfc2")
self.assertNotContains(response, "71672ac1-7038-4ed9-a8b6-81794a8d239f")
def test_list_notes(self):
"""Should only show notes related to the datasets that the user has permission for."""
response = self.client.get(reverse("note-list"))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "note 1")
self.assertNotContains(response, "note 2")
def test_list_jobs(self):
"""Should only show jobs that the user has permission for."""
response = self.client.get(reverse("job-list"))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "job 1")
self.assertNotContains(response, "job 2")
def test_list_job_runs(self):
"""Should only show job runs related to the jobs that the user has permission for."""
response = self.client.get(reverse("jobrun-list"))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "65cf3c6c-dabd-4256-b068-d717de40375d")
self.assertNotContains(response, "99dd1a79-e4f0-4311-8d79-44b5ce5402e5")
def test_list_contacts(self):
response = self.client.get(reverse("contact-list"))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "contact 1")
self.assertContains(response, "contact 2")
def test_list_users_no_admin(self):
"""If the user is not an admin user, the user list should be forbidden."""
response = self.client.get(reverse("user-list"))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_list_users_admin(self):
# Change the logged-in user to an admin user.
self.client.logout()
admin = User.objects.create_superuser(email="[email protected]", password="test")
self.client.force_authenticate(user=admin)
response = self.client.get(reverse("user-list"))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "[email protected]")
self.assertContains(response, "[email protected]")
def test_retrieve_organization(self):
response = self.client.get(reverse("organization-detail", args=[1]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "organization 1")
def test_retrieve_organization_group(self):
response = self.client.get(reverse("organizationgroup-detail", args=[1]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "All")
def test_retrieve_object_permission(self):
response = self.client.get(reverse("objectpermission-detail", args=[1]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "connection")
self.assertContains(response, "1")
def test_retrieve_connection(self):
response = self.client.get(reverse("connection-detail", args=[1]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "connection 1")
def test_retrieve_connection_no_permission(self):
response = self.client.get(reverse("connection-detail", args=[2]))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.data["detail"], "You do not have permission to perform this action.")
def test_retrieve_postgres_datastore(self):
response = self.client.get(reverse("postgresdatastore-detail", args=[1]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "postgres 1")
def test_retrieve_postgres_datastore_no_permission(self):
response = self.client.get(reverse("postgresdatastore-detail", args=[2]))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.data["detail"], "You do not have permission to perform this action.")
def test_retrieve_azure_datastore(self):
response = self.client.get(reverse("azuredatastore-detail", args=[1]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "azure 1")
def test_retrieve_azure_datastore_no_permission(self):
response = self.client.get(reverse("azuredatastore-detail", args=[2]))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.data["detail"], "You do not have permission to perform this action.")
def test_retrieve_dataset(self):
response = self.client.get(reverse("dataset-detail", args=[1]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "dataset 1")
def test_retrieve_dataset_no_permission(self):
response = self.client.get(reverse("dataset-detail", args=[2]))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.data["detail"], "You do not have permission to perform this action.")
def test_retrieve_dataset_run(self):
response = self.client.get(reverse("datasetrun-detail", args=[1]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "938c276c-b2ab-4410-9142-af7d1054bfc2")
def test_retrieve_dataset_run_no_permission(self):
response = self.client.get(reverse("datasetrun-detail", args=[2]))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.data["detail"], "You do not have permission to perform this action.")
def test_retrieve_note(self):
response = self.client.get(reverse("note-detail", args=[1]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "note 1")
def test_retrieve_note_no_permission(self):
response = self.client.get(reverse("note-detail", args=[2]))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.data["detail"], "You do not have permission to perform this action.")
def test_retrieve_job(self):
response = self.client.get(reverse("job-detail", args=[1]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "job 1")
def test_retrieve_job_no_permission(self):
response = self.client.get(reverse("job-detail", args=[2]))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.data["detail"], "You do not have permission to perform this action.")
def test_retrieve_job_run(self):
response = self.client.get(reverse("jobrun-detail", args=[1]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "65cf3c6c-dabd-4256-b068-d717de40375d")
def test_retrieve_job_run_no_permission(self):
response = self.client.get(reverse("job-detail", args=[2]))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.data["detail"], "You do not have permission to perform this action.")
def test_retrieve_contact(self):
response = self.client.get(reverse("contact-detail", args=[1]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "contact 1")
def test_retrieve_settings(self):
response = self.client.get(reverse("settings-detail", args=[1]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "UTC")
    def test_retrieve_user_no_admin(self):
response = self.client.get(reverse("user-detail", args=[2]))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.data["detail"], "You do not have permission to perform this action.")
def test_retrieve_user_admin(self):
# Change the logged-in user to an admin user.
self.client.logout()
admin = User.objects.create_superuser(email="[email protected]", password="test")
self.client.force_authenticate(user=admin)
response = self.client.get(reverse("user-detail", args=[self.user.id]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, "[email protected]")
|
py | 1a5113d81416e9f13e67baf7066048f49f04c800 | # coding: utf-8
"""
RadioManager
RadioManager # noqa: E501
OpenAPI spec version: 2.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import radiomanager_sdk
from radiomanager_sdk.models.presenter import Presenter # noqa: E501
from radiomanager_sdk.rest import ApiException
class TestPresenter(unittest.TestCase):
"""Presenter unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testPresenter(self):
"""Test Presenter"""
# FIXME: construct object with mandatory attributes with example values
# model = radiomanager_sdk.models.presenter.Presenter() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
py | 1a51142c0ed535cb046b3a7f45dbe93fd1a04aa8 | """Place multiple rectangles with the mouse."""
import pygame
from pygame.locals import *
RED = (255, 0, 0)
BLUE = (0, 0, 255)
GRAY = (127, 127, 127)
pygame.init()
screen = pygame.display.set_mode((640, 240))
start = (0, 0)
size = (0, 0)
drawing = False
rect_list = []
running = True
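# Main loop: start a rectangle on mouse-down, track its size while dragging,
# and commit it to rect_list on mouse-up.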
while running:
for event in pygame.event.get():
if event.type == QUIT:
running = False
elif event.type == MOUSEBUTTONDOWN:
start = event.pos
size = 0, 0
drawing = True
elif event.type == MOUSEBUTTONUP:
end = event.pos
size = end[0]-start[0], end[1]-start[1]
rect = pygame.Rect(start, size)
rect_list.append(rect)
drawing = False
elif event.type == MOUSEMOTION and drawing:
end = event.pos
size = end[0]-start[0], end[1]-start[1]
screen.fill(GRAY)
for rect in rect_list:
pygame.draw.rect(screen, RED, rect, 3)
pygame.draw.rect(screen, BLUE, (start, size), 1)
pygame.display.update()
pygame.quit() |
py | 1a51150fc158c5b2e99939ec53200a1f5bb05568 | import asyncio
import inspect
import click
from fastapi import FastAPI, APIRouter
from starlette.middleware.sessions import SessionMiddleware
try:
from importlib.metadata import entry_points, version
except ImportError:
from importlib_metadata import entry_points, version
from . import logger, config
from .models import db
router = APIRouter()
login_router = APIRouter()
def get_app():
app = FastAPI(title="FenceX", version=version("fencex"), debug=config.DEBUG)
app.idps = {}
db.init_app(app)
load_modules(app)
return app
class ClientDisconnectMiddleware:
def __init__(self, app):
self._app = app
async def __call__(self, scope, receive, send):
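        # Run the wrapped ASGI app as a task so a watcher (installed via
        # scope["add_close_watcher"]) can cancel it if the client disconnects early.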
loop = asyncio.get_running_loop()
rv = loop.create_task(self._app(scope, receive, send))
waiter = None
cancelled = False
if scope["type"] == "http":
def add_close_watcher():
nonlocal waiter
async def wait_closed():
nonlocal cancelled
while True:
message = await receive()
if message["type"] == "http.disconnect":
if not rv.done():
cancelled = True
rv.cancel()
break
waiter = loop.create_task(wait_closed())
scope["add_close_watcher"] = add_close_watcher
try:
await rv
except asyncio.CancelledError:
if not cancelled:
raise
if waiter and not waiter.done():
waiter.cancel()
def load_modules(app=None):
if app:
app.add_middleware(ClientDisconnectMiddleware)
app.add_middleware(SessionMiddleware, secret_key=config.SESSION_SECRET)
all_args = dict(app=app, router=router, login_router=login_router)
logger.info("Start to load modules.")
for ep in entry_points()["fencex.modules"]:
mod = ep.load()
if app:
init_app = getattr(mod, "init_app", None)
if init_app:
args = []
for name in inspect.getfullargspec(init_app).args:
args.append(all_args[name])
init_app(*args)
msg = "Loaded module: "
logger.info(
msg + "%s",
ep.name,
extra={"color_message": msg + click.style("%s", fg="cyan")},
)
if app:
router.include_router(login_router, prefix="/login")
app.include_router(router, prefix=config.URL_PREFIX if config.DEBUG else "")
app.all_paths = set([r.path for r in app.routes])
@router.get("/version")
def get_version():
return version("fencex")
@router.get("/_status")
async def get_status():
now = await db.scalar("SELECT now()")
return dict(status="OK", timestamp=now)
|
py | 1a511575e1c31c3f1494d2cada5b244cee9d3e0f | import rclpy
from rclpy.node import Node
from std_msgs.msg import String
class TestSubscriber(Node):
def __init__(self):
super().__init__('test_subscriber')
self.subscription = self.create_subscription(
String,
'websock_echo',
self.listener_callback,
10)
        self.subscription  # prevent unused variable warning
def listener_callback(self, msg):
self.get_logger().info('Received from websocket bridge: "%s"' % msg.data)
def main(args=None):
rclpy.init(args=args)
test_subscriber = TestSubscriber()
rclpy.spin(test_subscriber)
test_subscriber.destroy_node()
rclpy.shutdown()
if __name__ == '__main__':
main() |
py | 1a511590723b2ac9651eb033e97068183bacc314 | import typing
from PyQt5.QtCore import QAbstractListModel, QModelIndex, Qt, QMimeData
from PyQt5.QtGui import QFont
from PyQt5.QtWidgets import QFileDialog
from src.expression import calculateExpr, isValidExpression, toEditableExpr, fromEditableExpr
from .utils import saveHistoryToFile, addExpressionToHistoryCache, clearHistoryCache
ExpressionRole = Qt.UserRole
ResultRole = Qt.UserRole + 1
class HistoryListModel(QAbstractListModel):
def __init__(self, parent=None) -> None:
super(HistoryListModel, self).__init__(parent)
self._expressions = []
self._font = None
self.need_clear_history = True
def addExpression(self, expr: str, index: int = 0, save_to_cache: bool = True) -> None:
if not isValidExpression(expr):
return
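        # Skip the expression if it duplicates the most recent history entry.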
if self.rowCount(QModelIndex()):
latest_expr_index = self.index(0, 0)
latest_expr = self.data(latest_expr_index, ExpressionRole)
if latest_expr == expr:
return
if self.need_clear_history and self._expressions:
self.need_clear_history = False
self.beginResetModel()
self._expressions.insert(index, expr)
self.endResetModel()
if save_to_cache:
if self.need_clear_history:
clearHistoryCache()
self.need_clear_history = False
addExpressionToHistoryCache(expr)
def addExpressions(self, expressions: list) -> None:
for expression in expressions:
self.addExpression(expression, save_to_cache=False)
def rowCount(self, parent: QModelIndex) -> int:
return len(self._expressions)
def data(self, index: QModelIndex, role: int) -> typing.Any:
if not index.isValid():
return None
expression = self._expressions[index.row()]
if role == Qt.DisplayRole:
return f'{expression} = {calculateExpr(expression)}'
elif role == Qt.FontRole:
return self._font
elif role == Qt.EditRole:
return expression
elif role == ExpressionRole:
return expression
elif role == ResultRole:
return calculateExpr(expression)
def clear(self) -> None:
self.beginResetModel()
self._expressions.clear()
clearHistoryCache()
self.endResetModel()
def saveHistory(self) -> None:
if self.rowCount(QModelIndex()) == 0:
return
file_path, _ = QFileDialog.getSaveFileName(filter='*.txt')
if not file_path:
return
expressions = self.equations()
saveHistoryToFile(expressions, file_path)
def equations(self) -> typing.List[str]:
equations_list = []
for expression in self._expressions:
equations_list.append(f'{expression} = {calculateExpr(expression)}')
return equations_list
def insertRows(self, row: int, count: int, parent: QModelIndex = ...) -> bool:
self.beginInsertRows(parent, row, row + count - 1)
for _ in range(count):
self._expressions.insert(row, None)
self.endInsertRows()
return True
def removeRows(self, row: int, count: int, parent: QModelIndex = ...) -> bool:
self.beginRemoveRows(parent, row, row + count - 1)
del self._expressions[row:row + count]
self.endRemoveRows()
return True
def setData(self, index: QModelIndex, value: typing.Any, role: int = ...) -> bool:
if not index.isValid():
return False
value = fromEditableExpr(value.lower())
if not isValidExpression(value):
return False
if role == Qt.EditRole:
self._expressions[index.row()] = value
self.dataChanged.emit(index, index)
return True
return False
def flags(self, index: QModelIndex) -> int:
if index.isValid():
return Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsDragEnabled | Qt.ItemIsEditable
else:
return Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsDropEnabled
def supportedDropActions(self) -> int:
return Qt.MoveAction
def canDropMimeData(self, data: QMimeData, action: int, row: int, column: int, parent: QModelIndex) -> bool:
return action == Qt.MoveAction and data.hasText()
def mimeData(self, indexes: typing.List[QModelIndex]) -> QMimeData:
mime_data = QMimeData()
expressions = []
for index in indexes:
if index.isValid():
text = toEditableExpr(self.data(index, ExpressionRole))
expressions.append(text)
mime_data.setText('\n'.join(expressions))
return mime_data
def dropMimeData(self, data: QMimeData, action: int, row: int, column: int, parent: QModelIndex) -> bool:
if not self.canDropMimeData(data, action, row, column, parent):
return False
data = data.text().split('\n')
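        # Re-insert each dropped expression at the target row, appending to the end when row < 0.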
for value in data:
if row < 0:
row = self.rowCount(QModelIndex())
self.insertRow(row, QModelIndex())
else:
self.insertRow(row, QModelIndex())
index = self.index(row, 0, QModelIndex())
text = fromEditableExpr(value.lower())
self.setData(index, text, Qt.EditRole)
row += 1
return True
|
py | 1a5115fed301ed46bee59874e1c8d6d8e6a52c69 | import pytest
from numpy.testing import assert_allclose
from sklearn import __version__
from sklearn.exceptions import NotFittedError
from pysindy import FourierLibrary
from pysindy import SINDy
from pysindy import STLSQ
from pysindy.deeptime import SINDyEstimator
from pysindy.deeptime import SINDyModel
def test_estimator_has_model(data_lorenz):
x, t = data_lorenz
estimator = SINDyEstimator()
assert not estimator.has_model
estimator.fit(x, t=t)
assert estimator.has_model
def test_estimator_fetch_model(data_lorenz):
x, t = data_lorenz
estimator = SINDyEstimator()
assert estimator.fetch_model() is None
estimator.fit(x, t=t)
assert isinstance(estimator.fetch_model(), SINDyModel)
def test_model_sindy_equivalence(data_lorenz_c_1d):
x, t, u, _ = data_lorenz_c_1d
model = SINDyEstimator().fit(x, t=t, u=u).fetch_model()
sindy_model = SINDy().fit(x, t=t, u=u)
assert_allclose(model.coefficients(), sindy_model.coefficients())
print(sindy_model.n_features_in_)
if float(__version__[:3]) >= 1.0:
assert model.n_features_in_ == sindy_model.n_features_in_
else:
assert model.n_input_features_ == sindy_model.n_input_features_
assert model.n_output_features_ == sindy_model.n_output_features_
assert model.n_control_features_ == sindy_model.n_control_features_
def test_model_has_sindy_methods(data_lorenz):
x, t = data_lorenz
model = SINDyEstimator().fit(x, t=t).fetch_model()
assert hasattr(model, "predict")
assert hasattr(model, "simulate")
assert hasattr(model, "score")
assert hasattr(model, "print")
assert hasattr(model, "equations")
def test_model_unfitted_library(data_derivative_2d):
x, x_dot = data_derivative_2d
optimizer = STLSQ().fit(x, x_dot)
library = FourierLibrary()
with pytest.raises(NotFittedError):
SINDyModel(optimizer, library)
def test_model_unfitted_optimizer(data_lorenz):
x, t = data_lorenz
optimizer = STLSQ()
library = FourierLibrary().fit(x)
with pytest.raises(NotFittedError):
SINDyModel(optimizer, library)
def test_model_copy(data_lorenz):
x, t = data_lorenz
model = SINDyEstimator().fit(x, t=t).fetch_model()
model_copy = model.copy()
assert model is not model_copy
|
py | 1a5116c133451d5295e5a8f88d62c84ff963fd04 | #!/usr/bin/env python3
#
# Constants for the generation of patches for CBMC proofs.
#
# Copyright (C) 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
PATCHES_DIR = os.path.dirname(os.path.abspath(__file__))
shared_prefix = [
"."
]
shared_prefix_port = [
"..", "..", "FreeRTOS-Kernel", "portable", "MSVC-MingW"
]
absolute_prefix = os.path.abspath(os.path.join(PATCHES_DIR, *shared_prefix))
absolute_prefix_port = os.path.abspath(os.path.join(PATCHES_DIR, *shared_prefix_port))
HEADERS = [os.path.join(absolute_prefix, "FreeRTOSConfig.h"),
os.path.join(absolute_prefix, "FreeRTOSIPConfig.h"),
os.path.join(absolute_prefix_port, "portmacro.h")]
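# Illustrative sanity check (hypothetical, not part of the proof tooling): the
# paths above should point at real headers, so a quick pass can flag a wrong
# relative prefix before any patch generation is attempted.
if __name__ == "__main__":
    for header in HEADERS:
        marker = "ok" if os.path.isfile(header) else "MISSING"
        print("{:7s} {}".format(marker, header))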
|
py | 1a5117d2b11deca734899a38d08cace51d3592d2 | # Copyright 2016 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generate the sprites tfrecords from raw_images."""
import os
import random
import re
import sys
import numpy as np
import scipy.misc
import tensorflow as tf
tf.flags.DEFINE_string('data_filepattern', '', 'The raw images.')
tf.flags.DEFINE_string('out_file', '',
'File name for the tfrecord output.')
def _read_images():
"""Read images from image files into data structure."""
sprites = dict()
files = tf.gfile.Glob(tf.flags.FLAGS.data_filepattern)
for f in files:
image = scipy.misc.imread(f)
m = re.search('image_([0-9]+)_([0-9]+)_([0-9]+).jpg', os.path.basename(f))
if m.group(1) not in sprites:
sprites[m.group(1)] = dict()
character = sprites[m.group(1)]
if m.group(2) not in character:
character[m.group(2)] = dict()
pose = character[m.group(2)]
pose[int(m.group(3))] = image
return sprites
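# The glob above expects file names of the form image_<character>_<pose>_<frame>.jpg,
# e.g. image_12_3_7.jpg is frame 7 of pose 3 for character 12.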
def _images_to_example(image, image2):
"""Convert 2 consecutive image to a SequenceExample."""
example = tf.SequenceExample()
feature_list = example.feature_lists.feature_list['moving_objs']
feature = feature_list.feature.add()
feature.float_list.value.extend(np.reshape(image, [-1]).tolist())
feature = feature_list.feature.add()
feature.float_list.value.extend(np.reshape(image2, [-1]).tolist())
return example
def generate_input():
"""Generate tfrecords."""
sprites = _read_images()
sys.stderr.write('Finish reading images.\n')
train_writer = tf.python_io.TFRecordWriter(
tf.flags.FLAGS.out_file.replace('sprites', 'sprites_train'))
test_writer = tf.python_io.TFRecordWriter(
tf.flags.FLAGS.out_file.replace('sprites', 'sprites_test'))
train_examples = []
test_examples = []
for i in sprites:
if int(i) < 24:
examples = test_examples
else:
examples = train_examples
character = sprites[i]
for j in character.keys():
pose = character[j]
for k in xrange(1, len(pose), 1):
image = pose[k]
image2 = pose[k+1]
examples.append(_images_to_example(image, image2))
sys.stderr.write('Finish generating examples: %d, %d.\n' %
(len(train_examples), len(test_examples)))
random.shuffle(train_examples)
_ = [train_writer.write(ex.SerializeToString()) for ex in train_examples]
_ = [test_writer.write(ex.SerializeToString()) for ex in test_examples]
def main(_):
generate_input()
if __name__ == '__main__':
tf.app.run()
|
py | 1a5118033dbd48ec8edc46dd9ae37e5f60fe64a8 | from __future__ import unicode_literals
import logging
import traceback
from django.core.paginator import Paginator
from django.http import HttpResponseServerError, Http404
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from django.template.loader import render_to_string
from django.utils.translation import ugettext as _
from django.views.generic.base import TemplateView, View
from djblets.siteconfig.models import SiteConfiguration
from reviewboard.diffviewer.diffutils import (get_diff_files,
populate_diff_chunks,
get_enable_highlighting)
from reviewboard.diffviewer.errors import UserVisibleError
from reviewboard.diffviewer.models import DiffSet, FileDiff
from reviewboard.diffviewer.renderers import get_diff_renderer
def get_collapse_diff(request):
if request.GET.get('expand', False):
return False
elif request.GET.get('collapse', False):
return True
elif 'collapsediffs' in request.COOKIES:
return (request.COOKIES['collapsediffs'] == "True")
else:
return True
class DiffViewerView(TemplateView):
"""Renders the main diff viewer.
This renders the diff viewer for a given DiffSet (or an interdiff
between two DiffSets). It handles loading information on the diffs,
generating the side-by-side view, and pagination.
The view expects the following parameters to be provided:
* diffset
- The DiffSet to render.
The following may also be provided:
* interdiffset
- A DiffSet object representing the other end of an interdiff range.
The following query parameters can be passed in on the URL:
* ?expand=1
- Expands all files within the diff viewer.
* ?collapse=1
- Collapses all files within the diff viewer, showing only
modifications and a few lines of context.
* ?file=<id>
- Renders only the FileDiff represented by the provided ID.
* ?page=<pagenum>
- Renders diffs found on the given page number, if the diff viewer
is paginated.
"""
template_name = 'diffviewer/view_diff.html'
fragment_error_template_name = 'diffviewer/diff_fragment_error.html'
def get(self, request, diffset, interdiffset=None, *args, **kwargs):
"""Handles GET requests for this view.
This will render the full diff viewer based on the provided
parameters.
The full rendering time will be logged.
If there's any exception thrown during rendering, an error page
with a traceback will be returned instead.
"""
self.collapse_diffs = get_collapse_diff(request)
if interdiffset:
logging.debug('Generating diff viewer page for interdiffset '
'ids %s-%s',
diffset.id, interdiffset.id, request=request)
else:
logging.debug('Generating diff viewer page for filediff id %s',
diffset.id, request=request)
try:
response = super(DiffViewerView, self).get(
request, diffset=diffset, interdiffset=interdiffset,
*args, **kwargs)
if interdiffset:
logging.debug('Done generating diff viewer page for '
'interdiffset ids %s-%s',
diffset.id, interdiffset.id, request=request)
else:
logging.debug('Done generating diff viewer page for filediff '
'id %s',
diffset.id, request=request)
return response
except Exception as e:
return exception_traceback(request, e, self.template_name)
def render_to_response(self, *args, **kwargs):
"""Renders the page to an HttpResponse.
This renders the diff viewer page, based on the context data
generated, and sets cookies before returning an HttpResponse to
the client.
"""
response = super(DiffViewerView, self).render_to_response(*args,
**kwargs)
response.set_cookie('collapsediffs', self.collapse_diffs)
return response
def get_context_data(self, diffset, interdiffset, extra_context={},
**kwargs):
"""Calculates and returns data used for rendering the diff viewer.
This handles all the hard work of generating the data backing the
side-by-side diff, handling pagination, and more. The data is
collected into a context dictionary and returned for rendering.
"""
files = get_diff_files(diffset, None, interdiffset,
request=self.request)
# Break the list of files into pages
siteconfig = SiteConfiguration.objects.get_current()
paginator = Paginator(files,
siteconfig.get('diffviewer_paginate_by'),
siteconfig.get('diffviewer_paginate_orphans'))
page_num = int(self.request.GET.get('page', 1))
if self.request.GET.get('file', False):
file_id = int(self.request.GET['file'])
for i, f in enumerate(files):
if f['filediff'].pk == file_id:
page_num = i // paginator.per_page + 1
if page_num > paginator.num_pages:
page_num = paginator.num_pages
break
page = paginator.page(page_num)
diff_context = {
'revision': {
'revision': diffset.revision,
'is_interdiff': interdiffset is not None,
'interdiff_revision': (interdiffset.revision
if interdiffset else None),
},
'pagination': {
'is_paginated': page.has_other_pages(),
'current_page': page.number,
'pages': paginator.num_pages,
'page_numbers': paginator.page_range,
'has_next': page.has_next(),
'has_previous': page.has_previous(),
},
}
if page.has_next():
diff_context['pagination']['next_page'] = page.next_page_number()
if page.has_previous():
diff_context['pagination']['previous_page'] = \
page.previous_page_number()
context = dict({
'diff_context': diff_context,
'diffset': diffset,
'interdiffset': interdiffset,
'diffset_pair': (diffset, interdiffset),
'files': page.object_list,
'collapseall': self.collapse_diffs,
}, **extra_context)
return context
class DiffFragmentView(View):
"""Renders a fragment from a file in the diff viewer.
Based on the diffset data and other arguments provided, this will render
a fragment from a file in a diff. This may be the entire file, or some
chunk within.
The view expects the following parameters to be provided:
* diffset_or_id
- A DiffSet object or the ID for one.
* filediff_id
- The ID of a FileDiff within the DiffSet.
The following may also be provided:
* interdiffset_or_id
- A DiffSet object or the ID for one representing the other end of
an interdiff range.
* chunkindex
- The index (0-based) of the chunk to render. If left out, the
entire file will be rendered.
The caller may also pass ``?lines-of-context=`` as a query parameter to
the URL to indicate how many lines of context should be provided around
the chunk.
"""
template_name = 'diffviewer/diff_file_fragment.html'
error_template_name = 'diffviewer/diff_fragment_error.html'
def get(self, request, *args, **kwargs):
"""Handles GET requests for this view.
This will create the renderer for the diff fragment, render it, and
return it.
If there's an error when rendering the diff fragment, an error page
will be rendered and returned instead.
"""
context = self.get_context_data(**kwargs)
try:
renderer = self.create_renderer(context, *args, **kwargs)
return renderer.render_to_response()
except Http404:
raise
except Exception as e:
return exception_traceback(
self.request, e, self.error_template_name,
extra_context={
'file': self._get_requested_diff_file(False),
})
def create_renderer(self, context, diffset_or_id, filediff_id,
interdiffset_or_id=None, chunkindex=None,
*args, **kwargs):
"""Creates the renderer for the diff.
This calculates all the state and data needed for rendering, and
constructs a DiffRenderer with that data. That renderer is then
returned, ready for rendering.
If there's an error in looking up the necessary information, this
may raise a UserVisibleError (best case), or some other form of
Exception.
"""
# Depending on whether we're invoked from a URL or from a wrapper
# with precomputed diffsets, we may be working with either IDs or
# actual objects. If they're objects, just use them as-is. Otherwise,
# if they're IDs, we want to grab them both (if both are provided)
# in one go, to save on an SQL query.
self.diffset = None
self.interdiffset = None
diffset_ids = []
if isinstance(diffset_or_id, DiffSet):
self.diffset = diffset_or_id
else:
diffset_ids.append(diffset_or_id)
if interdiffset_or_id:
if isinstance(interdiffset_or_id, DiffSet):
self.interdiffset = interdiffset_or_id
else:
diffset_ids.append(interdiffset_or_id)
if diffset_ids:
diffsets = DiffSet.objects.filter(pk__in=diffset_ids)
if len(diffsets) != len(diffset_ids):
raise Http404
for temp_diffset in diffsets:
if temp_diffset.pk == diffset_or_id:
self.diffset = temp_diffset
elif temp_diffset.pk == interdiffset_or_id:
self.interdiffset = temp_diffset
else:
assert False
self.highlighting = get_enable_highlighting(self.request.user)
self.filediff = get_object_or_404(FileDiff, pk=filediff_id,
diffset=self.diffset)
# Store this so we don't end up causing an SQL query later when looking
# this up.
self.filediff.diffset = self.diffset
try:
lines_of_context = self.request.GET.get('lines-of-context', '')
lines_of_context = [int(i) for i in lines_of_context.split(',', 1)]
except (TypeError, ValueError):
lines_of_context = None
if chunkindex is not None:
try:
chunkindex = int(chunkindex)
except (TypeError, ValueError):
chunkindex = None
if lines_of_context:
collapseall = True
elif chunkindex is not None:
# If we're currently expanding part of a chunk, we want to render
# the entire chunk without any lines collapsed. In the case of
# showing a range of lines, we're going to get all chunks and then
# only show the range. This is so that we won't have separate
# cached entries for each range.
collapseall = False
else:
collapseall = get_collapse_diff(self.request)
self.diff_file = self._get_requested_diff_file()
if not self.diff_file:
raise UserVisibleError(
_('Internal error. Unable to locate file record for '
'filediff %s')
% self.filediff.pk)
return get_diff_renderer(
self.diff_file,
chunk_index=chunkindex,
highlighting=self.highlighting,
collapse_all=collapseall,
lines_of_context=lines_of_context,
extra_context=context,
template_name=self.template_name)
def get_context_data(self, *args, **kwargs):
"""Returns context data used for rendering the view.
This can be overridden by subclasses to provide additional data for the
view.
"""
return {}
def _get_requested_diff_file(self, get_chunks=True):
"""Fetches information on the requested diff.
This will look up information on the diff that's to be rendered
and return it, if found. It may also augment it with additional
data.
If get_chunks is True, the diff file information will include chunks
for rendering. Otherwise, it will just contain generic information
from the database.
"""
files = get_diff_files(self.diffset, self.filediff, self.interdiffset,
request=self.request)
if get_chunks:
populate_diff_chunks(files, self.highlighting,
request=self.request)
if files:
assert len(files) == 1
file = files[0]
if 'index' in self.request.GET:
file['index'] = self.request.GET.get('index')
return file
return None
def exception_traceback_string(request, e, template_name, extra_context={}):
context = {'error': e}
context.update(extra_context)
if e.__class__ is not UserVisibleError:
context['trace'] = traceback.format_exc()
if request:
request_context = RequestContext(request, context)
else:
request_context = context
return render_to_string(template_name, request_context)
def exception_traceback(request, e, template_name, extra_context={}):
return HttpResponseServerError(
exception_traceback_string(request, e, template_name, extra_context))
|
py | 1a51188b4766d79d387ed92f144759275adc5290 | def extract_items(items):
    """Pair each element with the remaining items (the list minus that element)."""
    result = []
    for index in range(0, len(items)):
        bottom = items[0:index]
        top = items[index+1:]
        item = items[index]
        result.append((item, bottom + top))
    return result
def perms(items):
    """Return every permutation of items, built recursively via extract_items."""
    if items == []:
        return [[]]
    result = []
    for (item, rest) in extract_items(items):
        for p in perms(rest):
            result.append([item] + p)
    return result
for p in perms(list(range(4))):
print(p)
|
py | 1a511902a44aaced46ded5e3a6cca7c1bb6d0954 | # -*- test-case-name: klein.test.test_request -*-
# Copyright (c) 2011-2021. See LICENSE for details.
"""
HTTP request API.
"""
from typing import Union
from attr import Factory, attrib, attrs
from attr.validators import instance_of, provides
from hyperlink import DecodedURL
from tubes.itube import IFount
from zope.interface import implementer
from ._imessage import IHTTPHeaders, IHTTPRequest
from ._message import MessageState, bodyAsBytes, bodyAsFount, validateBody
__all__ = ()
@implementer(IHTTPRequest)
@attrs(frozen=True)
class FrozenHTTPRequest:
"""
Immutable HTTP request.
"""
method: str = attrib(validator=instance_of(str))
uri: DecodedURL = attrib(validator=instance_of(DecodedURL))
headers: IHTTPHeaders = attrib(validator=provides(IHTTPHeaders))
_body: Union[bytes, IFount] = attrib(validator=validateBody)
_state: MessageState = attrib(default=Factory(MessageState), init=False)
def bodyAsFount(self) -> IFount:
return bodyAsFount(self._body, self._state)
async def bodyAsBytes(self) -> bytes:
return await bodyAsBytes(self._body, self._state)
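# Hedged construction sketch (illustrative only, not part of this module). It
# assumes `someHeaders` is any IHTTPHeaders provider (for example klein's
# FrozenHTTPHeaders) and a hyperlink release that exposes DecodedURL.from_text:
#
#     request = FrozenHTTPRequest(
#         method="GET",
#         uri=DecodedURL.from_text("https://example.com/"),
#         headers=someHeaders,
#         body=b"",
#     )
#     payload = await request.bodyAsBytes()  # awaited inside a coroutine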
|
py | 1a5119529bfe8912609be9939e13202e7d695e92 | from datetime import datetime
from flaskblog import db, login_manager
from flask_login import UserMixin
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(20), unique=True, nullable=False)
email = db.Column(db.String(120), unique=True, nullable=False)
image_file = db.Column(db.String(20), nullable=False, default='default.jpg')
password = db.Column(db.String(60), nullable=False)
posts = db.relationship('Post', backref='author', lazy=True)
def __repr__(self):
return f"User('{self.username}','{self.email}', '{self.image_file}')"
class Post(db.Model):
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(100), nullable=False)
date_posted = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
content = db.Column(db.Text, nullable=False)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
def __repr__(self):
        return f"Post('{self.title}','{self.date_posted}', '{self.content}')"
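# Hedged usage sketch (illustrative, not part of the original module; assumes
# the flaskblog package exposes `app` and a configured database URI):
#
#     from flaskblog import app, db
#     from flaskblog.models import User, Post
#     with app.app_context():
#         db.create_all()
#         user = User(username='alice', email='[email protected]',
#                     password='<bcrypt hash>')
#         post = Post(title='Hello', content='First post!', author=user)
#         db.session.add_all([user, post])
#         db.session.commit()
#         print(user.posts)  # -> [Post('Hello', ...)]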
|
py | 1a51198a762787ad5b6e337498c2bbc15b52ae72 | # Copyright (c) 2017 Midokura SARL
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from oslo_config import cfg
import testtools
from neutron_lib.utils import test
from tempest.common import utils
from tempest.common import waiters
from tempest.lib.common import ssh
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from neutron_tempest_plugin import config
from neutron_tempest_plugin.scenario import constants
from neutron_tempest_plugin.vpnaas.scenario import base_vpnaas as base
CONF = config.CONF
# NOTE(huntxu): This is a workaround due to a upstream bug [1].
# VPNaaS 4in6 and 6in4 is not working properly with LibreSwan 3.19+.
# In OpenStack zuul checks the base CentOS 7 node is using Libreswan 3.20 on
# CentOS 7.4. So we need to provide a way to skip the 4in6 and 6in4 test cases
# for zuul.
#
# Once the upstream bug gets fixed and the base node uses a newer version of
# Libreswan with that fix, we can remove this.
#
# [1] https://github.com/libreswan/libreswan/issues/175
CONF.register_opt(
cfg.BoolOpt('skip_4in6_6in4_tests',
default=False,
help='Whether to skip 4in6 and 6in4 test cases.'),
'neutron_vpnaas_plugin_options'
)
class Vpnaas(base.BaseTempestTestCase):
"""Test the following topology
+-------------------+
| public |
| network |
| |
+-+---------------+-+
| |
| |
+-------+-+ +-+-------+
| LEFT | | RIGHT |
| router | <--VPN--> | router |
| | | |
+----+----+ +----+----+
| |
+----+----+ +----+----+
| LEFT | | RIGHT |
| network | | network |
| | | |
+---------+ +---------+
"""
credentials = ['primary', 'admin']
inner_ipv6 = False
outer_ipv6 = False
@classmethod
@utils.requires_ext(extension="vpnaas", service="network")
def resource_setup(cls):
super(Vpnaas, cls).resource_setup()
# common
cls.keypair = cls.create_keypair()
cls.secgroup = cls.os_primary.network_client.create_security_group(
name=data_utils.rand_name('secgroup-'))['security_group']
cls.security_groups.append(cls.secgroup)
cls.create_loginable_secgroup_rule(secgroup_id=cls.secgroup['id'])
cls.create_pingable_secgroup_rule(secgroup_id=cls.secgroup['id'])
cls.ikepolicy = cls.create_ikepolicy(
data_utils.rand_name("ike-policy-"))
cls.ipsecpolicy = cls.create_ipsecpolicy(
data_utils.rand_name("ipsec-policy-"))
cls.extra_subnet_attributes = {}
if cls.inner_ipv6:
cls.create_v6_pingable_secgroup_rule(
secgroup_id=cls.secgroup['id'])
cls.extra_subnet_attributes['ipv6_address_mode'] = 'slaac'
cls.extra_subnet_attributes['ipv6_ra_mode'] = 'slaac'
# LEFT
cls.router = cls.create_router(
data_utils.rand_name('left-router'),
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
cls.network = cls.create_network(network_name='left-network')
ip_version = 6 if cls.inner_ipv6 else 4
v4_cidr = netaddr.IPNetwork('10.20.0.0/24')
v6_cidr = netaddr.IPNetwork('2001:db8:0:2::/64')
cidr = v6_cidr if cls.inner_ipv6 else v4_cidr
cls.subnet = cls.create_subnet(
cls.network, ip_version=ip_version, cidr=cidr, name='left-subnet',
**cls.extra_subnet_attributes)
cls.create_router_interface(cls.router['id'], cls.subnet['id'])
# Gives an internal IPv4 subnet for floating IP to the left server,
# we use it to ssh into the left server.
if cls.inner_ipv6:
v4_subnet = cls.create_subnet(
cls.network, ip_version=4, name='left-v4-subnet')
cls.create_router_interface(cls.router['id'], v4_subnet['id'])
# RIGHT
cls._right_network, cls._right_subnet, cls._right_router = \
cls._create_right_network()
@classmethod
def create_v6_pingable_secgroup_rule(cls, secgroup_id=None, client=None):
# NOTE(huntxu): This method should be moved into the base class, along
# with the v4 version.
"""This rule is intended to permit inbound ping6"""
rule_list = [{'protocol': 'ipv6-icmp',
'direction': 'ingress',
'port_range_min': 128, # type
'port_range_max': 0, # code
'ethertype': 'IPv6',
'remote_ip_prefix': '::/0'}]
client = client or cls.os_primary.network_client
cls.create_secgroup_rules(rule_list, client=client,
secgroup_id=secgroup_id)
@classmethod
def _create_right_network(cls):
router = cls.create_router(
data_utils.rand_name('right-router'),
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
network = cls.create_network(network_name='right-network')
v4_cidr = netaddr.IPNetwork('10.10.0.0/24')
v6_cidr = netaddr.IPNetwork('2001:db8:0:1::/64')
cidr = v6_cidr if cls.inner_ipv6 else v4_cidr
ip_version = 6 if cls.inner_ipv6 else 4
subnet = cls.create_subnet(
network, ip_version=ip_version, cidr=cidr, name='right-subnet',
**cls.extra_subnet_attributes)
cls.create_router_interface(router['id'], subnet['id'])
return network, subnet, router
def _create_server(self, create_floating_ip=True, network=None):
if network is None:
network = self.network
port = self.create_port(network, security_groups=[self.secgroup['id']])
if create_floating_ip:
fip = self.create_and_associate_floatingip(port['id'])
else:
fip = None
server = self.create_server(
flavor_ref=CONF.compute.flavor_ref,
image_ref=CONF.compute.image_ref,
key_name=self.keypair['name'],
networks=[{'port': port['id']}])['server']
waiters.wait_for_server_status(self.os_primary.servers_client,
server['id'],
constants.SERVER_STATUS_ACTIVE)
return {'port': port, 'fip': fip, 'server': server}
def _setup_vpn(self):
sites = [
dict(name="left", network=self.network, subnet=self.subnet,
router=self.router),
dict(name="right", network=self._right_network,
subnet=self._right_subnet, router=self._right_router),
]
psk = data_utils.rand_name('mysecret')
for i in range(0, 2):
site = sites[i]
site['vpnservice'] = self.create_vpnservice(
site['subnet']['id'], site['router']['id'],
name=data_utils.rand_name('%s-vpnservice' % site['name']))
for i in range(0, 2):
site = sites[i]
vpnservice = site['vpnservice']
peer = sites[1 - i]
if self.outer_ipv6:
peer_address = peer['vpnservice']['external_v6_ip']
if not peer_address:
msg = "Public network must have an IPv6 subnet."
raise self.skipException(msg)
else:
peer_address = peer['vpnservice']['external_v4_ip']
self.create_ipsec_site_connection(
self.ikepolicy['id'],
self.ipsecpolicy['id'],
vpnservice['id'],
peer_address=peer_address,
peer_id=peer_address,
peer_cidrs=[peer['subnet']['cidr']],
psk=psk,
name=data_utils.rand_name(
'%s-ipsec-site-connection' % site['name']))
def _get_ip_on_subnet_for_port(self, port, subnet_id):
for fixed_ip in port['fixed_ips']:
if fixed_ip['subnet_id'] == subnet_id:
return fixed_ip['ip_address']
msg = "Cannot get IP address on specified subnet %s for port %r." % (
subnet_id, port)
raise self.fail(msg)
@test.unstable_test("bug 1882220")
def _test_vpnaas(self):
# RIGHT
right_server = self._create_server(network=self._right_network,
create_floating_ip=False)
right_ip = self._get_ip_on_subnet_for_port(
right_server['port'], self._right_subnet['id'])
# LEFT
left_server = self._create_server()
ssh_client = ssh.Client(left_server['fip']['floating_ip_address'],
CONF.validation.image_ssh_user,
pkey=self.keypair['private_key'])
# check LEFT -> RIGHT connectivity via VPN
self.check_remote_connectivity(ssh_client, right_ip,
should_succeed=False)
self._setup_vpn()
self.check_remote_connectivity(ssh_client, right_ip)
# Test VPN traffic and floating IP traffic don't interfere each other.
if not self.inner_ipv6:
# Assign a floating-ip and check connectivity.
# This is NOT via VPN.
fip = self.create_and_associate_floatingip(
right_server['port']['id'])
self.check_remote_connectivity(ssh_client,
fip['floating_ip_address'])
# check LEFT -> RIGHT connectivity via VPN again, to ensure
# the above floating-ip doesn't interfere the traffic.
self.check_remote_connectivity(ssh_client, right_ip)
class Vpnaas4in4(Vpnaas):
@decorators.idempotent_id('aa932ab2-63aa-49cf-a2a0-8ae71ac2bc24')
def test_vpnaas(self):
self._test_vpnaas()
class Vpnaas4in6(Vpnaas):
outer_ipv6 = True
@decorators.idempotent_id('2d5f18dc-6186-4deb-842b-051325bd0466')
@testtools.skipUnless(CONF.network_feature_enabled.ipv6,
'IPv6 tests are disabled.')
@testtools.skipIf(
CONF.neutron_vpnaas_plugin_options.skip_4in6_6in4_tests,
'VPNaaS 4in6 test is skipped.')
def test_vpnaas_4in6(self):
self._test_vpnaas()
class Vpnaas6in4(Vpnaas):
inner_ipv6 = True
@decorators.idempotent_id('10febf33-c5b7-48af-aa13-94b4fb585a55')
@testtools.skipUnless(CONF.network_feature_enabled.ipv6,
'IPv6 tests are disabled.')
@testtools.skipIf(
CONF.neutron_vpnaas_plugin_options.skip_4in6_6in4_tests,
'VPNaaS 6in4 test is skipped.')
def test_vpnaas_6in4(self):
self._test_vpnaas()
class Vpnaas6in6(Vpnaas):
inner_ipv6 = True
outer_ipv6 = True
@decorators.idempotent_id('8b503ffc-aeb0-4938-8dba-73c7323e276d')
@testtools.skipUnless(CONF.network_feature_enabled.ipv6,
'IPv6 tests are disabled.')
def test_vpnaas_6in6(self):
self._test_vpnaas()
|
py | 1a511b1926e8cc6150d638cacfbc1baac1570f70 | # -*- coding: utf-8 -*-
"""Admin index for Django."""
# :copyright: (c) 2017, Maykin Media BV.
# All rights reserved.
# :license: BSD (3 Clause), see LICENSE for more details.
from __future__ import absolute_import, unicode_literals
import re
from collections import namedtuple
__version__ = "1.4.0"
__author__ = "Joeri Bekker"
__contact__ = "[email protected]"
__homepage__ = "https://github.com/maykinmedia/django-admin-index"
__docformat__ = "restructuredtext"
# -eof meta-
version_info_t = namedtuple(
"version_info_t", ("major", "minor", "patch", "releaselevel", "serial",)
)
# bumpversion can only search for {current_version}
# so we have to parse the version here.
_temp = re.match(r"(\d+)\.(\d+).(\d+)(.+)?", __version__).groups()
VERSION = version_info = version_info_t(
int(_temp[0]), int(_temp[1]), int(_temp[2]), _temp[3] or "", ""
)
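# For __version__ == "1.4.0" the regex yields ('1', '4', '0', None), so VERSION
# becomes version_info_t(major=1, minor=4, patch=0, releaselevel='', serial='').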
del _temp
del re
__all__ = []
default_app_config = "django_admin_index.apps.AdminIndexConfig"
|
py | 1a511b7114b313a104558f68fb88b4e404674270 | """
ASGI config for OpenGoggles project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'OpenGoggles.settings')
application = get_asgi_application()
|
py | 1a511cd3783e2570a3963c8932f6dc89ddce2ddf | # Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import contextlib
import json
import mock
import re
import requests
import sys
import unittest
from six.moves.urllib.parse import urlparse
from blinkpy.common.host_mock import MockHost
from blinkpy.common.path_finder import RELATIVE_WEB_TESTS
from blinkpy.web_tests.controllers.test_result_sink import CreateTestResultSink
from blinkpy.web_tests.controllers.test_result_sink import TestResultSink
from blinkpy.web_tests.models import test_results
from blinkpy.web_tests.models.typ_types import ResultType
from blinkpy.web_tests.port.test import add_manifest_to_mock_filesystem
from blinkpy.web_tests.port.test import TestPort
from blinkpy.web_tests.port.test import WEB_TEST_DIR
class TestResultSinkTestBase(unittest.TestCase):
def setUp(self):
super(TestResultSinkTestBase, self).setUpClass()
self.port = TestPort(MockHost())
def luci_context(self, **section_values):
if not section_values:
return
host = self.port.host
f, fname = host.filesystem.open_text_tempfile()
json.dump(section_values, f)
f.close()
host.environ['LUCI_CONTEXT'] = f.path
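    # The temporary file written above mirrors the LUCI_CONTEXT layout read by
    # CreateTestResultSink, e.g. {"result_sink": {"address": "localhost:123",
    # "auth_token": "secret"}}, as exercised by the tests below.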
class TestCreateTestResultSink(TestResultSinkTestBase):
def test_without_luci_context(self):
self.assertIsNone(CreateTestResultSink(self.port))
def test_without_result_sink_section(self):
self.luci_context(app={'foo': 'bar'})
self.assertIsNone(CreateTestResultSink(self.port))
def test_auth_token(self):
ctx = {'address': 'localhost:123', 'auth_token': 'secret'}
self.luci_context(result_sink=ctx)
rs = CreateTestResultSink(self.port)
self.assertIsNotNone(rs)
self.assertEqual(rs._session.headers['Authorization'],
'ResultSink ' + ctx['auth_token'])
def test_with_result_sink_section(self):
ctx = {'address': 'localhost:123', 'auth_token': 'secret'}
self.luci_context(result_sink=ctx)
rs = CreateTestResultSink(self.port)
self.assertIsNotNone(rs)
response = requests.Response()
response.status_code = 200
with mock.patch.object(rs._session, 'post',
return_value=response) as m:
rs.sink(True, test_results.TestResult('test'), None)
self.assertTrue(m.called)
self.assertEqual(
urlparse(m.call_args[0][0]).netloc, ctx['address'])
class TestResultSinkMessage(TestResultSinkTestBase):
"""Tests ResulkSink.sink."""
def setUp(self):
super(TestResultSinkMessage, self).setUp()
patcher = mock.patch.object(TestResultSink, '_send')
self.mock_send = patcher.start()
self.addCleanup(patcher.stop)
ctx = {'address': 'localhost:123', 'auth_token': 'super-secret'}
self.luci_context(result_sink=ctx)
self.rs = CreateTestResultSink(self.port)
def sink(self, expected, test_result, expectations=None):
self.rs.sink(expected, test_result, expectations)
self.assertTrue(self.mock_send.called)
return self.mock_send.call_args[0][0]['testResults'][0]
def test_sink(self):
tr = test_results.TestResult(test_name='test-name')
tr.total_run_time = 123.456
tr.type = ResultType.Crash
sent_data = self.sink(True, tr)
self.assertEqual(sent_data['testId'], 'test-name')
self.assertEqual(sent_data['expected'], True)
self.assertEqual(sent_data['status'], 'CRASH')
self.assertEqual(sent_data['duration'], '123.456s')
def test_sink_with_expectations(self):
class FakeTestExpectation(object):
def __init__(self):
self.raw_results = ['Failure']
class FakeExpectations(object):
def __init__(self):
self.system_condition_tags = ['tag1', 'tag2']
def get_expectations(self, _):
return FakeTestExpectation()
# Values should be extracted from expectations.
tr = test_results.TestResult(test_name='test-name')
tr.type = ResultType.Crash
expectations = FakeExpectations()
expected_tags = [
{
'key': 'test_name',
'value': 'test-name'
},
{
'key': 'web_tests_device_failed',
'value': 'False'
},
{
'key': 'web_tests_result_type',
'value': 'CRASH'
},
{
'key': 'web_tests_flag_specific_config_name',
'value': ''
},
{
'key': 'web_tests_used_expectations_file',
'value': 'TestExpectations',
},
{
'key': 'web_tests_used_expectations_file',
'value': 'WebDriverExpectations',
},
{
'key': 'web_tests_used_expectations_file',
'value': 'NeverFixTests',
},
{
'key': 'web_tests_used_expectations_file',
'value': 'StaleTestExpectations',
},
{
'key': 'web_tests_used_expectations_file',
'value': 'SlowTests',
},
{
'key': 'raw_typ_expectation',
'value': 'Failure'
},
{
'key': 'typ_tag',
'value': 'tag1'
},
{
'key': 'typ_tag',
'value': 'tag2'
},
]
sent_data = self.sink(True, tr, expectations)
self.assertEqual(sent_data['tags'], expected_tags)
def test_sink_without_expectations(self):
tr = test_results.TestResult(test_name='test-name')
tr.type = ResultType.Crash
expected_tags = [
{
'key': 'test_name',
'value': 'test-name'
},
{
'key': 'web_tests_device_failed',
'value': 'False'
},
{
'key': 'web_tests_result_type',
'value': 'CRASH'
},
{
'key': 'web_tests_flag_specific_config_name',
'value': ''
},
{
'key': 'web_tests_used_expectations_file',
'value': 'TestExpectations',
},
{
'key': 'web_tests_used_expectations_file',
'value': 'WebDriverExpectations',
},
{
'key': 'web_tests_used_expectations_file',
'value': 'NeverFixTests',
},
{
'key': 'web_tests_used_expectations_file',
'value': 'StaleTestExpectations',
},
{
'key': 'web_tests_used_expectations_file',
'value': 'SlowTests',
},
]
sent_data = self.sink(True, tr)
self.assertEqual(sent_data['tags'], expected_tags)
def test_test_metadata(self):
tr = test_results.TestResult('')
base_path = '//' + RELATIVE_WEB_TESTS
tr.test_name = "test-name"
self.assertDictEqual(
self.sink(True, tr)['testMetadata'],
{
'name': 'test-name',
'location': {
'repo': 'https://chromium.googlesource.com/chromium/src',
'fileName': base_path + 'test-name',
},
},
)
tr.test_name = "///test-name"
self.assertDictEqual(
self.sink(True, tr)['testMetadata'],
{
'name': '///test-name',
'location': {
'repo': 'https://chromium.googlesource.com/chromium/src',
'fileName': base_path + '///test-name',
},
},
)
def test_device_failure(self):
tr = test_results.TestResult(test_name='test-name')
tr.type = ResultType.Failure
tr.device_failed = True
sent_data = self.sink(True, tr)
# If the device failed, 'expected' and 'status' must be False and 'ABORT'
self.assertEqual(sent_data['expected'], False)
self.assertEqual(sent_data['status'], 'ABORT')
def test_timeout(self):
tr = test_results.TestResult(test_name='test-name')
tr.type = ResultType.Timeout
sent_data = self.sink(True, tr)
# Timeout is considered as 'ABORT'
self.assertEqual(sent_data['status'], 'ABORT')
def test_artifacts(self):
tr = test_results.TestResult(test_name='test-name')
tr.artifacts.AddArtifact('test-image.png', '/tmp/test-image.png', True)
tr.artifacts.AddArtifact('stdout', '/tmp/stdout', True)
sent_data = self.sink(True, tr)
self.assertDictEqual(
sent_data['artifacts'], {
'test-image.png': {
'filePath': '/tmp/test-image.png'
},
'stdout': {
'filePath': '/tmp/stdout'
}
})
def test_artifacts_with_duplicate_paths(self):
tr = test_results.TestResult(test_name='test-name')
tr.artifacts.AddArtifact('artifact', '/tmp/foo', False)
tr.artifacts.AddArtifact('artifact', '/tmp/bar', False)
sent_data = self.sink(True, tr)
self.assertDictEqual(
sent_data['artifacts'], {
'artifact': {
'filePath': '/tmp/foo'
},
'artifact-1': {
'filePath': '/tmp/bar'
}
})
def test_summary_html(self):
tr = test_results.TestResult(test_name='test-name')
tr.artifacts.AddArtifact('stderr', '/tmp/stderr', False)
tr.artifacts.AddArtifact('crash_log', '/tmp/crash_log', False)
tr.artifacts.AddArtifact('command', '/tmp/cmd', False)
sent_data = self.sink(True, tr)
p = re.compile(
'<text-artifact artifact-id="(command|stderr|crash_log)" />')
self.assertListEqual(
p.findall(sent_data['summaryHtml']),
# The artifact tags should be sorted by the artifact names.
['command', 'crash_log', 'stderr'],
)
def assertFilename(self, test_name, expected_filename):
sent_data = self.sink(True, test_results.TestResult(test_name))
self.assertEqual(sent_data['testMetadata']['location']['fileName'],
'//' + RELATIVE_WEB_TESTS + expected_filename)
def test_location_filename(self):
self.assertFilename('real/test.html', 'real/test.html')
# TestPort.virtual_test_suites() has a set of hard-coded virtualized
# tests, and a test name must start with one of the virtual prefixes
# and base in order for it to be recognized as a virtual test.
self.assertFilename(
'virtual/virtual_passes/passes/does_not_exist.html',
'passes/does_not_exist.html')
self.port.host.filesystem.write_text_file(
self.port.host.filesystem.join(WEB_TEST_DIR, 'virtual',
'virtual_passes', 'passes',
'exists.html'),
'body',
)
self.assertFilename('virtual/virtual_passes/passes/exists.html',
'virtual/virtual_passes/passes/exists.html')
def test_wpt_location_filename(self):
add_manifest_to_mock_filesystem(self.port)
self.assertFilename(
'external/wpt/html/parse.html?run_type=uri',
'external/wpt/html/parse.html',
)
self.assertFilename(
'virtual/virtual_wpt/external/wpt/dom/ranges/Range-attributes.html',
'external/wpt/dom/ranges/Range-attributes.html',
)
|
py | 1a511e0b12648c2f000e55af25b44494b1bb287a | # Copyright 2018-2021 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains tools and decorators for registering batch transforms."""
# pylint: disable=too-few-public-methods
import copy
import functools
import inspect
import os
import types
import warnings
import pennylane as qml
class batch_transform:
r"""Class for registering a tape transform that takes a tape, and outputs
a batch of tapes to be independently executed on a quantum device.
Examples of such transforms include quantum gradient shift rules (such
as finite-differences and the parameter-shift rule) and metrics such as
the quantum Fisher information matrix.
Args:
transform_fn (function): The function to register as the batch tape transform.
It can have an arbitrary number of arguments, but the first argument
**must** be the input tape.
expand_fn (function): An expansion function (if required) to be applied to the
input tape before the transformation takes place.
It **must** take the same input arguments as ``transform_fn``.
differentiable (bool): Specifies whether the transform is differentiable or
not. A transform may be non-differentiable for several reasons:
- It does not use an autodiff framework for its tensor manipulations;
- It returns a non-differentiable or non-numeric quantity, such as
a boolean, string, or integer.
In such a case, setting ``differentiable=False`` instructs the decorator
to mark the output as 'constant', reducing potential overhead.
**Example**
A valid batch tape transform is a function that satisfies the following:
- The first argument must be a tape.
- Depending on the structure of this input tape, various quantum operations, functions,
and templates may be called.
- Any internal classical processing should use the ``qml.math`` module to ensure
the transform is differentiable.
- The transform should return a tuple containing:
* Multiple transformed tapes to be executed on a device.
* A classical processing function for post-processing the executed tape results.
This processing function should have the signature ``f(list[tensor_like]) → Any``.
If ``None``, no classical processing is applied to the results.
For example:
.. code-block:: python
@qml.batch_transform
def my_transform(tape, a, b):
'''Generates two tapes, one with all RX replaced with RY,
and the other with all RX replaced with RZ.'''
tape1 = qml.tape.QuantumTape()
tape2 = qml.tape.QuantumTape()
# loop through all operations on the input tape
for op in tape:
if op.name == "RX":
wires = op.wires
param = op.parameters[0]
with tape1:
qml.RY(a * qml.math.abs(param), wires=wires)
with tape2:
qml.RZ(b * qml.math.abs(param), wires=wires)
else:
for t in [tape1, tape2]:
with t:
qml.apply(op)
def processing_fn(results):
return qml.math.sum(qml.math.stack(results))
return [tape1, tape2], processing_fn
We can apply this transform to a quantum tape:
>>> with qml.tape.QuantumTape() as tape:
... qml.Hadamard(wires=0)
... qml.RX(-0.5, wires=0)
... qml.expval(qml.PauliX(0))
>>> tapes, fn = my_transform(tape, 0.65, 2.5)
>>> print(qml.drawer.tape_text(tapes[0], decimals=2))
0: ──H──RY(0.33)─┤ <X>
>>> print(qml.drawer.tape_text(tapes[1], decimals=2))
0: ──H──RZ(1.25)─┤ <X>
We can execute these tapes manually:
>>> dev = qml.device("default.qubit", wires=1)
>>> res = qml.execute(tapes, dev, interface="autograd", gradient_fn=qml.gradients.param_shift)
>>> print(res)
[tensor([0.94765073], requires_grad=True), tensor([0.31532236], requires_grad=True)]
Applying the processing function, we retrieve the end result of the transform:
>>> print(fn(res))
1.2629730888100839
Alternatively, we may also transform a QNode directly, using either
decorator syntax:
>>> @my_transform(0.65, 2.5)
... @qml.qnode(dev)
... def circuit(x):
... qml.Hadamard(wires=0)
... qml.RX(x, wires=0)
... return qml.expval(qml.PauliX(0))
>>> print(circuit(-0.5))
1.2629730888100839
or by transforming an existing QNode:
>>> @qml.qnode(dev)
... def circuit(x):
... qml.Hadamard(wires=0)
... qml.RX(x, wires=0)
... return qml.expval(qml.PauliX(0))
>>> circuit = my_transform(circuit, 0.65, 2.5)
>>> print(circuit(-0.5))
1.2629730888100839
Batch tape transforms are fully differentiable:
>>> x = np.array(-0.5, requires_grad=True)
>>> gradient = qml.grad(circuit)(x)
>>> print(gradient)
2.5800122591960153
.. details::
:title: Usage Details
**Expansion functions**
Tape expansion, decomposition, or manipulation may always be
performed within the custom batch transform. However, by specifying
        a separate expansion function, it becomes possible for PennyLane to
        access this separate expansion function where needed via
>>> my_transform.expand_fn
The provided ``expand_fn`` must have the same input arguments as
``transform_fn`` and return a ``tape``. Following the example above:
.. code-block:: python
def expand_fn(tape, a, b):
stopping_crit = lambda obj: obj.name!="PhaseShift"
return tape.expand(depth=10, stop_at=stopping_crit)
my_transform = batch_transform(my_transform, expand_fn)
Note that:
- the transform arguments ``a`` and ``b`` must be passed to
the expansion function, and
- the expansion function must return a single tape.
"""
def __new__(cls, *args, **kwargs): # pylint: disable=unused-argument
if os.environ.get("SPHINX_BUILD") == "1":
# If called during a Sphinx documentation build,
# simply return the original function rather than
# instantiating the object. This allows the signature to
# be correctly displayed in the documentation.
warnings.warn(
"Batch transformations have been disabled, as a Sphinx "
"build has been detected via SPHINX_BUILD='1'. If this is not the "
"case, please set the environment variable SPHINX_BUILD='0'.",
UserWarning,
)
args[0].custom_qnode_wrapper = lambda x: x
return args[0]
return super().__new__(cls)
def __init__(self, transform_fn, expand_fn=None, differentiable=True):
if not callable(transform_fn):
raise ValueError(
f"The batch transform function to register, {transform_fn}, "
"does not appear to be a valid Python function or callable."
)
self.transform_fn = transform_fn
self.expand_fn = expand_fn
self.differentiable = differentiable
self.qnode_wrapper = self.default_qnode_wrapper
functools.update_wrapper(self, transform_fn)
def custom_qnode_wrapper(self, fn):
"""Register a custom QNode execution wrapper function
for the batch transform.
**Example**
.. code-block:: python
def my_transform(tape, *targs, **tkwargs):
...
return tapes, processing_fn
@my_transform.custom_qnode_wrapper
def my_custom_qnode_wrapper(self, qnode, targs, tkwargs):
def wrapper_fn(*args, **kwargs):
# construct QNode
qnode.construct(args, kwargs)
# apply transform to QNode's tapes
tapes, processing_fn = self.construct(qnode.qtape, *targs, **tkwargs)
# execute tapes and return processed result
...
return processing_fn(results)
return wrapper_fn
The custom QNode execution wrapper must have arguments
``self`` (the batch transform object), ``qnode`` (the input QNode
to transform and execute), ``targs`` and ``tkwargs`` (the transform
arguments and keyword arguments respectively).
It should return a callable object that accepts the *same* arguments
as the QNode, and returns the transformed numerical result.
The default :meth:`~.default_qnode_wrapper` method may be called
if only pre- or post-processing dependent on QNode arguments is required:
.. code-block:: python
@my_transform.custom_qnode_wrapper
def my_custom_qnode_wrapper(self, qnode, targs, tkwargs):
transformed_qnode = self.default_qnode_wrapper(qnode)
def wrapper_fn(*args, **kwargs):
args, kwargs = pre_process(args, kwargs)
res = transformed_qnode(*args, **kwargs)
...
return ...
return wrapper_fn
"""
self.qnode_wrapper = types.MethodType(fn, self)
def default_qnode_wrapper(self, qnode, targs, tkwargs):
"""A wrapper method that takes a QNode and transform arguments,
and returns a function that 'wraps' the QNode execution.
The returned function should accept the same keyword arguments as
the QNode, and return the output of applying the tape transform
to the QNode's constructed tape.
"""
transform_max_diff = tkwargs.pop("max_diff", None)
if "shots" in inspect.signature(qnode.func).parameters:
raise ValueError(
"Detected 'shots' as an argument of the quantum function to transform. "
"The 'shots' argument name is reserved for overriding the number of shots "
"taken by the device."
)
def _wrapper(*args, **kwargs):
shots = kwargs.pop("shots", False)
qnode.construct(args, kwargs)
tapes, processing_fn = self.construct(qnode.qtape, *targs, **tkwargs)
interface = qnode.interface
execute_kwargs = getattr(qnode, "execute_kwargs", {}).copy()
max_diff = execute_kwargs.pop("max_diff", 2)
max_diff = transform_max_diff or max_diff
gradient_fn = getattr(qnode, "gradient_fn", qnode.diff_method)
gradient_kwargs = getattr(qnode, "gradient_kwargs", {})
if interface is None or not self.differentiable:
gradient_fn = None
res = qml.execute(
tapes,
device=qnode.device,
gradient_fn=gradient_fn,
interface=interface,
max_diff=max_diff,
override_shots=shots,
gradient_kwargs=gradient_kwargs,
**execute_kwargs,
)
return processing_fn(res)
return _wrapper
def __call__(self, *targs, **tkwargs):
qnode = None
if targs:
qnode, *targs = targs
if isinstance(qnode, qml.Device):
# Input is a quantum device.
# dev = some_transform(dev, *transform_args)
return self._device_wrapper(*targs, **tkwargs)(qnode)
if isinstance(qnode, qml.tape.QuantumTape):
# Input is a quantum tape.
# tapes, fn = some_transform(tape, *transform_args)
return self._tape_wrapper(*targs, **tkwargs)(qnode)
if isinstance(qnode, (qml.QNode, qml.ExpvalCost)):
# Input is a QNode:
# result = some_transform(qnode, *transform_args)(*qnode_args)
wrapper = self.qnode_wrapper(qnode, targs, tkwargs)
wrapper = functools.wraps(qnode)(wrapper)
def _construct(args, kwargs):
qnode.construct(args, kwargs)
return self.construct(qnode.qtape, *targs, **tkwargs)
wrapper.construct = _construct
else:
# Input is not a QNode nor a quantum tape nor a device.
# Assume Python decorator syntax:
#
# result = some_transform(*transform_args)(qnode)(*qnode_args)
#
# or
#
# @some_transform(*transform_args)
# @qml.qnode(dev)
# def circuit(...):
# ...
# result = circuit(*qnode_args)
# Prepend the input to the transform args,
# and create a wrapper function.
if qnode is not None:
targs = (qnode,) + tuple(targs)
def wrapper(qnode):
if isinstance(qnode, qml.Device):
return self._device_wrapper(*targs, **tkwargs)(qnode)
if isinstance(qnode, qml.tape.QuantumTape):
return self._tape_wrapper(*targs, **tkwargs)(qnode)
_wrapper = self.qnode_wrapper(qnode, targs, tkwargs)
_wrapper = functools.wraps(qnode)(_wrapper)
def _construct(args, kwargs):
qnode.construct(args, kwargs)
return self.construct(qnode.qtape, *targs, **tkwargs)
_wrapper.construct = _construct
return _wrapper
wrapper.tape_fn = functools.partial(self.transform_fn, *targs, **tkwargs)
wrapper.expand_fn = self.expand_fn
wrapper.differentiable = self.differentiable
return wrapper
def construct(self, tape, *args, **kwargs):
"""Applies the batch tape transform to an input tape.
Args:
tape (.QuantumTape): the tape to be transformed
*args: positional arguments to pass to the tape transform
**kwargs: keyword arguments to pass to the tape transform
Returns:
tuple[list[tapes], callable]: list of transformed tapes
to execute and a post-processing function.
"""
expand = kwargs.pop("_expand", True)
if expand and self.expand_fn is not None:
tape = self.expand_fn(tape, *args, **kwargs)
tapes, processing_fn = self.transform_fn(tape, *args, **kwargs)
if processing_fn is None:
processing_fn = lambda x: x
return tapes, processing_fn
def _device_wrapper(self, *targs, **tkwargs):
def _wrapper(dev):
new_dev = copy.deepcopy(dev)
new_dev.batch_transform = lambda tape: self.construct(tape, *targs, **tkwargs)
return new_dev
return _wrapper
def _tape_wrapper(self, *targs, **tkwargs):
return lambda tape: self.construct(tape, *targs, **tkwargs)
def map_batch_transform(transform, tapes):
"""Map a batch transform over multiple tapes.
Args:
transform (.batch_transform): the batch transform
to be mapped
tapes (Sequence[QuantumTape]): The sequence of tapes the batch
transform should be applied to. Each tape in the sequence
is transformed by the batch transform.
**Example**
Consider the following tapes:
.. code-block:: python
H = qml.PauliZ(0) @ qml.PauliZ(1) - qml.PauliX(0)
with qml.tape.QuantumTape() as tape1:
qml.RX(0.5, wires=0)
qml.RY(0.1, wires=1)
qml.CNOT(wires=[0, 1])
qml.expval(H)
with qml.tape.QuantumTape() as tape2:
qml.Hadamard(wires=0)
qml.CRX(0.5, wires=[0, 1])
qml.CNOT(wires=[0, 1])
qml.expval(H + 0.5 * qml.PauliY(0))
We can use ``map_batch_transform`` to map a single
batch transform across both of the these tapes in such a way
that allows us to submit a single job for execution:
>>> tapes, fn = map_batch_transform(qml.transforms.hamiltonian_expand, [tape1, tape2])
>>> dev = qml.device("default.qubit", wires=2)
>>> fn(qml.execute(tapes, dev, qml.gradients.param_shift))
[0.9950041652780257, 0.8150893013179248]
"""
execution_tapes = []
batch_fns = []
tape_counts = []
for t in tapes:
# Preprocess the tapes by applying batch transforms
# to each tape, and storing corresponding tapes
# for execution, processing functions, and list of tape lengths.
new_tapes, fn = transform(t)
execution_tapes.extend(new_tapes)
batch_fns.append(fn)
tape_counts.append(len(new_tapes))
def processing_fn(res):
count = 0
final_results = []
for idx, s in enumerate(tape_counts):
# apply any batch transform post-processing
new_res = batch_fns[idx](res[count : count + s])
final_results.append(new_res)
count += s
return final_results
return execution_tapes, processing_fn
|
py | 1a511e4cabf240eadb54f86ac462b9bcec2f25f6 | from datetime import time
from django.forms import TimeInput
from django.test import override_settings
from django.utils import translation
from .base import WidgetTest
class TimeInputTest(WidgetTest):
widget = TimeInput()
def test_render_none(self):
self.check_html(self.widget, 'time', None, html='<input type="text" name="time">')
def test_render_value(self):
"""
The microseconds are trimmed on display, by default.
"""
t = time(12, 51, 34, 482548)
self.assertEqual(str(t), '12:51:34.482548')
self.check_html(self.widget, 'time', t, html='<input type="text" name="time" value="12:51:34">')
self.check_html(self.widget, 'time', time(12, 51, 34), html=(
'<input type="text" name="time" value="12:51:34">'
))
self.check_html(self.widget, 'time', time(12, 51), html=(
'<input type="text" name="time" value="12:51:00">'
))
def test_string(self):
"""Initializing from a string value."""
self.check_html(self.widget, 'time', '13:12:11', html=(
'<input type="text" name="time" value="13:12:11">'
))
def test_format(self):
"""
Use 'format' to change the way a value is displayed.
"""
t = time(12, 51, 34, 482548)
widget = TimeInput(format='%H:%M', attrs={'type': 'time'})
self.check_html(widget, 'time', t, html='<input type="time" name="time" value="12:51">')
@override_settings(USE_L10N=True)
@translation.override('de-at')
def test_l10n(self):
t = time(12, 51, 34, 482548)
self.check_html(self.widget, 'time', t, html='<input type="text" name="time" value="12:51:34">')
|
py | 1a511e997109bb0a8a562ee7b641b3acd5199a85 | import json
import logging
import os
from datetime import date
from sensors import Light
from utils import catch_measurement, save_measurement, find, exit_on_time
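# Expected shape of setup_agriculture.json, inferred from the keys read below
# (all values are illustrative only):
#
#     {
#         "local_storage": "/home/pi/agriculture",
#         "light": {
#             "light_port": 0,
#             "period": 60,
#             "wait": 0.5,
#             "exit_time": "22:00"
#         }
#     }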
def main():
with open(find('setup_agriculture.json', '/')) as f:
setup = json.load(f)
local_storage: str = setup.get('local_storage')
logging.basicConfig(filename=os.path.join(local_storage, 'log.log'), level=logging.WARNING,
format='%(asctime)s %(levelname)s %(name)s %(message)s')
light_port: int = setup['light'].get('light_port')
period: int = setup['light'].get('period')
wait: float = setup['light'].get('wait')
light_sensor = Light(light_port)
filename = os.path.join(local_storage, 'light_' + str(date.today()) + '.txt')
if not os.path.exists(filename):
with open(filename, 'w+') as f:
f.write('Timestamp, Light\n')
while exit_on_time(setup['light'].get('exit_time')):
measurement = catch_measurement(sensor=light_sensor, period=period, wait=wait)
save_measurement(measurement=measurement,
path=filename)
quit()
if __name__ == '__main__':
main()
|
py | 1a511f5a1af5ce1b615bc7cb06f057f3081151a8 | import sys
import common as _c
class StatusArg:
    """Container for parsed command-line status flags."""
    def __init__(self):
        self.test = False
def parsearg(globvar):
    """Populate globvar['status'] from sys.argv (only the -t flag is recognised)."""
    globvar['status'] = StatusArg()
    for arg in sys.argv[1:]:
        if arg == '-t':
            globvar['status'].test = True
        else:
            print("unknown argument : {0}".format(arg))
    return None
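# Minimal usage sketch (illustrative): callers keep their shared state in a
# dict and let parsearg() fill in the parsed status object.
if __name__ == '__main__':
    globvar = {}
    parsearg(globvar)
    print("test mode:", globvar['status'].test)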
|
py | 1a511fad58a8a2d47d5ff41dbdb3d1ad9e486569 | # -*- coding: utf-8 -*-
from PySide2.QtCore import Signal
from PySide2.QtWidgets import QWidget
from ......Classes.CondType21 import CondType21
from ......GUI import gui_option
from ......GUI.Dialog.DMachineSetup.SBar.PCondType21.Gen_PCondType21 import (
Gen_PCondType21,
)
class PCondType21(Gen_PCondType21, QWidget):
"""Page to setup Conductor Type 21"""
# Signal to DMachineSetup to know that the save popup is needed
saveNeeded = Signal()
# Information for SBar combobox
cond_name = "Rectangular bar"
cond_type = CondType21
def __init__(self, machine=None, material_dict=None):
"""Initialize the widget according to machine
Parameters
----------
self : PCondType21
A PCondType21 widget
machine : Machine
current machine to edit
material_dict: dict
Materials dictionary (library + machine)
"""
# Build the interface according to the .ui file
QWidget.__init__(self)
self.setupUi(self)
# Setup material combobox according to matlib names
self.material_dict = material_dict
self.w_mat.def_mat = "Copper1"
# Set FloatEdit unit
self.lf_Hbar.unit = "m"
self.lf_Wbar.unit = "m"
        # Set unit name (m or mm)
self.u = gui_option.unit
wid_list = [self.unit_Hbar, self.unit_Wbar]
for wid in wid_list:
wid.setText(self.u.get_m_name())
# Fill the fields with the machine values (if they're filled)
self.machine = machine
conductor = machine.rotor.winding.conductor
# Make sure that the rotor's conductor is a 2_1
if conductor is None or not isinstance(conductor, CondType21):
self.machine.rotor.winding.conductor = CondType21()
self.machine.rotor.winding.conductor._set_None()
# Make sure to re-set conductor with the new object
conductor = machine.rotor.winding.conductor
self.lf_Hbar.setValue(conductor.Hbar)
self.lf_Wbar.setValue(conductor.Wbar)
self.w_mat.update(conductor, "cond_mat", self.material_dict)
# Display the main output
self.w_out.comp_output()
# Connect the widget
self.lf_Hbar.editingFinished.connect(self.set_Hbar)
self.lf_Wbar.editingFinished.connect(self.set_Wbar)
self.w_mat.saveNeeded.connect(self.emit_save)
def emit_save(self):
"""Emit the saveNeeded signal"""
self.saveNeeded.emit()
def set_Hbar(self):
"""Signal to update the value of Hbar according to the line edit
Parameters
----------
self : PCondType21
A PCondType21 object
Returns
-------
"""
self.machine.rotor.winding.conductor.Hbar = self.lf_Hbar.value()
self.w_out.comp_output()
# Notify the machine GUI that the machine has changed
self.saveNeeded.emit()
def set_Wbar(self):
"""Signal to update the value of Wbar according to the line edit
Parameters
----------
self : PCondType21
A PCondType21 object
Returns
-------
"""
self.machine.rotor.winding.conductor.Wbar = self.lf_Wbar.value()
self.w_out.comp_output()
# Notify the machine GUI that the machine has changed
self.saveNeeded.emit()
|
py | 1a51239e02f187cf43e3aca5d1b82df7e31c03f7 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
zmap.py - version and date, see below
Source code : https://github.com/nanshihui/python-zmap/
Author :
* Sherwel Nan - https://github.com/nanshihui/python-zmap/
Licence : Apache License 2.0
A permissive license whose main conditions require preservation of copyright and license notices.
Contributors provide an express grant of patent rights. Licensed works, modifications, and larger
works may be distributed under different terms and without source code.
"""
__author__ = 'Sherwel Nan'
__version__ = '0.1'
__last_modification__ = '2017.07.31'
import csv
import io
import os
import re
import shlex
import subprocess
import sys
try:
from multiprocessing import Process
except ImportError:
from threading import Thread as Process
############################################################################
class PortScanner(object):
"""
    PortScanner class allows using zmap from Python
"""
def __init__(self, zmap_search_path=('zmap',
'/usr/bin/zmap',
'/usr/local/bin/zmap',
'/sw/bin/zmap',
'/opt/local/bin/zmap'),Async=False,call_back=None):
"""
Initialize PortScanner module
* detects zmap on the system and zmap version
* may raise PortScannerError exception if zmap is not found in the path
        :param zmap_search_path: tuple of strings where to search for the zmap executable. Change this if you want to use a specific version of zmap.
:returns: nothing
"""
self._zmap_path = '' # zmap path
self._scan_result = {}
self._zmap_version_number = 0 # zmap version number
self._zmap_subversion_number = 0 # zmap subversion number
self._zmap_last_output = '' # last full ascii zmap output
is_zmap_found = False # true if we have found zmap
self._all_host=None
self.__process = None
self._command=None
        # regex used to detect the zmap version banner
regex = re.compile(
'zmap [0-9]*\.[0-9]*\.[0-9].*'
)
        # launch 'zmap -V' and wait for a version banner matching the regex
        # above (e.g. 'zmap 2.1.1')
        # This is for Mac OSX. When idle3 is launched from the Finder, PATH is not set, so zmap was not found
for zmap_path in zmap_search_path:
try:
if sys.platform.startswith('freebsd') \
or sys.platform.startswith('linux') \
or sys.platform.startswith('darwin'):
p = subprocess.Popen([zmap_path, '-V'],
bufsize=10000,
stdout=subprocess.PIPE,
close_fds=True)
else:
p = subprocess.Popen([zmap_path, '-V'],
bufsize=10000,
stdout=subprocess.PIPE)
except OSError:
pass
else:
self._zmap_path = zmap_path # save path
break
else:
raise PortScannerError(
'zmap program was not found in path. PATH is : {0}'.format(
os.getenv('PATH')
)
)
        self._zmap_last_output = bytes.decode(p.communicate()[0])  # save stdout
for line in self._zmap_last_output.split(os.linesep):
if regex.match(line) is not None:
is_zmap_found = True
# Search for version number
regex_version = re.compile('[0-9]+')
regex_subversion = re.compile('\.[0-9]+')
rv = regex_version.search(line)
rsv = regex_subversion.search(line)
if rv is not None and rsv is not None:
# extract version/subversion
self._zmap_version_number = int(line[rv.start():rv.end()])
self._zmap_subversion_number = int(
line[rsv.start() + 1:rsv.end()]
)
break
if not is_zmap_found:
raise PortScannerError('zmap program was not found in path')
return
def get_zmap_last_output(self):
"""
Returns the last text output of zmap in raw text
        this may be used for debugging purposes
:returns: string containing the last text output of zmap in raw text
"""
return self._zmap_last_output
def zmap_version(self):
"""
returns zmap version if detected (int version, int subversion)
or (0, 0) if unknown
:returns: (zmap_version_number, zmap_subversion_number)
"""
return (self._zmap_version_number, self._zmap_subversion_number)
def scanbyfile(self,path,ports):
pass
def scanbylist(self,lists,ports):
pass
def scan(self, hosts='127.0.0.1', ports=None, arguments='', sudo=False):
"""
Scan given hosts
        Check ['scaninfo'][port]['error_info'] in the result to know
        if something went wrong (it is empty when everything was ok).
        :param hosts: string for hosts as zmap uses it, e.g. 'scanme.zmap.org' or '198.116.0-255.1-127' or '216.163.128.20/20'
        :param ports: int for the port to scan, e.g. 22
        :param arguments: string of extra arguments for zmap, e.g. '-q'
        :param sudo: launch zmap with sudo if True
        :returns: scan_result as dictionary
"""
# assert os.geteuid() == 0,'zmap should be running with root'
if sys.version_info[0] == 2:
assert type(hosts) in (str, unicode), 'Wrong type for [hosts], should be a string [was {0}]'.format(
type(hosts)) # noqa
            assert ports and type(ports) == int, 'Wrong type for [ports], should be an int [was {0}]'.format(
type(ports)) # noqa
assert type(arguments) in (str, unicode), 'Wrong type for [arguments], should be a string [was {0}]'.format(
type(arguments)) # noqa
else:
            assert type(hosts) is str, 'Wrong type for [hosts], should be a string [was {0}]'.format(
                type(hosts))  # noqa
            assert ports and type(ports) == int, 'Wrong type for [ports], should be an int [was {0}]'.format(
                type(ports))  # noqa
assert type(arguments) is str, 'Wrong type for [arguments], should be a string [was {0}]'.format(
type(arguments)) # noqa
h_args = shlex.split(hosts)
f_args = shlex.split(arguments)
# Launch scan
args = [self._zmap_path] + h_args + ['-p', str(ports)] * (ports is not None) + f_args
if sudo:
args = ['sudo'] + args
self._command=args
p = subprocess.Popen(args, bufsize=100000,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
# wait until finished
# get output
(self._zmap_last_output, zmap_err) = p.communicate()
self._zmap_last_output = bytes.decode(self._zmap_last_output)
zmap_err = bytes.decode(zmap_err)
# If there was something on stderr, there was a problem so abort... in
# fact not always. As stated by AlenLPeacock :
# This actually makes python-zmap mostly unusable on most real-life
# networks -- a particular subnet might have dozens of scannable hosts,
# but if a single one is unreachable or unroutable during the scan,
# zmap.scan() returns nothing. This behavior also diverges significantly
# from commandline zmap, which simply stderrs individual problems but
# keeps on trucking.
zmap_err_keep_trace = []
zmap_warn_keep_trace = []
zmap_info_keep_trace=[]
if len(zmap_err) > 0:
regex_warning = re.compile('\[WARN\].*', re.IGNORECASE)
regex_info = re.compile('\[INFO\].*', re.IGNORECASE)
regex_fatal = re.compile('\[FATAL\].*', re.IGNORECASE)
for line in zmap_err.split(os.linesep):
if len(line) > 0:
rgw = regex_warning.search(line)
rgi=regex_info.search(line)
rgf=regex_fatal.search(line)
if rgw is not None:
# sys.stderr.write(line+os.linesep)
zmap_warn_keep_trace.append(line + os.linesep)
elif rgi is not None:
zmap_info_keep_trace.append(line + os.linesep)
elif rgf is not None:
zmap_err_keep_trace.append(line + os.linesep)
# raise PortScannerError(zmap_err)
else:
zmap_info_keep_trace.append(line)
return self.analyse_zmap_scan(
zmap_output=self._zmap_last_output,
zmap_err=zmap_err,
zmap_err_keep_trace=zmap_err_keep_trace,
zmap_warn_keep_trace=zmap_warn_keep_trace,
port=ports
)
def analyse_zmap_scan(self,port=None, zmap_output=None, zmap_err='', zmap_err_keep_trace='', zmap_warn_keep_trace=''):
"""
        Analyses zmap scan output
        Check ['scaninfo'][port]['error_info'] in the result to know
        if something went wrong (it is empty when everything was ok).
        :param zmap_output: string to analyse
        :returns: scan_result as dictionary
"""
if zmap_output is not None:
self._zmap_last_output = zmap_output
scan_result = {}
scan_result['alive']=[]
scan_result['error_info']=[]
scan_result['warn_info']=[]
if len(self._zmap_last_output)>0:
scan_result['alive']=self._zmap_last_output.split()
if zmap_err_keep_trace:
scan_result['error_info']=zmap_err_keep_trace
if zmap_warn_keep_trace:
scan_result['warn_info']=zmap_warn_keep_trace
# zmap command line
scan_info={}
scan_info['scaninfo']={}
scan_info['scaninfo'][port]=scan_result
scan_info['command_line']=' '.join(i for i in self._command)
self._scan_result = scan_info # store for later use
return scan_info
def __getitem__(self,port=None):
"""
returns a port's detail
"""
if sys.version_info[0] == 2:
            assert port and type(port) == int, 'Wrong type for [port], should be an int [was {0}]'.format(
type(port))
else:
            assert port and type(port) == int, 'Wrong type for [port], should be an int [was {0}]'.format(type(port))
return self._scan_result['scaninfo'].get(port,{}).get('alive',None)
def all_hosts(self):
"""
        returns a list of all hosts targeted by the last scan command
"""
if self._command:
if self._all_host:
return self._all_host
else:
args = self._command+['-d']+['-c 0']
p = subprocess.Popen(args,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
# wait until finished
# get output
                (msg, msg_err) = p.communicate()
                msg = bytes.decode(msg)
                if msg:
                    template = re.compile(r"""daddr: ((?<![\.\d])(?:\d{1,3}\.){3}\d{1,3}(?![\.\d]))""")
                    hosts = template.findall(msg)
self._all_host=hosts
return hosts
else:
return []
else:
return []
def command_line(self):
"""
returns command line used for the scan
may raise AssertionError exception if called before scanning
"""
assert 'command_line' in self._scan_result, 'Do a scan before trying to get result !'
return self._scan_result['command_line']
def scaninfo(self):
"""
returns scaninfo structure
{'tcp': {'services': '22', 'method': 'connect'}}
may raise AssertionError exception if called before scanning
"""
assert 'scaninfo' in self._scan_result, 'Do a scan before trying to get result !'
return self._scan_result['scaninfo']
def has_port(self, port):
"""
returns True if port has result, False otherwise
"""
        assert type(port) is int, 'Wrong type for [port], should be an int [was {0}]'.format(type(port))
assert 'scaninfo' in self._scan_result, 'Do a scan before trying to get result !'
if self._scan_result['scaninfo'].get(port,{}).get('alive',None):
return True
return False
    def csv(self):
        """
        returns CSV output as text
        Example :
        host;port;state
        127.0.0.1;80;alive
        """
        assert 'scaninfo' in self._scan_result, 'Do a scan before trying to get result !'
        if sys.version_info < (3, 0):
            fd = io.BytesIO()
        else:
            fd = io.StringIO()
        csv_output = csv.writer(fd, delimiter=';')
        csv_header = [
            'host',
            'port',
            'state',
        ]
        csv_output.writerow(csv_header)
        # one row per host that answered, for every port scanned
        for port, port_result in self._scan_result['scaninfo'].items():
            for host in port_result.get('alive', []):
                csv_output.writerow([host, port, 'alive'])
        return fd.getvalue()
############################################################################
def __scan_progressive__(self, hosts, ports, arguments, callback, sudo):
"""
Used by PortScannerAsync for callback
"""
for host in self._nm.listscan(hosts):
try:
scan_data = self._nm.scan(host, ports, arguments, sudo)
except PortScannerError:
scan_data = None
if callback is not None:
callback(host, scan_data)
return
############################################################################
class PortScannerAsync(object):
"""
    PortScannerAsync allows using zmap from Python asynchronously
    for each host scanned, the callback is called with the scan result for that host
"""
def __init__(self):
"""
Initialize the module
* detects zmap on the system and zmap version
* may raise PortScannerError exception if zmap is not found in the path
"""
self._process = None
self._nm = PortScanner()
return
def __del__(self):
"""
Cleanup when deleted
"""
if self._process is not None:
try:
if self._process.is_alive():
self._process.terminate()
except AssertionError:
# Happens on python3.4
# when using PortScannerAsync twice in a row
pass
self._process = None
return
def scan(self, hosts='127.0.0.1', ports=None, arguments='-sV', callback=None, sudo=False):
"""
Scan given hosts in a separate process and return host by host result using callback function
        PortScannerError exception from standard zmap is caught and you won't know about it, but scan_data will be None
:param hosts: string for hosts as zmap use it 'scanme.zmap.org' or '198.116.0-255.1-127' or '216.163.128.20/20'
:param ports: string for ports as zmap use it '22,53,110,143-4564'
:param arguments: string of arguments for zmap '-sU -sX -sC'
:param callback: callback function which takes (host, scan_data) as arguments
:param sudo: launch zmap with sudo if true
"""
if sys.version_info[0] == 2:
assert type(hosts) in (str, unicode), 'Wrong type for [hosts], should be a string [was {0}]'.format(
type(hosts))
assert type(ports) in (
str, unicode, type(None)), 'Wrong type for [ports], should be a string [was {0}]'.format(type(ports))
assert type(arguments) in (str, unicode), 'Wrong type for [arguments], should be a string [was {0}]'.format(
type(arguments))
else:
assert type(hosts) is str, 'Wrong type for [hosts], should be a string [was {0}]'.format(type(hosts))
assert type(ports) in (str, type(None)), 'Wrong type for [ports], should be a string [was {0}]'.format(
type(ports))
assert type(arguments) is str, 'Wrong type for [arguments], should be a string [was {0}]'.format(
type(arguments))
assert callable(callback) or callback is None, 'The [callback] {0} should be callable or None.'.format(
str(callback))
for redirecting_output in ['-oX', '-oA']:
assert redirecting_output not in arguments, 'Xml output can\'t be redirected from command line.\nYou can access it after a scan using:\nzmap.nm.get_zmap_last_output()'
self._process = Process(
target=__scan_progressive__,
args=(self, hosts, ports, arguments, callback, sudo)
)
self._process.daemon = True
self._process.start()
return
def stop(self):
"""
Stop the current scan process
"""
if self._process is not None:
self._process.terminate()
return
def wait(self, timeout=None):
"""
Wait for the current scan process to finish, or timeout
:param timeout: default = None, wait timeout seconds
"""
assert type(timeout) in (
int, type(None)), 'Wrong type for [timeout], should be an int or None [was {0}]'.format(type(timeout))
self._process.join(timeout)
return
def still_scanning(self):
"""
:returns: True if a scan is currently running, False otherwise
"""
try:
return self._process.is_alive()
except:
return False
############################################################################
class PortScannerYield(PortScannerAsync):
"""
    PortScannerYield allows using zmap from Python with a generator
    for each host scanned, the scan result for that host is yielded
"""
def __init__(self):
"""
Initialize the module
* detects zmap on the system and zmap version
* may raise PortScannerError exception if zmap is not found in the path
"""
PortScannerAsync.__init__(self)
return
def scan(self, hosts='127.0.0.1', ports=None, arguments='-sV', sudo=False):
"""
Scan given hosts in a separate process and return host by host result using callback function
        PortScannerError exception from standard zmap is caught and you won't know about it
:param hosts: string for hosts as zmap use it 'scanme.zmap.org' or '198.116.0-255.1-127' or '216.163.128.20/20'
:param ports: string for ports as zmap use it '22,53,110,143-4564'
:param arguments: string of arguments for zmap '-sU -sX -sC'
:param callback: callback function which takes (host, scan_data) as arguments
:param sudo: launch zmap with sudo if true
"""
assert type(hosts) is str, 'Wrong type for [hosts], should be a string [was {0}]'.format(type(hosts))
assert type(ports) in (str, type(None)), 'Wrong type for [ports], should be a string [was {0}]'.format(
type(ports))
assert type(arguments) is str, 'Wrong type for [arguments], should be a string [was {0}]'.format(
type(arguments))
for redirecting_output in ['-oX', '-oA']:
assert redirecting_output not in arguments, 'Xml output can\'t be redirected from command line.\nYou can access it after a scan using:\nzmap.nm.get_zmap_last_output()'
for host in self._nm.listscan(hosts):
try:
scan_data = self._nm.scan(host, ports, arguments, sudo)
except PortScannerError:
scan_data = None
yield (host, scan_data)
return
def stop(self):
pass
def wait(self, timeout=None):
pass
def still_scanning(self):
pass
class PortScannerError(Exception):
"""
Exception error class for PortScanner class
"""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
def __repr__(self):
return 'PortScannerError exception {0}'.format(self.value)
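
# --- Illustrative usage sketch (not part of the original module) ---
# A minimal example of driving the synchronous scanner, assuming zmap is
# installed and the process has the privileges zmap needs; the subnet, port
# and bandwidth below are placeholder values.
if __name__ == '__main__':
    zm = PortScanner()
    print('zmap version: {0}.{1}'.format(*zm.zmap_version()))
    # Scan one port across a subnet and list the hosts that answered
    report = zm.scan(hosts='192.168.1.0/24', ports=80, arguments='--bandwidth=1M')
    print(zm.command_line())
    for host in report['scaninfo'][80]['alive']:
        print('alive: {0}'.format(host))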
|
py | 1a51255c110b9d96d1ffea1c1c87c9d7d3104636 | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Finetuning the library models for sequence classification on GLUE (Bert, XLM, XLNet, RoBERTa)."""
from __future__ import absolute_import, division, print_function
import argparse
import glob
import logging
import os
import random
import json
import numpy as np
import torch
from torch.utils.data import (DataLoader, RandomSampler, SequentialSampler,
TensorDataset)
from torch.utils.data.distributed import DistributedSampler
try:
from torch.utils.tensorboard import SummaryWriter
except:
from tensorboardX import SummaryWriter
from tqdm import tqdm, trange
from transformers import (WEIGHTS_NAME, BertConfig,
BertForSequenceClassification, BertTokenizer,
RobertaConfig,
RobertaForSequenceClassification,
RobertaTokenizer,
XLMConfig, XLMForSequenceClassification,
XLMTokenizer, XLNetConfig,
XLNetForSequenceClassification,
XLNetTokenizer,
DistilBertConfig,
DistilBertForSequenceClassification,
DistilBertTokenizer,
AlbertConfig,
AlbertForSequenceClassification,
AlbertTokenizer,
)
from transformers import AdamW, get_linear_schedule_with_warmup
from transformers import glue_compute_metrics as compute_metrics
from transformers import glue_output_modes as output_modes
from transformers import glue_processors as processors
from transformers import glue_convert_examples_to_features as convert_examples_to_features
logger = logging.getLogger(__name__)
ALL_MODELS = sum((tuple(conf.pretrained_config_archive_map.keys()) for conf in (BertConfig, XLNetConfig, XLMConfig,
RobertaConfig, DistilBertConfig)), ())
MODEL_CLASSES = {
'bert': (BertConfig, BertForSequenceClassification, BertTokenizer),
'xlnet': (XLNetConfig, XLNetForSequenceClassification, XLNetTokenizer),
'xlm': (XLMConfig, XLMForSequenceClassification, XLMTokenizer),
'roberta': (RobertaConfig, RobertaForSequenceClassification, RobertaTokenizer),
'distilbert': (DistilBertConfig, DistilBertForSequenceClassification, DistilBertTokenizer),
'albert': (AlbertConfig, AlbertForSequenceClassification, AlbertTokenizer)
}
def set_seed(args):
random.seed(args.seed)
np.random.seed(args.seed)
torch.manual_seed(args.seed)
if args.n_gpu > 0:
torch.cuda.manual_seed_all(args.seed)
def train(args, train_dataset, model, tokenizer):
""" Train the model """
if args.local_rank in [-1, 0]:
tb_writer = SummaryWriter()
args.train_batch_size = args.per_gpu_train_batch_size * max(1, args.n_gpu)
train_sampler = RandomSampler(train_dataset) if args.local_rank == -1 else DistributedSampler(train_dataset)
train_dataloader = DataLoader(train_dataset, sampler=train_sampler, batch_size=args.train_batch_size)
if args.max_steps > 0:
t_total = args.max_steps
args.num_train_epochs = args.max_steps // (len(train_dataloader) // args.gradient_accumulation_steps) + 1
else:
t_total = len(train_dataloader) // args.gradient_accumulation_steps * args.num_train_epochs
# Prepare optimizer and schedule (linear warmup and decay)
no_decay = ['bias', 'LayerNorm.weight']
optimizer_grouped_parameters = [
{'params': [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)], 'weight_decay': args.weight_decay},
{'params': [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}
]
optimizer = AdamW(optimizer_grouped_parameters, lr=args.learning_rate, eps=args.adam_epsilon)
scheduler = get_linear_schedule_with_warmup(optimizer, num_warmup_steps=args.warmup_steps, num_training_steps=t_total)
if args.fp16:
try:
from apex import amp
except ImportError:
raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use fp16 training.")
model, optimizer = amp.initialize(model, optimizer, opt_level=args.fp16_opt_level)
# multi-gpu training (should be after apex fp16 initialization)
if args.n_gpu > 1:
model = torch.nn.DataParallel(model)
# Distributed training (should be after apex fp16 initialization)
if args.local_rank != -1:
model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.local_rank],
output_device=args.local_rank,
find_unused_parameters=True)
# Train!
logger.info("***** Running training *****")
logger.info(" Num examples = %d", len(train_dataset))
logger.info(" Num Epochs = %d", args.num_train_epochs)
logger.info(" Instantaneous batch size per GPU = %d", args.per_gpu_train_batch_size)
logger.info(" Total train batch size (w. parallel, distributed & accumulation) = %d",
args.train_batch_size * args.gradient_accumulation_steps * (torch.distributed.get_world_size() if args.local_rank != -1 else 1))
logger.info(" Gradient Accumulation steps = %d", args.gradient_accumulation_steps)
logger.info(" Total optimization steps = %d", t_total)
global_step = 0
tr_loss, logging_loss = 0.0, 0.0
model.zero_grad()
train_iterator = trange(int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0])
    set_seed(args)  # Added here for reproducibility (even between python 2 and 3)
for _ in train_iterator:
epoch_iterator = tqdm(train_dataloader, desc="Iteration", disable=args.local_rank not in [-1, 0])
for step, batch in enumerate(epoch_iterator):
model.train()
batch = tuple(t.to(args.device) for t in batch)
inputs = {'input_ids': batch[0],
'attention_mask': batch[1],
'labels': batch[3]}
if args.model_type != 'distilbert':
inputs['token_type_ids'] = batch[2] if args.model_type in ['bert', 'xlnet'] else None # XLM, DistilBERT and RoBERTa don't use segment_ids
outputs = model(**inputs)
loss = outputs[0] # model outputs are always tuple in transformers (see doc)
if args.n_gpu > 1:
loss = loss.mean() # mean() to average on multi-gpu parallel training
if args.gradient_accumulation_steps > 1:
loss = loss / args.gradient_accumulation_steps
if args.fp16:
with amp.scale_loss(loss, optimizer) as scaled_loss:
scaled_loss.backward()
else:
loss.backward()
tr_loss += loss.item()
if (step + 1) % args.gradient_accumulation_steps == 0:
if args.fp16:
torch.nn.utils.clip_grad_norm_(amp.master_params(optimizer), args.max_grad_norm)
else:
torch.nn.utils.clip_grad_norm_(model.parameters(), args.max_grad_norm)
optimizer.step()
scheduler.step() # Update learning rate schedule
model.zero_grad()
global_step += 1
if args.local_rank in [-1, 0] and args.logging_steps > 0 and global_step % args.logging_steps == 0:
logs = {}
if args.local_rank == -1 and args.evaluate_during_training: # Only evaluate when single GPU otherwise metrics may not average well
results = evaluate(args, model, tokenizer)
for key, value in results.items():
eval_key = 'eval_{}'.format(key)
logs[eval_key] = value
loss_scalar = (tr_loss - logging_loss) / args.logging_steps
learning_rate_scalar = scheduler.get_lr()[0]
logs['learning_rate'] = learning_rate_scalar
logs['loss'] = loss_scalar
logging_loss = tr_loss
for key, value in logs.items():
tb_writer.add_scalar(key, value, global_step)
print(json.dumps({**logs, **{'step': global_step}}))
if args.local_rank in [-1, 0] and args.save_steps > 0 and global_step % args.save_steps == 0:
# Save model checkpoint
output_dir = os.path.join(args.output_dir, 'checkpoint-{}'.format(global_step))
if not os.path.exists(output_dir):
os.makedirs(output_dir)
model_to_save = model.module if hasattr(model, 'module') else model # Take care of distributed/parallel training
model_to_save.save_pretrained(output_dir)
torch.save(args, os.path.join(output_dir, 'training_args.bin'))
logger.info("Saving model checkpoint to %s", output_dir)
if args.max_steps > 0 and global_step > args.max_steps:
epoch_iterator.close()
break
if args.max_steps > 0 and global_step > args.max_steps:
train_iterator.close()
break
if args.local_rank in [-1, 0]:
tb_writer.close()
return global_step, tr_loss / global_step
def evaluate(args, model, tokenizer, prefix=""):
# Loop to handle MNLI double evaluation (matched, mis-matched)
eval_task_names = ("mnli", "mnli-mm") if args.task_name == "mnli" else (args.task_name,)
eval_outputs_dirs = (args.output_dir, args.output_dir + '-MM') if args.task_name == "mnli" else (args.output_dir,)
results = {}
for eval_task, eval_output_dir in zip(eval_task_names, eval_outputs_dirs):
eval_dataset = load_and_cache_examples(args, eval_task, tokenizer, evaluate=True)
if not os.path.exists(eval_output_dir) and args.local_rank in [-1, 0]:
os.makedirs(eval_output_dir)
args.eval_batch_size = args.per_gpu_eval_batch_size * max(1, args.n_gpu)
# Note that DistributedSampler samples randomly
eval_sampler = SequentialSampler(eval_dataset)
eval_dataloader = DataLoader(eval_dataset, sampler=eval_sampler, batch_size=args.eval_batch_size)
# multi-gpu eval
if args.n_gpu > 1:
model = torch.nn.DataParallel(model)
# Eval!
logger.info("***** Running evaluation {} *****".format(prefix))
logger.info(" Num examples = %d", len(eval_dataset))
logger.info(" Batch size = %d", args.eval_batch_size)
eval_loss = 0.0
nb_eval_steps = 0
preds = None
out_label_ids = None
for batch in tqdm(eval_dataloader, desc="Evaluating"):
model.eval()
batch = tuple(t.to(args.device) for t in batch)
with torch.no_grad():
inputs = {'input_ids': batch[0],
'attention_mask': batch[1],
'labels': batch[3]}
if args.model_type != 'distilbert':
inputs['token_type_ids'] = batch[2] if args.model_type in ['bert', 'xlnet'] else None # XLM, DistilBERT and RoBERTa don't use segment_ids
outputs = model(**inputs)
tmp_eval_loss, logits = outputs[:2]
eval_loss += tmp_eval_loss.mean().item()
nb_eval_steps += 1
if preds is None:
preds = logits.detach().cpu().numpy()
out_label_ids = inputs['labels'].detach().cpu().numpy()
else:
preds = np.append(preds, logits.detach().cpu().numpy(), axis=0)
out_label_ids = np.append(out_label_ids, inputs['labels'].detach().cpu().numpy(), axis=0)
eval_loss = eval_loss / nb_eval_steps
if args.output_mode == "classification":
preds = np.argmax(preds, axis=1)
elif args.output_mode == "regression":
preds = np.squeeze(preds)
result = compute_metrics(eval_task, preds, out_label_ids)
results.update(result)
output_eval_file = os.path.join(eval_output_dir, prefix, "eval_results.txt")
with open(output_eval_file, "w") as writer:
logger.info("***** Eval results {} *****".format(prefix))
for key in sorted(result.keys()):
logger.info(" %s = %s", key, str(result[key]))
writer.write("%s = %s\n" % (key, str(result[key])))
return results
def load_and_cache_examples(args, task, tokenizer, evaluate=False):
if args.local_rank not in [-1, 0] and not evaluate:
        torch.distributed.barrier()  # Make sure only the first process in distributed training processes the dataset, and the others will use the cache
processor = processors[task]()
output_mode = output_modes[task]
# Load data features from cache or dataset file
cached_features_file = os.path.join(args.data_dir, 'cached_{}_{}_{}_{}'.format(
'dev' if evaluate else 'train',
list(filter(None, args.model_name_or_path.split('/'))).pop(),
str(args.max_seq_length),
str(task)))
if os.path.exists(cached_features_file) and not args.overwrite_cache:
logger.info("Loading features from cached file %s", cached_features_file)
features = torch.load(cached_features_file)
else:
logger.info("Creating features from dataset file at %s", args.data_dir)
label_list = processor.get_labels()
if task in ['mnli', 'mnli-mm'] and args.model_type in ['roberta']:
# HACK(label indices are swapped in RoBERTa pretrained model)
label_list[1], label_list[2] = label_list[2], label_list[1]
examples = processor.get_dev_examples(args.data_dir) if evaluate else processor.get_train_examples(args.data_dir)
features = convert_examples_to_features(examples,
tokenizer,
label_list=label_list,
max_length=args.max_seq_length,
output_mode=output_mode,
pad_on_left=bool(args.model_type in ['xlnet']), # pad on the left for xlnet
pad_token=tokenizer.convert_tokens_to_ids([tokenizer.pad_token])[0],
pad_token_segment_id=4 if args.model_type in ['xlnet'] else 0,
)
if args.local_rank in [-1, 0]:
logger.info("Saving features into cached file %s", cached_features_file)
torch.save(features, cached_features_file)
if args.local_rank == 0 and not evaluate:
        torch.distributed.barrier()  # Make sure only the first process in distributed training processes the dataset, and the others will use the cache
# Convert to Tensors and build dataset
all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
all_attention_mask = torch.tensor([f.attention_mask for f in features], dtype=torch.long)
all_token_type_ids = torch.tensor([f.token_type_ids for f in features], dtype=torch.long)
if output_mode == "classification":
all_labels = torch.tensor([f.label for f in features], dtype=torch.long)
elif output_mode == "regression":
all_labels = torch.tensor([f.label for f in features], dtype=torch.float)
dataset = TensorDataset(all_input_ids, all_attention_mask, all_token_type_ids, all_labels)
return dataset
def main():
parser = argparse.ArgumentParser()
## Required parameters
parser.add_argument("--data_dir", default=None, type=str, required=True,
help="The input data dir. Should contain the .tsv files (or other data files) for the task.")
parser.add_argument("--model_type", default=None, type=str, required=True,
help="Model type selected in the list: " + ", ".join(MODEL_CLASSES.keys()))
parser.add_argument("--model_name_or_path", default=None, type=str, required=True,
help="Path to pre-trained model or shortcut name selected in the list: " + ", ".join(ALL_MODELS))
parser.add_argument("--task_name", default=None, type=str, required=True,
help="The name of the task to train selected in the list: " + ", ".join(processors.keys()))
parser.add_argument("--output_dir", default=None, type=str, required=True,
help="The output directory where the model predictions and checkpoints will be written.")
## Other parameters
parser.add_argument("--config_name", default="", type=str,
help="Pretrained config name or path if not the same as model_name")
parser.add_argument("--tokenizer_name", default="", type=str,
help="Pretrained tokenizer name or path if not the same as model_name")
parser.add_argument("--cache_dir", default="", type=str,
help="Where do you want to store the pre-trained models downloaded from s3")
parser.add_argument("--max_seq_length", default=128, type=int,
help="The maximum total input sequence length after tokenization. Sequences longer "
"than this will be truncated, sequences shorter will be padded.")
parser.add_argument("--do_train", action='store_true',
help="Whether to run training.")
parser.add_argument("--do_eval", action='store_true',
help="Whether to run eval on the dev set.")
parser.add_argument("--evaluate_during_training", action='store_true',
help="Rul evaluation during training at each logging step.")
parser.add_argument("--do_lower_case", action='store_true',
help="Set this flag if you are using an uncased model.")
parser.add_argument("--per_gpu_train_batch_size", default=8, type=int,
help="Batch size per GPU/CPU for training.")
parser.add_argument("--per_gpu_eval_batch_size", default=8, type=int,
help="Batch size per GPU/CPU for evaluation.")
parser.add_argument('--gradient_accumulation_steps', type=int, default=1,
help="Number of updates steps to accumulate before performing a backward/update pass.")
parser.add_argument("--learning_rate", default=5e-5, type=float,
help="The initial learning rate for Adam.")
parser.add_argument("--weight_decay", default=0.0, type=float,
help="Weight decay if we apply some.")
parser.add_argument("--adam_epsilon", default=1e-8, type=float,
help="Epsilon for Adam optimizer.")
parser.add_argument("--max_grad_norm", default=1.0, type=float,
help="Max gradient norm.")
parser.add_argument("--num_train_epochs", default=3.0, type=float,
help="Total number of training epochs to perform.")
parser.add_argument("--max_steps", default=-1, type=int,
help="If > 0: set total number of training steps to perform. Override num_train_epochs.")
parser.add_argument("--warmup_steps", default=0, type=int,
help="Linear warmup over warmup_steps.")
parser.add_argument('--logging_steps', type=int, default=50,
help="Log every X updates steps.")
parser.add_argument('--save_steps', type=int, default=50,
help="Save checkpoint every X updates steps.")
parser.add_argument("--eval_all_checkpoints", action='store_true',
help="Evaluate all checkpoints starting with the same prefix as model_name ending and ending with step number")
parser.add_argument("--no_cuda", action='store_true',
help="Avoid using CUDA when available")
parser.add_argument('--overwrite_output_dir', action='store_true',
help="Overwrite the content of the output directory")
parser.add_argument('--overwrite_cache', action='store_true',
help="Overwrite the cached training and evaluation sets")
parser.add_argument('--seed', type=int, default=42,
help="random seed for initialization")
parser.add_argument('--fp16', action='store_true',
help="Whether to use 16-bit (mixed) precision (through NVIDIA apex) instead of 32-bit")
parser.add_argument('--fp16_opt_level', type=str, default='O1',
help="For fp16: Apex AMP optimization level selected in ['O0', 'O1', 'O2', and 'O3']."
"See details at https://nvidia.github.io/apex/amp.html")
parser.add_argument("--local_rank", type=int, default=-1,
help="For distributed training: local_rank")
parser.add_argument('--server_ip', type=str, default='', help="For distant debugging.")
parser.add_argument('--server_port', type=str, default='', help="For distant debugging.")
args = parser.parse_args()
if os.path.exists(args.output_dir) and os.listdir(args.output_dir) and args.do_train and not args.overwrite_output_dir:
raise ValueError("Output directory ({}) already exists and is not empty. Use --overwrite_output_dir to overcome.".format(args.output_dir))
# Setup distant debugging if needed
if args.server_ip and args.server_port:
# Distant debugging - see https://code.visualstudio.com/docs/python/debugging#_attach-to-a-local-script
import ptvsd
print("Waiting for debugger attach")
ptvsd.enable_attach(address=(args.server_ip, args.server_port), redirect_output=True)
ptvsd.wait_for_attach()
# Setup CUDA, GPU & distributed training
if args.local_rank == -1 or args.no_cuda:
device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu")
args.n_gpu = torch.cuda.device_count()
    else:  # Initializes the distributed backend which will take care of synchronizing nodes/GPUs
torch.cuda.set_device(args.local_rank)
device = torch.device("cuda", args.local_rank)
torch.distributed.init_process_group(backend='nccl')
args.n_gpu = 1
args.device = device
# Setup logging
logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt = '%m/%d/%Y %H:%M:%S',
level = logging.INFO if args.local_rank in [-1, 0] else logging.WARN)
logger.warning("Process rank: %s, device: %s, n_gpu: %s, distributed training: %s, 16-bits training: %s",
args.local_rank, device, args.n_gpu, bool(args.local_rank != -1), args.fp16)
# Set seed
set_seed(args)
# Prepare GLUE task
args.task_name = args.task_name.lower()
if args.task_name not in processors:
raise ValueError("Task not found: %s" % (args.task_name))
processor = processors[args.task_name]()
args.output_mode = output_modes[args.task_name]
label_list = processor.get_labels()
num_labels = len(label_list)
# Load pretrained model and tokenizer
if args.local_rank not in [-1, 0]:
torch.distributed.barrier() # Make sure only the first process in distributed training will download model & vocab
args.model_type = args.model_type.lower()
config_class, model_class, tokenizer_class = MODEL_CLASSES[args.model_type]
config = config_class.from_pretrained(args.config_name if args.config_name else args.model_name_or_path,
num_labels=num_labels,
finetuning_task=args.task_name,
cache_dir=args.cache_dir if args.cache_dir else None)
tokenizer = tokenizer_class.from_pretrained(args.tokenizer_name if args.tokenizer_name else args.model_name_or_path,
do_lower_case=args.do_lower_case,
cache_dir=args.cache_dir if args.cache_dir else None)
model = model_class.from_pretrained(args.model_name_or_path,
from_tf=bool('.ckpt' in args.model_name_or_path),
config=config,
cache_dir=args.cache_dir if args.cache_dir else None)
if args.local_rank == 0:
torch.distributed.barrier() # Make sure only the first process in distributed training will download model & vocab
model.to(args.device)
logger.info("Training/evaluation parameters %s", args)
# Training
if args.do_train:
train_dataset = load_and_cache_examples(args, args.task_name, tokenizer, evaluate=False)
global_step, tr_loss = train(args, train_dataset, model, tokenizer)
logger.info(" global_step = %s, average loss = %s", global_step, tr_loss)
# Saving best-practices: if you use defaults names for the model, you can reload it using from_pretrained()
if args.do_train and (args.local_rank == -1 or torch.distributed.get_rank() == 0):
# Create output directory if needed
if not os.path.exists(args.output_dir) and args.local_rank in [-1, 0]:
os.makedirs(args.output_dir)
logger.info("Saving model checkpoint to %s", args.output_dir)
# Save a trained model, configuration and tokenizer using `save_pretrained()`.
# They can then be reloaded using `from_pretrained()`
model_to_save = model.module if hasattr(model, 'module') else model # Take care of distributed/parallel training
model_to_save.save_pretrained(args.output_dir)
tokenizer.save_pretrained(args.output_dir)
# Good practice: save your training arguments together with the trained model
torch.save(args, os.path.join(args.output_dir, 'training_args.bin'))
# Load a trained model and vocabulary that you have fine-tuned
model = model_class.from_pretrained(args.output_dir)
tokenizer = tokenizer_class.from_pretrained(args.output_dir)
model.to(args.device)
# Evaluation
results = {}
if args.do_eval and args.local_rank in [-1, 0]:
tokenizer = tokenizer_class.from_pretrained(args.output_dir, do_lower_case=args.do_lower_case)
checkpoints = [args.output_dir]
if args.eval_all_checkpoints:
checkpoints = list(os.path.dirname(c) for c in sorted(glob.glob(args.output_dir + '/**/' + WEIGHTS_NAME, recursive=True)))
logging.getLogger("transformers.modeling_utils").setLevel(logging.WARN) # Reduce logging
logger.info("Evaluate the following checkpoints: %s", checkpoints)
for checkpoint in checkpoints:
global_step = checkpoint.split('-')[-1] if len(checkpoints) > 1 else ""
prefix = checkpoint.split('/')[-1] if checkpoint.find('checkpoint') != -1 else ""
model = model_class.from_pretrained(checkpoint)
model.to(args.device)
result = evaluate(args, model, tokenizer, prefix=prefix)
result = dict((k + '_{}'.format(global_step), v) for k, v in result.items())
results.update(result)
return results
if __name__ == "__main__":
main()
|