repo_name (string, 6-61 chars) | path (string, 4-230 chars) | copies (string, 1-3 chars) | size (string, 4-6 chars) | text (string, 1.01k-850k chars) | license (15 classes) | hash (int64, -9,220,477,234,079,998,000 to 9,219,060,020B) | line_mean (float64, 11.6-96.6) | line_max (int64, 32-939) | alpha_frac (float64, 0.26-0.9) | autogenerated (bool, 1 class) | ratio (float64, 1.62-6.1) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
krafczyk/spack | var/spack/repos/builtin/packages/py-lazyarray/package.py | 5 | 1820 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyLazyarray(PythonPackage):
"""a Python package that provides a lazily-evaluated numerical array class,
larray, based on and compatible with NumPy arrays."""
homepage = "http://bitbucket.org/apdavison/lazyarray/"
url = "https://pypi.io/packages/source/l/lazyarray/lazyarray-0.2.8.tar.gz"
version('0.2.10', '336033357459e66cbca5543bf003a2ba')
version('0.2.8', '8e0072f0892b9fc0516e7048f96e9d74')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'), when='^python@3:')
| lgpl-2.1 | -8,476,803,654,688,925,000 | 45.666667 | 83 | 0.675275 | false | 3.752577 | false | false | false |
GoogleCloudPlatform/PerfKitBenchmarker | tests/providers/gcp/bigquery_test.py | 1 | 8239 | # Copyright 2020 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for perfkitbenchmarker.providers.gcp.bigquery."""
import json
import unittest
from absl import flags
from perfkitbenchmarker.providers.gcp import bigquery
from tests import pkb_common_test_case
PACKAGE_NAME = 'PACKAGE_NAME'
DATASET_ID = 'DATASET_ID'
PROJECT_ID = 'PROJECT_ID'
QUERY_NAME = 'QUERY_NAME'
_TEST_RUN_URI = 'fakeru'
_GCP_ZONE_US_CENTRAL_1_C = 'us-central1-c'
_BASE_BIGQUERY_SPEC = {
'type': 'bigquery',
'cluster_identifier': 'bigquerypkb.tpcds_100G'
}
FLAGS = flags.FLAGS
class FakeRemoteVM(object):
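  """Fake remote VM that accepts installing only the google_cloud_sdk package."""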
def Install(self, package_name):
if package_name != 'google_cloud_sdk':
raise RuntimeError
class FakeRemoteVMForCliClientInterfacePrepare(object):
"""Class to setup a Fake VM that prepares a Client VM (CLI Client)."""
def __init__(self):
self.valid_install_package_list = ['pip', 'google_cloud_sdk']
self.valid_remote_command_list = [
'sudo pip install absl-py',
'/tmp/pkb/google-cloud-sdk/bin/gcloud auth activate-service-account '
'SERVICE_ACCOUNT --key-file=SERVICE_ACCOUNT_KEY_FILE',
'chmod 755 script_runner.sh',
'echo "\nMaxSessions 100" | sudo tee -a /etc/ssh/sshd_config'
]
def Install(self, package_name):
if package_name not in self.valid_install_package_list:
raise RuntimeError
def RemoteCommand(self, command):
if command not in self.valid_remote_command_list:
raise RuntimeError
def InstallPreprovisionedPackageData(self, package_name, filenames,
install_path):
if package_name != 'PACKAGE_NAME':
raise RuntimeError
def PushFile(self, source_path):
pass
class FakeRemoteVMForCliClientInterfaceExecuteQuery(object):
"""Class to setup a Fake VM that executes script on Client VM (CLI Client)."""
def RemoteCommand(self, command):
if command == 'echo "\nMaxSessions 100" | sudo tee -a /etc/ssh/sshd_config':
return None, None
expected_command = ('python script_driver.py --script={} --bq_project_id={}'
' --bq_dataset_id={}').format(QUERY_NAME, PROJECT_ID,
DATASET_ID)
if command != expected_command:
raise RuntimeError
response_object = {QUERY_NAME: {'job_id': 'JOB_ID', 'execution_time': 1.0}}
response = json.dumps(response_object)
return response, None
class FakeRemoteVMForJavaClientInterfacePrepare(object):
"""Class to setup a Fake VM that prepares a Client VM (JAVA Client)."""
def __init__(self):
self.valid_install_package_list = ['openjdk']
def Install(self, package_name):
if package_name != 'openjdk':
raise RuntimeError
def RemoteCommand(self, command):
if command == 'echo "\nMaxSessions 100" | sudo tee -a /etc/ssh/sshd_config':
return None, None
else:
raise RuntimeError
def InstallPreprovisionedPackageData(self, package_name, filenames,
install_path):
if package_name != 'PACKAGE_NAME':
raise RuntimeError
class FakeRemoteVMForJavaClientInterfaceExecuteQuery(object):
"""Class to setup a Fake VM that executes script on Client VM (JAVA Client)."""
def RemoteCommand(self, command):
if command == 'echo "\nMaxSessions 100" | sudo tee -a /etc/ssh/sshd_config':
return None, None
expected_command = ('java -cp bq-java-client-2.3.jar '
'com.google.cloud.performance.edw.Single --project {} '
'--credentials_file {} --dataset {} --query_file '
'{}').format(PROJECT_ID, 'SERVICE_ACCOUNT_KEY_FILE',
DATASET_ID, QUERY_NAME)
if command != expected_command:
raise RuntimeError
response_object = {'query_wall_time_in_secs': 1.0,
'details': {'job_id': 'JOB_ID'}}
response = json.dumps(response_object)
return response, None
class FakeBenchmarkSpec(object):
"""Fake BenchmarkSpec to use for setting client interface attributes."""
def __init__(self, client_vm):
self.name = PACKAGE_NAME
self.vms = [client_vm]
class BigqueryTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(BigqueryTestCase, self).setUp()
FLAGS.cloud = 'GCP'
FLAGS.run_uri = _TEST_RUN_URI
FLAGS.zones = [_GCP_ZONE_US_CENTRAL_1_C]
def testGetBigQueryClientInterfaceGeneric(self):
interface = bigquery.GetBigQueryClientInterface(PROJECT_ID, DATASET_ID)
self.assertEqual(interface.project_id, PROJECT_ID)
self.assertEqual(interface.dataset_id, DATASET_ID)
def testGetBigQueryClientInterfaceCli(self):
FLAGS.bq_client_interface = 'CLI'
interface = bigquery.GetBigQueryClientInterface(PROJECT_ID, DATASET_ID)
self.assertIsInstance(interface, bigquery.CliClientInterface)
def testGetBigQueryClientInterfaceJava(self):
FLAGS.bq_client_interface = 'JAVA'
interface = bigquery.GetBigQueryClientInterface(PROJECT_ID, DATASET_ID)
self.assertIsInstance(interface, bigquery.JavaClientInterface)
  def testGenericClientInterfaceGetMetadata(self):
FLAGS.bq_client_interface = 'CLI'
interface = bigquery.GetBigQueryClientInterface(PROJECT_ID, DATASET_ID)
self.assertDictEqual(interface.GetMetadata(), {'client': 'CLI'})
FLAGS.bq_client_interface = 'JAVA'
interface = bigquery.GetBigQueryClientInterface(PROJECT_ID, DATASET_ID)
self.assertDictEqual(interface.GetMetadata(), {'client': 'JAVA'})
def testCliClientInterfacePrepare(self):
FLAGS.bq_client_interface = 'CLI'
FLAGS.gcp_service_account_key_file = 'SERVICE_ACCOUNT_KEY_FILE'
FLAGS.gcp_service_account = 'SERVICE_ACCOUNT'
interface = bigquery.GetBigQueryClientInterface(PROJECT_ID, DATASET_ID)
self.assertIsInstance(interface, bigquery.CliClientInterface)
bm_spec = FakeBenchmarkSpec(FakeRemoteVMForCliClientInterfacePrepare())
interface.SetProvisionedAttributes(bm_spec)
interface.Prepare(PACKAGE_NAME)
def testCliClientInterfaceExecuteQuery(self):
FLAGS.bq_client_interface = 'CLI'
interface = bigquery.GetBigQueryClientInterface(PROJECT_ID, DATASET_ID)
self.assertIsInstance(interface, bigquery.CliClientInterface)
bm_spec = FakeBenchmarkSpec(FakeRemoteVMForCliClientInterfaceExecuteQuery())
interface.SetProvisionedAttributes(bm_spec)
performance, details = interface.ExecuteQuery(QUERY_NAME)
self.assertEqual(performance, 1.0)
self.assertDictEqual(details, {'client': 'CLI', 'job_id': 'JOB_ID'})
def testJavaClientInterfacePrepare(self):
FLAGS.bq_client_interface = 'JAVA'
FLAGS.gcp_service_account_key_file = 'SERVICE_ACCOUNT_KEY_FILE'
interface = bigquery.GetBigQueryClientInterface(PROJECT_ID, DATASET_ID)
self.assertIsInstance(interface, bigquery.JavaClientInterface)
bm_spec = FakeBenchmarkSpec(FakeRemoteVMForJavaClientInterfacePrepare())
interface.SetProvisionedAttributes(bm_spec)
interface.Prepare(PACKAGE_NAME)
def testJavaClientInterfaceExecuteQuery(self):
FLAGS.bq_client_interface = 'JAVA'
FLAGS.gcp_service_account_key_file = 'SERVICE_ACCOUNT_KEY_FILE'
interface = bigquery.GetBigQueryClientInterface(PROJECT_ID, DATASET_ID)
self.assertIsInstance(interface, bigquery.JavaClientInterface)
bm_spec = FakeBenchmarkSpec(
FakeRemoteVMForJavaClientInterfaceExecuteQuery())
interface.SetProvisionedAttributes(bm_spec)
performance, details = interface.ExecuteQuery(QUERY_NAME)
self.assertEqual(performance, 1.0)
self.assertDictEqual(details, {'client': 'JAVA', 'job_id': 'JOB_ID'})
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -1,418,090,864,913,887,500 | 36.967742 | 81 | 0.704576 | false | 3.748408 | true | false | false |
lk-geimfari/mimesis | mimesis/data/int/finance.py | 1 | 264187 | # -*- coding: utf-8 -*-
from typing import List
"""Provides all the generic data related to the business."""
CURRENCY_ISO_CODES: List[str] = [
"AED",
"AFN",
"ALL",
"AMD",
"ANG",
"AOA",
"ARS",
"AUD",
"AWG",
"AZN",
"BAM",
"BBD",
"BDT",
"BGN",
"BHD",
"BIF",
"BMD",
"BND",
"BOB",
"BOV",
"BRL",
"BSD",
"BTN",
"BWP",
"BYN",
"BYR",
"BZD",
"CAD",
"CDF",
"CHE",
"CHF",
"CHW",
"CLF",
"CLP",
"CNY",
"COP",
"COU",
"CRC",
"CUC",
"CUP",
"CVE",
"CZK",
"DJF",
"DKK",
"DOP",
"DZD",
"EGP",
"ERN",
"ETB",
"EUR",
"FJD",
"FKP",
"GBP",
"GEL",
"GHS",
"GIP",
"GMD",
"GNF",
"GTQ",
"GYD",
"HKD",
"HNL",
"HRK",
"HTG",
"HUF",
"IDR",
"ILS",
"INR",
"IQD",
"IRR",
"ISK",
"JMD",
"JOD",
"JPY",
"KES",
"KGS",
"KHR",
"KMF",
"KPW",
"KRW",
"KWD",
"KYD",
"KZT",
"LAK",
"LBP",
"LKR",
"LRD",
"LSL",
"LYD",
"MAD",
"MDL",
"MGA",
"MKD",
"MMK",
"MNT",
"MOP",
"MRO",
"MUR",
"MVR",
"MWK",
"MXN",
"MXV",
"MYR",
"MZN",
"NAD",
"NGN",
"NIO",
"NOK",
"NPR",
"NZD",
"OMR",
"PAB",
"PEN",
"PGK",
"PHP",
"PKR",
"PLN",
"PYG",
"QAR",
"RON",
"RSD",
"RUB",
"RWF",
"SAR",
"SBD",
"SCR",
"SDG",
"SEK",
"SGD",
"SHP",
"SLL",
"SOS",
"SRD",
"SSP",
"STD",
"SVC",
"SYP",
"SZL",
"THB",
"TJS",
"TMT",
"TND",
"TOP",
"TRY",
"TTD",
"TWD",
"TZS",
"UAH",
"UGX",
"USD",
"USN",
"UYI",
"UYU",
"UZS",
"VEF",
"VND",
"VUV",
"WST",
"XAF",
"XAG",
"XAU",
"XBA",
"XBB",
"XBC",
"XBD",
"XCD",
"XDR",
"XOF",
"XPD",
"XPF",
"XPT",
"XSU",
"XTS",
"XUA",
"XXX",
"YER",
"ZAR",
"ZMW",
"ZWL",
]
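# Ticker codes for common cryptocurrencies.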
CRYPTOCURRENCY_ISO_CODES = [
"BCH",
"BNB",
"BTC",
"DASH",
"DOT",
"EOS",
"ETH",
"IOT",
"LTC",
"USDT",
"VTC",
"WBTC",
"XBC",
"XBT",
"XLM",
"XMR",
"XRP",
"XZC",
"ZEC",
]
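# Currency symbols keyed by locale code; "default" is the fallback symbol.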
CURRENCY_SYMBOLS = {
"cs": "Kč",
"da": "kr",
"de": "€",
"de-at": "€",
"de-ch": "Fr.",
"el": "€",
"en": "$",
"en-ca": "$",
"en-gb": "£",
"en-au": "$",
"es": "€",
"es-mx": "$",
"et": "€",
"fa": "﷼",
"fi": "€",
"fr": "€",
"hu": "Ft",
"is": "kr",
"it": "€",
"ja": "¥",
"kk": "₸",
"ko": "₩",
"nl": "€",
"nl-be": "€",
"no": "kr",
"pl": "zł",
"pt": "€",
"pt-br": "R$",
"ru": "₽",
"sk": "€",
"sv": "kr",
"tr": "₺",
"uk": "₴",
"zh": "¥",
"default": "$",
}
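# Unicode symbols for cryptocurrencies (Bitcoin, Litecoin, Ether).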
CRYPTOCURRENCY_SYMBOLS = [
"₿",
"Ł",
"Ξ",
]
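# Major stock exchanges.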
STOCK_EXCHANGES = ["NYSE", "NASDAQ", "AMEX", "JPX", "SSE", "HKEX", "Euronext"]
STOCK_TICKERS = [
"A",
"AA",
"AABA",
"AAC",
"AAL",
"AAMC",
"AAME",
"AAN",
"AAOI",
"AAON",
"AAP",
"AAPL",
"AAT",
"AAU",
"AAWW",
"AAXJ",
"AAXN",
"AB",
"ABB",
"ABBV",
"ABC",
"ABCB",
"ABDC",
"ABEO",
"ABEOW",
"ABEV",
"ABG",
"ABIL",
"ABIO",
"ABM",
"ABMD",
"ABR",
"ABR^A",
"ABR^B",
"ABR^C",
"ABT",
"ABTX",
"ABUS",
"AC",
"ACA",
"ACAD",
"ACAM",
"ACAMU",
"ACAMW",
"ACB",
"ACBI",
"ACC",
"ACCO",
"ACCP",
"ACER",
"ACGL",
"ACGLO",
"ACGLP",
"ACH",
"ACHC",
"ACHN",
"ACHV",
"ACIA",
"ACIU",
"ACIW",
"ACLS",
"ACM",
"ACMR",
"ACN",
"ACNB",
"ACOR",
"ACP",
"ACRE",
"ACRS",
"ACRX",
"ACST",
"ACT",
"ACTG",
"ACTT",
"ACTTU",
"ACTTW",
"ACU",
"ACV",
"ACWI",
"ACWX",
"ACY",
"ADAP",
"ADBE",
"ADC",
"ADES",
"ADI",
"ADIL",
"ADILW",
"ADM",
"ADMA",
"ADMP",
"ADMS",
"ADNT",
"ADP",
"ADPT",
"ADRA",
"ADRD",
"ADRE",
"ADRO",
"ADRU",
"ADS",
"ADSK",
"ADSW",
"ADT",
"ADTN",
"ADUS",
"ADVM",
"ADX",
"ADXS",
"AE",
"AEB",
"AEE",
"AEF",
"AEG",
"AEGN",
"AEH",
"AEHR",
"AEIS",
"AEL",
"AEM",
"AEMD",
"AEO",
"AEP",
"AEP^B",
"AER",
"AERI",
"AES",
"AESE",
"AEY",
"AEYE",
"AEZS",
"AFB",
"AFC",
"AFG",
"AFGB",
"AFGE",
"AFGH",
"AFH",
"AFHBL",
"AFI",
"AFIN",
"AFINP",
"AFL",
"AFMD",
"AFT",
"AFYA",
"AG",
"AGBA",
"AGBAR",
"AGBAU",
"AGBAW",
"AGCO",
"AGD",
"AGE",
"AGEN",
"AGFS",
"AGFSW",
"AGI",
"AGIO",
"AGLE",
"AGM",
"AGM.A",
"AGMH",
"AGM^A",
"AGM^C",
"AGM^D",
"AGN",
"AGNC",
"AGNCB",
"AGNCM",
"AGNCN",
"AGND",
"AGO",
"AGO^B",
"AGO^E",
"AGO^F",
"AGR",
"AGRO",
"AGRX",
"AGS",
"AGTC",
"AGX",
"AGYS",
"AGZD",
"AHC",
"AHH",
"AHH^A",
"AHL^C",
"AHL^D",
"AHL^E",
"AHPI",
"AHT",
"AHT^D",
"AHT^F",
"AHT^G",
"AHT^H",
"AHT^I",
"AI",
"AIA",
"AIC",
"AIF",
"AIG",
"AIG.WS",
"AIG^A",
"AIHS",
"AIM",
"AIMC",
"AIMT",
"AIN",
"AINC",
"AINV",
"AIQ",
"AIR",
"AIRG",
"AIRI",
"AIRR",
"AIRT",
"AIRTP",
"AIRTW",
"AIT",
"AIV",
"AIW",
"AIZ",
"AIZP",
"AI^B",
"AI^C",
"AJG",
"AJRD",
"AJX",
"AJXA",
"AKAM",
"AKBA",
"AKCA",
"AKER",
"AKG",
"AKO.A",
"AKO.B",
"AKR",
"AKRO",
"AKRX",
"AKS",
"AKTS",
"AKTX",
"AL",
"ALAC",
"ALACR",
"ALACU",
"ALACW",
"ALB",
"ALBO",
"ALC",
"ALCO",
"ALDR",
"ALDX",
"ALE",
"ALEC",
"ALEX",
"ALG",
"ALGN",
"ALGR",
"ALGRR",
"ALGRU",
"ALGRW",
"ALGT",
"ALIM",
"ALIT",
"ALJJ",
"ALK",
"ALKS",
"ALL",
"ALLE",
"ALLK",
"ALLO",
"ALLT",
"ALLY",
"ALLY^A",
"ALL^A",
"ALL^B",
"ALL^D.CL",
"ALL^E.CL",
"ALL^F.CL",
"ALL^G",
"ALL^H",
"ALNA",
"ALNY",
"ALO",
"ALOT",
"ALPN",
"ALP^Q",
"ALRM",
"ALRN",
"ALRS",
"ALSK",
"ALSN",
"ALT",
"ALTM",
"ALTR",
"ALTY",
"ALV",
"ALX",
"ALXN",
"ALYA",
"AL^A",
"AM",
"AMAG",
"AMAL",
"AMAT",
"AMBA",
"AMBC",
"AMBCW",
"AMBO",
"AMC",
"AMCA",
"AMCI",
"AMCIU",
"AMCIW",
"AMCR",
"AMCX",
"AMD",
"AME",
"AMED",
"AMEH",
"AMG",
"AMGN",
"AMH",
"AMH^D",
"AMH^E",
"AMH^F",
"AMH^G",
"AMH^H",
"AMK",
"AMKR",
"AMN",
"AMNB",
"AMOT",
"AMOV",
"AMP",
"AMPE",
"AMPH",
"AMPY",
"AMR",
"AMRB",
"AMRC",
"AMRH",
"AMRHW",
"AMRK",
"AMRN",
"AMRS",
"AMRWW",
"AMRX",
"AMS",
"AMSC",
"AMSF",
"AMSWA",
"AMT",
"AMTB",
"AMTBB",
"AMTD",
"AMTX",
"AMWD",
"AMX",
"AMZN",
"AN",
"ANAB",
"ANAT",
"ANCN",
"ANDA",
"ANDAR",
"ANDAU",
"ANDAW",
"ANDE",
"ANET",
"ANF",
"ANFI",
"ANGI",
"ANGO",
"ANH",
"ANH^A",
"ANH^B",
"ANH^C",
"ANIK",
"ANIP",
"ANIX",
"ANSS",
"ANTE",
"ANTM",
"ANY",
"AOBC",
"AOD",
"AON",
"AOS",
"AOSL",
"AP",
"APA",
"APAM",
"APD",
"APDN",
"APDNW",
"APEI",
"APEN",
"APEX",
"APH",
"APHA",
"APLE",
"APLS",
"APLT",
"APM",
"APO",
"APOG",
"APOP",
"APOPW",
"APO^A",
"APO^B",
"APPF",
"APPN",
"APPS",
"APRN",
"APT",
"APTO",
"APTS",
"APTV",
"APTX",
"APVO",
"APWC",
"APY",
"APYX",
"AQ",
"AQB",
"AQMS",
"AQN",
"AQNA",
"AQNB",
"AQST",
"AQUA",
"AR",
"ARA",
"ARAV",
"ARAY",
"ARC",
"ARCB",
"ARCC",
"ARCE",
"ARCH",
"ARCO",
"ARCT",
"ARD",
"ARDC",
"ARDS",
"ARDX",
"ARE",
"AREC",
"ARES",
"ARES^A",
"AREX",
"ARE^D",
"ARGD",
"ARGO",
"ARGX",
"ARI",
"ARKR",
"ARL",
"ARLO",
"ARLP",
"ARMK",
"ARMP",
"ARNA",
"ARNC",
"ARNC^",
"AROC",
"AROW",
"ARPO",
"ARQL",
"ARR",
"ARR^B",
"ARTL",
"ARTLW",
"ARTNA",
"ARTW",
"ARTX",
"ARVN",
"ARW",
"ARWR",
"ARYA",
"ARYAU",
"ARYAW",
"ASA",
"ASB",
"ASB^C",
"ASB^D",
"ASB^E",
"ASC",
"ASET",
"ASFI",
"ASG",
"ASGN",
"ASH",
"ASIX",
"ASLN",
"ASM",
"ASMB",
"ASML",
"ASNA",
"ASND",
"ASPN",
"ASPS",
"ASPU",
"ASR",
"ASRT",
"ASRV",
"ASRVP",
"ASTC",
"ASTE",
"ASUR",
"ASX",
"ASYS",
"AT",
"ATAI",
"ATAX",
"ATEC",
"ATEN",
"ATEST",
"ATEST.A",
"ATEST.B",
"ATEST.C",
"ATEX",
"ATGE",
"ATH",
"ATHE",
"ATHM",
"ATHX",
"ATH^A",
"ATI",
"ATIF",
"ATIS",
"ATISW",
"ATKR",
"ATLC",
"ATLO",
"ATNI",
"ATNM",
"ATNX",
"ATO",
"ATOM",
"ATOS",
"ATR",
"ATRA",
"ATRC",
"ATRI",
"ATRO",
"ATRS",
"ATSG",
"ATTO",
"ATU",
"ATUS",
"ATV",
"ATVI",
"ATXI",
"AU",
"AUB",
"AUBN",
"AUDC",
"AUG",
"AUMN",
"AUO",
"AUPH",
"AUTL",
"AUTO",
"AUY",
"AVA",
"AVAL",
"AVAV",
"AVB",
"AVCO",
"AVD",
"AVDL",
"AVDR",
"AVEO",
"AVGO",
"AVGR",
"AVH",
"AVID",
"AVK",
"AVLR",
"AVNS",
"AVNW",
"AVP",
"AVRO",
"AVT",
"AVTR",
"AVTR^A",
"AVX",
"AVXL",
"AVY",
"AVYA",
"AWF",
"AWI",
"AWK",
"AWP",
"AWR",
"AWRE",
"AWSM",
"AWX",
"AX",
"AXAS",
"AXDX",
"AXE",
"AXGN",
"AXGT",
"AXL",
"AXLA",
"AXNX",
"AXO",
"AXP",
"AXR",
"AXS",
"AXSM",
"AXS^D",
"AXS^E",
"AXTA",
"AXTI",
"AXU",
"AY",
"AYI",
"AYR",
"AYTU",
"AYX",
"AZN",
"AZO",
"AZPN",
"AZRE",
"AZRX",
"AZUL",
"AZZ",
"B",
"BA",
"BABA",
"BAC",
"BAC^A",
"BAC^B",
"BAC^C",
"BAC^E",
"BAC^K",
"BAC^L",
"BAC^M",
"BAC^Y",
"BAF",
"BAH",
"BAM",
"BANC",
"BANC^D",
"BANC^E",
"BAND",
"BANF",
"BANFP",
"BANR",
"BANX",
"BAP",
"BAS",
"BASI",
"BATRA",
"BATRK",
"BAX",
"BB",
"BBAR",
"BBBY",
"BBC",
"BBCP",
"BBD",
"BBDC",
"BBDO",
"BBF",
"BBGI",
"BBH",
"BBI",
"BBIO",
"BBK",
"BBL",
"BBN",
"BBP",
"BBRX",
"BBSI",
"BBT",
"BBT^F",
"BBT^G",
"BBT^H",
"BBU",
"BBVA",
"BBW",
"BBX",
"BBY",
"BC",
"BCBP",
"BCC",
"BCDA",
"BCDAW",
"BCE",
"BCEI",
"BCEL",
"BCH",
"BCLI",
"BCML",
"BCNA",
"BCO",
"BCOM",
"BCOR",
"BCOV",
"BCOW",
"BCPC",
"BCRH",
"BCRX",
"BCS",
"BCSF",
"BCTF",
"BCV",
"BCV^A",
"BCX",
"BCYC",
"BC^A",
"BC^B",
"BC^C",
"BDC",
"BDGE",
"BDJ",
"BDL",
"BDN",
"BDR",
"BDSI",
"BDX",
"BDXA",
"BE",
"BEAT",
"BECN",
"BEDU",
"BELFA",
"BELFB",
"BEN",
"BEP",
"BERY",
"BEST",
"BF.A",
"BF.B",
"BFAM",
"BFC",
"BFIN",
"BFIT",
"BFK",
"BFO",
"BFRA",
"BFS",
"BFST",
"BFS^C",
"BFS^D",
"BFY",
"BFZ",
"BG",
"BGB",
"BGCP",
"BGFV",
"BGG",
"BGH",
"BGI",
"BGIO",
"BGNE",
"BGR",
"BGRN",
"BGS",
"BGSF",
"BGT",
"BGX",
"BGY",
"BH",
"BH.A",
"BHAT",
"BHB",
"BHC",
"BHE",
"BHF",
"BHFAL",
"BHFAP",
"BHGE",
"BHK",
"BHLB",
"BHP",
"BHR",
"BHR^B",
"BHR^D",
"BHTG",
"BHV",
"BHVN",
"BIB",
"BICK",
"BID",
"BIDU",
"BIF",
"BIG",
"BIIB",
"BILI",
"BIMI",
"BIO",
"BIO.B",
"BIOC",
"BIOL",
"BIOS",
"BIOX",
"BIOX.WS",
"BIP",
"BIS",
"BIT",
"BITA",
"BIVI",
"BJ",
"BJRI",
"BK",
"BKCC",
"BKCH",
"BKD",
"BKE",
"BKEP",
"BKEPP",
"BKH",
"BKI",
"BKJ",
"BKK",
"BKN",
"BKNG",
"BKSC",
"BKT",
"BKTI",
"BKU",
"BKYI",
"BK^C",
"BL",
"BLBD",
"BLCM",
"BLCN",
"BLD",
"BLDP",
"BLDR",
"BLE",
"BLFS",
"BLIN ",
"BLK",
"BLKB",
"BLL",
"BLMN",
"BLNK",
"BLNKW",
"BLPH",
"BLRX",
"BLU",
"BLUE",
"BLW",
"BLX",
"BMA",
"BMCH",
"BME",
"BMI",
"BMLP",
"BML^G",
"BML^H",
"BML^J",
"BML^L",
"BMO",
"BMRA",
"BMRC",
"BMRN",
"BMTC",
"BMY",
"BND",
"BNDW",
"BNDX",
"BNED",
"BNFT",
"BNGO",
"BNGOW",
"BNKL",
"BNS",
"BNSO",
"BNTC",
"BNTCW",
"BNY",
"BOCH",
"BOE",
"BOH",
"BOKF",
"BOKFL",
"BOLD",
"BOMN",
"BOOM",
"BOOT",
"BORR",
"BOSC",
"BOTJ",
"BOTZ",
"BOX",
"BOXL",
"BP",
"BPFH",
"BPL",
"BPMC",
"BPMP",
"BPMX",
"BPOP",
"BPOPM",
"BPOPN",
"BPR",
"BPRAP",
"BPRN",
"BPT",
"BPTH",
"BPY",
"BPYPO",
"BPYPP",
"BQH",
"BR",
"BRC",
"BREW",
"BRFS",
"BRG",
"BRG^A",
"BRG^C",
"BRG^D",
"BRID",
"BRK.A",
"BRK.B",
"BRKL",
"BRKR",
"BRKS",
"BRN",
"BRO",
"BROG",
"BROGR",
"BROGU",
"BROGW",
"BRPA",
"BRPAR",
"BRPAU",
"BRPAW",
"BRPM",
"BRPM.U",
"BRPM.WS",
"BRQS",
"BRT",
"BRX",
"BRY",
"BSA",
"BSAC",
"BSBR",
"BSD",
"BSE",
"BSET",
"BSGM",
"BSIG",
"BSL",
"BSM",
"BSMX",
"BSQR",
"BSRR",
"BST",
"BSTC",
"BSTZ",
"BSVN",
"BSX",
"BT",
"BTA",
"BTAI",
"BTE",
"BTEC",
"BTG",
"BTI",
"BTN",
"BTO",
"BTT",
"BTU",
"BTZ",
"BUD",
"BUI",
"BURG",
"BURL",
"BUSE",
"BV",
"BVN",
"BVSN",
"BVXV",
"BVXVW",
"BW",
"BWA",
"BWAY",
"BWB",
"BWEN",
"BWFG",
"BWG",
"BWL.A",
"BWMC",
"BWMCU",
"BWMCW",
"BWXT",
"BX",
"BXC",
"BXG",
"BXMT",
"BXMX",
"BXP",
"BXP^B",
"BXS",
"BY",
"BYD",
"BYFC",
"BYM",
"BYND",
"BYSI",
"BZH",
"BZM",
"BZUN",
"C",
"CAAP",
"CAAS",
"CABO",
"CAC",
"CACC",
"CACG",
"CACI",
"CADE",
"CAE",
"CAF",
"CAG",
"CAH",
"CAI",
"CAI^A",
"CAI^B",
"CAJ",
"CAKE",
"CAL",
"CALA",
"CALM",
"CALX",
"CAMP",
"CAMT",
"CANF",
"CANG",
"CAPL",
"CAPR",
"CAR",
"CARA",
"CARB",
"CARE",
"CARG",
"CARO",
"CARS",
"CART",
"CARV",
"CARZ",
"CASA",
"CASH",
"CASI",
"CASS",
"CASY",
"CAT",
"CATB",
"CATC",
"CATH",
"CATM",
"CATO",
"CATS",
"CATY",
"CB",
"CBAN",
"CBAT",
"CBAY",
"CBB",
"CBB^B",
"CBD",
"CBFV",
"CBH",
"CBIO",
"CBL",
"CBLI",
"CBLK",
"CBL^D",
"CBL^E",
"CBM",
"CBMB",
"CBMG",
"CBNK",
"CBO",
"CBOE",
"CBPO",
"CBPX",
"CBRE",
"CBRL",
"CBS",
"CBS.A",
"CBSH",
"CBSHP",
"CBT",
"CBTX",
"CBU",
"CBUS",
"CBX",
"CBZ",
"CC",
"CCB",
"CCBG",
"CCC",
"CCC.WS",
"CCCL",
"CCD",
"CCEP",
"CCF",
"CCH",
"CCH.U",
"CCH.WS",
"CCI",
"CCI^A",
"CCJ",
"CCK",
"CCL",
"CCLP",
"CCM",
"CCMP",
"CCNE",
"CCO",
"CCOI",
"CCR",
"CCRC",
"CCRN",
"CCS",
"CCU",
"CCX",
"CCX.U",
"CCX.WS",
"CCXI",
"CCZ",
"CDAY",
"CDC",
"CDE",
"CDEV",
"CDK",
"CDL",
"CDLX",
"CDMO",
"CDMOP",
"CDNA",
"CDNS",
"CDOR",
"CDR",
"CDR^B",
"CDR^C",
"CDTX",
"CDW",
"CDXC",
"CDXS",
"CDZI",
"CE",
"CEA",
"CECE",
"CECO",
"CEE",
"CEI",
"CEIX",
"CEL",
"CELC",
"CELG",
"CELGZ",
"CELH",
"CELP",
"CEM",
"CEMI",
"CEN",
"CENT",
"CENTA",
"CENX",
"CEO",
"CEPU",
"CEQP",
"CEQP^",
"CERC",
"CERN",
"CERS",
"CET",
"CETV",
"CETX",
"CETXP",
"CETXW",
"CEV",
"CEVA",
"CEY",
"CEZ",
"CF",
"CFA",
"CFB",
"CFBI",
"CFBK",
"CFFA",
"CFFAU",
"CFFAW",
"CFFI",
"CFFN",
"CFG",
"CFG^D",
"CFMS",
"CFO",
"CFR",
"CFRX",
"CFR^A",
"CFX",
"CFXA",
"CG",
"CGA",
"CGBD",
"CGC",
"CGEN",
"CGIX",
"CGNX",
"CGO",
"CHA",
"CHAC",
"CHAC.U",
"CHAC.WS",
"CHAP",
"CHCI",
"CHCO",
"CHCT",
"CHD",
"CHDN",
"CHE",
"CHEF",
"CHEK",
"CHEKW",
"CHEKZ",
"CHFS",
"CHGG",
"CHH",
"CHI",
"CHIC",
"CHK",
"CHKP",
"CHKR",
"CHK^D",
"CHL",
"CHMA",
"CHMG",
"CHMI",
"CHMI^A",
"CHMI^B",
"CHN",
"CHNA",
"CHNG",
"CHNGU",
"CHNR",
"CHRA",
"CHRS",
"CHRW",
"CHS",
"CHSCL",
"CHSCM",
"CHSCN",
"CHSCO",
"CHSCP",
"CHSP",
"CHT",
"CHTR",
"CHU",
"CHUY",
"CHW",
"CHWY",
"CHY",
"CI",
"CIA",
"CIB",
"CIBR",
"CID",
"CIDM",
"CIEN",
"CIF",
"CIFS",
"CIG",
"CIG.C",
"CIGI",
"CIH",
"CII",
"CIK",
"CIL",
"CIM",
"CIM^A",
"CIM^B",
"CIM^C",
"CIM^D",
"CINF",
"CINR",
"CIO",
"CIO^A",
"CIR",
"CISN",
"CIT",
"CIVB",
"CIVBP",
"CIX",
"CIZ",
"CIZN",
"CJ",
"CJJD",
"CKH",
"CKPT",
"CKX",
"CL",
"CLAR",
"CLB",
"CLBK",
"CLBS",
"CLCT",
"CLDB",
"CLDR",
"CLDT",
"CLDX",
"CLF",
"CLFD",
"CLGN",
"CLGX",
"CLH",
"CLI",
"CLIR",
"CLLS",
"CLM",
"CLMT",
"CLNC",
"CLNE",
"CLNY",
"CLNY^B",
"CLNY^E",
"CLNY^G",
"CLNY^H",
"CLNY^I",
"CLNY^J",
"CLOU",
"CLPR",
"CLPS",
"CLR",
"CLRB",
"CLRBZ",
"CLRG",
"CLRO",
"CLS",
"CLSD",
"CLSN",
"CLUB",
"CLVS",
"CLW",
"CLWT",
"CLX",
"CLXT",
"CM",
"CMA",
"CMBM",
"CMC",
"CMCL",
"CMCM",
"CMCO",
"CMCSA",
"CMCT",
"CMCTP",
"CMD",
"CME",
"CMFNL",
"CMG",
"CMI",
"CMLS",
"CMO",
"CMO^E",
"CMP",
"CMPR",
"CMRE",
"CMRE^B",
"CMRE^C",
"CMRE^D",
"CMRE^E",
"CMRX",
"CMS",
"CMSA",
"CMSC",
"CMSD",
"CMS^B",
"CMT",
"CMTL",
"CMU",
"CNA",
"CNAT",
"CNBKA",
"CNC",
"CNCE",
"CNCR",
"CNDT",
"CNET",
"CNF",
"CNFR",
"CNFRL",
"CNHI",
"CNI",
"CNK",
"CNMD",
"CNNE",
"CNO",
"CNOB",
"CNP",
"CNP^B",
"CNQ",
"CNR",
"CNS",
"CNSL",
"CNST",
"CNTF",
"CNTX",
"CNTY",
"CNX",
"CNXM",
"CNXN",
"CO",
"COCP",
"CODA",
"CODI",
"CODI^A",
"CODI^B",
"CODX",
"COE",
"COF",
"COF^C",
"COF^D",
"COF^F",
"COF^G",
"COF^H",
"COF^P",
"COG",
"COHN",
"COHR",
"COHU",
"COKE",
"COLB",
"COLD",
"COLL",
"COLM",
"COMM",
"COMT",
"CONE",
"CONN",
"COO",
"COOP",
"COP",
"COR",
"CORE",
"CORR",
"CORR^A",
"CORT",
"CORV",
"COST",
"COT",
"COTY",
"COUP",
"COWN",
"COWNL",
"COWNZ",
"CP",
"CPA",
"CPAA",
"CPAAU",
"CPAAW",
"CPAC",
"CPAH",
"CPB",
"CPE",
"CPF",
"CPG",
"CPHC",
"CPHI",
"CPIX",
"CPK",
"CPL",
"CPLG",
"CPLP",
"CPRI",
"CPRT",
"CPRX",
"CPS",
"CPSH",
"CPSI",
"CPSS",
"CPST",
"CPT",
"CPTA",
"CPTAG",
"CPTAL",
"CPTI",
"CQP",
"CR",
"CRAI",
"CRAY",
"CRBP",
"CRC",
"CRCM",
"CRD.A",
"CRD.B",
"CREE",
"CREG",
"CRESY",
"CREX",
"CREXW",
"CRF",
"CRH",
"CRHM",
"CRI",
"CRIS",
"CRK",
"CRL",
"CRM",
"CRMD",
"CRMT",
"CRNT",
"CRNX",
"CRON",
"CROX",
"CRR",
"CRS",
"CRSA",
"CRSAU",
"CRSAW",
"CRSP",
"CRT",
"CRTO",
"CRTX",
"CRUS",
"CRVL",
"CRVS",
"CRWD",
"CRWS",
"CRY",
"CRZO",
"CS",
"CSA",
"CSB",
"CSBR",
"CSCO",
"CSF",
"CSFL",
"CSGP",
"CSGS",
"CSII",
"CSIQ",
"CSL",
"CSLT",
"CSML",
"CSOD",
"CSPI",
"CSQ",
"CSS",
"CSSE",
"CSSEP",
"CSTE",
"CSTL",
"CSTM",
"CSTR",
"CSU",
"CSV",
"CSWC",
"CSWCL",
"CSWI",
"CSX",
"CTAA",
"CTAC",
"CTACU",
"CTACW",
"CTAS",
"CTA^A",
"CTA^B",
"CTB",
"CTBB",
"CTBI",
"CTDD",
"CTEK",
"CTEST",
"CTEST.E",
"CTEST.G",
"CTEST.L",
"CTEST.O",
"CTEST.S",
"CTEST.V",
"CTG",
"CTHR",
"CTIB",
"CTIC",
"CTK",
"CTL",
"CTLT",
"CTMX",
"CTO",
"CTR",
"CTRA",
"CTRC",
"CTRE",
"CTRM",
"CTRN",
"CTRP",
"CTS",
"CTSH",
"CTSO",
"CTST",
"CTT ",
"CTV",
"CTVA",
"CTWS",
"CTXR",
"CTXRW",
"CTXS",
"CTY",
"CTZ",
"CUB",
"CUBA",
"CUBE",
"CUBI",
"CUBI^C",
"CUBI^D",
"CUBI^E",
"CUBI^F",
"CUE",
"CUI",
"CUK",
"CULP",
"CUO",
"CUR",
"CURO",
"CUTR",
"CUZ",
"CVA",
"CVBF",
"CVCO",
"CVCY",
"CVE",
"CVEO",
"CVET",
"CVGI",
"CVGW",
"CVI",
"CVIA",
"CVLT",
"CVLY",
"CVM",
"CVNA",
"CVR",
"CVRS",
"CVS",
"CVTI",
"CVU",
"CVV",
"CVX",
"CW",
"CWBC",
"CWBR",
"CWCO",
"CWEN",
"CWEN.A",
"CWH",
"CWK",
"CWST",
"CWT",
"CX",
"CXDC",
"CXE",
"CXH",
"CXO",
"CXP",
"CXSE",
"CXW",
"CY",
"CYAD",
"CYAN",
"CYBE",
"CYBR",
"CYCC",
"CYCCP",
"CYCN",
"CYD",
"CYH",
"CYOU",
"CYRN",
"CYRX",
"CYRXW",
"CYTK",
"CZNC",
"CZR",
"CZWI",
"CZZ",
"C^J",
"C^K",
"C^N",
"C^S",
"D",
"DAC",
"DAIO",
"DAKT",
"DAL",
"DALI",
"DAN",
"DAR",
"DARE",
"DAVA",
"DAVE",
"DAX",
"DB",
"DBD",
"DBI",
"DBL",
"DBVT",
"DBX",
"DCAR",
"DCF",
"DCI",
"DCIX",
"DCO",
"DCOM",
"DCP",
"DCPH",
"DCP^B",
"DCP^C",
"DCUE",
"DD",
"DDD",
"DDF",
"DDIV",
"DDMX",
"DDMXU",
"DDMXW",
"DDOC",
"DDS",
"DDT",
"DE",
"DEA",
"DEAC",
"DEACU",
"DEACW",
"DECK",
"DEI",
"DELL",
"DENN",
"DEO",
"DERM",
"DESP",
"DEST",
"DEX",
"DF",
"DFBH",
"DFBHU",
"DFBHW",
"DFFN",
"DFIN",
"DFNL",
"DFP",
"DFRG",
"DFS",
"DFVL",
"DFVS",
"DG",
"DGICA",
"DGICB",
"DGII",
"DGLD",
"DGLY",
"DGRE",
"DGRS",
"DGRW",
"DGSE",
"DGX",
"DHF",
"DHI",
"DHIL",
"DHR",
"DHR^A",
"DHT",
"DHX",
"DHXM",
"DHY",
"DIAX",
"DIN",
"DINT",
"DIOD",
"DIS",
"DISCA",
"DISCB",
"DISCK",
"DISH",
"DIT",
"DJCO",
"DK",
"DKL",
"DKS",
"DKT",
"DL",
"DLA",
"DLB",
"DLBS",
"DLHC",
"DLNG",
"DLNG^A",
"DLNG^B",
"DLPH",
"DLPN",
"DLPNW",
"DLR",
"DLR^C",
"DLR^G",
"DLR^I",
"DLR^J",
"DLR^K",
"DLTH",
"DLTR",
"DLX",
"DMAC",
"DMB",
"DMF",
"DMLP",
"DMO",
"DMPI",
"DMRC",
"DMTK",
"DMTKW",
"DNBF",
"DNI",
"DNJR",
"DNKN",
"DNLI",
"DNN",
"DNOW",
"DNP",
"DNR",
"DO",
"DOC",
"DOCU",
"DOGZ",
"DOMO",
"DOOO",
"DOOR",
"DORM",
"DOV",
"DOVA",
"DOW",
"DOX",
"DOYU",
"DPG",
"DPHC",
"DPHCU",
"DPHCW",
"DPLO",
"DPW",
"DPZ",
"DQ",
"DRAD",
"DRADP",
"DRD",
"DRE",
"DRH",
"DRI",
"DRIO",
"DRIOW",
"DRIV",
"DRMT",
"DRNA",
"DRQ",
"DRRX",
"DRUA",
"DRYS",
"DS",
"DSE",
"DSGX",
"DSKE",
"DSKEW",
"DSL",
"DSLV",
"DSM",
"DSPG",
"DSS",
"DSSI",
"DSU",
"DSWL",
"DSX",
"DSX^B",
"DS^B",
"DS^C",
"DS^D",
"DT",
"DTE",
"DTEA",
"DTF",
"DTIL",
"DTJ",
"DTLA^",
"DTQ",
"DTSS",
"DTUL",
"DTUS",
"DTV",
"DTW",
"DTY",
"DTYL",
"DTYS",
"DUC",
"DUK",
"DUKB",
"DUKH",
"DUK^A",
"DUSA",
"DVA",
"DVAX",
"DVD",
"DVLU",
"DVN",
"DVOL",
"DVY",
"DWAQ",
"DWAS",
"DWAT",
"DWCR",
"DWFI",
"DWIN",
"DWLD",
"DWMC",
"DWPP",
"DWSH",
"DWSN",
"DWTR",
"DX",
"DXB",
"DXC",
"DXCM",
"DXF",
"DXGE",
"DXJS",
"DXLG",
"DXPE",
"DXR",
"DXYN",
"DX^A",
"DX^B",
"DY",
"DYAI",
"DYNT",
"DZSI",
"E",
"EA",
"EAB",
"EAD",
"EAE",
"EAF",
"EAI",
"EARN",
"EARS",
"EAST",
"EAT",
"EB",
"EBAY",
"EBAYL",
"EBF",
"EBIX",
"EBIZ",
"EBMT",
"EBR",
"EBR.B",
"EBS",
"EBSB",
"EBTC",
"EC",
"ECA",
"ECC ",
"ECCA",
"ECCB",
"ECCX",
"ECCY",
"ECF",
"ECF^A",
"ECHO",
"ECL",
"ECOL",
"ECOM ",
"ECOR",
"ECOW",
"ECPG",
"ECT",
"ED",
"EDAP",
"EDD",
"EDF",
"EDI",
"EDIT",
"EDN",
"EDNT",
"EDRY",
"EDSA",
"EDTX",
"EDTXU",
"EDTXW",
"EDU",
"EDUC",
"EE",
"EEA",
"EEFT",
"EEI",
"EEMA",
"EEX",
"EFAS",
"EFBI",
"EFC",
"EFF",
"EFL",
"EFOI",
"EFR",
"EFSC",
"EFT",
"EFX",
"EGAN",
"EGBN",
"EGF",
"EGHT",
"EGI",
"EGIF",
"EGLE",
"EGO",
"EGOV",
"EGP",
"EGRX",
"EGY",
"EHC",
"EHI",
"EHR",
"EHT",
"EHTH",
"EIC",
"EIDX",
"EIG",
"EIGI",
"EIGR",
"EIM",
"EIX",
"EKSO",
"EL",
"ELAN",
"ELC",
"ELF",
"ELGX",
"ELJ",
"ELLO",
"ELMD",
"ELOX",
"ELP",
"ELS",
"ELSE",
"ELTK",
"ELU",
"ELVT",
"ELY",
"EMAN",
"EMB",
"EMCB",
"EMCF",
"EMCG",
"EMCI",
"EMD",
"EME",
"EMF",
"EMIF",
"EMKR",
"EML",
"EMMS",
"EMN",
"EMO",
"EMP",
"EMR",
"EMX",
"EMXC",
"ENB",
"ENBA",
"ENBL",
"ENDP",
"ENFC",
"ENG",
"ENIA",
"ENIC",
"ENJ",
"ENLC",
"ENLV",
"ENO",
"ENOB",
"ENPH",
"ENR",
"ENR^A",
"ENS",
"ENSG",
"ENSV",
"ENT",
"ENTA",
"ENTG",
"ENTX",
"ENTXW",
"ENV",
"ENVA",
"ENX",
"ENZ",
"ENZL",
"EOD",
"EOG",
"EOI",
"EOLS",
"EOS",
"EOT",
"EPAM",
"EPAY",
"EPC",
"EPD",
"EPIX",
"EPM",
"EPR",
"EPRT",
"EPR^C",
"EPR^E",
"EPR^G",
"EPSN",
"EPZM",
"EP^C",
"EQ",
"EQBK",
"EQC",
"EQC^D",
"EQH",
"EQIX",
"EQM",
"EQNR",
"EQR",
"EQRR",
"EQS",
"EQT",
"ERA",
"ERC",
"ERF",
"ERH",
"ERI",
"ERIC",
"ERIE",
"ERII",
"ERJ",
"EROS",
"ERYP",
"ES",
"ESBK",
"ESCA",
"ESE",
"ESEA",
"ESG",
"ESGD",
"ESGE",
"ESGG",
"ESGR",
"ESGRO",
"ESGRP",
"ESGU",
"ESI",
"ESLT",
"ESNT",
"ESP",
"ESPR",
"ESQ",
"ESRT",
"ESS",
"ESSA",
"ESTA",
"ESTC",
"ESTE",
"ESTR",
"ESTRW",
"ESXB",
"ET",
"ETB",
"ETFC",
"ETG",
"ETH",
"ETI^",
"ETJ",
"ETM",
"ETN",
"ETO",
"ETON",
"ETP^C",
"ETP^D",
"ETP^E",
"ETR",
"ETRN",
"ETSY",
"ETTX",
"ETV",
"ETW",
"ETX ",
"ETY",
"EUFN",
"EURN",
"EV",
"EVA",
"EVBG",
"EVBN",
"EVC",
"EVER",
"EVF",
"EVFM",
"EVG",
"EVGBC",
"EVGN",
"EVH",
"EVI",
"EVK",
"EVLMC",
"EVLO",
"EVM",
"EVN",
"EVOK",
"EVOL",
"EVOP",
"EVR",
"EVRG",
"EVRI",
"EVSI",
"EVSIW",
"EVSTC",
"EVT",
"EVTC",
"EVV",
"EVY",
"EW",
"EWBC",
"EWJE",
"EWJV",
"EWZS",
"EXAS",
"EXC",
"EXD",
"EXEL",
"EXFO",
"EXG",
"EXK",
"EXLS",
"EXP",
"EXPC",
"EXPCU",
"EXPD",
"EXPE",
"EXPI",
"EXPO",
"EXPR",
"EXR",
"EXTN",
"EXTR",
"EYE",
"EYEG",
"EYEGW",
"EYEN",
"EYES",
"EYESW",
"EYPT",
"EZPW",
"EZT",
"F",
"FAAR",
"FAB",
"FAD",
"FAF",
"FALN",
"FAM",
"FAMI",
"FANG",
"FANH",
"FARM",
"FARO",
"FAST",
"FAT",
"FATE",
"FAX",
"FB",
"FBC",
"FBHS",
"FBIO",
"FBIOP",
"FBIZ",
"FBK",
"FBM",
"FBMS",
"FBNC",
"FBP",
"FBSS",
"FBZ",
"FC",
"FCA",
"FCAL",
"FCAN",
"FCAP",
"FCAU",
"FCBC",
"FCBP",
"FCCO",
"FCCY",
"FCEF",
"FCEL",
"FCF",
"FCFS",
"FCN",
"FCNCA",
"FCO",
"FCPT",
"FCSC",
"FCT",
"FCVT",
"FCX",
"FDBC",
"FDEF",
"FDEU",
"FDIV",
"FDNI",
"FDP",
"FDS",
"FDT",
"FDTS",
"FDUS",
"FDUSL",
"FDUSZ",
"FDX",
"FE",
"FEDU",
"FEI ",
"FEIM",
"FELE",
"FELP",
"FEM",
"FEMB",
"FEMS",
"FEN",
"FENC",
"FENG",
"FEO",
"FEP",
"FET",
"FEUZ",
"FEX",
"FEYE",
"FF",
"FFA",
"FFBC",
"FFBW",
"FFC",
"FFG",
"FFHL",
"FFIC",
"FFIN",
"FFIV",
"FFNW",
"FFWM",
"FG",
"FG.WS",
"FGB",
"FGBI",
"FGEN",
"FGM",
"FGP",
"FHB",
"FHK",
"FHL",
"FHN",
"FHN^A",
"FI",
"FIBK",
"FICO",
"FID",
"FIF",
"FII",
"FINS",
"FINX",
"FIS",
"FISI",
"FISV",
"FIT",
"FITB",
"FITBI",
"FITBP",
"FIV",
"FIVE",
"FIVN",
"FIX",
"FIXD",
"FIXX",
"FIZZ",
"FJP",
"FKLY",
"FKO",
"FKU",
"FL",
"FLAG",
"FLAT",
"FLC",
"FLDM",
"FLEX",
"FLGT",
"FLIC",
"FLIR",
"FLL",
"FLLCU",
"FLMN",
"FLMNW",
"FLN",
"FLNG",
"FLNT",
"FLO",
"FLOW",
"FLR",
"FLS",
"FLT",
"FLUX",
"FLWR",
"FLWS",
"FLXN",
"FLXS",
"FLY",
"FMAO",
"FMAX",
"FMB",
"FMBH",
"FMBI",
"FMC",
"FMCI",
"FMCIU",
"FMCIW",
"FMHI",
"FMK",
"FMN",
"FMNB",
"FMO",
"FMS",
"FMX",
"FMY",
"FN",
"FNB",
"FNB^E",
"FNCB",
"FND",
"FNF",
"FNHC",
"FNJN",
"FNK",
"FNKO",
"FNLC",
"FNSR",
"FNV",
"FNWB",
"FNX",
"FNY",
"FOCS",
"FOE",
"FOF",
"FOLD",
"FOMX",
"FONR",
"FOR",
"FORD",
"FORK",
"FORM",
"FORR",
"FORTY",
"FOSL",
"FOX",
"FOXA",
"FOXF",
"FPA",
"FPAC",
"FPAC.U",
"FPAC.WS",
"FPAY",
"FPAYW",
"FPF",
"FPH",
"FPI",
"FPI^B",
"FPL",
"FPRX",
"FPXE",
"FPXI",
"FR",
"FRA",
"FRAC",
"FRAF",
"FRAN",
"FRBA",
"FRBK",
"FRC",
"FRC^D",
"FRC^F",
"FRC^G",
"FRC^H",
"FRC^I",
"FRD",
"FRED",
"FRGI",
"FRME",
"FRO",
"FRPH",
"FRPT",
"FRSX",
"FRT",
"FRTA",
"FRT^C",
"FSB",
"FSBC",
"FSBW",
"FSCT",
"FSD",
"FSEA",
"FSFG",
"FSI",
"FSK",
"FSLR",
"FSLY",
"FSM",
"FSP",
"FSS",
"FSTR",
"FSV",
"FSZ",
"FT",
"FTA",
"FTAC",
"FTACU",
"FTACW",
"FTAG",
"FTAI",
"FTC",
"FTCH",
"FTCS",
"FTDR",
"FTEK",
"FTEO",
"FTF",
"FTFT",
"FTGC",
"FTHI",
"FTI",
"FTK",
"FTLB",
"FTNT",
"FTNW",
"FTR",
"FTRI",
"FTS",
"FTSI",
"FTSL",
"FTSM",
"FTSV",
"FTV",
"FTV^A",
"FTXD",
"FTXG",
"FTXH",
"FTXL",
"FTXN",
"FTXO",
"FTXR",
"FUL",
"FULC",
"FULT",
"FUN",
"FUNC",
"FUND",
"FUSB",
"FUV",
"FV",
"FVC",
"FVCB",
"FVE",
"FVRR",
"FWONA",
"FWONK",
"FWP",
"FWRD",
"FXNC",
"FYC",
"FYT",
"FYX",
"F^B",
"G",
"GAB",
"GABC",
"GAB^D",
"GAB^G",
"GAB^H",
"GAB^J",
"GAIA",
"GAIN",
"GAINL",
"GAINM",
"GALT",
"GAM",
"GAM^B",
"GARS",
"GASS",
"GATX",
"GBAB",
"GBCI",
"GBDC",
"GBL",
"GBLI",
"GBLIL",
"GBLIZ",
"GBLK",
"GBR",
"GBT",
"GBX",
"GCAP",
"GCBC",
"GCI",
"GCO",
"GCP",
"GCV",
"GCVRZ",
"GCV^B",
"GD",
"GDDY",
"GDEN",
"GDI",
"GDL",
"GDL^C",
"GDO",
"GDOT",
"GDP",
"GDS",
"GDV",
"GDV^A",
"GDV^D",
"GDV^G",
"GDV^H",
"GE",
"GEC",
"GECC",
"GECCL",
"GECCM",
"GECCN",
"GEF",
"GEF.B",
"GEL",
"GEMP",
"GEN ",
"GENC",
"GENE",
"GENY",
"GEO",
"GEOS",
"GER",
"GERN",
"GES",
"GEVO",
"GF",
"GFED",
"GFF",
"GFI",
"GFN",
"GFNCP",
"GFNSL",
"GFY",
"GGAL",
"GGB",
"GGG",
"GGM",
"GGN",
"GGN^B",
"GGO",
"GGO^A",
"GGT",
"GGT^B",
"GGT^E",
"GGZ",
"GGZ^A",
"GH",
"GHC",
"GHDX",
"GHG",
"GHL",
"GHM",
"GHSI",
"GHY",
"GIB",
"GIFI",
"GIG",
"GIG.U",
"GIG.WS",
"GIGE",
"GIGM",
"GIG~",
"GIII",
"GIL",
"GILD",
"GILT",
"GIM",
"GIS",
"GIX",
"GIX.U",
"GIX.WS",
"GIX~",
"GJH",
"GJO",
"GJP",
"GJR",
"GJS",
"GJT",
"GJV",
"GKOS",
"GL",
"GLAC",
"GLACR",
"GLACU",
"GLACW",
"GLAD",
"GLADD",
"GLADN",
"GLBS",
"GLBZ",
"GLDD",
"GLDI",
"GLG",
"GLIBA",
"GLIBP",
"GLMD",
"GLNG",
"GLO",
"GLOB",
"GLOG",
"GLOG^A",
"GLOP",
"GLOP^A",
"GLOP^B",
"GLOP^C",
"GLOW",
"GLP",
"GLPG",
"GLPI",
"GLP^A",
"GLQ",
"GLRE",
"GLT",
"GLU",
"GLUU",
"GLU^A",
"GLU^B",
"GLV",
"GLW",
"GLYC",
"GL^C",
"GM",
"GMAB",
"GMDA",
"GME",
"GMED",
"GMHI",
"GMHIU",
"GMHIW",
"GMLP",
"GMLPP",
"GMO",
"GMRE",
"GMRE^A",
"GMS",
"GMTA",
"GMZ",
"GNC",
"GNCA",
"GNE",
"GNE^A",
"GNFT",
"GNK",
"GNL",
"GNLN",
"GNL^A",
"GNMA",
"GNMK",
"GNMX",
"GNOM",
"GNPX",
"GNRC",
"GNST",
"GNT",
"GNTX",
"GNTY",
"GNT^A",
"GNUS",
"GNW",
"GO",
"GOF",
"GOGL",
"GOGO",
"GOL",
"GOLD",
"GOLF",
"GOOD",
"GOODM",
"GOODO",
"GOODP",
"GOOG",
"GOOGL",
"GOOS",
"GORO",
"GOSS",
"GPAQ",
"GPAQU",
"GPAQW",
"GPC",
"GPI",
"GPJA",
"GPK",
"GPL",
"GPM",
"GPMT",
"GPN",
"GPOR",
"GPP",
"GPRE",
"GPRK",
"GPRO",
"GPS",
"GPX",
"GRA",
"GRAF",
"GRAF.U",
"GRAF.WS",
"GRAM",
"GRBK",
"GRC",
"GRF",
"GRFS",
"GRID",
"GRIF",
"GRIN",
"GRMN",
"GRNQ",
"GROW",
"GRP.U",
"GRPN",
"GRSH",
"GRSHU",
"GRSHW",
"GRTS",
"GRUB",
"GRVY",
"GRX",
"GRX^A",
"GRX^B",
"GS",
"GSAH",
"GSAH.U",
"GSAH.WS",
"GSAT",
"GSB",
"GSBC",
"GSBD",
"GSH",
"GSHD",
"GSIT",
"GSK",
"GSKY",
"GSL",
"GSL^B",
"GSM",
"GSS",
"GSUM",
"GSV",
"GSX",
"GS^A",
"GS^C",
"GS^D",
"GS^J",
"GS^K",
"GS^N",
"GT",
"GTE",
"GTES",
"GTHX",
"GTIM",
"GTLS",
"GTN",
"GTN.A",
"GTS",
"GTT",
"GTX",
"GTY",
"GTYH",
"GULF",
"GURE",
"GUT",
"GUT^A",
"GUT^C",
"GV",
"GVA",
"GVP",
"GWB",
"GWGH",
"GWPH",
"GWR",
"GWRE",
"GWRS",
"GWW",
"GXGX",
"GXGXU",
"GXGXW",
"GYB",
"GYC",
"GYRO",
"H",
"HA",
"HABT",
"HAE",
"HAFC",
"HAIN",
"HAIR",
"HAL",
"HALL",
"HALO",
"HARP",
"HAS",
"HASI",
"HAYN",
"HBAN",
"HBANN",
"HBANO",
"HBB",
"HBCP",
"HBI",
"HBIO",
"HBM",
"HBMD",
"HBNC",
"HBP",
"HCA",
"HCAC",
"HCACU",
"HCACW",
"HCAP",
"HCAPZ",
"HCAT",
"HCC",
"HCCH",
"HCCHR",
"HCCHU",
"HCCHW",
"HCCI",
"HCFT",
"HCHC",
"HCI",
"HCKT",
"HCM",
"HCP",
"HCR",
"HCSG",
"HCXY",
"HCXZ",
"HD",
"HDB",
"HDS",
"HDSN",
"HE",
"HEAR",
"HEBT",
"HEES",
"HEI",
"HEI.A",
"HELE",
"HEP",
"HEPA",
"HEQ",
"HERD",
"HES",
"HESM",
"HEWG",
"HEXO",
"HFBL",
"HFC",
"HFFG",
"HFRO",
"HFRO^A",
"HFWA",
"HGH",
"HGLB",
"HGSH",
"HGV",
"HHC",
"HHHH",
"HHHHR",
"HHHHU",
"HHHHW",
"HHR",
"HHS",
"HHT",
"HI",
"HIBB",
"HIE",
"HIFS",
"HIG",
"HIG^G",
"HIHO",
"HII",
"HIIQ",
"HIL",
"HIMX",
"HIO",
"HIW",
"HIX",
"HJLI",
"HJLIW",
"HJV",
"HKIB",
"HL",
"HLAL",
"HLF",
"HLG",
"HLI",
"HLIO",
"HLIT",
"HLM^",
"HLNE",
"HLT",
"HLX",
"HL^B",
"HMC",
"HMG",
"HMHC",
"HMI",
"HMLP",
"HMLP^A",
"HMN",
"HMNF",
"HMST",
"HMSY",
"HMTV",
"HMY",
"HNDL",
"HNGR",
"HNI",
"HNNA",
"HNP",
"HNRG",
"HNW",
"HOFT",
"HOG",
"HOLI",
"HOLX",
"HOMB",
"HOME",
"HON",
"HONE",
"HOOK",
"HOPE",
"HOS",
"HOTH",
"HOV",
"HOVNP",
"HP",
"HPE",
"HPF",
"HPI",
"HPJ",
"HPP",
"HPQ",
"HPR",
"HPS",
"HPT",
"HQH",
"HQI",
"HQL",
"HQY",
"HR",
"HRB",
"HRC",
"HRI",
"HRL",
"HROW",
"HRTG",
"HRTX",
"HRZN",
"HSAC",
"HSACU",
"HSACW",
"HSBC",
"HSBC^A",
"HSC",
"HSDT",
"HSGX",
"HSIC",
"HSII",
"HSKA",
"HSON",
"HST",
"HSTM",
"HSY",
"HT",
"HTA",
"HTBI",
"HTBK",
"HTBX",
"HTD",
"HTFA",
"HTGC",
"HTGM",
"HTH",
"HTHT",
"HTLD",
"HTLF",
"HTY",
"HTZ",
"HT^C",
"HT^D",
"HT^E",
"HUBB",
"HUBG",
"HUBS",
"HUD",
"HUM",
"HUN",
"HURC",
"HURN",
"HUSA",
"HUYA",
"HVBC",
"HVT",
"HVT.A",
"HWBK",
"HWC",
"HWCC",
"HWCPL",
"HWKN",
"HX",
"HXL",
"HY",
"HYAC",
"HYACU",
"HYACW",
"HYB",
"HYI",
"HYLS",
"HYND",
"HYRE",
"HYT",
"HYXE",
"HYZD",
"HZN",
"HZNP",
"HZO",
"I",
"IAA",
"IAC",
"IAE",
"IAF",
"IAG",
"IART",
"IBA",
"IBB",
"IBCP",
"IBEX",
"IBIO",
"IBKC",
"IBKCN",
"IBKCO",
"IBKCP",
"IBM",
"IBN",
"IBO",
"IBOC",
"IBP",
"IBTX",
"IBUY",
"ICAD",
"ICBK",
"ICCC",
"ICCH",
"ICD",
"ICE",
"ICFI",
"ICHR",
"ICL",
"ICLK",
"ICLN",
"ICLR",
"ICMB",
"ICON",
"ICPT",
"ICUI",
"IDA",
"IDCC",
"IDE",
"IDEX",
"IDLB",
"IDN",
"IDRA",
"IDSA",
"IDSY",
"IDT",
"IDXG",
"IDXX",
"IDYA",
"IEA",
"IEAWW",
"IEC",
"IEF",
"IEI",
"IEP",
"IESC",
"IEUS",
"IEX",
"IFEU",
"IFF",
"IFFT",
"IFGL",
"IFMK",
"IFN",
"IFRX",
"IFS",
"IFV",
"IGA",
"IGC",
"IGD",
"IGF",
"IGI",
"IGIB",
"IGLD",
"IGLE",
"IGOV",
"IGR",
"IGSB",
"IGT",
"IHC",
"IHD",
"IHG",
"IHIT",
"IHRT",
"IHT",
"IHTA",
"IID",
"IIF",
"III",
"IIIN",
"IIIV",
"IIM",
"IIN",
"IIPR",
"IIPR^A",
"IIVI",
"IJT",
"IKNX",
"ILMN",
"ILPT",
"IMAC",
"IMACW",
"IMAX",
"IMBI",
"IMGN",
"IMH",
"IMI",
"IMKTA",
"IMMP",
"IMMR",
"IMMU",
"IMO",
"IMOS",
"IMRN",
"IMRNW",
"IMTE",
"IMUX",
"IMV",
"IMXI",
"INAP",
"INB",
"INBK",
"INBKL",
"INBKZ",
"INCY",
"INDB",
"INDY",
"INF",
"INFI",
"INFN",
"INFO",
"INFR",
"INFU",
"INFY",
"ING",
"INGN",
"INGR",
"INMB",
"INMD",
"INN",
"INNT",
"INN^D",
"INN^E",
"INO",
"INOD",
"INOV",
"INPX",
"INS",
"INSE",
"INSG",
"INSI",
"INSM",
"INSP",
"INST",
"INSU",
"INSUU",
"INSUW",
"INSW",
"INSW^A",
"INT",
"INTC",
"INTG",
"INTL",
"INTT",
"INTU",
"INUV",
"INVA",
"INVE",
"INVH",
"INWK",
"INXN",
"IO",
"IONS",
"IOR",
"IOSP",
"IOTS",
"IOVA",
"IP",
"IPAR",
"IPB",
"IPDN",
"IPG",
"IPGP",
"IPHI",
"IPHS",
"IPI",
"IPKW",
"IPLDP",
"IPOA",
"IPOA.U",
"IPOA.WS",
"IPWR",
"IQ",
"IQI",
"IQV",
"IR",
"IRBT",
"IRCP",
"IRDM",
"IRET",
"IRET^C",
"IRIX",
"IRL",
"IRM",
"IRMD",
"IROQ",
"IRR",
"IRS",
"IRT",
"IRTC",
"IRWD",
"ISBC",
"ISCA",
"ISD",
"ISDR",
"ISDS",
"ISDX",
"ISEE",
"ISEM",
"ISG",
"ISHG",
"ISIG",
"ISNS",
"ISR",
"ISRG",
"ISRL",
"ISSC",
"ISTB",
"ISTR",
"IT",
"ITCB",
"ITCI",
"ITEQ",
"ITGR",
"ITI",
"ITIC",
"ITMR",
"ITP",
"ITRI",
"ITRM",
"ITRN",
"ITT",
"ITUB",
"ITW",
"IUS",
"IUSB",
"IUSG",
"IUSS",
"IUSV",
"IVAC",
"IVC",
"IVH",
"IVR",
"IVR^A",
"IVR^B",
"IVR^C",
"IVZ",
"IX",
"IXUS",
"IZEA",
"JACK",
"JAG",
"JAGX",
"JAKK",
"JAN",
"JASN",
"JAX",
"JAZZ",
"JBGS",
"JBHT",
"JBK",
"JBL",
"JBLU",
"JBN",
"JBR",
"JBSS",
"JBT",
"JCAP",
"JCAP^B",
"JCE",
"JCI",
"JCO",
"JCOM",
"JCP",
"JCS",
"JCTCF",
"JD",
"JDD",
"JE",
"JEC",
"JEF",
"JELD",
"JEMD",
"JEQ",
"JE^A",
"JFIN",
"JFK",
"JFKKR",
"JFKKU",
"JFKKW",
"JFR",
"JFU",
"JG",
"JGH",
"JHAA",
"JHB",
"JHD",
"JHG",
"JHI",
"JHS",
"JHX",
"JHY",
"JILL",
"JJSF",
"JKHY",
"JKI",
"JKS",
"JLL",
"JLS",
"JMEI",
"JMF",
"JMIA",
"JMLP",
"JMM",
"JMP",
"JMPB",
"JMPD",
"JMT",
"JMU",
"JNCE",
"JNJ",
"JNPR",
"JOB",
"JOBS",
"JOE",
"JOF",
"JOUT",
"JP",
"JPC",
"JPI",
"JPM",
"JPM^A",
"JPM^C",
"JPM^D",
"JPM^F",
"JPM^G",
"JPM^H",
"JPS",
"JPT",
"JQC",
"JRI",
"JRJC",
"JRO",
"JRS",
"JRSH",
"JRVR",
"JSD",
"JSM",
"JSMD",
"JSML",
"JT",
"JTA",
"JTD",
"JVA",
"JW.A",
"JW.B",
"JWN",
"JYNT",
"K",
"KAI",
"KALA",
"KALU",
"KALV",
"KAMN",
"KAR",
"KB",
"KBAL",
"KBH",
"KBLM",
"KBLMR",
"KBLMU",
"KBLMW",
"KBR",
"KBSF",
"KBWB",
"KBWD",
"KBWP",
"KBWR",
"KBWY",
"KCAPL",
"KDMN",
"KDP",
"KE",
"KEG",
"KELYA",
"KELYB",
"KEM",
"KEN",
"KEP",
"KEQU",
"KERN",
"KERNW",
"KEX",
"KEY",
"KEYS",
"KEY^I",
"KEY^J",
"KEY^K",
"KF",
"KFFB",
"KFRC",
"KFS",
"KFY",
"KGC",
"KGJI",
"KHC",
"KIDS",
"KIM",
"KIM^I.CL",
"KIM^J",
"KIM^K.CL",
"KIM^L",
"KIM^M",
"KIN",
"KINS",
"KIO",
"KIQ",
"KIRK",
"KKR",
"KKR^A",
"KKR^B",
"KL",
"KLAC",
"KLDO",
"KLIC",
"KLXE",
"KMB",
"KMDA",
"KMF",
"KMI",
"KMPH",
"KMPR",
"KMT",
"KMX",
"KN",
"KNDI",
"KNL",
"KNOP",
"KNSA",
"KNSL",
"KNX",
"KO",
"KOD",
"KODK",
"KOF",
"KOOL",
"KOP",
"KOPN",
"KOS",
"KOSS",
"KPTI",
"KR",
"KRA",
"KRC",
"KREF",
"KRG",
"KRMA",
"KRNT",
"KRNY",
"KRO",
"KRP",
"KRTX",
"KRUS",
"KRYS",
"KSM",
"KSS",
"KSU",
"KSU^",
"KT",
"KTB",
"KTCC",
"KTF",
"KTH",
"KTN",
"KTOS",
"KTOV",
"KTOVW",
"KTP",
"KURA",
"KVHI",
"KW",
"KWEB",
"KWR",
"KXIN",
"KYN",
"KYN^F",
"KZIA",
"KZR",
"L",
"LAC",
"LACQ",
"LACQU",
"LACQW",
"LAD",
"LADR",
"LAIX",
"LAKE",
"LAMR",
"LANC",
"LAND",
"LANDP",
"LARK",
"LASR",
"LAUR",
"LAWS",
"LAZ",
"LAZY",
"LB",
"LBAI",
"LBC",
"LBRDA",
"LBRDK",
"LBRT",
"LBTYA",
"LBTYB",
"LBTYK",
"LBY",
"LBYAV",
"LBYKV",
"LC",
"LCA",
"LCAHU",
"LCAHW",
"LCI",
"LCII",
"LCNB",
"LCTX",
"LCUT",
"LDL",
"LDOS",
"LDP",
"LDRI",
"LDSF",
"LE",
"LEA",
"LEAF",
"LECO",
"LEDS",
"LEE",
"LEG",
"LEGH",
"LEGR",
"LEJU",
"LEN",
"LEN.B",
"LEO",
"LEU",
"LEVI",
"LEVL",
"LFAC",
"LFACU",
"LFACW",
"LFC",
"LFUS",
"LFVN",
"LGC",
"LGC.U",
"LGC.WS",
"LGF.A",
"LGF.B",
"LGI",
"LGIH",
"LGL",
"LGND",
"LH",
"LHC",
"LHC.U",
"LHC.WS",
"LHCG",
"LHX",
"LIFE",
"LII",
"LILA",
"LILAK",
"LIN",
"LINC",
"LIND",
"LINX",
"LIQT",
"LITB",
"LITE",
"LIVE",
"LIVN",
"LIVX",
"LJPC",
"LK",
"LKCO",
"LKFN",
"LKOR",
"LKQ",
"LKSD",
"LL",
"LLEX",
"LLIT",
"LLNW",
"LLY",
"LM",
"LMAT",
"LMB",
"LMBS",
"LMFA",
"LMFAW",
"LMHA",
"LMHB",
"LMNR",
"LMNX",
"LMRK",
"LMRKN",
"LMRKO",
"LMRKP",
"LMST",
"LMT",
"LN",
"LNC",
"LND",
"LNDC",
"LNG",
"LNGR",
"LNN",
"LNT",
"LNTH",
"LOAC",
"LOACR",
"LOACU",
"LOACW",
"LOAN",
"LOB",
"LOCO",
"LODE",
"LOGC",
"LOGI",
"LOGM",
"LOMA",
"LONE",
"LOOP",
"LOPE",
"LOR",
"LORL",
"LOV",
"LOVE",
"LOW",
"LPCN",
"LPG",
"LPI",
"LPL",
"LPLA",
"LPSN",
"LPT",
"LPTH",
"LPTX",
"LPX",
"LQDA",
"LQDT",
"LRAD",
"LRCX",
"LRGE",
"LRN",
"LSBK",
"LSCC",
"LSI",
"LSTR",
"LSXMA",
"LSXMB",
"LSXMK",
"LTBR",
"LTC",
"LTHM",
"LTM",
"LTRPA",
"LTRPB",
"LTRX",
"LTS",
"LTSF",
"LTSH",
"LTSK",
"LTSL",
"LTS^A",
"LTXB",
"LUB",
"LULU",
"LUNA",
"LUNG",
"LUV",
"LVGO",
"LVHD",
"LVS",
"LW",
"LWAY",
"LX",
"LXFR",
"LXP",
"LXP^C",
"LXRX",
"LXU",
"LYB",
"LYFT",
"LYG",
"LYL",
"LYTS",
"LYV",
"LZB",
"M",
"MA",
"MAA",
"MAA^I",
"MAC",
"MACK",
"MAG",
"MAGS",
"MAIN",
"MAMS",
"MAN",
"MANH",
"MANT",
"MANU",
"MAR",
"MARA",
"MARK",
"MARPS",
"MAS",
"MASI",
"MAT",
"MATW",
"MATX",
"MAV",
"MAXR",
"MAYS",
"MBB",
"MBCN",
"MBI",
"MBII",
"MBIN",
"MBINO",
"MBINP",
"MBIO",
"MBOT",
"MBRX",
"MBSD",
"MBT",
"MBUU",
"MBWM",
"MC",
"MCA",
"MCB",
"MCBC",
"MCC",
"MCD",
"MCEF",
"MCEP",
"MCF",
"MCFT",
"MCHI",
"MCHP",
"MCHX",
"MCI",
"MCK",
"MCN",
"MCO",
"MCR",
"MCRB",
"MCRI",
"MCRN",
"MCS",
"MCV",
"MCX",
"MCY",
"MD",
"MDB",
"MDC",
"MDCA",
"MDCO",
"MDGL",
"MDGS",
"MDGSW",
"MDIV",
"MDJH",
"MDLA",
"MDLQ",
"MDLX",
"MDLY",
"MDLZ",
"MDP",
"MDR",
"MDRR",
"MDRX",
"MDSO",
"MDT",
"MDU",
"MDWD",
"MEC",
"MED",
"MEDP",
"MEET",
"MEI",
"MEIP",
"MELI",
"MEN",
"MEOH",
"MERC",
"MER^K",
"MESA",
"MESO",
"MET",
"METC",
"MET^A",
"MET^E",
"MFA",
"MFAC",
"MFAC.U",
"MFAC.WS",
"MFA^B",
"MFC",
"MFD",
"MFG",
"MFGP",
"MFIN",
"MFINL",
"MFL",
"MFM",
"MFNC",
"MFO",
"MFSF",
"MFT",
"MFV",
"MG",
"MGA",
"MGEE",
"MGEN",
"MGF",
"MGI",
"MGIC",
"MGLN",
"MGM",
"MGNX",
"MGP",
"MGPI",
"MGR",
"MGRC",
"MGTA",
"MGTX",
"MGU",
"MGY",
"MGYR",
"MHD",
"MHE",
"MHF",
"MHH",
"MHI",
"MHK",
"MHLA",
"MHLD",
"MHN",
"MHNC",
"MHO",
"MH^A",
"MH^C",
"MH^D",
"MIC",
"MICR",
"MICT",
"MIDD",
"MIE",
"MIK",
"MILN",
"MIME",
"MIN",
"MIND",
"MINDP",
"MINI",
"MIRM",
"MIST",
"MITK",
"MITO",
"MITT",
"MITT^A",
"MITT^B",
"MIXT",
"MIY",
"MJCO",
"MKC",
"MKC.V",
"MKGI",
"MKL",
"MKSI",
"MKTX",
"MLAB",
"MLCO",
"MLHR",
"MLI",
"MLM",
"MLND",
"MLNT",
"MLNX",
"MLP",
"MLR",
"MLSS",
"MLVF",
"MMAC",
"MMC",
"MMD",
"MMI",
"MMLP",
"MMM",
"MMP",
"MMS",
"MMSI",
"MMT",
"MMU",
"MMX",
"MMYT",
"MN",
"MNCL",
"MNCLU",
"MNCLW",
"MNDO",
"MNE",
"MNI",
"MNK",
"MNKD",
"MNLO",
"MNOV",
"MNP",
"MNR",
"MNRL",
"MNRO",
"MNR^C",
"MNSB",
"MNST",
"MNTA",
"MNTX",
"MO",
"MOBL",
"MOD",
"MODN",
"MOFG",
"MOG.A",
"MOG.B",
"MOGO",
"MOGU",
"MOH",
"MOMO",
"MOR",
"MORF",
"MORN",
"MOS",
"MOSC",
"MOSC.U",
"MOSC.WS",
"MOSY",
"MOTA",
"MOTS",
"MOV",
"MOXC",
"MPA",
"MPAA",
"MPB",
"MPC",
"MPLX",
"MPV",
"MPVD",
"MPW",
"MPWR",
"MPX",
"MQT",
"MQY",
"MR",
"MRAM",
"MRBK",
"MRC",
"MRCC",
"MRCCL",
"MRCY",
"MREO",
"MRIC",
"MRIN",
"MRK",
"MRKR",
"MRLN",
"MRNA",
"MRNS",
"MRO",
"MRSN",
"MRTN",
"MRTX",
"MRUS",
"MRVL",
"MS",
"MSA",
"MSB",
"MSBF",
"MSBI",
"MSC",
"MSCI",
"MSD",
"MSEX",
"MSFT",
"MSG",
"MSGN",
"MSI",
"MSL",
"MSM",
"MSN",
"MSON",
"MSTR",
"MSVB",
"MS^A",
"MS^E",
"MS^F",
"MS^G",
"MS^I",
"MS^K",
"MT",
"MTB",
"MTBC",
"MTBCP",
"MTC",
"MTCH",
"MTD",
"MTDR",
"MTEM",
"MTEX",
"MTFB",
"MTFBW",
"MTG",
"MTH",
"MTL",
"MTLS",
"MTL^",
"MTN",
"MTNB",
"MTOR",
"MTP",
"MTR",
"MTRN",
"MTRX",
"MTSC",
"MTSI",
"MTSL",
"MTT",
"MTW",
"MTX",
"MTZ",
"MU",
"MUA",
"MUC",
"MUDS",
"MUDSU",
"MUDSW",
"MUE",
"MUFG",
"MUH",
"MUI",
"MUJ",
"MUR",
"MUS",
"MUSA",
"MUX",
"MVBF",
"MVC",
"MVCD",
"MVF",
"MVIS",
"MVO",
"MVT",
"MWA",
"MWK",
"MX",
"MXC",
"MXE",
"MXF",
"MXIM",
"MXL",
"MYC",
"MYD",
"MYE",
"MYF",
"MYFW",
"MYGN",
"MYI",
"MYJ",
"MYL",
"MYN",
"MYO",
"MYOK",
"MYOS",
"MYOV",
"MYRG",
"MYSZ",
"MYT",
"MZA",
"NAC",
"NAD",
"NAII",
"NAK",
"NAKD",
"NAN",
"NANO",
"NAOV",
"NAT",
"NATH",
"NATI",
"NATR",
"NAV",
"NAVB",
"NAVI",
"NAV^D",
"NAZ",
"NBB",
"NBCP",
"NBEV",
"NBH",
"NBHC",
"NBIX",
"NBL",
"NBLX",
"NBN",
"NBO",
"NBR",
"NBRV",
"NBR^A",
"NBSE",
"NBTB",
"NBW",
"NBY",
"NC",
"NCA",
"NCB",
"NCBS",
"NCI",
"NCLH",
"NCMI",
"NCNA",
"NCR",
"NCSM",
"NCTY",
"NCV",
"NCV^A",
"NCZ",
"NCZ^A",
"NDAQ",
"NDLS",
"NDP",
"NDRA",
"NDRAW",
"NDSN",
"NE",
"NEA",
"NEBU",
"NEBUU",
"NEBUW",
"NEE",
"NEE^I",
"NEE^J",
"NEE^K",
"NEE^N",
"NEE^O",
"NEM",
"NEN",
"NEO",
"NEOG",
"NEON",
"NEOS",
"NEP",
"NEPH",
"NEPT",
"NERV",
"NES",
"NESR",
"NESRW",
"NETE",
"NEU",
"NEV",
"NEW",
"NEWA",
"NEWM",
"NEWR",
"NEWT",
"NEWTI",
"NEWTL",
"NEXA",
"NEXT",
"NFBK",
"NFC",
"NFC.U",
"NFC.WS",
"NFE",
"NFG",
"NFIN",
"NFINU",
"NFINW",
"NFJ",
"NFLX",
"NFTY",
"NG",
"NGD",
"NGG",
"NGHC",
"NGHCN",
"NGHCO",
"NGHCP",
"NGHCZ",
"NGL",
"NGLS^A",
"NGL^B",
"NGL^C",
"NGM",
"NGS",
"NGVC",
"NGVT",
"NH",
"NHA",
"NHC",
"NHF",
"NHI",
"NHLD",
"NHLDW",
"NHS",
"NHTC",
"NI",
"NICE",
"NICK",
"NID",
"NIE",
"NIHD",
"NIM",
"NINE",
"NIO",
"NIQ",
"NIU",
"NI^B",
"NJR",
"NJV",
"NK",
"NKE",
"NKG",
"NKSH",
"NKTR",
"NKX",
"NL",
"NLNK",
"NLS",
"NLSN",
"NLTX",
"NLY",
"NLY^D",
"NLY^F",
"NLY^G",
"NLY^I",
"NM",
"NMCI",
"NMFC",
"NMFX",
"NMI",
"NMIH",
"NMK^B",
"NMK^C",
"NML",
"NMM",
"NMR",
"NMRD",
"NMRK",
"NMS",
"NMT",
"NMY",
"NMZ",
"NM^G",
"NM^H",
"NNA",
"NNBR",
"NNC",
"NNDM",
"NNI",
"NNN",
"NNN^E.CL",
"NNN^F",
"NNVC",
"NNY",
"NOA",
"NOAH",
"NOC",
"NODK",
"NOG",
"NOK",
"NOM",
"NOMD",
"NOV",
"NOVA",
"NOVN",
"NOVT",
"NOW",
"NP",
"NPAUU",
"NPK",
"NPN",
"NPO",
"NPTN",
"NPV",
"NQP",
"NR",
"NRC",
"NRCG",
"NRCG.WS",
"NRE",
"NRG",
"NRGX",
"NRIM",
"NRK",
"NRO",
"NRP",
"NRT",
"NRUC",
"NRZ",
"NRZ^A",
"NRZ^B",
"NS",
"NSA",
"NSA^A",
"NSC",
"NSCO",
"NSCO.WS",
"NSEC",
"NSIT",
"NSL",
"NSP",
"NSPR",
"NSPR.WS",
"NSPR.WS.B",
"NSS",
"NSSC",
"NSTG",
"NSYS",
"NS^A",
"NS^B",
"NS^C",
"NTAP",
"NTB",
"NTC",
"NTCT",
"NTEC",
"NTES",
"NTEST",
"NTEST.A",
"NTEST.B",
"NTEST.C",
"NTG",
"NTGN",
"NTGR",
"NTIC",
"NTIP",
"NTLA",
"NTN",
"NTNX",
"NTP",
"NTR",
"NTRA",
"NTRP",
"NTRS",
"NTRSP",
"NTUS",
"NTWK",
"NTX",
"NTZ",
"NUAN",
"NUE",
"NUM",
"NUO",
"NURO",
"NUROW",
"NUS",
"NUV",
"NUVA",
"NUW",
"NVAX",
"NVCN",
"NVCR",
"NVDA",
"NVEC",
"NVEE",
"NVFY",
"NVG",
"NVGS",
"NVIV",
"NVLN",
"NVMI",
"NVO",
"NVR",
"NVRO",
"NVS",
"NVT",
"NVTA",
"NVTR",
"NVUS",
"NWBI",
"NWE",
"NWFL",
"NWHM",
"NWL",
"NWLI",
"NWN",
"NWPX",
"NWS",
"NWSA",
"NX",
"NXC",
"NXE",
"NXGN",
"NXJ",
"NXMD",
"NXN",
"NXP",
"NXPI",
"NXQ",
"NXR",
"NXRT",
"NXST",
"NXTC",
"NXTD",
"NXTG",
"NYCB",
"NYCB^A",
"NYCB^U",
"NYMT",
"NYMTN",
"NYMTO",
"NYMTP",
"NYMX",
"NYNY",
"NYT",
"NYV",
"NZF",
"O",
"OAC",
"OAC.U",
"OAC.WS",
"OAK",
"OAK^A",
"OAK^B",
"OAS",
"OBAS",
"OBCI",
"OBE",
"OBLN",
"OBNK",
"OBSV",
"OC",
"OCC",
"OCCI",
"OCCIP",
"OCFC",
"OCN",
"OCSI",
"OCSL",
"OCSLL",
"OCUL",
"OCX",
"ODC",
"ODFL",
"ODP",
"ODT",
"OEC",
"OESX",
"OFC",
"OFED",
"OFG",
"OFG^A",
"OFG^B",
"OFG^D",
"OFIX",
"OFLX",
"OFS",
"OFSSL",
"OFSSZ",
"OGE",
"OGEN",
"OGI",
"OGS",
"OHAI",
"OHI",
"OI",
"OIA",
"OIBR.C",
"OII",
"OIIM",
"OIS",
"OKE",
"OKTA",
"OLBK",
"OLD",
"OLED",
"OLLI",
"OLN",
"OLP",
"OMAB",
"OMC",
"OMCL",
"OMER",
"OMEX",
"OMF",
"OMI",
"OMN",
"OMP",
"ON",
"ONB",
"ONCE",
"ONCS",
"ONCT",
"ONCY",
"ONDK",
"ONE",
"ONEQ",
"ONTX",
"ONTXW",
"ONVO",
"OOMA",
"OPB",
"OPBK",
"OPES",
"OPESU",
"OPESW",
"OPGN",
"OPGNW",
"OPHC",
"OPI",
"OPINI",
"OPK",
"OPNT",
"OPOF",
"OPP",
"OPRA",
"OPRX",
"OPTN",
"OPTT",
"OPY",
"OR",
"ORA",
"ORAN",
"ORBC",
"ORC",
"ORCC",
"ORCL",
"ORG",
"ORGO",
"ORGS",
"ORI",
"ORIT",
"ORLY",
"ORMP",
"ORN",
"ORRF",
"ORSNU",
"ORTX",
"OSB",
"OSBC",
"OSBCP",
"OSG",
"OSIS",
"OSK",
"OSLE",
"OSMT",
"OSN",
"OSPN",
"OSS",
"OSTK",
"OSUR",
"OSW",
"OTEL",
"OTEX",
"OTG",
"OTIC",
"OTIV",
"OTLK",
"OTLKW",
"OTTR",
"OTTW",
"OUT",
"OVBC",
"OVID",
"OVLY",
"OXBR",
"OXBRW",
"OXFD",
"OXLC",
"OXLCM",
"OXLCO",
"OXM",
"OXSQ",
"OXSQL",
"OXSQZ",
"OXY",
"OZK",
"PAA",
"PAAC",
"PAACR",
"PAACU",
"PAACW",
"PAAS",
"PAC",
"PACB",
"PACD",
"PACK",
"PACK.WS",
"PACQ",
"PACQU",
"PACQW",
"PACW",
"PAG",
"PAGP",
"PAGS",
"PAHC",
"PAI",
"PAM",
"PANL",
"PANW",
"PAR",
"PARR",
"PATI",
"PATK",
"PAVM",
"PAVMW",
"PAVMZ",
"PAYC",
"PAYS",
"PAYX",
"PB",
"PBA",
"PBB",
"PBBI",
"PBC",
"PBCT",
"PBCTP",
"PBF",
"PBFS",
"PBFX",
"PBH",
"PBHC",
"PBI",
"PBIO",
"PBIP",
"PBI^B",
"PBPB",
"PBR",
"PBR.A",
"PBT",
"PBTS",
"PBY",
"PBYI",
"PCAR",
"PCB",
"PCF",
"PCG",
"PCG^A",
"PCG^B",
"PCG^C",
"PCG^D",
"PCG^E",
"PCG^G",
"PCG^H",
"PCG^I",
"PCH",
"PCI",
"PCIM",
"PCK",
"PCM",
"PCN",
"PCOM",
"PCQ",
"PCRX",
"PCSB",
"PCTI",
"PCTY",
"PCYG",
"PCYO",
"PD",
"PDBC",
"PDCE",
"PDCO",
"PDD",
"PDEV",
"PDEX",
"PDFS",
"PDI",
"PDLB",
"PDLI",
"PDM",
"PDP",
"PDS",
"PDSB",
"PDT",
"PE",
"PEB",
"PEBK",
"PEBO",
"PEB^C",
"PEB^D",
"PEB^E",
"PEB^F",
"PECK",
"PED",
"PEER",
"PEG",
"PEGA",
"PEGI",
"PEI",
"PEIX",
"PEI^B",
"PEI^C",
"PEI^D",
"PEN",
"PENN",
"PEO",
"PEP",
"PER",
"PERI",
"PESI",
"PETQ",
"PETS",
"PETZ",
"PEY",
"PEZ",
"PFBC",
"PFBI",
"PFD",
"PFE",
"PFF",
"PFG",
"PFGC",
"PFH",
"PFI",
"PFIE",
"PFIN",
"PFIS",
"PFL",
"PFLT",
"PFM",
"PFMT",
"PFN",
"PFNX",
"PFO",
"PFPT",
"PFS",
"PFSI",
"PFSW",
"PG",
"PGC",
"PGJ",
"PGNX",
"PGP",
"PGR",
"PGRE",
"PGTI",
"PGZ",
"PH",
"PHAS",
"PHCF",
"PHD",
"PHG",
"PHI",
"PHIO",
"PHIOW",
"PHK",
"PHM",
"PHO",
"PHR",
"PHT",
"PHUN",
"PHUNW",
"PHX",
"PI",
"PIC",
"PIC.U",
"PIC.WS",
"PICO",
"PID",
"PIE",
"PIH",
"PIHPP",
"PII",
"PIM",
"PINC",
"PINS",
"PIO",
"PIR",
"PIRS",
"PIXY",
"PIY",
"PIZ",
"PJC",
"PJH",
"PJT",
"PK",
"PKBK",
"PKD",
"PKE",
"PKG",
"PKI",
"PKO",
"PKOH",
"PKW",
"PKX",
"PLAB",
"PLAG",
"PLAN",
"PLAY",
"PLBC",
"PLC",
"PLCE",
"PLD",
"PLG",
"PLIN",
"PLL",
"PLM",
"PLMR",
"PLNT",
"PLOW",
"PLPC",
"PLSE",
"PLT",
"PLUG",
"PLUS",
"PLW",
"PLX",
"PLXP",
"PLXS",
"PLYA",
"PLYM",
"PLYM^A",
"PM",
"PMBC",
"PMD",
"PME",
"PMF",
"PML",
"PMM",
"PMO",
"PMOM",
"PMT",
"PMTS",
"PMT^A",
"PMT^B",
"PMX",
"PNBK",
"PNC",
"PNC^P",
"PNC^Q",
"PNF",
"PNFP",
"PNI",
"PNM",
"PNNT",
"PNQI",
"PNR",
"PNRG",
"PNRL",
"PNTR",
"PNW",
"POAI",
"PODD",
"POL",
"POLA",
"POLY",
"POOL",
"POPE",
"POR",
"POST",
"POWI",
"POWL",
"PPBI",
"PPC",
"PPDF",
"PPG",
"PPH",
"PPHI",
"PPIH",
"PPL",
"PPR",
"PPSI",
"PPT",
"PPX",
"PQG",
"PRA",
"PRAA",
"PRAH",
"PRCP",
"PRE^F",
"PRE^G",
"PRE^H",
"PRE^I",
"PRFT",
"PRFZ",
"PRGO",
"PRGS",
"PRGX",
"PRH",
"PRI",
"PRIF^A",
"PRIF^B",
"PRIF^C",
"PRIF^D",
"PRIM",
"PRK",
"PRLB",
"PRMW",
"PRN",
"PRNB",
"PRO",
"PROS",
"PROV",
"PRPH",
"PRPL",
"PRPO",
"PRQR",
"PRS",
"PRSC",
"PRSP",
"PRT",
"PRTA",
"PRTH",
"PRTK",
"PRTO",
"PRTS",
"PRTY",
"PRU",
"PRVB",
"PRVL",
"PS",
"PSA",
"PSA^A",
"PSA^B",
"PSA^C",
"PSA^D",
"PSA^E",
"PSA^F",
"PSA^G",
"PSA^H",
"PSA^V",
"PSA^W",
"PSA^X",
"PSB",
"PSB^U",
"PSB^V",
"PSB^W",
"PSB^X",
"PSB^Y",
"PSC",
"PSCC",
"PSCD",
"PSCE",
"PSCF",
"PSCH",
"PSCI",
"PSCM",
"PSCT",
"PSCU",
"PSDO",
"PSEC",
"PSET",
"PSF",
"PSL",
"PSM",
"PSMT",
"PSN",
"PSNL",
"PSO",
"PSTG",
"PSTI",
"PSTL",
"PSTV",
"PSTVZ",
"PSV",
"PSX",
"PSXP",
"PT",
"PTC",
"PTCT",
"PTE",
"PTEN",
"PTF",
"PTGX",
"PTH",
"PTI",
"PTLA",
"PTMN",
"PTN",
"PTNR",
"PTR",
"PTSI",
"PTVCA",
"PTVCB",
"PTY",
"PUB",
"PUI",
"PUK",
"PUK^",
"PUK^A",
"PULM",
"PUMP",
"PUYI",
"PVAC",
"PVAL",
"PVBC",
"PVG",
"PVH",
"PVL",
"PVT",
"PVT.U",
"PVT.WS",
"PVTL",
"PW",
"PWOD",
"PWR",
"PW^A",
"PXD",
"PXI",
"PXLW",
"PXS",
"PY",
"PYN",
"PYPL",
"PYS",
"PYT",
"PYX",
"PYZ",
"PZC",
"PZG",
"PZN",
"PZZA",
"QABA",
"QADA",
"QADB",
"QAT",
"QBAK",
"QCLN",
"QCOM",
"QCRH",
"QD",
"QDEL",
"QEP",
"QES",
"QFIN",
"QGEN",
"QHC",
"QIWI",
"QLC",
"QLYS",
"QNST",
"QQEW",
"QQQ",
"QQQX",
"QQXT",
"QRHC",
"QRTEA",
"QRTEB",
"QRVO",
"QSR",
"QTEC",
"QTNT",
"QTRH",
"QTRX",
"QTS",
"QTS^A",
"QTS^B",
"QTT",
"QTWO",
"QUAD",
"QUIK",
"QUMU",
"QUOT",
"QURE",
"QVCD",
"QYLD",
"R",
"RA",
"RACE",
"RAD",
"RADA",
"RAIL",
"RAMP",
"RAND",
"RAPT",
"RARE",
"RARX",
"RAVE",
"RAVN",
"RBA",
"RBB",
"RBBN",
"RBC",
"RBCAA",
"RBCN",
"RBKB",
"RBNC",
"RBS",
"RBZ",
"RC",
"RCA",
"RCB",
"RCG",
"RCI",
"RCII",
"RCKT",
"RCKY",
"RCL",
"RCM",
"RCMT",
"RCON",
"RCP",
"RCS",
"RCUS",
"RDCM",
"RDFN",
"RDHL",
"RDI",
"RDIB",
"RDN",
"RDNT",
"RDS.A",
"RDS.B",
"RDUS",
"RDVT",
"RDVY",
"RDWR",
"RDY",
"RE",
"REAL",
"RECN",
"REDU",
"REED",
"REFR",
"REG",
"REGI",
"REGN",
"REI",
"REKR",
"RELL",
"RELV",
"RELX",
"RENN",
"REPH",
"REPL",
"RES",
"RESI",
"RESN",
"RETA",
"RETO",
"REV",
"REVG",
"REX",
"REXN",
"REXR",
"REXR^A",
"REXR^B",
"REZI",
"RF",
"RFAP",
"RFDI",
"RFEM",
"RFEU",
"RFI",
"RFIL",
"RFL",
"RFP",
"RF^A",
"RF^B",
"RF^C",
"RGA",
"RGCO",
"RGEN",
"RGLD",
"RGLS",
"RGNX",
"RGR",
"RGS",
"RGT",
"RH",
"RHE",
"RHE^A",
"RHI",
"RHP",
"RIBT",
"RICK",
"RIF",
"RIG",
"RIGL",
"RILY",
"RILYG",
"RILYH",
"RILYI",
"RILYL",
"RILYO",
"RILYZ",
"RING",
"RIO",
"RIOT",
"RIV",
"RIVE",
"RJF",
"RKDA",
"RL",
"RLGT",
"RLGY",
"RLH",
"RLI",
"RLJ",
"RLJ^A",
"RM",
"RMAX",
"RMBI",
"RMBL",
"RMBS",
"RMCF",
"RMD",
"RMED",
"RMG",
"RMG.U",
"RMG.WS",
"RMI",
"RMM",
"RMNI",
"RMPL^",
"RMR",
"RMT",
"RMTI",
"RNDB",
"RNDM",
"RNDV",
"RNEM",
"RNET",
"RNG",
"RNGR",
"RNLC",
"RNMC",
"RNP",
"RNR",
"RNR^C",
"RNR^E",
"RNR^F",
"RNSC",
"RNST",
"RNWK",
"ROAD",
"ROAN",
"ROBO",
"ROBT",
"ROCK",
"ROG",
"ROIC",
"ROK",
"ROKU",
"ROL",
"ROLL",
"ROP",
"ROSE",
"ROSEU",
"ROSEW",
"ROST",
"ROX",
"ROYT",
"RP",
"RPAI",
"RPAY",
"RPD",
"RPLA",
"RPLA.U",
"RPLA.WS",
"RPM",
"RPT",
"RPT^D",
"RQI",
"RRBI",
"RRC",
"RRD",
"RRGB",
"RRR",
"RRTS",
"RS",
"RSF",
"RSG",
"RST",
"RTEC",
"RTIX",
"RTLR",
"RTN",
"RTRX",
"RTTR",
"RTW",
"RUBI",
"RUBY",
"RUHN",
"RUN",
"RUSHA",
"RUSHB",
"RUTH",
"RVEN",
"RVI",
"RVLT",
"RVLV",
"RVNC",
"RVP",
"RVSB",
"RVT",
"RWGE",
"RWGE.U",
"RWGE.WS",
"RWLK",
"RWT",
"RXN",
"RXN^A",
"RY",
"RYAAY",
"RYAM",
"RYB",
"RYI",
"RYN",
"RYTM",
"RY^T",
"RZA",
"RZB",
"S",
"SA",
"SAB",
"SABR",
"SACH",
"SAEX",
"SAF",
"SAFE",
"SAFM",
"SAFT",
"SAGE",
"SAH",
"SAIA",
"SAIC",
"SAIL",
"SAL",
"SALM",
"SALT",
"SAM",
"SAMA",
"SAMAU",
"SAMAW",
"SAMG",
"SAN",
"SAND ",
"SANM",
"SANW",
"SAN^B",
"SAP",
"SAR",
"SASR",
"SATS",
"SAUC",
"SAVA",
"SAVE",
"SB",
"SBAC",
"SBBP",
"SBBX",
"SBCF",
"SBE.U",
"SBFG",
"SBFGP",
"SBGI",
"SBGL",
"SBH",
"SBI",
"SBLK",
"SBLKZ",
"SBNA",
"SBNY",
"SBOW",
"SBPH",
"SBR",
"SBRA",
"SBS",
"SBSI",
"SBT",
"SBUX",
"SB^C",
"SB^D",
"SC",
"SCA",
"SCCB",
"SCCI",
"SCCO",
"SCD",
"SCE^B",
"SCE^C",
"SCE^D",
"SCE^E",
"SCE^G",
"SCE^H",
"SCE^J",
"SCE^K",
"SCE^L",
"SCHL",
"SCHN",
"SCHW",
"SCHW^C",
"SCHW^D",
"SCI",
"SCKT",
"SCL",
"SCM",
"SCON",
"SCOR",
"SCPE",
"SCPE.U",
"SCPE.WS",
"SCPH",
"SCPL",
"SCS",
"SCSC",
"SCVL",
"SCWX",
"SCX",
"SCYX",
"SCZ",
"SD",
"SDC",
"SDG",
"SDI",
"SDPI",
"SDR",
"SDRL",
"SDT",
"SDVY",
"SE",
"SEAC",
"SEAS",
"SEB",
"SECO",
"SEDG",
"SEE",
"SEED",
"SEEL",
"SEIC",
"SELB",
"SELF",
"SEM",
"SEMG",
"SENEA",
"SENEB",
"SENS",
"SERV",
"SES",
"SESN",
"SF",
"SFB",
"SFBC",
"SFBS",
"SFE",
"SFET",
"SFIX",
"SFL",
"SFLY",
"SFM",
"SFNC",
"SFST",
"SFUN",
"SF^A",
"SF^B",
"SG",
"SGA",
"SGB",
"SGBX",
"SGC",
"SGEN",
"SGH",
"SGLB",
"SGLBW",
"SGMA",
"SGMO",
"SGMS",
"SGOC",
"SGRP",
"SGRY",
"SGU",
"SHAK",
"SHBI",
"SHEN",
"SHG",
"SHI",
"SHIP",
"SHIPW",
"SHIPZ",
"SHLL",
"SHLL.U",
"SHLL.WS",
"SHLO",
"SHLX",
"SHO",
"SHOO",
"SHOP",
"SHOS",
"SHO^E",
"SHO^F",
"SHSP",
"SHV",
"SHW",
"SHY",
"SIBN",
"SIC",
"SID",
"SIEB",
"SIEN",
"SIF",
"SIFY",
"SIG",
"SIGA",
"SIGI",
"SILC",
"SILK",
"SILV",
"SIM",
"SIMO",
"SINA",
"SINO",
"SINT",
"SIRI",
"SITC",
"SITC^A",
"SITC^J",
"SITC^K",
"SITE",
"SITO",
"SIVB",
"SIX",
"SJI",
"SJIU",
"SJM",
"SJR",
"SJT",
"SJW",
"SKIS",
"SKM",
"SKOR",
"SKT",
"SKX",
"SKY",
"SKYS",
"SKYW",
"SKYY",
"SLAB",
"SLB",
"SLCA",
"SLCT",
"SLDB",
"SLF",
"SLG",
"SLGG",
"SLGL",
"SLGN",
"SLG^I",
"SLIM",
"SLM",
"SLMBP",
"SLNG",
"SLNO",
"SLNOW",
"SLP",
"SLQD",
"SLRC",
"SLRX",
"SLS",
"SLVO",
"SM",
"SMAR",
"SMBC",
"SMBK",
"SMCP",
"SMED",
"SMFG",
"SMG",
"SMHI",
"SMIT",
"SMLP",
"SMM",
"SMMC",
"SMMCU",
"SMMCW",
"SMMF",
"SMMT",
"SMP",
"SMPL",
"SMRT",
"SMSI",
"SMTA",
"SMTC",
"SMTS",
"SMTX",
"SNA",
"SNAP",
"SNBR",
"SNCR",
"SND",
"SNDE",
"SNDL",
"SNDR",
"SNDX",
"SNE",
"SNES",
"SNFCA",
"SNGX",
"SNGXW",
"SNH",
"SNHNI",
"SNHNL",
"SNLN",
"SNMP",
"SNN",
"SNNA",
"SNOA",
"SNOAW",
"SNP",
"SNPS",
"SNR",
"SNSR",
"SNSS",
"SNV",
"SNV^D",
"SNV^E",
"SNX",
"SNY",
"SO",
"SOCL",
"SOGO",
"SOHO",
"SOHOB",
"SOHON",
"SOHOO",
"SOHU",
"SOI",
"SOJA",
"SOJB",
"SOJC",
"SOL",
"SOLN",
"SOLO",
"SOLOW",
"SOLY",
"SON",
"SONA",
"SONG",
"SONGW",
"SONM",
"SONO",
"SOR",
"SORL",
"SOXX",
"SP",
"SPAQ",
"SPAQ.U",
"SPAQ.WS",
"SPAR",
"SPB ",
"SPCB",
"SPE",
"SPEX",
"SPE^B",
"SPFI",
"SPG",
"SPGI",
"SPG^J",
"SPH",
"SPHS",
"SPI",
"SPKE",
"SPKEP",
"SPLK",
"SPLP",
"SPLP^A",
"SPN",
"SPNE",
"SPNS",
"SPOK",
"SPOT",
"SPPI",
"SPR",
"SPRO",
"SPRT",
"SPSC",
"SPTN",
"SPWH",
"SPWR",
"SPXC",
"SPXX",
"SQ",
"SQBG",
"SQLV",
"SQM",
"SQNS",
"SQQQ",
"SR",
"SRAX",
"SRC",
"SRCE",
"SRCI",
"SRCL",
"SRC^A",
"SRDX",
"SRE",
"SREA",
"SRET",
"SREV",
"SRE^A",
"SRE^B",
"SRF",
"SRG",
"SRG^A",
"SRI",
"SRL",
"SRLP",
"SRNE",
"SRPT",
"SRRA",
"SRRK",
"SRT",
"SRTS",
"SRTSW",
"SRV",
"SRVA",
"SR^A",
"SSB",
"SSBI",
"SSD",
"SSFN",
"SSI",
"SSKN",
"SSL",
"SSNC",
"SSNT",
"SSP",
"SSPKU",
"SSRM",
"SSSS",
"SSTI",
"SSTK",
"SSW",
"SSWA",
"SSW^D",
"SSW^E",
"SSW^G",
"SSW^H",
"SSW^I",
"SSY",
"SSYS",
"ST",
"STAA",
"STAF",
"STAG",
"STAG^C",
"STAR ",
"STAR^D",
"STAR^G",
"STAR^I",
"STAY",
"STBA",
"STC",
"STCN",
"STE",
"STFC",
"STG",
"STI",
"STIM",
"STI^A",
"STK",
"STKL",
"STKS",
"STL",
"STLD",
"STL^A",
"STM",
"STML",
"STMP",
"STN",
"STND",
"STNE",
"STNG",
"STNL",
"STNLU",
"STNLW",
"STOK",
"STON",
"STOR",
"STPP",
"STRA",
"STRL",
"STRM",
"STRO",
"STRS",
"STRT",
"STT",
"STT^C",
"STT^D",
"STT^E",
"STT^G",
"STWD",
"STX",
"STXB",
"STXS",
"STZ",
"STZ.B",
"SU",
"SUI",
"SUM",
"SUMR",
"SUN",
"SUNS",
"SUNW",
"SUP",
"SUPN",
"SUPV",
"SURF",
"SUSB",
"SUSC",
"SUSL",
"SUZ",
"SVA",
"SVBI",
"SVM",
"SVMK",
"SVRA",
"SVT",
"SVVC",
"SWAV",
"SWCH",
"SWI",
"SWIR",
"SWJ",
"SWK",
"SWKS",
"SWM",
"SWN",
"SWP",
"SWTX",
"SWX",
"SWZ",
"SXC",
"SXI",
"SXT",
"SXTC",
"SY",
"SYBT",
"SYBX",
"SYF",
"SYK",
"SYKE",
"SYMC",
"SYN",
"SYNA",
"SYNC",
"SYNH",
"SYNL",
"SYPR",
"SYRS",
"SYX",
"SYY",
"SZC",
"T",
"TA",
"TAC",
"TACO",
"TACOW",
"TACT",
"TAIT",
"TAK",
"TAL",
"TALO",
"TALO.WS",
"TANH",
"TANNI",
"TANNL",
"TANNZ",
"TAOP",
"TAP",
"TAP.A",
"TAPR",
"TARO",
"TAST",
"TAT",
"TATT",
"TAYD",
"TBB",
"TBBK",
"TBC",
"TBI",
"TBIO",
"TBK",
"TBLT",
"TBLTU",
"TBLTW",
"TBNK",
"TBPH",
"TC",
"TCBI",
"TCBIL",
"TCBIP",
"TCBK",
"TCCO",
"TCDA",
"TCF",
"TCFC",
"TCFCP",
"TCGP",
"TCI",
"TCMD",
"TCO",
"TCON",
"TCO^J",
"TCO^K",
"TCP",
"TCPC",
"TCRD",
"TCRR",
"TCRW",
"TCRZ",
"TCS",
"TCX",
"TD",
"TDA",
"TDAC",
"TDACU",
"TDACW",
"TDC",
"TDE",
"TDF",
"TDG",
"TDI",
"TDIV",
"TDJ",
"TDOC",
"TDS",
"TDW",
"TDW.WS",
"TDW.WS.A",
"TDW.WS.B",
"TDY",
"TEAF",
"TEAM",
"TECD",
"TECH",
"TECK",
"TECTP",
"TEDU",
"TEF",
"TEI",
"TEL",
"TELL",
"TEN",
"TENB",
"TENX",
"TEO",
"TER",
"TERP",
"TESS",
"TEUM",
"TEVA",
"TEX",
"TFSL",
"TFX",
"TG",
"TGA",
"TGB",
"TGC",
"TGE",
"TGEN",
"TGH",
"TGI",
"TGLS",
"TGNA",
"TGP",
"TGP^A",
"TGP^B",
"TGS",
"TGT",
"TGTX",
"TH",
"THBRU",
"THC",
"THCA",
"THCAU",
"THCAW",
"THCB",
"THCBU",
"THCBW",
"THFF",
"THG",
"THGA",
"THM",
"THO",
"THOR",
"THQ",
"THR",
"THRM",
"THS",
"THW",
"THWWW",
"TIBR",
"TIBRU",
"TIBRW",
"TIF",
"TIGO",
"TIGR",
"TILE",
"TIPT",
"TISI",
"TITN",
"TIVO",
"TJX",
"TK",
"TKAT",
"TKC",
"TKKS",
"TKKSR",
"TKKSU",
"TKKSW",
"TKR",
"TLC",
"TLF",
"TLGT",
"TLI",
"TLK",
"TLND",
"TLRA",
"TLRD",
"TLRY",
"TLSA",
"TLT",
"TLYS",
"TM",
"TMCX",
"TMCXU",
"TMCXW",
"TMDI",
"TMDX",
"TME",
"TMHC",
"TMO",
"TMP",
"TMQ",
"TMSR",
"TMST",
"TMUS",
"TNAV",
"TNC",
"TNDM",
"TNET",
"TNK",
"TNP",
"TNP^C",
"TNP^D",
"TNP^E",
"TNP^F",
"TNXP",
"TOCA",
"TOL",
"TOO",
"TOO^A",
"TOO^B",
"TOO^E",
"TOPS",
"TORC",
"TOT",
"TOTA",
"TOTAR",
"TOTAU",
"TOTAW",
"TOUR",
"TOWN",
"TOWR",
"TPB",
"TPC",
"TPCO",
"TPGH",
"TPGH.U",
"TPGH.WS",
"TPH",
"TPHS",
"TPIC",
"TPL",
"TPR",
"TPRE",
"TPTX",
"TPVG",
"TPVY",
"TPX",
"TPZ",
"TQQQ",
"TR",
"TRC",
"TRCB",
"TRCH",
"TRCO",
"TREC",
"TREE",
"TREX",
"TRGP",
"TRHC",
"TRI",
"TRIB",
"TRIL",
"TRIP",
"TRK",
"TRMB",
"TRMD",
"TRMK",
"TRMT",
"TRN",
"TRNE",
"TRNE.U",
"TRNE.WS",
"TRNO",
"TRNS",
"TRNX",
"TROV",
"TROW",
"TROX",
"TRP",
"TRPX",
"TRQ",
"TRS",
"TRST",
"TRT",
"TRTN",
"TRTN^A",
"TRTN^B",
"TRTX",
"TRU",
"TRUE",
"TRUP",
"TRV",
"TRVG",
"TRVI",
"TRVN",
"TRWH",
"TRX",
"TRXC",
"TS",
"TSBK",
"TSC",
"TSCAP",
"TSCBP",
"TSCO",
"TSE",
"TSEM",
"TSG",
"TSI",
"TSLA",
"TSLF",
"TSLX",
"TSM",
"TSN",
"TSQ",
"TSRI",
"TSS",
"TSU",
"TTC",
"TTD",
"TTEC",
"TTEK",
"TTGT",
"TTI",
"TTM",
"TTMI",
"TTNP",
"TTOO",
"TTP",
"TTPH",
"TTS",
"TTTN",
"TTWO",
"TU",
"TUES",
"TUFN",
"TUP",
"TUR",
"TURN",
"TUSA",
"TUSK",
"TV",
"TVC",
"TVE",
"TVIX",
"TVTY",
"TW",
"TWI",
"TWIN",
"TWLO",
"TWMC",
"TWN",
"TWNK",
"TWNKW",
"TWO",
"TWOU",
"TWO^A",
"TWO^B",
"TWO^C",
"TWO^D",
"TWO^E",
"TWST",
"TWTR",
"TX",
"TXG",
"TXMD",
"TXN",
"TXRH",
"TXT",
"TY",
"TYG",
"TYHT",
"TYL",
"TYME",
"TYPE",
"TY^",
"TZAC",
"TZACU",
"TZACW",
"TZOO",
"UA",
"UAA",
"UAE",
"UAL",
"UAMY",
"UAN",
"UAVS",
"UBA",
"UBCP",
"UBER",
"UBFO",
"UBIO",
"UBNK",
"UBOH",
"UBP",
"UBP^G",
"UBP^H",
"UBS",
"UBSI",
"UBX",
"UCBI",
"UCFC",
"UCTT",
"UDR",
"UE",
"UEC",
"UEIC",
"UEPS",
"UFAB",
"UFCS",
"UFI",
"UFPI",
"UFPT",
"UFS",
"UG",
"UGI",
"UGLD",
"UGP",
"UHAL",
"UHS",
"UHT",
"UI",
"UIHC",
"UIS",
"UL",
"ULBI",
"ULH",
"ULTA",
"UMBF",
"UMC",
"UMH",
"UMH^B",
"UMH^C",
"UMH^D",
"UMPQ",
"UMRX",
"UN",
"UNAM",
"UNB",
"UNF",
"UNFI",
"UNH",
"UNIT",
"UNM",
"UNMA",
"UNP",
"UNT",
"UNTY",
"UNVR",
"UONE",
"UONEK",
"UPLD",
"UPS",
"UPWK",
"URBN",
"URG",
"URGN",
"URI",
"UROV",
"USA",
"USAC",
"USAK",
"USAP",
"USAS",
"USAT",
"USATP",
"USAU",
"USB",
"USB^A",
"USB^H",
"USB^M",
"USB^O",
"USB^P",
"USCR",
"USDP",
"USEG",
"USFD",
"USIG",
"USIO",
"USLB",
"USLM",
"USLV",
"USM",
"USMC",
"USNA",
"USOI",
"USPH",
"USWS",
"USWSW",
"USX",
"UTF",
"UTG",
"UTHR",
"UTI",
"UTL",
"UTMD",
"UTSI",
"UTX",
"UUU",
"UUUU",
"UUUU.WS",
"UVE",
"UVSP",
"UVV",
"UXIN",
"UZA",
"UZB",
"UZC",
"V",
"VAC",
"VAL",
"VALE",
"VALU",
"VALX",
"VAM",
"VAPO",
"VAR",
"VBF",
"VBFC",
"VBIV",
"VBLT",
"VBND",
"VBTX",
"VC",
"VCEL",
"VCF",
"VCIF",
"VCIT",
"VCLT",
"VCNX",
"VCRA",
"VCSH",
"VCTR",
"VCV",
"VCYT",
"VEC",
"VECO",
"VEDL",
"VEEV",
"VEON",
"VER",
"VERB",
"VERBW",
"VERI",
"VERU",
"VERY",
"VER^F",
"VET",
"VETS",
"VFC",
"VFF",
"VFL",
"VG",
"VGI",
"VGIT",
"VGLT",
"VGM",
"VGR",
"VGSH",
"VGZ",
"VHC",
"VHI",
"VIA",
"VIAB",
"VIAV",
"VICI",
"VICR",
"VIDI",
"VIGI",
"VIIX",
"VIOT",
"VIPS",
"VIRC",
"VIRT",
"VISL",
"VIST",
"VISTER",
"VIV",
"VIVE",
"VIVO",
"VJET",
"VKI",
"VKQ",
"VKTX",
"VKTXW",
"VLGEA",
"VLO",
"VLRS",
"VLRX",
"VLT",
"VLY",
"VLYPO",
"VLYPP",
"VMBS",
"VMC",
"VMD",
"VMET",
"VMI",
"VMM",
"VMO",
"VMW",
"VNCE",
"VNDA",
"VNE",
"VNET",
"VNO",
"VNOM",
"VNO^K",
"VNO^L",
"VNO^M",
"VNQI",
"VNRX",
"VNTR",
"VOC",
"VOD",
"VOLT",
"VONE",
"VONG",
"VONV",
"VOXX",
"VOYA",
"VOYA^B",
"VPG",
"VPV",
"VRA",
"VRAY",
"VRCA",
"VREX",
"VRIG",
"VRML",
"VRNA",
"VRNS",
"VRNT",
"VRRM",
"VRS",
"VRSK",
"VRSN",
"VRTS",
"VRTSP",
"VRTU",
"VRTV",
"VRTX",
"VSAT",
"VSDA",
"VSEC",
"VSH",
"VSI",
"VSLR",
"VSM",
"VSMV",
"VST",
"VST.WS.A",
"VSTM",
"VSTO",
"VTA",
"VTC",
"VTEC",
"VTGN",
"VTHR",
"VTIP",
"VTIQ",
"VTIQU",
"VTIQW",
"VTN",
"VTNR",
"VTR",
"VTSI",
"VTUS",
"VTVT",
"VTWG",
"VTWO",
"VTWV",
"VUSE",
"VUZI",
"VVI",
"VVPR",
"VVR",
"VVUS",
"VVV",
"VWOB",
"VXRT",
"VXUS",
"VYGR",
"VYMI",
"VZ",
"W",
"WAAS",
"WAB",
"WABC",
"WAFD",
"WAFU",
"WAIR",
"WAL",
"WALA",
"WASH",
"WAT",
"WATT",
"WB",
"WBA",
"WBAI",
"WBC",
"WBK",
"WBND",
"WBS",
"WBS^F",
"WBT",
"WCC",
"WCG",
"WCLD",
"WCN",
"WD",
"WDAY",
"WDC",
"WDFC",
"WDR",
"WEA",
"WEBK",
"WEC",
"WEI",
"WELL",
"WEN",
"WERN",
"WES",
"WETF",
"WEX",
"WEYS",
"WF",
"WFC",
"WFC^L",
"WFC^N",
"WFC^O",
"WFC^P",
"WFC^Q",
"WFC^R",
"WFC^T",
"WFC^V",
"WFC^W",
"WFC^X",
"WFC^Y",
"WFE^A",
"WGO",
"WH",
"WHD",
"WHF",
"WHFBZ",
"WHG",
"WHLM",
"WHLR",
"WHLRD",
"WHLRP",
"WHR",
"WIA",
"WIFI",
"WILC",
"WINA",
"WINC",
"WING",
"WINS",
"WIRE",
"WISA",
"WIT",
"WIW",
"WIX",
"WK",
"WKHS",
"WLDN",
"WLFC",
"WLH",
"WLK",
"WLKP",
"WLL",
"WLTW",
"WM",
"WMB",
"WMC",
"WMGI",
"WMK",
"WMS",
"WMT",
"WNC",
"WNEB",
"WNFM",
"WNS",
"WOOD",
"WOR",
"WORK",
"WORX",
"WOW",
"WPC",
"WPG",
"WPG^H",
"WPG^I",
"WPM",
"WPP",
"WPRT",
"WPX",
"WRB",
"WRB^B",
"WRB^C",
"WRB^D",
"WRB^E",
"WRE",
"WRI",
"WRK",
"WRLD",
"WRLS",
"WRLSR",
"WRLSU",
"WRLSW",
"WRN",
"WRTC",
"WSBC",
"WSBF",
"WSC",
"WSFS",
"WSG",
"WSM",
"WSO",
"WSO.B",
"WSR",
"WST",
"WSTG",
"WSTL",
"WTBA",
"WTER",
"WTFC",
"WTFCM",
"WTI",
"WTM",
"WTR",
"WTRE",
"WTREP",
"WTRH",
"WTRU",
"WTS",
"WTT",
"WTTR",
"WU",
"WUBA",
"WVE",
"WVFC",
"WVVI",
"WVVIP",
"WW",
"WWD",
"WWE",
"WWR",
"WWW",
"WY",
"WYND",
"WYNN",
"WYY",
"X",
"XAIR",
"XAN",
"XAN^C",
"XBIO",
"XBIOW",
"XBIT",
"XCUR",
"XEC",
"XEL",
"XELA",
"XELB",
"XENE",
"XENT",
"XERS",
"XFLT",
"XFOR",
"XHR",
"XIN",
"XLNX",
"XLRN",
"XNCR",
"XNET",
"XOG",
"XOM",
"XOMA",
"XON",
"XONE",
"XPEL",
"XPER",
"XPL",
"XPO",
"XRAY",
"XRF",
"XRX",
"XSPA",
"XT",
"XTLB",
"XTNT",
"XXII",
"XYF",
"XYL",
"Y",
"YCBD",
"YELP",
"YETI",
"YEXT",
"YGYI",
"YI",
"YIN",
"YJ",
"YLCO",
"YLDE",
"YMAB",
"YNDX",
"YORW",
"YPF",
"YRCW",
"YRD",
"YTEN",
"YTRA",
"YUM",
"YUMA",
"YUMC",
"YVR",
"YY",
"Z",
"ZAGG",
"ZAYO",
"ZBH",
"ZBIO",
"ZBK",
"ZBRA",
"ZB^A",
"ZB^G",
"ZB^H",
"ZDGE",
"ZEAL",
"ZEN",
"ZEUS",
"ZF",
"ZFGN",
"ZG",
"ZGNX",
"ZION",
"ZIONW",
"ZIOP",
"ZIV",
"ZIXI",
"ZKIN",
"ZLAB",
"ZM",
"ZN",
"ZNGA",
"ZNH",
"ZNWAA",
"ZOM",
"ZS",
"ZSAN",
"ZTEST",
"ZTO",
"ZTR",
"ZTS",
"ZUMZ",
"ZUO",
"ZVO",
"ZYME",
"ZYNE",
"ZYXI",
]
STOCK_NAMES = [
"1-800 FLOWERS.COM",
"10x Genomics",
"111",
"1347 Property Insurance Holdings",
"180 Degree Capital Corp.",
"1895 Bancorp of Wisconsin",
"1st Constitution Bancorp (NJ)",
"1st Source Corporation",
"21Vianet Group",
"22nd Century Group",
"2U",
"360 Finance",
"3D Systems Corporation",
"3M Company",
"500.com Limited",
"51job",
"58.com Inc.",
"8i Enterprises Acquisition Corp",
"8x8 Inc",
"9F Inc.",
"A-Mark Precious Metals",
"A.H. Belo Corporation",
"A.O Smith Corporation",
"A10 Networks",
"AAC Holdings",
"AAON",
"AAR Corp.",
"ABB Ltd",
"ABIOMED",
"ABM Industries Incorporated",
"AC Immune SA",
"ACADIA Pharmaceuticals Inc.",
"ACI Worldwide",
"ACM Research",
"ACNB Corporation",
"ADDvantage Technologies Group",
"ADMA Biologics Inc",
"ADT Inc.",
"ADTRAN",
"AECOM",
"AEterna Zentaris Inc.",
"AG Mortgage Investment Trust",
"AGBA Acquisition Limited",
"AGCO Corporation",
"AGM Group Holdings Inc.",
"AGNC Investment Corp.",
"AK Steel Holding Corporation",
"ALJ Regional Holdings",
"AMAG Pharmaceuticals",
"AMC Entertainment Holdings",
"AMC Networks Inc.",
"AMCI Acquisition Corp.",
"AMCON Distributing Company",
"AMERIPRISE FINANCIAL SERVICES",
"AMERISAFE",
"AMN Healthcare Services Inc",
"AMREP Corporation",
"AMTD International Inc.",
"AMTEK",
"ANGI Homeservices Inc.",
"ANI Pharmaceuticals",
"ANSYS",
"ARC Document Solutions",
"ARCA biopharma",
"ARMOUR Residential REIT",
"ARYA Sciences Acquisition Corp.",
"ASA Gold and Precious Metals Limited",
"ASE Technology Holding Co.",
"ASGN Incorporated",
"ASLAN Pharmaceuticals Limited",
"ASML Holding N.V.",
"AT&T Inc.",
"ATA Inc.",
"ATIF Holdings Limited",
"ATN International",
"AU Optronics Corp",
"AVEO Pharmaceuticals",
"AVROBIO",
"AVX Corporation",
"AXA Equitable Holdings",
"AXT Inc",
"AZZ Inc.",
"Aaron's",
"AbbVie Inc.",
"Abbott Laboratories",
"Abeona Therapeutics Inc.",
"Abercrombie & Fitch Company",
"Aberdeen Asia-Pacific Income Fund Inc",
"Aberdeen Australia Equity Fund Inc",
"Aberdeen Emerging Markets Equity Income Fund",
"Aberdeen Global Dynamic Dividend Fund",
"Aberdeen Global Income Fund",
"Aberdeen Global Premier Properties Fund",
"Aberdeen Income Credit Strategies Fund",
"Aberdeen Japan Equity Fund",
"Aberdeen Total Dynamic Dividend Fund",
"Ability Inc.",
"Abraxas Petroleum Corporation",
"Acacia Communications",
"Acacia Research Corporation",
"Acadia Healthcare Company",
"Acadia Realty Trust",
"Acamar Partners Acquisition Corp.",
"Acasti Pharma",
"Accelerate Diagnostics",
"Accelerated Pharma",
"Acceleron Pharma Inc.",
"Accenture plc",
"Acco Brands Corporation",
"Accuray Incorporated",
"AcelRx Pharmaceuticals",
"Acer Therapeutics Inc.",
"Achieve Life Sciences",
"Achillion Pharmaceuticals",
"Aclaris Therapeutics",
"Acme United Corporation.",
"Acorda Therapeutics",
"Acorn International",
"Act II Global Acquisition Corp.",
"Actinium Pharmaceuticals",
"Activision Blizzard",
"Actuant Corporation",
"Acuity Brands",
"Acushnet Holdings Corp.",
"Adamas Pharmaceuticals",
"Adamis Pharmaceuticals Corporation",
"Adams Diversified Equity Fund",
"Adams Natural Resources Fund",
"Adams Resources & Energy",
"Adaptimmune Therapeutics plc",
"Adaptive Biotechnologies Corporation",
"Addus HomeCare Corporation",
"Adecoagro S.A.",
"Adesto Technologies Corporation",
"Adial Pharmaceuticals",
"Adient plc",
"Adobe Inc.",
"Adtalem Global Education Inc.",
"Aduro Biotech",
"AdvanSix Inc.",
"Advance Auto Parts Inc",
"Advanced Disposal Services",
"Advanced Drainage Systems",
"Advanced Emissions Solutions",
"Advanced Energy Industries",
"Advanced Micro Devices",
"Advaxis",
"Advent Convertible and Income Fund",
"Adverum Biotechnologies",
"AdvisorShares Dorsey Wright Micro-Cap ETF",
"AdvisorShares Dorsey Wright Short ETF",
"AdvisorShares Sabretooth ETF",
"AdvisorShares Vice ETF",
"Aegion Corp",
"Aeglea BioTherapeutics",
"Aegon NV",
"Aehr Test Systems",
"Aemetis",
"Aercap Holdings N.V.",
"Aerie Pharmaceuticals",
"AeroCentury Corp.",
"AeroVironment",
"Aerojet Rocketdyne Holdings",
"Aerpio Pharmaceuticals",
"Aethlon Medical",
"Aevi Genomic Medicine",
"Affiliated Managers Group",
"Affimed N.V.",
"Aflac Incorporated",
"Afya Limited",
"AgEagle Aerial Systems",
"AgeX Therapeutics",
"Agenus Inc.",
"Agile Therapeutics",
"Agilent Technologies",
"Agilysys",
"Agios Pharmaceuticals",
"Agnico Eagle Mines Limited",
"Agree Realty Corporation",
"AgroFresh Solutions",
"Aileron Therapeutics",
"Aimmune Therapeutics",
"Air Industries Group",
"Air Lease Corporation",
"Air Products and Chemicals",
"Air T",
"Air Transport Services Group",
"AirNet Technology Inc.",
"Aircastle Limited",
"Airgain",
"Akamai Technologies",
"Akari Therapeutics Plc",
"Akazoo S.A.",
"Akcea Therapeutics",
"Akebia Therapeutics",
"Akerna Corp.",
"Akero Therapeutics",
"Akers Biosciences Inc.",
"Akorn",
"Akoustis Technologies",
"Alabama Power Company",
"Alamo Group",
"Alamos Gold Inc.",
"Alarm.com Holdings",
"Alaska Air Group",
"Alaska Communications Systems Group",
"Albany International Corporation",
"Albemarle Corporation",
"Alberton Acquisition Corporation",
"Albireo Pharma",
"Alcentra Capital Corp.",
"Alcoa Corporation",
"Alcon Inc.",
"Alder BioPharmaceuticals",
"Aldeyra Therapeutics",
"Alector",
"Alerus Financial Corporation",
"Alexander & Baldwin",
"Alexander's",
"Alexandria Real Estate Equities",
"Alexco Resource Corp",
"Alexion Pharmaceuticals",
"Algonquin Power & Utilities Corp.",
"Alibaba Group Holding Limited",
"Alico",
"Alight Inc.",
"Align Technology",
"Alimera Sciences",
"Alio Gold Inc.",
"Alithya Group inc.",
"Alkermes plc",
"Allakos Inc.",
"Alleghany Corporation",
"Allegheny Technologies Incorporated",
"Allegiance Bancshares",
"Allegiant Travel Company",
"Allegion plc",
"Allegro Merger Corp.",
"Allena Pharmaceuticals",
"Allergan plc.",
"Allete",
"Alliance Data Systems Corporation",
"Alliance National Municipal Income Fund Inc",
"Alliance Resource Partners",
"Alliance World Dollar Government Fund II",
"AllianceBernstein Holding L.P.",
"Alliant Energy Corporation",
"AllianzGI Convertible & Income 2024 Target Term Fund",
"AllianzGI Convertible & Income Fund",
"AllianzGI Convertible & Income Fund II",
"AllianzGI Diversified Income & Convertible Fund",
"AllianzGI Equity & Convertible Income Fund",
"AllianzGI NFJ Dividend",
"Allied Esports Entertainment",
"Allied Healthcare Products",
"Allied Motion Technologies",
"Allison Transmission Holdings",
"Allogene Therapeutics",
"Allot Ltd.",
"Allscripts Healthcare Solutions",
"Allstate Corporation (The)",
"Ally Financial Inc.",
"Almaden Minerals",
"Alnylam Pharmaceuticals",
"Alpha Pro Tech",
"Alpha and Omega Semiconductor Limited",
"AlphaMark Actively Managed Small Cap ETF",
"Alphabet Inc.",
"Alphatec Holdings",
"Alpine Immune Sciences",
"Alta Mesa Resources",
"Altaba Inc.",
"Altair Engineering Inc.",
"Alterity Therapeutics Limited",
"Alteryx",
"Altice USA",
"Altimmune",
"Altisource Asset Management Corp",
"Altisource Portfolio Solutions S.A.",
"Altra Industrial Motion Corp.",
"Altria Group",
"Altus Midstream Company",
"Aluminum Corporation of China Limited",
"Amalgamated Bank",
"Amarin Corporation plc",
"Amazon.com",
"Ambac Financial Group",
"Ambarella",
"Ambev S.A.",
"Ambow Education Holding Ltd.",
"Amcor plc",
"Amdocs Limited",
"Amedisys Inc",
"Amerant Bancorp Inc.",
"Amerco",
"Ameren Corporation",
"Ameresco",
"Ameri Holdings",
"AmeriServ Financial Inc.",
"America First Multifamily Investors",
"America Movil",
"America's Car-Mart",
"American Airlines Group",
"American Assets Trust",
"American Axle & Manufacturing Holdings",
"American Campus Communities Inc",
"American Eagle Outfitters",
"American Electric Power Company",
"American Equity Investment Life Holding Company",
"American Express Company",
"American Finance Trust",
"American Financial Group",
"American Homes 4 Rent",
"American International Group",
"American National Bankshares",
"American National Insurance Company",
"American Outdoor Brands Corporation",
"American Public Education",
"American Realty Investors",
"American Renal Associates Holdings",
"American Resources Corporation",
"American River Bankshares",
"American Shared Hospital Services",
"American Software",
"American States Water Company",
"American Superconductor Corporation",
"American Tower Corporation (REIT)",
"American Vanguard Corporation",
"American Water Works",
"American Woodmark Corporation",
"Americas Gold and Silver Corporation",
"Americold Realty Trust",
"Ameris Bancorp",
"AmerisourceBergen Corporation (Holding Co)",
"Ames National Corporation",
"Amgen Inc.",
"Amicus Therapeutics",
"Amira Nature Foods Ltd",
"Amkor Technology",
"Amneal Pharmaceuticals",
"Ampco-Pittsburgh Corporation",
"Amphastar Pharmaceuticals",
"Amphenol Corporation",
"Ampio Pharmaceuticals",
"Amplify Online Retail ETF",
"Amtech Systems",
"Amyris",
"Analog Devices",
"Anaplan",
"AnaptysBio",
"Anavex Life Sciences Corp.",
"Anchiano Therapeutics Ltd.",
"Andina Acquisition Corp. III",
"Angel Oak Financial Strategies Income Term Trust",
"AngioDynamics",
"AngloGold Ashanti Limited",
"Anheuser-Busch Inbev SA",
"Anika Therapeutics Inc.",
"Anixa Biosciences",
"Anixter International Inc.",
"Annaly Capital Management Inc",
"Antares Pharma",
"Anterix Inc.",
"Antero Midstream Corporation",
"Antero Resources Corporation",
"Anthem",
"Anworth Mortgage Asset Corporation",
"Aon plc",
"Apache Corporation",
"Apartment Investment and Management Company",
"Apellis Pharmaceuticals",
"Apergy Corporation",
"Apex Global Brands Inc.",
"Aphria Inc.",
"Apogee Enterprises",
"Apollo Commercial Real Estate Finance",
"Apollo Endosurgery",
"Apollo Global Management",
"Apollo Investment Corporation",
"Apollo Medical Holdings",
"Apollo Senior Floating Rate Fund Inc.",
"Apollo Tactical Income Fund Inc.",
"AppFolio",
"Appian Corporation",
"Apple Hospitality REIT",
"Apple Inc.",
"Applied DNA Sciences Inc",
"Applied Genetic Technologies Corporation",
"Applied Industrial Technologies",
"Applied Materials",
"Applied Optoelectronics",
"Applied Therapeutics",
"Approach Resources Inc.",
"AptarGroup",
"Aptevo Therapeutics Inc.",
"Aptinyx Inc.",
"Aptiv PLC",
"Aptorum Group Limited",
"Aptose Biosciences",
"Apyx Medical Corporation",
"Aqua America",
"Aqua Metals",
"AquaBounty Technologies",
"AquaVenture Holdings Limited",
"Aquantia Corp.",
"Aquestive Therapeutics",
"ArQule",
"Aramark",
"Aravive",
"Arbor Realty Trust",
"Arbutus Biopharma Corporation",
"ArcBest Corporation",
"Arcadia Biosciences",
"ArcelorMittal",
"Arch Capital Group Ltd.",
"Arch Coal",
"Archer-Daniels-Midland Company",
"Archrock",
"Arcimoto",
"Arco Platform Limited",
"Arconic Inc.",
"Arcos Dorados Holdings Inc.",
"Arcosa",
"Arcturus Therapeutics Holdings Inc.",
"Arcus Biosciences",
"Ardagh Group S.A.",
"Ardelyx",
"Ardmore Shipping Corporation",
"Arena Pharmaceuticals",
"Ares Capital Corporation",
"Ares Commercial Real Estate Corporation",
"Ares Dynamic Credit Allocation Fund",
"Ares Management Corporation",
"Argan",
"Argo Group International Holdings",
"Aridis Pharmaceuticals Inc.",
"Arista Networks",
"Ark Restaurants Corp.",
"Arlington Asset Investment Corp",
"Arlo Technologies",
"Armada Hoffler Properties",
"Armata Pharmaceuticals",
"Armstrong Flooring",
"Armstrong World Industries Inc",
"Arotech Corporation",
"Arrow DWA Country Rotation ETF",
"Arrow DWA Tactical ETF",
"Arrow Electronics",
"Arrow Financial Corporation",
"Arrowhead Pharmaceuticals",
"Art's-Way Manufacturing Co.",
"Artelo Biosciences",
"Artesian Resources Corporation",
"Arthur J. Gallagher & Co.",
"Artisan Partners Asset Management Inc.",
"Arvinas",
"Asanko Gold Inc.",
"Asbury Automotive Group Inc",
"Ascena Retail Group",
"Ascendis Pharma A/S",
"Ashford Hospitality Trust Inc",
"Ashford Inc.",
"Ashland Global Holdings Inc.",
"Asia Pacific Wire & Cable Corporation Limited",
"Aspen Aerogels",
"Aspen Group Inc.",
"Aspen Insurance Holdings Limited",
"Aspen Technology",
"Assembly Biosciences",
"Assertio Therapeutics",
"AssetMark Financial Holdings",
"Associated Banc-Corp",
"Associated Capital Group",
"Assurant",
"Assured Guaranty Ltd.",
"Asta Funding",
"Astec Industries",
"Astrazeneca PLC",
"AstroNova",
"Astronics Corporation",
"Astrotech Corporation",
"Asure Software Inc",
"At Home Group Inc.",
"Atara Biotherapeutics",
"Atento S.A.",
"Athene Holding Ltd.",
"Athenex",
"Athersys",
"Atkore International Group Inc.",
"Atlantic American Corporation",
"Atlantic Capital Bancshares",
"Atlantic Power Corporation",
"Atlantic Union Bankshares Corporation",
"Atlantica Yield plc",
"Atlanticus Holdings Corporation",
"Atlas Air Worldwide Holdings",
"Atlas Financial Holdings",
"Atlassian Corporation Plc",
"Atmos Energy Corporation",
"Atomera Incorporated",
"Atossa Genetics Inc.",
"Atreca",
"AtriCure",
"Atrion Corporation",
"Attis Industries Inc.",
"Auburn National Bancorporation",
"Audentes Therapeutics",
"AudioCodes Ltd.",
"AudioEye",
"Aurinia Pharmaceuticals Inc",
"Auris Medical Holding Ltd.",
"Aurora Cannabis Inc.",
"Aurora Mobile Limited",
"Auryn Resources Inc.",
"AutoNation",
"AutoWeb",
"AutoZone",
"Autodesk",
"Autohome Inc.",
"Autoliv",
"Autolus Therapeutics plc",
"Automatic Data Processing",
"Avadel Pharmaceuticals plc",
"Avalara",
"Avalon GloboCare Corp.",
"Avalon Holdings Corporation",
"AvalonBay Communities",
"Avangrid",
"Avanos Medical",
"Avantor",
"Avaya Holdings Corp.",
"Avedro",
"Avenue Therapeutics",
"Avery Dennison Corporation",
"Avianca Holdings S.A.",
"Aviat Networks",
"Avid Bioservices",
"Avid Technology",
"Avinger",
"Avino Silver",
"Avis Budget Group",
"Avista Corporation",
"Avnet",
"Avon Products",
"Aware",
"Axalta Coating Systems Ltd.",
"Axcelis Technologies",
"Axcella Health Inc.",
"Axis Capital Holdings Limited",
"AxoGen",
"Axon Enterprise",
"Axonics Modulation Technologies",
"Axos Financial",
"Axovant Gene Therapies Ltd.",
"Axsome Therapeutics",
"Aytu BioScience",
"Azul S.A.",
"AzurRx BioPharma",
"Azure Power Global Limited",
"B Communications Ltd.",
"B&G Foods",
"B. Riley Financial",
"B. Riley Principal Merger Corp.",
"B.O.S. Better Online Solutions",
"B2Gold Corp",
"BATS BZX Exchange",
"BB&T Corporation",
"BBVA Banco Frances S.A.",
"BBX Capital Corporation",
"BCB Bancorp",
"BCE",
"BELLUS Health Inc.",
"BEST Inc.",
"BG Staffing Inc",
"BGC Partners",
"BHP Group Limited",
"BHP Group Plc",
"BIO-key International",
"BJ's Restaurants",
"BJ's Wholesale Club Holdings",
"BK Technologies Corporation",
"BLACKROCK INTERNATIONAL",
"BMC Stock Holdings",
"BNY Mellon Alcentra Global Credit Income 2024 Target Term Fund",
"BNY Mellon High Yield Strategies Fund",
"BNY Mellon Municipal Bond Infrastructure Fund",
"BNY Mellon Municipal Income Inc.",
"BNY Mellon Strategic Municipal Bond Fund",
"BNY Mellon Strategic Municipals",
"BOK Financial Corporation",
"BP Midstream Partners LP",
"BP Prudhoe Bay Royalty Trust",
"BP p.l.c.",
"BRF S.A.",
"BRP Inc.",
"BRT Apartments Corp.",
"BSQUARE Corporation",
"BT Group plc",
"BWX Technologies",
"Babcock",
"Babson Global Short Duration High Yield Fund",
"Badger Meter",
"Baidu",
"Bain Capital Specialty Finance",
"Baker Hughes",
"Balchem Corporation",
"BalckRock Taxable Municipal Bond Trust",
"Ball Corporation",
"Ballantyne Strong",
"Ballard Power Systems",
"BanColombia S.A.",
"Banc of California",
"BancFirst Corporation",
"Banco Bilbao Viscaya Argentaria S.A.",
"Banco Bradesco Sa",
"Banco De Chile",
"Banco Latinoamericano de Comercio Exterior",
"Banco Santander",
"Banco Santander Brasil SA",
"Banco Santander Chile",
"Banco Santander Mexico",
"Bancorp 34",
"Bancorp of New Jersey",
"BancorpSouth Bank",
"Bancroft Fund Limited",
"Bandwidth Inc.",
"Bank First Corporation",
"Bank OZK",
"Bank Of Montreal",
"Bank Of New York Mellon Corporation (The)",
"Bank of America Corporation",
"Bank of Commerce Holdings (CA)",
"Bank of Hawaii Corporation",
"Bank of Marin Bancorp",
"Bank of N.T. Butterfield & Son Limited (The)",
"Bank of Nova Scotia (The)",
"Bank of South Carolina Corp.",
"Bank of the James Financial Group",
"Bank7 Corp.",
"BankFinancial Corporation",
"BankUnited",
"Bankwell Financial Group",
"Banner Corporation",
"Baozun Inc.",
"Bar Harbor Bankshares",
"Barclays PLC",
"Barings BDC",
"Barings Corporate Investors",
"Barings Participation Investors",
"Barnes & Noble Education",
"Barnes Group",
"Barnwell Industries",
"Barrett Business Services",
"Barrick Gold Corporation",
"Basic Energy Services",
"Bassett Furniture Industries",
"Bat Group",
"Bausch Health Companies Inc.",
"Baxter International Inc.",
"BayCom Corp",
"Baytex Energy Corp",
"Beacon Roofing Supply",
"Beasley Broadcast Group",
"Beazer Homes USA",
"Becton",
"Bed Bath & Beyond Inc.",
"BeiGene",
"Bel Fuse Inc.",
"Belden Inc",
"Bellerophon Therapeutics",
"Bellicum Pharmaceuticals",
"Benchmark Electronics",
"Benefitfocus",
"Benitec Biopharma Limited",
"Berkshire Hathaway Inc.",
"Berkshire Hills Bancorp",
"Berry Global Group",
"Berry Petroleum Corporation",
"Best Buy Co.",
"Beyond Air",
"Beyond Meat",
"BeyondSpring",
"Bicycle Therapeutics plc",
"Big 5 Sporting Goods Corporation",
"Big Lots",
"Big Rock Partners Acquisition Corp.",
"Biglari Holdings Inc.",
"Bilibili Inc.",
"Bio-Path Holdings",
"Bio-Rad Laboratories",
"Bio-Techne Corp",
"BioCardia",
"BioCryst Pharmaceuticals",
"BioDelivery Sciences International",
"BioHiTech Global",
"BioLife Solutions",
"BioLineRx Ltd.",
"BioMarin Pharmaceutical Inc.",
"BioPharmX Corporation",
"BioSig Technologies",
"BioSpecifics Technologies Corp",
"BioTelemetry",
"BioVie Inc.",
"BioXcel Therapeutics",
"Bioanalytical Systems",
"Biocept",
"Bioceres Crop Solutions Corp.",
"Biofrontera AG",
"Biogen Inc.",
"Biohaven Pharmaceutical Holding Company Ltd.",
"Biolase",
"Biomerica",
"Bionano Genomics",
"BiondVax Pharmaceuticals Ltd.",
"Bionik Laboratories Corp.",
"Birks Group Inc.",
"Bitauto Holdings Limited",
"Black Hills Corporation",
"Black Knight",
"Black Stone Minerals",
"BlackBerry Limited",
"BlackLine",
"BlackRock",
"BlackRock 2022 Global Income Opportunity Trust",
"BlackRock California Municipal Income Trust",
"BlackRock Capital Investment Corporation",
"BlackRock Credit Allocation Income Trust",
"BlackRock Energy and Resources Trust",
"BlackRock Income Investment Quality Trust",
"BlackRock Income Trust Inc. (The)",
"BlackRock Investment Quality Municipal Trust Inc. (The)",
"BlackRock Long-Term Municipal Advantage Trust",
"BlackRock Maryland Municipal Bond Trust",
"BlackRock Massachusetts Tax-Exempt Trust",
"BlackRock Multi-Sector Income Trust",
"BlackRock Municipal Income Investment Trust",
"BlackRock Municipal Income Trust",
"BlackRock Municipal Income Trust II",
"BlackRock Municipal Target Term Trust Inc. (The)",
"BlackRock New York Investment Quality Municipal Trust Inc. (Th",
"BlackRock New York Municipal Income Trust II",
"BlackRock Resources",
"BlackRock Science and Technology Trust",
"BlackRock Science and Technology Trust II",
"BlackRock Strategic Municipal Trust Inc. (The)",
"BlackRock TCP Capital Corp.",
"BlackRock Utility",
"BlackRock Virginia Municipal Bond Trust",
"Blackbaud",
"Blackrock Capital and Income Strategies Fund Inc",
"Blackrock Core Bond Trust",
"Blackrock Corporate High Yield Fund",
"Blackrock Debt Strategies Fund",
"Blackrock Enhanced Equity Dividend Trust",
"Blackrock Enhanced Government Fund",
"Blackrock Floating Rate Income Strategies Fund Inc",
"Blackrock Florida Municipal 2020 Term Trust",
"Blackrock Global",
"Blackrock Health Sciences Trust",
"Blackrock Muni Intermediate Duration Fund Inc",
"Blackrock Muni New York Intermediate Duration Fund Inc",
"Blackrock MuniAssets Fund",
"Blackrock MuniEnhanced Fund",
"Blackrock MuniHoldings California Quality Fund",
"Blackrock MuniHoldings Fund",
"Blackrock MuniHoldings Fund II",
"Blackrock MuniHoldings Investment Quality Fund",
"Blackrock MuniHoldings New Jersey Insured Fund",
"Blackrock MuniHoldings New York Quality Fund",
"Blackrock MuniHoldings Quality Fund",
"Blackrock MuniHoldings Quality Fund II",
"Blackrock MuniVest Fund II",
"Blackrock MuniYield California Fund",
"Blackrock MuniYield California Insured Fund",
"Blackrock MuniYield Fund",
"Blackrock MuniYield Investment Fund",
"Blackrock MuniYield Investment QualityFund",
"Blackrock MuniYield Michigan Quality Fund",
"Blackrock MuniYield New Jersey Fund",
"Blackrock MuniYield New York Quality Fund",
"Blackrock MuniYield Pennsylvania Quality Fund",
"Blackrock MuniYield Quality Fund",
"Blackrock MuniYield Quality Fund II",
"Blackrock MuniYield Quality Fund III",
"Blackrock Municipal 2020 Term Trust",
"Blackrock Municipal Bond Trust",
"Blackrock Municipal Income Quality Trust",
"Blackrock New York Municipal Bond Trust",
"Blackrock New York Municipal Income Quality Trust",
"Blackstone / GSO Strategic Credit Fund",
"Blackstone GSO Long Short Credit Income Fund",
"Blackstone GSO Senior Floating Rate Term Fund",
"Blink Charging Co.",
"Blonder Tongue Laboratories",
"Bloom Energy Corporation",
"Bloomin' Brands",
"Blucora",
"Blue Apron Holdings",
"Blue Bird Corporation",
"Blue Capital Reinsurance Holdings Ltd.",
"Blue Hat Interactive Entertainment Technology",
"BlueLinx Holdings Inc.",
"BlueStar Israel Technology ETF",
"Bluegreen Vacations Corporation",
"Blueknight Energy Partners L.P.",
"Blueprint Medicines Corporation",
"Bluerock Residential Growth REIT",
"Boeing Company (The)",
"Boingo Wireless",
"Boise Cascade",
"Bonanza Creek Energy",
"Bonso Electronics International",
"Booking Holdings Inc.",
"Boot Barn Holdings",
"Booz Allen Hamilton Holding Corporation",
"BorgWarner Inc.",
"Borqs Technologies",
"Borr Drilling Limited",
"Boston Beer Company",
"Boston Omaha Corporation",
"Boston Private Financial Holdings",
"Boston Properties",
"Boston Scientific Corporation",
"Bottomline Technologies",
"Bowl America",
"Box",
"Boxlight Corporation",
"Boxwood Merger Corp.",
"Boyd Gaming Corporation",
"Brady Corporation",
"Braeburn Pharmaceuticals",
"Braemar Hotels & Resorts Inc.",
"Brainstorm Cell Therapeutics Inc.",
"Brainsway Ltd.",
"Brandywine Realty Trust",
"BrandywineGLOBAL Global Income Opportunities Fund ",
"Brasilagro Cia Brasileira De Propriedades Agricolas",
"Brickell Biotech",
"Bridge Bancorp",
"BridgeBio Pharma",
"Bridgeline Digital",
"Bridgewater Bancshares",
"Bridgford Foods Corporation",
"Briggs & Stratton Corporation",
"Brigham Minerals",
"Bright Horizons Family Solutions Inc.",
"Bright Scholar Education Holdings Limited",
"BrightSphere Investment Group Inc.",
"BrightView Holdings",
"Brightcove Inc.",
"Brighthouse Financial",
"Brink's Company (The)",
"Brinker International",
"Bristol-Myers Squibb Company",
"British American Tobacco p.l.c.",
"Brixmor Property Group Inc.",
"BroadVision",
"Broadcom Inc.",
"Broadridge Financial Solutions",
"Broadway Financial Corporation",
"Broadwind Energy",
"Brookdale Senior Living Inc.",
"Brookfield Asset Management Inc",
"Brookfield Business Partners L.P.",
"Brookfield DTLA Inc.",
"Brookfield Global Listed Infrastructure Income Fund",
"Brookfield Infrastructure Partners LP",
"Brookfield Property Partners L.P.",
"Brookfield Property REIT Inc.",
"Brookfield Real Assets Income Fund Inc.",
"Brookfield Renewable Partners L.P.",
"Brookline Bancorp",
"Brooks Automation",
"Brown & Brown",
"Brown Forman Corporation",
"Bruker Corporation",
"Brunswick Corporation",
"Bryn Mawr Bank Corporation",
"Buckeye Partners L.P.",
"Buckle",
"Buenaventura Mining Company Inc.",
"Build-A-Bear Workshop",
"Builders FirstSource",
"Bunge Limited",
"Burlington Stores",
"Business First Bancshares",
"Byline Bancorp",
"C&F Financial Corporation",
"C&J Energy Services",
"C.H. Robinson Worldwide",
"CABCO Series 2004-101 Trust",
"CACI International",
"CAE Inc",
"CAI International",
"CASI Pharmaceuticals",
"CB Financial Services",
"CBAK Energy Technology",
"CBIZ",
"CBL & Associates Properties",
"CBM Bancorp",
"CBO (Listing Market - NYSE - Networks A/E)",
"CBRE Clarion Global Real Estate Income Fund",
"CBRE Group",
"CBS Corporation",
"CBTX",
"CBX (Listing Market NYSE Networks AE",
"CDK Global",
"CDW Corporation",
"CECO Environmental Corp.",
"CEVA",
"CF Finance Acquisition Corp.",
"CF Industries Holdings",
"CGI Inc.",
"CHF Solutions",
"CHS Inc",
"CIM Commercial Trust Corporation",
"CIRCOR International",
"CIT Group Inc (DEL)",
"CKX Lands",
"CLPS Incorporation",
"CME Group Inc.",
"CMS Energy Corporation",
"CNA Financial Corporation",
"CNB Financial Corporation",
"CNFinance Holdings Limited",
"CNH Industrial N.V.",
"CNO Financial Group",
"CNOOC Limited",
"CNX Midstream Partners LP",
"CNX Resources Corporation",
"CONMED Corporation",
"CONSOL Coal Resources LP",
"CPB Inc.",
"CPFL Energia S.A.",
"CPI Aerostructures",
"CPI Card Group Inc.",
"CPS Technologies Corp.",
"CRA International",
"CRH Medical Corporation",
"CRH PLC",
"CRISPR Therapeutics AG",
"CSG Systems International",
"CSI Compressco LP",
"CSP Inc.",
"CSS Industries",
"CSW Industrials",
"CSX Corporation",
"CTI BioPharma Corp.",
"CTI Industries Corporation",
"CTS Corporation",
"CUI Global",
"CURO Group Holdings Corp.",
"CVB Financial Corporation",
"CVD Equipment Corporation",
"CVR Energy Inc.",
"CVR Partners",
"CVS Health Corporation",
"CYREN Ltd.",
"Cable One",
"Cabot Corporation",
"Cabot Microelectronics Corporation",
"Cabot Oil & Gas Corporation",
"Cactus",
"Cadence Bancorporation",
"Cadence Design Systems",
"Cadiz",
"Caesars Entertainment Corporation",
"Caesarstone Ltd.",
"Cal-Maine Foods",
"CalAmp Corp.",
"Caladrius Biosciences",
"Calamos Convertible Opportunities and Income Fund",
"Calamos Convertible and High Income Fund",
"Calamos Dynamic Convertible & Income Fund",
"Calamos Global Dynamic Income Fund",
"Calamos Global Total Return Fund",
"Calamos Strategic Total Return Fund",
"Calavo Growers",
"Caledonia Mining Corporation Plc",
"Caleres",
"California Resources Corporation",
"California Water Service Group Holding",
"Calithera Biosciences",
"Calix",
"Callaway Golf Company",
"Callon Petroleum Company",
"Calumet Specialty Products Partners",
"Calyxt",
"Camber Energy",
"Cambium Networks Corporation",
"Cambrex Corporation",
"Cambridge Bancorp",
"Camden National Corporation",
"Camden Property Trust",
"Cameco Corporation",
"Campbell Soup Company",
"Camping World Holdings",
"Camtek Ltd.",
"Can-Fite Biopharma Ltd",
"Canada Goose Holdings Inc.",
"Canadian Imperial Bank of Commerce",
"Canadian National Railway Company",
"Canadian Natural Resources Limited",
"Canadian Pacific Railway Limited",
"Canadian Solar Inc.",
"Cancer Genetics",
"Cango Inc.",
"CannTrust Holdings Inc.",
"Cannae Holdings",
"Canon",
"Canopy Growth Corporation",
"Cantel Medical Corp.",
"Canterbury Park Holding Corporation",
"CapStar Financial Holdings",
"Capital Bancorp",
"Capital City Bank Group",
"Capital One Financial Corporation",
"Capital Product Partners L.P.",
"Capital Senior Living Corporation",
"Capital Southwest Corporation",
"Capital Trust",
"Capitala Finance Corp.",
"Capitol Federal Financial",
"Capri Holdings Limited",
"Capricor Therapeutics",
"Capstead Mortgage Corporation",
"Capstone Turbine Corporation",
"CarGurus",
"CarMax Inc",
"Cara Therapeutics",
"Carbo Ceramics",
"Carbon Black",
"Carbonite",
"Cardinal Health",
"Cardiovascular Systems",
"Cardlytics",
"Cardtronics plc",
"Care.com",
"CareDx",
"CareTrust REIT",
"Career Education Corporation",
"Carlisle Companies Incorporated",
"Carnival Corporation",
"Carolina Financial Corporation",
"Carolina Trust BancShares",
"Carpenter Technology Corporation",
"Carriage Services",
"Carrizo Oil & Gas",
"Carrols Restaurant Group",
"Cars.com Inc.",
"Carter Bank & Trust",
"Carter's",
"Carvana Co.",
"Carver Bancorp",
"Casa Systems",
"Casella Waste Systems",
"Caseys General Stores",
"Cass Information Systems",
"Cassava Sciences",
"Castle Biosciences",
"Castle Brands",
"Castlight Health",
"Castor Maritime Inc.",
"Catabasis Pharmaceuticals",
"Catalent",
"Catalyst Biosciences",
"Catalyst Pharmaceuticals",
"Catasys",
"CatchMark Timber Trust",
"Caterpillar",
"Cathay General Bancorp",
"Cato Corporation (The)",
"Cavco Industries",
"Cboe Global Markets",
"Cedar Fair",
"Cedar Realty Trust",
"Cel-Sci Corporation",
"Celanese Corporation",
"Celcuity Inc.",
"Celestica",
"Celgene Corporation",
"Cellcom Israel",
"Celldex Therapeutics",
"Cellect Biotechnology Ltd.",
"Cellectar Biosciences",
"Cellectis S.A.",
"Cellular Biomedicine Group",
"Celsion Corporation",
"Celsius Holdings",
"Celyad SA",
"Cementos Pacasmayo S.A.A.",
"Cemex S.A.B. de C.V.",
"Cemtrex Inc.",
"Cenovus Energy Inc",
"Centene Corporation",
"Centennial Resource Development",
"Center Coast Brookfield MLP & Energy Infrastructur",
"CenterPoint Energy",
"CenterState Bank Corporation",
"Centrais Electricas Brasileiras S.A.- Eletrobras",
"Central European Media Enterprises Ltd.",
"Central Federal Corporation",
"Central Garden & Pet Company",
"Central Puerto S.A.",
"Central Securities Corporation",
"Central Valley Community Bancorp",
"Centrexion Therapeutics Corporation",
"Centric Brands Inc.",
"Centrus Energy Corp.",
"Century Aluminum Company",
"Century Bancorp",
"Century Casinos",
"Century Communities",
"CenturyLink",
"Ceragon Networks Ltd.",
"Cerecor Inc.",
"Ceridian HCM Holding Inc.",
"Cerner Corporation",
"Cerus Corporation",
"Cesca Therapeutics Inc.",
"ChaSerg Technology Acquisition Corp.",
"Champions Oncology",
"Change Healthcare Inc.",
"Changyou.com Limited",
"ChannelAdvisor Corporation",
"Chanticleer Holdings",
"Chaparral Energy",
"Charah Solutions",
"Chardan Healthcare Acquisition Corp.",
"Charles & Colvard Ltd.",
"Charles River Laboratories International",
"Chart Industries",
"Charter Communications",
"Chase Corporation",
"Chatham Lodging Trust (REIT)",
"Check Point Software Technologies Ltd.",
"Check-Cap Ltd.",
"Checkpoint Therapeutics",
"Cheetah Mobile Inc.",
"Chegg",
"Chembio Diagnostics",
"Chemed Corp.",
"ChemoCentryx",
"Chemours Company (The)",
"Chemung Financial Corp",
"Cheniere Energy",
"Cheniere Energy Partners",
"Cherry Hill Mortgage Investment Corporation",
"Chesapeake Energy Corporation",
"Chesapeake Granite Wash Trust",
"Chesapeake Lodging Trust",
"Chesapeake Utilities Corporation",
"Chevron Corporation",
"Chewy",
"Chiasma",
"Chicago Rivet & Machine Co.",
"Chicken Soup for the Soul Entertainment",
"Chico's FAS",
"Children's Place",
"Chimera Investment Corporation",
"Chimerix",
"China Automotive Systems",
"China Biologic Products Holdings",
"China Ceramics Co.",
"China Customer Relations Centers",
"China Distance Education Holdings Limited",
"China Eastern Airlines Corporation Ltd.",
"China Finance Online Co. Limited",
"China Fund",
"China Green Agriculture",
"China HGS Real Estate",
"China Index Holdings Limited",
"China Internet Nationwide Financial Services Inc.",
"China Jo-Jo Drugstores",
"China Life Insurance Company Limited",
"China Mobile (Hong Kong) Ltd.",
"China Natural Resources",
"China Online Education Group",
"China Petroleum & Chemical Corporation",
"China Pharma Holdings",
"China Rapid Finance Limited",
"China Recycling Energy Corporation",
"China SXT Pharmaceuticals",
"China Southern Airlines Company Limited",
"China TechFaith Wireless Communication Technology Limited",
"China Telecom Corp Ltd",
"China Unicom (Hong Kong) Ltd",
"China XD Plastics Company Limited",
"China Xiangtai Food Co.",
"China Yuchai International Limited",
"ChinaNet Online Holdings",
"ChipMOS TECHNOLOGIES INC.",
"Chipotle Mexican Grill",
"Choice Hotels International",
"ChromaDex Corporation",
"Chubb Limited",
"Chunghwa Telecom Co.",
"Church & Dwight Company",
"Churchill Capital Corp II",
"Churchill Downs",
"Chuy's Holdings",
"Cibus Corp.",
"Cidara Therapeutics",
"Ciena Corporation",
"Cigna Corporation",
"Cimarex Energy Co",
"Cimpress N.V",
"Cincinnati Bell Inc",
"Cincinnati Financial Corporation",
"Cinedigm Corp",
"Cinemark Holdings Inc",
"Ciner Resources LP",
"Cintas Corporation",
"Cirrus Logic",
"Cisco Systems",
"Cision Ltd.",
"Citi Trends",
"Citigroup Inc.",
"Citius Pharmaceuticals",
"Citizens",
"Citizens & Northern Corp",
"Citizens Community Bancorp",
"Citizens Financial Group",
"Citizens Holding Company",
"Citrix Systems",
"City Holding Company",
"City Office REIT",
"Civeo Corporation",
"Civista Bancshares",
"Clarivate Analytics Plc",
"Clarus Corporation",
"Clean Energy Fuels Corp.",
"Clean Harbors",
"Clear Channel Outdoor Holdings",
"ClearBridge All Cap Growth ETF",
"ClearBridge Dividend Strategy ESG ETF",
"ClearBridge Energy Midstream Opportunity Fund Inc.",
"ClearBridge Large Cap Growth ESG ETF",
"ClearBridge MLP and Midstream Fund Inc.",
"ClearBridge MLP and Midstream Total Return Fund In",
"ClearOne",
"ClearSign Combustion Corporation",
"Clearfield",
"Clearside Biomedical",
"Clearwater Paper Corporation",
"Clearway Energy",
"Cleveland BioLabs",
"Cleveland-Cliffs Inc.",
"Clipper Realty Inc.",
"Clorox Company (The)",
"Cloudera",
"Clough Global Dividend and Income Fund",
"Clough Global Equity Fund",
"Clough Global Opportunities Fund",
"Clovis Oncology",
"Co-Diagnostics",
"CoStar Group",
"Coastal Financial Corporation",
"Coca Cola Femsa S.A.B. de C.V.",
"Coca-Cola Company (The)",
"Coca-Cola Consolidated",
"Coca-Cola European Partners plc",
"Cocrystal Pharma",
"Coda Octopus Group",
"Codexis",
"Codorus Valley Bancorp",
"Coeur Mining",
"Coffee Holding Co.",
"Cogent Communications Holdings",
"Cognex Corporation",
"Cognizant Technology Solutions Corporation",
"CohBar",
"Cohen & Company Inc.",
"Cohen & Steers Closed-End Opportunity Fund",
"Cohen & Steers Global Income Builder",
"Cohen & Steers Inc",
"Cohen & Steers Infrastructure Fund",
"Cohen & Steers Limited Duration Preferred and Income Fund",
"Cohen & Steers MLP Income and Energy Opportunity Fund",
"Cohen & Steers Quality Income Realty Fund Inc",
"Cohen & Steers REIT and Preferred and Income Fund",
"Cohen & Steers Select Preferred and Income Fund",
"Cohen & Steers Total Return Realty Fund",
"Coherent",
"Coherus BioSciences",
"Cohu",
"Colfax Corporation",
"Colgate-Palmolive Company",
"CollPlant Biotechnologies Ltd.",
"Collectors Universe",
"Collegium Pharmaceutical",
"Collier Creek Holdings",
"Colliers International Group Inc. ",
"Colonial High Income Municipal Trust",
"Colonial Intermediate High Income Fund",
"Colonial Investment Grade Municipal Trust",
"Colonial Municipal Income Trust",
"Colony Bankcorp",
"Colony Capital",
"Colony Credit Real Estate",
"Columbia Banking System",
"Columbia Financial",
"Columbia Property Trust",
"Columbia Seligman Premium Technology Growth Fund",
"Columbia Sportswear Company",
"Columbus McKinnon Corporation",
"Comcast Corporation",
"Comerica Incorporated",
"Comfort Systems USA",
"CommScope Holding Company",
"Commerce Bancshares",
"Commercial Metals Company",
"Commercial Vehicle Group",
"Communications Systems",
"Community Bank System",
"Community Bankers Trust Corporation.",
"Community First Bancshares",
"Community Health Systems",
"Community Healthcare Trust Incorporated",
"Community Trust Bancorp",
"Community West Bancshares",
"Commvault Systems",
"Comp En De Mn Cemig ADS",
"CompX International Inc.",
"Companhia Brasileira de Distribuicao",
"Companhia Paranaense de Energia (COPEL)",
"Companhia de saneamento Basico Do Estado De Sao Paulo - Sabesp",
"Compania Cervecerias Unidas",
"Compass Diversified Holdings",
"Compass Minerals International",
"Compugen Ltd.",
"Computer Programs and Systems",
"Computer Task Group",
"Comstock Holding Companies",
"Comstock Mining",
"Comstock Resources",
"Comtech Telecommunications Corp.",
"ConAgra Brands",
"Conatus Pharmaceuticals Inc.",
"Concert Pharmaceuticals",
"Concho Resources Inc.",
"Concord Medical Services Holdings Limited",
"Concrete Pumping Holdings",
"Condor Hospitality Trust",
"Conduent Incorporated",
"ConforMIS",
"Conifer Holdings",
"Conn's",
"ConnectOne Bancorp",
"Connecticut Water Service",
"ConocoPhillips",
"Consolidated Communications Holdings",
"Consolidated Edison Inc",
"Consolidated Water Co. Ltd.",
"Consolidated-Tomoka Land Co.",
"Constellation Brands Inc",
"Constellation Pharmaceuticals",
"Constellium SE",
"Construction Partners",
"Consumer Portfolio Services",
"Container Store (The)",
"Contango Oil & Gas Company",
"Continental Building Products",
"Continental Materials Corporation",
"Continental Resources",
"ContraFect Corporation",
"Controladora Vuela Compania de Aviacion",
"Contura Energy",
"ConversionPoint Holdings",
"Conyers Park II Acquisition Corp.",
"CooTek (Cayman) Inc.",
"Cool Holdings Inc.",
"Cooper Tire & Rubber Company",
"Cooper-Standard Holdings Inc.",
"Copa Holdings",
"Copart",
"CorEnergy Infrastructure Trust",
"CorMedix Inc.",
"CorVel Corp.",
"Corbus Pharmaceuticals Holdings",
"Corcept Therapeutics Incorporated",
"Core Laboratories N.V.",
"Core Molding Technologies Inc",
"Core-Mark Holding Company",
"CoreCivic",
"CoreLogic",
"CorePoint Lodging Inc.",
"CoreSite Realty Corporation",
"Corindus Vascular Robotics",
"Cornerstone Building Brands",
"Cornerstone OnDemand",
"Cornerstone Strategic Return Fund",
"Cornerstone Strategic Value Fund",
"Corning Incorporated",
"Corporacion America Airports SA",
"Corporate Asset Backed Corp CABCO",
"Corporate Office Properties Trust",
"Correvio Pharma Corp.",
"Corteva",
"Cortexyme",
"Cortland Bancorp",
"Corvus Pharmaceuticals",
"Cosan Limited",
"Costamare Inc.",
"Costco Wholesale Corporation",
"Cott Corporation",
"Coty Inc.",
"CounterPath Corporation",
"County Bancorp",
"Coupa Software Incorporated",
"Cousins Properties Incorporated",
"Covanta Holding Corporation",
"Covenant Transportation Group",
"Covetrus",
"Covia Holdings Corporation",
"Cowen Inc.",
"Cracker Barrel Old Country Store",
"Craft Brew Alliance",
"Crane Co.",
"Crawford & Company",
"Cray Inc",
"Creative Realities",
"Credicorp Ltd.",
"Credit Acceptance Corporation",
"Credit Suisse AG",
"Credit Suisse Asset Management Income Fund",
"Credit Suisse Group",
"Credit Suisse High Yield Bond Fund",
"Cree",
"Crescent Acquisition Corp",
"Crescent Point Energy Corporation",
"Crestwood Equity Partners LP",
"Cresud S.A.C.I.F. y A.",
"Crinetics Pharmaceuticals",
"Criteo S.A.",
"Crocs",
"Cronos Group Inc.",
"Cross Country Healthcare",
"Cross Timbers Royalty Trust",
"CrossAmerica Partners LP",
"CrossFirst Bankshares",
"CrowdStrike Holdings",
"Crown Castle International Corporation",
"Crown Crafts",
"Crown Holdings",
"CryoLife",
"CryoPort",
"Ctrip.com International",
"CubeSmart",
"Cubic Corporation",
"Cue Biopharma",
"Cullen/Frost Bankers",
"Culp",
"Cumberland Pharmaceuticals Inc.",
"Cummins Inc.",
"Cumulus Media Inc.",
"Curis",
"Curtiss-Wright Corporation",
"Cushing Energy Income Fund (The)",
"Cushing MLP & Infrastructure Total Return Fund",
"Cushing Renaissance Fund (The)",
"Cushman & Wakefield plc",
"Customers Bancorp",
"Cutera",
"Cyanotech Corporation",
"CyberArk Software Ltd.",
"CyberOptics Corporation",
"Cyclacel Pharmaceuticals",
"Cyclerion Therapeutics",
"CymaBay Therapeutics Inc.",
"CynergisTek",
"Cypress Energy Partners",
"Cypress Semiconductor Corporation",
"CyrusOne Inc",
"Cytokinetics",
"CytomX Therapeutics",
"Cytosorbents Corporation",
"D.R. Horton",
"DAQO New Energy Corp.",
"DASAN Zhone Solutions",
"DAVIDsTEA Inc.",
"DBV Technologies S.A.",
"DCP Midstream LP",
"DD3 Acquisition Corp.",
"DENTSPLY SIRONA Inc.",
"DERMAdoctor",
"DFB Healthcare Acquisitions Corp.",
"DGSE Companies",
"DHI Group",
"DHT Holdings",
"DHX Media Ltd.",
"DISH Network Corporation",
"DLH Holdings Corp.",
"DMC Global Inc.",
"DNB Financial Corp",
"DPW Holdings",
"DRDGOLD Limited",
"DSP Group",
"DTE Energy Company",
"DURECT Corporation",
"DXC Technology Company",
"DXP Enterprises",
"DaVita Inc.",
"Daily Journal Corp. (S.C.)",
"Daktronics",
"Dana Incorporated",
"Danaher Corporation",
"Danaos Corporation",
"Darden Restaurants",
"Dare Bioscience",
"DarioHealth Corp.",
"Darling Ingredients Inc.",
"Daseke",
"Data I/O Corporation",
"Datasea Inc.",
"Dave & Buster's Entertainment",
"Davis Select Financial ETF",
"Davis Select International ETF",
"Davis Select U.S. Equity ETF",
"Davis Select Worldwide ETF",
"Dawson Geophysical Company",
"Daxor Corporation",
"Dean Foods Company",
"Deciphera Pharmaceuticals",
"Deckers Outdoor Corporation",
"Deere & Company",
"Del Frisco's Restaurant Group",
"Del Taco Restaurants",
"DelMar Pharmaceuticals",
"Delaware Enhanced Global Dividend",
"Delaware Investments Colorado Municipal Income Fund",
"Delaware Investments Dividend & Income Fund",
"Delaware Investments Florida Insured Municipal Income Fund",
"Delaware Investments Minnesota Municipal Income Fund II",
"Delek Logistics Partners",
"Delek US Holdings",
"Dell Technologies Inc.",
"Delphi Technologies PLC",
"Delta Air Lines",
"Delta Apparel",
"Deluxe Corporation",
"Denali Therapeutics Inc.",
"Denbury Resources Inc.",
"Denison Mine Corp",
"Denny's Corporation",
"DermTech",
"Dermavant Sciences Ltd.",
"Dermira",
"Designer Brands Inc.",
"Despegar.com",
"Destination Maternity Corporation",
"Destination XL Group",
"Deswell Industries",
"Deutsch Bk Contingent Cap Tr V",
"Deutsche Bank AG",
"Devon Energy Corporation",
"DexCom",
"DiaMedica Therapeutics Inc.",
"Diageo plc",
"Diamond Eagle Acquisition Corp.",
"Diamond Hill Investment Group",
"Diamond Offshore Drilling",
"Diamond S Shipping Inc.",
"DiamondPeak Holdings Corp.",
"Diamondback Energy",
"Diamondrock Hospitality Company",
"Diana Shipping inc.",
"Dicerna Pharmaceuticals",
"Dick's Sporting Goods Inc",
"Diebold Nixdorf Incorporated",
"Diffusion Pharmaceuticals Inc.",
"Digi International Inc.",
"Digimarc Corporation",
"Digirad Corporation",
"Digital Ally",
"Digital Realty Trust",
"Digital Turbine",
"Dillard's",
"Dime Community Bancshares",
"Dine Brands Global",
"Diodes Incorporated",
"Diplomat Pharmacy",
"Discover Financial Services",
"Discovery",
"Diversified Restaurant Holdings",
"Dividend and Income Fund",
"DocuSign",
"Document Security Systems",
"Dogness (International) Corporation",
"Dolby Laboratories",
"Dollar General Corporation",
"Dollar Tree",
"Dolphin Entertainment",
"Dominion Energy",
"Domino's Pizza Inc",
"Domo",
"Domtar Corporation",
"Donaldson Company",
"Donegal Group",
"Donnelley Financial Solutions",
"Dorchester Minerals",
"Dorian LPG Ltd.",
"Dorman Products",
"DouYu International Holdings Limited",
"DoubleLine Income Solutions Fund",
"DoubleLine Opportunistic Credit Fund",
"Douglas Dynamics",
"Douglas Emmett",
"Dova Pharmaceuticals",
"Dover Corporation",
"Dover Motorsports",
"Dow Inc.",
"Dr. Reddy's Laboratories Ltd",
"Dragon Victory International Limited",
"Dril-Quip",
"Drive Shack Inc.",
"DropCar",
"Dropbox",
"DryShips Inc.",
"DuPont de Nemours",
"Ducommun Incorporated",
"Duff & Phelps Global Utility Income Fund Inc.",
"Duff & Phelps Select MLP and Midstream Energy Fund",
"Duff & Phelps Utilities Income",
"Duff & Phelps Utilities Tax-Free Income",
"Duff & Phelps Utility & Corporate Bond Trust",
"Duke Energy Corporation",
"Duke Realty Corporation",
"Duluth Holdings Inc.",
"Dunkin' Brands Group",
"Dunxin Financial Holdings Limited",
"Dyadic International",
"Dycom Industries",
"Dynagas LNG Partners LP",
"Dynatrace",
"Dynatronics Corporation",
"Dynavax Technologies Corporation",
"Dynex Capital",
"E*TRADE Financial Corporation",
"E.I. du Pont de Nemours and Company",
"E.W. Scripps Company (The)",
"ECA Marcellus Trust I",
"EDAP TMS S.A.",
"EMC Insurance Group Inc.",
"EMCOR Group",
"EMCORE Corporation",
"EMX Royalty Corporation",
"ENDRA Life Sciences Inc.",
"ENGlobal Corporation",
"ENI S.p.A.",
"ENSERVCO Corporation",
"EOG Resources",
"EPAM Systems",
"EPR Properties",
"EQM Midstream Partners",
"EQT Corporation",
"ESCO Technologies Inc.",
"ESSA Bancorp",
"ESSA Pharma Inc.",
"ETF Series Solutions Trust Vident Core U.S. Bond Strategy Fund",
"ETF Series Solutions Trust Vident Core US Equity ETF",
"ETF Series Solutions Trust Vident International Equity Fund",
"EVI Industries",
"EVO Payments",
"EXFO Inc",
"EZCORP",
"Eagle Bancorp",
"Eagle Bancorp Montana",
"Eagle Bulk Shipping Inc.",
"Eagle Capital Growth Fund",
"Eagle Financial Bancorp",
"Eagle Growth and Income Opportunities Fund",
"Eagle Materials Inc",
"Eagle Pharmaceuticals",
"Eagle Point Credit Company Inc.",
"Eagle Point Income Company Inc.",
"Eagleline Acquisition Corp.",
"Earthstone Energy",
"East West Bancorp",
"EastGroup Properties",
"Easterly Government Properties",
"Eastern Company (The)",
"Eastman Chemical Company",
"Eastman Kodak Company",
"Eastside Distilling",
"Eaton Corporation",
"Eaton Vance California Municipal Bond Fund",
"Eaton Vance California Municipal Income Trust",
"Eaton Vance Corporation",
"Eaton Vance Enhance Equity Income Fund",
"Eaton Vance Enhanced Equity Income Fund II",
"Eaton Vance Floating Rate Income Trust",
"Eaton Vance Floating-Rate 2022 Target Term Trust",
"Eaton Vance High Income 2021 Target Term Trust",
"Eaton Vance Limited Duration Income Fund",
"Eaton Vance Municipal Bond Fund",
"Eaton Vance Municipal Income 2028 Term Trust",
"Eaton Vance Municipal Income Trust",
"Eaton Vance New York Municipal Bond Fund",
"Eaton Vance New York Municipal Income Trust",
"Eaton Vance NextShares Trust",
"Eaton Vance NextShares Trust II",
"Eaton Vance Risk-Managed Diversified Equity Income Fund",
"Eaton Vance Senior Floating-Rate Fund",
"Eaton Vance Senior Income Trust",
"Eaton Vance Short Diversified Income Fund",
"Eaton Vance Tax Advantaged Dividend Income Fund",
"Eaton Vance Tax-Advantage Global Dividend Opp",
"Eaton Vance Tax-Advantaged Global Dividend Income Fund",
"Eaton Vance Tax-Managed Buy-Write Income Fund",
"Eaton Vance Tax-Managed Buy-Write Strategy Fund",
"Eaton Vance Tax-Managed Diversified Equity Income Fund",
"Eaton Vance Tax-Managed Global Diversified Equity Income Fund",
"Eaton vance Floating-Rate Income Plus Fund",
"Ebix",
"Echo Global Logistics",
"EchoStar Corporation",
"Ecolab Inc.",
"Ecology and Environment",
"Ecopetrol S.A.",
"Edesa Biotech",
"Edison International",
"Edison Nation",
"Editas Medicine",
"EdtechX Holdings Acquisition Corp.",
"Educational Development Corporation",
"Edwards Lifesciences Corporation",
"Eidos Therapeutics",
"Eiger BioPharmaceuticals",
"Ekso Bionics Holdings",
"El Paso Corporation",
"El Paso Electric Company",
"El Pollo Loco Holdings",
"Elanco Animal Health Incorporated",
"Elastic N.V.",
"Elbit Systems Ltd.",
"Eldorado Gold Corporation",
"Eldorado Resorts",
"Electrameccanica Vehicles Corp. Ltd.",
"Electro-Sensors",
"Electromed",
"Electronic Arts Inc.",
"Element Solutions Inc.",
"Elevate Credit",
"Eli Lilly and Company",
"Ellington Financial Inc.",
"Ellington Residential Mortgage REIT",
"Ellomay Capital Ltd.",
"Ellsworth Growth and Income Fund Ltd.",
"Elmira Savings Bank NY (The)",
"Eloxx Pharmaceuticals",
"Eltek Ltd.",
"Embotelladora Andina S.A.",
"Embraer S.A.",
"Emclaire Financial Corp",
"Emerald Expositions Events",
"Emergent Biosolutions",
"Emerson Electric Company",
"Emerson Radio Corporation",
"Emmis Communications Corporation",
"Empire Resorts",
"Empire State Realty Trust",
"Employers Holdings Inc",
"Empresa Distribuidora Y Comercializadora Norte S.A. (Edenor)",
"EnLink Midstream",
"EnPro Industries",
"Enable Midstream Partners",
"Enanta Pharmaceuticals",
"Enbridge Inc",
"Encana Corporation",
"Encompass Health Corporation",
"Encore Capital Group Inc",
"Encore Wire Corporation",
"Endava plc",
"Endeavour Silver Corporation",
"Endo International plc",
"Endologix",
"Endurance International Group Holdings",
"Enel Americas S.A.",
"Enel Chile S.A.",
"Energizer Holdings",
"Energous Corporation",
"Energy Focus",
"Energy Fuels Inc",
"Energy Hunter Resources",
"Energy Recovery",
"Energy Transfer L.P.",
"Energy Transfer Operating",
"Enerplus Corporation",
"Enersys",
"Enlivex Therapeutics Ltd.",
"Ennis",
"Enochian Biosciences",
"Enova International",
"Enphase Energy",
"Enstar Group Limited",
"Entasis Therapeutics Holdings Inc.",
"Entegra Financial Corp.",
"Entegris",
"Entera Bio Ltd.",
"Entercom Communications Corp.",
"Entergy Arkansas",
"Entergy Corporation",
"Entergy Louisiana",
"Entergy Mississippi",
"Entergy New Orleans",
"Entergy Texas Inc",
"Enterprise Bancorp Inc",
"Enterprise Financial Services Corporation",
"Enterprise Products Partners L.P.",
"Entravision Communications Corporation",
"Entree Resources Ltd.",
"Envestnet",
"Envision Solar International",
"Enviva Partners",
"Enzo Biochem",
"Epizyme",
"Epsilon Energy Ltd.",
"Equifax",
"Equillium",
"Equinix",
"Equinor ASA",
"Equitrans Midstream Corporation",
"Equity Bancshares",
"Equity Commonwealth",
"Equity Lifestyle Properties",
"Equity Residential",
"Equus Total Return",
"Era Group",
"Ericsson",
"Erie Indemnity Company",
"Eros International PLC",
"Erytech Pharma S.A.",
"Escalade",
"Esperion Therapeutics",
"Espey Mfg. & Electronics Corp.",
"Esquire Financial Holdings",
"Essent Group Ltd.",
"Essential Properties Realty Trust",
"Essex Property Trust",
"Establishment Labs Holdings Inc.",
"Estee Lauder Companies",
"Estre Ambiental",
"Ethan Allen Interiors Inc.",
"Eton Pharmaceuticals",
"Etsy",
"Euro Tech Holdings Company Limited",
"EuroDry Ltd.",
"Euronav NV",
"Euronet Worldwide",
"European Equity Fund",
"Euroseas Ltd.",
"Evans Bancorp",
"Evelo Biosciences",
"Eventbrite",
"Ever-Glory International Group",
"EverQuote",
"Everbridge",
"Evercore Inc.",
"Everest Re Group",
"Evergy",
"Everi Holdings Inc.",
"Eversource Energy",
"Everspin Technologies",
"Evertec",
"Evofem Biosciences",
"Evogene Ltd.",
"Evoke Pharma",
"Evolent Health",
"Evolus",
"Evolution Petroleum Corporation",
"Evolving Systems",
"Evoqua Water Technologies Corp.",
"Exact Sciences Corporation",
"Exantas Capital Corp.",
"Exchange Traded Concepts Trust FLAG-Forensic Accounting Long-S",
"Exchange Traded Concepts Trust ROBO Global Robotics and Automa",
"Exela Technologies",
"Exelixis",
"Exelon Corporation",
"Exicure",
"ExlService Holdings",
"Expedia Group",
"Expeditors International of Washington",
"Experience Investment Corp.",
"Exponent",
"Express",
"Extended Stay America",
"Exterran Corporation",
"Extra Space Storage Inc",
"Extraction Oil & Gas",
"Extreme Networks",
"Exxon Mobil Corporation",
"EyePoint Pharmaceuticals",
"Eyegate Pharmaceuticals",
"Eyenovia",
"F.N.B. Corporation",
"F5 Networks",
"FARMMI",
"FARO Technologies",
"FAT Brands Inc.",
"FB Financial Corporation",
"FBL Financial Group",
"FFBW",
"FGL Holdings",
"FIRST REPUBLIC BANK",
"FLEX LNG Ltd.",
"FLIR Systems",
"FMC Corporation",
"FNCB Bancorp Inc.",
"FRONTEO",
"FRP Holdings",
"FS Bancorp",
"FS KKR Capital Corp.",
"FSB Bancorp",
"FTE Networks",
"FTI Consulting",
"FTS International",
"FVCBankcorp",
"Fabrinet",
"Facebook",
"FactSet Research Systems Inc.",
"Fair Isaac Corporation",
"Falcon Minerals Corporation",
"Famous Dave's of America",
"Fang Holdings Limited",
"Fanhua Inc.",
"Far Point Acquisition Corporation",
"Farfetch Limited",
"Farmer Brothers Company",
"Farmers & Merchants Bancorp",
"Farmers National Banc Corp.",
"Farmland Partners Inc.",
"Fastenal Company",
"Fastly",
"Fate Therapeutics",
"Fauquier Bankshares",
"FedEx Corporation",
"FedNat Holding Company",
"Federal Agricultural Mortgage Corporation",
"Federal Realty Investment Trust",
"Federal Signal Corporation",
"Federated Investors",
"Federated Premier Municipal Income Fund",
"Fellazo Inc.",
"Fennec Pharmaceuticals Inc.",
"Ferrari N.V.",
"Ferrellgas Partners",
"Ferro Corporation",
"Ferroglobe PLC",
"Fiat Chrysler Automobiles N.V.",
"FibroGen",
"Fibrocell Science Inc.",
"Fidelity D & D Bancorp",
"Fidelity Nasdaq Composite Index Tracking Stock",
"Fidelity National Financial",
"Fidelity National Information Services",
"Fiduciary/Claymore Energy Infrastructure Fund",
"Fidus Investment Corporation",
"Fiesta Restaurant Group",
"Fifth Third Bancorp",
"FinTech Acquisition Corp. III",
"Financial Institutions",
"Finisar Corporation",
"Finjan Holdings",
"FireEye",
"First American Corporation (The)",
"First BanCorp.",
"First Bancorp",
"First Bank",
"First Busey Corporation",
"First Business Financial Services",
"First Capital",
"First Choice Bancorp",
"First Citizens BancShares",
"First Commonwealth Financial Corporation",
"First Community Bankshares",
"First Community Corporation",
"First Defiance Financial Corp.",
"First Financial Bancorp.",
"First Financial Bankshares",
"First Financial Corporation Indiana",
"First Financial Northwest",
"First Foundation Inc.",
"First Guaranty Bancshares",
"First Hawaiian",
"First Horizon National Corporation",
"First Industrial Realty Trust",
"First Internet Bancorp",
"First Interstate BancSystem",
"First Majestic Silver Corp.",
"First Merchants Corporation",
"First Mid Bancshares",
"First Midwest Bancorp",
"First National Corporation",
"First Northwest Bancorp",
"First Savings Financial Group",
"First Seacoast Bancorp",
"First Solar",
"First Trust",
"First Trust Alternative Absolute Return Strategy ETF",
"First Trust Asia Pacific Ex-Japan AlphaDEX Fund",
"First Trust BICK Index Fund",
"First Trust Brazil AlphaDEX Fund",
"First Trust BuyWrite Income ETF",
"First Trust CEF Income Opportunity ETF",
"First Trust California Municipal High income ETF",
"First Trust Canada AlphaDEX Fund",
"First Trust Capital Strength ETF",
"First Trust China AlphaDEX Fund",
"First Trust Cloud Computing ETF",
"First Trust Developed International Equity Select ETF",
"First Trust Developed Markets Ex-US AlphaDEX Fund",
"First Trust Developed Markets ex-US Small Cap AlphaDEX Fund",
"First Trust Dorsey Wright Dynamic Focus 5 ETF",
"First Trust Dorsey Wright Focus 5 ETF",
"First Trust Dorsey Wright International Focus 5 ETF",
"First Trust Dorsey Wright Momentum & Dividend ETF",
"First Trust Dorsey Wright Momentum & Low Volatility ETF",
"First Trust Dorsey Wright Momentum & Value ETF",
"First Trust Dorsey Wright People's Portfolio ETF",
"First Trust DorseyWright DALI 1 ETF",
"First Trust Dow Jones International Internet ETF",
"First Trust Dynamic Europe Equity Income Fund",
"First Trust Emerging Markets AlphaDEX Fund",
"First Trust Emerging Markets Equity Select ETF",
"First Trust Emerging Markets Local Currency Bond ETF",
"First Trust Emerging Markets Small Cap AlphaDEX Fund",
"First Trust Energy Income and Growth Fund",
"First Trust Energy Infrastructure Fund",
"First Trust Enhanced Short Maturity ETF",
"First Trust Europe AlphaDEX Fund",
"First Trust Eurozone AlphaDEX ETF",
"First Trust Germany AlphaDEX Fund",
"First Trust Global Tactical Commodity Strategy Fund",
"First Trust Hedged BuyWrite Income ETF",
"First Trust High Income Long Short Fund",
"First Trust High Yield Long/Short ETF",
"First Trust Hong Kong AlphaDEX Fund",
"First Trust IPOX Europe Equity Opportunities ETF",
"First Trust India Nifty 50 Equal Weight ETF",
"First Trust Indxx Global Agriculture ETF",
"First Trust Indxx Global Natural Resources Income ETF",
"First Trust Indxx Innovative Transaction & Process ETF",
"First Trust Indxx NextG ETF",
"First Trust Intermediate Duration Preferred & Income Fund",
"First Trust International Equity Opportunities ETF",
"First Trust Japan AlphaDEX Fund",
"First Trust Large Cap Core AlphaDEX Fund",
"First Trust Large Cap Growth AlphaDEX Fund",
"First Trust Large Cap US Equity Select ETF",
"First Trust Large Cap Value AlphaDEX Fund",
"First Trust Latin America AlphaDEX Fund",
"First Trust Low Duration Opportunities ETF",
"First Trust Low Duration Strategic Focus ETF",
"First Trust MLP and Energy Income Fund",
"First Trust Managed Municipal ETF",
"First Trust Mega Cap AlphaDEX Fund",
"First Trust Mid Cap Core AlphaDEX Fund",
"First Trust Mid Cap Growth AlphaDEX Fund",
"First Trust Mid Cap US Equity Select ETF",
"First Trust Mid Cap Value AlphaDEX Fund",
"First Trust Multi Cap Growth AlphaDEX Fund",
"First Trust Multi Cap Value AlphaDEX Fund",
"First Trust Multi-Asset Diversified Income Index Fund",
"First Trust Municipal CEF Income Opportunity ETF",
"First Trust Municipal High Income ETF",
"First Trust NASDAQ ABA Community Bank Index Fund",
"First Trust NASDAQ Clean Edge Green Energy Index Fund",
"First Trust NASDAQ Clean Edge Smart Grid Infrastructure Index ",
"First Trust NASDAQ Cybersecurity ETF",
"First Trust NASDAQ Global Auto Index Fund",
"First Trust NASDAQ Technology Dividend Index Fund",
"First Trust NASDAQ-100 Equal Weighted Index Fund",
"First Trust NASDAQ-100 Ex-Technology Sector Index Fund",
"First Trust NASDAQ-100- Technology Index Fund",
"First Trust Nasdaq Artificial Intelligence and Robotics ETF",
"First Trust Nasdaq Bank ETF",
"First Trust Nasdaq Food & Beverage ETF",
"First Trust Nasdaq Oil & Gas ETF",
"First Trust Nasdaq Pharmaceuticals ETF",
"First Trust Nasdaq Retail ETF",
"First Trust Nasdaq Semiconductor ETF",
"First Trust Nasdaq Transportation ETF",
"First Trust New Opportunities MLP & Energy Fund",
"First Trust RBA American Industrial Renaissance ETF",
"First Trust Rising Dividend Achievers ETF",
"First Trust RiverFront Dynamic Asia Pacific ETF",
"First Trust RiverFront Dynamic Developed International ETF",
"First Trust RiverFront Dynamic Emerging Markets ETF",
"First Trust RiverFront Dynamic Europe ETF",
"First Trust S&P International Dividend Aristocrats ETF",
"First Trust SMID Cap Rising Dividend Achievers ETF",
"First Trust SSI Strategic Convertible Securities ETF",
"First Trust Senior Floating Rate 2022 Target Term Fund",
"First Trust Senior Floating Rate Income Fund II",
"First Trust Senior Loan Fund ETF",
"First Trust Small Cap Core AlphaDEX Fund",
"First Trust Small Cap Growth AlphaDEX Fund",
"First Trust Small Cap US Equity Select ETF",
"First Trust Small Cap Value AlphaDEX Fund",
"First Trust South Korea AlphaDEX Fund",
"First Trust Specialty Finance and Financial Opportunities Fund",
"First Trust Strategic Income ETF",
"First Trust Switzerland AlphaDEX Fund",
"First Trust TCW Opportunistic Fixed Income ETF",
"First Trust Total US Market AlphaDEX ETF",
"First Trust US Equity Dividend Select ETF",
"First Trust United Kingdom AlphaDEX Fund",
"First Trust/Aberdeen Emerging Opportunity Fund",
"First Trust/Aberdeen Global Opportunity Income Fund",
"First US Bancshares",
"First United Corporation",
"First Western Financial",
"FirstCash",
"FirstEnergy Corp.",
"FirstService Corporation",
"Firsthand Technology Value Fund",
"Fiserv",
"Fitbit",
"Five Below",
"Five Point Holdings",
"Five Prime Therapeutics",
"Five Star Senior Living Inc.",
"Five9",
"Fiverr International Ltd.",
"Flagstar Bancorp",
"Flaherty & Crumrine Dynamic Preferred and Income Fund Inc.",
"Flaherty & Crumrine Preferred and Income Fund Inco",
"Flaherty & Crumrine Preferred and Income Opportuni",
"Flaherty & Crumrine Preferred and Income Securitie",
"Flaherty & Crumrine Total Return Fund Inc",
"Flanigan's Enterprises",
"FleetCor Technologies",
"Flex Ltd.",
"FlexShares Credit-Scored US Corporate Bond Index Fund",
"FlexShares Credit-Scored US Long Corporate Bond Index Fund",
"FlexShares Disciplined Duration MBS Index Fund",
"FlexShares Real Assets Allocation Index Fund",
"FlexShares STOXX Global ESG Impact Index Fund",
"FlexShares STOXX US ESG Impact Index Fund",
"FlexShares US Quality Large Cap Index Fund",
"FlexShopper",
"Flexible Solutions International Inc.",
"Flexion Therapeutics",
"Flexsteel Industries",
"Floor & Decor Holdings",
"Flotek Industries",
"Flowers Foods",
"Flowr Corporation (The)",
"Flowserve Corporation",
"Fluent",
"Fluidigm Corporation",
"Fluor Corporation",
"Flushing Financial Corporation",
"Flux Power Holdings",
"Fly Leasing Limited",
"Foamix Pharmaceuticals Ltd.",
"Focus Financial Partners Inc.",
"Fomento Economico Mexicano S.A.B. de C.V.",
"Fonar Corporation",
"Foot Locker",
"Ford Motor Company",
"ForeScout Technologies",
"Foresight Autonomous Holdings Ltd.",
"Foresight Energy LP",
"Forestar Group Inc",
"FormFactor",
"Formula Systems (1985) Ltd.",
"Forrester Research",
"Forterra",
"Fortinet",
"Fortis Inc.",
"Fortive Corporation",
"Fortress Biotech",
"Fortress Transportation and Infrastructure Investors LLC",
"Fortuna Silver Mines Inc.",
"Fortune Brands Home & Security",
"Forty Seven",
"Forum Energy Technologies",
"Forum Merger II Corporation",
"Forward Air Corporation",
"Forward Industries",
"Forward Pharma A/S",
"Fossil Group",
"Foundation Building Materials",
"Four Corners Property Trust",
"Four Seasons Education (Cayman) Inc.",
"Fox Corporation",
"Fox Factory Holding Corp.",
"Francesca's Holdings Corporation",
"Franco-Nevada Corporation",
"Frank's International N.V.",
"Franklin Covey Company",
"Franklin Electric Co.",
"Franklin Financial Network",
"Franklin Financial Services Corporation",
"Franklin Limited Duration Income Trust",
"Franklin Resources",
"Franklin Street Properties Corp.",
"Franklin Universal Trust",
"Frankly",
"Fred's",
"Freeport-McMoran",
"Freightcar America",
"Frequency Electronics",
"Fresenius Medical Care Corporation",
"Fresh Del Monte Produce",
"Freshpet",
"Friedman Industries Inc.",
"Front Yard Residential Corporation",
"Frontier Communications Corporation",
"Frontline Ltd.",
"Fuel Tech",
"FuelCell Energy",
"Fulcrum Therapeutics",
"Fulgent Genetics",
"Fuling Global Inc.",
"Full House Resorts",
"Full Spectrum Inc.",
"Fulton Financial Corporation",
"Funko",
"Futu Holdings Limited",
"Future FinTech Group Inc.",
"FutureFuel Corp.",
"Fuwei Films (Holdings) Co.",
"G-III Apparel Group",
"G. Willi-Food International",
"G1 Therapeutics",
"GAIN Capital Holdings",
"GAMCO Global Gold",
"GAMCO Natural Resources",
"GATX Corporation",
"GCI Liberty",
"GCP Applied Technologies Inc.",
"GDS Holdings Limited",
"GEE Group Inc.",
"GENFIT S.A.",
"GMS Inc.",
"GNC Holdings",
"GOLDEN BULL LIMITED",
"GP Strategies Corporation",
"GRAVITY Co.",
"GS Acquisition Holdings Corp.",
"GSE Systems",
"GSI Technology",
"GSX Techedu Inc.",
"GTT Communications",
"GTY Technology Holdings",
"GW Pharmaceuticals Plc",
"GWG Holdings",
"GX Acquisiton Corp.",
"Gabelli Convertible and Income Securities Fund",
"Gabelli Equity Trust",
"Gabelli Global Small and Mid Cap Value Trust (The)",
"Gabelli Multi-Media Trust Inc. (The)",
"Gabelli Utility Trust (The)",
"Gaia",
"Galapagos NV",
"Galectin Therapeutics Inc.",
"Galmed Pharmaceuticals Ltd.",
"Gamco Investors",
"Gamestop Corporation",
"Gamida Cell Ltd.",
"Gaming and Leisure Properties",
"Gap",
"Gardner Denver Holdings",
"Garmin Ltd.",
"Garrett Motion Inc.",
"Garrison Capital Inc.",
"Gartner",
"GasLog LP.",
"GasLog Partners LP",
"Gates Industrial Corporation plc",
"Gemphire Therapeutics Inc.",
"GenMark Diagnostics",
"GenSight Biologics S.A.",
"Genco Shipping & Trading Limited ",
"Gencor Industries Inc.",
"Generac Holdlings Inc.",
"General American Investors",
"General Dynamics Corporation",
"General Electric Company",
"General Finance Corporation",
"General Mills",
"General Moly",
"General Motors Company",
"Genesco Inc.",
"Genesee & Wyoming",
"Genesis Energy",
"Genesis Healthcare",
"Genetic Technologies Ltd",
"Genie Energy Ltd.",
"Genius Brands International",
"Genmab A/S",
"Genocea Biosciences",
"Genomic Health",
"Genpact Limited",
"Genprex",
"Gentex Corporation",
"Gentherm Inc",
"Genuine Parts Company",
"Genworth Financial Inc",
"Geo Group Inc (The)",
"Geopark Ltd",
"Georgia Power Company",
"Geospace Technologies Corporation",
"Gerdau S.A.",
"German American Bancorp",
"Geron Corporation",
"Getty Realty Corporation",
"Gevo",
"Gibraltar Industries",
"GigCapital",
"GigCapital2",
"GigaMedia Limited",
"Gilat Satellite Networks Ltd.",
"Gildan Activewear",
"Gilead Sciences",
"Glacier Bancorp",
"Gladstone Capital Corporation",
"Gladstone Commercial Corporation",
"Gladstone Investment Corporation",
"Gladstone Land Corporation",
"Glatfelter",
"Glaukos Corporation",
"GlaxoSmithKline PLC",
"Glen Burnie Bancorp",
"Global Blood Therapeutics",
"Global Cord Blood Corporation",
"Global Eagle Entertainment Inc.",
"Global Indemnity Limited",
"Global Medical REIT Inc.",
"Global Net Lease",
"Global Partners LP",
"Global Payments Inc.",
"Global Self Storage",
"Global Ship Lease",
"Global Water Resources",
"Global X Autonomous & Electric Vehicles ETF",
"Global X Cloud Computing ETF",
"Global X Conscious Companies ETF",
"Global X DAX Germany ETF",
"Global X E-commerce ETF",
"Global X FinTech ETF",
"Global X Funds Global X MSCI China Communication Services ETF",
"Global X Future Analytics Tech ETF",
"Global X Genomics & Biotechnology ETF",
"Global X Health & Wellness Thematic ETF",
"Global X Internet of Things ETF",
"Global X Longevity Thematic ETF",
"Global X MSCI SuperDividend EAFE ETF",
"Global X Millennials Thematic ETF",
"Global X NASDAQ-100 Covered Call ETF",
"Global X Robotics & Artificial Intelligence ETF",
"Global X S&P 500 Catholic Values ETF",
"Global X Social Media ETF",
"Global X SuperDividend Alternatives ETF",
"Global X SuperDividend REIT ETF",
"Global X YieldCo & Renewable Energy Income ETF",
"GlobalSCAPE",
"Globalstar",
"Globant S.A.",
"Globe Life Inc.",
"Globus Maritime Limited",
"Globus Medical",
"Glowpoint",
"Glu Mobile Inc.",
"GlycoMimetics",
"GoDaddy Inc.",
"GoPro",
"Gogo Inc.",
"Gol Linhas Aereas Inteligentes S.A.",
"Golar LNG Limited",
"Golar LNG Partners LP",
"Gold Fields Limited",
"Gold Resource Corporation",
"Gold Standard Ventures Corporation",
"Golden Entertainment",
"Golden Minerals Company",
"Golden Ocean Group Limited",
"Golden Star Resources",
"Goldfield Corporation (The)",
"Goldman Sachs BDC",
"Goldman Sachs Group",
"Goldman Sachs MLP Energy Renaissance Fund",
"Goldman Sachs MLP Income Opportunities Fund",
"Golub Capital BDC",
"Good Times Restaurants Inc.",
"GoodBulk Ltd.",
"Goodrich Petroleum Corporation",
"Goosehead Insurance",
"Gordon Pointe Acquisition Corp.",
"Gores Holdings III",
"Gores Metropoulos",
"Gorman-Rupp Company (The)",
"Gossamer Bio",
"Graco Inc.",
"Graf Industrial Corp.",
"GrafTech International Ltd.",
"Graham Corporation",
"Graham Holdings Company",
"Gran Tierra Energy Inc.",
"Grana y Montero S.A.A.",
"Grand Canyon Education",
"Granite Construction Incorporated",
"Granite Point Mortgage Trust Inc.",
"Granite Real Estate Inc.",
"Graphic Packaging Holding Company",
"Gray Television",
"Great Ajax Corp.",
"Great Elm Capital Corp.",
"Great Elm Capital Group",
"Great Lakes Dredge & Dock Corporation",
"Great Panther Mining Limited",
"Great Southern Bancorp",
"Great Western Bancorp",
"Green Brick Partners",
"Green Dot Corporation",
"Green Plains",
"Green Plains Partners LP",
"GreenSky",
"GreenTree Hospitality Group Ltd.",
"Greenbrier Companies",
"Greene County Bancorp",
"Greenhill & Co.",
"Greenland Acquisition Corporation",
"Greenlane Holdings",
"Greenlight Reinsurance",
"Greenpro Capital Corp.",
"Greif Bros. Corporation",
"Gridsum Holding Inc.",
"Griffin Industrial Realty",
"Griffon Corporation",
"Grifols",
"Grindrod Shipping Holdings Ltd.",
"Gritstone Oncology",
"Grocery Outlet Holding Corp.",
"Group 1 Automotive",
"Groupon",
"GrubHub Inc.",
"Grupo Aeroportuario Del Pacifico",
"Grupo Aeroportuario del Centro Norte S.A.B. de C.V.",
"Grupo Aeroportuario del Sureste",
"Grupo Aval Acciones y Valores S.A.",
"Grupo Financiero Galicia S.A.",
"Grupo Simec",
"Grupo Supervielle S.A.",
"Grupo Televisa S.A.",
"Guangshen Railway Company Limited",
"Guaranty Bancshares",
"Guaranty Federal Bancshares",
"Guardant Health",
"Guardion Health Sciences",
"Guess?",
"Guggenheim Credit Allocation Fund",
"Guggenheim Enhanced Equity Income Fund",
"Guggenheim Strategic Opportunities Fund",
"Guggenheim Taxable Municipal Managed Duration Trst",
"Guidewire Software",
"Gulf Island Fabrication",
"Gulf Resources",
"Gulfport Energy Corporation",
"Gyrodyne",
"H&E Equipment Services",
"H&R Block",
"H. B. Fuller Company",
"HC2 Holdings",
"HCA Healthcare",
"HCI Group",
"HCP",
"HD Supply Holdings",
"HDFC Bank Limited",
"HEXO Corp.",
"HF Foods Group Inc.",
"HL Acquisitions Corp.",
"HMG/Courtland Properties",
"HMN Financial",
"HMS Holdings Corp",
"HNI Corporation",
"HOOKIPA Pharma Inc.",
"HP Inc.",
"HSBC Holdings plc",
"HTG Molecular Diagnostics",
"HUYA Inc.",
"HV Bancorp",
"Haemonetics Corporation",
"Hailiang Education Group Inc.",
"Hallador Energy Company",
"Halliburton Company",
"Hallmark Financial Services",
"Halozyme Therapeutics",
"Hamilton Beach Brands Holding Company",
"Hamilton Lane Incorporated",
"Hancock Jaffe Laboratories",
"Hancock Whitney Corporation",
"Hanesbrands Inc.",
"Hanger",
"Hanmi Financial Corporation",
"Hannon Armstrong Sustainable Infrastructure Capital",
"HarborOne Bancorp",
"Harley-Davidson",
"Harmonic Inc.",
"Harmony Gold Mining Company Limited",
"Harpoon Therapeutics",
"Harrow Health",
"Harsco Corporation",
"Harte-Hanks",
"Hartford Financial Services Group",
"Harvard Bioscience",
"Harvest Capital Credit Corporation",
"Hasbro",
"Haverty Furniture Companies",
"Hawaiian Electric Industries",
"Hawaiian Holdings",
"Hawkins",
"Hawthorn Bancshares",
"Haymaker Acquisition Corp. II",
"Haynes International",
"HeadHunter Group PLC",
"Health Catalyst",
"Health Insurance Innovations",
"Health Sciences Acquisitions Corporation",
"HealthEquity",
"HealthStream",
"Healthcare Realty Trust Incorporated",
"Healthcare Services Group",
"Healthcare Trust of America",
"Heartland Express",
"Heartland Financial USA",
"Heat Biologics",
"Hebron Technology Co.",
"Hecla Mining Company",
"Heico Corporation",
"Heidrick & Struggles International",
"Helen of Troy Limited",
"Helios Technologies",
"Helius Medical Technologies",
"Helix Energy Solutions Group",
"Helmerich & Payne",
"Hemisphere Media Group",
"Hemispherx BioPharma",
"Hennessy Advisors",
"Hennessy Capital Acquisition Corp. IV",
"Henry Schein",
"Hepion Pharmaceuticals",
"Herbalife Nutrition Ltd.",
"Herc Holdings Inc.",
"Hercules Capital",
"Heritage Commerce Corp",
"Heritage Financial Corporation",
"Heritage Insurance Holdings",
"Heritage-Crystal Clean",
"Herman Miller",
"Hermitage Offshore Services Ltd.",
"Heron Therapeutics",
"Hersha Hospitality Trust",
"Hershey Company (The)",
"Hertz Global Holdings",
"Heska Corporation",
"Hess Corporation",
"Hess Midstream Partners LP",
"Hewlett Packard Enterprise Company",
"Hexcel Corporation",
"Hexindai Inc.",
"Hi-Crush Inc.",
"Hibbett Sports",
"High Income Securities Fund",
"HighPoint Resources Corporation",
"Highland Global Allocation Fund",
"Highland Income Fund",
"Highland/iBoxx Senior Loan ETF",
"Highpower International Inc",
"Highway Holdings Limited",
"Highwoods Properties",
"Hill International",
"Hill-Rom Holdings Inc",
"Hillenbrand Inc",
"Hillman Group Capital Trust",
"Hilltop Holdings Inc.",
"Hilton Grand Vacations Inc.",
"Hilton Worldwide Holdings Inc.",
"Himax Technologies",
"Hingham Institution for Savings",
"HireQuest",
"Histogenics Corporation",
"Hoegh LNG Partners LP",
"Holly Energy Partners",
"HollyFrontier Corporation",
"Hollysys Automation Technologies",
"Hologic",
"Home BancShares",
"Home Bancorp",
"Home Depot",
"Home Federal Bancorp",
"HomeStreet",
"HomeTrust Bancshares",
"Homology Medicines",
"Honda Motor Company",
"Honeywell International Inc.",
"Hooker Furniture Corporation",
"Hope Bancorp",
"Horace Mann Educators Corporation",
"Horizon Bancorp",
"Horizon Global Corporation",
"Horizon Technology Finance Corporation",
"Horizon Therapeutics Public Limited Company",
"Hormel Foods Corporation",
"Hornbeck Offshore Services",
"Hospitality Properties Trust",
"Host Hotels & Resorts",
"Hostess Brands",
"Hoth Therapeutics",
"Houghton Mifflin Harcourt Company",
"Houlihan Lokey",
"Houston American Energy Corporation",
"Houston Wire & Cable Company",
"Hovnanian Enterprises Inc",
"Howard Bancorp",
"Howard Hughes Corporation (The)",
"Huami Corporation",
"Huaneng Power International",
"Huazhu Group Limited",
"Hub Group",
"HubSpot",
"Hubbell Inc",
"Hudbay Minerals Inc.",
"Hudson Global",
"Hudson Ltd.",
"Hudson Pacific Properties",
"Hudson Technologies",
"Huitao Technology Co.",
"Humana Inc.",
"Hunt Companies Finance Trust",
"Huntington Bancshares Incorporated",
"Huntington Ingalls Industries",
"Huntsman Corporation",
"Hurco Companies",
"Huron Consulting Group Inc.",
"Hutchison China MediTech Limited",
"Huttig Building Products",
"Hyatt Hotels Corporation",
"HyreCar Inc.",
"Hyster-Yale Materials Handling",
"I.D. Systems",
"IAA",
"IAC/InterActiveCorp",
"IBERIABANK Corporation",
"IBEX Holdings Limited",
"IBO (Listing Market - NYSE Amex Network B F)",
"ICC Holdings",
"ICF International",
"ICICI Bank Limited",
"ICON plc",
"ICU Medical",
"IDACORP",
"IDEAYA Biosciences",
"IDEX Corporation",
"IDEXX Laboratories",
"IDT Corporation",
"IEC Electronics Corp.",
"IES Holdings",
"IF Bancorp",
"IHS Markit Ltd.",
"II-VI Incorporated",
"IMAC Holdings",
"IMV Inc.",
"ING Group",
"INMODE LTD.",
"INTL FCStone Inc.",
"INVESCO MORTGAGE CAPITAL INC",
"INmune Bio Inc.",
"IPG Photonics Corporation",
"IQ Chaikin U.S. Large Cap ETF",
"IQ Chaikin U.S. Small Cap ETF",
"IQVIA Holdings",
"IRIDEX Corporation",
"IRSA Inversiones Y Representaciones S.A.",
"IRSA Propiedades Comerciales S.A.",
"IT Tech Packaging",
"ITT Inc.",
"IVERIC bio",
"IZEA Worldwide",
"Iamgold Corporation",
"Icahn Enterprises L.P.",
"Ichor Holdings",
"Iconix Brand Group",
"Ideal Power Inc.",
"Ideanomics",
"Identiv",
"Idera Pharmaceuticals",
"Ikonics Corporation",
"Illinois Tool Works Inc.",
"Illumina",
"Image Sensing Systems",
"Imax Corporation",
"Immersion Corporation",
"ImmuCell Corporation",
"Immunic",
"ImmunoGen",
"Immunomedics",
"Immuron Limited",
"Immutep Limited",
"Impac Mortgage Holdings",
"Imperial Oil Limited",
"Impinj",
"InVivo Therapeutics Holdings Corp.",
"Income Opportunity Realty Investors",
"Incyte Corporation",
"Independence Contract Drilling",
"Independence Holding Company",
"Independence Realty Trust",
"Independent Bank Corp.",
"Independent Bank Corporation",
"Independent Bank Group",
"India Fund",
"India Globalization Capital Inc.",
"Industrial Logistics Properties Trust",
"Industrial Services of America",
"Industrias Bachoco",
"Infinera Corporation",
"Infinity Pharmaceuticals",
"InflaRx N.V.",
"Information Services Group",
"Infosys Limited",
"Infrastructure and Energy Alternatives",
"InfuSystems Holdings",
"Ingersoll-Rand plc (Ireland)",
"Ingevity Corporation",
"Ingles Markets",
"Ingredion Incorporated",
"InnSuites Hospitality Trust",
"InnerWorkings",
"Innodata Inc.",
"Innophos Holdings",
"Innospec Inc.",
"Innovate Biopharmaceuticals",
"Innovative Industrial Properties",
"Innovative Solutions and Support",
"Innoviva",
"Inogen",
"Inovalon Holdings",
"Inovio Pharmaceuticals",
"Inphi Corporation",
"Inpixon ",
"Inseego Corp.",
"Insight Enterprises",
"Insight Select Income Fund",
"Insignia Systems",
"Insmed",
"Insperity",
"Inspire Medical Systems",
"InspireMD Inc.",
"Inspired Entertainment",
"Installed Building Products",
"Insteel Industries",
"Instructure",
"Insulet Corporation",
"Insurance Acquisition Corp.",
"Intec Pharma Ltd.",
"Integer Holdings Corporation",
"Integra LifeSciences Holdings Corporation",
"Integrated Media Technology Limited",
"Intel Corporation",
"Intellia Therapeutics",
"Intellicheck",
"Intelligent Systems Corporation",
"Intelsat S.A.",
"Inter Parfums",
"InterDigital",
"InterXion Holding N.V.",
"Intercept Pharmaceuticals",
"Intercontinental Exchange Inc.",
"Intercontinental Hotels Group",
"Intercorp Financial Services Inc.",
"Interface",
"Intermolecular",
"Internap Corporation",
"International Bancshares Corporation",
"International Business Machines Corporation",
"International Flavors & Fragrances",
"International Game Technology",
"International Money Express",
"International Paper Company",
"International Seaways",
"International Speedway Corporation",
"International Tower Hill Mines Ltd",
"Internet Gold Golden Lines Ltd.",
"Interpace Diagnostics Group",
"Interpublic Group of Companies",
"Intersect ENT",
"Interstate Power and Light Company",
"Intevac",
"Intra-Cellular Therapies Inc.",
"Intrepid Potash",
"Intrexon Corporation",
"IntriCon Corporation",
"Intuit Inc.",
"Intuitive Surgical",
"Inuvo",
"Invacare Corporation",
"Invesco 1-30 Laddered Treasury ETF",
"Invesco Advantage Municipal Income Trust II",
"Invesco BLDRS Asia 50 ADR Index Fund",
"Invesco BLDRS Developed Markets 100 ADR Index Fund",
"Invesco BLDRS Emerging Markets 50 ADR Index Fund",
"Invesco BLDRS Europe Select ADR Index Fund",
"Invesco Bond Fund",
"Invesco BuyBack Achievers ETF",
"Invesco California Value Municipal Income Trust",
"Invesco Credit Opportunities Fund",
"Invesco DWA Basic Materials Momentum ETF",
"Invesco DWA Consumer Cyclicals Momentum ETF",
"Invesco DWA Consumer Staples Momentum ETF",
"Invesco DWA Developed Markets Momentum ETF",
"Invesco DWA Emerging Markets Momentum ETF",
"Invesco DWA Energy Momentum ETF",
"Invesco DWA Financial Momentum ETF",
"Invesco DWA Healthcare Momentum ETF",
"Invesco DWA Industrials Momentum ETF",
"Invesco DWA Momentum ETF",
"Invesco DWA NASDAQ Momentum ETF",
"Invesco DWA SmallCap Momentum ETF",
"Invesco DWA Tactical Multi-Asset Income ETF",
"Invesco DWA Tactical Sector Rotation ETF",
"Invesco DWA Technology Momentum ETF",
"Invesco DWA Utilities Momentum ETF",
"Invesco Dividend Achievers ETF",
"Invesco FTSE International Low Beta Equal Weight ETF",
"Invesco FTSE RAFI US 1500 Small-Mid ETF",
"Invesco Global Water ETF",
"Invesco Golden Dragon China ETF",
"Invesco High Income 2023 Target Term Fund",
"Invesco High Income 2024 Target Term Fund",
"Invesco High Income Trust II",
"Invesco High Yield Equity Dividend Achievers ETF",
"Invesco International BuyBack Achievers ETF",
"Invesco International Dividend Achievers ETF",
"Invesco KBW Bank ETF",
"Invesco KBW High Dividend Yield Financial ETF",
"Invesco KBW Premium Yield Equity REIT ETF",
"Invesco KBW Property & Casualty Insurance ETF",
"Invesco KBW Regional Banking ETF",
"Invesco LadderRite 0-5 Year Corporate Bond ETF",
"Invesco Mortgage Capital Inc.",
"Invesco Municipal Income Opportunities Trust",
"Invesco Municipal Opportunity Trust",
"Invesco Municipal Trust",
"Invesco Nasdaq Internet ETF",
"Invesco Optimum Yield Diversified Commodity Strategy No K-1 ET",
"Invesco Pennsylvania Value Municipal Income Trust",
"Invesco Plc",
"Invesco QQQ Trust",
"Invesco Quality Municipal Income Trust",
"Invesco RAFI Strategic Developed ex-US ETF",
"Invesco RAFI Strategic Developed ex-US Small Company ETF",
"Invesco RAFI Strategic Emerging Markets ETF",
"Invesco RAFI Strategic US ETF",
"Invesco RAFI Strategic US Small Company ETF",
"Invesco Russell 1000 Low Beta Equal Weight ETF",
"Invesco S&P SmallCap Consumer Discretionary ETF",
"Invesco S&P SmallCap Consumer Staples ETF",
"Invesco S&P SmallCap Energy ETF",
"Invesco S&P SmallCap Financials ETF",
"Invesco S&P SmallCap Health Care ETF",
"Invesco S&P SmallCap Industrials ETF",
"Invesco S&P SmallCap Information Technology ETF",
"Invesco S&P SmallCap Materials ETF",
"Invesco S&P SmallCap Utilities & Communication Services ETF",
"Invesco Senior Income Trust",
"Invesco Trust for Investment Grade New York Municipal",
"Invesco Trust for Investment Grade Municipals",
"Invesco Value Municipal Income Trust",
"Invesco Variable Rate Investment Grade ETF",
"Invesco Water Resources ETF",
"Investar Holding Corporation",
"Investcorp Credit Management BDC",
"Investors Bancorp",
"Investors Real Estate Trust",
"Investors Title Company",
"Invitae Corporation",
"Invitation Homes Inc.",
"Ion Geophysical Corporation",
"Ionis Pharmaceuticals",
"Iovance Biotherapeutics",
"Iridium Communications Inc",
"Iron Mountain Incorporated",
"Ironwood Pharmaceuticals",
"IsoRay",
"Israel Chemicals Shs",
"Isramco",
"Issuer Direct Corporation",
"Ita? CorpBanca",
"Itamar Medical Ltd.",
"Itau Unibanco Banco Holding SA",
"Iteris",
"Iterum Therapeutics plc",
"Itron",
"Ituran Location and Control Ltd.",
"Ivy High Income Opportunities Fund",
"J & J Snack Foods Corp.",
"J P Morgan Chase & Co",
"J. Alexander's Holdings",
"J. Jill",
"J. W. Mays",
"J.B. Hunt Transport Services",
"J.C. Penney Company",
"J.M. Smucker Company (The)",
"JAKKS Pacific",
"JBG SMITH Properties",
"JD.com",
"JELD-WEN Holding",
"JMP Group LLC",
"JMU Limited",
"Jabil Inc.",
"Jack Henry & Associates",
"Jack In The Box Inc.",
"Jacobs Engineering Group Inc.",
"Jagged Peak Energy Inc.",
"Jaguar Health",
"James Hardie Industries plc.",
"James River Group Holdings",
"JanOne Inc.",
"Janus Henderson Group plc",
"Janus Henderson Small Cap Growth Alpha ETF",
"Janus Henderson Small/Mid Cap Growth Alpha ETF",
"Japan Smaller Capitalization Fund Inc",
"Jason Industries",
"Jazz Pharmaceuticals plc",
"Jefferies Financial Group Inc.",
"Jerash Holdings (US)",
"Jernigan Capital",
"JetBlue Airways Corporation",
"Jewett-Cameron Trading Company",
"Jianpu Technology Inc.",
"Jiayin Group Inc.",
"JinkoSolar Holding Company Limited",
"John B. Sanfilippo & Son",
"John Bean Technologies Corporation",
"John Hancock Financial Opportunities Fund",
"John Hancock Hedged Equity & Income Fund",
"John Hancock Income Securities Trust",
"John Hancock Investors Trust",
"John Hancock Pfd Income Fund II",
"John Hancock Preferred Income Fund",
"John Hancock Preferred Income Fund III",
"John Hancock Premium Dividend Fund",
"John Hancock Tax Advantaged Dividend Income Fund",
"John Hancock Tax-Advantaged Global Shareholder Yield Fund",
"John Wiley & Sons",
"Johnson & Johnson",
"Johnson Controls International plc",
"Johnson Outdoors Inc.",
"Jones Lang LaSalle Incorporated",
"Jounce Therapeutics",
"Jumei International Holding Limited",
"Jumia Technologies AG",
"Juniper Networks",
"Jupai Holdings Limited",
"Just Energy Group",
"K12 Inc",
"KAR Auction Services",
"KB Financial Group Inc",
"KB Home",
"KBL Merger Corp. IV",
"KBR",
"KBS Fashion Group Limited",
"KKR & Co. Inc.",
"KKR Income Opportunities Fund",
"KKR Real Estate Finance Trust Inc.",
"KLA Corporation ",
"KLX Energy Services Holdings",
"KNOT Offshore Partners LP",
"KT Corporation",
"KVH Industries",
"Kadant Inc",
"Kadmon Holdings",
"Kaiser Aluminum Corporation",
"Kaixin Auto Holdings",
"KalVista Pharmaceuticals",
"Kala Pharmaceuticals",
"Kaleido Biosciences",
"Kamada Ltd.",
"Kaman Corporation",
"Kandi Technologies Group",
"Kansas City Southern",
"Karuna Therapeutics",
"Karyopharm Therapeutics Inc.",
"Kayne Anderson MLP/Midstream Investment Company",
"Kayne Anderson Midstream Energy Fund",
"Kazia Therapeutics Limited",
"Keane Group",
"Kearny Financial",
"Kellogg Company",
"Kelly Services",
"Kelso Technologies Inc",
"KemPharm",
"Kemet Corporation",
"Kemper Corporation",
"Kennametal Inc.",
"Kennedy-Wilson Holdings Inc.",
"Kenon Holdings Ltd.",
"Kentucky First Federal Bancorp",
"Keurig Dr Pepper Inc.",
"Kewaunee Scientific Corporation",
"Key Energy Services",
"Key Tronic Corporation",
"KeyCorp",
"Keysight Technologies Inc.",
"Kezar Life Sciences",
"Kforce",
"Kilroy Realty Corporation",
"Kimball Electronics",
"Kimball International",
"Kimbell Royalty Partners",
"Kimberly-Clark Corporation",
"Kimco Realty Corporation",
"Kinder Morgan",
"Kindred Biosciences",
"Kingold Jewelry Inc.",
"Kingstone Companies",
"Kingsway Financial Services",
"Kiniksa Pharmaceuticals",
"Kinross Gold Corporation",
"Kinsale Capital Group",
"Kirby Corporation",
"Kirkland Lake Gold Ltd.",
"Kirkland's",
"Kite Realty Group Trust",
"Kitov Pharma Ltd.",
"Knight Transportation",
"Knoll",
"Knowles Corporation",
"Kodiak Sciences Inc",
"Kohl's Corporation",
"Koninklijke Philips N.V.",
"Kontoor Brands",
"Kopin Corporation",
"Koppers Holdings Inc.",
"Korea Electric Power Corporation",
"Korea Fund",
"Korn Ferry ",
"Kornit Digital Ltd.",
"Kosmos Energy Ltd.",
"Koss Corporation",
"KraneShares Trust KraneShares CSI China Internet ETF",
"Kraton Corporation",
"Kratos Defense & Security Solutions",
"Kroger Company (The)",
"Kronos Worldwide Inc",
"Krystal Biotech",
"Kulicke and Soffa Industries",
"Kura Oncology",
"Kura Sushi USA",
"L Brands",
"L.B. Foster Company",
"L.S. Starrett Company (The)",
"L3Harris Technologies",
"LAIX Inc.",
"LATAM Airlines Group S.A.",
"LCI Industries ",
"LCNB Corporation",
"LEAP THERAPEUTICS",
"LF Capital Acquistion Corp.",
"LG Display Co.",
"LGI Homes",
"LGL Group",
"LHC Group",
"LINE Corporation",
"LKQ Corporation",
"LM Funding America",
"LMP Capital and Income Fund Inc.",
"LPL Financial Holdings Inc.",
"LRAD Corporation",
"LSC Communications",
"LSI Industries Inc.",
"LTC Properties",
"La Jolla Pharmaceutical Company",
"La-Z-Boy Incorporated",
"Laboratory Corporation of America Holdings",
"Ladder Capital Corp",
"Ladenburg Thalmann Financial Services Inc",
"Ladenburg Thalmann Financial Services Inc.",
"Lake Shore Bancorp",
"Lakeland Bancorp",
"Lakeland Financial Corporation",
"Lakeland Industries",
"Lam Research Corporation",
"Lamar Advertising Company",
"Lamb Weston Holdings",
"Lancaster Colony Corporation",
"Landcadia Holdings II",
"Landec Corporation",
"Landmark Bancorp Inc.",
"Landmark Infrastructure Partners LP",
"Lands' End",
"Landstar System",
"Lannett Co Inc",
"Lantheus Holdings",
"Lantronix",
"Laredo Petroleum",
"Las Vegas Sands Corp.",
"Lattice Semiconductor Corporation",
"Laureate Education",
"Lawson Products",
"Lazard Global Total Return and Income Fund",
"Lazard Ltd.",
"Lazard World Dividend & Income Fund",
"Lazydays Holdings",
"LeMaitre Vascular",
"Leaf Group Ltd.",
"Lear Corporation",
"Lee Enterprises",
"Legacy Acquisition Corp.",
"Legacy Housing Corporation",
"LegacyTexas Financial Group",
"Legg Mason",
"Legg Mason Global Infrastructure ETF",
"Legg Mason Low Volatility High Dividend ETF",
"Legg Mason Small-Cap Quality Value ETF",
"Leggett & Platt",
"Lehman ABS Corporation",
"Leidos Holdings",
"Leisure Acquisition Corp.",
"Leju Holdings Limited",
"LendingClub Corporation",
"LendingTree",
"Lennar Corporation",
"Lennox International",
"Leo Holdings Corp.",
"Level One Bancorp",
"Levi Strauss & Co",
"Lexicon Pharmaceuticals",
"LexinFintech Holdings Ltd.",
"Lexington Realty Trust",
"Lianluo Smart Limited",
"Libbey",
"Liberty All-Star Equity Fund",
"Liberty All-Star Growth Fund",
"Liberty Broadband Corporation",
"Liberty Global plc",
"Liberty Latin America Ltd.",
"Liberty Media Corporation",
"Liberty Oilfield Services Inc.",
"Liberty Property Trust",
"Liberty TripAdvisor Holdings",
"Life Storage",
"Lifetime Brands",
"Lifevantage Corporation",
"Lifeway Foods",
"Ligand Pharmaceuticals Incorporated",
"LightInTheBox Holding Co.",
"LightPath Technologies",
"Lightbridge Corporation",
"Lilis Energy",
"Limbach Holdings",
"Limelight Networks",
"Limestone Bancorp",
"Limoneira Co",
"Lincoln Educational Services Corporation",
"Lincoln Electric Holdings",
"Lincoln National Corporation",
"Lindblad Expeditions Holdings Inc. ",
"Linde plc",
"Lindsay Corporation",
"Lineage Cell Therapeutics",
"Linx S.A.",
"Lions Gate Entertainment Corporation",
"Lipocine Inc.",
"LiqTech International",
"Liquid Media Group Ltd.",
"Liquidia Technologies",
"Liquidity Services",
"Lithia Motors",
"Lithium Americas Corp.",
"Littelfuse",
"LivaNova PLC",
"Live Nation Entertainment",
"Live Oak Bancshares",
"Live Ventures Incorporated",
"LivePerson",
"LiveRamp Holdings",
"LiveXLive Media",
"Livent Corporation",
"Livongo Health",
"Lloyds Banking Group Plc",
"Lockheed Martin Corporation",
"Loews Corporation",
"LogMein",
"LogicBio Therapeutics",
"Logitech International S.A.",
"Loma Negra Compania Industrial Argentina Sociedad Anonima",
"Loncar Cancer Immunotherapy ETF",
"Loncar China BioPharma ETF",
"Lonestar Resources US Inc.",
"Longevity Acquisition Corporation",
"Loop Industries",
"Loral Space and Communications",
"Louisiana-Pacific Corporation",
"Lowe's Companies",
"Lsb Industries Inc.",
"Luby's",
"Luckin Coffee Inc.",
"Lumber Liquidators Holdings",
"Lumentum Holdings Inc.",
"Luminex Corporation",
"Luna Innovations Incorporated",
"Luokung Technology Corp",
"Luther Burbank Corporation",
"Luxfer Holdings PLC",
"Lydall",
"Lyft",
"Lyon William Homes",
"LyondellBasell Industries NV",
"M&T Bank Corporation",
"M.D.C. Holdings",
"M/I Homes",
"MACOM Technology Solutions Holdings",
"MAG Silver Corporation",
"MAM Software Group",
"MBIA",
"MDC Partners Inc.",
"MDJM LTD",
"MDU Resources Group",
"MEDIFAST INC",
"MEI Pharma",
"MER Telemanagement Solutions Ltd.",
"MFA Financial",
"MFS Charter Income Trust",
"MFS Government Markets Income Trust",
"MFS Intermediate Income Trust",
"MFS Multimarket Income Trust",
"MFS Municipal Income Trust",
"MFS Special Value Trust",
"MGE Energy Inc.",
"MGIC Investment Corporation",
"MGM Growth Properties LLC",
"MGM Resorts International",
"MGP Ingredients",
"MICT",
"MIDSTATES PETROLEUM COMPANY",
"MIND C.T.I. Ltd.",
"MISONIX",
"MKS Instruments",
"MMA Capital Holdings",
"MMTec",
"MOGU Inc.",
"MPLX LP",
"MRC Global Inc.",
"MRI Interventions",
"MS Structured Asset Corp Saturns GE Cap Corp Series 2002-14",
"MSA Safety Incorporporated",
"MSB Financial Corp.",
"MSC Industrial Direct Company",
"MSCI Inc",
"MSG Networks Inc.",
"MTBC",
"MTS Systems Corporation",
"MV Oil Trust",
"MVB Financial Corp.",
"MVC Capital",
"MYOS RENS Technology Inc.",
"MYR Group",
"Macatawa Bank Corporation",
"Macerich Company (The)",
"Mack-Cali Realty Corporation",
"Mackinac Financial Corporation",
"Macquarie First Trust Global",
"Macquarie Global Infrastructure Total Return Fund Inc.",
"Macquarie Infrastructure Corporation ",
"Macro Bank Inc.",
"MacroGenics",
"Macy's Inc",
"Madison Covered Call & Equity Strategy Fund",
"Madrigal Pharmaceuticals",
"Magal Security Systems Ltd.",
"Magellan Health",
"Magellan Midstream Partners L.P.",
"Magenta Therapeutics",
"Magic Software Enterprises Ltd.",
"Magna International",
"MagnaChip Semiconductor Corporation",
"Magnolia Oil & Gas Corporation",
"Magyar Bancorp",
"Maiden Holdings",
"Main Street Capital Corporation",
"MainStay MacKay DefinedTerm Municipal Opportunitie",
"MainStreet Bancshares",
"Majesco",
"MakeMyTrip Limited",
"Malibu Boats",
"Mallinckrodt plc",
"Malvern Bancorp",
"Mammoth Energy Services",
"ManTech International Corporation",
"Manchester United Ltd.",
"Manhattan Associates",
"Manhattan Bridge Capital",
"Manitex International",
"Manitowoc Company",
"MannKind Corporation",
"Mannatech",
"Manning & Napier",
"ManpowerGroup",
"Manulife Financial Corp",
"Marathon Oil Corporation",
"Marathon Patent Group",
"Marathon Petroleum Corporation",
"Marchex",
"Marcus & Millichap",
"Marcus Corporation (The)",
"Marin Software Incorporated",
"Marine Petroleum Trust",
"Marine Products Corporation",
"MarineMax",
"Marinus Pharmaceuticals",
"Markel Corporation",
"Marker Therapeutics",
"MarketAxess Holdings",
"Marlin Business Services Corp.",
"Marriott International",
"Marriott Vacations Worldwide Corporation",
"Marrone Bio Innovations",
"Marsh & McLennan Companies",
"Marten Transport",
"Martin Marietta Materials",
"Martin Midstream Partners L.P.",
"Marvell Technology Group Ltd.",
"MasTec",
"Masco Corporation",
"Masimo Corporation",
"Masonite International Corporation",
"Mastech Digital",
"MasterCraft Boat Holdings",
"Mastercard Incorporated",
"Matador Resources Company",
"Match Group",
"Materialise NV",
"Materion Corporation",
"Matinas Biopharma Holdings",
"Matrix Service Company",
"Matson",
"Mattel",
"Matthews International Corporation",
"Maui Land & Pineapple Company",
"Maverix Metals Inc.",
"MaxLinear",
"Maxar Technologies Inc.",
"Maxim Integrated Products",
"Maximus",
"Mayville Engineering Company",
"McClatchy Company (The)",
"McCormick & Company",
"McDermott International",
"McDonald's Corporation",
"McEwen Mining Inc.",
"McGrath RentCorp",
"McKesson Corporation",
"Mechel PAO",
"Medalist Diversified REIT",
"Medallia",
"Medallion Financial Corp.",
"MediWound Ltd.",
"Medical Properties Trust",
"MediciNova",
"Medidata Solutions",
"Medigus Ltd.",
"Medley Capital Corporation",
"Medley LLC",
"Medley Management Inc.",
"Mednax",
"Medpace Holdings",
"Medtronic plc",
"Megalith Financial Acquisition Corp.",
"MeiraGTx Holdings plc",
"Melco Resorts & Entertainment Limited",
"Melinta Therapeutics",
"Mellanox Technologies",
"Menlo Therapeutics Inc.",
"MercadoLibre",
"Mercantile Bank Corporation",
"Mercer International Inc.",
"Merchants Bancorp",
"Merck & Company",
"Mercury General Corporation",
"Mercury Systems Inc",
"Meredith Corporation",
"Mereo BioPharma Group plc",
"Meridian Bancorp",
"Meridian Bioscience Inc.",
"Meridian Corporation",
"Merit Medical Systems",
"Meritage Corporation",
"Meritor",
"Merrill Lynch & Co.",
"Merrill Lynch Depositor",
"Merrimack Pharmaceuticals",
"Mersana Therapeutics",
"Merus N.V.",
"Mesa Air Group",
"Mesa Laboratories",
"Mesa Royalty Trust",
"Mesabi Trust",
"Mesoblast Limited",
"MetLife",
"Meta Financial Group",
"Methanex Corporation",
"Methode Electronics",
"Metropolitan Bank Holding Corp.",
"Mettler-Toledo International",
"Mexco Energy Corporation",
"Mexico Equity and Income Fund",
"Mexico Fund",
"MiX Telematics Limited",
"Micro Focus Intl PLC",
"MicroStrategy Incorporated",
"Microbot Medical Inc. ",
"Microchip Technology Incorporated",
"Micron Solutions",
"Micron Technology",
"Microsoft Corporation",
"Microvision",
"Mid Penn Bancorp",
"Mid-America Apartment Communities",
"Mid-Con Energy Partners",
"Mid-Southern Bancorp",
"MidSouth Bancorp",
"MidWestOne Financial Group",
"Midatech Pharma PLC",
"Middlefield Banc Corp.",
"Middlesex Water Company",
"Midland States Bancorp",
"Milacron Holdings Corp.",
"Milestone Pharmaceuticals Inc.",
"Milestone Scientific",
"Millendo Therapeutics",
"Miller Industries",
"Miller/Howard High Income Equity Fund",
"Millicom International Cellular S.A.",
"Mimecast Limited",
"Minerals Technologies Inc.",
"Minerva Neurosciences",
"Miragen Therapeutics",
"Mirati Therapeutics",
"Mirum Pharmaceuticals",
"Mistras Group Inc",
"Mitcham Industries",
"Mitek Systems",
"Mitsubishi UFJ Financial Group Inc",
"Mizuho Financial Group",
"MoSys",
"Mobile Mini",
"Mobile TeleSystems OJSC",
"MobileIron",
"Model N",
"Moderna",
"Modine Manufacturing Company",
"Moelis & Company",
"Mogo Inc.",
"Mohawk Group Holdings",
"Mohawk Industries",
"Molecular Templates",
"Moleculin Biotech",
"Molina Healthcare Inc",
"Molson Coors Brewing Company",
"Momenta Pharmaceuticals",
"Momo Inc.",
"Monaker Group",
"Monarch Casino & Resort",
"Mondelez International",
"Moneygram International",
"MongoDB",
"Monmouth Real Estate Investment Corporation",
"Monocle Acquisition Corporation",
"Monolithic Power Systems",
"Monotype Imaging Holdings Inc.",
"Monro",
"Monroe Capital Corporation",
"Monster Beverage Corporation",
"Montage Resources Corporation",
"Moody's Corporation",
"Moog Inc.",
"Morgan Stanley",
"Morgan Stanley China A Share Fund Inc.",
"Morgan Stanley Emerging Markets Debt Fund",
"Morgan Stanley Emerging Markets Domestic Debt Fund",
"Morgan Stanley India Investment Fund",
"Morningstar",
"Morphic Holding",
"MorphoSys AG",
"Mosaic Acquisition Corp.",
"Mosaic Company (The)",
"Mota Group",
"Motif Bio plc",
"Motorcar Parts of America",
"Motorola Solutions",
"Motus GI Holdings",
"Mountain Province Diamonds Inc.",
"Movado Group Inc.",
"Moxian",
"Mr. Cooper Group Inc.",
"Mudrick Capital Acquisition Corporation",
"Mueller Industries",
"Mueller Water Products Inc",
"MuniVest Fund",
"MuniYield Arizona Fund",
"Murphy Oil Corporation",
"Murphy USA Inc.",
"Mustang Bio",
"MutualFirst Financial Inc.",
"My Size",
"Myers Industries",
"Mylan N.V.",
"MyoKardia",
"Myomo Inc.",
"Myovant Sciences Ltd.",
"Myriad Genetics",
"NACCO Industries",
"NAPCO Security Technologies",
"NASDAQ TEST STOCK",
"NB Capital Acquisition Corp.",
"NBT Bancorp Inc.",
"NCR Corporation",
"NCS Multistage Holdings",
"NETGEAR",
"NF Energy Saving Corporation",
"NGL ENERGY PARTNERS LP",
"NGM Biopharmaceuticals",
"NI Holdings",
"NIC Inc.",
"NICE Ltd",
"NII Holdings",
"NIO Inc.",
"NL Industries",
"NMI Holdings Inc",
"NN",
"NOW Inc.",
"NRC Group Holdings Corp.",
"NRG Energy",
"NTN Buzztime",
"NV5 Global",
"NVE Corporation",
"NVIDIA Corporation",
"NVR",
"NXP Semiconductors N.V.",
"NXT-ID Inc.",
"NYSE Test One",
"Nabors Industries Ltd.",
"Nabriva Therapeutics plc",
"Naked Brand Group Limited",
"Nam Tai Property Inc.",
"Nano Dimension Ltd.",
"NanoString Technologies",
"NanoVibronix",
"NanoViricides",
"Nanometrics Incorporated",
"NantHealth",
"NantKwest",
"Nasdaq",
"Natera",
"Nathan's Famous",
"National Bank Holdings Corporation",
"National Bankshares",
"National Beverage Corp.",
"National CineMedia",
"National Energy Services Reunited Corp.",
"National Fuel Gas Company",
"National General Holdings Corp",
"National Grid Transco",
"National Health Investors",
"National HealthCare Corporation",
"National Holdings Corporation",
"National Instruments Corporation",
"National Oilwell Varco",
"National Presto Industries",
"National Research Corporation",
"National Retail Properties",
"National Rural Utilities Cooperative Finance Corporation",
"National Security Group",
"National Steel Company",
"National Storage Affiliates Trust",
"National Vision Holdings",
"National Western Life Group",
"Natural Alternatives International",
"Natural Gas Services Group",
"Natural Grocers by Vitamin Cottage",
"Natural Health Trends Corp.",
"Natural Resource Partners LP",
"Nature's Sunshine Products",
"Natus Medical Incorporated",
"Natuzzi",
"Nautilus Group",
"Navidea Biopharmaceuticals",
"Navient Corporation",
"Navigant Consulting",
"Navigator Holdings Ltd.",
"Navios Maritime Acquisition Corporation",
"Navios Maritime Containers L.P.",
"Navios Maritime Holdings Inc.",
"Navios Maritime Partners LP",
"Navistar International Corporation",
"Nebula Acquisition Corporation",
"Neenah",
"Nektar Therapeutics",
"Nelnet",
"Nemaura Medical Inc.",
"NeoGenomics",
"NeoPhotonics Corporation",
"Neogen Corporation",
"Neoleukin Therapeutics",
"Neon Therapeutics",
"Neonode Inc.",
"Neos Therapeutics",
"Neovasc Inc.",
"Nephros",
"Neptune Wellness Solutions Inc.",
"Nesco Holdings",
"Net 1 UEPS Technologies",
"Net Element",
"NetApp",
"NetEase",
"NetScout Systems",
"NetSol Technologies Inc.",
"Netfin Acquisition Corp.",
"Netflix",
"Network-1 Technologies",
"NeuBase Therapeutics",
"Neuberger Berman California Municipal Fund Inc",
"Neuberger Berman High Yield Strategies Fund",
"Neuberger Berman MLP and Energy Income Fund Inc.",
"Neuberger Berman Municipal Fund Inc.",
"Neuberger Berman New York Municipal Fund Inc.",
"Neuberger Berman Real Estate Securities Income Fund",
"Neuralstem",
"NeuroMetrix",
"Neurocrine Biosciences",
"Neuronetics",
"Neurotrope",
"Nevro Corp.",
"New Age Beverages Corporation",
"New America High Income Fund",
"New Concept Energy",
"New England Realty Associates Limited Partnership",
"New Fortress Energy LLC",
"New Frontier Corporation",
"New Germany Fund",
"New Gold Inc.",
"New Home Company Inc. (The)",
"New Ireland Fund",
"New Media Investment Group Inc.",
"New Mountain Finance Corporation",
"New Oriental Education & Technology Group",
"New Providence Acquisition Corp.",
"New Relic",
"New Residential Investment Corp.",
"New Senior Investment Group Inc.",
"New York Community Bancorp",
"New York Mortgage Trust",
"New York Times Company (The)",
"NewJersey Resources Corporation",
"NewLink Genetics Corporation",
"NewMarket Corporation",
"Newater Technology",
"Newell Brands Inc.",
"Newmark Group",
"Newmont Goldcorp Corporation",
"Newpark Resources",
"News Corporation",
"Newtek Business Services Corp.",
"NexPoint Residential Trust",
"NexPoint Strategic Opportunities Fund",
"Nexa Resources S.A.",
"Nexeon Medsystems",
"Nexgen Energy Ltd.",
"Nexstar Media Group",
"NextCure",
"NextDecade Corporation",
"NextEra Energy",
"NextEra Energy Partners",
"NextGen Healthcare",
"NiSource",
"Niagara Mohawk Holdings",
"Nicholas Financial",
"Nicolet Bankshares Inc.",
"Nielsen N.V.",
"Nike",
"Nine Energy Service",
"Niu Technologies",
"Noah Holdings Ltd.",
"Noble Corporation",
"Noble Energy Inc.",
"Noble Midstream Partners LP",
"Nokia Corporation",
"Nomad Foods Limited",
"Nomura Holdings Inc ADR",
"Noodles & Company",
"Norbord Inc.",
"Nordic American Tankers Limited",
"Nordson Corporation",
"Nordstrom",
"Norfolk Southern Corporation",
"Nortech Systems Incorporated",
"North American Construction Group Ltd.",
"North European Oil Royality Trust",
"NorthStar Realty Europe Corp.",
"NorthWestern Corporation",
"Northeast Bank",
"Northern Dynasty Minerals",
"Northern Oil and Gas",
"Northern Technologies International Corporation",
"Northern Trust Corporation",
"Northfield Bancorp",
"Northrim BanCorp Inc",
"Northrop Grumman Corporation",
"Northwest Bancshares",
"Northwest Natural Holding Company",
"Northwest Pipe Company",
"Norwegian Cruise Line Holdings Ltd.",
"Norwood Financial Corp.",
"Nova Lifestyle",
"Nova Measuring Instruments Ltd.",
"NovaBay Pharmaceuticals",
"Novagold Resources Inc.",
"Novan",
"Novanta Inc.",
"Novartis AG",
"Novavax",
"Novelion Therapeutics Inc. ",
"Novo Nordisk A/S",
"NovoCure Limited",
"Novus Therapeutics",
"Nu Skin Enterprises",
"NuCana plc",
"NuStar Logistics",
"NuVasive",
"Nuance Communications",
"Nucor Corporation",
"Nustar Energy L.P.",
"Nutanix",
"Nutrien Ltd.",
"Nuvectra Corporation",
"Nuveen AMT-Free Municipal Credit Income Fund",
"Nuveen AMT-Free Municipal Value Fund",
"Nuveen AMT-Free Quality Municipal Income Fund",
"Nuveen All Cap Energy MLP Opportunities Fund",
"Nuveen Arizona Quality Municipal Income Fund",
"Nuveen California AMT-Free Quality Municipal Income Fund",
"Nuveen California Municipal Value Fund",
"Nuveen California Municipal Value Fund 2",
"Nuveen California Quality Municipal Income Fund",
"Nuveen Connecticut Quality Municipal Income Fund",
"Nuveen Core Equity Alpha Fund",
"Nuveen Credit Opportunities 2022 Target Term Fund",
"Nuveen Credit Strategies Income Fund",
"Nuveen Diversified Dividend and Income Fund",
"Nuveen Dow 30SM Dynamic Overwrite Fund",
"Nuveen Emerging Markets Debt 2022 Target Term Fund",
"Nuveen Energy MLP Total Return Fund",
"Nuveen Enhanced Municipal Value Fund",
"Nuveen Floating Rate Income Fund",
"Nuveen Floating Rate Income Opportuntiy Fund",
"Nuveen Georgia Quality Municipal Income Fund ",
"Nuveen Global High Income Fund",
"Nuveen High Income 2020 Target Term Fund",
"Nuveen High Income 2023 Target Term Fund",
"Nuveen High Income December 2019 Target Term Fund",
"Nuveen High Income November 2021 Target Term Fund",
"Nuveen Insured California Select Tax-Free Income Portfolio",
"Nuveen Insured New York Select Tax-Free Income Portfolio",
"Nuveen Intermediate Duration Municipal Term Fund",
"Nuveen Maryland Quality Municipal Income Fund",
"Nuveen Massachusetts Municipal Income Fund",
"Nuveen Michigan Quality Municipal Income Fund",
"Nuveen Minnesota Quality Municipal Income Fund",
"Nuveen Missouri Quality Municipal Income Fund",
"Nuveen Mortgage Opportunity Term Fund",
"Nuveen Multi-Market Income Fund",
"Nuveen Municipal 2021 Target Term Fund",
"Nuveen Municipal Credit Income Fund",
"Nuveen Municipal High Income Opportunity Fund",
"Nuveen Municipal Income Fund",
"Nuveen NASDAQ 100 Dynamic Overwrite Fund",
"Nuveen New Jersey Municipal Value Fund",
"Nuveen New Jersey Quality Municipal Income Fund",
"Nuveen New York AMT-Free Quality Municipal",
"Nuveen New York Municipal Value Fund",
"Nuveen New York Municipal Value Fund 2",
"Nuveen New York Quality Municipal Income Fund",
"Nuveen North Carolina Quality Municipal Income Fd",
"Nuveen Ohio Quality Municipal Income Fund",
"Nuveen Pennsylvania Municipal Value Fund",
"Nuveen Pennsylvania Quality Municipal Income Fund",
"Nuveen Preferred & Income Opportunities Fund",
"Nuveen Preferred & Income Securities Fund",
"Nuveen Preferred and Income 2022 Term Fund",
"Nuveen Preferred and Income Term Fund",
"Nuveen Quality Municipal Income Fund",
"Nuveen Real Asset Income and Growth Fund",
"Nuveen Real Estate Fund",
"Nuveen S&P 500 Buy-Write Income Fund",
"Nuveen S&P 500 Dynamic Overwrite Fund",
"Nuveen Select Maturities Municipal Fund",
"Nuveen Select Tax Free Income Portfolio",
"Nuveen Select Tax Free Income Portfolio II",
"Nuveen Select Tax Free Income Portfolio III",
"Nuveen Senior Income Fund",
"Nuveen Short Duration Credit Opportunities Fund",
"Nuveen Tax-Advantaged Dividend Growth Fund",
"Nuveen Tax-Advantaged Total Return Strategy Fund",
"Nuveen Taxable Municipal Income Fund",
"Nuveen Texas Quality Municipal Income Fund",
"Nuveen Virginia Quality Municipal Income Fund",
"Nuveenn Intermediate Duration Quality Municipal Term Fund",
"Nuven Mortgage Opportunity Term Fund 2",
"Nuverra Environmental Solutions",
"Nymox Pharmaceutical Corporation",
"O'Reilly Automotive",
"O2Micro International Limited",
"OFG Bancorp",
"OFS Capital Corporation",
"OFS Credit Company",
"OGE Energy Corp",
"OHA Investment Corporation",
"OMNOVA Solutions Inc.",
"ON Semiconductor Corporation",
"ONE Gas",
"ONEOK",
"OP Bancorp",
"ORBCOMM Inc.",
"OSI Systems",
"OTG EXP",
"OUTFRONT Media Inc.",
"Oak Valley Bancorp (CA)",
"Oaktree Acquisition Corp.",
"Oaktree Capital Group",
"Oaktree Specialty Lending Corporation",
"Oaktree Strategic Income Corporation",
"Oasis Midstream Partners LP",
"Oasis Petroleum Inc.",
"Obalon Therapeutics",
"ObsEva SA",
"Obsidian Energy Ltd.",
"Occidental Petroleum Corporation",
"Ocean Bio-Chem",
"Ocean Power Technologies",
"OceanFirst Financial Corp.",
"Oceaneering International",
"Oconee Federal Financial Corp.",
"Ocular Therapeutix",
"Ocwen Financial Corporation",
"Odonate Therapeutics",
"Odyssey Marine Exploration",
"Office Depot",
"Office Properties Income Trust",
"Ohio Valley Banc Corp.",
"Oi S.A.",
"Oil States International",
"Oil-Dri Corporation Of America",
"Okta",
"Old Dominion Freight Line",
"Old Line Bancshares",
"Old National Bancorp",
"Old Point Financial Corporation",
"Old Republic International Corporation",
"Old Second Bancorp",
"Olin Corporation",
"Ollie's Bargain Outlet Holdings",
"Olympic Steel",
"Omega Flex",
"Omega Healthcare Investors",
"Omeros Corporation",
"Omnicell",
"Omnicom Group Inc.",
"On Deck Capital",
"On Track Innovations Ltd",
"OncoCyte Corporation",
"OncoSec Medical Incorporated",
"Oncolytics Biotech Inc.",
"Onconova Therapeutics",
"Oncternal Therapeutics",
"One Liberty Properties",
"One Stop Systems",
"OneMain Holdings",
"OneSmart International Education Group Limited",
"OneSpaWorld Holdings Limited",
"OneSpan Inc.",
"Ooma",
"OpGen",
"Open Text Corporation",
"Opera Limited",
"Opes Acquisition Corp.",
"Opiant Pharmaceuticals",
"Opko Health",
"Oppenheimer Holdings",
"OptiNose",
"Optibase Ltd.",
"Optical Cable Corporation",
"OptimizeRx Corporation",
"OptimumBank Holdings",
"Option Care Health",
"Opus Bank",
"OraSure Technologies",
"Oracle Corporation",
"Oragenics Inc.",
"Oramed Pharmaceuticals Inc.",
"Orange",
"Orchard Therapeutics plc",
"Orchid Island Capital",
"Organigram Holdings Inc.",
"Organogenesis Holdings Inc. ",
"Organovo Holdings",
"Orgenesis Inc.",
"Origin Agritech Limited",
"Origin Bancorp",
"Orion Energy Systems",
"Orion Engineered Carbons S.A",
"Orion Group Holdings",
"Orisun Acquisition Corp.",
"Oritani Financial Corp.",
"Orix Corp Ads",
"Ormat Technologies",
"Orrstown Financial Services Inc",
"OrthoPediatrics Corp.",
"Orthofix Medical Inc. ",
"Oshkosh Corporation",
"Osisko Gold Royalties Ltd",
"Osmotica Pharmaceuticals plc",
"Ossen Innovation Co.",
"Otelco Inc.",
"Otonomy",
"Ottawa Bancorp",
"Otter Tail Corporation",
"Outlook Therapeutics",
"Overseas Shipholding Group",
"Overstock.com",
"Ovid Therapeutics Inc.",
"Owens & Minor",
"Owens Corning Inc",
"Owens-Illinois",
"Owl Rock Capital Corporation",
"Oxbridge Re Holdings Limited",
"Oxford Immunotec Global PLC",
"Oxford Industries",
"Oxford Lane Capital Corp.",
"Oxford Square Capital Corp.",
"P & F Industries",
"P.A.M. Transportation Services",
"PACCAR Inc.",
"PAR Technology Corporation",
"PAVmed Inc.",
"PB Bancorp",
"PBF Energy Inc.",
"PBF Logistics LP",
"PC Connection",
"PC-Tel",
"PCB Bancorp",
"PCI Media",
"PCSB Financial Corporation",
"PDC Energy",
"PDF Solutions",
"PDL BioPharma",
"PDL Community Bancorp",
"PDS Biotechnology Corporation",
"PFSweb",
"PGIM Global High Yield Fund",
"PGIM High Yield Bond Fund",
"PGT Innovations",
"PICO Holdings Inc.",
"PIMCO California Municipal Income Fund",
"PIMCO California Municipal Income Fund III",
"PIMCO Commercial Mortgage Securities Trust",
"PIMCO Dynamic Credit and Mortgage Income Fund",
"PIMCO Dynamic Income Fund",
"PIMCO Energy and Tactical Credit Opportunities Fund",
"PIMCO Income Strategy Fund",
"PIMCO Income Strategy Fund II",
"PIMCO Municipal Income Fund",
"PIMCO Municipal Income Fund III",
"PIMCO New York Municipal Income Fund",
"PIMCO New York Municipal Income Fund III",
"PIMCO Strategic Income Fund",
"PJT Partners Inc.",
"PLDT Inc.",
"PLUS THERAPEUTICS",
"PLx Pharma Inc.",
"PNC Financial Services Group",
"PNM Resources",
"POSCO",
"PPDAI Group Inc.",
"PPG Industries",
"PPL Capital Funding",
"PPL Corporation",
"PPlus Trust",
"PQ Group Holdings Inc.",
"PRA Group",
"PRA Health Sciences",
"PRGX Global",
"PROS Holdings",
"PS Business Parks",
"PT Telekomunikasi Indonesia",
"PTC Inc.",
"PTC Therapeutics",
"PUHUI WEALTH INVESTMENT MANAGEMENT CO.",
"PVH Corp.",
"PacWest Bancorp",
"Pacer Cash Cows Fund of Funds ETF",
"Pacer Emerging Markets Cash Cows 100 ETF",
"Pacer Military Times Best Employers ETF",
"Pacific Biosciences of California",
"Pacific Coast Oil Trust",
"Pacific Drilling S.A.",
"Pacific Ethanol",
"Pacific Gas & Electric Co.",
"Pacific Mercantile Bancorp",
"Pacific Premier Bancorp Inc",
"Pacira BioSciences",
"Packaging Corporation of America",
"PagSeguro Digital Ltd.",
"PagerDuty",
"Palatin Technologies",
"Palo Alto Networks",
"Palomar Holdings",
"Pampa Energia S.A.",
"Pan American Silver Corp.",
"Pangaea Logistics Solutions Ltd.",
"Panhandle Royalty Company",
"Papa John's International",
"Par Pacific Holdings",
"Paramount Gold Nevada Corp.",
"Paramount Group",
"Paratek Pharmaceuticals",
"Pareteum Corporation",
"Paringa Resources Limited",
"Park Aerospace Corp.",
"Park City Group",
"Park Hotels & Resorts Inc.",
"Park National Corporation",
"Park-Ohio Holdings Corp.",
"Parke Bancorp",
"Parker Drilling Company",
"Parker-Hannifin Corporation",
"Parsley Energy",
"Parsons Corporation",
"Partner Communications Company Ltd.",
"PartnerRe Ltd.",
"Party City Holdco Inc.",
"Pathfinder Bancorp",
"Patrick Industries",
"Patriot National Bancorp Inc.",
"Patriot Transportation Holding",
"Pattern Energy Group Inc.",
"Patterson Companies",
"Patterson-UTI Energy",
"PayPal Holdings",
"Paychex",
"Paycom Software",
"Paylocity Holding Corporation",
"Paysign",
"Peabody Energy Corporation",
"Peak Resorts",
"Peapack-Gladstone Financial Corporation",
"Pearson",
"Pebblebrook Hotel Trust",
"Pedevco Corp.",
"PeerStream",
"Pegasystems Inc.",
"Pembina Pipeline Corp.",
"Penn National Gaming",
"Penn Virginia Corporation",
"PennantPark Floating Rate Capital Ltd.",
"PennantPark Investment Corporation",
"Penns Woods Bancorp",
"Pennsylvania Real Estate Investment Trust",
"PennyMac Financial Services",
"PennyMac Mortgage Investment Trust",
"Pensare Acquisition Corp.",
"Penske Automotive Group",
"Pentair plc.",
"Penumbra",
"People's United Financial",
"People's Utah Bancorp",
"Peoples Bancorp Inc.",
"Peoples Bancorp of North Carolina",
"Peoples Financial Services Corp. ",
"Pepsico",
"Perceptron",
"Perficient",
"Performance Food Group Company",
"Performance Shipping Inc.",
"Performant Financial Corporation",
"Perion Network Ltd",
"PerkinElmer",
"PermRock Royalty Trust",
"Perma-Fix Environmental Services",
"Perma-Pipe International Holdings",
"Permian Basin Royalty Trust",
"Permianville Royalty Trust",
"Perrigo Company",
"Personalis",
"Perspecta Inc.",
"PetIQ",
"PetMed Express",
"PetroChina Company Limited",
"Petroleo Brasileiro S.A.- Petrobras",
"Pfenex Inc.",
"Pfizer",
"PhaseBio Pharmaceuticals",
"Phibro Animal Health Corporation",
"Philip Morris International Inc",
"Phillips 66",
"Phillips 66 Partners LP",
"Phio Pharmaceuticals Corp.",
"Phoenix New Media Limited",
"Photronics",
"Phreesia",
"Phunware",
"Physicians Realty Trust",
"Piedmont Lithium Limited",
"Piedmont Office Realty Trust",
"Pier 1 Imports",
"Pieris Pharmaceuticals",
"Pilgrim's Pride Corporation",
"Pimco California Municipal Income Fund II",
"Pimco Corporate & Income Opportunity Fund",
"Pimco Corporate & Income Stategy Fund",
"Pimco Global Stocksplus & Income Fund",
"Pimco High Income Fund",
"Pimco Income Opportunity Fund",
"Pimco Municipal Income Fund II",
"Pimco New York Municipal Income Fund II",
"Pinduoduo Inc.",
"Pingtan Marine Enterprise Ltd.",
"Pinnacle Financial Partners",
"Pinnacle West Capital Corporation",
"Pintec Technology Holdings Limited",
"Pinterest",
"Pioneer Bancorp",
"Pioneer Diversified High Income Trust",
"Pioneer Floating Rate Trust",
"Pioneer High Income Trust",
"Pioneer Municipal High Income Advantage Trust",
"Pioneer Municipal High Income Trust",
"Pioneer Natural Resources Company",
"Pioneer Power Solutions",
"Piper Jaffray Companies",
"Pitney Bowes Inc.",
"Pivotal Acquisition Corp.",
"Pivotal Investment Corporation II",
"Pivotal Software",
"Pixelworks",
"Plains All American Pipeline",
"Plains Group Holdings",
"Planet Fitness",
"Planet Green Holdings Corp",
"Plantronics",
"Platinum Group Metals Ltd.",
"PlayAGS",
"Playa Hotels & Resorts N.V.",
"Plexus Corp.",
"Plug Power",
"Plumas Bancorp",
"Pluralsight",
"Pluristem Therapeutics",
"Plymouth Industrial REIT",
"Pointer Telocation Ltd.",
"Points International",
"Polar Power",
"Polaris Inc.",
"PolarityTE",
"PolyOne Corporation",
"PolyPid Ltd.",
"Polymet Mining Corp.",
"Pool Corporation",
"Pope Resources",
"Popular",
"Portland General Electric Company",
"Portman Ridge Finance Corporation",
"Portola Pharmaceuticals",
"Positive Physicians Holdings",
"Post Holdings",
"Postal Realty Trust",
"Potbelly Corporation",
"PotlatchDeltic Corporation",
"Powell Industries",
"Power Integrations",
"Power REIT",
"Powerbridge Technologies Co.",
"Precipio",
"Precision BioSciences",
"Precision Drilling Corporation",
"Predictive Oncology Inc.",
"Preferred Apartment Communities",
"Preferred Bank",
"Preformed Line Products Company",
"Premier",
"Premier Financial Bancorp",
"Presidio",
"Pressure BioSciences",
"Prestige Consumer Healthcare Inc.",
"Pretium Resources",
"Prevail Therapeutics Inc.",
"PriceSmart",
"PrimeEnergy Resources Corporation",
"Primerica",
"Primo Water Corporation",
"Primoris Services Corporation",
"Principal Contrarian Value Index ETF",
"Principal Financial Group Inc",
"Principal Healthcare Innovators Index ETF",
"Principal International Multi-Factor Core Index ETF",
"Principal Millennials Index ETF",
"Principal Price Setters Index ETF",
"Principal Real Estate Income Fund",
"Principal Shareholder Yield Index ETF",
"Principal Sustainable Momentum Index ETF",
"Principal U.S. Large-Cap Multi-Factor Core Index ETF",
"Principal U.S. Mega-Cap Multi-Factor Index ETF",
"Principal U.S. Small-Cap Multi-Factor Index ETF",
"Principal U.S. Small-MidCap Multi-Factor Core Index ETF",
"Principia Biopharma Inc.",
"Priority Income Fund",
"Priority Technology Holdings",
"Pro-Dex",
"ProAssurance Corporation",
"ProLung",
"ProPetro Holding Corp.",
"ProPhase Labs",
"ProQR Therapeutics N.V.",
"ProShares Equities for Rising Rates ETF",
"ProShares Ultra Nasdaq Biotechnology",
"ProShares UltraPro QQQ",
"ProShares UltraPro Short NASDAQ Biotechnology",
"ProShares UltraPro Short QQQ",
"ProShares UltraShort Nasdaq Biotechnology",
"ProSight Global",
"Procter & Gamble Company (The)",
"Professional Diversity Network",
"Proficient Alpha Acquisition Corp.",
"Profire Energy",
"Progenics Pharmaceuticals Inc.",
"Progress Software Corporation",
"Progressive Corporation (The)",
"Prologis",
"Proofpoint",
"Proshares UltraPro Nasdaq Biotechnology",
"Prospect Capital Corporation",
"Prosperity Bancshares",
"Protagonist Therapeutics",
"Protalix BioTherapeutics",
"Protective Insurance Corporation",
"Proteon Therapeutics",
"Proteostasis Therapeutics",
"Prothena Corporation plc",
"Proto Labs",
"Provention Bio",
"Provident Bancorp",
"Provident Financial Holdings",
"Provident Financial Services",
"Prudential Bancorp",
"Prudential Financial",
"Prudential Public Limited Company",
"Psychemedics Corporation",
"Public Service Enterprise Group Incorporated",
"Public Storage",
"Pulmatrix",
"Pulse Biosciences",
"PulteGroup",
"Puma Biotechnology Inc",
"Pure Acquisition Corp.",
"Pure Cycle Corporation",
"Pure Storage",
"Purple Innovation",
"Putnam Managed Municipal Income Trust",
"Putnam Master Intermediate Income Trust",
"Putnam Municipal Opportunities Trust",
"Putnam Premier Income Trust",
"Puxin Limited",
"Puyi Inc.",
"Pyxis Tankers Inc.",
"Pyxus International",
"Pzena Investment Management Inc",
"Q2 Holdings",
"QAD Inc.",
"QCR Holdings",
"QEP Resources",
"QIWI plc",
"QTS Realty Trust",
"QUALCOMM Incorporated",
"QVC",
"Qiagen N.V.",
"Qorvo",
"Quad Graphics",
"Quaker Chemical Corporation",
"Qualstar Corporation",
"Qualys",
"Quanex Building Products Corporation",
"Quanta Services",
"Quanterix Corporation",
"Quarterhill Inc.",
"Qudian Inc.",
"Quest Diagnostics Incorporated",
"Quest Resource Holding Corporation",
"QuickLogic Corporation",
"Quidel Corporation",
"QuinStreet",
"Quintana Energy Services Inc.",
"Qumu Corporation",
"Quorum Health Corporation",
"Quotient Limited",
"Quotient Technology Inc.",
"Qurate Retail",
"Qutoutiao Inc.",
"Qwest Corporation",
"R.R. Donnelley & Sons Company",
"R1 RCM Inc.",
"RADA Electronic Industries Ltd.",
"RAPT Therapeutics",
"RBB Bancorp",
"RBC Bearings Incorporated",
"RCI Hospitality Holdings",
"RCM Technologies",
"RE/MAX Holdings",
"REGENXBIO Inc.",
"RELX PLC",
"RENN Fund",
"REV Group",
"REX American Resources Corporation",
"RF Industries",
"RGC Resources Inc.",
"RH",
"RISE Education Cayman Ltd",
"RLI Corp.",
"RLJ Lodging Trust",
"RMG Acquisition Corp.",
"RMR Real Estate Income Fund",
"RPC",
"RPM International Inc.",
"RPT Realty",
"RTI Surgical Holdings",
"RTW Retailwinds",
"RYB Education",
"Ra Medical Systems",
"Ra Pharmaceuticals",
"RadNet",
"Radcom Ltd.",
"Radian Group Inc.",
"Radiant Logistics",
"Radius Health",
"Radware Ltd.",
"Rafael Holdings",
"Ralph Lauren Corporation",
"Ramaco Resources",
"Rambus",
"Rand Capital Corporation",
"Randolph Bancorp",
"Range Resources Corporation",
"Ranger Energy Services",
"Ranpak Holdings Corp",
"Rapid7",
"Rattler Midstream LP",
"Rave Restaurant Group",
"Raven Industries",
"Raymond James Financial",
"Rayonier Advanced Materials Inc.",
"Rayonier Inc.",
"Raytheon Company",
"ReTo Eco-Solutions",
"ReWalk Robotics Ltd.",
"Reading International Inc",
"Ready Capital Corporation",
"RealNetworks",
"RealPage",
"Reality Shares Nasdaq NexGen Economy China ETF",
"Reality Shares Nasdaq NextGen Economy ETF",
"Realogy Holdings Corp.",
"Realty Income Corporation",
"Reata Pharmaceuticals",
"Reaves Utility Income Fund",
"Recon Technology",
"Recro Pharma",
"Red Lion Hotels Corporation",
"Red River Bancshares",
"Red Robin Gourmet Burgers",
"Red Rock Resorts",
"Red Violet",
"Redfin Corporation",
"Redhill Biopharma Ltd.",
"Redwood Trust",
"Reebonz Holding Limited",
"Reeds",
"Regal Beloit Corporation",
"Regalwood Global Energy Ltd.",
"Regency Centers Corporation",
"Regeneron Pharmaceuticals",
"Regional Health Properties",
"Regional Management Corp.",
"Regions Financial Corporation",
"Regis Corporation",
"Regulus Therapeutics Inc.",
"Reinsurance Group of America",
"Rekor Systems",
"Reliance Steel & Aluminum Co.",
"Reliant Bancorp",
"Reliv' International",
"Remark Holdings",
"RenaissanceRe Holdings Ltd.",
"Renasant Corporation",
"Renesola Ltd.",
"Renewable Energy Group",
"Renren Inc.",
"Rent-A-Center Inc.",
"Repay Holdings Corporation",
"Replay Acquisition Corp.",
"Repligen Corporation",
"Replimune Group",
"Republic Bancorp",
"Republic First Bancorp",
"Republic Services",
"ResMed Inc.",
"Research Frontiers Incorporated",
"Resideo Technologies",
"Resolute Forest Products Inc.",
"Resonant Inc.",
"Resources Connection",
"Restaurant Brands International Inc.",
"Restoration Robotics",
"Retail Opportunity Investments Corp.",
"Retail Properties of America",
"Retail Value Inc.",
"Retractable Technologies",
"Retrophin",
"Revance Therapeutics",
"Reven Housing REIT",
"Revlon",
"Revolution Lighting Technologies",
"Revolve Group",
"Rexahn Pharmaceuticals",
"Rexford Industrial Realty",
"Rexnord Corporation",
"Rhinebeck Bancorp",
"Rhythm Pharmaceuticals",
"Ribbon Communications Inc. ",
"RiceBran Technologies",
"Richardson Electronics",
"Richmond Mutual Bancorporation",
"RigNet",
"Rigel Pharmaceuticals",
"Rimini Street",
"Ring Energy",
"Ringcentral",
"Rio Tinto Plc",
"Riot Blockchain",
"Ritchie Bros. Auctioneers Incorporated",
"Rite Aid Corporation",
"Ritter Pharmaceuticals",
"RiverNorth Managed Duration Municipal Income Fund",
"RiverNorth Marketplace Lending Corporation",
"RiverNorth Opportunistic Municipal Income Fund",
"RiverNorth Opportunities Fund",
"RiverNorth/DoubleLine Strategic Opportunity Fund",
"Riverview Bancorp Inc",
"Riverview Financial Corporation",
"Roadrunner Transportation Systems",
"Roan Resources",
"Robert Half International Inc.",
"Rocket Pharmaceuticals",
"Rockwell Automation",
"Rockwell Medical",
"Rocky Brands",
"Rocky Mountain Chocolate Factory",
"Rogers Communication",
"Rogers Corporation",
"Roku",
"Rollins",
"Roper Technologies",
"Rosehill Resources Inc.",
"Rosetta Stone",
"Ross Stores",
"Royal Bank Of Canada",
"Royal Bank Scotland plc (The)",
"Royal Caribbean Cruises Ltd.",
"Royal Dutch Shell PLC",
"Royal Gold",
"Royce Global Value Trust",
"Royce Micro-Cap Trust",
"Royce Value Trust",
"Rubicon Technology",
"Rubius Therapeutics",
"Rudolph Technologies",
"Ruhnn Holding Limited",
"RumbleOn",
"Rush Enterprises",
"Ruth's Hospitality Group",
"Ryanair Holdings plc",
"Ryder System",
"Ryerson Holding Corporation",
"Ryman Hospitality Properties",
"S&P Global Inc.",
"S&T Bancorp",
"S&W Seed Company",
"SAExploration Holdings",
"SAP SE",
"SB Financial Group",
"SB One Bancorp",
"SBA Communications Corporation",
"SC Health Corporation",
"SCIENCE APPLICATIONS INTERNATIONAL CORPORATION",
"SCWorx Corp.",
"SCYNEXIS",
"SEACOR Holdings",
"SEACOR Marine Holdings Inc.",
"SEI Investments Company",
"SELLAS Life Sciences Group",
"SG Blocks",
"SGOCO Group",
"SI-BONE",
"SIFCO Industries",
"SIGA Technologies Inc.",
"SINOPEC Shangai Petrochemical Company",
"SIRVA",
"SITE Centers Corp.",
"SITO Mobile",
"SJW Group",
"SK Telecom Co.",
"SL Green Realty Corp",
"SLM Corporation",
"SM Energy Company",
"SMART Global Holdings",
"SMTC Corporation",
"SORL Auto Parts",
"SP Plus Corporation",
"SPAR Group",
"SPDR Dorsey Wright Fixed Income Allocation ETF",
"SPI Energy Co.",
"SPS Commerce",
"SPX Corporation",
"SPX FLOW",
"SRAX",
"SRC Energy Inc.",
"SS&C Technologies Holdings",
"SSR Mining Inc.",
"STAAR Surgical Company",
"STARWOOD PROPERTY TRUST",
"STERIS plc",
"STMicroelectronics N.V.",
"STORE Capital Corporation",
"STRATS Trust",
"SVB Financial Group",
"SVMK Inc.",
"Sabine Royalty Trust",
"Sabra Health Care REIT",
"Sabre Corporation",
"Sachem Capital Corp.",
"Safe Bulkers",
"Safe-T Group Ltd.",
"Safeguard Scientifics",
"Safehold Inc.",
"Safety Insurance Group",
"Saga Communications",
"Sage Therapeutics",
"Saia",
"SailPoint Technologies Holdings",
"Salarius Pharmaceuticals",
"Salem Media Group",
"Salesforce.com Inc",
"Salient Midstream & MLP Fund",
"Salisbury Bancorp",
"Sally Beauty Holdings",
"San Juan Basin Royalty Trust",
"Sanchez Midstream Partners LP",
"SandRidge Energy",
"SandRidge Mississippian Trust I",
"SandRidge Mississippian Trust II",
"SandRidge Permian Trust",
"Sanderson Farms",
"Sandstorm Gold Ltd",
"Sandy Spring Bancorp",
"Sangamo Therapeutics",
"Sanmina Corporation",
"Sanofi",
"Santander Consumer USA Holdings Inc.",
"Sapiens International Corporation N.V.",
"Saratoga Investment Corp",
"Sarepta Therapeutics",
"Sasol Ltd.",
"Saul Centers",
"Savara",
"ScanSource",
"Schlumberger N.V.",
"Schmitt Industries",
"Schneider National",
"Schnitzer Steel Industries",
"Scholar Rock Holding Corporation",
"Scholastic Corporation",
"Schultze Special Purpose Acquisition Corp.",
"Schweitzer-Mauduit International",
"SciPlay Corporation",
"Scientific Games Corp",
"Scorpio Bulkers Inc.",
"Scorpio Tankers Inc.",
"Scotts Miracle-Gro Company (The)",
"Scudder Municiple Income Trust",
"Scudder Strategic Municiple Income Trust",
"Scully Royalty Ltd.",
"Sea Limited",
"SeaChange International",
"SeaSpine Holdings Corporation",
"SeaWorld Entertainment",
"Seaboard Corporation",
"Seabridge Gold",
"Seacoast Banking Corporation of Florida",
"Seadrill Limited",
"Seagate Technology PLC",
"Sealed Air Corporation",
"Seanergy Maritime Holdings Corp",
"Sears Hometown and Outlet Stores",
"Seaspan Corporation",
"Seattle Genetics",
"Second Sight Medical Products",
"Secoo Holding Limited",
"SecureWorks Corp.",
"Security National Financial Corporation",
"Seelos Therapeutics",
"Select Asset Inc.",
"Select Bancorp",
"Select Energy Services",
"Select Interior Concepts",
"Select Medical Holdings Corporation",
"Selecta Biosciences",
"Selective Insurance Group",
"Semgroup Corporation",
"SemiLEDS Corporation",
"Sempra Energy",
"Semtech Corporation",
"Seneca Foods Corp.",
"SenesTech",
"Senior Housing Properties Trust",
"Senmiao Technology Limited",
"Sensata Technologies Holding plc",
"Senseonics Holdings",
"Sensient Technologies Corporation",
"Sensus Healthcare",
"Sentinel Energy Services Inc.",
"Sequans Communications S.A.",
"Sequential Brands Group",
"Seres Therapeutics",
"Seritage Growth Properties",
"Service Corporation International",
"ServiceMaster Global Holdings",
"ServiceNow",
"ServiceSource International",
"ServisFirst Bancshares",
"Servotronics",
"Sesen Bio",
"Severn Bancorp Inc",
"Shake Shack",
"SharpSpring",
"Sharps Compliance Corp.",
"Shaw Communications Inc.",
"Shell Midstream Partners",
"Shenandoah Telecommunications Co",
"Sherwin-Williams Company (The)",
"ShiftPixy",
"Shiloh Industries",
"Shimmick Construction Company",
"Shineco",
"Shinhan Financial Group Co Ltd",
"Ship Finance International Limited",
"ShockWave Medical",
"Shoe Carnival",
"Shopify Inc.",
"Shore Bancshares Inc",
"ShotSpotter",
"Shutterfly",
"Shutterstock",
"SiNtx Technologies",
"Sibanye Gold Limited",
"Siebert Financial Corp.",
"Sienna Biopharmaceuticals",
"Sientra",
"Sierra Bancorp",
"Sierra Metals Inc.",
"Sierra Oncology",
"Sierra Wireless",
"Sify Technologies Limited",
"Sigma Labs",
"SigmaTron International",
"Signature Bank",
"Signet Jewelers Limited",
"Silgan Holdings Inc.",
"Silicom Ltd",
"Silicon Laboratories",
"Silicon Motion Technology Corporation",
"Silk Road Medical",
"Silver Spike Acquisition Corp.",
"SilverBow Resorces",
"SilverCrest Metals Inc.",
"SilverSun Technologies",
"Silvercorp Metals Inc.",
"Silvercrest Asset Management Group Inc.",
"Simmons First National Corporation",
"Simon Property Group",
"Simpson Manufacturing Company",
"Simulations Plus",
"Sina Corporation",
"Sinclair Broadcast Group",
"Sino-Global Shipping America",
"Sinovac Biotech",
"Sirius International Insurance Group",
"Sirius XM Holdings Inc.",
"SiteOne Landscape Supply",
"Six Flags Entertainment Corporation New",
"Skechers U.S.A.",
"Sky Solar Holdings",
"SkyWest",
"Skyline Champion Corporation",
"Skyworks Solutions",
"Slack Technologies",
"Sleep Number Corporation",
"Smart Sand",
"SmartFinancial",
"Smartsheet Inc.",
"SmileDirectClub",
"Smith & Nephew SNATS",
"Smith Micro Software",
"Snap Inc.",
"Snap-On Incorporated",
"So-Young International Inc.",
"SoFi Gig Economy ETF",
"Social Capital Hedosophia Holdings Corp.",
"Sociedad Quimica y Minera S.A.",
"Socket Mobile",
"Sogou Inc.",
"Sohu.com Limited ",
"Sol-Gel Technologies Ltd.",
"Solar Capital Ltd.",
"Solar Senior Capital Ltd.",
"SolarEdge Technologies",
"SolarWinds Corporation",
"Solaris Oilfield Infrastructure",
"Soleno Therapeutics",
"Solid Biosciences Inc.",
"Soligenix",
"Solitario Zinc Corp.",
"Soliton",
"Sonic Automotive",
"Sonim Technologies",
"Sonoco Products Company",
"Sonoma Pharmaceuticals",
"Sonos",
"Sony Corp Ord",
"Sophiris Bio",
"Sorrento Therapeutics",
"Sotheby's",
"Sotherly Hotels Inc.",
"Sound Financial Bancorp",
"Source Capital",
"South Jersey Industries",
"South Mountain Merger Corp.",
"South Plains Financial",
"South State Corporation",
"Southern California Edison Company",
"Southern Company (The)",
"Southern Copper Corporation",
"Southern First Bancshares",
"Southern Missouri Bancorp",
"Southern National Bancorp of Virginia",
"Southside Bancshares",
"Southwest Airlines Company",
"Southwest Gas Holdings",
"Southwest Georgia Financial Corporation",
"Southwestern Energy Company",
"Spark Energy",
"Spark Networks",
"Spark Therapeutics",
"Spartan Energy Acquisition Corp",
"Spartan Motors",
"SpartanNash Company",
"Special Opportunities Fund Inc.",
"Spectrum Brands Holdings",
"Spectrum Pharmaceuticals",
"Speedway Motorsports",
"Spero Therapeutics",
"Sphere 3D Corp.",
"Spherix Incorporated",
"Spire Inc.",
"Spirit Aerosystems Holdings",
"Spirit Airlines",
"Spirit MTA REIT",
"Spirit Realty Capital",
"Spirit of Texas Bancshares",
"Splunk Inc.",
"Spok Holdings",
"Sportsman's Warehouse Holdings",
"Spotify Technology S.A.",
"Sprague Resources LP",
"Spring Bank Pharmaceuticals",
"SpringWorks Therapeutics",
"Sprint Corporation",
"Sprott Focus Trust",
"Sprouts Farmers Market",
"Square",
"St. Joe Company (The)",
"Stabilis Energy",
"Staffing 360 Solutions",
"Stag Industrial",
"Stage Stores",
"Stamps.com Inc.",
"Standard AVB Financial Corp.",
"Standard Diversified Inc.",
"Standard Motor Products",
"Standex International Corporation",
"Stanley Black & Decker",
"Stantec Inc",
"Star Bulk Carriers Corp.",
"Star Group",
"StarTek",
"Starbucks Corporation",
"State Auto Financial Corporation",
"State Street Corporation",
"Stealth BioTherapeutics Corp.",
"StealthGas",
"Steel Connect",
"Steel Dynamics",
"Steel Partners Holdings LP",
"Steelcase Inc.",
"Stein Mart",
"Stellus Capital Investment Corporation",
"Stemline Therapeutics",
"Stepan Company",
"Stereotaxis",
"Stericycle",
"Sterling Bancorp",
"Sterling Construction Company Inc",
"Steven Madden",
"Stewardship Financial Corp",
"Stewart Information Services Corporation",
"Stifel Financial Corporation",
"Stitch Fix",
"Stock Yards Bancorp",
"Stoke Therapeutics",
"Stone Harbor Emerging Markets Income Fund",
"Stone Harbor Emerging Markets Total Income Fund",
"StoneCastle Financial Corp",
"StoneCo Ltd.",
"StoneMor Partners L.P.",
"Stoneridge",
"Strata Skin Sciences",
"Stratasys",
"Strategic Education",
"Strategy Shares Nasdaq 7HANDL Index ETF",
"Strattec Security Corporation",
"Stratus Properties Inc.",
"Streamline Health Solutions",
"Strongbridge Biopharma plc",
"Stryker Corporation",
"Studio City International Holdings Limited",
"Sturm",
"Suburban Propane Partners",
"Sumitomo Mitsui Financial Group Inc",
"Summer Infant",
"Summit Financial Group",
"Summit Hotel Properties",
"Summit Materials",
"Summit Midstream Partners",
"Summit State Bank",
"Summit Therapeutics plc",
"Summit Wireless Technologies",
"Sun Communities",
"Sun Life Financial Inc.",
"SunCoke Energy",
"SunLink Health Systems",
"SunOpta",
"SunPower Corporation",
"SunTrust Banks",
"Suncor Energy Inc.",
"Sundance Energy Australia Limited",
"Sundial Growers Inc.",
"Sunesis Pharmaceuticals",
"Sunlands Technology Group",
"Sunnova Energy International Inc.",
"Sunoco LP",
"Sunrun Inc.",
"Sunstone Hotel Investors",
"Sunworks",
"Super League Gaming",
"SuperCom",
"Superconductor Technologies Inc.",
"Superior Drilling Products",
"Superior Energy Services",
"Superior Group of Companies",
"Superior Industries International",
"Supernus Pharmaceuticals",
"Surface Oncology",
"Surgery Partners",
"Surmodics",
"Sutro Biopharma",
"Sutter Rock Capital Corp.",
"Suzano S.A.",
"Swiss Helvetia Fund",
"Switch",
"Switchback Energy Acquisition Corporation",
"Sykes Enterprises",
"Symantec Corporation",
"Synacor",
"Synalloy Corporation",
"Synaptics Incorporated",
"Synchronoss Technologies",
"Synchrony Financial",
"Syndax Pharmaceuticals",
"Syneos Health",
"Synlogic",
"Synnex Corporation",
"Synopsys",
"Synovus Financial Corp.",
"Synthesis Energy Systems",
"Synthetic Biologics",
"Synthetic Fixed-Income Securities",
"Synthorx",
"Sypris Solutions",
"Syros Pharmaceuticals",
"Sysco Corporation",
"Systemax Inc.",
"T-Mobile US",
"T. Rowe Price Group",
"T2 Biosystems",
"TAL Education Group",
"TAT Technologies Ltd.",
"TC Energy Corporation",
"TC PipeLines",
"TCF Financial Corporation",
"TCG BDC",
"TCR2 Therapeutics Inc.",
"TCW Strategic Income Fund",
"TD Ameritrade Holding Corporation",
"TDH Holdings",
"TE Connectivity Ltd.",
"TEGNA Inc.",
"TELUS Corporation",
"TESSCO Technologies Incorporated",
"TFS Financial Corporation",
"TG Therapeutics",
"THL Credit",
"THL Credit Senior Loan Fund",
"TIM Participacoes S.A.",
"TJX Companies",
"TKK Symphony Acquisition Corporation",
"TMSR Holding Company Limited",
"TOP Ships Inc.",
"TORM plc",
"TPG Pace Holdings Corp.",
"TPG RE Finance Trust",
"TPG Specialty Lending",
"TPI Composites",
"TRACON Pharmaceuticals",
"TRI Pointe Group",
"TSR",
"TTEC Holdings",
"TTM Technologies",
"Tabula Rasa HealthCare",
"Tactile Systems Technology",
"Tailored Brands",
"Taitron Components Incorporated",
"Taiwan Fund",
"Taiwan Liposome Company",
"Taiwan Semiconductor Manufacturing Company Ltd.",
"Take-Two Interactive Software",
"Takeda Pharmaceutical Company Limited",
"Takung Art Co.",
"Talend S.A.",
"Tallgrass Energy",
"Talos Energy",
"Tandem Diabetes Care",
"Tandy Leather Factory",
"Tanger Factory Outlet Centers",
"Tantech Holdings Ltd.",
"Tanzanian Gold Corporation",
"Taoping Inc.",
"Tapestry",
"Tarena International",
"Targa Resources",
"Targa Resources Partners LP",
"Target Corporation",
"Target Hospitality Corp.",
"Taro Pharmaceutical Industries Ltd.",
"Taronis Technologies",
"Taseko Mines Limited",
"Tata Motors Ltd",
"Taubman Centers",
"Taylor Devices",
"Taylor Morrison Home Corporation",
"Team",
"Tech Data Corporation",
"TechTarget",
"Technical Communications Corporation",
"TechnipFMC plc",
"Teck Resources Ltd",
"Tecnoglass Inc.",
"Tecogen Inc.",
"Tectonic Financial",
"Teekay Corporation",
"Teekay LNG Partners L.P.",
"Teekay Offshore Partners L.P.",
"Teekay Tankers Ltd.",
"Tejon Ranch Co",
"Tekla Healthcare Investors",
"Tekla Healthcare Opportunies Fund",
"Tekla Life Sciences Investors",
"Tekla World Healthcare Fund",
"Teladoc Health",
"Telaria",
"Telecom Argentina Stet - France Telecom S.A.",
"Teledyne Technologies Incorporated",
"Teleflex Incorporated",
"Telefonica Brasil S.A.",
"Telefonica SA",
"Telenav",
"Telephone and Data Systems",
"Teligent",
"Tellurian Inc.",
"Templeton Dragon Fund",
"Templeton Emerging Markets Fund",
"Templeton Emerging Markets Income Fund",
"Templeton Global Income Fund",
"Tempur Sealy International",
"Tenable Holdings",
"Tenaris S.A.",
"Tenax Therapeutics",
"Tencent Music Entertainment Group",
"Tenet Healthcare Corporation",
"Tengasco",
"Tennant Company",
"Tenneco Inc.",
"Tennessee Valley Authority",
"Tenzing Acquisition Corp.",
"Teradata Corporation",
"Teradyne",
"Terex Corporation",
"Ternium S.A.",
"TerraForm Power",
"Terreno Realty Corporation",
"Territorial Bancorp Inc.",
"Tesla",
"Tetra Tech",
"Tetra Technologies",
"Tetraphase Pharmaceuticals",
"Teva Pharmaceutical Industries Limited",
"Texas Capital Bancshares",
"Texas Instruments Incorporated",
"Texas Pacific Land Trust",
"Texas Roadhouse",
"Textainer Group Holdings Limited",
"Textron Inc.",
"The AES Corporation",
"The Alkaline Water Company Inc.",
"The Andersons",
"The Bancorp",
"The Bank of Princeton",
"The Blackstone Group Inc.",
"The Carlyle Group L.P.",
"The Central and Eastern Europe Fund",
"The Charles Schwab Corporation",
"The Cheesecake Factory Incorporated",
"The Chefs' Warehouse",
"The Community Financial Corporation",
"The Cooper Companies",
"The Descartes Systems Group Inc.",
"The Dixie Group",
"The Ensign Group",
"The ExOne Company",
"The First Bancshares",
"The First of Long Island Corporation",
"The GDL Fund",
"The Gabelli Dividend & Income Trust",
"The Gabelli Global Utility and Income Trust",
"The Gabelli Go Anywhere Trust",
"The Gabelli Healthcare & Wellness Trust",
"The Goodyear Tire & Rubber Company",
"The Habit Restaurants",
"The Hackett Group",
"The Hain Celestial Group",
"The Hanover Insurance Group",
"The Herzfeld Caribbean Basin Fund",
"The Intergroup Corporation",
"The Joint Corp.",
"The Kraft Heinz Company",
"The Long-Term Care ETF",
"The Lovesac Company",
"The Madison Square Garden Company",
"The Medicines Company",
"The Meet Group",
"The Michaels Companies",
"The Middleby Corporation",
"The ONE Group Hospitality",
"The Obesity ETF",
"The Organics ETF",
"The Peck Company Holdings",
"The Providence Service Corporation",
"The RMR Group Inc.",
"The RealReal",
"The Rubicon Project",
"The Simply Good Foods Company",
"The Stars Group Inc.",
"The Trade Desk",
"The Travelers Companies",
"The Vivaldi Opportunities Fund",
"The York Water Company",
"The9 Limited",
"TherapeuticsMD",
"Therapix Biosciences Ltd.",
"Theravance Biopharma",
"Thermo Fisher Scientific Inc",
"Thermon Group Holdings",
"Third Point Reinsurance Ltd.",
"Thomson Reuters Corp",
"Thor Industries",
"Thunder Bridge Acquisition II",
"TiVo Corporation",
"Tiberius Acquisition Corporation",
"Tidewater Inc.",
"Tiffany & Co.",
"Tile Shop Hldgs",
"Tilly's",
"Tilray",
"Timberland Bancorp",
"Timken Company (The)",
"TimkenSteel Corporation",
"Tiptree Inc.",
"Titan International",
"Titan Machinery Inc.",
"Titan Medical Inc.",
"Titan Pharmaceuticals",
"Tivity Health",
"Tiziana Life Sciences plc",
"Tocagen Inc.",
"Toll Brothers",
"Tompkins Financial Corporation",
"Tonix Pharmaceuticals Holding Corp.",
"Tootsie Roll Industries",
"TopBuild Corp.",
"Torchlight Energy Resources",
"Toro Company (The)",
"Toronto Dominion Bank (The)",
"Tortoise Acquisition Corp.",
"Tortoise Energy Independence Fund",
"Tortoise Energy Infrastructure Corporation",
"Tortoise Essential Assets Income Term Fund",
"Tortoise Midstream Energy Fund",
"Tortoise Pipeline & Energy Fund",
"Tortoise Power and Energy Infrastructure Fund",
"Total S.A.",
"Total System Services",
"Tottenham Acquisition I Limited",
"ToughBuilt Industries",
"Tower International",
"Tower Semiconductor Ltd.",
"Town Sports International Holdings",
"Towne Bank",
"Townsquare Media",
"Toyota Motor Corp Ltd Ord",
"Tractor Supply Company",
"Tradeweb Markets Inc.",
"Trans World Entertainment Corp.",
"TransAct Technologies Incorporated",
"TransAlta Corporation",
"TransEnterix",
"TransGlobe Energy Corporation",
"TransMedics Group",
"TransUnion",
"Transatlantic Petroleum Ltd",
"Transcat",
"Transcontinental Realty Investors",
"Transdigm Group Incorporated",
"Translate Bio",
"Transocean Ltd.",
"Transportadora De Gas Sa Ord B",
"TravelCenters of America Inc. ",
"Travelzoo",
"Trecora Resources",
"Tredegar Corporation",
"Treehouse Foods",
"Tremont Mortgage Trust",
"Trevena",
"Trevi Therapeutics",
"Trex Company",
"Tri Continental Corporation",
"TriCo Bancshares",
"TriMas Corporation",
"TriNet Group",
"TriState Capital Holdings",
"Tribune Media Company",
"Tribune Publishing Company",
"Tricida",
"Trident Acquisitions Corp.",
"Trillium Therapeutics Inc.",
"Trilogy Metals Inc.",
"Trimble Inc.",
"Trine Acquisition Corp.",
"Trinity Biotech plc",
"Trinity Industries",
"Trinity Merger Corp.",
"Trinity Place Holdings Inc.",
"Trinseo S.A.",
"Trio-Tech International",
"TripAdvisor",
"Triple-S Management Corporation",
"TriplePoint Venture Growth BDC Corp.",
"Triton International Limited",
"Triumph Bancorp",
"Triumph Group",
"Tronox Holdings plc",
"TrovaGene",
"TrueBlue",
"TrueCar",
"Trupanion",
"TrustCo Bank Corp NY",
"Trustmark Corporation",
"Tsakos Energy Navigation Ltd",
"TuanChe Limited",
"Tucows Inc.",
"Tuesday Morning Corp.",
"Tufin Software Technologies Ltd.",
"Tuniu Corporation",
"Tupperware Brands Corporation",
"Turkcell Iletisim Hizmetleri AS",
"Turning Point Brands",
"Turning Point Therapeutics",
"Turquoise Hill Resources Ltd.",
"Turtle Beach Corporation",
"Tuscan Holdings Corp.",
"Tuscan Holdings Corp. II",
"Tutor Perini Corporation",
"Twelve Seas Investment Company",
"Twilio Inc.",
"Twin Disc",
"Twin River Worldwide Holdings",
"Twist Bioscience Corporation",
"Twitter",
"Two Harbors Investments Corp",
"Two River Bancorp",
"Tyler Technologies",
"Tyme Technologies",
"Tyson Foods",
"U S Concrete",
"U.S. Auto Parts Network",
"U.S. Bancorp",
"U.S. Energy Corp.",
"U.S. Global Investors",
"U.S. Gold Corp.",
"U.S. Physical Therapy",
"U.S. Silica Holdings",
"U.S. Well Services",
"U.S. Xpress Enterprises",
"UBS AG",
"UDR",
"UFP Technologies",
"UGI Corporation",
"UMB Financial Corporation",
"UMH Properties",
"UNITIL Corporation",
"UNIVERSAL INSURANCE HOLDINGS INC",
"UP Fintech China-U.S. Internet Titans ETF",
"UP Fintech Holding Limited",
"US Ecology",
"US Foods Holding Corp.",
"USA Compression Partners",
"USA Technologies",
"USA Truck",
"USANA Health Sciences",
"USD Partners LP",
"USLIFE Income Fund",
"UTStarcom Holdings Corp",
"Uber Technologies",
"Ubiquiti Inc.",
"Ulta Beauty",
"Ultra Clean Holdings",
"Ultragenyx Pharmaceutical Inc.",
"Ultralife Corporation",
"Ultrapar Participacoes S.A.",
"Umpqua Holdings Corporation",
"Under Armour",
"Unico American Corporation",
"Unifi",
"Unifirst Corporation",
"Unilever NV",
"Unilever PLC",
"Union Bankshares",
"Union Pacific Corporation",
"Unique Fabricating",
"Unisys Corporation",
"Unit Corporation",
"United Airlines Holdings",
"United Bancorp",
"United Bancshares",
"United Bankshares",
"United Community Banks",
"United Community Financial Corp.",
"United Financial Bancorp",
"United Fire Group",
"United Insurance Holdings Corp.",
"United Microelectronics Corporation",
"United Natural Foods",
"United Parcel Service",
"United Rentals",
"United Security Bancshares",
"United States Antimony Corporation",
"United States Cellular Corporation",
"United States Lime & Minerals",
"United States Steel Corporation",
"United Technologies Corporation",
"United Therapeutics Corporation",
"United-Guardian",
"UnitedHealth Group Incorporated",
"Uniti Group Inc.",
"Unity Bancorp",
"Unity Biotechnology",
"Univar Solutions Inc.",
"Universal Corporation",
"Universal Display Corporation",
"Universal Electronics Inc.",
"Universal Forest Products",
"Universal Health Realty Income Trust",
"Universal Health Services",
"Universal Logistics Holdings",
"Universal Security Instruments",
"Universal Stainless & Alloy Products",
"Universal Technical Institute Inc",
"Univest Financial Corporation",
"Unum Group",
"Unum Therapeutics Inc.",
"Upland Software",
"Upwork Inc.",
"Ur Energy Inc",
"Uranium Energy Corp.",
"Urban Edge Properties",
"Urban One",
"Urban Outfitters",
"Urban Tea",
"UroGen Pharma Ltd.",
"Urovant Sciences Ltd.",
"Urstadt Biddle Properties Inc.",
"Usio",
"Utah Medical Products",
"Uxin Limited",
"V.F. Corporation",
"VAALCO Energy",
"VALE S.A.",
"VBI Vaccines",
"VEON Ltd.",
"VEREIT Inc.",
"VICI Properties Inc.",
"VIVUS",
"VOC Energy Trust",
"VOXX International Corporation",
"VSE Corporation",
"Vaccinex",
"Vail Resorts",
"Valaris plc",
"Valeritas Holdings",
"Valero Energy Corporation",
"Valhi",
"Validea Market Legends ETF",
"Valley National Bancorp",
"Valmont Industries",
"Valtech SE",
"Value Line",
"Valvoline Inc.",
"VanEck Vectors Biotech ETF",
"VanEck Vectors Pharmaceutical ETF",
"Vanda Pharmaceuticals Inc.",
"Vanguard Emerging Markets Government Bond ETF",
"Vanguard Global ex-U.S. Real Estate ETF",
"Vanguard Intermediate-Term Corporate Bond ETF",
"Vanguard Intermediate-Term Treasury ETF",
"Vanguard International Dividend Appreciation ETF",
"Vanguard International High Dividend Yield ETF",
"Vanguard Long-Term Corporate Bond ETF",
"Vanguard Long-Treasury ETF",
"Vanguard Mortgage-Backed Securities ETF",
"Vanguard Russell 1000 ETF",
"Vanguard Russell 1000 Growth ETF",
"Vanguard Russell 1000 Value ETF",
"Vanguard Russell 2000 ETF",
"Vanguard Russell 2000 Growth ETF",
"Vanguard Russell 2000 Value ETF",
"Vanguard Russell 3000 ETF",
"Vanguard Short-Term Corporate Bond ETF",
"Vanguard Short-Term Inflation-Protected Securities Index Fund",
"Vanguard Short-Term Treasury ETF",
"Vanguard Total Bond Market ETF",
"Vanguard Total Corporate Bond ETF",
"Vanguard Total International Bond ETF",
"Vanguard Total International Stock ETF",
"Vanguard Total World Bond ETF",
"Vapotherm",
"Varex Imaging Corporation",
"Varian Medical Systems",
"Varonis Systems",
"Vascular Biogenics Ltd.",
"Vaxart",
"VectoIQ Acquisition Corp.",
"Vector Group Ltd.",
"Vectrus",
"Vedanta Limited",
"Veeco Instruments Inc.",
"Veeva Systems Inc.",
"Venator Materials PLC",
"Ventas",
"Veoneer",
"Vera Bradley",
"Veracyte",
"Verastem",
"Verb Technology Company",
"VeriSign",
"Vericel Corporation",
"Vericity",
"Verint Systems Inc.",
"Verisk Analytics",
"Veritex Holdings",
"Veritiv Corporation",
"Veritone",
"Verizon Communications Inc.",
"Vermilion Energy Inc.",
"Vermillion",
"Verona Pharma plc",
"Verra Mobility Corporation",
"Verrica Pharmaceuticals Inc.",
"Verso Corporation",
"Versum Materials",
"Vertex Energy",
"Vertex Pharmaceuticals Incorporated",
"Vertical Capital Income Fund",
"Veru Inc.",
"ViaSat",
"Viacom Inc.",
"Viad Corp",
"Viamet Pharmaceuticals Corp.",
"Viavi Solutions Inc.",
"Vicor Corporation",
"Victory Capital Holdings",
"VictoryShares Developed Enhanced Volatility Wtd ETF",
"VictoryShares Dividend Accelerator ETF",
"VictoryShares Emerging Market High Div Volatility Wtd ETF",
"VictoryShares Emerging Market Volatility Wtd ETF",
"VictoryShares International High Div Volatility Wtd ETF",
"VictoryShares International Volatility Wtd ETF",
"VictoryShares US 500 Enhanced Volatility Wtd ETF",
"VictoryShares US 500 Volatility Wtd ETF",
"VictoryShares US Discovery Enhanced Volatility Wtd ETF",
"VictoryShares US EQ Income Enhanced Volatility Wtd ETF",
"VictoryShares US Large Cap High Div Volatility Wtd ETF",
"VictoryShares US Multi-Factor Minimum Volatility ETF",
"VictoryShares US Small Cap High Div Volatility Wtd ETF",
"VictoryShares US Small Cap Volatility Wtd ETF",
"Viemed Healthcare",
"ViewRay",
"Viking Therapeutics",
"Village Bank and Trust Financial Corp.",
"Village Farms International",
"Village Super Market",
"Vince Holding Corp.",
"Viomi Technology Co.",
"Viper Energy Partners LP",
"Vipshop Holdings Limited",
"VirTra",
"Virco Manufacturing Corporation",
"Virgin Trains USA Inc.",
"VirnetX Holding Corp",
"Virtu Financial",
"Virtus Global Dividend & Income Fund Inc.",
"Virtus Global Multi-Sector Income Fund",
"Virtus Investment Partners",
"Virtus LifeSci Biotech Clinical Trials ETF",
"Virtus LifeSci Biotech Products ETF",
"Virtus Total Return Fund Inc.",
"Virtusa Corporation",
"Visa Inc.",
"Vishay Intertechnology",
"Vishay Precision Group",
"Vislink Technologies",
"Vista Gold Corporation",
"Vista Oil & Gas",
"Vista Outdoor Inc.",
"VistaGen Therapeutics",
"Visteon Corporation",
"Visterra",
"Vistra Energy Corp.",
"Vitamin Shoppe",
"Viveve Medical",
"Vivint Solar",
"VivoPower International PLC",
"Vmware",
"Vocera Communications",
"Vodafone Group Plc",
"VolitionRX Limited",
"Volt Information Sciences",
"Vonage Holdings Corp.",
"Vornado Realty Trust",
"Voya Asia Pacific High Dividend Equity Income Fund",
"Voya Emerging Markets High Income Dividend Equity Fund",
"Voya Financial",
"Voya Global Advantage and Premium Opportunity Fund",
"Voya Global Equity Dividend and Premium Opportunity Fund",
"Voya Infrastructure",
"Voya International High Dividend Equity Income Fund",
"Voya Natural Resources Equity Income Fund",
"Voya Prime Rate Trust",
"Voyager Therapeutics",
"Vulcan Materials Company",
"Vuzix Corporation",
"W&T Offshore",
"W.P. Carey Inc.",
"W.R. Berkley Corporation",
"W.R. Grace & Co.",
"W.W. Grainger",
"WAVE Life Sciences Ltd.",
"WD-40 Company",
"WEC Energy Group",
"WESCO International",
"WEX Inc.",
"WNS (Holdings) Limited",
"WPP plc",
"WPX Energy",
"WSFS Financial Corporation",
"WVS Financial Corp.",
"Wabash National Corporation",
"Wabco Holdings Inc.",
"Waddell & Reed Financial",
"Wah Fu Education Group Limited",
"Wahed FTSE USA Shariah ETF",
"Waitr Holdings Inc.",
"Walgreens Boots Alliance",
"Walker & Dunlop",
"Walmart Inc.",
"Walt Disney Company (The)",
"Wanda Sports Group Company Limited",
"Warrior Met Coal",
"Washington Federal",
"Washington Prime Group Inc.",
"Washington Real Estate Investment Trust",
"Washington Trust Bancorp",
"Waste Connections",
"Waste Management",
"Waters Corporation",
"Waterstone Financial",
"Watford Holdings Ltd.",
"Watsco",
"Watts Water Technologies",
"Wayfair Inc.",
"Wayne Farms",
"Wayside Technology Group",
"Wealthbridge Acquisition Limited",
"Webster Financial Corporation",
"Weibo Corporation",
"Weidai Ltd.",
"Weight Watchers International Inc",
"Weingarten Realty Investors",
"Weis Markets",
"Welbilt",
"WellCare Health Plans",
"Wellesley Bancorp",
"Wells Fargo & Company",
"Wells Fargo Global Dividend Opportunity Fund",
"Wells Fargo Income Opportunities Fund",
"Wells Fargo Multi-Sector Income Fund",
"Wells Fargo Utilities and High Income Fund",
"Welltower Inc.",
"Wendy's Company (The)",
"Werner Enterprises",
"WesBanco",
"Wesco Aircraft Holdings",
"West Bancorporation",
"West Pharmaceutical Services",
"Westamerica Bancorporation",
"Westell Technologies",
"Western Alliance Bancorporation",
"Western Asset Bond Fund",
"Western Asset Corporate Loan Fund Inc",
"Western Asset Emerging Markets Debt Fund Inc",
"Western Asset Global Corporate Defined Opportunity Fund Inc.",
"Western Asset Global High Income Fund Inc",
"Western Asset High Income Fund II Inc.",
"Western Asset High Income Opportunity Fund",
"Western Asset High Yield Defined Opportunity Fund Inc.",
"Western Asset Intermediate Muni Fund Inc",
"Western Asset Investment Grade Defined Opportunity Trust Inc.",
"Western Asset Investment Grade Income Fund Inc.",
"Western Asset Managed Municipals Fund",
"Western Asset Mortgage Capital Corporation",
"Western Asset Mortgage Defined Opportunity Fund Inc",
"Western Asset Municipal Defined Opportunity Trust Inc",
"Western Asset Municipal High Income Fund",
"Western Asset Municipal Partners Fund",
"Western Asset Short Duration Income ETF",
"Western Asset Total Return ETF",
"Western Asset Variable Rate Strategic Fund Inc.",
"Western Asset/Claymore U.S Treasury Inflation Prot Secs Fd 2",
"Western Asset/Claymore U.S. Treasury Inflation Prot Secs Fd",
"Western Copper and Gold Corporation",
"Western Digital Corporation",
"Western Midstream Partners",
"Western New England Bancorp",
"Western Union Company (The)",
"Westinghouse Air Brake Technologies Corporation",
"Westlake Chemical Corporation",
"Westlake Chemical Partners LP",
"Westpac Banking Corporation",
"Westport Fuel Systems Inc",
"Westrock Company",
"Westwater Resources",
"Westwood Holdings Group Inc",
"Weyco Group",
"Weyerhaeuser Company",
"Wheaton Precious Metals Corp.",
"Wheeler Real Estate Investment Trust",
"Whirlpool Corporation",
"White Mountains Insurance Group",
"WhiteHorse Finance",
"Whitestone REIT",
"Whiting Petroleum Corporation",
"WideOpenWest",
"WidePoint Corporation",
"Wilhelmina International",
"WillScot Corporation",
"Willamette Valley Vineyards",
"Willdan Group",
"Williams Companies",
"Williams-Sonoma",
"Willis Lease Finance Corporation",
"Willis Towers Watson Public Limited Company",
"Wingstop Inc.",
"Winmark Corporation",
"Winnebago Industries",
"Wins Finance Holdings Inc.",
"Wintrust Financial Corporation",
"Wipro Limited",
"Wireless Telecom Group",
"WisdomTree Barclays Negative Duration U.S. Aggregate Bond Fund",
"WisdomTree China ex-State-Owned Enterprises Fund",
"WisdomTree Cloud Computing Fund",
"WisdomTree Emerging Markets Consumer Growth Fund",
"WisdomTree Emerging Markets Corporate Bond Fund",
"WisdomTree Emerging Markets Quality Dividend Growth Fund",
"WisdomTree Germany Hedged Equity Fund",
"WisdomTree Interest Rate Hedged High Yield Bond Fund",
"WisdomTree Interest Rate Hedged U.S. Aggregate Bond Fund",
"WisdomTree Investments",
"WisdomTree Japan Hedged SmallCap Equity Fund",
"WisdomTree Middle East Dividend Fund",
"WisdomTree Negative Duration High Yield Bond Fund",
"WisdomTree U.S. Quality Dividend Growth Fund",
"WisdomTree U.S. SmallCap Quality Dividend Growth Fund",
"Wix.com Ltd.",
"Wolverine World Wide",
"Woodward",
"Woori Bank",
"Workday",
"Workhorse Group",
"Workiva Inc.",
"World Acceptance Corporation",
"World Fuel Services Corporation",
"World Wrestling Entertainment",
"Worthington Industries",
"Wrap Technologies",
"Wright Medical Group N.V.",
"Wyndham Destinations",
"Wyndham Hotels & Resorts",
"Wynn Resorts",
"X Financial",
"X4 Pharmaceuticals",
"XAI Octagon Floating Rate & Alternative Income Term Trust",
"XBiotech Inc.",
"XOMA Corporation",
"XPEL",
"XPO Logistics",
"XTL Biopharmaceuticals Ltd.",
"Xcel Brands",
"Xcel Energy Inc.",
"Xencor",
"Xenetic Biosciences",
"Xenia Hotels & Resorts",
"Xenon Pharmaceuticals Inc.",
"Xeris Pharmaceuticals",
"Xerox Holdings Corporation",
"Xilinx",
"Xinyuan Real Estate Co Ltd",
"Xperi Corporation",
"XpresSpa Group",
"Xtant Medical Holdings",
"Xunlei Limited",
"Xylem Inc.",
"Y-mAbs Therapeutics",
"YETI Holdings",
"YPF Sociedad Anonima",
"YRC Worldwide",
"YY Inc.",
"Yamana Gold Inc.",
"Yandex N.V.",
"Yatra Online",
"Yelp Inc.",
"Yext",
"Yield10 Bioscience",
"Yintech Investment Holdings Limited",
"Yirendai Ltd.",
"Youngevity International",
"Yum China Holdings",
"Yum! Brands",
"Yuma Energy",
"Yunji Inc.",
"ZAGG Inc",
"ZIOPHARM Oncology Inc",
"ZK International Group Co.",
"ZTO Express (Cayman) Inc.",
"Zafgen",
"Zai Lab Limited",
"Zayo Group Holdings",
"Zealand Pharma A/S",
"Zebra Technologies Corporation",
"Zedge",
"Zendesk",
"Zillow Group",
"Zimmer Biomet Holdings",
"Zion Oil ",
"Zions Bancorporation N.A.",
"Zix Corporation",
"Zoetis Inc.",
"Zogenix",
"Zomedica Pharmaceuticals Corp.",
"Zoom Video Communications",
"Zosano Pharma Corporation",
"Zovio Inc.",
"Zscaler",
"Zumiez Inc.",
"Zuora",
"Zymeworks Inc.",
"Zynerba Pharmaceuticals",
"Zynex",
"Zynga Inc.",
"aTyr Pharma",
"argenx SE",
"bluebird bio",
"cbdMD",
"comScore",
"e.l.f. Beauty",
"eBay Inc.",
"eGain Corporation",
"eHealth",
"eMagin Corporation",
"ePlus inc.",
"eXp World Holdings",
"electroCore",
"frontdoor",
"i3 Verticals",
"iBio",
"iClick Interactive Asia Group Limited",
"iFresh Inc.",
"iHeartMedia",
"iMedia Brands",
"iQIYI",
"iRadimed Corporation",
"iRhythm Technologies",
"iRobot Corporation",
"iShares 0-5 Year Investment Grade Corporate Bond ETF",
"iShares 1-3 Year International Treasury Bond ETF",
"iShares 1-3 Year Treasury Bond ETF",
"iShares 20+ Year Treasury Bond ETF",
"iShares 3-7 Year Treasury Bond ETF",
"iShares 7-10 Year Treasury Bond ETF",
"iShares Asia 50 ETF",
"iShares Broad USD Investment Grade Corporate Bond ETF",
"iShares Commodities Select Strategy ETF",
"iShares Core 1-5 Year USD Bond ETF",
"iShares Core MSCI Total International Stock ETF",
"iShares Core S&P U.S. Growth ETF",
"iShares Core S&P U.S. Value ETF",
"iShares Core Total USD Bond Market ETF",
"iShares Currency Hedged MSCI Germany ETF",
"iShares ESG 1-5 Year USD Corporate Bond ETF",
"iShares ESG MSCI EAFE ETF",
"iShares ESG MSCI EM ETF",
"iShares ESG MSCI USA ETF",
"iShares ESG MSCI USA Leaders ETF",
"iShares ESG USD Corporate Bond ETF",
"iShares Exponential Technologies ETF",
"iShares FTSE EPRA/NAREIT Europe Index Fund",
"iShares FTSE EPRA/NAREIT Global Real Estate ex-U.S. Index Fund",
"iShares Fallen Angels USD Bond ETF",
"iShares GNMA Bond ETF",
"iShares Global Green Bond ETF",
"iShares Global Infrastructure ETF",
"iShares Intermediate-Term Corporate Bond ETF",
"iShares International Treasury Bond ETF",
"iShares J.P. Morgan USD Emerging Markets Bond ETF",
"iShares MBS ETF",
"iShares MSCI ACWI Index Fund",
"iShares MSCI ACWI ex US Index Fund",
"iShares MSCI All Country Asia ex Japan Index Fund",
"iShares MSCI Brazil Small-Cap ETF",
"iShares MSCI China ETF",
"iShares MSCI EAFE Small-Cap ETF",
"iShares MSCI Emerging Markets Asia ETF",
"iShares MSCI Emerging Markets ex China ETF",
"iShares MSCI Europe Financials Sector Index Fund",
"iShares MSCI Europe Small-Cap ETF",
"iShares MSCI Global Gold Miners ETF",
"iShares MSCI Global Impact ETF",
"iShares MSCI Japan Equal Weighted ETF",
"iShares MSCI Japan Value ETF",
"iShares MSCI New Zealand ETF",
"iShares MSCI Qatar ETF",
"iShares MSCI Turkey ETF",
"iShares MSCI UAE ETF",
"iShares Morningstar Mid-Cap ETF",
"iShares Nasdaq Biotechnology Index Fund",
"iShares PHLX SOX Semiconductor Sector Index Fund",
"iShares Preferred and Income Securities ETF",
"iShares Russell 1000 Pure U.S. Revenue ETF",
"iShares S&P Emerging Markets Infrastructure Index Fund",
"iShares S&P Global Clean Energy Index Fund",
"iShares S&P Global Timber & Forestry Index Fund",
"iShares S&P India Nifty 50 Index Fund",
"iShares S&P Small-Cap 600 Growth ETF",
"iShares Select Dividend ETF",
"iShares Short Treasury Bond ETF",
"iShares Short-Term Corporate Bond ETF",
"iShares iBoxx $ High Yield ex Oil & Gas Corporate Bond ETF",
"iStar Inc.",
"icad inc.",
"inTest Corporation",
"j2 Global",
"lululemon athletica inc.",
"nLIGHT",
"nVent Electric plc",
"resTORbio",
"scPharmaceuticals Inc.",
"support.com",
"trivago N.V.",
"uniQure N.V.",
"vTv Therapeutics Inc.",
"voxeljet AG",
]
| mit | 3,439,949,495,777,811,000 | 19.220623 | 78 | 0.547705 | false | 2.756734 | false | false | false |
sumedh123/debatify | venv/lib/python2.7/site-packages/flask_socketio/__init__.py | 1 | 34245 | import sys
# make sure gevent-socketio is not installed, as it conflicts with
# python-socketio
gevent_socketio_found = True
try:
from socketio import socketio_manage
except ImportError:
gevent_socketio_found = False
if gevent_socketio_found:
print('The gevent-socketio package is incompatible with this version of '
'the Flask-SocketIO extension. Please uninstall it, and then '
'install the latest version of python-socketio in its place.')
sys.exit(1)
import socketio
import flask
from flask import json as flask_json
from werkzeug.debug import DebuggedApplication
from werkzeug.serving import run_with_reloader
from .namespace import Namespace
from .test_client import SocketIOTestClient
__version__ = '2.7.1'
class _SocketIOMiddleware(socketio.Middleware):
"""This WSGI middleware simply exposes the Flask application in the WSGI
environment before executing the request.
"""
def __init__(self, socketio_app, flask_app, socketio_path='socket.io'):
self.flask_app = flask_app
super(_SocketIOMiddleware, self).__init__(socketio_app,
flask_app.wsgi_app,
socketio_path)
def __call__(self, environ, start_response):
environ['flask.app'] = self.flask_app
return super(_SocketIOMiddleware, self).__call__(environ,
start_response)
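# Illustrative note (not part of the library logic): because the middleware
# above stores the application under the ``'flask.app'`` key, WSGI-level code
# running later in the stack could recover it from the environment, e.g.:
#
#     flask_app = environ.get('flask.app')  # hypothetical downstream consumer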
class SocketIO(object):
"""Create a Flask-SocketIO server.
:param app: The flask application instance. If the application instance
isn't known at the time this class is instantiated, then call
``socketio.init_app(app)`` once the application instance is
available.
:param message_queue: A connection URL for a message queue service the
server can use for multi-process communication. A
message queue is not required when using a single
server process.
:param channel: The channel name, when using a message queue. If a channel
isn't specified, a default channel will be used. If
multiple clusters of SocketIO processes need to use the
same message queue without interfering with each other, then
each cluster should use a different channel.
:param path: The path where the Socket.IO server is exposed. Defaults to
``'socket.io'``. Leave this as is unless you know what you are
doing.
:param resource: Alias to ``path``.
:param kwargs: Socket.IO and Engine.IO server options.
The Socket.IO server options are detailed below:
:param client_manager: The client manager instance that will manage the
client list. When this is omitted, the client list
is stored in an in-memory structure, so the use of
multiple connected servers is not possible. In most
cases, this argument does not need to be set
explicitly.
:param logger: To enable logging set to ``True`` or pass a logger object to
use. To disable logging set to ``False``.
:param binary: ``True`` to support binary payloads, ``False`` to treat all
payloads as text. On Python 2, if this is set to ``True``,
``unicode`` values are treated as text, and ``str`` and
``bytes`` values are treated as binary. This option has no
effect on Python 3, where text and binary payloads are
always automatically discovered.
:param json: An alternative json module to use for encoding and decoding
packets. Custom json modules must have ``dumps`` and ``loads``
functions that are compatible with the standard library
versions. To use the same json encoder and decoder as a Flask
application, use ``flask.json``.
The Engine.IO server configuration supports the following settings:
:param async_mode: The asynchronous model to use. See the Deployment
section in the documentation for a description of the
available options. Valid async modes are
``threading``, ``eventlet``, ``gevent`` and
``gevent_uwsgi``. If this argument is not given,
``eventlet`` is tried first, then ``gevent_uwsgi``,
then ``gevent``, and finally ``threading``. The
first async mode that has all its dependencies installed
                       is then the one that is chosen.
:param ping_timeout: The time in seconds that the client waits for the
server to respond before disconnecting.
:param ping_interval: The interval in seconds at which the client pings
the server.
:param max_http_buffer_size: The maximum size of a message when using the
polling transport.
:param allow_upgrades: Whether to allow transport upgrades or not.
    :param http_compression: Whether to compress packets when using the
polling transport.
:param compression_threshold: Only compress messages when their byte size
is greater than this value.
:param cookie: Name of the HTTP cookie that contains the client session
id. If set to ``None``, a cookie is not sent to the client.
:param cors_allowed_origins: List of origins that are allowed to connect
to this server. All origins are allowed by
default.
:param cors_credentials: Whether credentials (cookies, authentication) are
allowed in requests to this server.
:param engineio_logger: To enable Engine.IO logging set to ``True`` or pass
a logger object to use. To disable logging set to
``False``.
"""
def __init__(self, app=None, **kwargs):
self.server = None
self.server_options = None
self.wsgi_server = None
self.handlers = []
self.namespace_handlers = []
self.exception_handlers = {}
self.default_exception_handler = None
if app is not None or len(kwargs) > 0:
self.init_app(app, **kwargs)
def init_app(self, app, **kwargs):
if app is not None:
if not hasattr(app, 'extensions'):
app.extensions = {} # pragma: no cover
app.extensions['socketio'] = self
self.server_options = kwargs
if 'client_manager' not in self.server_options:
url = kwargs.pop('message_queue', None)
channel = kwargs.pop('channel', 'flask-socketio')
write_only = app is None
if url:
if url.startswith('redis://'):
queue_class = socketio.RedisManager
else:
queue_class = socketio.KombuManager
queue = queue_class(url, channel=channel,
write_only=write_only)
self.server_options['client_manager'] = queue
if 'json' in self.server_options and \
self.server_options['json'] == flask_json:
# flask's json module is tricky to use because its output
# changes when it is invoked inside or outside the app context
# so here to prevent any ambiguities we replace it with wrappers
# that ensure that the app context is always present
class FlaskSafeJSON(object):
@staticmethod
def dumps(*args, **kwargs):
with app.app_context():
return flask_json.dumps(*args, **kwargs)
@staticmethod
def loads(*args, **kwargs):
with app.app_context():
return flask_json.loads(*args, **kwargs)
self.server_options['json'] = FlaskSafeJSON
resource = kwargs.pop('path', kwargs.pop('resource', 'socket.io'))
if resource.startswith('/'):
resource = resource[1:]
self.server = socketio.Server(**self.server_options)
self.async_mode = self.server.async_mode
for handler in self.handlers:
self.server.on(handler[0], handler[1], namespace=handler[2])
for namespace_handler in self.namespace_handlers:
self.server.register_namespace(namespace_handler)
if app is not None:
            # here we attach the SocketIO middleware to the SocketIO object so it
# can be referenced later if debug middleware needs to be inserted
self.sockio_mw = _SocketIOMiddleware(self.server, app,
socketio_path=resource)
app.wsgi_app = self.sockio_mw
def on(self, message, namespace=None):
"""Decorator to register a SocketIO event handler.
This decorator must be applied to SocketIO event handlers. Example::
@socketio.on('my event', namespace='/chat')
def handle_my_custom_event(json):
print('received json: ' + str(json))
:param message: The name of the event. This is normally a user defined
string, but a few event names are already defined. Use
``'message'`` to define a handler that takes a string
payload, ``'json'`` to define a handler that takes a
JSON blob payload, ``'connect'`` or ``'disconnect'``
to create handlers for connection and disconnection
events.
:param namespace: The namespace on which the handler is to be
registered. Defaults to the global namespace.
"""
namespace = namespace or '/'
def decorator(handler):
def _handler(sid, *args):
return self._handle_event(handler, message, namespace, sid,
*args)
if self.server:
self.server.on(message, _handler, namespace=namespace)
else:
self.handlers.append((message, _handler, namespace))
return _handler
return decorator
def on_error(self, namespace=None):
"""Decorator to define a custom error handler for SocketIO events.
This decorator can be applied to a function that acts as an error
handler for a namespace. This handler will be invoked when a SocketIO
event handler raises an exception. The handler function must accept one
argument, which is the exception raised. Example::
@socketio.on_error(namespace='/chat')
def chat_error_handler(e):
print('An error has occurred: ' + str(e))
:param namespace: The namespace for which to register the error
handler. Defaults to the global namespace.
"""
namespace = namespace or '/'
def decorator(exception_handler):
if not callable(exception_handler):
raise ValueError('exception_handler must be callable')
self.exception_handlers[namespace] = exception_handler
return exception_handler
return decorator
def on_error_default(self, exception_handler):
"""Decorator to define a default error handler for SocketIO events.
This decorator can be applied to a function that acts as a default
error handler for any namespaces that do not have a specific handler.
Example::
@socketio.on_error_default
def error_handler(e):
print('An error has occurred: ' + str(e))
"""
if not callable(exception_handler):
raise ValueError('exception_handler must be callable')
self.default_exception_handler = exception_handler
return exception_handler
def on_event(self, message, handler, namespace=None):
"""Register a SocketIO event handler.
``on_event`` is the non-decorator version of ``'on'``.
Example::
def on_foo_event(json):
print('received json: ' + str(json))
socketio.on_event('my event', on_foo_event, namespace='/chat')
:param message: The name of the event. This is normally a user defined
string, but a few event names are already defined. Use
``'message'`` to define a handler that takes a string
payload, ``'json'`` to define a handler that takes a
JSON blob payload, ``'connect'`` or ``'disconnect'``
to create handlers for connection and disconnection
events.
:param handler: The function that handles the event.
:param namespace: The namespace on which the handler is to be
registered. Defaults to the global namespace.
"""
self.on(message, namespace=namespace)(handler)
def on_namespace(self, namespace_handler):
if not isinstance(namespace_handler, Namespace):
raise ValueError('Not a namespace instance.')
namespace_handler._set_socketio(self)
if self.server:
self.server.register_namespace(namespace_handler)
else:
self.namespace_handlers.append(namespace_handler)
def emit(self, event, *args, **kwargs):
"""Emit a server generated SocketIO event.
This function emits a SocketIO event to one or more connected clients.
A JSON blob can be attached to the event as payload. This function can
be used outside of a SocketIO event context, so it is appropriate to
use when the server is the originator of an event, outside of any
client context, such as in a regular HTTP request handler or a
background task. Example::
@app.route('/ping')
def ping():
socketio.emit('ping event', {'data': 42}, namespace='/chat')
:param event: The name of the user event to emit.
:param args: A dictionary with the JSON data to send as payload.
:param namespace: The namespace under which the message is to be sent.
Defaults to the global namespace.
:param room: Send the message to all the users in the given room. If
this parameter is not included, the event is sent to
all connected users.
:param include_self: ``True`` to include the sender when broadcasting
or addressing a room, or ``False`` to send to
everyone but the sender.
:param callback: If given, this function will be called to acknowledge
that the client has received the message. The
arguments that will be passed to the function are
those provided by the client. Callback functions can
only be used when addressing an individual client.
"""
skip_sid = flask.request.sid \
if not kwargs.get('include_self', True) else None
self.server.emit(event, *args, namespace=kwargs.get('namespace', '/'),
room=kwargs.get('room'), skip_sid=skip_sid,
callback=kwargs.get('callback'))
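    # Sketch of assumed usage (not original source): acknowledgement callbacks
    # only work when addressing a single client, so a server-initiated emit
    # with a callback would typically target one session id via ``room``:
    #
    #     def ack(*client_args):
    #         print('client acknowledged: ' + str(client_args))
    #
    #     socketio.emit('private event', {'data': 1}, room=some_sid,
    #                   callback=ack, namespace='/chat')
    #
    # ``some_sid`` is a placeholder for a known client session id.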
def send(self, data, json=False, namespace=None, room=None,
callback=None, include_self=True):
"""Send a server-generated SocketIO message.
This function sends a simple SocketIO message to one or more connected
clients. The message can be a string or a JSON blob. This is a simpler
version of ``emit()``, which should be preferred. This function can be
used outside of a SocketIO event context, so it is appropriate to use
when the server is the originator of an event.
:param message: The message to send, either a string or a JSON blob.
:param json: ``True`` if ``message`` is a JSON blob, ``False``
otherwise.
:param namespace: The namespace under which the message is to be sent.
Defaults to the global namespace.
:param room: Send the message only to the users in the given room. If
this parameter is not included, the message is sent to
all connected users.
:param include_self: ``True`` to include the sender when broadcasting
or addressing a room, or ``False`` to send to
everyone but the sender.
:param callback: If given, this function will be called to acknowledge
that the client has received the message. The
arguments that will be passed to the function are
those provided by the client. Callback functions can
only be used when addressing an individual client.
"""
skip_sid = flask.request.sid if not include_self else None
if json:
self.emit('json', data, namespace=namespace, room=room,
skip_sid=skip_sid, callback=callback)
else:
self.emit('message', data, namespace=namespace, room=room,
skip_sid=skip_sid, callback=callback)
def close_room(self, room, namespace=None):
"""Close a room.
This function removes any users that are in the given room and then
deletes the room from the server. This function can be used outside
of a SocketIO event context.
:param room: The name of the room to close.
:param namespace: The namespace under which the room exists. Defaults
to the global namespace.
"""
self.server.close_room(room, namespace)
def run(self, app, host=None, port=None, **kwargs):
"""Run the SocketIO web server.
:param app: The Flask application instance.
:param host: The hostname or IP address for the server to listen on.
Defaults to 127.0.0.1.
:param port: The port number for the server to listen on. Defaults to
5000.
:param debug: ``True`` to start the server in debug mode, ``False`` to
start in normal mode.
:param use_reloader: ``True`` to enable the Flask reloader, ``False``
to disable it.
:param extra_files: A list of additional files that the Flask
reloader should watch. Defaults to ``None``
        :param log_output: If ``True``, the server logs all incoming
connections. If ``False`` logging is disabled.
Defaults to ``True`` in debug mode, ``False``
in normal mode. Unused when the threading async
mode is used.
:param kwargs: Additional web server options. The web server options
are specific to the server used in each of the supported
async modes. Note that options provided here will
not be seen when using an external web server such
as gunicorn, since this method is not called in that
case.
"""
if host is None:
host = '127.0.0.1'
if port is None:
server_name = app.config['SERVER_NAME']
if server_name and ':' in server_name:
port = int(server_name.rsplit(':', 1)[1])
else:
port = 5000
debug = kwargs.pop('debug', app.debug)
log_output = kwargs.pop('log_output', debug)
use_reloader = kwargs.pop('use_reloader', debug)
extra_files = kwargs.pop('extra_files', None)
app.debug = debug
if app.debug and self.server.eio.async_mode != 'threading':
# put the debug middleware between the SocketIO middleware
# and the Flask application instance
#
# mw1 mw2 mw3 Flask app
# o ---- o ---- o ---- o
# /
# o Flask-SocketIO
# \ middleware
# o
# Flask-SocketIO WebSocket handler
#
# BECOMES
#
# dbg-mw mw1 mw2 mw3 Flask app
# o ---- o ---- o ---- o ---- o
# /
# o Flask-SocketIO
# \ middleware
# o
# Flask-SocketIO WebSocket handler
#
self.sockio_mw.wsgi_app = DebuggedApplication(self.sockio_mw.wsgi_app,
evalex=True)
if self.server.eio.async_mode == 'threading':
from werkzeug._internal import _log
_log('warning', 'WebSocket transport not available. Install '
'eventlet or gevent and gevent-websocket for '
'improved performance.')
app.run(host=host, port=port, threaded=True,
use_reloader=use_reloader, **kwargs)
elif self.server.eio.async_mode == 'eventlet':
def run_server():
import eventlet
import eventlet.wsgi
eventlet_socket = eventlet.listen((host, port))
# If provided an SSL argument, use an SSL socket
ssl_args = ['keyfile', 'certfile', 'server_side', 'cert_reqs',
'ssl_version', 'ca_certs',
'do_handshake_on_connect', 'suppress_ragged_eofs',
'ciphers']
ssl_params = {k: kwargs[k] for k in kwargs if k in ssl_args}
if len(ssl_params) > 0:
for k in ssl_params:
kwargs.pop(k)
ssl_params['server_side'] = True # Listening requires true
eventlet_socket = eventlet.wrap_ssl(eventlet_socket,
**ssl_params)
eventlet.wsgi.server(eventlet_socket, app,
log_output=log_output, **kwargs)
if use_reloader:
run_with_reloader(run_server, extra_files=extra_files)
else:
run_server()
elif self.server.eio.async_mode == 'gevent':
from gevent import pywsgi
try:
from geventwebsocket.handler import WebSocketHandler
websocket = True
except ImportError:
websocket = False
log = 'default'
if not log_output:
log = None
if websocket:
self.wsgi_server = pywsgi.WSGIServer(
(host, port), app, handler_class=WebSocketHandler,
log=log, **kwargs)
else:
self.wsgi_server = pywsgi.WSGIServer((host, port), app,
log=log)
if use_reloader:
# monkey patching is required by the reloader
from gevent import monkey
monkey.patch_all()
def run_server():
self.wsgi_server.serve_forever()
run_with_reloader(run_server, extra_files=extra_files)
else:
self.wsgi_server.serve_forever()
def stop(self):
"""Stop a running SocketIO web server.
        This method must be called from an HTTP or SocketIO handler function.
"""
if self.server.eio.async_mode == 'threading':
func = flask.request.environ.get('werkzeug.server.shutdown')
if func:
func()
else:
raise RuntimeError('Cannot stop unknown web server')
elif self.server.eio.async_mode == 'eventlet':
raise SystemExit
elif self.server.eio.async_mode == 'gevent':
self.wsgi_server.stop()
def start_background_task(self, target, *args, **kwargs):
"""Start a background task using the appropriate async model.
This is a utility function that applications can use to start a
background task using the method that is compatible with the
selected async mode.
:param target: the target function to execute.
:param args: arguments to pass to the function.
:param kwargs: keyword arguments to pass to the function.
This function returns an object compatible with the `Thread` class in
the Python standard library. The `start()` method on this object is
already called by this function.
"""
return self.server.start_background_task(target, *args, **kwargs)
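    # Sketch of an assumed usage pattern (not part of the original source):
    # a periodic emitter launched with the async-mode-aware task starter.
    #
    #     def background_ticker():
    #         while True:
    #             socketio.sleep(10)
    #             socketio.emit('tick', {'data': 'heartbeat'}, namespace='/chat')
    #
    #     thread = socketio.start_background_task(target=background_ticker)
    #
    # The event name and the 10-second interval are illustrative choices.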
def sleep(self, seconds=0):
"""Sleep for the requested amount of time using the appropriate async
model.
This is a utility function that applications can use to put a task to
sleep without having to worry about using the correct call for the
selected async mode.
"""
return self.server.sleep(seconds)
def test_client(self, app, namespace=None):
"""Return a simple SocketIO client that can be used for unit tests."""
return SocketIOTestClient(app, self, namespace)
def _handle_event(self, handler, message, namespace, sid, *args):
if sid not in self.server.environ:
# we don't have record of this client, ignore this event
return '', 400
app = self.server.environ[sid]['flask.app']
with app.request_context(self.server.environ[sid]):
if 'saved_session' in self.server.environ[sid]:
self._copy_session(
self.server.environ[sid]['saved_session'],
flask.session)
flask.request.sid = sid
flask.request.namespace = namespace
flask.request.event = {'message': message, 'args': args}
try:
if message == 'connect':
ret = handler()
else:
ret = handler(*args)
except:
err_handler = self.exception_handlers.get(
namespace, self.default_exception_handler)
if err_handler is None:
raise
type, value, traceback = sys.exc_info()
return err_handler(value)
if flask.session.modified and sid in self.server.environ:
self.server.environ[sid]['saved_session'] = {}
self._copy_session(
flask.session,
self.server.environ[sid]['saved_session'])
return ret
def _copy_session(self, src, dest):
for k in src:
dest[k] = src[k]
def emit(event, *args, **kwargs):
"""Emit a SocketIO event.
This function emits a SocketIO event to one or more connected clients. A
JSON blob can be attached to the event as payload. This is a function that
    can only be called from a SocketIO event handler, as it obtains some
information from the current client context. Example::
@socketio.on('my event')
def handle_my_custom_event(json):
emit('my response', {'data': 42})
:param event: The name of the user event to emit.
:param args: A dictionary with the JSON data to send as payload.
:param namespace: The namespace under which the message is to be sent.
Defaults to the namespace used by the originating event.
A ``'/'`` can be used to explicitly specify the global
namespace.
:param callback: Callback function to invoke with the client's
acknowledgement.
:param broadcast: ``True`` to send the message to all clients, or ``False``
to only reply to the sender of the originating event.
:param room: Send the message to all the users in the given room. If this
argument is set, then broadcast is implied to be ``True``.
:param include_self: ``True`` to include the sender when broadcasting or
addressing a room, or ``False`` to send to everyone
but the sender.
"""
if 'namespace' in kwargs:
namespace = kwargs['namespace']
else:
namespace = flask.request.namespace
callback = kwargs.get('callback')
broadcast = kwargs.get('broadcast')
room = kwargs.get('room')
if room is None and not broadcast:
room = flask.request.sid
include_self = kwargs.get('include_self', True)
socketio = flask.current_app.extensions['socketio']
return socketio.emit(event, *args, namespace=namespace, room=room,
include_self=include_self, callback=callback)
def send(message, **kwargs):
"""Send a SocketIO message.
This function sends a simple SocketIO message to one or more connected
clients. The message can be a string or a JSON blob. This is a simpler
version of ``emit()``, which should be preferred. This is a function that
can only be called from a SocketIO event handler.
:param message: The message to send, either a string or a JSON blob.
:param namespace: The namespace under which the message is to be sent.
Defaults to the namespace used by the originating event.
An empty string can be used to use the global namespace.
:param callback: Callback function to invoke with the client's
acknowledgement.
:param broadcast: ``True`` to send the message to all connected clients, or
``False`` to only reply to the sender of the originating
event.
:param room: Send the message to all the users in the given room.
:param include_self: ``True`` to include the sender when broadcasting or
addressing a room, or ``False`` to send to everyone
but the sender.
"""
if 'namespace' in kwargs:
namespace = kwargs['namespace']
else:
namespace = flask.request.namespace
callback = kwargs.get('callback')
broadcast = kwargs.get('broadcast')
room = kwargs.get('room')
if room is None and not broadcast:
room = flask.request.sid
include_self = kwargs.get('include_self', True)
socketio = flask.current_app.extensions['socketio']
return socketio.send(message, namespace=namespace, room=room,
include_self=include_self, callback=callback)
def join_room(room):
"""Join a room.
This function puts the user in a room, under the current namespace. The
user and the namespace are obtained from the event context. This is a
function that can only be called from a SocketIO event handler. Example::
@socketio.on('join')
def on_join(data):
username = session['username']
room = data['room']
join_room(room)
send(username + ' has entered the room.', room=room)
:param room: The name of the room to join.
"""
socketio = flask.current_app.extensions['socketio']
socketio.server.enter_room(flask.request.sid, room,
namespace=flask.request.namespace)
def leave_room(room):
"""Leave a room.
This function removes the user from a room, under the current namespace.
The user and the namespace are obtained from the event context. This is
a function that can only be called from a SocketIO event handler. Example::
@socketio.on('leave')
def on_leave(data):
username = session['username']
room = data['room']
leave_room(room)
send(username + ' has left the room.', room=room)
:param room: The name of the room to leave.
"""
socketio = flask.current_app.extensions['socketio']
socketio.server.leave_room(flask.request.sid, room,
namespace=flask.request.namespace)
def close_room(room):
"""Close a room.
This function removes any users that are in the given room and then deletes
the room from the server. This is a function that can only be called from
a SocketIO event handler.
:param room: The name of the room to close.
"""
socketio = flask.current_app.extensions['socketio']
socketio.server.close_room(room, namespace=flask.request.namespace)
def rooms():
"""Return a list of the rooms the client is in.
This function returns all the rooms the client has entered, including its
own room, assigned by the Socket.IO server. This is a function that can
only be called from a SocketIO event handler.
"""
socketio = flask.current_app.extensions['socketio']
return socketio.server.rooms(flask.request.sid,
namespace=flask.request.namespace)
def disconnect(silent=False):
"""Disconnect the client.
This function terminates the connection with the client. As a result of
this call the client will receive a disconnect event. Example::
@socketio.on('message')
def receive_message(msg):
if is_banned(session['username']):
disconnect()
# ...
:param silent: this option is deprecated.
"""
socketio = flask.current_app.extensions['socketio']
return socketio.server.disconnect(flask.request.sid,
namespace=flask.request.namespace)
| mit | 2,640,166,010,440,705,000 | 44.118577 | 82 | 0.579121 | false | 4.829361 | false | false | false |
iamarf/terminal-quest | linux_story/gtk3/Spellbook.py | 1 | 4059 | #!/usr/bin/env python
# Spellbook.py
#
# Copyright (C) 2014 Kano Computing Ltd
# License: GNU General Public License v2 http://www.gnu.org/licenses/gpl-2.0.txt
#
# Author: Caroline Clark <[email protected]>
import os
import sys
from gi.repository import Gtk, Gdk
if __name__ == '__main__' and __package__ is None:
dir_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if dir_path != '/usr':
sys.path.insert(1, dir_path)
from linux_story.common import common_media_dir
class Spellbook(Gtk.EventBox):
'''This is the GUI showing all the spells along the bottom
'''
SPELLBOOK_BORDER = 1
SPELL_BORDER = 1
CMD_HEIGHT = 80
CMD_WIDTH = 80
HEIGHT = 100
number_of_spells = 7
def __init__(self):
self.stop = False
Gtk.EventBox.__init__(self)
background = Gtk.EventBox()
background.get_style_context().add_class("spellbook_background")
self.grid = Gtk.Grid()
self.add(background)
background.add(self.grid)
screen = Gdk.Screen.get_default()
self.win_width = screen.get_width()
self.win_height = screen.get_height()
self.WIDTH = self.win_width / 2
self.set_size_request(self.WIDTH, self.HEIGHT)
self.__pack_locked_spells()
def repack_spells(self, commands):
'''
Takes in the list of commands, and creates the spells and
packs them into a grid.
Args:
commands (list): List of strings of the commands we want to show
Returns:
None
'''
left = 0
if commands:
for command in commands:
if (left + 1) * (self.CMD_WIDTH + 20) < self.win_width:
box = self.__create_spell(command)
child = self.grid.get_child_at(left, 0)
self.grid.remove(child)
self.grid.attach(box, left, 0, 1, 1)
left += 1
self.show_all()
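    # Assumed usage sketch (not in the original file): the caller passes the
    # names of the currently unlocked commands, for example:
    #
    #     spellbook = Spellbook()
    #     spellbook.repack_spells(["ls", "cd", "cat"])
    #
    # Each name is expected to have a matching icon at media/images/<name>.png.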
def __create_spell(self, name, locked=False):
'''
Create the individual GUI for a spell.
To create the icon, have the icon located at
media/images/name.png
Args:
name (str): Name to be shown in the widget
locked (bool): Whether we show the icon locked
i.e. with a padlock
Returns:
Gtk.Box: container widget for an individual spell
'''
box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
box.set_size_request(self.CMD_WIDTH, self.CMD_HEIGHT)
box.set_margin_top(10)
box.set_margin_left(10)
box.set_margin_right(10)
box.set_margin_bottom(10)
icon_background = Gtk.EventBox()
icon_background.get_style_context().add_class("spell_icon_background")
box.pack_start(icon_background, False, False, 0)
label_background = Gtk.EventBox()
label_background.get_style_context().add_class("spell_label_background")
images_dir = os.path.join(common_media_dir, 'images')
if locked:
filename = os.path.join(images_dir, "padlock.png")
icon_background.get_style_context().add_class("locked")
label_background.get_style_context().add_class("locked")
else:
filename = os.path.join(images_dir, name + ".png")
icon = Gtk.Image.new_from_file(filename)
icon_background.add(icon)
box.pack_start(label_background, False, False, 0)
label = Gtk.Label(name)
label.get_style_context().add_class("spell_command")
label.set_alignment(xalign=0.5, yalign=0.5)
label_background.add(label)
return box
def __pack_locked_spells(self):
'''
Fill up the rest of the spellbook with locked boxes.
'''
left = 0
while left < self.number_of_spells:
locked_box = self.__create_spell("...", locked=True)
self.grid.attach(locked_box, left, 0, 1, 1)
left += 1
| gpl-2.0 | -828,757,298,218,543,200 | 27.584507 | 80 | 0.577482 | false | 3.663357 | false | false | false |
habi/GlobalDiagnostiX | GOTTHARD/Photon-Calculation.py | 1 | 3166 | # -*- coding: utf-8 -*-
"""
Plot Attenuation and transmission of GOTTHARD detector element
"""
from __future__ import division
import matplotlib.pylab as plt
import os
import glob
import numpy as np
GOTTHARDArea = 1130 * (50 / 1000) * 2  # mm²
Distance = 163 # cm
ScintillatorArea = 430 * 430 # mm
print 'The area of the GOTTHARD sensor we used was', int(GOTTHARDArea), 'mm²'
print 'This is', int(round(ScintillatorArea / GOTTHARDArea)), 'times smaller',\
'than the scintillator we plan to use (430 x 430 mm²)'
SiliconAttenuation = np.loadtxt('Si_Attenuation.dat')
SiliconTransmission = np.loadtxt('Si_Transmission.dat')
SiliconDensity = 2.329 # g/cm³
SiliconThickness = 320 # um
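# The curves below follow the Beer-Lambert law, T = exp(-(mu/rho) * rho * t):
# (mu/rho) comes from the NIST attenuation table, rho is the silicon density
# in g/cm³ and t is the sensor thickness converted from um to cm (hence the
# division by 10000). The plotted "Attenuation" is simply 1 - T.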
plt.figure()
plt.hold(True)
plt.subplot(1, 2, 1)
plt.plot(SiliconAttenuation[:, 0] * 1000,
1 - (np.exp(1) ** - (SiliconAttenuation[:, 1] * SiliconDensity *
SiliconThickness / 10000)), color='k')
plt.xlabel('Photon Energy [keV]')
plt.rc('text', usetex=True)
plt.rc('font', family='serif')
plt.ylabel(r'Attenuation coefficient $\frac{\mu}{\rho}$ [cm2/g]')
plt.title('Attenuation')
plt.xlim([0, 120])
plt.ylim([0, 1])
from scipy import interpolate
x = SiliconAttenuation[:, 0] * 1000
y = (np.exp(- (SiliconAttenuation[:, 1] * SiliconDensity * SiliconThickness /
10000)))
f1 = interpolate.interp1d(x, y)
f2 = interpolate.interp1d(x, y, kind='cubic')
xnew = np.arange(1, 120, 0.1)
plt.subplot(1, 2, 2)
plt.plot(SiliconTransmission[:, 0] / 1000, SiliconTransmission[:, 1],
color='k', label='from Anna')
plt.plot(SiliconAttenuation[:, 0] * 1000,
(np.exp(- (SiliconAttenuation[:, 1] * SiliconDensity *
SiliconThickness / 10000))), 'gD',
label='from NIST (1-Attenuation)')
plt.plot(xnew, f1(xnew) + 0.1, 'r', label='Int')
plt.plot(xnew, f2(xnew) + 0.2, 'b', label='Int')
# plt.legend(loc='best')
plt.xlabel('Photon Energy [keV]')
plt.ylabel('Tranmission')
plt.title('Transmission for a thickness of 320 um')
plt.xlim([0, 120])
# plt.ylim([0, 1])
# plt.savefig('Si_Attenuation_Transmission.pdf')
plt.show()
Spectrapath = '/afs/psi.ch/project/EssentialMed/Images/' \
'GOTTHARD_and_TIS/GOTTHARD'
Spectra = sorted(glob.glob(os.path.join(Spectrapath, '*.txt')))
FileName = [os.path.basename(item) for item in Spectra]
Data = [np.loadtxt(item) for item in Spectra]
DataName = [open(item).readlines()[0].split()[0][1:-2] for item in Spectra]
# Get Filenames of Spectra and split it up into the desired values like kV, mAs
# and exposure time with some basic string handling.
Modality = [item.split('_')[0] for item in FileName]
Energy = [int(item.split('_')[1][:-2]) for item in FileName]
Current = [int(item.split('_')[2][:-2]) for item in FileName]
mAs = [float(item.split('_')[3][:-3]) for item in FileName]
ExposureTime = [int(item.split('_')[4][:-6]) for item in FileName]
Frames = [open(item).readlines()[0].split()[1] for item in Spectra]
BinCenter = [open(item).readlines()[1].split()[0] for item in Spectra]
Photons = [open(item).readlines()[1].split()[1] for item in Spectra]
PhotonsPerFrame = [open(item).readlines()[1].split()[2] for item in Spectra]
| unlicense | -3,744,348,821,068,173,300 | 35.77907 | 79 | 0.665191 | false | 2.774561 | false | false | false |
josuemontano/cms | spartan/spartan/factories/extra.py | 1 | 1990 | import os
import uuid
from .meta import *
class HomeFactory(object):
def __init__(self, request):
self.request = request
def index(self):
from .security import User
pages = DBSession.query(Page).order_by(Page.sort).all()
site = DBSession.query(Site).first()
users = DBSession.query(User).all()
return { 'site' : site,
'pages' : pages,
'users' : users,
'user' : authenticated_userid(self.request) }
class FileFactory(object):
def __init__(self, request):
self.request = request
self.here = os.path.dirname(__file__)
def index (self):
return { }
def get_images (self):
images = os.listdir(os.path.join(self.here, '..', 'static', 'uploads/images'))
images_list = []
for img in images:
i = self.request.static_url('spartan:static/uploads/images/' + img)
images_list.append(dict(thumb=i, image=i))
return images_list
def upload_image (self):
filename = self.request.POST['file'].filename
input_file = self.request.POST['file'].file
ext = filename.rsplit('.', 1)[-1]
if ext in ['jpg', 'jpeg', 'png', 'gif']:
name = '%s.%s' % (uuid.uuid4(), ext)
file_path = os.path.join(self.here, '..', 'static', 'uploads/images', name)
temp_file_path = file_path + '~'
output_file = open(temp_file_path, 'wb')
input_file.seek(0)
while True:
data = input_file.read(2<<16)
if not data:
break
output_file.write(data)
output_file.close()
os.rename(temp_file_path, file_path)
link = self.request.static_url('spartan:static/uploads/images/' + name)
return { 'filelink': link, 'filename': name }
def upload_file (self):
        return { }
| gpl-3.0 | 363,829,128,363,588,160 | 30.109375 | 92 | 0.523618 | false | 3.804971 | false | false | false
nikitanovosibirsk/hiphip | hiphip/http/response.py | 1 | 1229 | import json
from collections import OrderedDict
import requests
class Response(requests.Response):
def __repr__(self):
def _log_body(headers, body):
r = ""
request_content_type = headers.get("content-type", "text/plain").lower()
if request_content_type.startswith("text/"):
r += str(body)
elif request_content_type.startswith("application/json"):
try:
request_text = json.loads(body)
except:
r += "Non-parsed json: {}".format(str(body))
else:
r += json.dumps(request_text, sort_keys=True, ensure_ascii=False, indent=4)
else:
r += str(body)
return r
r = "{method} {url}\n".format(method=self.request.method, url=self.request.url)
        r += _log_body(self.request.headers, self.request.body)
r += "\n\n"
r += "HTTP/1.1 {status}\n".format(status=self.status_code)
for key, value in OrderedDict(sorted(self.headers.items())).items():
r += "{key}: {value}\n".format(key=key, value=value)
        r += _log_body(self.headers, self.text)
return r
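    # Illustrative only (assumes the helper above): for a POST request with a
    # JSON body, repr(response) renders roughly as
    #
    #     POST http://example.com/api
    #     <pretty-printed request body>
    #
    #     HTTP/1.1 200
    #     content-type: application/json
    #     <pretty-printed response body>
    #
    # with bodies pretty-printed only when the content type allows it.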
| mit | -8,099,628,879,669,621,000 | 31.342105 | 95 | 0.534581 | false | 3.901587 | false | false | false |
joopert/home-assistant | homeassistant/components/mqtt/server.py | 2 | 2808 | """Support for a local MQTT broker."""
import asyncio
import logging
import tempfile
import voluptuous as vol
from hbmqtt.broker import Broker, BrokerException
from passlib.apps import custom_app_context
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
import homeassistant.helpers.config_validation as cv
from .const import PROTOCOL_311
_LOGGER = logging.getLogger(__name__)
# None allows custom config to be created through generate_config
HBMQTT_CONFIG_SCHEMA = vol.Any(
None,
vol.Schema(
{
vol.Optional("auth"): vol.Schema(
{vol.Optional("password-file"): cv.isfile}, extra=vol.ALLOW_EXTRA
),
vol.Optional("listeners"): vol.Schema(
{vol.Required("default"): vol.Schema(dict), str: vol.Schema(dict)}
),
},
extra=vol.ALLOW_EXTRA,
),
)
@asyncio.coroutine
def async_start(hass, password, server_config):
"""Initialize MQTT Server.
This method is a coroutine.
"""
passwd = tempfile.NamedTemporaryFile()
gen_server_config, client_config = generate_config(hass, passwd, password)
try:
if server_config is None:
server_config = gen_server_config
broker = Broker(server_config, hass.loop)
yield from broker.start()
except BrokerException:
_LOGGER.exception("Error initializing MQTT server")
return False, None
finally:
passwd.close()
@asyncio.coroutine
def async_shutdown_mqtt_server(event):
"""Shut down the MQTT server."""
yield from broker.shutdown()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_shutdown_mqtt_server)
return True, client_config
def generate_config(hass, passwd, password):
"""Generate a configuration based on current Home Assistant instance."""
config = {
"listeners": {
"default": {
"max-connections": 50000,
"bind": "0.0.0.0:1883",
"type": "tcp",
},
"ws-1": {"bind": "0.0.0.0:8080", "type": "ws"},
},
"auth": {"allow-anonymous": password is None},
"plugins": ["auth_anonymous"],
"topic-check": {"enabled": True, "plugins": ["topic_taboo"]},
}
if password:
username = "homeassistant"
# Encrypt with what hbmqtt uses to verify
passwd.write(
"homeassistant:{}\n".format(custom_app_context.encrypt(password)).encode(
"utf-8"
)
)
passwd.flush()
config["auth"]["password-file"] = passwd.name
config["plugins"].append("auth_file")
else:
username = None
client_config = ("localhost", 1883, username, password, None, PROTOCOL_311)
return config, client_config
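# For reference (derived from the code above, not an official API contract):
# the returned client_config tuple has the shape
#
#     ('localhost', 1883, username, password, None, PROTOCOL_311)
#
# where username is 'homeassistant' when a password is configured and None
# otherwise, matching the broker's generated password file.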
| apache-2.0 | 1,489,471,780,592,490,500 | 27.08 | 85 | 0.606125 | false | 4.028694 | true | false | false |
shibu/pyspec | pyspec/reloader.py | 1 | 10404 | # -*- coding: ascii -*-
__pyspec = 1
__all__ = ("ModuleObserver", "ModuleReloader")
import linecache, os, sys, time
class ModuleReloader(object):
def __init__(self):
self.modules = {}
self.observer_class = ModuleObserver
self.category = MethodCategorizer()
def check_modification(self):
changed_modules = []
for observer in self.modules.itervalues():
if observer.is_modified():
changed_modules.append(observer)
return changed_modules
def check_new_modules(self):
new_modules = []
old_modules = sys.modules.keys()
try:
for name, module in sys.modules.iteritems():
if not self.modules.has_key(name):
observer = self.observer_class(module)
self.modules[name] = observer
new_modules.append(observer)
old_modules = None
return new_modules
finally:
if old_modules is not None:
print "new: %s" % set(sys.modules.keys()).difference(set(old_modules))
print "del: %s" % set(old_modules).difference(sys.modules.keys())
def update_depends(self, observers):
for observer in observers:
if observer.module is not None:
use_module = observer.make_depends(category=self.category)
use_module.discard(observer.module.__name__)
for module_name in use_module:
self.modules[module_name].affect.add(observer.module.__name__)
def clear_flag(self):
for module in self.modules.itervalues():
module.change_flag = False
def clear_callstack(self):
pass
def clear_callstack_hostspot(self, method):
pass
def dump_structure_map(self):
print self.category.methods
table = {}
for module in self.modules.itervalues():
module.dump_module_structure_map(table, self.category)
return table
class ModuleVisitor(object):
def explore(self, observers, nest=1):
pass
def visit_module(self, nest):
pass
class ModuleObserver(object):
__slots__ = ("module", "path", "mtime", "affect", "has_error", "change_flag", "is_system", "is_pyspec", "is_spectest")
def __init__(self, module):
self.module = module
self.path = None
self.mtime = None
self.affect = set()
self.has_error = False
self.module_type_check()
if not self.is_system:
self.path = self.check_filename()
self.mtime = os.path.getmtime(self.path)
def make_depends(self, namespace = None, result = None, category = None):
from types import ModuleType, FunctionType, TypeType, ClassType, MethodType
if namespace is None:
namespace = self.module
result = set()
for name, object in namespace.__dict__.iteritems():
if name == "__builtins__":
continue
elif type(object) == ModuleType:
if (not self.is_system) and (object is not namespace):
result.add(object.__name__)
elif type(object) == MethodType:
if category is not None:
category.regist_method(object)
elif type(object) == FunctionType:
if category is not None:
category.regist_method(object)
if object.__module__ is None:
continue
module = sys.modules[object.__module__]
if self.is_system:
pass
elif object.__module__ != namespace.__name__:
result.add(object.__module__)
elif type(object) in [TypeType, ClassType]:
if object.__module__ is None:
continue
try:
module = sys.modules[object.__module__]
if self.is_system or (module is namespace):
pass
else:
result.add(object.__module__)
self.make_depends(object, result, category)
except KeyError:
print "not find '%s'" % object.__module__
return result
def module_type_check(self):
if not hasattr(self.module, "__file__"):
self.is_system = True
elif self.module.__file__ == "__main__":
self.is_system = True
elif self.module.__file__.startswith(sys.prefix):
self.is_system = True
else:
self.is_system = False
if hasattr(self.module, "__pyspec"):
self.is_pyspec = True
else:
self.is_pyspec = False
self.is_spectest = False
def is_modified(self):
if self.mtime is None:
return False
oldmtime = self.mtime
self.mtime = os.path.getmtime(self.path)
return oldmtime != self.mtime
def reload(self):
import traceback
self.has_error = False
if self.is_system or self.is_pyspec:
return
if self.module is not None:
keys = self.module.__dict__.keys()
for key in keys:
if key.startswith("__") and key.endswith("__"):
continue
del self.module.__dict__[key]
try:
reload(self.module)
print "reload(%s)" % self.module.__name__
if self.mtime is not None:
self.mtime = os.path.getmtime(self.path)
except Exception, error:
print "load error(%s)" % self.module.__name__
self.has_error = True
error_message = ''.join(traceback.format_exception(error.__class__, error, None))
return error_message
def reload_all(self, modules, error_messages):
error_message = self.reload()
self.change_flag = True
if self.has_error:
self.chain_error_messages(self.affect, modules, 0, error_messages, error_message)
return
self.chain_reload(self.affect, modules, 0)
update_modules = set()
for module in modules.itervalues():
if module.change_flag:
linecache.checkcache(self.path)
def chain_reload(self, change_modules, allmodules, i):
if i > 10:
return
next_updates = set()
for module_name in change_modules:
observer = allmodules[module_name]
observer.reload()
if not observer.has_error:
next_updates.update(observer.affect)
if next_updates:
self.chain_reload(next_updates, allmodules, i+1)
def chain_error_messages(self, change_modules, allmodules, i, error_messages, error_message):
if i > 10:
return
next_modules = set()
for module_name in change_modules:
observer = allmodules[module_name]
if observer.is_spectest:
try:
error_messages[observer].append(error_message)
except KeyError:
error_messages[observer] = [error_message]
next_modules.update(observer.affect)
if next_modules:
            self.chain_error_messages(next_modules, allmodules, i+1, error_messages, error_message)
def check_filename(self):
path = self.module.__file__
if path.endswith(".pyc"):
return path[:-1]
elif path.endswith(".pyo"):
return path[:-1]
return path
def dump_module_structure_map(self, table, category_list, namespace = None, nest=False):
from types import ModuleType, FunctionType, TypeType, ClassType, MethodType
if not self.is_spectest:
return
if namespace is None:
namespace = self.module
for name, object in namespace.__dict__.iteritems():
if name == "__builtins__":
continue
elif type(object) in [MethodType, FunctionType]:
if hasattr(object, "__pyspec_attribute"):
self.collect_structure_from_method(table, category_list, object)
elif (type(object) in [TypeType, ClassType]) and (not nest):
self.dump_module_structure_map(table, category_list, object, nest=True)
def collect_structure_from_method(self, table, category_list, function):
attr = getattr(function, "__pyspec_attribute")
print attr
if not hasattr(attr, "call"):
return
if attr.call is None:
return
stack = [None, None]
print attr.call.stack
for frame in attr.call.stack:
if frame[0] in (0, 3):
category = category_list.find_category(frame[1])
if category is not None:
stack.append(category)
else:
stack.append(stack[-1])
if stack[-1] != stack[-2]:
try:
count = table[(stack[-2], stack[-1])]
table[(stack[-2], stack[-1])] = count + 1
except KeyError:
table[(stack[-2], stack[-1])] = 1
else:
stack.pop()
class MethodCategorizer(object):
def __init__(self):
self.methods = {}
def search_docstring(self, docstring):
if docstring is None:
return None
for line in docstring.split("\n"):
if line.startswith("@category"):
category = line[len("@category"):].strip()
return category.split(" ")[0]
return None
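    # Example of the docstring convention handled above (illustrative only):
    # a method documented as
    #
    #     def save(self):
    #         """Persist the record.
    #
    #         @category persistence
    #         """
    #
    # would be registered under the category name 'persistence'.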
def check_method(self, method):
import inspect
docstring = inspect.getdoc(method)
return self.search_docstring(docstring)
def regist_method(self, method):
category = self.check_method(method)
if category is None:
return
if hasattr(method, "func_code"):
self.methods[id(method.func_code)] = category
elif hasattr(method, "im_func"):
self.methods[id(method.im_func.func_code)] = category
def find_category(self, func_code_id):
try:
return self.methods[func_code_id]
except KeyError:
return None
| mit | 1,957,949,977,454,918,700 | 35.633803 | 122 | 0.541234 | false | 4.408475 | false | false | false |
CountZer0/PipelineConstructionSet | python/maya/site-packages/pymel-1.0.5/extras/completion/py/maya/OpenMayaUI.py | 3 | 56653 | from . import OpenMayaRender
from . import OpenMaya
import _OpenMayaUI
import weakref
from __builtin__ import object as _object
from __builtin__ import property as _swig_property
class MDeviceState(_object):
def __init__(self, *args, **kwargs):
pass
def __repr__(self):
pass
def buttonState(*args, **kwargs):
pass
def devicePosition(*args, **kwargs):
pass
def isNull(*args, **kwargs):
pass
def maxAxis(*args, **kwargs):
pass
def setButtonState(*args, **kwargs):
pass
def setDevicePosition(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class new_instancemethod(_object):
"""
instancemethod(function, instance, class)
Create an instance method object.
"""
def __call__(*args, **kwargs):
"""
x.__call__(...) <==> x(...)
"""
pass
def __cmp__(*args, **kwargs):
"""
x.__cmp__(y) <==> cmp(x,y)
"""
pass
def __delattr__(*args, **kwargs):
"""
x.__delattr__('name') <==> del x.name
"""
pass
def __get__(*args, **kwargs):
"""
descr.__get__(obj[, type]) -> value
"""
pass
def __getattribute__(*args, **kwargs):
"""
x.__getattribute__('name') <==> x.name
"""
pass
def __hash__(*args, **kwargs):
"""
x.__hash__() <==> hash(x)
"""
pass
def __repr__(*args, **kwargs):
"""
x.__repr__() <==> repr(x)
"""
pass
def __setattr__(*args, **kwargs):
"""
x.__setattr__('name', value) <==> x.name = value
"""
pass
__func__ = None
__self__ = None
im_class = None
im_func = None
im_self = None
__new__ = None
class MHWShaderSwatchGenerator(OpenMayaRender.MSwatchRenderBase):
def __init__(self, *args):
pass
def __repr__(self):
pass
def createObj(*args, **kwargs):
pass
def getSwatchBackgroundColor(*args, **kwargs):
pass
def initialize(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MCursor(_object):
def __eq__(*args, **kwargs):
pass
def __init__(self, *args):
pass
def __ne__(*args, **kwargs):
pass
def __repr__(self):
pass
def assign(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
crossHairCursor = None
defaultCursor = None
doubleCrossHairCursor = None
editCursor = None
handCursor = None
pencilCursor = None
class MObjectListFilter(_object):
def UIname(*args, **kwargs):
pass
def __init__(self, *args, **kwargs):
pass
def __repr__(self):
pass
def dependentOnSceneUpdates(*args, **kwargs):
pass
def filterType(*args, **kwargs):
pass
def getList(*args, **kwargs):
pass
def name(*args, **kwargs):
pass
def requireListUpdate(*args, **kwargs):
pass
def setFilterType(*args, **kwargs):
pass
def setUIName(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
def deregisterFilter(*args, **kwargs):
pass
def registerFilter(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
kAddRemoveObjects = 1
kExclusionList = 1
kInclusionList = 0
kNone = 0
kNumberOfFilterTypes = 2
class MFeedbackLine(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def className(*args, **kwargs):
pass
def clear(*args, **kwargs):
pass
def setFormat(*args, **kwargs):
pass
def setShowFeedback(*args, **kwargs):
pass
def setTitle(*args, **kwargs):
pass
def setValue(*args, **kwargs):
pass
def showFeedback(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MManipData(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def asBool(*args, **kwargs):
pass
def asDouble(*args, **kwargs):
pass
def asFloat(*args, **kwargs):
pass
def asLong(*args, **kwargs):
pass
def asMObject(*args, **kwargs):
pass
def asShort(*args, **kwargs):
pass
def asUnsigned(*args, **kwargs):
pass
def assign(*args, **kwargs):
pass
def isSimple(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MFnManip3D(OpenMaya.MFnTransform):
def __init__(self, *args):
pass
def __repr__(self):
pass
def isOptimizePlaybackOn(*args, **kwargs):
pass
def isVisible(*args, **kwargs):
pass
def manipScale(*args, **kwargs):
pass
def rotateXYZValue(*args, **kwargs):
pass
def setManipScale(*args, **kwargs):
pass
def setOptimizePlayback(*args, **kwargs):
pass
def setVisible(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
def deleteManipulator(*args, **kwargs):
pass
def globalSize(*args, **kwargs):
pass
def handleSize(*args, **kwargs):
pass
def lineSize(*args, **kwargs):
pass
def setGlobalSize(*args, **kwargs):
pass
def setHandleSize(*args, **kwargs):
pass
def setLineSize(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class M3dView(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def beginGL(*args, **kwargs):
pass
def beginProjMatrixOverride(*args, **kwargs):
pass
def beginSelect(*args, **kwargs):
pass
def beginXorDrawing(*args, **kwargs):
pass
def colorAtIndex(*args, **kwargs):
pass
def colorMask(*args, **kwargs):
pass
def disallowPolygonOffset(*args, **kwargs):
pass
def display(*args, **kwargs):
pass
def displayStyle(*args, **kwargs):
pass
def drawText(*args, **kwargs):
pass
def endGL(*args, **kwargs):
pass
def endProjMatrixOverride(*args, **kwargs):
pass
def endSelect(*args, **kwargs):
pass
def endXorDrawing(*args, **kwargs):
pass
def getCamera(*args, **kwargs):
pass
def getColorIndexAndTable(*args, **kwargs):
pass
def getLightCount(*args, **kwargs):
pass
def getLightIndex(*args, **kwargs):
pass
def getLightPath(*args, **kwargs):
pass
def getLightingMode(*args, **kwargs):
pass
def getRendererName(*args, **kwargs):
pass
def getScreenPosition(*args, **kwargs):
pass
def initNames(*args, **kwargs):
pass
def isLightVisible(*args, **kwargs):
pass
def isShadeActiveOnly(*args, **kwargs):
pass
def isVisible(*args, **kwargs):
pass
def loadName(*args, **kwargs):
pass
def modelViewMatrix(*args, **kwargs):
pass
def multipleDrawEnabled(*args, **kwargs):
pass
def multipleDrawPassCount(*args, **kwargs):
pass
def numActiveColors(*args, **kwargs):
pass
def numDormantColors(*args, **kwargs):
pass
def numUserDefinedColors(*args, **kwargs):
pass
def objectDisplay(*args, **kwargs):
pass
def objectListFilterName(*args, **kwargs):
pass
def pluginObjectDisplay(*args, **kwargs):
pass
def popName(*args, **kwargs):
pass
def popViewport(*args, **kwargs):
pass
def portHeight(*args, **kwargs):
pass
def portWidth(*args, **kwargs):
pass
def projectionMatrix(*args, **kwargs):
pass
def pushName(*args, **kwargs):
pass
def pushViewport(*args, **kwargs):
pass
def readBufferTo2dTexture(*args, **kwargs):
pass
def readColorBuffer(*args, **kwargs):
pass
def readDepthMap(*args, **kwargs):
pass
def refresh(*args, **kwargs):
pass
def renderOverrideName(*args, **kwargs):
pass
def rendererString(*args, **kwargs):
pass
def selectMode(*args, **kwargs):
pass
def setCamera(*args, **kwargs):
pass
def setColorMask(*args, **kwargs):
pass
def setDisallowPolygonOffset(*args, **kwargs):
pass
def setDisplayStyle(*args, **kwargs):
pass
def setDrawColor(*args, **kwargs):
pass
def setDrawColorAndAlpha(*args, **kwargs):
pass
def setMultipleDrawEnable(*args, **kwargs):
pass
def setMultipleDrawPassCount(*args, **kwargs):
pass
def setObjectDisplay(*args, **kwargs):
pass
def setObjectListFilterName(*args, **kwargs):
pass
def setPluginObjectDisplay(*args, **kwargs):
pass
def setRenderOverrideName(*args, **kwargs):
pass
def setShowObjectFilterNameInHUD(*args, **kwargs):
pass
def setShowViewSelectedChildren(*args, **kwargs):
pass
def setUserDefinedColor(*args, **kwargs):
pass
def setViewSelectedPrefix(*args, **kwargs):
pass
def showObjectFilterNameInHUD(*args, **kwargs):
pass
def showViewSelectedChildren(*args, **kwargs):
pass
def textureMode(*args, **kwargs):
pass
def twoSidedLighting(*args, **kwargs):
pass
def updateViewingParameters(*args, **kwargs):
pass
def userDefinedColorIndex(*args, **kwargs):
pass
def usingDefaultMaterial(*args, **kwargs):
pass
def usingMipmappedTextures(*args, **kwargs):
pass
def viewSelectedPrefix(*args, **kwargs):
pass
def viewToObjectSpace(*args, **kwargs):
pass
def viewToWorld(*args, **kwargs):
pass
def viewport(*args, **kwargs):
pass
def widget(*args, **kwargs):
pass
def window(*args, **kwargs):
pass
def wireframeOnShaded(*args, **kwargs):
pass
def wireframeOnlyInShadedMode(*args, **kwargs):
pass
def worldToView(*args, **kwargs):
pass
def writeColorBuffer(*args, **kwargs):
pass
def xray(*args, **kwargs):
pass
def xrayJoints(*args, **kwargs):
pass
def active3dView(*args, **kwargs):
pass
def activeAffectedColor(*args, **kwargs):
pass
def activeTemplateColor(*args, **kwargs):
pass
def applicationShell(*args, **kwargs):
pass
def backgroundColor(*args, **kwargs):
pass
def backgroundColorBottom(*args, **kwargs):
pass
def backgroundColorTop(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
def displayStatus(*args, **kwargs):
pass
def get3dView(*args, **kwargs):
pass
def getM3dViewFromModelEditor(*args, **kwargs):
pass
def getM3dViewFromModelPanel(*args, **kwargs):
pass
def hiliteColor(*args, **kwargs):
pass
def isBackgroundGradient(*args, **kwargs):
pass
def leadColor(*args, **kwargs):
pass
def liveColor(*args, **kwargs):
pass
def numberOf3dViews(*args, **kwargs):
pass
def referenceLayerColor(*args, **kwargs):
pass
def templateColor(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
kActive = 0
kActiveAffected = 10
kActiveColors = 0
kActiveComponent = 7
kActiveTemplate = 6
kBackgroundColor = 6
kBoundingBox = 0
kCenter = 1
kDefaultQualityRenderer = 0
kDepth_8 = 0
kDepth_Float = 1
kDisplayCVs = 65536
kDisplayCameras = 32
kDisplayDeformers = 256
kDisplayDimensions = 2048
kDisplayDynamicConstraints = 67108864
kDisplayDynamics = 512
kDisplayEverything = -1
kDisplayFluids = 1048576
kDisplayFollicles = 2097152
kDisplayGrid = 32768
kDisplayHairSystems = 4194304
kDisplayHulls = 131072
kDisplayIkHandles = 128
kDisplayImagePlane = 8388608
kDisplayJoints = 64
kDisplayLights = 16
kDisplayLocators = 1024
kDisplayManipulators = 134217728
kDisplayMeshes = 4
kDisplayNCloths = 16777216
kDisplayNParticles = 268435456
kDisplayNRigids = 33554432
kDisplayNurbsCurves = 1
kDisplayNurbsSurfaces = 2
kDisplayPivots = 8192
kDisplayPlanes = 8
kDisplaySelectHandles = 4096
kDisplayStrokes = 262144
kDisplaySubdivSurfaces = 524288
kDisplayTextures = 16384
kDormant = 2
kDormantColors = 2
kExcludeMotionTrails = 536870912
kExcludePluginShapes = 1073741824
kExternalRenderer = 3
kFlatShaded = 1
kGouraudShaded = 2
kHighQualityRenderer = 1
kHilite = 4
kIntermediateObject = 9
kInvisible = 3
kLead = 8
kLeft = 0
kLightActive = 2
kLightAll = 0
kLightDefault = 3
kLightNone = 5
kLightSelected = 1
kLive = 1
kNoStatus = 11
kPoints = 4
kRight = 2
kStippleDashed = 1
kStippleNone = 0
kTemplate = 5
kTemplateColor = 5
kUnused1 = 4
kViewport2Renderer = 2
kWireFrame = 3
class MExternalDropData(_object):
def __init__(self, *args, **kwargs):
pass
def __repr__(self):
pass
def color(*args, **kwargs):
pass
def dataSize(*args, **kwargs):
pass
def formats(*args, **kwargs):
pass
def hasColor(*args, **kwargs):
pass
def hasFormat(*args, **kwargs):
pass
def hasHtml(*args, **kwargs):
pass
def hasImage(*args, **kwargs):
pass
def hasText(*args, **kwargs):
pass
def hasUrls(*args, **kwargs):
pass
def html(*args, **kwargs):
pass
def image(*args, **kwargs):
pass
def keyboardModifiers(*args, **kwargs):
pass
def mouseButtons(*args, **kwargs):
pass
def text(*args, **kwargs):
pass
def urls(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
kAltModifier = 134217728
kControlModifier = 67108864
kLeftButton = 1
kMidButton = 4
kMiddleButton = 4
kNoModifier = 0
kRightButton = 2
kShiftModifier = 33554432
class MDrawData(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def geometry(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MTextureEditorDrawInfo(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def drawingFunction(*args, **kwargs):
pass
def setDrawingFunction(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
kDrawEdgeForSelect = 4
kDrawEverything = 2
kDrawFacetForSelect = 5
kDrawFunctionFirst = 1
kDrawFunctionLast = 6
kDrawUVForSelect = 6
kDrawVertexForSelect = 3
kDrawWireframe = 1
class MDeviceChannel(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def axisIndex(*args, **kwargs):
pass
def childByIndex(*args, **kwargs):
pass
def hasChildren(*args, **kwargs):
pass
def longName(*args, **kwargs):
pass
def name(*args, **kwargs):
pass
def numChildren(*args, **kwargs):
pass
def parent(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MUiMessage(OpenMaya.MMessage):
def __init__(self, *args):
pass
def __repr__(self):
pass
def add3dViewDestroyMsgCallback(*args, **kwargs):
pass
def add3dViewPostMultipleDrawPassMsgCallback(*args, **kwargs):
pass
def add3dViewPostRenderMsgCallback(*args, **kwargs):
pass
def add3dViewPreMultipleDrawPassMsgCallback(*args, **kwargs):
pass
def add3dViewPreRenderMsgCallback(*args, **kwargs):
pass
def add3dViewRenderOverrideChangedCallback(*args, **kwargs):
pass
def add3dViewRendererChangedCallback(*args, **kwargs):
pass
def addCameraChangedCallback(*args, **kwargs):
pass
def addUiDeletedCallback(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MDrawRequest(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def assign(*args, **kwargs):
pass
def color(*args, **kwargs):
pass
def component(*args, **kwargs):
pass
def displayCullOpposite(*args, **kwargs):
pass
def displayCulling(*args, **kwargs):
pass
def displayStatus(*args, **kwargs):
pass
def displayStyle(*args, **kwargs):
pass
def drawData(*args, **kwargs):
pass
def drawLast(*args, **kwargs):
pass
def isTransparent(*args, **kwargs):
pass
def material(*args, **kwargs):
pass
def matrix(*args, **kwargs):
pass
def multiPath(*args, **kwargs):
pass
def setColor(*args, **kwargs):
pass
def setComponent(*args, **kwargs):
pass
def setDisplayCullOpposite(*args, **kwargs):
pass
def setDisplayCulling(*args, **kwargs):
pass
def setDisplayStatus(*args, **kwargs):
pass
def setDisplayStyle(*args, **kwargs):
pass
def setDrawData(*args, **kwargs):
pass
def setDrawLast(*args, **kwargs):
pass
def setIsTransparent(*args, **kwargs):
pass
def setMaterial(*args, **kwargs):
pass
def setMatrix(*args, **kwargs):
pass
def setMultiPath(*args, **kwargs):
pass
def setToken(*args, **kwargs):
pass
def setView(*args, **kwargs):
pass
def token(*args, **kwargs):
pass
def view(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MQtUtil(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def addWidgetToMayaLayout(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
def deregisterUIType(*args, **kwargs):
pass
def findControl(*args, **kwargs):
pass
def findLayout(*args, **kwargs):
pass
def findMenuItem(*args, **kwargs):
pass
def findWindow(*args, **kwargs):
pass
def fullName(*args, **kwargs):
pass
def getCurrentParent(*args, **kwargs):
pass
def getLayoutChildren(*args, **kwargs):
pass
def getParent(*args, **kwargs):
pass
def mainWindow(*args, **kwargs):
pass
def nativeWindow(*args, **kwargs):
pass
def registerUIType(*args, **kwargs):
pass
def toMString(*args, **kwargs):
pass
def toQString(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MToolsInfo(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def className(*args, **kwargs):
pass
def isDirty(*args, **kwargs):
pass
def resetDirtyFlag(*args, **kwargs):
pass
def setDirtyFlag(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MEvent(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def getPosition(*args, **kwargs):
pass
def getWindowPosition(*args, **kwargs):
pass
def isModifierControl(*args, **kwargs):
pass
def isModifierKeyRelease(*args, **kwargs):
pass
def isModifierLeftMouseButton(*args, **kwargs):
pass
def isModifierMiddleMouseButton(*args, **kwargs):
pass
def isModifierNone(*args, **kwargs):
pass
def isModifierShift(*args, **kwargs):
pass
def modifiers(*args, **kwargs):
pass
def mouseButton(*args, **kwargs):
pass
def setModifiers(*args, **kwargs):
pass
def setPosition(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
controlKey = 4
kLeftMouse = 64
kMiddleMouse = 128
shiftKey = 1
class MProgressWindow(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def advanceProgress(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
def endProgress(*args, **kwargs):
pass
def isCancelled(*args, **kwargs):
pass
def isInterruptable(*args, **kwargs):
pass
def progress(*args, **kwargs):
pass
def progressMax(*args, **kwargs):
pass
def progressMin(*args, **kwargs):
pass
def progressStatus(*args, **kwargs):
pass
def reserve(*args, **kwargs):
pass
def setInterruptable(*args, **kwargs):
pass
def setProgress(*args, **kwargs):
pass
def setProgressMax(*args, **kwargs):
pass
def setProgressMin(*args, **kwargs):
pass
def setProgressRange(*args, **kwargs):
pass
def setProgressStatus(*args, **kwargs):
pass
def setTitle(*args, **kwargs):
pass
def startProgress(*args, **kwargs):
pass
def title(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MMaterial(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def applyTexture(*args, **kwargs):
pass
def evaluateDiffuse(*args, **kwargs):
pass
def evaluateEmission(*args, **kwargs):
pass
def evaluateMaterial(*args, **kwargs):
pass
def evaluateShininess(*args, **kwargs):
pass
def evaluateSpecular(*args, **kwargs):
pass
def evaluateTexture(*args, **kwargs):
pass
def evaluateTextureTransformation(*args, **kwargs):
pass
def getDiffuse(*args, **kwargs):
pass
def getEmission(*args, **kwargs):
pass
def getHasTransparency(*args, **kwargs):
pass
def getHwShaderNode(*args, **kwargs):
pass
def getShininess(*args, **kwargs):
pass
def getSpecular(*args, **kwargs):
pass
def getTextureTransformation(*args, **kwargs):
pass
def materialIsTextured(*args, **kwargs):
pass
def setMaterial(*args, **kwargs):
pass
def shadingEngine(*args, **kwargs):
pass
def textureImage(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
def defaultMaterial(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
kAmbientColor = 2
kBumpMap = 4
kColor = 0
kCosinePower = 10
kDiffuse = 5
kEccentricity = 11
kHighlightSize = 8
kIncandescence = 3
kReflectedColor = 15
kReflectivity = 14
kRoughness = 7
kSpecularColor = 13
kSpecularRollOff = 12
kTransluscence = 6
kTransparency = 1
kWhiteness = 9
class MMaterialArray(_object):
def __getitem__(*args, **kwargs):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def append(*args, **kwargs):
pass
def assign(*args, **kwargs):
pass
def clear(*args, **kwargs):
pass
def copy(*args, **kwargs):
pass
def insert(*args, **kwargs):
pass
def length(*args, **kwargs):
pass
def remove(*args, **kwargs):
pass
def set(*args, **kwargs):
pass
def setLength(*args, **kwargs):
pass
def setSizeIncrement(*args, **kwargs):
pass
def sizeIncrement(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MDrawTraversal(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def enableFiltering(*args, **kwargs):
pass
def filterNode(*args, **kwargs):
pass
def filteringEnabled(*args, **kwargs):
pass
def frustumValid(*args, **kwargs):
pass
def itemHasStatus(*args, **kwargs):
pass
def itemPath(*args, **kwargs):
pass
def leafLevelCulling(*args, **kwargs):
pass
def numberOfItems(*args, **kwargs):
pass
def setFrustum(*args, **kwargs):
pass
def setLeafLevelCulling(*args, **kwargs):
pass
def setOrthoFrustum(*args, **kwargs):
pass
def setPerspFrustum(*args, **kwargs):
pass
def traverse(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
kActiveItem = 0
kTemplateItem = 1
class MDrawInfo(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def canDrawComponent(*args, **kwargs):
pass
def completelyInside(*args, **kwargs):
pass
def displayStatus(*args, **kwargs):
pass
def displayStyle(*args, **kwargs):
pass
def getPrototype(*args, **kwargs):
pass
def inSelect(*args, **kwargs):
pass
def inUserInteraction(*args, **kwargs):
pass
def inclusiveMatrix(*args, **kwargs):
pass
def multiPath(*args, **kwargs):
pass
def objectDisplayStatus(*args, **kwargs):
pass
def pluginObjectDisplayStatus(*args, **kwargs):
pass
def projectionMatrix(*args, **kwargs):
pass
def setMultiPath(*args, **kwargs):
pass
def userChangingViewContext(*args, **kwargs):
pass
def view(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MDrawRequestQueue(_object):
def __init__(self, *args):
pass
def __repr__(self):
pass
def add(*args, **kwargs):
pass
def assign(*args, **kwargs):
pass
def isEmpty(*args, **kwargs):
pass
def remove(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
class MExternalDropCallback(_object):
def __disown__(self):
pass
def __init__(self, *args):
pass
def __repr__(self):
pass
def externalDropCallback(*args, **kwargs):
pass
def addCallback(*args, **kwargs):
pass
def removeCallback(*args, **kwargs):
pass
__dict__ = None
__weakref__ = None
thisown = None
__swig_destroy__ = None
kMayaDefault = 0
kNoMayaDefaultAndAccept = 1
kNoMayaDefaultAndNoAccept = 2
class MSelectInfo(MDrawInfo):
def __init__(self, *args):
pass
def __repr__(self):
pass
def addSelection(*args, **kwargs):
pass
def getAlignmentMatrix(*args, **kwargs):
pass
def getLocalRay(*args, **kwargs):
pass
def highestPriority(*args, **kwargs):
pass
def isRay(*args, **kwargs):
pass
def selectClosest(*args, **kwargs):
pass
def selectForHilite(*args, **kwargs):
pass
def selectOnHilitedOnly(*args, **kwargs):
pass
def selectPath(*args, **kwargs):
pass
def selectRect(*args, **kwargs):
pass
def selectable(*args, **kwargs):
pass
def selectableComponent(*args, **kwargs):
pass
def setHighestPriority(*args, **kwargs):
pass
def setSnapPoint(*args, **kwargs):
pass
def singleSelection(*args, **kwargs):
pass
def view(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MFnFreePointTriadManip(MFnManip3D):
def __init__(self, *args):
pass
def __repr__(self):
pass
def connectToPointPlug(*args, **kwargs):
pass
def create(*args, **kwargs):
pass
def isDrawAxesOn(*args, **kwargs):
pass
def isKeyframeAllOn(*args, **kwargs):
pass
def isSnapModeOn(*args, **kwargs):
pass
def pointIndex(*args, **kwargs):
pass
def setDirection(*args, **kwargs):
pass
def setDrawArrowHead(*args, **kwargs):
pass
def setDrawAxes(*args, **kwargs):
pass
def setGlobalTriadPlane(*args, **kwargs):
pass
def setKeyframeAll(*args, **kwargs):
pass
def setPoint(*args, **kwargs):
pass
def setSnapMode(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
kViewPlane = 3
kXYPlane = 2
kXZPlane = 1
kYZPlane = 0
class MFnCircleSweepManip(MFnManip3D):
def __init__(self, *args):
pass
def __repr__(self):
pass
def angleIndex(*args, **kwargs):
pass
def axisIndex(*args, **kwargs):
pass
def centerIndex(*args, **kwargs):
pass
def connectToAnglePlug(*args, **kwargs):
pass
def create(*args, **kwargs):
pass
def endCircleIndex(*args, **kwargs):
pass
def endPoint(*args, **kwargs):
pass
def setAngle(*args, **kwargs):
pass
def setCenterPoint(*args, **kwargs):
pass
def setDrawAsArc(*args, **kwargs):
pass
def setEndPoint(*args, **kwargs):
pass
def setNormal(*args, **kwargs):
pass
def setRadius(*args, **kwargs):
pass
def setStartPoint(*args, **kwargs):
pass
def startCircleIndex(*args, **kwargs):
pass
def startPoint(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MFnToggleManip(MFnManip3D):
def __init__(self, *args):
pass
def __repr__(self):
pass
def connectToTogglePlug(*args, **kwargs):
pass
def create(*args, **kwargs):
pass
def direction(*args, **kwargs):
pass
def directionIndex(*args, **kwargs):
pass
def length(*args, **kwargs):
pass
def lengthIndex(*args, **kwargs):
pass
def setDirection(*args, **kwargs):
pass
def setLength(*args, **kwargs):
pass
def setStartPoint(*args, **kwargs):
pass
def setToggle(*args, **kwargs):
pass
def startPoint(*args, **kwargs):
pass
def startPointIndex(*args, **kwargs):
pass
def toggle(*args, **kwargs):
pass
def toggleIndex(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MFnDistanceManip(MFnManip3D):
def __init__(self, *args):
pass
def __repr__(self):
pass
def connectToDistancePlug(*args, **kwargs):
pass
def create(*args, **kwargs):
pass
def currentPointIndex(*args, **kwargs):
pass
def directionIndex(*args, **kwargs):
pass
def distanceIndex(*args, **kwargs):
pass
def isDrawLineOn(*args, **kwargs):
pass
def isDrawStartOn(*args, **kwargs):
pass
def scalingFactor(*args, **kwargs):
pass
def setDirection(*args, **kwargs):
pass
def setDrawLine(*args, **kwargs):
pass
def setDrawStart(*args, **kwargs):
pass
def setScalingFactor(*args, **kwargs):
pass
def setStartPoint(*args, **kwargs):
pass
def startPointIndex(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MFnStateManip(MFnManip3D):
def __init__(self, *args):
pass
def __repr__(self):
pass
def connectToStatePlug(*args, **kwargs):
pass
def create(*args, **kwargs):
pass
def maxStates(*args, **kwargs):
pass
def positionIndex(*args, **kwargs):
pass
def setInitialState(*args, **kwargs):
pass
def setMaxStates(*args, **kwargs):
pass
def state(*args, **kwargs):
pass
def stateIndex(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MFnPointOnCurveManip(MFnManip3D):
def __init__(self, *args):
pass
def __repr__(self):
pass
def connectToCurvePlug(*args, **kwargs):
pass
def connectToParamPlug(*args, **kwargs):
pass
def create(*args, **kwargs):
pass
def curveIndex(*args, **kwargs):
pass
def curvePoint(*args, **kwargs):
pass
def isDrawCurveOn(*args, **kwargs):
pass
def paramIndex(*args, **kwargs):
pass
def parameter(*args, **kwargs):
pass
def setDrawCurve(*args, **kwargs):
pass
def setParameter(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MFnScaleManip(MFnManip3D):
def __init__(self, *args):
pass
def __repr__(self):
pass
def connectToScaleCenterPlug(*args, **kwargs):
pass
def connectToScalePlug(*args, **kwargs):
pass
def create(*args, **kwargs):
pass
def displayWithNode(*args, **kwargs):
pass
def getOrientation(*args, **kwargs):
pass
def getOrientationMode(*args, **kwargs):
pass
def isSnapModeOn(*args, **kwargs):
pass
def scaleCenterIndex(*args, **kwargs):
pass
def scaleIndex(*args, **kwargs):
pass
def setInitialScale(*args, **kwargs):
pass
def setOrientation(*args, **kwargs):
pass
def setOrientationMode(*args, **kwargs):
pass
def setSnapIncrement(*args, **kwargs):
pass
def setSnapMode(*args, **kwargs):
pass
def snapIncrement(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
kArbitraryOrientation = 1
kDefaultOrientation = 0
class MFnDiscManip(MFnManip3D):
def __init__(self, *args):
pass
def __repr__(self):
pass
def angleIndex(*args, **kwargs):
pass
def axisIndex(*args, **kwargs):
pass
def centerIndex(*args, **kwargs):
pass
def connectToAnglePlug(*args, **kwargs):
pass
def create(*args, **kwargs):
pass
def setAngle(*args, **kwargs):
pass
def setCenterPoint(*args, **kwargs):
pass
def setNormal(*args, **kwargs):
pass
def setRadius(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MFnCurveSegmentManip(MFnManip3D):
def __init__(self, *args):
pass
def __repr__(self):
pass
def connectToCurvePlug(*args, **kwargs):
pass
def connectToEndParamPlug(*args, **kwargs):
pass
def connectToStartParamPlug(*args, **kwargs):
pass
def create(*args, **kwargs):
pass
def curveIndex(*args, **kwargs):
pass
def endParamIndex(*args, **kwargs):
pass
def endParameter(*args, **kwargs):
pass
def setEndParameter(*args, **kwargs):
pass
def setStartParameter(*args, **kwargs):
pass
def startParamIndex(*args, **kwargs):
pass
def startParameter(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MFnDirectionManip(MFnManip3D):
def __init__(self, *args):
pass
def __repr__(self):
pass
def connectToDirectionPlug(*args, **kwargs):
pass
def create(*args, **kwargs):
pass
def directionIndex(*args, **kwargs):
pass
def endPointIndex(*args, **kwargs):
pass
def setDirection(*args, **kwargs):
pass
def setDrawStart(*args, **kwargs):
pass
def setNormalizeDirection(*args, **kwargs):
pass
def setStartPoint(*args, **kwargs):
pass
def startPointIndex(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
class MFnRotateManip(MFnManip3D):
def __init__(self, *args):
pass
def __repr__(self):
pass
def connectToRotationCenterPlug(*args, **kwargs):
pass
def connectToRotationPlug(*args, **kwargs):
pass
def create(*args, **kwargs):
pass
def displayWithNode(*args, **kwargs):
pass
def isSnapModeOn(*args, **kwargs):
pass
def rotateMode(*args, **kwargs):
pass
def rotationCenterIndex(*args, **kwargs):
pass
def rotationIndex(*args, **kwargs):
pass
def setInitialRotation(*args, **kwargs):
pass
def setRotateMode(*args, **kwargs):
pass
def setRotationCenter(*args, **kwargs):
pass
def setSnapIncrement(*args, **kwargs):
pass
def setSnapMode(*args, **kwargs):
pass
def snapIncrement(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
kGimbal = 2
kObjectSpace = 0
kWorldSpace = 1
class MFnPointOnSurfaceManip(MFnManip3D):
def __init__(self, *args):
pass
def __repr__(self):
pass
def connectToParamPlug(*args, **kwargs):
pass
def connectToSurfacePlug(*args, **kwargs):
pass
def create(*args, **kwargs):
pass
def getParameters(*args, **kwargs):
pass
def isDrawSurfaceOn(*args, **kwargs):
pass
def paramIndex(*args, **kwargs):
pass
def setDrawArrows(*args, **kwargs):
pass
def setDrawSurface(*args, **kwargs):
pass
def setParameters(*args, **kwargs):
pass
def surfaceIndex(*args, **kwargs):
pass
def className(*args, **kwargs):
pass
thisown = None
__swig_destroy__ = None
def M3dView_referenceLayerColor(*args, **kwargs):
pass
def MDrawInfo_className(*args, **kwargs):
pass
def MQtUtil_toQString(*args, **kwargs):
pass
def MProgressWindow_className(*args, **kwargs):
pass
def MUiMessage_add3dViewPostRenderMsgCallback(*args, **kwargs):
pass
def MDrawRequestQueue_className(*args, **kwargs):
pass
def _swig_getattr(self, class_type, name):
pass
def MDrawData_className(*args, **kwargs):
pass
def M3dView_liveColor(*args, **kwargs):
pass
def MQtUtil_findLayout(*args, **kwargs):
pass
def MFnStateManip_swigregister(*args, **kwargs):
pass
def MUiMessage_add3dViewDestroyMsgCallback(*args, **kwargs):
pass
def MEvent_swigregister(*args, **kwargs):
pass
def MFnCurveSegmentManip_swigregister(*args, **kwargs):
pass
def MMaterial_className(*args, **kwargs):
pass
def M3dView_displayStatus(*args, **kwargs):
pass
def MProgressWindow_setInterruptable(*args, **kwargs):
pass
def M3dView_active3dView(*args, **kwargs):
pass
def MFnManip3D_setHandleSize(*args, **kwargs):
pass
def MDrawRequestQueue_swigregister(*args, **kwargs):
pass
def MQtUtil_deregisterUIType(*args, **kwargs):
pass
def MFnPointOnCurveManip_className(*args, **kwargs):
pass
def MDrawInfo_swigregister(*args, **kwargs):
pass
def MUiMessage_addUiDeletedCallback(*args, **kwargs):
pass
def MFnRotateManip_swigregister(*args, **kwargs):
pass
def MFeedbackLine_swigregister(*args, **kwargs):
pass
def MDrawTraversal_swigregister(*args, **kwargs):
pass
def MProgressWindow_reserve(*args, **kwargs):
pass
def MFnToggleManip_className(*args, **kwargs):
pass
def MFeedbackLine_clear(*args, **kwargs):
pass
def MFnPointOnCurveManip_swigregister(*args, **kwargs):
pass
def MManipData_swigregister(*args, **kwargs):
pass
def MDrawData_swigregister(*args, **kwargs):
pass
def MProgressWindow_endProgress(*args, **kwargs):
pass
def MProgressWindow_setTitle(*args, **kwargs):
pass
def MHWShaderSwatchGenerator_getSwatchBackgroundColor(*args, **kwargs):
pass
def MFnCurveSegmentManip_className(*args, **kwargs):
pass
def MDeviceChannel_swigregister(*args, **kwargs):
pass
def MQtUtil_getLayoutChildren(*args, **kwargs):
pass
def MUiMessage_swigregister(*args, **kwargs):
pass
def MUiMessage_add3dViewPreMultipleDrawPassMsgCallback(*args, **kwargs):
pass
def MFnManip3D_lineSize(*args, **kwargs):
pass
def M3dView_backgroundColorBottom(*args, **kwargs):
pass
def MToolsInfo_className(*args, **kwargs):
pass
def MObjectListFilter_registerFilter(*args, **kwargs):
pass
def M3dView_leadColor(*args, **kwargs):
pass
def MFeedbackLine_setValue(*args, **kwargs):
pass
def M3dView_templateColor(*args, **kwargs):
pass
def _swig_setattr(self, class_type, name, value):
pass
def MMaterialArray_swigregister(*args, **kwargs):
pass
def MFeedbackLine_showFeedback(*args, **kwargs):
pass
def MSelectInfo_className(*args, **kwargs):
pass
def MProgressWindow_isInterruptable(*args, **kwargs):
pass
def MQtUtil_mainWindow(*args, **kwargs):
pass
def MSelectInfo_swigregister(*args, **kwargs):
pass
def MQtUtil_getParent(*args, **kwargs):
pass
def MFnFreePointTriadManip_className(*args, **kwargs):
pass
def MToolsInfo_setDirtyFlag(*args, **kwargs):
pass
def MUiMessage_addCameraChangedCallback(*args, **kwargs):
pass
def M3dView_getM3dViewFromModelEditor(*args, **kwargs):
pass
def M3dView_activeAffectedColor(*args, **kwargs):
pass
def MMaterial_defaultMaterial(*args, **kwargs):
pass
def M3dView_applicationShell(*args, **kwargs):
pass
def MHWShaderSwatchGenerator_initialize(*args, **kwargs):
pass
def MProgressWindow_setProgressMin(*args, **kwargs):
pass
def MFnManip3D_setLineSize(*args, **kwargs):
pass
def M3dView_getM3dViewFromModelPanel(*args, **kwargs):
pass
def _swig_setattr_nondynamic_method(set):
pass
def weakref_proxy(*args, **kwargs):
"""
proxy(object[, callback]) -- create a proxy object that weakly
references 'object'. 'callback', if given, is called with a
reference to the proxy when 'object' is about to be finalized.
"""
pass
def M3dView_hiliteColor(*args, **kwargs):
pass
def MQtUtil_getCurrentParent(*args, **kwargs):
pass
def MProgressWindow_setProgressRange(*args, **kwargs):
pass
def M3dView_backgroundColor(*args, **kwargs):
pass
def MProgressWindow_title(*args, **kwargs):
pass
def MProgressWindow_swigregister(*args, **kwargs):
pass
def MExternalDropCallback_swigregister(*args, **kwargs):
pass
def MFeedbackLine_setShowFeedback(*args, **kwargs):
pass
def MFnDirectionManip_className(*args, **kwargs):
pass
def MQtUtil_className(*args, **kwargs):
pass
def MUiMessage_add3dViewPostMultipleDrawPassMsgCallback(*args, **kwargs):
pass
def MDeviceState_swigregister(*args, **kwargs):
pass
def MFnCircleSweepManip_className(*args, **kwargs):
pass
def MObjectListFilter_deregisterFilter(*args, **kwargs):
pass
def MFnDistanceManip_className(*args, **kwargs):
pass
def MFnFreePointTriadManip_swigregister(*args, **kwargs):
pass
def MFnManip3D_deleteManipulator(*args, **kwargs):
pass
def MProgressWindow_setProgress(*args, **kwargs):
pass
def MProgressWindow_isCancelled(*args, **kwargs):
pass
def M3dView_className(*args, **kwargs):
pass
def MFnDiscManip_swigregister(*args, **kwargs):
pass
def MQtUtil_nativeWindow(*args, **kwargs):
pass
def MFnManip3D_className(*args, **kwargs):
pass
def MToolsInfo_resetDirtyFlag(*args, **kwargs):
pass
def MMaterialArray_className(*args, **kwargs):
pass
def MFnDistanceManip_swigregister(*args, **kwargs):
pass
def MFeedbackLine_className(*args, **kwargs):
pass
def MFnScaleManip_swigregister(*args, **kwargs):
pass
def M3dView_backgroundColorTop(*args, **kwargs):
pass
def MToolsInfo_swigregister(*args, **kwargs):
pass
def MFnManip3D_setGlobalSize(*args, **kwargs):
pass
def MFnPointOnSurfaceManip_swigregister(*args, **kwargs):
pass
def MObjectListFilter_swigregister(*args, **kwargs):
pass
def MQtUtil_registerUIType(*args, **kwargs):
pass
def M3dView_swigregister(*args, **kwargs):
pass
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
pass
def MProgressWindow_setProgressStatus(*args, **kwargs):
pass
def MFnStateManip_className(*args, **kwargs):
pass
def MQtUtil_addWidgetToMayaLayout(*args, **kwargs):
pass
def MCursor_swigregister(*args, **kwargs):
pass
def MDrawRequest_className(*args, **kwargs):
pass
def MTextureEditorDrawInfo_className(*args, **kwargs):
pass
def MHWShaderSwatchGenerator_swigregister(*args, **kwargs):
pass
def MFnManip3D_globalSize(*args, **kwargs):
pass
def MUiMessage_add3dViewRendererChangedCallback(*args, **kwargs):
pass
def MExternalDropCallback_addCallback(*args, **kwargs):
pass
def MFnPointOnSurfaceManip_className(*args, **kwargs):
pass
def MFnDirectionManip_swigregister(*args, **kwargs):
pass
def _swig_repr(self):
pass
def MFnToggleManip_swigregister(*args, **kwargs):
pass
def MExternalDropData_swigregister(*args, **kwargs):
pass
def MProgressWindow_progressMax(*args, **kwargs):
pass
def MFeedbackLine_setFormat(*args, **kwargs):
pass
def MMaterial_swigregister(*args, **kwargs):
pass
def MProgressWindow_advanceProgress(*args, **kwargs):
pass
def MFnCircleSweepManip_swigregister(*args, **kwargs):
pass
def MHWShaderSwatchGenerator_createObj(*args, **kwargs):
pass
def MQtUtil_fullName(*args, **kwargs):
pass
def MQtUtil_findWindow(*args, **kwargs):
pass
def MQtUtil_toMString(*args, **kwargs):
pass
def MToolsInfo_isDirty(*args, **kwargs):
pass
def M3dView_get3dView(*args, **kwargs):
pass
def MUiMessage_add3dViewPreRenderMsgCallback(*args, **kwargs):
pass
def MFnScaleManip_className(*args, **kwargs):
pass
def MFnRotateManip_className(*args, **kwargs):
pass
def M3dView_numberOf3dViews(*args, **kwargs):
pass
def MQtUtil_findMenuItem(*args, **kwargs):
pass
def M3dView_activeTemplateColor(*args, **kwargs):
pass
def MExternalDropCallback_removeCallback(*args, **kwargs):
pass
def MProgressWindow_setProgressMax(*args, **kwargs):
pass
def MProgressWindow_progressStatus(*args, **kwargs):
pass
def MObjectListFilter_className(*args, **kwargs):
pass
def MQtUtil_findControl(*args, **kwargs):
pass
def MUiMessage_className(*args, **kwargs):
pass
def MFeedbackLine_setTitle(*args, **kwargs):
pass
def MProgressWindow_progressMin(*args, **kwargs):
pass
def MQtUtil_swigregister(*args, **kwargs):
pass
def MTextureEditorDrawInfo_swigregister(*args, **kwargs):
pass
def MDrawRequest_swigregister(*args, **kwargs):
pass
def MUiMessage_add3dViewRenderOverrideChangedCallback(*args, **kwargs):
pass
def M3dView_isBackgroundGradient(*args, **kwargs):
pass
def MProgressWindow_progress(*args, **kwargs):
pass
def MEvent_className(*args, **kwargs):
pass
def MManipData_className(*args, **kwargs):
pass
def MFnManip3D_swigregister(*args, **kwargs):
pass
def MFnManip3D_handleSize(*args, **kwargs):
pass
def MProgressWindow_startProgress(*args, **kwargs):
pass
def MFnDiscManip_className(*args, **kwargs):
pass
cvar = None
_newclass = 1
| bsd-3-clause | 7,938,223,139,684,012,000 | 13.822868 | 73 | 0.506963 | false | 4.403996 | false | false | false |
pymanopt/pymanopt | examples/dominant_eigenvector.py | 1 | 3073 | import os
import autograd.numpy as np
import tensorflow as tf
import theano.tensor as T
import torch
from examples._tools import ExampleRunner
from numpy import linalg as la, random as rnd
import pymanopt
from pymanopt.manifolds import Sphere
from pymanopt.solvers import SteepestDescent
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"
SUPPORTED_BACKENDS = (
"Autograd", "Callable", "PyTorch", "TensorFlow", "Theano"
)
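# Note: run() below symmetrizes the matrix, and for a symmetric A minimizing
# f(x) = -x^T A x over the unit sphere maximizes the Rayleigh quotient, so the
# minimizer is an eigenvector for the largest eigenvalue of A; the Euclidean
# gradient supplied for the "Callable" backend is grad f(x) = -2 A x.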
def create_cost_egrad(backend, A):
m, n = A.shape
egrad = None
if backend == "Autograd":
@pymanopt.function.Autograd
def cost(x):
return -np.inner(x, A @ x)
elif backend == "Callable":
@pymanopt.function.Callable
def cost(x):
return -np.inner(x, A @ x)
@pymanopt.function.Callable
def egrad(x):
return -2 * A @ x
elif backend == "PyTorch":
A_ = torch.from_numpy(A)
@pymanopt.function.PyTorch
def cost(x):
return -torch.matmul(x, torch.matmul(A_, x))
elif backend == "TensorFlow":
x = tf.Variable(tf.zeros(n, dtype=np.float64), name="X")
@pymanopt.function.TensorFlow(x)
def cost(x):
return -tf.tensordot(x, tf.tensordot(A, x, axes=1), axes=1)
elif backend == "Theano":
x = T.vector()
@pymanopt.function.Theano(x)
def cost(x):
return -x.T.dot(T.dot(A, x))
else:
raise ValueError("Unsupported backend '{:s}'".format(backend))
return cost, egrad
def run(backend=SUPPORTED_BACKENDS[0], quiet=True):
n = 128
matrix = rnd.randn(n, n)
matrix = 0.5 * (matrix + matrix.T)
cost, egrad = create_cost_egrad(backend, matrix)
manifold = Sphere(n)
problem = pymanopt.Problem(manifold, cost=cost, egrad=egrad)
if quiet:
problem.verbosity = 0
solver = SteepestDescent()
estimated_dominant_eigenvector = solver.solve(problem)
if quiet:
return
# Calculate the actual solution by a conventional eigenvalue decomposition.
eigenvalues, eigenvectors = la.eig(matrix)
dominant_eigenvector = eigenvectors[:, np.argmax(eigenvalues)]
# Make sure both vectors have the same direction. Both are valid
# eigenvectors, but for comparison we need to get rid of the sign
# ambiguity.
if (np.sign(dominant_eigenvector[0]) !=
np.sign(estimated_dominant_eigenvector[0])):
estimated_dominant_eigenvector = -estimated_dominant_eigenvector
# Print information about the solution.
print("l2-norm of x: %f" % la.norm(dominant_eigenvector))
print("l2-norm of xopt: %f" % la.norm(estimated_dominant_eigenvector))
print("Solution found: %s" % np.allclose(
dominant_eigenvector, estimated_dominant_eigenvector, rtol=1e-3))
error_norm = la.norm(
dominant_eigenvector - estimated_dominant_eigenvector)
print("l2-error: %f" % error_norm)
if __name__ == "__main__":
runner = ExampleRunner(run, "Dominant eigenvector of a PSD matrix",
SUPPORTED_BACKENDS)
runner.run()
| bsd-3-clause | -3,975,183,999,994,145,000 | 28.548077 | 79 | 0.635861 | false | 3.403101 | false | false | false |
markovmodel/thermotools | test/test_bar.py | 1 | 1657 | # This file is part of thermotools.
#
# Copyright 2015 Computational Molecular Biology Group, Freie Universitaet Berlin (GER)
#
# thermotools is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import thermotools.bar as bar
import scipy.integrate
import numpy as np
from numpy.testing import assert_allclose
def make_gauss(N, sigma, mu):
k = 1
s = -1.0 / (2 * sigma**2)
def f(x):
return k * np.exp(s * (x - mu)**2)
return f
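# Analytic reference: the integral of exp(-(x-mu)**2 / (2*sigma**2)) over the
# real line is sigma*sqrt(2*pi), so the log-ratio computed below for sigma=5
# versus sigma=1 is ln(5) ~= 1.609.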
def delta_f_gaussian():
f1 = scipy.integrate.quad(make_gauss(N=100000, sigma=1, mu=0), -np.inf, np.inf)
f2 = scipy.integrate.quad(make_gauss(N=100000, sigma=5, mu=0), -np.inf, np.inf)
return np.log(f2[0]) - np.log(f1[0])
def test_bar():
x1 = np.random.normal(loc=0, scale=1.0, size=10000)
x2 = np.random.normal(loc=0, scale=12.5, size=10050)
u_x1_x1 = 0.5 * x1**2
u_x2_x2 = 12.5 * x2**2
u_x1_x2 = 0.5 * x2**2
u_x2_x1 = 12.5 * x1**2
dbIJ = u_x1_x1 - u_x2_x1
dbJI = u_x2_x2 - u_x1_x2
assert_allclose(bar.df(dbIJ, dbJI, np.zeros(dbJI.shape[0])), delta_f_gaussian(), atol=1.0E-1)
| lgpl-3.0 | -2,925,888,970,545,269,000 | 36.659091 | 97 | 0.677731 | false | 2.808475 | false | false | false |
ProfMobius/ThinLauncher | surfaces/LeftMenu.py | 1 | 1156 | import pygame
from Constants import *
from Label import Label
class LeftMenu(pygame.Surface):
def __init__(self, w, h):
super(LeftMenu, self).__init__((w, h), pygame.SRCALPHA)
self.selected = 0
self.buttons = []
self.data = None
def redraw(self, screen, x, y):
self.fill((100, 100, 100, 50))
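        # Draw the unselected buttons back to front, then draw the selected
        # button last so it is rendered on top of its overlapping neighbours.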
for i in range(len(self.buttons) -1, -1, -1):
if i != self.selected:
self.buttons[i].redraw(self, i * 0, i * (LEFT_MENU_BUTTON_HEIGHT - 10))
self.buttons[self.selected].redraw(self, self.selected * 0, self.selected * (LEFT_MENU_BUTTON_HEIGHT - 10))
screen.blit(self, self.get_rect(x=x, y=y))
def init(self, data):
self.data = data
self.buttons = []
self.buttons = [Label(LEFT_MENU_WIDTH, LEFT_MENU_BUTTON_HEIGHT, i, align=ALIGN_LEFT, logo=True) for i in data]
self.setSelected(0)
def getSelected(self):
return self.selected
def setSelected(self, index):
self.selected = index
for button in self.buttons:
button.setSelected(False)
self.buttons[index].setSelected(True)
| apache-2.0 | 7,369,858,021,584,314,000 | 31.111111 | 118 | 0.596021 | false | 3.461078 | false | false | false |
cliffe/SecGen | modules/utilities/unix/audit_tools/ghidra/files/release/Ghidra/Features/Python/ghidra_scripts/jython_basics.py | 2 | 1158 | # Examples of Jython-specific functionality
# @category: Examples.Python
# Using Java data structures from Jython
python_list = [1, 2, 3]
java_list = java.util.LinkedList(java.util.Arrays.asList(1, 2, 3))
print str(type(python_list))
print str(type(java_list))
# Importing Java packages for simpler Java calls
from java.util import LinkedList, Arrays
python_list = [1, 2, 3]
java_list = LinkedList(Arrays.asList(1, 2, 3))
print str(type(python_list))
print str(type(java_list))
# Python adds helpful syntax to Java data structures
print python_list[0]
print java_list[0] # can't normally do this in java
print java_list[0:2] # can't normally do this in java
# Iterate over Java collection the Python way
for entry in java_list:
print entry
# "in" keyword compatibility
print str(3 in java_list)
# Create GUI with Java Swing
from javax.swing import JFrame
frame = JFrame() # don't call constructor with "new"
frame.setSize(400,400)
frame.setLocation(200, 200)
frame.setTitle("Jython JFrame")
frame.setVisible(True)
# Use JavaBean properties in constructor with keyword arguments!
JFrame(title="Super Jython JFrame", size=(400,400), visible=True)
| gpl-3.0 | -4,032,463,259,587,541,000 | 29.473684 | 66 | 0.753022 | false | 3.234637 | false | false | false |
braycarlson/viking | cogs/moderation.py | 1 | 18252 | import asyncio
import discord
import logging
from asyncio import TimeoutError
from database.model import HiddenCommands
from discord.ext import commands
from utilities.format import format_list
from utilities.member import MemberError, get_member_by_id
from utilities.time import midnight, timeout
log = logging.getLogger(__name__)
class Moderation(commands.Cog):
def __init__(self, viking):
self.viking = viking
self.viking.loop.create_task(
self.purge_spam()
)
async def purge_spam(self):
"""
A function that purges all messages from the spam channel at
midnight.
"""
await self.viking.wait_until_ready()
while not self.viking.is_closed():
time = midnight()
await asyncio.sleep(time)
            for guild in self.viking.guilds:
                channel = guild.get_channel(579830092352716820)
                # Guard against guilds that do not have the spam channel,
                # which would otherwise raise AttributeError on None.
                if channel is not None:
                    await channel.purge()
async def chat_restrict(self):
"""
A function that changes Discord permissions to chat restrict
a member.
"""
overwrite = discord.PermissionOverwrite()
overwrite.send_messages = False
overwrite.send_tts_messages = False
overwrite.add_reactions = False
return overwrite
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
async def afk(self, ctx, identifier):
"""
*afk <identifier>
A command that moves a member by name, nickname or ID to a
designated voice channel.
"""
try:
member_id = await get_member_by_id(self, ctx, identifier)
except MemberError:
await ctx.send('No member found.')
except TimeoutError:
await ctx.send('You have run out of time. Please try again.')
else:
member = ctx.guild.get_member(member_id)
try:
await member.edit(voice_channel=ctx.guild.afk_channel)
except discord.HTTPException:
await ctx.send(f"{member} could not be moved to Valhalla.")
else:
log.info(f"{ctx.author} moved {member} to Valhalla.")
@commands.command(hidden=True)
@commands.bot_has_permissions(ban_members=True)
@commands.has_any_role('Administrator', 'Moderator')
async def ban(self, ctx, *, identifier):
"""
*ban <identifier>
A command that bans a member by name, nickname or ID.
"""
try:
member_id = await get_member_by_id(self, ctx, identifier)
except MemberError:
await ctx.send('No member found.')
except TimeoutError:
await ctx.send('You have run out of time. Please try again.')
else:
member = self.viking.get_user(member_id)
try:
await ctx.guild.ban(member)
except discord.HTTPException:
await ctx.send(f"{member} could not be banned.")
else:
log.info(f"{ctx.author} banned {member}.")
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
    async def clear(self, ctx, limit: int):
"""
*clear <limit>
A command that clears a specified amount of messages from a
text channel.
"""
await ctx.message.delete()
await ctx.channel.purge(limit=limit)
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
async def deafen(self, ctx, identifier):
"""
*deafen <identifier>
A command that deafens a member by name, nickname or ID.
"""
try:
member_id = await get_member_by_id(self, ctx, identifier)
except MemberError:
await ctx.send('No member found.')
except TimeoutError:
await ctx.send('You have run out of time. Please try again.')
else:
member = ctx.guild.get_member(member_id)
try:
await member.edit(deafen=True)
except discord.HTTPException:
await ctx.send(f"{member} could not be deafened.")
else:
log.info(f"{ctx.author} deafened {member}.")
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
async def disconnect(self, ctx, identifier):
"""
*disconnect <identifier>
A command that disconnects a member from a voice channel by name,
nickname or ID.
"""
try:
member_id = await get_member_by_id(self, ctx, identifier)
except MemberError:
await ctx.send('No member found.')
except TimeoutError:
await ctx.send('You have run out of time. Please try again.')
else:
member = ctx.guild.get_member(member_id)
try:
await member.edit(voice_channel=None)
except discord.HTTPException:
await ctx.send(f"{member} could not be disconnected.")
else:
log.info(f"{ctx.author} disconnected {member}.")
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
async def hidden(self, ctx):
"""
*hidden
A command that displays hidden commands that are available
for administrators/moderators to use.
"""
rows = await HiddenCommands.select('name').gino.all()
commands = [dict(row).get('name') for row in rows]
command = format_list(
commands,
symbol='asterisk',
sort=True
)
embed = discord.Embed(color=self.viking.color)
embed.add_field(name='Hidden Commands', value=command)
await ctx.send(embed=embed)
@commands.command(hidden=True)
@commands.bot_has_permissions(kick_members=True)
@commands.has_any_role('Administrator', 'Moderator')
async def kick(self, ctx, identifier):
"""
*kick <identifier>
A command that kicks a member by name, nickname or ID.
"""
try:
member_id = await get_member_by_id(self, ctx, identifier)
except MemberError:
await ctx.send('No member found.')
except TimeoutError:
await ctx.send('You have run out of time. Please try again.')
else:
member = self.viking.get_user(member_id)
try:
await ctx.guild.kick(member)
except discord.HTTPException:
await ctx.send(f"{member} could not be kicked.")
else:
log.info(f"{ctx.author} kicked {member}.")
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
async def load(self, ctx, *, extension):
"""
*load <extension>
A command that loads an extension.
"""
extension = f"cogs.{extension}"
try:
self.viking.load_extension(extension)
except ModuleNotFoundError:
await ctx.send(f"`{extension}` does not exist.")
else:
await ctx.send(f"`{extension}` was successfully loaded.")
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
async def mute(self, ctx, identifier):
"""
*mute <identifier>
A command that mutes a member by name, nickname or ID.
"""
try:
member_id = await get_member_by_id(self, ctx, identifier)
except MemberError:
await ctx.send('No member found.')
except TimeoutError:
await ctx.send('You have run out of time. Please try again.')
else:
member = ctx.guild.get_member(member_id)
try:
await member.edit(mute=True)
except discord.HTTPException:
await ctx.send(f"{member} could not be muted.")
else:
log.info(f"{ctx.author} muted {member}.")
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
async def purge(self, ctx):
"""
*purge
A command that purges all messages from a text channel.
"""
await ctx.channel.purge()
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
async def reload(self, ctx, *, extension):
"""
*reload <extension>
A command that reloads an extension.
"""
extension = f"cogs.{extension}"
try:
self.viking.unload_extension(extension)
self.viking.load_extension(extension)
except ModuleNotFoundError:
await ctx.send(f"`{extension}` does not exist.")
else:
await ctx.send(f"`{extension}` was successfully reloaded.")
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
async def restrict(self, ctx, *, identifier):
"""
*restrict <identifier>
A command that restricts a member by name, nickname or ID.
"""
try:
member_id = await get_member_by_id(self, ctx, identifier)
except MemberError:
await ctx.send('No member found.')
except TimeoutError:
await ctx.send('You have run out of time. Please try again.')
else:
member = ctx.guild.get_member(member_id)
try:
for channel in ctx.guild.text_channels:
overwrite = await self.chat_restrict()
await channel.set_permissions(member, overwrite=overwrite)
except discord.HTTPException:
await ctx.send(f"{member} could not be chat-restricted.")
else:
log.info(f"{ctx.author} chat-restricted {member}.")
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
    async def softdeafen(self, ctx, seconds: int, *, identifier):
"""
*softdeafen <seconds> <identifier>
A command that soft-deafens a member by name, nickname or ID.
"""
try:
member_id = await get_member_by_id(self, ctx, identifier)
except MemberError:
await ctx.send('No member found.')
except TimeoutError:
await ctx.send('You have run out of time. Please try again.')
else:
member = ctx.guild.get_member(member_id)
try:
if seconds <= 3600:
await member.edit(deafen=True)
else:
await ctx.send('A soft-deafen must be less than an hour.')
except discord.HTTPException:
await ctx.send(f"{member} could not be soft-deafened.")
else:
log.info(f"{ctx.author} soft-deafened {member}.")
while not self.viking.is_closed():
time = timeout(seconds=seconds)
await asyncio.sleep(time)
await member.edit(deafen=False)
break
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
    async def softmute(self, ctx, seconds: int, *, identifier):
"""
*softmute <seconds> <identifier>
A command that soft-mutes a member by name, nickname or ID.
"""
try:
member_id = await get_member_by_id(self, ctx, identifier)
except MemberError:
await ctx.send('No member found.')
except TimeoutError:
await ctx.send('You have run out of time. Please try again.')
else:
member = ctx.guild.get_member(member_id)
try:
if seconds <= 3600:
await member.edit(mute=True)
else:
await ctx.send('A soft-mute must be less than an hour.')
except discord.HTTPException:
await ctx.send(f"{member} could not be soft-muted.")
else:
log.info(f"{ctx.author} soft-muted {member}.")
while not self.viking.is_closed():
time = timeout(seconds=seconds)
await asyncio.sleep(time)
await member.edit(mute=False)
break
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
    async def softrestrict(self, ctx, seconds: int, *, identifier):
"""
*softrestrict <seconds> <identifier>
A command that soft-restricts a member by name, nickname or ID.
"""
try:
member_id = await get_member_by_id(self, ctx, identifier)
except MemberError:
await ctx.send('No member found.')
except TimeoutError:
await ctx.send('You have run out of time. Please try again.')
else:
member = ctx.guild.get_member(member_id)
try:
if seconds <= 3600:
for channel in ctx.guild.text_channels:
overwrite = await self.chat_restrict()
await channel.set_permissions(
member,
overwrite=overwrite
)
else:
await ctx.send('A soft-restrict must be less than an hour.')
except discord.HTTPException:
await ctx.send(f"{member} could not be chat-restricted.")
else:
log.info(f"{ctx.author} chat-restricted {member}.")
while not self.viking.is_closed():
time = timeout(seconds=seconds)
await asyncio.sleep(time)
for channel in ctx.guild.text_channels:
await channel.set_permissions(member, overwrite=None)
break
@commands.command(hidden=True)
@commands.bot_has_permissions(ban_members=True)
@commands.has_any_role('Administrator', 'Moderator')
async def unban(self, ctx, *, identifier):
"""
*unban <identifier>
A command that unbans a member by name, nickname or ID.
"""
try:
member_id = await get_member_by_id(
self,
ctx,
identifier,
table='banned_members'
)
except MemberError:
await ctx.send('No member found.')
except TimeoutError:
await ctx.send('You have run out of time. Please try again.')
else:
member = await self.viking.fetch_user(member_id)
try:
await ctx.guild.unban(member)
except discord.HTTPException:
await ctx.send(f"{member} could not be unbanned.")
else:
log.info(f"{ctx.author} unbanned {member}.")
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
async def undeafen(self, ctx, identifier):
"""
*undeafen <identifier>
A command that undeafens a member by name, nickname or ID.
"""
try:
member_id = await get_member_by_id(self, ctx, identifier)
except MemberError:
await ctx.send('No member found.')
except TimeoutError:
await ctx.send('You have run out of time. Please try again.')
else:
member = ctx.guild.get_member(member_id)
try:
await member.edit(deafen=False)
except discord.HTTPException:
await ctx.send(f"{member} could not be undeafened.")
else:
log.info(f"{ctx.author} undeafened {member}.")
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
async def unload(self, ctx, *, extension):
"""
*unload <extension>
A command that unloads an extension.
"""
extension = f"cogs.{extension}"
try:
self.viking.unload_extension(extension)
except commands.ExtensionNotLoaded:
await ctx.send(f"`{extension}` is not loaded or does not exist.")
else:
await ctx.send(f"`{extension}` was successfully unloaded.")
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
async def unmute(self, ctx, identifier):
"""
*unmute <identifier>
A command that unmutes a member by name, nickname or ID.
"""
try:
member_id = await get_member_by_id(self, ctx, identifier)
except MemberError:
await ctx.send('No member found.')
except TimeoutError:
await ctx.send('You have run out of time. Please try again.')
else:
member = ctx.guild.get_member(member_id)
try:
await member.edit(mute=False)
except discord.HTTPException:
await ctx.send(f"{member} could not be unmuted.")
else:
log.info(f"{ctx.author} unmuted {member}.")
@commands.command(hidden=True)
@commands.has_any_role('Administrator', 'Moderator')
async def unrestrict(self, ctx, *, identifier):
"""
*unrestrict <identifier>
A command that unrestricts a member by name, nickname or ID.
"""
try:
member_id = await get_member_by_id(self, ctx, identifier)
except MemberError:
await ctx.send('No member found.')
except TimeoutError:
await ctx.send('You have run out of time. Please try again.')
else:
member = ctx.guild.get_member(member_id)
try:
for channel in ctx.guild.text_channels:
await channel.set_permissions(member, overwrite=None)
except discord.HTTPException:
await ctx.send(f"{member} could not be unrestricted.")
else:
log.info(f"{ctx.author} unrestricted {member}.")
def setup(viking):
viking.add_cog(Moderation(viking))
| mit | -8,379,491,097,569,145,000 | 31.827338 | 80 | 0.56213 | false | 4.309799 | false | false | false |
fmartingr/uvepe8 | converter/controller.py | 1 | 2008 | from objects import Frame, Diff
#import diff_methods
class Controller(object):
current = -1
items = []
def get_current(self):
if self.current >= 0:
return self.items[self.current]
def get_previous(self):
if self.current > 0:
return self.items[self.current - 1]
class FrameController(Controller):
method = None
current = -1
items = []
def __init__(self, method='Simple'):
# Load diff method
module = __import__("diff_methods")
try:
method = getattr(module, "%sMethod" % method)
except Exception:
print "Cant load %s diff method, using simple..." % method
method = getattr(module, "SimpleMethod")
self.method = method()
def append(self, path):
new_item = Frame(self, path)
self.items.append(new_item)
self.current += 1
def get_difference(self, current_frame, previous_frame):
diffs = self.method.difference(current_frame, previous_frame)
return diffs
def remove(self, frame):
self.items.remove(frame)
self.current -= 1
class DiffController(Controller):
current = -1
items = []
total = {
"width": 0,
"height": 0
}
def append(self, object):
new_item = Diff(self, object['image'],
object['size'][0],
object['size'][1])
self.total["width"] += object["size"][0]
self.total["height"] += object["size"][1]
new_item.hash = object['hash']
self.items.append(new_item)
self.current += 1
def find_hash(self, hash):
# Find if hash has already been stored
# *What do we say to redundancy? Not today*
if len(self.items) > 0:
for item in self.items:
if item.hash == hash:
return self.items.index(item)
return None
def sort_by_size(self):
self.items.sort(key=lambda x: x.size, reverse=True)
| gpl-2.0 | 8,406,440,875,016,979,000 | 25.421053 | 70 | 0.558267 | false | 3.952756 | false | false | false |
jesuscript/topo-mpi | topo/plotting/palette.py | 1 | 13410 | """
Palette class hierarchy, for constructing an RGB color out of a scalar value.
$Id$
"""
__version__='$Revision$'
import param
import plot
# CEBALERT: is this file usable?
#
# topo/plotting/palette.py:10: 'plot' imported but unused
# topo/plotting/palette.py:177: undefined name 'array'
# topo/plotting/palette.py:185: undefined name 'interpolate'
# topo/plotting/palette.py:185: undefined name 'color'
# topo/plotting/palette.py:185: undefined name 'color'
# topo/plotting/palette.py:188: undefined name 'interpolate'
# topo/plotting/palette.py:188: undefined name 'color'
# topo/plotting/palette.py:188: undefined name 'color'
### JABALERT: Should be able to construct a Palette automatically by
### accepting a string specification whose characters each stand for
### colors between which to interpolate.
###
### We'd like to support a string interface like:
### colormap(somestring), where somestring is a list of characters
### corresponding to colors between which to interpolate.
### (Interpolation is performed linearly in RGB space.) Available
### colors include:
###
### R Red
### Y Yellow
### G Green
### C Cyan
### B Blue
### M Magenta
### K Black
### W White
###
### Use a lowercase letter to indicate that a color should use half intensity.
### For instance, a <colorspec> of 'KgYW' would map the level range 0.0->1.0
### to the color range black->dark green->yellow->white, with smooth
### interpolation between the specified colors.
###
### In addition to these string-specified palettes (the basic
### necessity), we would like to support classes for other methods for
### constructing palettes based on the hue, saturation, and value:
###
### Hue [saturation [value]]
### Useful for plotting cyclic quantities, such as orientation.
### The hue is computed from the level, and is combined with the given fixed
### saturation and value (default '1.0 1.0') to determine the color. The hue
### wraps around at each end of the range (e.g. red->yellow->green->blue->magenta
### ->red), and thus is usually appropriate only when the quantity plotted has
### that same property. For the defaults, nearly identical to RYGCBMR.
###
### Saturation [hue [value]]
### Usually SpecifiedHue is used instead of specifying this type directly.
### The saturation is computed from the level, and is combined with the given
### fixed hue and value (default '0.0 1.0') to determine the color.
###
### Value [hue [saturation]]
### Usually Grayscale is used instead of specifying this type directly.
### The value is computed from the level, and is combined with the given fixed
### hue and saturation (default '0.0 0.0') to determine the color. The defaults
### result in a range of grayscale values from black to white; the optional
### arguments allow other colors to be used instead of gray.
### For the defaults, nearly identical to KW.
###
### Grayscale [hue [saturation]]
### Useful for monochrome displays or printers, or to show photographs.
### Same as Value but the scale is flipped when ppm_paper_based_colors=True.
### This makes the most-active areas show up with the intensity that is most
### visible for the given medium (video or paper). For the defaults, nearly
### identical to KW, or WK for ppm_paper_based_colors.
###
### SpecifiedHue [hue [confidence]]
### Useful for color-coding a plot with a specific hue visible on the default
### background. For paper_based_colors=False, same as ValueColorLookup;
### the confidence is used as the saturation. Such a plot works well for
### showing color on a black background. For paper_based_colors==True,
### returns the specified hue masked by the specified confidence, such
### that low values produce white, and high values produce black for low
### confidences and the specified hue for high confidences. Such a plot
### is good for showing colors on light backgrounds.
###
### MapSpecifiesHue [nameofhuemap [nameofconfidencemap]]
### Neural-region-specific variant of SpecifiedHue where these colorspec
### arguments specify not the actual hue and confidence, but the names of
### registered maps (as in define_plot) in which to look up the hue and
### confidence when plotting. This colorspec can be used by plot_unit or
### plot_unit_range to colorize a plot based on some property of a unit;
### it is not supported in other contexts. Examples:
### Region::Eye0::Afferent0::colorspec='MapSpecifiesHue OrientationPreference OrientationSelectivity'
### Region::Ganglia*::Afferent*::colorspec='MapSpecifiesHue OrientationPreference OrientationSelectivity
###
### SpecifiedColor [hue [saturation [value]]]
### Used to turn off color ranges, e.g. for a plot whose shape is more
### important than the intensity of each pixel, such as a histogram. Ignores
### the level, and always returns the single given fixed color. The default
### color is a medium gray: '0.0 0.0 0.5'. For the defaults, nearly identical
### to 'w'.
###
###
###
### Notes on implementing the string-based palette construction, taken
### from lissom/src/colorlookup.h:
###
### Might consider making the numcolors odd and adding a special
### entry for the top of the range to make the range inclusive.
### This might be more intuitive and would make plotting
### inversely-scaled items (with a reversed color order) match
### regularly-scaled ones.
###
### StringBasedPalette
### def __init__(spec,numcolors=0,scale=default_scale):
### steps = spec.length()
### stepsize = size_t(colors.size()/(steps>1 ? (steps-1) : 1))
### start,i
### for (i=0,start=0; i<steps-1; i++,start+=stepsize)
### interpolate(start, start+stepsize,color(spec[i]), color(spec[i+1]))
### interpolate(start, colors.size(),color(spec[i]), color(spec[steps-1]))
###
### def interpolate(start, finish, startcolor, finishcolor):
### """
### Fill the lookup table (or a portion of it) with linear
### interpolations between two colors. The upper array index
### and finishcolor are exclusive.
### """
### assert (start<=finish);
###
### num_vals = int(finish-start)
### division = (num_vals!=0 ? 1.0/num_vals : 0)
###
### rs = startcolor.red()
### gs = startcolor.green()
### bs = startcolor.blue()
###
### rinc = division*(finishcolor.red() - startcolor.red())
### ginc = division*(finishcolor.green() - startcolor.green())
### binc = division*(finishcolor.blue() - startcolor.blue())
###
### for(i=0; i<num_vals; i++)
### colors[start+i]=PixelType(rs+rinc*i,gs+ginc*i,bs+binc*i)
###
### def color(char):
### """
### Returns a color given a one-character name. Uppercase is full-strength,
### lowercase is half-strength.
### """
### h=0.5
### switch (char)
### case 'R': p=PixelType(1,0,0); break; case 'r': p=PixelType(h,0,0); break; /* Red */
### case 'Y': p=PixelType(1,1,0); break; case 'y': p=PixelType(h,h,0); break; /* Yellow */
### case 'G': p=PixelType(0,1,0); break; case 'g': p=PixelType(0,h,0); break; /* Green */
### case 'C': p=PixelType(0,1,1); break; case 'c': p=PixelType(0,h,h); break; /* Cyan */
### case 'B': p=PixelType(0,0,1); break; case 'b': p=PixelType(0,0,h); break; /* Blue */
### case 'M': p=PixelType(1,0,1); break; case 'm': p=PixelType(h,0,h); break; /* Magenta */
### case 'W': p=PixelType(1,1,1); break; case 'w': p=PixelType(h,h,h); break; /* White */
### case 'K': p=PixelType(0,0,0); break; case 'k': p=PixelType(0,0,0); break; /* Black */
### return p
# Supported background types, used for such things as determining
# what color to be used for outlines, fills, etc.
BLACK_BACKGROUND = 0
WHITE_BACKGROUND = 1
######################## JC: starting new implementation #############
from numpy.oldnumeric import zeros
class StringBasedPalette(param.Parameterized):
    """
    Palette built by interpolating linearly (in RGB space) between the
    colors named in a specification string such as "KRYW"; uppercase
    letters are full-strength colors, lowercase half-strength.  Follows
    the colorlookup.h pseudocode in the comments above.
    """
    # RGB triples (0.0..1.0) for the supported one-character color names.
    basic_colors = {'R':(1,0,0),'Y':(1,1,0),'G':(0,1,0),'C':(0,1,1),
                    'B':(0,0,1),'M':(1,0,1),'W':(1,1,1),'K':(0,0,0)}
    def __init__(self,spec="KRYW",num_colors=0,scale=1.0,**params):
        super(StringBasedPalette,self).__init__(**params)
        if num_colors<=0: num_colors=252
        self.scale = scale  # retained from the original sketch; not used yet
        self.colors = zeros((num_colors,3),'d')
        steps = len(spec)
        if steps>1: stepsize = num_colors/(steps-1)
        else:       stepsize = num_colors
        start = 0
        for i in range(steps-1):
            self.interpolate(start,start+stepsize,self.color(spec[i]),self.color(spec[i+1]))
            start += stepsize
        # Fill any entries left over from integer division with the last color.
        self.interpolate(start,num_colors,self.color(spec[-1]),self.color(spec[-1]))
    def color(self,char):
        """Return the RGB triple for a one-character color name."""
        r,g,b = self.basic_colors[char.upper()]
        strength = 1.0 if char.isupper() else 0.5
        return (r*strength,g*strength,b*strength)
    def interpolate(self,start,finish,startcolor,finishcolor):
        """Fill colors[start:finish] with a linear ramp between two colors."""
        num_vals = finish-start
        division = 1.0/num_vals if num_vals else 0.0
        rs,gs,bs = startcolor
        rinc = division*(finishcolor[0]-rs)
        ginc = division*(finishcolor[1]-gs)
        binc = division*(finishcolor[2]-bs)
        for i in range(num_vals):
            self.colors[start+i] = (rs+rinc*i,gs+ginc*i,bs+binc*i)
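# Usage sketch for the class above (illustrative only; nothing else in this
# file calls it yet):
#   pal = StringBasedPalette(spec="KgYW")  # black -> dark green -> yellow -> white
#   first,last = pal.colors[0],pal.colors[-1]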
# CB: lazy hack around the lambda that was in Palette.colors_
class F(object):
def __call__(self):
return [(i,i,i) for i in range(256)]
### JABHACKALERT: Needs significant cleanup -- should be much more
### straightforward, taking some specification and immediately
### constructing a usable object (without e.g. requiring set() to be
### called).
class Palette(param.Parameterized):
"""
Each palette has 3*256 values that are keyed by an index.
This base class takes in a list of 256 triples.
A Palette object has 256 triples of RGB values ranging from 0
... 255. The purpose of the class is to maintain an accurate
palette conversion between a number (0..255) and an RGB triple
    even as the background of the plots changes.  If the background is
Black, then the 0 intensity should often be 0, but if the
background of the Plot should be white, then the 0 intensity
should probably be 255. This automatic updating is possible
through the use of Dynamic Parameters, and lambda functions.
This class stores a passed in variable named colors. If the
variable is a lambda function that gives the 256 triples, then it
    will evaluate the lambda each time a data request is made.  If it
is a static list, then the palette is fixed. It may be possible
to make Palette a 'pure' Dynamic parameter, with different types
of palettes setting the lambda. More power to you if you do that.
"""
background = param.Dynamic(default=BLACK_BACKGROUND)
colors_ = param.Dynamic(default=F()) #(lambda:[(i,i,i) for i in range(256)])
def __init__(self,**params):
"""
        Does not fill in the colors; set() must be called afterwards,
        preferably from a subclass of Palette.
"""
super(Palette,self).__init__(**params)
def set(self, colors):
"""
Colors is a list of 256 triples, each with a 0..255 RGB value
or a lambda function that generates the list. Lambdas will be
necessary for dynamic shifts in black or white background
changes.
"""
self.colors_ = colors
def flat(self):
"""
Return the palette in a flat form of 768 numbers. If the
colors parameter is a callable object, call it for the
list of values.
"""
c = self.colors_
return_list = []
if callable(c):
self.warning('Callable Parameter value returned, ', callable(c))
c = c()
for each in c:
return_list.extend(list(each))
return return_list
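    # The 768 values from flat() are in the R,G,B order that PIL's
    # Image.putpalette() expects, so a 'P'-mode image could adopt this palette
    # via im.putpalette(palette.flat()) (usage sketch only; not exercised here).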
def color(self, pos):
"""
Return the tuple of RGB color found at pos in color list
"""
c = self.colors_
if callable(c):
self.warning('Callable Parameter value returned, ', callable(c))
c = c()
return c[pos]
def colors(self):
"""
Return the complete list of palette colors in tuple form
"""
c = self.colors_
if callable(c):
self.warning('Callable Parameter value returned, ', callable(c))
c = c()
return c
### JABALERT: There is probably no reason to have this class; this
### functionality is likely to be a special case of many other
### classes.
class Monochrome(Palette):
"""
Color goes from Black to White if background is Black. It goes
from White to Black if background is set to White. By using
a Dynamic Parameter, it should be able to update the palette
automatically if the plot background color changes.
"""
def __init__(self,**params):
"""
Set a lambda function to the colors list which switches
if the background switches. This makes the accessors in
the parent class rather slow since it has to do a list
comprehension each time it accesses the list.
"""
super(Monochrome,self).__init__(**params)
        # Store the generator on colors_ so the Palette accessors
        # (flat, color, colors) pick up the background-dependent ramp.
        self.colors_ = lambda: self.__mono_palette__()
def __mono_palette__(self):
"""
Function to be passed as a lambda to the Parameter
"""
if self.background == BLACK_BACKGROUND:
set_ = [(i,i,i) for i in range(256)]
else: # Reverse the order 255 ... 0
set_ = [(i,i,i) for i in range(255,-1,-1)]
return set_
| bsd-3-clause | 5,585,290,121,707,293,000 | 40.009174 | 106 | 0.643997 | false | 3.625304 | false | false | false |
AaronGeist/Llama | biz/ocr/putao.py | 1 | 1855 | import os
from PIL import Image
class PuTaoCaptchaParser:
@classmethod
def analyze(cls, image_path):
# load image and convert to grey mode
im = Image.open(image_path).convert("L")
im_cropped = im.crop((10, 9, 54, 19))
pixel_matrix = im_cropped.load()
# turn grey pixel into white
for col in range(0, im_cropped.height):
for row in range(0, im_cropped.width):
if pixel_matrix[row, col] != 0:
pixel_matrix[row, col] = 255
        # crop the expression into its 5 characters (e.g. "1+2*3" -> 1, +, 2, *, 3)
pos_list = [(0, 0, 8, 10), (9, 0, 16, 10), (18, 0, 26, 10), (27, 0, 34, 10), (36, 0, 44, 10)]
expression = ""
for pos in pos_list:
expression += cls.match(im_cropped.crop(pos))
# calculate expression
return eval(expression)
# find image with maximum same pixel and return image name
@classmethod
def match(cls, image):
root_path = "ocr/resources/putao/"
image_name_dict = dict()
for root, dirs, files in os.walk(root_path):
for file in files:
image_name_dict[file[0:1]] = Image.open(os.path.join(root, file)).convert("L").load()
pixel_matrix = image.load()
max_score = 0
match_image_name = ""
for image_name in image_name_dict:
target_image = image_name_dict[image_name]
score = 0
for col in range(0, image.height):
for row in range(0, image.width):
if pixel_matrix[row, col] == target_image[row, col]:
score += 1
# record max score and corresponding image name
if score > max_score:
max_score = score
match_image_name = image_name
return match_image_name
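

# Usage sketch (illustrative; the per-character reference images under
# "ocr/resources/putao/" and the captcha file name are assumed):
#
#     value = PuTaoCaptchaParser.analyze("captcha.png")
#     # e.g. a captcha reading "3+5*2" would yield 13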
| gpl-3.0 | -1,480,584,969,521,980,200 | 30.982759 | 101 | 0.537466 | false | 3.755061 | false | false | false |
studentisgss/booking | news/urls.py | 1 | 1370 | """activities URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.views.decorators.cache import cache_page
from news.views import NewsView, NewsEditView, NewsAddView, NewsDeleteView, MessageView
from news.feeds import RssNewsFeed, AtomNewsFeed
app_name = "news"
urlpatterns = [
url(r'^$', NewsView.as_view(), name="news"),
url(r'^new$', NewsAddView.as_view(), name="new"),
url(r'^edit/(?P<pk>\d+)$', NewsEditView.as_view(), name="edit"),
url(r'^delete/(?P<pk>\d+)$', NewsDeleteView.as_view(), name="delete"),
url(r'^messages/(?P<activity_id>\d+)$', MessageView.as_view(), name="messages"),
url(r'^feed/rss$', cache_page(15 * 60)(RssNewsFeed()), name="rss"),
url(r'^feed/atom$', cache_page(15 * 60)(AtomNewsFeed()), name="atom"),
]
| gpl-3.0 | -3,035,935,102,196,703,700 | 40.515152 | 87 | 0.676642 | false | 3.246445 | false | false | false |
stxnext/intranet-open | src/intranet3/intranet3/subscribers.py | 1 | 2359 | import datetime
from markupsafe import Markup
from pyramid.httpexceptions import HTTPForbidden
from pyramid.events import subscriber
from pyramid.events import BeforeRender, ContextFound
from intranet3.models import DBSession
class PresenceTracking(object):
def __init__(self, event):
today = datetime.date.today()
user = event['request'].user
self.arrival = None
if not user:
return
row = DBSession.query('ts').from_statement("""
SELECT MIN(p.ts) as "ts"
FROM presence_entry p
WHERE DATE(p.ts) = :today
AND p.user_id = :user_id
""").params(today=today, user_id=user.id).first()
if not (row and row.ts):
return
arrival = row.ts
now = datetime.datetime.now()
since_morning_hours = float((now - arrival).seconds) / 3600
self.present = since_morning_hours
noted = 0.0
row = DBSession.query('time').from_statement("""
SELECT COALESCE(SUM(t.time), 0.0) as "time"
FROM time_entry t
WHERE t.date = :today
AND t.user_id = :user_id
AND t.deleted = FALSE
""").params(user_id=user.id, today=today).first()
if row:
noted = row.time
self.noted = noted
self.remaining = since_morning_hours - noted
self.arrival = arrival
def get_flashed_messages(request):
def get_flashed_messages(*args, **kwargs):
return [ (a, b) for a, b in request.session.pop_flash() ]
return get_flashed_messages
@subscriber(BeforeRender)
def add_global(event):
request = event['request']
event['presence_tracking'] = PresenceTracking(event)
event['get_flashed_messages'] = get_flashed_messages(event['request'])
event['csrf_field'] = Markup('<input type="hidden" name="csrf_token" value="%s">' % request.session.get_csrf_token())
for key, value in request.tmpl_ctx.iteritems():
event[key] = value
@subscriber(ContextFound)
def csrf_validation(event):
request = event.request
if not request.is_xhr and request.method == "POST":
csrf_token = request.POST.get('csrf_token')
if csrf_token is None or csrf_token != request.session.get_csrf_token():
raise HTTPForbidden('CSRF token is missing or invalid')
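

# Illustrative note (added for clarity): because of the check above, every
# non-AJAX POST form must carry the session's CSRF token. Assuming Jinja-style
# templates, a form can simply emit the ready-made field exposed by the
# BeforeRender subscriber:
#
#     <form method="POST" action="...">
#         {{ csrf_field }}
#         ...
#     </form>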
| mit | -7,764,728,680,976,918,000 | 32.225352 | 121 | 0.616363 | false | 3.750397 | false | false | false |
Submitty/AnalysisTools | commonAST/refMaps.py | 1 | 9709 | import context
import eqTag
'''
Hex values for colors of marked nodes
'''
adlDetailColor = "#B0B0B0"
adlStrColor = "#8b98ca"
notMatchedColor = "#ff0000"
'''
The tagEqlMap is a dictionary of nodes in the common AST, to their equivalent nodes in the full AST.
This map is only used when the tags are not equivalent (ignoring case).
For example, in python, "For" in the common AST matches "for" in the full AST.
Because the only differences are case, these constructs do NOT need to be added to this map.
However, in python, "FunctionDef" in the common AST matches to "function" in the full AST. So, it must be added to the tagEqlMap.
The structure of the tagEqlMap is as follows:
commonASTConstruct: [EqualityTagObj1, EqualityTagObj2, ..., EqualityTagObjN]
where EqualityObjs are instances of the EqTag class in eqTag.py
Equality Objects contain a list of tags that must all be in the potential match,
a context that the match must be in (children, parents, grand parents etc.)
and a language of the full AST
The Context objects are instances of the Context class in context.py
A Context Object contains a list of lookahead tags (children)
a list of sibling tags
a list of parent tags
and a list of grand parent tags
If the tags, context, and language match, the then common AST node matches to that full AST node.
'''
emptyCntxt = context.Context("py")
emptyCntxtCpp = context.Context("cpp")
classNoCntxt = eqTag.EqTag(["class"], emptyCntxt)
functionNoCntxt = eqTag.EqTag(["function"], emptyCntxt)
caseNoCntxt = eqTag.EqTag(["case"], emptyCntxt)
ifNoCntxt = eqTag.EqTag(["if"], emptyCntxt)
functionNoCntxtCpp = eqTag.EqTag(["function"], emptyCntxtCpp)
augAssignNoCntxt = eqTag.EqTag(["augmented", "assign"], emptyCntxt)
binOpNoCntxt = eqTag.EqTag(["binary", "operator"], emptyCntxt)
binOpNoCntxtCpp = eqTag.EqTag(["binaryop"], emptyCntxtCpp)
unOpNoCntxt = eqTag.EqTag(["unary", "operator"], emptyCntxt)
listNoCntxt = eqTag.EqTag(["list"], emptyCntxt)
setNoCntxt = eqTag.EqTag(["set"], emptyCntxt)
dictNoCntxt = eqTag.EqTag(["dict"], emptyCntxt)
tupleNoCntxt = eqTag.EqTag(["tuple"], emptyCntxt)
subscriptNoCntxt = eqTag.EqTag(["subscript"], emptyCntxt)
basesCntxt = eqTag.EqTag(["variable"], context.Context("py", ["\*"],["\*"],["argument"],["parameters"]))
paramClassContext = eqTag.EqTag(["parameters"], context.Context("py", ["\*"],["\*"],["class"],["\*"]))
dottedNameImportCntxt = eqTag.EqTag(["dottedname"], context.Context("py", ["\*"],["\*"],["import"],["\*"]))
dottedNameGpImportCntxt = eqTag.EqTag(["dottedname"], context.Context("py", ["\*"],["\*"],["\*"],["import"]))
bodyIfCntxt = eqTag.EqTag(["body"], context.Context("py",["\*"],["\*"],["case"],["if"]))
bodyForCntxt = eqTag.EqTag(["body"], context.Context("py",["\*"],["\*"],["for"],["\*"]))
bodyWhileCntxt = eqTag.EqTag(["body"], context.Context("py",["\*"],["\*"],["while"], ["\*"]))
bodyFuncCntxt = eqTag.EqTag(["body"], context.Context("py",["\*"],["\*"],["function"],["\*"]))
elseIfCntxt = eqTag.EqTag(["else"], context.Context("py",["\*"],["\*"],["if"],["\*"]))
lteBinOpCntxt = eqTag.EqTag(["binary", "operator"], context.Context("py",["lte"],["\*"], ["\*"],["\*"]))
ltBinOpCntxt = eqTag.EqTag(["binary", "operator"], context.Context("py",["lessthan"],["\*"], ["\*"],["\*"]))
gtBinOpCntxt = eqTag.EqTag(["binary", "operator"], context.Context("py",["gt"],["\*"], ["\*"],["\*"]))
gteBinOpCntxt = eqTag.EqTag(["binary", "operator"], context.Context("py",["gte"],["\*"], ["\*"],["\*"]))
eqBinOpCntxt = eqTag.EqTag(["binary", "operator"], context.Context("py",["equals"],["\*"], ["\*"],["\*"]))
caseIfCntxt = eqTag.EqTag(["case"], context.Context("py",["\*"], ["\*"], ["if"],["\*"]))
ifCppCntxt = eqTag.EqTag(["ifstatement"], context.Context("cpp",["\*"], ["\*"], ["\*"],["\*"]))
forLoopCppCntxt = eqTag.EqTag(["forloop"], context.Context("cpp",["\*"],["\*"],["\*"],["\*"]))
assignCppCntxt = eqTag.EqTag(["assignment"], context.Context("cpp",["\*"],["\*"],["\*"],["\*"]))
whileCppCntxt = eqTag.EqTag(["whileloop"], context.Context("cpp",["\*"],["\*"],["\*"],["\*"]))
tagEqlMap = dict({"classdef": [classNoCntxt], #classdef matches to class in any context
"functiondef": [functionNoCntxt, functionNoCntxtCpp],
"compoundstmt": [bodyIfCntxt, bodyForCntxt, bodyWhileCntxt, bodyFuncCntxt, elseIfCntxt],
"augassign": [augAssignNoCntxt],
"binop": [binOpNoCntxt, binOpNoCntxtCpp],
"unaryop": [unOpNoCntxt],
"comparison": [eqBinOpCntxt, gtBinOpCntxt, gteBinOpCntxt, ltBinOpCntxt, lteBinOpCntxt],
"for": [forLoopCppCntxt],
"while": [whileCppCntxt],
"assign": [assignCppCntxt],
"identifier": [dottedNameImportCntxt, dottedNameGpImportCntxt, basesCntxt],
"bases": [paramClassContext],
"container": [listNoCntxt, setNoCntxt, dictNoCntxt, tupleNoCntxt],
"if": [caseNoCntxt, ifCppCntxt],
"ifblock": [ifNoCntxt, ifCppCntxt],
"expr": [subscriptNoCntxt]})
'''
The adlDetailMap is a dictionary of nodes in the full AST that are not relevant to our use cases
and thus do not need to be included in the common AST
The structure of the adlDetail is as follows:
fullASTConstruct: [Context1, Context2, ..., ContextN]
where Context objects are instances of the Context class in context.py
A Context Object contains a language
a list of lookahead tags (children)
a list of sibling tags
a list of parent tags
and a list of grand parent tags
If the node in the full AST map has a tag equal to a tag in the adlDetailMap,
and its context matches one of the contexts in its value, the node is additional detail.
Our use cases are as follows:
(1) Detecting nested if statements and dangling elses
(2) Detecting nested loops & crude complexity analysis
(3) Detecting for/while loops and nested if/elses inside
(4) Detecting member function calls of an outside class
(5) Counting number of while and if statements
(6) Count number of function calls
(7) Exception Handling - make sure exceptions are never thrown
(8) Detecting class hierarchies
(9) Detecting function calls of a forbidden module
(10) Forbidding exec
If you modify the use cases, adjust the adlDetailMap to match your use cases.
'''
assignContext = context.Context("py",["\*"],["\*"],["assign"],["\*"])
functionContext = context.Context("py",["\*"],["\*"],["function"],["\*"])
paramContext = context.Context("py",["\*"],["\*"],["parameters"],["\*"])
accessContext = context.Context("py",["\*"],["\*"],["access"],["\*"])
augAssignContext = context.Context("py",["\*"],["\*"],["augmented", "assign"],["\*"])
forContext = context.Context("py",["\*"],["\*"],["for"],["\*"])
binOpContext = context.Context("py",["\*"], ["\*"],["binary", "operator"], ["\*"])
unOpContext = context.Context("py",["\*"], ["\*"],["unary", "operator"], ["\*"])
adlDetailMap = dict({"literal": [emptyCntxt],
"parmvar": [emptyCntxtCpp],
"string": [assignContext, binOpContext],
"greaterthan": [binOpContext],
"gt": [binOpContext],
"gte": [binOpContext],
"lessthan": [binOpContext],
"lte": [binOpContext],
"eq": [binOpContext],
"and": [binOpContext],
"or": [binOpContext],
"string": [emptyCntxt],
"none": [emptyCntxt],
"null": [emptyCntxt],
"variable": [emptyCntxt],
"parameters": [functionContext],
"parameter": [paramContext],
"targets": [forContext],
"minus": [binOpContext],
"modulo": [binOpContext],
"plus": [binOpContext],
"plusassign": [augAssignContext],
"divassign": [augAssignContext],
"floordivassign":[augAssignContext],
"multassign": [augAssignContext],
"minusassign": [augAssignContext],
"exponent": [binOpContext],
"multiply":[binOpContext],
"divide":[binOpContext],
"not":[unOpContext],
"integerliteral": [emptyCntxtCpp]})
'''
The adlStructMap is a dictionary of nodes in either AST, that if removed, would not affect the structure or data in the AST
The structure of the adlStructMap is as follows:
languageConstruct: [ContextObj1, ContextObj2, ..., ContextObjN]
Where Context objects are instances of the Context class in context.py
A Context Object contains a list of lookahead tags (children)
a list of sibling tags
a list of parent tags
and a list of grand parent tags
If the tag and one of the contexts match, the node is marked as an additional structure node
'''
#TODO: Fix this - some of these are language py but they should apply to the common AST
classContext = context.Context("py",["\*"],["\*"],["class"],["\*"])
basesContext = context.Context("py",["\*"],["\*"],["bases"],["\*"]) #commonAST not PY
paramCntxt = context.Context("py",["\*"],["\*"],["parameters"],["class"])
importCntxt = context.Context("py",["\*"],["\*"],["import"],["\*"])
importCntxtGP = context.Context("py",["\*"],["\*"],["\*"],["import"])
ifElseContext = context.Context("py",["+case"], ["\*"], ["\*"], ["\*"]) #+ signifies more than one
callContext = context.Context("py",["\*"],["\*"],["call"],["\*"])
functionContext = context.Context("py",["\*"],["\*"],["functiondef"],["\*"])
noChildrenContext = context.Context("py",None,["\*"], ["if"], ["\*"])
whileNoChildrenContext = context.Context("py",None,["\*"], ["while"], ["\*"])
forNoChildrenContext = context.Context("py",None,["\*"], ["for"], ["\*"])
adlStructMap = dict({"body":[classContext],
"args":[callContext],
#"if": [ifElseContext],
"access": [emptyCntxt],
"else": [noChildrenContext, whileNoChildrenContext, forNoChildrenContext],
"argument":[callContext, paramCntxt],
"paren":[emptyCntxt],
"import":[importCntxt],
"importitem":[importCntxt, importCntxtGP],
"importeverything":[importCntxt],
"identifier":[functionContext, classContext]})
| bsd-3-clause | 8,210,747,895,515,876,000 | 47.064356 | 129 | 0.657534 | false | 3.188506 | false | false | false |
slickqa/slickqaweb | slickqaweb/api/apidocs.py | 1 | 13173 | __author__ = 'jcorbett'
from slickqaweb.app import app
from .standardResponses import JsonResponse
from mongoengine import *
from flask import request
import re
import types
#------------- For documenting other API endpoints ---------------------------
resources = []
def standard_query_parameters(f):
if not hasattr(f, 'argument_docs'):
f.argument_docs = {}
if not hasattr(f, 'argument_types'):
f.argument_types = {}
if not hasattr(f, 'argument_param_types'):
f.argument_param_types = {}
f.argument_docs['q'] = "Slick query string"
f.argument_docs['orderby'] = "Property to sort by. Use - before property name to reverse sort order."
f.argument_docs['limit'] = "Limit the number of items to query."
f.argument_docs['skip'] = "Skip past a certain number of results."
f.argument_types['q'] = "string"
f.argument_types['orderby'] = "string"
f.argument_types['limit'] = "integer"
f.argument_types['skip'] = "integer"
f.argument_param_types['q'] = "query"
f.argument_param_types['orderby'] = "query"
f.argument_param_types['limit'] = "query"
f.argument_param_types['skip'] = "query"
return f
def add_resource(path, description):
retval = SwaggerApiDescription(path=path, description=description)
resources.append(retval)
return retval
def returns(datatype):
def returns_datatype(f):
f.returns = datatype
return f
return returns_datatype
def accepts(datatype):
def accepts_datatype(f):
f.accepts = datatype
return f
return accepts_datatype
def note(note_info):
def add_note(f):
f.note = note_info
return f
return add_note
def argument_doc(name, description, argtype="string", paramtype="path"):
def argdoc(f):
if not hasattr(f, 'argument_docs'):
f.argument_docs = {}
if not hasattr(f, 'argument_types'):
f.argument_types = {}
if not hasattr(f, 'argument_param_types'):
f.argument_param_types = {}
f.argument_docs[name] = description
f.argument_types[name] = argtype
f.argument_param_types[name] = paramtype
return f
return argdoc
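
# Illustrative use of the decorators above (hypothetical endpoint, not part of
# this module). The swagger generator below reads the attributes they attach
# to the view function:
#
#     @app.route("/api/widgets/<widget_id>", methods=["GET"])
#     @argument_doc('widget_id', "The id of the widget to fetch.")
#     @standard_query_parameters
#     @returns(SomeWidgetDocument)
#     def get_widget(widget_id):
#         ...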
#------------- API Endpoints for the Docs ------------------------------------
@app.route("/api/api-docs")
def get_swagger_toplevel():
return JsonResponse(SwaggerApiDocs())
@app.route("/api/api-docs/<resource_name>")
def get_swagger_for_resource(resource_name):
for resource in resources:
if resource.path == "/{}".format(resource_name):
return JsonResponse(get_endpoint_doc(resource))
return JsonResponse(None)
#------------- Models and methods for generating docs ------------------------
def get_endpoint_doc(resource):
retval = SwaggerResource(resourcePath=resource.path)
retval.apis = []
rules = []
for rule in app.url_map.iter_rules():
if rule.rule.startswith("/api{}".format(resource.path)):
rules.append(rule)
for rule in rules:
endpoint = SwaggerApiEndpoint()
endpoint.path = re.sub("^/api", "", rule.rule)
endpoint.path = re.sub("<", "{", endpoint.path)
endpoint.path = re.sub(">", "}", endpoint.path)
endpoint.operations = []
for method in rule.methods:
if method in ["GET", "DELETE", "POST", "PUT"]:
operation = SwaggerOperation()
operation.method = method
operation.nickname = rule.endpoint
function = app.view_functions[rule.endpoint]
operation.summary = function.__doc__
operation.parameters = []
arguments = set(rule.arguments)
if hasattr(function, 'argument_docs'):
arguments = arguments.union(set(function.argument_docs.keys()))
if hasattr(function, 'note'):
operation.notes = function.note
for argument in arguments:
parameter = SwaggerParameter()
parameter.name = argument
parameter.allowMultiple = False
if hasattr(function, 'argument_docs') and argument in function.argument_docs:
parameter.description = function.argument_docs[argument]
if hasattr(function, 'argument_types') and argument in function.argument_types:
parameter.type = function.argument_types[argument]
else:
parameter.type = "string"
if hasattr(function, 'argument_param_types') and argument in function.argument_param_types:
parameter.paramType = function.argument_param_types[argument]
else:
parameter.paramType = "path"
operation.parameters.append(parameter)
if hasattr(function, 'returns'):
add_type_properties(operation, function.returns, retval)
else:
operation.type = "void"
if hasattr(function, 'accepts'):
parameter = SwaggerParameter()
parameter.name = "body"
parameter.paramType = "body"
parameter.allowMultiple = False
add_type_properties(parameter, function.accepts, retval)
operation.parameters.append(parameter)
endpoint.operations.append(operation)
retval.apis.append(endpoint)
for parent, subtypes in resource.subtypes.items():
model = None
for potential_model in list(retval.models.values()):
if parent.__name__ == potential_model.id:
model = potential_model
break
if model is not None:
model.subTypes = []
for subtype in subtypes:
model.subTypes.append(subtype.__name__)
add_swagger_model(retval, subtype)
model.discriminator = 'typeName'
return retval
def get_type_name(from_type):
if isinstance(from_type, type) and (issubclass(from_type, Document) or issubclass(from_type, EmbeddedDocument)):
return from_type.__name__
if isinstance(from_type, StringField):
return "string"
if isinstance(from_type, ListField):
return "array"
if isinstance(from_type, IntField):
return "integer"
if isinstance(from_type, LongField):
return "integer"
if isinstance(from_type, FloatField):
return "number"
if isinstance(from_type, BooleanField):
return "boolean"
if isinstance(from_type, DateTimeField):
return "integer"
if isinstance(from_type, ObjectIdField):
return "string"
if isinstance(from_type, BinaryField):
return "string"
if isinstance(from_type, (EmbeddedDocumentField, ReferenceField)):
return from_type.document_type.__name__
def get_format_name(from_type):
if isinstance(from_type, IntField):
return "int32"
if isinstance(from_type, LongField):
return "int64"
if isinstance(from_type, FloatField):
return "float"
if isinstance(from_type, DateTimeField):
return "int64"
if isinstance(from_type, BinaryField):
return "byte"
def get_override_description(from_type):
if isinstance(from_type, ObjectIdField):
return "A String representation of a BSON ObjectId"
if isinstance(from_type, DateTimeField):
return "The number of milliseconds since EPOCH GMT"
def add_type_properties(to, from_type, resource):
if hasattr(from_type, 'help_text'):
to.description = from_type.help_text
to.type = get_type_name(from_type)
format = get_format_name(from_type)
if format is not None:
to.format = format
description = get_override_description(from_type)
if description is not None:
to.description = description
if isinstance(from_type, type) and (issubclass(from_type, Document) or issubclass(from_type, EmbeddedDocument)):
add_swagger_model(resource, from_type)
elif isinstance(from_type, StringField):
if hasattr(from_type, 'choices'):
to.enum = []
to.enum = from_type.choices
elif isinstance(from_type, ListField):
to.items = dict()
to.items['type'] = get_type_name(from_type.field)
if isinstance(from_type.field, type) and (issubclass(from_type.field, Document) or issubclass(from_type.field, EmbeddedDocument)):
add_swagger_model(resource, from_type.field)
elif isinstance(from_type.field, (EmbeddedDocumentField, ReferenceField)):
add_swagger_model(resource, from_type.field.document_type)
elif isinstance(from_type, (EmbeddedDocumentField, ReferenceField)):
add_swagger_model(resource, from_type.document_type)
class SwaggerInfo(EmbeddedDocument):
contact = StringField(required=True, default="[email protected]")
license = StringField(required=True, default="Apache 2.0")
licenseUrl = StringField(required=True, default="http://www.apache.org/licenses/LICENSE-2.0.html")
title = StringField(required=True, default="Slick Test Manager")
class SwaggerAuthorizations(EmbeddedDocument):
pass
class SwaggerApiDescription(EmbeddedDocument):
description = StringField()
path = StringField()
def __init__(self, *args, **kwargs):
super(SwaggerApiDescription, self).__init__(*args, **kwargs)
self.subtypes = {}
class SwaggerApiDocs(EmbeddedDocument):
apiVersion = StringField(required=True, default="1.0.0")
swaggerVersion = StringField(required=True, default="1.2")
info = EmbeddedDocumentField(SwaggerInfo, default=SwaggerInfo())
authorizations = EmbeddedDocumentField(SwaggerAuthorizations, default=SwaggerAuthorizations())
apis = ListField(EmbeddedDocumentField(SwaggerApiDescription), required=True, default=resources)
def generate_base_path():
return re.sub("api/.*$", "api", request.base_url)
class SwaggerProperty(EmbeddedDocument):
description = StringField()
format = StringField()
type = StringField()
enum = ListField(StringField(), default=None)
items = MapField(StringField(), default=None)
class SwaggerModel(EmbeddedDocument):
id = StringField()
description = StringField()
properties = MapField(EmbeddedDocumentField(SwaggerProperty))
required = ListField(StringField())
subTypes = ListField(StringField(), default=None)
discriminator = StringField()
class SwaggerParameter(EmbeddedDocument):
name = StringField()
paramType = StringField()
description = StringField()
required = BooleanField()
type = StringField()
format = StringField()
enum = ListField(StringField(), default=None)
items = MapField(StringField(), default=None)
allowMultiple = BooleanField()
class SwaggerOperation(EmbeddedDocument):
method = StringField()
nickname = StringField()
notes = StringField()
summary = StringField()
type = StringField()
format = StringField()
enum = ListField(StringField(), default=None)
items = MapField(StringField(), default=None)
parameters = ListField(EmbeddedDocumentField(SwaggerParameter))
produces = ListField(StringField(), default=["application/json"])
class SwaggerApiEndpoint(EmbeddedDocument):
path = StringField()
operations = ListField(EmbeddedDocumentField(SwaggerOperation))
class SwaggerResource(EmbeddedDocument):
apiVersion = StringField(required=True, default="1.0.0")
swaggerVersion = StringField(required=True, default="1.2")
resourcePath = StringField()
basePath = StringField(required=True, default=generate_base_path)
models = MapField(EmbeddedDocumentField(SwaggerModel))
apis = ListField(EmbeddedDocumentField(SwaggerApiEndpoint))
def add_swagger_model(resource, modeltype):
if not hasattr(resource, 'models'):
resource.models = dict()
if modeltype.__name__ in resource.models:
return
model = SwaggerModel()
model.id = modeltype.__name__
if modeltype.__doc__:
model.description = modeltype.__doc__
model.properties = dict()
for fieldname, fieldtype in modeltype._fields.items():
property = SwaggerProperty()
add_type_properties(property, fieldtype, resource)
if property.type is None:
property = None
if property is not None:
model.properties[fieldname] = property
if hasattr(modeltype, 'dynamic_types'):
for fieldname, fieldtype in modeltype.dynamic_types.items():
property = SwaggerProperty()
add_type_properties(property, fieldtype, resource)
if property.type is None:
property = None
if property is not None:
model.properties[fieldname] = property
if fieldname == 'typeName':
if model.required is None:
model.required = []
model.required.append(fieldname)
resource.models[model.id] = model
| apache-2.0 | 8,642,785,155,971,280,000 | 36.31728 | 138 | 0.635239 | false | 4.268633 | false | false | false |
Tallefer/scons-for-symbian | arguments.py | 2 | 15344 | """Reads and stores globals including the command line arguments"""
__author__ = "Jussi Toivola"
__license__ = "MIT License"
from SCons.Script import ARGUMENTS, DefaultEnvironment, HelpFunction as Help
from SCons.Variables import Variables, EnumVariable
from config import * #IGNORE:W0611
from os.path import join, abspath
from echoutil import loginfo
import os
import sys
#: Are we running a build? This is to avoid messing up code analyzers
#: and Epydoc.
RUNNING_SCONS = ( "scons" in sys.argv[0] or "-c" == sys.argv[0] )
VARS = Variables( 'arguments.py' )
def GetArg( name, helpmsg, default, allowed_values = None, caseless = True ):
"""Utility for adding help information and retrieving argument"""
if allowed_values is not None:
VARS.Add( EnumVariable( name, helpmsg, default,
allowed_values = allowed_values,
ignorecase = 2 ) )
else:
VARS.Add( name, helpmsg, default )
value = ARGUMENTS.get( name, default )
if value is not None and caseless:
value = value.lower()
return value
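
# Example (illustrative, hypothetical option): a command line such as
#   scons mynewflag=on
# would be declared and read with
#   MYNEWFLAG = GetArg("mynewflag", "Describe the flag here.", "off",
#                      allowed_values=["on", "off"])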
#: Symbian SDK folder
EPOCROOT = os.environ.get( "EPOCROOT", EPOCROOT )
loginfo( "EPOCROOT=%s" % EPOCROOT )
#: Constant pointing to EPOCROOT/epoc32
EPOC32 = join( EPOCROOT, 'epoc32' )
#: Constant pointing to sdk's data folder
EPOC32_DATA = join( EPOC32, 'data' )
#: Constant pointing to system include folder
EPOC32_INCLUDE = join( EPOC32, 'include' )
#: Constant pointing to system tools folder
EPOC32_TOOLS = join( EPOC32, 'tools' )
#: Constant pointing to release folder
EPOC32_RELEASE = join( EPOC32, "release", COMPILER, RELEASE )
# TODO(mika.raento): The setting of the final output directories feels hacky
# here.
# I _do_ want them to be overridable from scripts, as I think it's more a
# project style question than something you change per build.
# Should we just make the 'constants' accessor functions instead? Or something
# else?
_set_install_epocroot = None
def SetInstallDirectory(dir):
"""
SetInstallDirectory can be called to put the final output (binaries, resource
files, .libs and headers) somewhere else than the SDK folder so that builds
don't pollute the SDK. Apps can be started by pointing a virtual MMC to this
directory (with _EPOC_DRIVE_E environment variable or epoc.ini setting).
"""
global _set_install_epocroot
if _set_install_epocroot and _set_install_epocroot != dir:
msg = "You have conflicting settings for the installation directory" + "%s, %s" % (_set_install_epocroot, dir)
raise msg
_set_install_epocroot = dir
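
# Illustrative SConscript usage (hypothetical paths, added for clarity):
# redirect the final output away from the SDK tree before any targets are
# defined, e.g.
#   import arguments
#   arguments.SetInstallDirectory("/home/user/symbian-install")
#   arguments.ResolveInstallDirectories()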
INSTALL_EPOCROOT = None
INSTALL_EPOC32 = None
INSTALL_EPOC32_DATA = None
INSTALL_EPOC32_INCLUDE = None
INSTALL_EPOC32_TOOLS = None
INSTALL_EPOC32_RELEASE = None
INSTALL_EMULATOR_C = None
SDKFOLDER = None
SYSTEM_INCLUDES = None
def ResolveInstallDirectories():
"""
ResolveInstallDirectories sets the necessary constants for final output. It
should be called before accessing any of the INSTALL_ variables but is split
into a separate initialization so that that the root of the installation tree
can be set in a SConscript.
It will only do its thing once, returning True if it did.
"""
global INSTALL_EPOCROOT, INSTALL_EPOC32, INSTALL_EPOC32_DATA
global INSTALL_EPOC32_INCLUDE, INSTALL_EPOC32_TOOLS, INSTALL_EPOC32_RELEASE
global INSTALL_EMULATOR_C, SDKFOLDER, SYSTEM_INCLUDES
if not INSTALL_EPOCROOT is None:
return False
#: Final output directories, mimicking SDK structure
INSTALL_EPOCROOT = _set_install_epocroot or EPOCROOT
INSTALL_EPOCROOT = GetArg("install_epocroot", "Final output directory root, "
"if different from EPOCROOT", INSTALL_EPOCROOT)
loginfo( "INSTALL_EPOCROOT=%s" % INSTALL_EPOCROOT )
#: Constant pointing to INSTALL_EPOCROOT/epoc32
INSTALL_EPOC32 = join( INSTALL_EPOCROOT, 'epoc32' )
#: Constant pointing to sdk's data folder
INSTALL_EPOC32_DATA = join( INSTALL_EPOC32, 'data' )
#: Constant pointing to system include folder
INSTALL_EPOC32_INCLUDE = join( INSTALL_EPOC32, 'include' )
#: Constant pointing to system tools folder
INSTALL_EPOC32_TOOLS = join( INSTALL_EPOC32, 'tools' )
#: Constant pointing to release folder
INSTALL_EPOC32_RELEASE = join( INSTALL_EPOC32, "release", COMPILER, RELEASE )
#: Constant pointing to emulator c drive
INSTALL_EMULATOR_C = join( EPOC32, "winscw", "c" )
#: Default include folders
SYSTEM_INCLUDES = [ EPOC32_INCLUDE,
join( EPOC32_INCLUDE, "variant" ),
INSTALL_EPOC32_INCLUDE,
]
#: SDK Installation folder
SDKFOLDER = os.path.join( INSTALL_EPOCROOT,
"epoc32",
"release",
COMPILER,
RELEASE
)
return True
if sys.platform == "win32":
os.environ["EPOCROOT"] = EPOCROOT.replace( "/", "\\" )
else:
os.environ["EPOCROOT"] = EPOCROOT
PYTHON_COMPILER = GetArg("pycompiler", "Enable Python source compilation into bytecode. Points to Python executable.", None )
PYTHON_DOZIP = GetArg("pythondozip", "Zip all python sources into a single archive. Path to the file on device", None )
_p = os.environ["PATH"]
#CSL_ARM_TOOLCHAIN_FOLDER_NAME = "CSL Arm Toolchain\\bin"
#if sys.platform == "linux2":
# CSL_ARM_TOOLCHAIN_FOLDER_NAME = "csl-gcc/bin"
#: Path to arm toolchain. Detected automatically from path using 'CSL Arm Toolchain' on Windows or csl-gcc on Linux
#PATH_ARM_TOOLCHAIN = [ _x for _x in _p.split( os.path.pathsep ) if CSL_ARM_TOOLCHAIN_FOLDER_NAME in _x ]
# Parse arguments -------------------------------------------------------------
#: Used compiler
COMPILER = GetArg( "compiler", "The compiler to use.", COMPILER, COMPILERS )
#: Release type
RELEASE = GetArg( "release", "Release type.", RELEASE, RELEASETYPES )
#: Compiler flags for GCCE
GCCE_OPTIMIZATION_FLAGS = GetArg( "gcce_options", "GCCE compiler options.",
GCCE_OPTIMIZATION_FLAGS,
caseless = False )
#: Compiler flags for GCCE
WINSCW_OPTIMIZATION_FLAGS = GetArg( "winscw_options", "WINSCW compiler options.",
WINSCW_OPTIMIZATION_FLAGS,
caseless = False )
MMP_EXPORT_ENABLED = GetArg( "mmpexport", "Enable MMP export(if configured).", "false", [ "true", "false"] )
MMP_EXPORT_ENABLED = MMP_EXPORT_ENABLED == "true"
DO_CREATE_SIS = GetArg( "dosis", "Create SIS package.", str( DO_CREATE_SIS ).lower(), [ "true", "false"] )
DO_CREATE_SIS = (DO_CREATE_SIS == "true" )
DO_DUPLICATE_SOURCES = GetArg( "duplicate", "Duplicate sources to build dir.", "false", [ "true", "false"] )
DO_DUPLICATE_SOURCES = (DO_DUPLICATE_SOURCES in ["true", 1])
ENSYMBLE_AVAILABLE = False
try:
if COMPILER != COMPILER_WINSCW and DO_CREATE_SIS:
__import__( "ensymble" )
ENSYMBLE_AVAILABLE = True
except ImportError:
loginfo( "Automatic SIS creation requires Ensymble." )
if COMPILER == COMPILER_WINSCW:
DO_CREATE_SIS = False
if not DO_CREATE_SIS:
loginfo( "SIS creation disabled" )
#: Constant for ui platform version
UI_VERSION = ( 3, 0 )
#: Symbian version of the SDK
SYMBIAN_VERSION = ( 9 , 1 )
#: SDK platform header( generated )
#: S60 3rd & mr = EPOC32_INCLUDE + variant + symbian_os_v9.1.hrh
PLATFORM_HEADER = join( EPOC32_INCLUDE, "variant" )
def _resolve_platform():
"""Find out current SDK version"""
global PLATFORM_HEADER, UI_PLATFORM, UI_VERSION, SYMBIAN_VERSION
if not RUNNING_SCONS:
return
if not os.path.exists( PLATFORM_HEADER ):
raise RuntimeError( "'%s' does not exist. Invalid EPOCROOT?" % PLATFORM_HEADER )
# These are the same on S60
sdk_header = ""
symbian_header = ""
uiplatform = UI_PLATFORM_S60
uiversion = UI_VERSION
files = os.listdir( PLATFORM_HEADER )
files.sort()
for fname in files:
if fname.lower().startswith( "symbian_os" ) \
and "vintulo" not in fname.lower():
symbian_header = join( PLATFORM_HEADER, fname )
elif fname.lower().startswith( "uiq" ):
# symbian_header found earlier
assert symbian_header != ""
sdk_header = join( PLATFORM_HEADER, fname )
uiplatform = UI_PLATFORM_UIQ
uiversion = sdk_header.split( "_" )[1].split( ".hrh" )[0].split( "." )
uiversion = map( int, uiversion )
break
if symbian_header == "":
raise RuntimeError( "Unknown platform. Invalid EPOCROOT?" )
if uiplatform == UI_PLATFORM_S60:
# Use manifest.xml to get version for all S60 SDKs
f = open( join( EPOC32, "kit", "manifest.xml" ) )
d = f.read()
f.close()
symbian_version = d.split('osInfo version="')[-1].split('"')[0]
symbian_version = symbian_version.split(".")[:2]
uiversion = d.split('sdkVersion>')[1].split('<')[0]
uiversion = uiversion.split(".")[:2]
sdk_header = symbian_header
else: #UIQ
symbian_version = symbian_header.split( "_v" )[1].split( "." )[:2]
PLATFORM_HEADER = sdk_header
UI_PLATFORM = uiplatform
UI_VERSION = tuple( map( int, uiversion ) )
SYMBIAN_VERSION = tuple( map( int, symbian_version ) )
_resolve_platform()
#: Location for the packages. Value generated in run-time.
PACKAGE_FOLDER = abspath( join( "build%d_%d" % SYMBIAN_VERSION, "%s_%s" % ( COMPILER, RELEASE ), "packages" ) )
loginfo( "Symbian OS version = %d.%d" % SYMBIAN_VERSION )
loginfo( "UI platform = %s" % UI_PLATFORM, "%d.%d" % UI_VERSION )
#: Built components. One SConstruct can define multiple SymbianPrograms.
#: This can be used from command-line to build only certain SymbianPrograms
COMPONENTS = GetArg( "components", "Components to build. Separate with ','.", "all" )
COMPONENTS_EXCLUDE = False
def __processComponents():
global COMPONENTS_EXCLUDE
components = COMPONENTS.lower().split( "," )
if "all" in components:
if len( components ) == 1: # if all only
return None
COMPONENTS_EXCLUDE = True
components.remove( "all" )
return components
COMPONENTS = __processComponents()
def __get_defines():
"Ensure correct syntax for defined strings"
tmp = GetArg( "defines", "Extra preprocessor defines. For debugging, etc.", None, caseless=False )
if tmp is None: return []
tmp = tmp.split( "," )
defs = []
for x in tmp:
if "=" in x:
name, value = x.split( "=" )
if not value.isdigit():
value = r'/"' + value + r'/"'
x = "=".join( [name, value] )
defs.append( x )
return defs
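
# Example (illustrative): 'scons defines=MYDEBUG,LOGLEVEL=2' appends
# ['MYDEBUG', 'LOGLEVEL=2'] to the preprocessor defines below.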
#: Command-line define support
CMD_LINE_DEFINES = __get_defines()
#: Extra libraries( debug library etc. )
CMD_LINE_LIBS = GetArg( "extra_libs", "Extra libraries. Debug libraries, etc.", None )
if CMD_LINE_LIBS is not None:
CMD_LINE_LIBS = CMD_LINE_LIBS.split( "," )
#: Default Symbian definitions.
STANDARD_DEFINES = [ "__SYMBIAN32__",
"_UNICODE",
]
if SYMBIAN_VERSION[0] > 8:
STANDARD_DEFINES += [ "__SUPPORT_CPP_EXCEPTIONS__" ]
# Add S60 macros
EXTRA_DEFINES = []
if UI_PLATFORM == UI_PLATFORM_S60:
STANDARD_DEFINES += [ "__SERIES60_%d%d__" % UI_VERSION ]
STANDARD_DEFINES += [ "__SERIES60_%dX__" % UI_VERSION[0] ]
STANDARD_DEFINES += [ "__S60_%d%d__" % UI_VERSION ]
STANDARD_DEFINES += [ "__S60_%dX__" % UI_VERSION[0] ]
# Special rules for 5th edition
# __S60_3X__ and __SERIES60_3X__ are correct here
# TODO: Should these be read from e32plat.pl directly?
if UI_VERSION[0] == 5:
STANDARD_DEFINES += ['__S60_3X__','__SERIES60_3X__']
# Not in regular build scripts
EXTRA_DEFINES += [ "__SERIES60__" ]
#Add UIQ macros
elif UI_PLATFORM == UI_PLATFORM_UIQ:
# WARNING! These are not defined in regular UIQ build scripts
# if you use these defines in your code, it becomes incompatible with them
# You'll need to add these in your MMP with MACRO
EXTRA_DEFINES += [ "__UIQ_%d%d__" % UI_VERSION ]
EXTRA_DEFINES += [ "__UIQ_%dX__" % UI_VERSION[0] ]
EXTRA_DEFINES += [ "__UIQ__" ]
EXTRA_DEFINES += [ "__SYMBIAN_OS_VERSION__=%d%d" % SYMBIAN_VERSION ]
EXTRA_DEFINES += [ "__UI_VERSION__=%d%d" % UI_VERSION ]
DEFAULT_SYMBIAN_DEFINES = STANDARD_DEFINES + EXTRA_DEFINES
if RELEASE == RELEASE_UREL:
DEFAULT_SYMBIAN_DEFINES.append( "NDEBUG" )
else:
DEFAULT_SYMBIAN_DEFINES.append( "_DEBUG" )
def get_output_folder( compiler, release, target, targettype ):
p = os.path.join( "build" + "%d_%d" % SYMBIAN_VERSION, compiler + "_" + release, target + "_" + targettype )
return os.path.abspath( p )
# Generate help message
def __generate_help_message():
separator = "=" * 79 + "\n"
# SCons gets into some kind of infinite loop if this file is imported directly
# as it is done with EpyDoc.
ENV = DefaultEnvironment( variables = VARS )
msg = "SCons for Symbian arguments:\n"
msg += separator
msg += VARS.GenerateHelpText( ENV ).replace( "\n a", " | a" )
Help( msg )
Help( separator )
#: Flag to disable processing to shorten time to display help message
HELP_ENABLED = False
for _x in [ "-h", "-H", "--help"]:
if _x in sys.argv:
HELP_ENABLED = True
__generate_help_message()
break
PATH_ARM_TOOLCHAIN = None
def checkGCCE():
global PATH_ARM_TOOLCHAIN
paths = _p.split( os.path.pathsep )
for p in paths:
try:
items = os.listdir( p )
        except OSError, msg: # OSError also covers WindowsError on Windows and exists on Linux
print msg
continue
for i in items:
if i.startswith( "arm-none-symbianelf" ):
PATH_ARM_TOOLCHAIN = p
return True
return False
# Check if GCCE setup is correct
#if len( PATH_ARM_TOOLCHAIN ) > 0:
# PATH_ARM_TOOLCHAIN = PATH_ARM_TOOLCHAIN[0]
if RUNNING_SCONS:
if not checkGCCE():
print "\nERROR"
print "-" * 79
print "Error: Unable to find 'arm-none-symbianelf' tools from path. GCCE building is not possible."
raise SystemExit( - 1 )#IGNORE:W1010
# Check if WINSCW is found
def __winscw_in_path():
if COMPILER == COMPILER_WINSCW:
for x in os.environ["PATH"].split( ";" ):
if os.path.exists( x ):
if "mwccsym2.exe" in [ x.lower() for x in os.listdir( x ) ]:
return True
return False
return True
if not __winscw_in_path() and RUNNING_SCONS:
print "\nERROR"
print "-" * 79
print "WINSCW compiler 'mwccsym2.exe' not found from PATH."
print "Install Carbide and run configuration\\run_env_update.bat"
if not HELP_ENABLED and RUNNING_SCONS:
raise SystemExit( - 1 )#IGNORE:W1010
del _p
del _x
| mit | -937,099,424,023,850,500 | 33.354839 | 125 | 0.61946 | false | 3.395441 | false | false | false |
CFStorm/FCLI2 | master.py | 1 | 7392 | #Python 3 - Developed By Carter Fulcher
#Hello Developers!
"""IMPORTANT NOTICE"""
#The dev code is 584. You will need this if you wish to use the
#interactive console. Thanks!
logo = """
______ _ _____ _ _____
| ____| | | / ____| | |_ _|
| |__ ___ _ __ _ __ ___ _ _| | __ _ | | | | | |
| __/ _ \| '__| '_ ` _ \| | | | |/ _` | | | | | | |
| | | (_) | | | | | | | | |_| | | (_| | | |____| |____ _| |_
|_|__\___/|_| |_|_|_| |_|\__,_|_|\__,_| \_____|______|_____|
|_|
"""
import math
import sys
import readline # optional, will allow Up/Down/History in the console
import code
import functools #For the geomean function
import operator #For the geomean function
import os #for the clear function
instruction = """
Hi! Welcome to Formula CLI 2.0. FCLI2 is a command line based math tool. Truthfully, FCLI2 is the MOST versatile math tool out there. It is complete with over 50 built in formulas, with the easy ability for you to add more. In addition, you can create your own python functions that can be ran from the program. FCLI2 truly is the best tool for doing anything math related.
--Running a Formula--
To run a formula just do "listformulas" to see a list of the available formulas; next, run the formula and enter the desired values for the variables and FCLI2 will do the rest!
--Running a Function--
To run a function do "listfunctions" and this will show you all the available functions. Now, run "function" to let FCLI2 know that you will be running a function and not a formula, then type the name of the desired function and you're off!
--Adding a Formula--
If you want to add your own formula then simply look at the "formulas" dictionary. Here you can add formulas in a couple of easy steps. First, specify the number of variables that your formula needs in the first spot of the array, then enter the variables by name in the next spots. Finally, enter the formula itself as the LAST element of the array. Make sure the formula uses the same variable names you specified earlier.
--Adding a Function--
This is easy. Just add your python function to the bottom of the program. Then, add the function's name to the "av_functions" list.
"""
formulas = { #Master Formula function! Add your formulas here!!!
"trianglearea":["2", "b", "h", "0.5*(b*h)"],
"squarearea":["2", "b", "h", "b*h"],
"circlearea":["1", "r", "math.pi*(r)**2"],
"findslope":["3", "y", "x", "b", "(y-b)/x"],
"average2":["2", "n1", "n2", "(n1+n2)/2"],
"geomean2":["2", "n1", "n2", "math.sqrt(n1*n2)"],
"perimiterrect":["2", "l", "w", "(2*l)+(2*w)"],
"volumerectprism":["3", "l", "w", "h", "l*w*h"],
"pythagc":["2", "a", "b", "(a*a)+(b*b)"],
"circum":["1", "r", "2*math.pi*r"],
"cylvol":["2", "r", "h", "math.pi*(r*r)*h"],
"spherevol":["1", "r", "(4/3)*math.pi*(r*r*r)"],
"conevol":["2","r", "h", "(1/3)*math.pi*(r*r)*h"],
"pyramidvol":["3", "l", "w", "h", "(l*w*h)/3"],
"trapezoidarea":["3", "b1", "b2", "h", "((b1 + b2)/2)*h"],
"exponentialgrowth":["4", "P", "r", "n", "t", "P * (1+r/n)**(n*t)"],
"ngonarea":["3", "n", "a", "s", "n*(1/2)*a*s"],
"sectorarea":["2", "measure_of_arc", "r", "(measure_of_arc/360)*math.pi*(r*r)"],
"arclength":["2", "measure_of_arc", "r", "(measure_of_arc/360)*2*math.pi*r"],
}
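# Illustrative example of adding your own formula (hypothetical entry): the
# first slot is the number of variables, then the variable names, then the
# expression written in terms of exactly those names, e.g.
#   "kineticenergy": ["2", "m", "v", "0.5*m*(v**2)"],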
def init():
print(logo)
main()
#This function starts an interactive shell WITHIN the python script that allows you to access variables written here for testing.
def shell():
    #This forces users to enter a code. This is really insecure but that's ok! We are just trying to weed out people who are casually using the program and keep them from messing with the developer console.
    devcode = 584 #THIS IS THE DEV CODE
print("Enter developer verification code (Or enter to go back):")
devcode_verif = input()
try:
if devcode == int(devcode_verif):
vars = globals().copy()
vars.update(locals())
shell = code.InteractiveConsole(vars)
shell.interact()
elif devcode != int(devcode_verif):
print("Verification Failed.")
main()
elif devcode_verif == "":
main()
except ValueError:
print("Verification Failed.")
main()
#Main function. The init() function runs this to start the program. This function essentially recognizes commands and then deals with them accordingly.
def main():
while True:
runtime = input('Formula CLI 2.0 > ')
commands = ["exit", "listformulas", "shell", "help", "commands", "asdf", "function", "listfunctions"]
if runtime == commands[0]:
sys.exit()
if runtime == commands[1]:
for key in formulas:
print(key)
if runtime == commands[2]:
shell() #For Debugging, view shell() function for more information.
if runtime == commands[3]:
print(instruction)
if runtime == commands[4]:
print("Available commands:")
for item in commands:
print(item)
if runtime == commands[5]:
print("jklj;")
if runtime == commands[6]:
func = input('Enter Name of Function [name]: ')
function(func)
if runtime == commands[7]:
for item in av_functions:
print(item)
elif runtime not in commands:
print(compiler(runtime))
#Master Compiler Function
def compiler(formula):
try:
main = formulas[str(formula)] #Main Array that holds the formula
variables = int(main[0]) #Number of Variables
var = main[1:variables+1] #An array that contains str versions of the variables
formula_master = main[-1]
except KeyError:
return "Formula not found."
master_var = {}
i = 0
while i < variables:
names = var[i]
print("Enter Value of %s" % var[i])
master_var[var[i]] = float(input())
i+=1
    # Evaluate the formula with the collected values in scope; the math module
    # is provided for formulas that use math.pi, math.sqrt, etc.
    return eval(formula_master, {"math": math}, master_var)
def function(function_to_run):
eval(function_to_run+"()")
av_functions = ["double", "calc", "geomean", "isprime"] #Enter functions here
"""Write functions below:"""
def double(): #A function that doubles a number.
i1 = input("number: ")
print(int(i1)*2)
def calc(): #A simple calculator. Perform basic math functions here.
print("Basic Calc Initializing...")
print("Complete.")
while True:
master = input("> ")
if(master == "exit"):
main()
else:
print(eval(master))
def geomean(): #Calculate the geometric mean of unlimited numbers
    nums = input("Enter 2 or more numbers separated by commas: ")
numbers = nums.split(",") #Splits by comma
numbers = [ int(x) for x in numbers ] #String to integer
new_numbers = functools.reduce(operator.mul, numbers, 1)
numlen = int(len(numbers)) #Number length
    print(int(new_numbers**(1/numlen))) #The nth root of the product, i.e. the geometric mean
def isprime():
    num = int(input("Enter a number: "))
    if num < 2:
        print("The number is not prime.")
        return
    # trial division up to the square root is enough to decide primality
    for divisor in range(2, int(math.sqrt(num)) + 1):
        if num % divisor == 0:
            print("The number is not prime.")
            return
    print("The number is prime.")
#initialize the program by starting main()
init()
| apache-2.0 | -3,720,898,860,227,680,000 | 37.103093 | 430 | 0.580763 | false | 3.414319 | false | false | false |
rickpeters/mkdocs | mkdocs/tests/utils/ghp_import_tests.py | 24 | 3809 | #!/usr/bin/env python
# coding: utf-8
from __future__ import unicode_literals
import mock
import os
import subprocess
import tempfile
import unittest
import shutil
from mkdocs.utils import ghp_import
class UtilsTests(unittest.TestCase):
@mock.patch('subprocess.call', auto_spec=True)
@mock.patch('subprocess.Popen', auto_spec=True)
def test_try_rebase(self, mock_popen, mock_call):
popen = mock.Mock()
mock_popen.return_value = popen
popen.communicate.return_value = (
'4c82346e4b1b816be89dd709d35a6b169aa3df61\n', '')
popen.wait.return_value = 0
ghp_import.try_rebase('origin', 'gh-pages')
mock_popen.assert_called_once_with(
['git', 'rev-list', '--max-count=1', 'origin/gh-pages'],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
mock_call.assert_called_once_with(
['git', 'update-ref', 'refs/heads/gh-pages',
'4c82346e4b1b816be89dd709d35a6b169aa3df61'])
@mock.patch('subprocess.Popen', auto_spec=True)
def test_get_prev_commit(self, mock_popen):
popen = mock.Mock()
mock_popen.return_value = popen
popen.communicate.return_value = (
b'4c82346e4b1b816be89dd709d35a6b169aa3df61\n', '')
popen.wait.return_value = 0
result = ghp_import.get_prev_commit('test-branch')
self.assertEqual(result, u'4c82346e4b1b816be89dd709d35a6b169aa3df61')
mock_popen.assert_called_once_with(
['git', 'rev-list', '--max-count=1', 'test-branch', '--'],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
@mock.patch('subprocess.Popen', auto_spec=True)
def test_get_config(self, mock_popen):
popen = mock.Mock()
mock_popen.return_value = popen
popen.communicate.return_value = (
b'Dougal Matthews\n', '')
result = ghp_import.get_config('user.name')
self.assertEqual(result, u'Dougal Matthews')
mock_popen.assert_called_once_with(
['git', 'config', 'user.name'],
stdout=subprocess.PIPE, stdin=subprocess.PIPE)
@mock.patch('mkdocs.utils.ghp_import.get_prev_commit')
@mock.patch('mkdocs.utils.ghp_import.get_config')
def test_start_commit(self, mock_get_config, mock_get_prev_commit):
pipe = mock.Mock()
mock_get_config.side_effect = ['username', 'email']
mock_get_prev_commit.return_value = 'SHA'
ghp_import.start_commit(pipe, 'test-branch', 'test-message')
mock_get_prev_commit.assert_called_once_with('test-branch')
self.assertEqual(pipe.stdin.write.call_count, 5)
@mock.patch('mkdocs.utils.ghp_import.try_rebase', return_value=True)
@mock.patch('mkdocs.utils.ghp_import.get_prev_commit', return_value='sha')
@mock.patch('mkdocs.utils.ghp_import.get_config', return_value='config')
@mock.patch('subprocess.call', auto_spec=True)
@mock.patch('subprocess.Popen', auto_spec=True)
def test_ghp_import(self, mock_popen, mock_call, mock_get_config,
mock_get_prev_commit, mock_try_rebase):
directory = tempfile.mkdtemp()
open(os.path.join(directory, 'file'), 'a').close()
try:
popen = mock.Mock()
mock_popen.return_value = popen
popen.communicate.return_value = ('', '')
popen.wait.return_value = 0
ghp_import.ghp_import(directory, "test message",
remote='fake-remote-name',
branch='fake-branch-name')
self.assertEqual(mock_popen.call_count, 2)
self.assertEqual(mock_call.call_count, 0)
finally:
shutil.rmtree(directory)
| bsd-2-clause | -487,773,874,232,354,560 | 34.598131 | 78 | 0.62011 | false | 3.382771 | true | false | false |
akrherz/iem | scripts/dbutil/xcheck_nwsli_csv.py | 1 | 1109 | """See if we have metadata in a local CSV file
NOTE: I had to manually edit the .csv file to remove the first row
"""
import pandas as pd
from pandas.io.sql import read_sql
from pyiem.util import get_dbconn
CSVFN = "/home/akrherz/Downloads/nwsli_database.csv"
def dowork(df, nwsli):
"""do work!"""
df2 = df[df["NWSLI"] == nwsli]
if df2.empty:
return
row = df2.iloc[0]
print("------")
print(row["NWSLI"])
print(
"%s %s%s - %s"
% (row["City"], row["Detail"], row["Direction"], row["Station Name"])
)
print(row["State"])
print(f"Program {row['Program']}")
print(row["Latitude"])
print(row["Longitude"])
def main():
"""Go Main Go!"""
pgconn = get_dbconn("hads", user="mesonet")
udf = read_sql(
"SELECT distinct nwsli, 1 as col from unknown ORDER by nwsli",
pgconn,
index_col="nwsli",
)
print("Found %s unknown entries" % (len(udf.index),))
df = pd.read_csv(CSVFN, low_memory=False)
for nwsli, _row in udf.iterrows():
dowork(df, nwsli)
if __name__ == "__main__":
main()
| mit | -974,580,413,722,915,000 | 23.644444 | 77 | 0.577998 | false | 2.997297 | false | false | false |
was4444/chromium.src | chrome/test/media_router/telemetry/benchmarks/media_router_benchmark.py | 1 | 2143 | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry import benchmark
from core import perf_benchmark
from core import path_util
from telemetry.timeline import tracing_category_filter
from telemetry.web_perf import timeline_based_measurement
from benchmarks.pagesets import media_router_pages
from benchmarks import media_router_measurements
from benchmarks import media_router_timeline_metric
class _BaseCastBenchmark(perf_benchmark.PerfBenchmark):
options = {'page_repeat': 6}
page_set = media_router_pages.MediaRouterPageSet
def SetExtraBrowserOptions(self, options):
options.clear_sytem_cache_for_browser_and_profile_on_start = True
# TODO: find a better way to find extension location.
options.AppendExtraBrowserArgs([
'--load-extension=' + os.path.join(path_util.GetChromiumSrcDir(), 'out',
'Release', 'mr_extension', 'release'),
'--whitelisted-extension-id=enhhojjnijigcajfphajepfemndkmdlo',
'--media-router=1',
'--enable-stats-collection-bindings'
])
@classmethod
def ValueCanBeAddedPredicate(cls, value, is_first_result):
"""Only drops the first result."""
return not is_first_result
class TraceEventCaseBenckmark(_BaseCastBenchmark):
def CreateTimelineBasedMeasurementOptions(self):
media_router_category = 'media_router'
category_filter = tracing_category_filter.TracingCategoryFilter(
media_router_category)
category_filter.AddIncludedCategory('blink.console')
options = timeline_based_measurement.Options(category_filter)
options.SetLegacyTimelineBasedMetrics([
media_router_timeline_metric.MediaRouterMetric()])
return options
@classmethod
def Name(cls):
return 'media_router.dialog.latency.tracing'
class HistogramCaseBenckmark(_BaseCastBenchmark):
def CreatePageTest(self, options):
return media_router_measurements.MediaRouterPageTest()
@classmethod
def Name(cls):
return 'media_router.dialog.latency.histogram'
| bsd-3-clause | -8,475,967,177,597,016,000 | 31.969231 | 80 | 0.747084 | false | 4.013109 | false | false | false |
carlossg/ninjacape-mqtt-bridge | ninjaCapeSerialMQTTBridge.py | 1 | 3609 | #!/usr/bin/python
#
# used to interface the NinjaCape to openHAB via MQTT
# - reads data from serial port and publishes on MQTT client
# - writes data to serial port from MQTT subscriptions
#
# - uses the Python MQTT client from the Mosquitto project http://mosquitto.org (now in Paho)
#
# https://github.com/perrin7/ninjacape-mqtt-bridge
# perrin7
import serial
import paho.mqtt.client as mqtt
import os
import json
import threading
import time
### Settings
serialdev = '/dev/ttyO1'
broker = "127.0.0.1" # mqtt broker
port = 1883 # mqtt broker port
debug = False ## set this to True for lots of prints
# buffer of data to output to the serial port
outputData = []
#### MQTT callbacks
def on_connect(client, userdata, flags, rc):
if rc == 0:
#rc 0 successful connect
print "Connected"
else:
raise Exception
#subscribe to the output MQTT messages
output_mid = client.subscribe("ninjaCape/output/#")
def on_publish(client, userdata, mid):
if(debug):
print "Published. mid:", mid
def on_subscribe(client, userdata, mid, granted_qos):
if(debug):
print "Subscribed. mid:", mid
def on_message_output(client, userdata, msg):
if(debug):
print "Output Data: ", msg.topic, "data:", msg.payload
#add to outputData list
outputData.append(msg)
def on_message(client, userdata, message):
if(debug):
print "Unhandled Message Received: ", message.topic, message.paylod
#called on exit
#close serial, disconnect MQTT
def cleanup():
print "Ending and cleaning up"
ser.close()
mqttc.disconnect()
def mqtt_to_JSON_output(mqtt_message):
topics = mqtt_message.topic.split('/');
## JSON message in ninjaCape form
    json_data = '{"DEVICE": [{"G":"0","V":0,"D":' + topics[2] + ',"DA":"' + mqtt_message.payload + '"}]}'
return json_data
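
# Illustrative example (made-up values): an MQTT message on topic
# "ninjaCape/output/1007" with payload "FF0000" is framed for the serial port as
#   {"DEVICE": [{"G":"0","V":0,"D":1007,"DA":"FF0000"}]}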
#thread for reading serial data and publishing to MQTT client
def serial_read_and_publish(ser, mqttc):
ser.flushInput()
while True:
line = ser.readline() # this is blocking
if(debug):
print "line to decode:",line
# split the JSON packet up here and publish on MQTT
json_data = json.loads(line)
if(debug):
print "json decoded:",json_data
try:
device = str( json_data['DEVICE'][0]['D'] )
data = str( json_data['DEVICE'][0]['DA'] )
mqttc.publish("ninjaCape/input/"+device, data)
except(KeyError):
# TODO should probably do something here if the data is malformed
pass
############ MAIN PROGRAM START
try:
print "Connecting... ", serialdev
#connect to serial port
ser = serial.Serial(serialdev, 9600, timeout=None) #timeout 0 for non-blocking. Set to None for blocking.
except:
print "Failed to connect serial"
#unable to continue with no serial input
raise SystemExit
try:
#create an mqtt client
mqttc = mqtt.Client("ninjaCape")
#attach MQTT callbacks
mqttc.on_connect = on_connect
mqttc.on_publish = on_publish
mqttc.on_subscribe = on_subscribe
mqttc.on_message = on_message
mqttc.message_callback_add("ninjaCape/output/#", on_message_output)
#connect to broker
mqttc.connect(broker, port, 60)
# start the mqttc client thread
mqttc.loop_start()
serial_thread = threading.Thread(target=serial_read_and_publish, args=(ser, mqttc))
serial_thread.daemon = True
serial_thread.start()
while True: # main thread
#writing to serial port if there is data available
if( len(outputData) > 0 ):
#print "***data to OUTPUT:",mqtt_to_JSON_output(outputData[0])
ser.write(mqtt_to_JSON_output(outputData.pop()))
time.sleep(0.5)
# handle app closure
except (KeyboardInterrupt):
print "Interrupt received"
cleanup()
except (RuntimeError):
print "uh-oh! time to die"
cleanup()
| mit | -5,419,746,929,261,088,000 | 25.152174 | 106 | 0.707121 | false | 3.048142 | false | false | false |
LEX2016WoKaGru/pyClamster | pyclamster/image.py | 1 | 16000 | # -*- coding: utf-8 -*-
"""
Created on 23.05.16
Created for pyclamster
Copyright (C) {2016}
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# System modules
import logging
import os
import sys
import datetime
import copy
# External modules
import PIL.Image
import numpy as np
import scipy
import scipy.ndimage
import skimage.morphology
import pysolar
# Internal modules
from . import coordinates as coords
from . import fisheye
from . import utils
__version__ = "0.1"
# create logger
logger = logging.getLogger(__name__)
# image class
class Image(object):
"""
class to deal with images. This class is basically a subclass to
    PIL.Image.Image, but since it is not designed to be subclassed directly,
this class is a wrapper that redirects attribute requests to an instance
of class PIL.Image.Image.
    This class adds a simple way to work with the underlying image
data as numpy.ndarray. To get this array, use the Image.data property.
You may also set this property to change the image. Note that currently
only updating the image with information of the same image type is
possible.
To get the underlying PIL.Image.Image, use the Image.image property.
You may also set this property to change the image.
"""
###################
### constructor ###
###################
def __init__(self,
image=None,
time=None,
coordinates=None,
longitude=None,
latitude=None,
heightNN=None
):
"""
args:
image(optional[PIL.Image.Image,str/path,Image]) something to read the image from
time(optional[datetime.datetime]) time for image
coordinates(optional[pyclamster.coordinates.Coordinates3d]) coordinates on the image pixels
latitude,longitude (float): gps position of image in degrees
heightNN (float): height in metres over NN
"""
# set metadata
if isinstance(coordinates,coords.Coordinates3d):
self.coordinates = coordinates
else:
self.coordinates = coords.Coordinates3d()
self.longitude = longitude
self.latitude = latitude
self.heightNN = heightNN
self.time = time
self.path = None
# load the image
self.loadImage(image)
#############################################
### attributes/properties getters/setters ###
#############################################
# every attribute request (except _image itself) goes directly to _image
# this makes this class practically a subclass to PIL.Image.Image
def __getattr__(self, key):
# logger.debug("requested attribute '{}'".format(key))
if key == '_image':
raise AttributeError(" ".join([
"Can't access _image attribute.",
"Did you try to access properties before",
"loading an image?"
]))
return getattr(self._image, key)
@property
def time(self):
return self._time
@time.setter
def time(self, newtime):
if isinstance(newtime, datetime.datetime) or newtime is None:
self._time = newtime
else:
raise ValueError("time has to be a datetime.datetime object.")
# the image property is a wrapper around _image
@property
def image(self):
return self._image
# when you set the image property, both _image and _data are updated
@image.setter
def image(self, image):
"""
set the underlying image and update the data
args:
newdata(PIL.Image.Image): the new image
"""
if not isinstance(image, PIL.Image.Image):
raise TypeError("image property has to be a PIL.Image.Image")
# set values
self._image = image
self._data = np.array(self._image)
# set coordinate shape
self.coordinates.shape = self.data.shape[:2]
@property
def data(self):
return self._data
# when you set the data property, both _image and _data are updated
@data.setter
def data(self, newdata):
"""
set the underlying image data and update the image. It is only possible
to use the same image format (L,RGB,RGBA, etc...) as before.
args:
newdata(numpy.ndarray): the new image data, shape(width, height, {1,3})
"""
try: # check if image is set
mode = self._image.mode
except: # no image set
raise AttributeError(" ".join([
"No image was specified until now.",
"Can't determine image mode to set new data."
]))
# set new data
self._data = newdata
self._image = PIL.Image.fromarray(self._data, mode)
# set coordinate shape
self.coordinates.shape = self.data.shape[:2]
###############
### methods ###
###############
# try to read the time from image EXIF data
def getEXIFtime(self, path=None):
"""
get the EXIF time from either this image or an image specified by path
args:
path(optional[str/path]): an image to get the EXIF time from
returns:
datetime.datetime object or None
"""
ret = None
try: # try to read time
try: # try to read Image from path
image = PIL.Image.open(path)
except: # otherwise take current image
image = self
exif = image._getexif() # read EXIF data
t = exif[0x9003] # get exif ctime value
logger.debug("EXIF ctime of image is '{}'".format(t))
try: # try to convert to datetime object
t = datetime.datetime.strptime(str(t), "%Y:%m:%d %H:%M:%S")
logger.debug(
"converted EXIF ctime to datetime object.")
ret = t
except:
logger.warning(
"cannot convert EXIF ctime to datetime object.".format(t))
except (AttributeError, ValueError, TypeError): # reading didn't work
logger.warning("cannot read EXIF time from image")
return ret # result
# try to load the time from image EXIF data
def loadEXIFtime(self, filename = None):
if filename is None:
filename = self.path
if filename is None:
logger.warning("No filename specified to read EXIF data.")
logger.debug(
"trying to load time from image '{}'".format(self.path))
self.time = self.getEXIFtime( filename )
# load the image
def loadImage(self, image=None):
"""
load image either from path, PIL.Image or numpy.ndarray
args:
image (str/path or PIL.Image or numpy.ndarray): image to load
"""
### create self._image according to specified argument ###
success = False
# looks like PIL image
if isinstance(image, PIL.Image.Image):
logger.info("reading image directly from PIL.Image.Image object")
self.image = image
success = True
# argument is an image aleady
elif isinstance(image, Image):
logger.debug("copying image directly from Image")
# copy over attributes
self.__dict__.update(image.__dict__)
### copy everything by hand ###
self.time = copy.deepcopy(image.time)
self.coordinates = copy.deepcopy(image.coordinates)
self.path = copy.deepcopy(image.path)
self.data = copy.deepcopy(image.data)
success = True
# argument looks like path
elif isinstance(image, str):
logger.debug("image argument is a string")
if os.path.isfile(image):
logger.debug("image argument is a valid path")
logger.info("reading image from path")
self.image = PIL.Image.open(image)
self.path = image # set path
success = True
else:
logger.warning(
"image argument is not a valid path! Can't read image.")
# self.image = PIL.Image.new(mode="RGB", size=(1, 1))
# looks like numpy array
elif isinstance(image, np.ndarray):
logger.debug("argument is a numpy array")
logger.debug("creating image from numpy array")
self.path = None # reset path because data comes from array
raise Exception(" ".join([
"Creating Image from ndarray is not implemented yet.",
"use PIL.Image.fromarray and pass that to loadImage() instead."
]))
self.data = image
success = True
# TODO: does not work like this, mode has to be specified somehow
# nothing correct specified
else:
logger.info("nothing specified to read image. Nothing loaded.")
# self.image = PIL.Image.new(mode="RGB", size=(1, 1)) # hard coded
# load time from filename
def _get_time_from_filename(self, fmt, filename=None):
if isinstance(filename, str): f = filename
else: f = self.path
if not f is None:
f = os.path.basename(f)
return datetime.datetime.strptime(f, fmt)
else:
raise ValueError("Neither filename nor self.path is defined.")
# try to load the time from filename
def loadTimefromfilename(self, fmt, filename=None):
self.time = self._get_time_from_filename(fmt, filename)
# set time of image
def setTime(self, time):
"""
set internal image time
args:
time (datetime object): time to set
"""
if isinstance(time, datetime.datetime):
self.time = time
else:
logger.warning(
"time is not a datetime object. Ignoring time setting.")
##############################
### Information extraction ###
##############################
def getImageSunPosition(self, threshold=240, sun_greater_than=7):
"""
attempt to find the sun on the image
args:
threshold (integer): threshold of red channel value to interpret
as bright enough
sun_greater_than (integer): approx min. number of sun pixels
returns:
2-tuple of floats: (row, col)
"""
data = self.data.copy()
data[:100, :, :] = 0
data[-100:, :, :] = 0
data = scipy.ndimage.filters.gaussian_filter(data, 3)
sun_filter = data[:,:,0] > threshold
        sun_filter = skimage.morphology.remove_small_objects(sun_filter, sun_greater_than)
sun_position = scipy.ndimage.center_of_mass(sun_filter)
return sun_position
# get real-world sun elevation
def getRealSunElevation(self):
try:
return np.pi/2 - utils.deg2rad(pysolar.solar.get_altitude(
self.latitude, self.longitude, self.time))
except:
logger.error("Are latitude, longitude and time defined?")
raise
# get real-world sun azimuth
def getRealSunAzimuth(self):
try:
azimuth = abs(pysolar.solar.get_azimuth(
self.latitude, self.longitude, self.time))
azimuth = utils.deg2rad(np.asarray(azimuth))
azimuth = azimuth + np.pi
azimuth = (azimuth + 2*np.pi) % (2*np.pi)
return azimuth
except:
logger.error("Are latitude, longitude and time defined?")
raise
### projection to carthesian coordinates
def calculate_carthesian_coordinates(self):
self.position = utils.lonlat2xy(self.longitude, self.latitude
,coordinates = True)
##########################
### Image manipulation ###
##########################
def crop(self, box):
"""
crop the image in-place to a box
args:
box (4-tuple of int): (left, top, right, bottom) (see PIL documentation)
"""
# crop metadata
# do this BEFORE re-setting the image
# otherwise, the shapes wouldn't match and the coordinate class
# would re-init the coordinates with empty masked arrays
self.coordinates.crop(box)
# crop image
# somehow, self.image.crop( box ) alone does not work,
# the self.image property has to be set...
self.image = self.image.crop(box)
def cut(self, box):
"""
cut out a box of the image and return it
args:
box (4-tuple of int): (left, top, right, bottom) (see PIL documentation)
returns:
a new cut image
"""
# copy image
# deepcopy does not work somehow, so create a new image exactly like
# this one
cutimage = Image(self)
# crop image
cutimage.crop(box)
return cutimage
######################
### Transformation ###
######################
def applyDistortionMap(self, map, inplace=False, order=0):
# for some reason, isinstance(map, fisheye.DistortionMap)
# or isinstance(map, DistortionMap) does not work?!
# this solves it...
if not map.__class__.__name__ == "DistortionMap":
raise TypeError("map has to be a DistortionMap")
if not np.shape(self.data)[:2] == map.src_shape:
logger.warning("Map source shape is not defined or does not match!")
if inplace: image = self # operate on this image
else: image = Image( self ) # copy over this image
# apply map
logger.debug("applying distortion map...")
# This is NOT very elegant...
# I don't know a better possibility to loop over the last index
# than this. The problem is, that a grayscale image has no third index.
# ...
if len(np.shape(image.data)) == 3:
for layer in range(np.shape(image.data)[2]):
layerout = scipy.ndimage.interpolation.map_coordinates(
input = self.data[:,:,layer],
coordinates = map.T, # map has to be transposed somehow
order = order
)
try: out = np.dstack( (out, layerout) ) # add to stack
except: out = layerout # first addition
image.data = out # set image data
#image.data = scipy.ndimage.filters.median_filter(image.data,(5,5,1))
else: # only 2 dim...
image.data = scipy.ndimage.interpolation.map_coordinates(
input = image.data,
coordinates = map.T, # map has to be transposed somehow
order = order
)
#image.data = scipy.ndimage.filters.median_filter(image.data,(5,5))
# set coordinates from DistortionMap
image.coordinates = map.out_coord
logger.debug("done applying distortion map.")
if not inplace: # return new image if not inplace
return image
| gpl-3.0 | -5,782,873,232,737,171,000 | 33.858388 | 103 | 0.567813 | false | 4.49312 | false | false | false |
opennode/waldur-mastermind | src/waldur_rancher/migrations/0022_rancheruserprojectlink.py | 2 | 1404 | # Generated by Django 2.2.10 on 2020-06-11 09:49
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('waldur_rancher', '0021_rancher_user_uuid'),
]
operations = [
migrations.CreateModel(
name='RancherUserProjectLink',
fields=[
(
'id',
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name='ID',
),
),
('backend_id', models.CharField(blank=True, max_length=255)),
('role', models.CharField(max_length=255)),
(
'project',
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to='waldur_rancher.Project',
),
),
(
'user',
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to='waldur_rancher.RancherUser',
),
),
],
options={'unique_together': {('user', 'project', 'role')},},
),
]
| mit | 4,729,473,016,516,208,000 | 30.2 | 77 | 0.418091 | false | 5.05036 | false | false | false |
buremba/django-simit | simit/templatetags/simit_tags.py | 1 | 2940 | import re
from django import template
from django.core.exceptions import ObjectDoesNotExist
from django.template import Library
from django.core.cache import cache
from simit.models import CustomArea, Menu
register = Library()
CACHE_TIMEOUT = 60 * 60 * 24
class VariableTag(template.Node):
def __init__(self, slug, name=None, var_type=None, category=None, description=None):
self.var_type = var_type
self.slug = slug
self.name = name
self.category = category
self.description = description
def render(self, context):
slug = template.Variable(self.slug).resolve(context)
cache_key = "simit:variable:%s" % slug
c = cache.get(cache_key)
if c is not None:
return c
try:
val = CustomArea.objects.get(slug=slug).value
except ObjectDoesNotExist:
val = ""
cache.set(cache_key, val, CACHE_TIMEOUT)
return val
@register.tag
def variable(_, token):
try:
args = re.findall(r'(\".+?\")', token.contents)
slug = args[0]
if len(args) > 1:
name = args[1]
var_type = args[2]
category = args[3] if len(args) > 3 else None
desc = args[4] if len(args) > 4 else None
return VariableTag(slug, name, var_type, category, desc)
    except (IndexError, ValueError):
raise template.TemplateSyntaxError, "%r tag requires arguments" % token.contents.split()[0]
return VariableTag(slug)
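# Template usage sketch (slug and labels below are illustrative, not taken
# from the project):
#   {% variable "footer_text" "Footer text" "text" "General" "Shown on every page" %}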
@register.filter
def variable(slug):
cache_key = "simit:variable:%s" % slug
c = cache.get(cache_key)
if c is not None:
return c
try:
area = CustomArea.objects.get(slug=slug)
area_type = area.type
val = area.value
except ObjectDoesNotExist:
area_type = None
val = ""
if area_type == 5:
val = True if val == "True" else False
cache.set(cache_key, val, CACHE_TIMEOUT)
return val
class FetchMenu(template.Node):
def __init__(self, lookup, var):
self.variable = var
self.lookup = lookup
def render(self, context):
lookup = template.Variable(self.lookup).resolve(context)
cache_key = "simit:menu:%s" % lookup
menus = cache.get(cache_key)
if menus is None:
menus = Menu.objects.filter(section__name=lookup)
cache.set(cache_key, menus, CACHE_TIMEOUT)
context[self.variable] = menus
return ''
@register.tag
def getmenu(parser, token):
try:
tag_name, arg = token.contents.split(None, 1)
except ValueError:
raise template.TemplateSyntaxError, "%r tag requires arguments" % token.contents.split()[0]
m = re.search(r'(.*?) as (\w+)', arg)
if not m:
raise template.TemplateSyntaxError, "%r tag had invalid arguments" % tag_name
format_string, var_name = m.groups()
return FetchMenu(format_string, var_name)
| mit | 6,695,195,765,043,607,000 | 27.543689 | 99 | 0.611224 | false | 3.803364 | false | false | false |
Alphadelta14/python-servicediscovery | servicediscovery/methods/method.py | 1 | 1951 |
"""
Method
Ways to detect the registry node
Author: Alpha <[email protected]>
"""
import socket
from servicediscovery.client import ServiceRegistry
__all__ = ['Method']
class Method(object):
"""A registry lookup method"""
def register(self, client):
"""Register with a registry via this method
Parameters
----------
client : ServiceClient
Client to try registering from
Returns
-------
registry : ServiceRegistry or None
the registry if found
"""
registry = self.get_registry()
registry._clients.append(client)
return registry
def get_registry(self):
"""Get a registry. Not available for the base class"""
raise NotImplementedError('Cannot find a registry with no method')
@staticmethod
def found_registry(ip_address, port):
"""Factory that returns a registry after finding it
Parameters
----------
ip_address : str
Address of the service
port : int
Port of the service
"""
registry = ServiceRegistry()
registry.ip_address = ip_address
registry.port = port
return registry
@staticmethod
def check_ip(ip_address, port, family=socket.AF_INET):
"""Checks that an IP:port is open
Parameters
----------
ip_address : str
Address to check
port : int
Port to check
Returns
-------
ip_address, port : (str, int) or (None, None)
"""
sock = socket.socket(family, socket.SOCK_STREAM)
sock.settimeout(1.0) # TODO: configurable
try:
sock.connect((ip_address, port))
except:
return None, None
else:
pass # TODO: acknowledge
finally:
sock.close()
return ip_address, port
| mit | -8,475,473,471,174,790,000 | 23.3875 | 74 | 0.55305 | false | 4.865337 | false | false | false |
ctripcorp/tars | tars/deployment/models/targets.py | 1 | 3910 | from django.db import models
from constance import config
from rest_client.exceptions import SLBClientError
from roll_engine.utils.log import get_logger
from roll_engine.db import SoftDeleteManager
from roll_engine.models import DeploymentTarget
from tars.server.models import Server, Group
from tars.deployment.fsm import TarsTargetFSMixin
from tars.exceptions import PackageError
from .batches import TarsDeploymentBatch
es_logger = get_logger()
class TarsDeploymentTarget(TarsTargetFSMixin, DeploymentTarget):
batch = models.ForeignKey(TarsDeploymentBatch, related_name='targets',
db_constraint=False, null=True)
server = models.ForeignKey(Server, related_name='targets',
db_constraint=False, null=True)
is_deleted = models.BooleanField(default=False)
objects = SoftDeleteManager()
class Meta:
db_table = 'deployment_targets'
salt_timeout = 300
def get_object(self):
if self.deployment.group.g_type == Group.G_TYPE_ENUM.join:
self.__class__ = TarsJoinGroupTarget
return self
def delete(self):
self.is_deleted = True
self.save()
@DeploymentTarget.hostname.getter
def hostname(self):
hostname = super(TarsDeploymentTarget, self).hostname
return self.server.hostname if hostname is None else hostname
@DeploymentTarget.ip_address.getter
def ip_address(self):
ip_address = super(TarsDeploymentTarget, self).ip_address
return self.server.ip_address if ip_address is None else ip_address
@property
def deployment(self):
return self.batch.deployment
@property
def agency(self):
return self.deployment.agency(self)
def _prepare_common_salt_kw(self):
""" common params for app_container, ssl, etc. it's safe to pass salt module
useless params, which are simply ignored
"""
group = self.deployment.group
app = self.deployment.application
salt_kw = {'app_container': app.container, 'ssl': group.is_ssl}
if app.language in ['java', 'nodejs', 'golang']:
salt_kw.update(
httpport=group.business_port,
health_check_url=group.health_check_url,
adminport=group.shutdown_port
)
return salt_kw
def download_package(self):
return self.agency.download_package()
def install_app(self, **salt_kw):
return self.agency.install_app(**salt_kw)
def verify_app(self, **salt_kw):
return self.agency.verify_app(**salt_kw)
def skip(self, **salt_kw):
return self.agency.skip(**salt_kw)
def pull_out(self):
slb = self.deployment.slb_client
try:
result = slb.pull_out(self.ip_address)
except SLBClientError as e:
es_logger.error(str(e), extra=self.extras)
result = False
return result
def pull_in(self):
slb = self.deployment.slb_client
try:
result = slb.pull_in(self.ip_address)
except SLBClientError as e:
es_logger.error(str(e), extra=self.extras)
result = False
return result
class TarsJoinGroupTarget(TarsDeploymentTarget):
class Meta:
proxy = True
def _prepare_common_salt_kw(self):
kw = super(TarsJoinGroupTarget, self)._prepare_common_salt_kw()
# check if this targets belongs to multiple groups, which does not have same meta eg. ssl in common
related_group_ssl_options = \
self.deployment.group.servers.verbose_all().filter(hostname=self.hostname).\
values_list('group__is_ssl', flat=True)
# uniform ssl option
if len(related_group_ssl_options) > 1:
kw.update(
ssl=all(related_group_ssl_options)
)
return kw
| apache-2.0 | -6,586,950,096,270,718,000 | 30.031746 | 107 | 0.638875 | false | 3.989796 | false | false | false |
anjalisood/spark-tk | python/sparktk/graph/ops/connected_components.py | 12 | 1907 | # vim: set encoding=utf-8
# Copyright (c) 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def connected_components(self):
"""
    Connected components groups all the vertices of a graph by whether or not
    there is a path between them. This method returns a frame with the
    vertices and their corresponding components.
Parameters
----------
:return: (Frame) Frame containing the vertex id's and their components
Examples
--------
>>> vertex_schema = [('id', int)]
>>> edge_schema = [('src', int), ('dst', int)]
>>> vertex_rows = [ [1], [2], [3], [4], [5] ]
>>> edge_rows = [ [1, 2], [1, 3], [2, 3], [4, 5] ]
>>> vertex_frame = tc.frame.create(vertex_rows, vertex_schema)
>>> edge_frame = tc.frame.create(edge_rows, edge_schema)
>>> graph = tc.graph.create(vertex_frame, edge_frame)
>>> result = graph.connected_components()
>>> result.inspect()
[#] id component
===================
[0] 1 0
[1] 2 0
[2] 3 0
[3] 4 8589934593
[4] 5 8589934593
"""
from sparktk.frame.frame import Frame
return Frame(self._tc, self._scala.connectedComponents())
| apache-2.0 | -4,965,774,323,346,621,000 | 31.660714 | 81 | 0.602515 | false | 3.254448 | false | false | false |
pymir3/pymir3 | lessnoise_specs.py | 1 | 1058 | import glob
import remove_random_noise as rrn
import os
filename, file_extension = os.path.splitext('/path/to/somefile.ext')
if __name__ == "__main__":
wavdir = "./links/"
wavs = sorted(glob.glob(wavdir + "*.wav"))
pngdir = "./pngs/"
#print wavs
for f in wavs:
nome, ext = os.path.splitext(f)
pngfile = pngdir + nome.split("/")[-1] + ".png"
print f, "->", pngfile
rrn.remove_random_noise_from_wav(f, False, pngfile, 'log10', passes=1)
# thedir = "/home/juliano/base_teste_rafael_94_especies"
# linkdir = "./links/"
# dirs = [ name for name in os.listdir(thedir) if os.path.isdir(os.path.join(thedir, name)) ]
#
# for d in dirs:
# p = "/home/juliano/base_teste_rafael_94_especies" + "/" + d
# files = sorted(glob.glob(p + "/*.wav"))
# i = 0
# for f in files:
# print f.split("/")[-1]
# call(['ln', '-s', f, linkdir ])
# call(['mv', linkdir + f.split("/")[-1], linkdir + d + "." + str(i) + '.wav'])
# i+=1 | mit | 8,582,541,661,980,047,000 | 34.3 | 97 | 0.524575 | false | 2.867209 | false | false | false |
ericd/redeem | redeem/gcodes/M557.py | 2 | 1668 | """
GCode M557
Example: M557 P1 X30 Y40.5
Set the points at which the bed will be probed to compensate for its plane
being slightly out of horizontal. The P value is the index of the point
(indices start at 0) and the X and Y values are the position to move extruder 0
to to probe the bed. An implementation should allow a minimum of three points
(P0, P1 and P2). This just records the point coordinates; it does not actually
do the probing. See G32.
Author: Elias Bakken
License: CC BY-SA: http://creativecommons.org/licenses/by-sa/2.0/
"""
from GCodeCommand import GCodeCommand
import logging
class M557(GCodeCommand):
def execute(self, g):
if g.has_letter("P"):
p = int(g.get_value_by_letter("P"))
else:
logging.warning("M557: Missing P-parameter")
return
if g.has_letter("X"):
X = float(g.get_value_by_letter("X"))
else:
logging.warning("M557: Missing X-parameter")
return
if g.has_letter("Y"):
Y = float(g.get_value_by_letter("Y"))
else:
logging.warning("M557: Missing Y-parameter")
return
if g.has_letter("Z"):
Z = float(g.get_value_by_letter("Z"))
else:
logging.warning("M557: Missing Z-parameter")
Z = 0
if len(self.printer.probe_points) > p:
self.printer.probe_points[p] = {"X": X, "Y": Y, "Z": Z}
else:
self.printer.probe_points.append({"X": X, "Y": Y, "Z": Z})
self.printer.probe_heights.append(0)
def get_description(self):
return "Set probe point"
| gpl-3.0 | 2,417,622,945,245,787,600 | 31.076923 | 79 | 0.589928 | false | 3.467775 | false | false | false |
14rcole/MediaBin | app/audfprint/hash_table.py | 1 | 15577 | """
hash_table.py
Python implementation of the very simple, fixed-array hash table
used for the audfprint fingerprinter.
2014-05-25 Dan Ellis [email protected]
"""
from __future__ import print_function
import numpy as np
import random
import cPickle as pickle
import os, gzip
import scipy.io
import math
# Current format version
HT_VERSION = 20140920
# Earliest acceptable version
HT_COMPAT_VERSION = 20140920
def _bitsfor(maxval):
""" Convert a maxval into a number of bits (left shift).
Raises a ValueError if the maxval is not a power of 2. """
maxvalbits = int(round(math.log(maxval)/math.log(2)))
if maxval != (1 << maxvalbits):
raise ValueError("maxval must be a power of 2, not %d" % maxval)
return maxvalbits
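# Quick illustration (hypothetical values): _bitsfor(16384) returns 14, while
# _bitsfor(1000) raises ValueError because 1000 is not a power of 2.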
class HashTable(object):
"""
Simple hash table for storing and retrieving fingerprint hashes.
:usage:
>>> ht = HashTable(size=2**10, depth=100)
>>> ht.store('identifier', list_of_landmark_time_hash_pairs)
>>> list_of_ids_tracks = ht.get_hits(hash)
"""
def __init__(self, filename=None, hashbits=20, depth=100, maxtime=16384):
""" allocate an empty hash table of the specified size """
if filename is not None:
self.params = self.load(filename)
else:
self.hashbits = hashbits
self.depth = depth
self.maxtimebits = _bitsfor(maxtime)
# allocate the big table
size = 2**hashbits
self.table = np.zeros((size, depth), dtype=np.uint32)
# keep track of number of entries in each list
self.counts = np.zeros(size, dtype=np.int32)
# map names to IDs
self.names = []
# track number of hashes stored per id
self.hashesperid = np.zeros(0, np.uint32)
# Empty params
self.params = {}
# Record the current version
self.ht_version = HT_VERSION
# Mark as unsaved
self.dirty = True
def reset(self):
""" Reset to empty state (but preserve parameters) """
self.table[:,:] = 0
self.counts[:] = 0
self.names = []
self.hashesperid.resize(0)
self.dirty = True
def store(self, name, timehashpairs):
""" Store a list of hashes in the hash table
associated with a particular name (or integer ID) and time.
"""
id_ = self.name_to_id(name, add_if_missing=True)
# Now insert the hashes
hashmask = (1 << self.hashbits) - 1
#mxtime = self.maxtime
maxtime = 1 << self.maxtimebits
timemask = maxtime - 1
# Try sorting the pairs by hash value, for better locality in storing
#sortedpairs = sorted(timehashpairs, key=lambda x:x[1])
sortedpairs = timehashpairs
# Tried making it an np array to permit vectorization, but slower...
#sortedpairs = np.array(sorted(timehashpairs, key=lambda x:x[1]),
# dtype=int)
# Keep only the bottom part of the time value
#sortedpairs[:,0] = sortedpairs[:,0] % self.maxtime
# Keep only the bottom part of the hash value
#sortedpairs[:,1] = sortedpairs[:,1] & hashmask
idval = id_ << self.maxtimebits
for time_, hash_ in sortedpairs:
# How many already stored for this hash?
count = self.counts[hash_]
# Keep only the bottom part of the time value
#time_ %= mxtime
time_ &= timemask
# Keep only the bottom part of the hash value
hash_ &= hashmask
# Mixin with ID
val = (idval + time_) #.astype(np.uint32)
if count < self.depth:
# insert new val in next empty slot
#slot = self.counts[hash_]
self.table[hash_, count] = val
else:
# Choose a point at random
slot = random.randint(0, count)
# Only store if random slot wasn't beyond end
if slot < self.depth:
self.table[hash_, slot] = val
# Update record of number of vals in this bucket
self.counts[hash_] = count + 1
# Record how many hashes we (attempted to) save for this id
self.hashesperid[id_] += len(timehashpairs)
# Mark as unsaved
self.dirty = True
def get_entry(self, hash_):
""" Return np.array of [id, time] entries
associate with the given hash as rows.
"""
vals = self.table[hash_, :min(self.depth, self.counts[hash_])]
        maxtimemask = (1 << self.maxtimebits) - 1
ids = vals >> self.maxtimebits
return np.c_[ids, vals & maxtimemask].astype(np.int32)
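    # Packing reminder (restates the scheme used by store() and get_entry()):
    # each uint32 table entry is
    #   val = (track_id << self.maxtimebits) | (time & ((1 << self.maxtimebits) - 1))
    # so track ids live in the high bits and wrapped frame times in the low bits.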
def get_hits_orig(self, hashes):
""" Return np.array of [id, delta_time, hash, time] rows
associated with each element in hashes array of [time, hash] rows.
This is the original version that actually calls get_entry().
"""
# Allocate to largest possible number of hits
hits = np.zeros((np.shape(hashes)[0]*self.depth, 4), np.int32)
nhits = 0
# Fill in
for time_, hash_ in hashes:
idstimes = self.get_entry(hash_)
nids = np.shape(idstimes)[0]
hitrows = nhits + np.arange(nids)
hits[hitrows, 0] = idstimes[:, 0]
hits[hitrows, 1] = idstimes[:, 1] - time_
hits[hitrows, 2] = hash_
hits[hitrows, 3] = time_
nhits += nids
# Discard the excess rows
hits.resize( (nhits, 4) )
return hits
def get_hits(self, hashes):
""" Return np.array of [id, delta_time, hash, time] rows
associated with each element in hashes array of [time, hash] rows.
This version has get_entry() inlined, it's about 30% faster.
"""
# Allocate to largest possible number of hits
nhashes = np.shape(hashes)[0]
hits = np.zeros((nhashes*self.depth, 4), np.int32)
nhits = 0
maxtimemask = (1 << self.maxtimebits) - 1
# Fill in
for ix in xrange(nhashes):
time_ = hashes[ix][0]
hash_ = hashes[ix][1]
nids = min(self.depth, self.counts[hash_])
tabvals = self.table[hash_, :nids]
hitrows = nhits + np.arange(nids)
hits[hitrows, 0] = tabvals >> self.maxtimebits
hits[hitrows, 1] = (tabvals & maxtimemask) - time_
hits[hitrows, 2] = hash_
hits[hitrows, 3] = time_
nhits += nids
# Discard the excess rows
hits.resize( (nhits, 4) )
return hits
def save(self, name, params=None):
""" Save hash table to file <name>,
including optional addition params
"""
# Merge in any provided params
if params:
for key in params:
self.params[key] = params[key]
with gzip.open(name, 'wb') as f:
pickle.dump(self, f, pickle.HIGHEST_PROTOCOL)
self.dirty = False
nhashes = sum(self.counts)
print("Saved fprints for", sum(n is not None for n in self.names),
"files (", nhashes, "hashes) to", name)
# Report the proportion of dropped hashes (overfull table)
dropped = nhashes - sum(np.minimum(self.depth, self.counts))
print("Dropped hashes=", dropped, "(%.2f%%)" % (
100.0*dropped/max(1, nhashes)))
def load(self, name):
""" Read either pklz or mat-format hash table file """
ext = os.path.splitext(name)[1]
if ext == '.mat':
params = self.load_matlab(name)
else:
params = self.load_pkl(name)
print("Read fprints for", sum(n is not None for n in self.names),
"files (", sum(self.counts), "hashes) from", name)
return params
def load_pkl(self, name):
""" Read hash table values from file <name>, return params """
with gzip.open(name, 'rb') as f:
temp = pickle.load(f)
assert temp.ht_version >= HT_COMPAT_VERSION
params = temp.params
self.hashbits = temp.hashbits
self.depth = temp.depth
if hasattr(temp, 'maxtimebits'):
self.maxtimebits = temp.maxtimebits
else:
self.maxtimebits = _bitsfor(temp.maxtime)
self.table = temp.table
self.counts = temp.counts
self.names = temp.names
self.hashesperid = np.array(temp.hashesperid).astype(np.uint32)
self.ht_version = temp.ht_version
self.dirty = False
return params
def load_matlab(self, name):
""" Read hash table from version saved by Matlab audfprint.
:params:
name : str
filename of .mat matlab fp dbase file
:returns:
params : dict
dictionary of parameters from the Matlab file including
'mat_version' : float
version read from Matlab file (must be >= 0.90)
'hoptime' : float
hoptime read from Matlab file (must be 0.02322)
'targetsr' : float
target sampling rate from Matlab file (must be 11025)
"""
mht = scipy.io.loadmat(name)
params = {}
params['mat_version'] = mht['HT_params'][0][0][-1][0][0]
assert params['mat_version'] >= 0.9
self.hashbits = _bitsfor(mht['HT_params'][0][0][0][0][0])
self.depth = mht['HT_params'][0][0][1][0][0]
self.maxtimebits = _bitsfor(mht['HT_params'][0][0][2][0][0])
params['hoptime'] = mht['HT_params'][0][0][3][0][0]
params['targetsr'] = mht['HT_params'][0][0][4][0][0]
params['nojenkins'] = mht['HT_params'][0][0][5][0][0]
# Python doesn't support the (pointless?) jenkins hashing
assert params['nojenkins']
self.table = mht['HashTable'].T
self.counts = mht['HashTableCounts'][0]
self.names = [str(val[0]) if len(val) > 0 else []
for val in mht['HashTableNames'][0]]
        self.hashesperid = np.array(mht['HashTableLengths'][0]).astype(np.uint32)
# Matlab uses 1-origin for the IDs in the hashes, so rather than
# rewrite them all, we shift the corresponding decode tables
# down one cell
self.names.insert(0, '')
self.hashesperid = np.append([0], self.hashesperid)
# Otherwise unmodified database
self.dirty = False
return params
def totalhashes(self):
""" Return the total count of hashes stored in the table """
return np.sum(self.counts)
def merge(self, ht):
""" Merge in the results from another hash table """
# All the items go into our table, offset by our current size
# Check compatibility
assert self.maxtimebits == ht.maxtimebits
ncurrent = len(self.names)
#size = len(self.counts)
self.names += ht.names
self.hashesperid = np.append(self.hashesperid, ht.hashesperid)
# All the table values need to be increased by the ncurrent
idoffset = (1 << self.maxtimebits) * ncurrent
for hash_ in np.nonzero(ht.counts)[0]:
allvals = np.r_[self.table[hash_, :self.counts[hash_]],
ht.table[hash_, :ht.counts[hash_]] + idoffset]
# ht.counts[hash_] may be more than the actual number of
# hashes we obtained, if ht.counts[hash_] > ht.depth.
# Subselect based on actual size.
if len(allvals) > self.depth:
# Our hash bin is filled: randomly subselect the
# hashes, and update count to accurately track the
# total number of hashes we've seen for this bin.
somevals = np.random.permutation(allvals)[:self.depth]
self.table[hash_, ] = somevals
self.counts[hash_] += ht.counts[hash_]
else:
# Our bin isn't full. Store all the hashes, and
# accurately track how many values it contains. This
# may mean some of the hashes counted for full buckets
# in ht are "forgotten" if ht.depth < self.depth.
self.table[hash_, :len(allvals)] = allvals
self.counts[hash_] = len(allvals)
self.dirty = True
def name_to_id(self, name, add_if_missing=False):
""" Lookup name in the names list, or optionally add. """
if type(name) is str:
# lookup name or assign new
if name not in self.names:
if not add_if_missing:
raise ValueError("name " + name + " not found")
# Use an empty slot in the list if one exists.
try:
id_ = self.names.index(None)
self.names[id_] = name
self.hashesperid[id_] = 0
except ValueError:
self.names.append(name)
self.hashesperid = np.append(self.hashesperid, [0])
id_ = self.names.index(name)
else:
# we were passed in a numerical id
id_ = name
return id_
def remove(self, name):
""" Remove all data for named entity from the hash table. """
id_ = self.name_to_id(name)
# If we happen to be removing the first item (id_ == 0), this will
# match every empty entry in table. This is very inefficient, but
# it still works, and it's just one ID. We could have fixed it by
# making the IDs written in to table start an 1, but that would mess
# up backwards compatibility.
id_in_table = (self.table >> self.maxtimebits) == id_
hashes_removed = 0
for hash_ in np.nonzero(np.max(id_in_table, axis=1))[0]:
vals = self.table[hash_, :self.counts[hash_]]
vals = [v for v, x in zip(vals, id_in_table[hash_])
if not x]
self.table[hash_] = np.hstack([vals,
np.zeros(self.depth - len(vals))])
# This will forget how many extra hashes we had dropped until now.
self.counts[hash_] = len(vals)
hashes_removed += np.sum(id_in_table[hash_])
self.names[id_] = None
self.hashesperid[id_] = 0
self.dirty = True
print("Removed", name, "(", hashes_removed, "hashes).")
def retrieve(self, name):
"""Return a list of (time, hash) pairs by finding them in the table."""
timehashpairs = []
id_ = self.name_to_id(name)
maxtimemask = (1 << self.maxtimebits) - 1
# Still a bug for id_ 0.
hashes_containing_id = np.nonzero(
np.max((self.table >> self.maxtimebits) == id_, axis=1))[0]
for hash_ in hashes_containing_id:
entries = self.table[hash_, :self.counts[hash_]]
matching_entries = np.nonzero(
(entries >> self.maxtimebits) == id_)[0]
times = (entries[matching_entries] & maxtimemask)
timehashpairs.extend([(time, hash_) for time in times])
return timehashpairs
def list(self, print_fn=None):
""" List all the known items. """
if not print_fn:
print_fn = print
for name, count in zip(self.names, self.hashesperid):
if name:
print_fn(name + " (" + str(count) + " hashes)")
| apache-2.0 | 6,069,818,742,438,169,000 | 40.428191 | 79 | 0.557168 | false | 3.88357 | false | false | false |
procangroup/edx-platform | common/lib/xmodule/xmodule/mongo_utils.py | 15 | 3402 | """
Common MongoDB connection functions.
"""
import logging
import pymongo
from pymongo import ReadPreference
from mongodb_proxy import MongoProxy
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
# pylint: disable=bad-continuation
def connect_to_mongodb(
db, host,
port=27017, tz_aware=True, user=None, password=None,
retry_wait_time=0.1, proxy=True, **kwargs
):
"""
Returns a MongoDB Database connection, optionally wrapped in a proxy. The proxy
handles AutoReconnect errors by retrying read operations, since these exceptions
typically indicate a temporary step-down condition for MongoDB.
"""
# The MongoReplicaSetClient class is deprecated in Mongo 3.x, in favor of using
# the MongoClient class for all connections. Update/simplify this code when using
# PyMongo 3.x.
if kwargs.get('replicaSet'):
# Enable reading from secondary nodes in the MongoDB replicaset by using the
# MongoReplicaSetClient class.
# The 'replicaSet' parameter in kwargs is required for secondary reads.
# The read_preference should be set to a proper value, like SECONDARY_PREFERRED.
mongo_client_class = pymongo.MongoReplicaSetClient
else:
# No 'replicaSet' in kwargs - so no secondary reads.
mongo_client_class = pymongo.MongoClient
# If read_preference is given as a name of a valid ReadPreference.<NAME> constant
# such as "SECONDARY_PREFERRED", convert it. Otherwise pass it through unchanged.
if 'read_preference' in kwargs:
read_preference = getattr(ReadPreference, kwargs['read_preference'], None)
if read_preference is not None:
kwargs['read_preference'] = read_preference
mongo_conn = pymongo.database.Database(
mongo_client_class(
host=host,
port=port,
tz_aware=tz_aware,
document_class=dict,
**kwargs
),
db
)
if proxy:
mongo_conn = MongoProxy(
mongo_conn,
wait_time=retry_wait_time
)
# If credentials were provided, authenticate the user.
if user is not None and password is not None:
mongo_conn.authenticate(user, password)
return mongo_conn
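# Minimal usage sketch (database name, host and credentials are illustrative):
#   db = connect_to_mongodb(
#       'edxapp', 'localhost', port=27017,
#       user='edxapp', password='secret',
#       replicaSet='rs0', read_preference='SECONDARY_PREFERRED',
#   )
#   db.collection_names()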
def create_collection_index(
collection, keys,
ignore_created=True, ignore_created_opts=True, **kwargs
):
"""
Create a MongoDB index in a collection. Optionally,
ignore errors related to the index already existing.
"""
# For an explanation of the error codes:
# https://github.com/mongodb/mongo/blob/v3.0/src/mongo/db/catalog/index_catalog.cpp#L542-L583
# https://github.com/mongodb/mongo/blob/v3.0/src/mongo/base/error_codes.err#L70-L87
# pylint: disable=invalid-name
INDEX_ALREADY_EXISTS = 68
INDEX_OPTIONS_CONFLICT = 85
try:
collection.create_index(keys, **kwargs)
except pymongo.errors.OperationFailure as exc:
errors_to_ignore = []
if ignore_created:
errors_to_ignore.append(INDEX_ALREADY_EXISTS)
if ignore_created_opts:
errors_to_ignore.append(INDEX_OPTIONS_CONFLICT)
if exc.code in errors_to_ignore:
logger.warning("Existing index in collection '{}' remained unchanged!: {}".format(
collection.full_name, exc.details['errmsg'])
)
else:
raise exc
| agpl-3.0 | -2,350,356,380,146,777,600 | 34.810526 | 97 | 0.665785 | false | 4.093863 | false | false | false |
desbma/sacad | sacad/tqdm_logging.py | 1 | 1439 | """ Code to help using the logging module with tqdm progress bars. """
import contextlib
import logging
import threading
logging_handlers_lock = threading.Lock()
class TqdmLoggingHandler(logging.Handler):
""" Logging handler sending messages to the tqdm write method (avoids overlap). """
def __init__(self, tqdm, *args, **kwargs):
self.tqdm = tqdm
super().__init__(*args, **kwargs)
def emit(self, record):
""" See logging.Handler.emit. """
msg = self.format(record)
self.tqdm.write(msg)
@contextlib.contextmanager
def redirect_logging(tqdm_obj, logger=logging.getLogger()):
""" Redirect logging to a TqdmLoggingHandler object and then restore the original logging behavior. """
with logging_handlers_lock:
# remove current handlers
prev_handlers = []
for handler in logger.handlers.copy():
prev_handlers.append(handler)
logger.removeHandler(handler)
# add tqdm handler
tqdm_handler = TqdmLoggingHandler(tqdm_obj)
        if prev_handlers and prev_handlers[-1].formatter is not None:
tqdm_handler.setFormatter(prev_handlers[-1].formatter)
logger.addHandler(tqdm_handler)
try:
yield
finally:
# restore handlers
with logging_handlers_lock:
logger.removeHandler(tqdm_handler)
for handler in prev_handlers:
logger.addHandler(handler)
| mpl-2.0 | -2,664,702,171,294,622,000 | 29.617021 | 107 | 0.646282 | false | 4.099715 | false | false | false |
ulope/django | tests/introspection/tests.py | 13 | 7475 | from __future__ import unicode_literals
from django.db import connection
from django.db.utils import DatabaseError
from django.test import TestCase, skipUnlessDBFeature
from .models import Reporter, Article
class IntrospectionTests(TestCase):
def test_table_names(self):
tl = connection.introspection.table_names()
self.assertEqual(tl, sorted(tl))
self.assertIn(Reporter._meta.db_table, tl,
"'%s' isn't in table_list()." % Reporter._meta.db_table)
self.assertIn(Article._meta.db_table, tl,
"'%s' isn't in table_list()." % Article._meta.db_table)
def test_django_table_names(self):
with connection.cursor() as cursor:
cursor.execute('CREATE TABLE django_ixn_test_table (id INTEGER);')
tl = connection.introspection.django_table_names()
cursor.execute("DROP TABLE django_ixn_test_table;")
self.assertNotIn('django_ixn_test_table', tl,
"django_table_names() returned a non-Django table")
def test_django_table_names_retval_type(self):
# Ticket #15216
with connection.cursor() as cursor:
cursor.execute('CREATE TABLE django_ixn_test_table (id INTEGER);')
tl = connection.introspection.django_table_names(only_existing=True)
self.assertIs(type(tl), list)
tl = connection.introspection.django_table_names(only_existing=False)
self.assertIs(type(tl), list)
def test_table_names_with_views(self):
with connection.cursor() as cursor:
try:
cursor.execute(
'CREATE VIEW introspection_article_view AS SELECT headline '
'from introspection_article;')
except DatabaseError as e:
if 'insufficient privileges' in str(e):
self.fail("The test user has no CREATE VIEW privileges")
else:
raise
self.assertIn('introspection_article_view',
connection.introspection.table_names(include_views=True))
self.assertNotIn('introspection_article_view',
connection.introspection.table_names())
def test_installed_models(self):
tables = [Article._meta.db_table, Reporter._meta.db_table]
models = connection.introspection.installed_models(tables)
self.assertEqual(models, {Article, Reporter})
def test_sequence_list(self):
sequences = connection.introspection.sequence_list()
expected = {'table': Reporter._meta.db_table, 'column': 'id'}
self.assertIn(expected, sequences,
'Reporter sequence not found in sequence_list()')
def test_get_table_description_names(self):
with connection.cursor() as cursor:
desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table)
self.assertEqual([r[0] for r in desc],
[f.column for f in Reporter._meta.fields])
def test_get_table_description_types(self):
with connection.cursor() as cursor:
desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table)
self.assertEqual(
[datatype(r[1], r) for r in desc],
['AutoField' if connection.features.can_introspect_autofield else 'IntegerField',
'CharField', 'CharField', 'CharField',
'BigIntegerField' if connection.features.can_introspect_big_integer_field else 'IntegerField',
'BinaryField' if connection.features.can_introspect_binary_field else 'TextField',
'SmallIntegerField' if connection.features.can_introspect_small_integer_field else 'IntegerField']
)
# The following test fails on Oracle due to #17202 (can't correctly
# inspect the length of character columns).
@skipUnlessDBFeature('can_introspect_max_length')
def test_get_table_description_col_lengths(self):
with connection.cursor() as cursor:
desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table)
self.assertEqual(
[r[3] for r in desc if datatype(r[1], r) == 'CharField'],
[30, 30, 254]
)
@skipUnlessDBFeature('can_introspect_null')
def test_get_table_description_nullable(self):
with connection.cursor() as cursor:
desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table)
nullable_by_backend = connection.features.interprets_empty_strings_as_nulls
self.assertEqual(
[r[6] for r in desc],
[False, nullable_by_backend, nullable_by_backend, nullable_by_backend, True, True, False]
)
# Regression test for #9991 - 'real' types in postgres
@skipUnlessDBFeature('has_real_datatype')
def test_postgresql_real_type(self):
with connection.cursor() as cursor:
cursor.execute("CREATE TABLE django_ixn_real_test_table (number REAL);")
desc = connection.introspection.get_table_description(cursor, 'django_ixn_real_test_table')
cursor.execute('DROP TABLE django_ixn_real_test_table;')
self.assertEqual(datatype(desc[0][1], desc[0]), 'FloatField')
def test_get_relations(self):
with connection.cursor() as cursor:
relations = connection.introspection.get_relations(cursor, Article._meta.db_table)
# Older versions of MySQL don't have the chops to report on this stuff,
# so just skip it if no relations come back. If they do, though, we
# should test that the response is correct.
if relations:
# That's {field_index: (field_index_other_table, other_table)}
self.assertEqual(relations, {3: (0, Reporter._meta.db_table),
4: (0, Article._meta.db_table)})
@skipUnlessDBFeature('can_introspect_foreign_keys')
def test_get_key_columns(self):
with connection.cursor() as cursor:
key_columns = connection.introspection.get_key_columns(cursor, Article._meta.db_table)
self.assertEqual(
set(key_columns),
{('reporter_id', Reporter._meta.db_table, 'id'),
('response_to_id', Article._meta.db_table, 'id')})
def test_get_primary_key_column(self):
with connection.cursor() as cursor:
primary_key_column = connection.introspection.get_primary_key_column(cursor, Article._meta.db_table)
self.assertEqual(primary_key_column, 'id')
def test_get_indexes(self):
with connection.cursor() as cursor:
indexes = connection.introspection.get_indexes(cursor, Article._meta.db_table)
self.assertEqual(indexes['reporter_id'], {'unique': False, 'primary_key': False})
def test_get_indexes_multicol(self):
"""
Test that multicolumn indexes are not included in the introspection
results.
"""
with connection.cursor() as cursor:
indexes = connection.introspection.get_indexes(cursor, Reporter._meta.db_table)
self.assertNotIn('first_name', indexes)
self.assertIn('id', indexes)
def datatype(dbtype, description):
"""Helper to convert a data type into a string."""
dt = connection.introspection.get_field_type(dbtype, description)
if type(dt) is tuple:
return dt[0]
else:
return dt
| bsd-3-clause | 1,906,700,952,050,262,800 | 45.141975 | 112 | 0.637458 | false | 4.223164 | true | false | false |
Dekken/tick | tick/prox/prox_positive.py | 2 | 1824 | # License: BSD 3 clause
# -*- coding: utf8 -*-
import numpy as np
from .base import Prox
from .build.prox import ProxPositiveDouble as _ProxPositiveDouble
from .build.prox import ProxPositiveFloat as _ProxPositiveFloat
__author__ = 'Stephane Gaiffas'
dtype_map = {
np.dtype("float64"): _ProxPositiveDouble,
np.dtype("float32"): _ProxPositiveFloat
}
class ProxPositive(Prox):
"""Projection operator onto the half-space of vectors with
non-negative entries
Parameters
----------
range : `tuple` of two `int`, default=`None`
Range on which the prox is applied. If `None` then the prox is
applied on the whole vector
Attributes
----------
dtype : `{'float64', 'float32'}`
Type of the arrays used.
"""
def __init__(self, range: tuple = None, positive: bool = False):
Prox.__init__(self, range)
self._prox = self._build_cpp_prox("float64")
def _call(self, coeffs: np.ndarray, step: object, out: np.ndarray):
self._prox.call(coeffs, step, out)
def value(self, coeffs: np.ndarray):
"""
Returns the projected ``coeffs``
Parameters
----------
coeffs : `numpy.ndarray`, shape=(n_coeffs,)
Vector to be projected
Returns
-------
output : `float`
Returns 0 (as this is a projection)
"""
return self._prox.value(coeffs)
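    # Illustrative sketch (assumes the Prox base class exposes a public
    # ``call`` wrapper around ``_call``): projecting onto the non-negative
    # orthant zeroes out negative entries, e.g.
    #   ProxPositive().call(np.array([-1., 0.5, 2.]))  ->  array([0., 0.5, 2.])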
def _build_cpp_prox(self, dtype_or_object_with_dtype):
self.dtype = self._extract_dtype(dtype_or_object_with_dtype)
prox_class = self._get_typed_class(dtype_or_object_with_dtype,
dtype_map)
if self.range is None:
return prox_class(0.)
else:
return prox_class(0., self.range[0], self.range[1])
| bsd-3-clause | 5,046,152,692,943,984,000 | 27.061538 | 71 | 0.585526 | false | 3.864407 | false | false | false |
kapteyn-astro/kapteyn | doc/source/EXAMPLES/kmpfit_chi2landscape_gauss.py | 1 | 5097 | #!/usr/bin/env python
#------------------------------------------------------------
# Script compares efficiency of automatic derivatives vs
# analytical in mpfit.py
# Vog, 31 okt 2011
#------------------------------------------------------------
import numpy
from matplotlib.pyplot import figure, show, rc
from mpl_toolkits.mplot3d import axes3d
from kapteyn import kmpfit
def my_model(p, x):
#-----------------------------------------------------------------------
# This describes the model and its parameters for which we want to find
# the best fit. 'p' is a sequence of parameters (array/list/tuple).
#-----------------------------------------------------------------------
A, mu, sigma, zerolev = p
return( A * numpy.exp(-(x-mu)*(x-mu)/(2.0*sigma*sigma)) + zerolev )
def my_residuals(p, data):
#-----------------------------------------------------------------------
# This function is the function called by the fit routine in kmpfit
# It returns a weighted residual. De fit routine calculates the
# square of these values.
#-----------------------------------------------------------------------
x, y, err = data
return (y-my_model(p,x)) / err
def my_derivs(p, data, dflags):
#-----------------------------------------------------------------------
# This function is used by the fit routine to find the values for
# the explicit partial derivatives. Argument 'dflags' is a list
# with booleans. If an element is True then an explicit partial
# derivative is required.
#-----------------------------------------------------------------------
x, y, err = data
A, mu, sigma, zerolev = p
pderiv = numpy.zeros([len(p), len(x)]) # You need to create the required array
sig2 = sigma*sigma
sig3 = sig2 * sigma
xmu = x-mu
xmu2 = xmu**2
expo = numpy.exp(-xmu2/(2.0*sig2))
fx = A * expo
for i, flag in enumerate(dflags):
if flag:
if i == 0:
pderiv[0] = expo
elif i == 1:
pderiv[1] = fx * xmu/(sig2)
elif i == 2:
pderiv[2] = fx * xmu2/(sig3)
elif i == 3:
pderiv[3] = 1.0
return pderiv/-err
#return numpy.divide(pderiv, -err)
# Artificial data
N = 100
x = numpy.linspace(-5, 10, N)
truepars = [10.0, 5.0, 2.0, 0.0]
p0 = [9, 4.5, 0.8, 0]
y = my_model(truepars, x) + 1.2*numpy.random.randn(len(x))
err = 0.4*numpy.random.randn(N)
# The fit
fitobj = kmpfit.Fitter(residuals=my_residuals, deriv=my_derivs, data=(x, y, err))
try:
fitobj.fit(params0=p0)
except Exception as mes:
print("Something wrong with fit: ", mes)
raise SystemExit
print("\n\n======== Results kmpfit with explicit partial derivatives =========")
print("Params: ", fitobj.params)
print("Errors from covariance matrix : ", fitobj.xerror)
print("Uncertainties assuming reduced Chi^2=1: ", fitobj.stderr)
print("Chi^2 min: ", fitobj.chi2_min)
print("Reduced Chi^2: ", fitobj.rchi2_min)
print("Iterations: ", fitobj.niter)
print("Function ev: ", fitobj.nfev)
print("Status: ", fitobj.status)
print("Status Message:", fitobj.message)
print("Covariance:\n", fitobj.covar)
# We want to plot the chi2 landscape
# for a range of values of mu and sigma.
A = fitobj.params[1] # mu
B = fitobj.params[2] # sigma
nx = 200
ny = 200
Da1 = 15.0; Da2 = 20.0
Dy = 20.0
aa = numpy.linspace(A-Da1,A+Da2,nx)
bb = numpy.linspace(0.5,B+Dy,ny)
Z = numpy.zeros( (ny,nx) )
# Get the Chi^2 landscape.
pars = fitobj.params
i = -1
for a in aa:
i += 1
j = -1
for b in bb:
j += 1
pars[1] = a
pars[2] = b
Z[j,i] = (my_residuals(pars, (x,y,err))**2).sum()
Z /= 100000.0
XY = numpy.meshgrid(aa, bb)
# Plot the result
rc('font', size=9)
rc('legend', fontsize=8)
fig = figure(1)
frame = fig.add_subplot(1,1,1)
frame.errorbar(x, y, yerr=err, fmt='go', alpha=0.7, label="Noisy data")
frame.plot(x, my_model(truepars,x), 'r', label="True data")
frame.plot(x, my_model(fitobj.params,x), 'b', lw=2, label="Fit with kmpfit")
frame.set_xlabel("X")
frame.set_ylabel("Measurement data")
frame.set_title("Best fit parameters for Gaussian model with noisy data",
fontsize=10)
leg = frame.legend(loc=2)
# Plot chi squared landscape
fig2 = figure(2)
frame = fig2.add_subplot(1,1,1, projection='3d', azim=-31, elev=31)
frame.plot((A,),(B,),(0,), 'or', alpha=0.8)
frame.plot_surface(XY[0], XY[1], Z, color='g', alpha=0.9)
frame.set_xlabel('$X=\\mu$')
frame.set_ylabel('$Y=\\sigma$')
frame.set_zlabel('$Z=\\chi^2_{\\nu}$')
frame.set_zlim3d(Z.min(), Z.max(), alpha=0.5)
frame.set_title("Chi-squared landscape $(\\mu,\\sigma)$ of Gaussian model",
fontsize=10)
contlevs = [1.0, 0.1, 0.5, 1.5, 2.0, 5, 10, 15, 20, 100, 200]
fig3 = figure(3)
frame = fig3.add_subplot(1,1,1)
cs = frame.contour(XY[0], XY[1], Z, contlevs)
zc = cs.collections[0]
zc.set_color('red')
zc.set_linewidth(2)
frame.clabel(cs, contlevs, inline=False, fmt='%1.1f', fontsize=10, color='k')
frame.set_title("Chi-squared contours $(\\mu,\\sigma)$ of Gaussian model",
fontsize=10)
show()
| bsd-3-clause | 258,418,599,411,153,250 | 31.259494 | 82 | 0.560133 | false | 3.023132 | false | false | false |
Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/hplip/ui4/fabgrouptable.py | 1 | 2090 | # -*- coding: utf-8 -*-
#
# (c) Copyright 2003-2007 Hewlett-Packard Development Company, L.P.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Author: Don Welch
#
# Local
from base.g import *
# Qt
from PyQt4.QtCore import *
from PyQt4.QtGui import *
class FABGroupTable(QTableWidget):
def __init__(self, parent):
QTableWidget.__init__(self, parent)
self.db = None
def setDatabase(self, db):
self.db = db
def dragMoveEvent(self, e):
item = self.itemAt(e.pos())
if item is not None:
group = unicode(item.text())
if group == u'All':
e.ignore()
return
names = unicode(e.mimeData().data(u'text/plain')).split(u'|')
group_members = self.db.group_members(group)
if not group_members:
e.accept()
return
for n in names:
if n not in group_members:
e.accept()
return
e.ignore()
def dropMimeData(self, row, col, data, action):
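        # Decode the dropped names (pipe-separated) and hand them to any listeners via a
        # custom signal; returning False leaves the actual model update to the signal handler.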
items = unicode(data.data(u'text/plain')).split(u'|')
self.emit(SIGNAL("namesAddedToGroup"), row, items)
return False
def mimeTypes(self):
return QStringList([u'text/plain'])
| gpl-3.0 | 3,965,829,788,877,723,000 | 26.866667 | 74 | 0.577512 | false | 4.163347 | false | false | false |
Azure/azure-sdk-for-python | sdk/cognitiveservices/azure-cognitiveservices-language-luis/azure/cognitiveservices/language/luis/authoring/operations/_train_operations.py | 1 | 6694 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
class TrainOperations(object):
"""TrainOperations operations.
You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def train_version(
self, app_id, version_id, custom_headers=None, raw=False, **operation_config):
"""Sends a training request for a version of a specified LUIS app. This
POST request initiates a request asynchronously. To determine whether
the training request is successful, submit a GET request to get
training status. Note: The application version is not fully trained
unless all the models (intents and entities) are trained successfully
or are up to date. To verify training success, get the training status
at least once after training is complete.
:param app_id: The application ID.
:type app_id: str
:param version_id: The version ID.
:type version_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: EnqueueTrainingResponse or ClientRawResponse if raw=true
:rtype:
~azure.cognitiveservices.language.luis.authoring.models.EnqueueTrainingResponse
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseException<azure.cognitiveservices.language.luis.authoring.models.ErrorResponseException>`
"""
# Construct URL
url = self.train_version.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
'appId': self._serialize.url("app_id", app_id, 'str'),
'versionId': self._serialize.url("version_id", version_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
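        # A 202 Accepted response means the training request was queued; poll get_status for progress.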
if response.status_code not in [202]:
raise models.ErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 202:
deserialized = self._deserialize('EnqueueTrainingResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
train_version.metadata = {'url': '/apps/{appId}/versions/{versionId}/train'}
def get_status(
self, app_id, version_id, custom_headers=None, raw=False, **operation_config):
"""Gets the training status of all models (intents and entities) for the
specified LUIS app. You must call the train API to train the LUIS app
before you call this API to get training status. "appID" specifies the
LUIS app ID. "versionId" specifies the version number of the LUIS app.
For example, "0.1".
:param app_id: The application ID.
:type app_id: str
:param version_id: The version ID.
:type version_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: list or ClientRawResponse if raw=true
:rtype:
list[~azure.cognitiveservices.language.luis.authoring.models.ModelTrainingInfo]
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseException<azure.cognitiveservices.language.luis.authoring.models.ErrorResponseException>`
"""
# Construct URL
url = self.get_status.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
'appId': self._serialize.url("app_id", app_id, 'str'),
'versionId': self._serialize.url("version_id", version_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('[ModelTrainingInfo]', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_status.metadata = {'url': '/apps/{appId}/versions/{versionId}/train'}
| mit | -3,799,172,750,404,512,000 | 41.367089 | 136 | 0.64879 | false | 4.57553 | true | false | false |
joshfriend/sqlalchemy-utils | sqlalchemy_utils/observer.py | 2 | 10527 | """
This module provides a decorator function for observing changes in given
property. Internally the decorator is implemented using SQLAlchemy event
listeners. Both column properties and relationship properties can be observed.
Property observers can be used for pre-calculating aggregates and automatic
real-time data denormalization.
Simple observers
----------------
At the heart of the observer extension is the :func:`observes` decorator. You
mark some property path as being observed and the marked method will get
notified when any changes are made to given path.
Consider the following model structure:
::
class Director(Base):
__tablename__ = 'director'
id = sa.Column(sa.Integer, primary_key=True)
name = sa.Column(sa.String)
date_of_birth = sa.Column(sa.Date)
class Movie(Base):
__tablename__ = 'movie'
id = sa.Column(sa.Integer, primary_key=True)
name = sa.Column(sa.String)
director_id = sa.Column(sa.Integer, sa.ForeignKey(Director.id))
director = sa.orm.relationship(Director, backref='movies')
Now consider we want to show movies in some listing ordered by director id
first and movie id secondly. If we have many movies then using joins and
ordering by Director.name will be very slow. Here is where denormalization
and :func:`observes` comes to rescue the day. Let's add a new column called
director_name to Movie which will get automatically copied from associated
Director.
::
from sqlalchemy_utils import observes
class Movie(Base):
# same as before..
director_name = sa.Column(sa.String)
@observes('director')
def director_observer(self, director):
self.director_name = director.name
.. note::
    This example could be done much more efficiently using a compound foreign
    key from director_name, director_id to Director.name, Director.id but for
the sake of simplicity we added this as an example.
Observes vs aggregated
----------------------
:func:`observes` and :func:`.aggregates.aggregated` can be used for similar
things. However, performance-wise you should take the following things into
consideration:
* :func:`observes` always works inside the transaction and deals with objects. If
  the relationship the observer is observing has a large number of objects, it's
  better to use :func:`.aggregates.aggregated`.
* :func:`.aggregates.aggregated` always executes one additional query per
  aggregate, so in scenarios where the observed relationship has only a handful
  of objects, it's better to use :func:`observes` instead.
Example 1. Movie with many ratings
Let's say we have a Movie object with potentially thousands of ratings. In this
case we should always use :func:`.aggregates.aggregated` since iterating
through thousands of objects is slow and very memory consuming.
Example 2. Product with denormalized catalog name
Each product belongs to one catalog. Here it is natural to use :func:`observes`
for data denormalization.
Deeply nested observing
-----------------------
Consider the following model structure where Catalog has many Categories and
Category has many Products.
::
class Catalog(Base):
__tablename__ = 'catalog'
id = sa.Column(sa.Integer, primary_key=True)
product_count = sa.Column(sa.Integer, default=0)
@observes('categories.products')
def product_observer(self, products):
self.product_count = len(products)
categories = sa.orm.relationship('Category', backref='catalog')
class Category(Base):
__tablename__ = 'category'
id = sa.Column(sa.Integer, primary_key=True)
catalog_id = sa.Column(sa.Integer, sa.ForeignKey('catalog.id'))
products = sa.orm.relationship('Product', backref='category')
class Product(Base):
__tablename__ = 'product'
id = sa.Column(sa.Integer, primary_key=True)
price = sa.Column(sa.Numeric)
category_id = sa.Column(sa.Integer, sa.ForeignKey('category.id'))
:func:`observes` is smart enough to:
* Notify catalog objects of any changes in associated Product objects
* Notify catalog objects of any changes in Category objects that affect
products (for example if Category gets deleted, or a new Category is added to
Catalog with any number of Products)
::
category = Category(
products=[Product(), Product()]
)
category2 = Category(
        products=[Product()]
)
catalog = Catalog(
categories=[category, category2]
)
session.add(catalog)
session.commit()
    catalog.product_count  # 3
session.delete(category)
session.commit()
catalog.product_count # 1
"""
import sqlalchemy as sa
from collections import defaultdict, namedtuple, Iterable
import itertools
from sqlalchemy_utils.functions import getdotattr
from sqlalchemy_utils.path import AttrPath
from sqlalchemy_utils.utils import is_sequence
Callback = namedtuple('Callback', ['func', 'path', 'backref', 'fullpath'])
class PropertyObserver(object):
def __init__(self):
self.listener_args = [
(
sa.orm.mapper,
'mapper_configured',
self.update_generator_registry
),
(
sa.orm.mapper,
'after_configured',
self.gather_paths
),
(
sa.orm.session.Session,
'before_flush',
self.invoke_callbacks
)
]
self.callback_map = defaultdict(list)
# TODO: make the registry a WeakKey dict
self.generator_registry = defaultdict(list)
def remove_listeners(self):
for args in self.listener_args:
sa.event.remove(*args)
def register_listeners(self):
for args in self.listener_args:
if not sa.event.contains(*args):
sa.event.listen(*args)
def __repr__(self):
return '<PropertyObserver>'
def update_generator_registry(self, mapper, class_):
"""
Adds generator functions to generator_registry.
"""
for generator in class_.__dict__.values():
if hasattr(generator, '__observes__'):
self.generator_registry[class_].append(
generator
)
def gather_paths(self):
for class_, callbacks in self.generator_registry.items():
for callback in callbacks:
path = AttrPath(class_, callback.__observes__)
self.callback_map[class_].append(
Callback(
func=callback,
path=path,
backref=None,
fullpath=path
)
)
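                # Also register the callback on every class along the observed path,
                # pairing it with the inverted path (backref) so that a change deep in
                # the relationship chain can be traced back to its root object(s).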
for index in range(len(path)):
i = index + 1
prop_class = path[index].property.mapper.class_
self.callback_map[prop_class].append(
Callback(
func=callback,
path=path[i:],
backref=~ (path[:i]),
fullpath=path
)
)
def gather_callback_args(self, obj, callbacks):
session = sa.orm.object_session(obj)
for callback in callbacks:
backref = callback.backref
root_objs = getdotattr(obj, backref) if backref else obj
if root_objs:
if not isinstance(root_objs, Iterable):
root_objs = [root_objs]
for root_obj in root_objs:
objects = getdotattr(
root_obj,
callback.fullpath,
lambda obj: obj not in session.deleted
)
yield (
root_obj,
callback.func,
objects
)
def changed_objects(self, session):
objs = itertools.chain(session.new, session.dirty, session.deleted)
for obj in objs:
for class_, callbacks in self.callback_map.items():
if isinstance(obj, class_):
yield obj, callbacks
def invoke_callbacks(self, session, ctx, instances):
callback_args = defaultdict(lambda: defaultdict(set))
for obj, callbacks in self.changed_objects(session):
args = self.gather_callback_args(obj, callbacks)
for (root_obj, func, objects) in args:
if is_sequence(objects):
callback_args[root_obj][func] = (
callback_args[root_obj][func] | set(objects)
)
else:
callback_args[root_obj][func] = objects
for root_obj, callback_objs in callback_args.items():
for callback, objs in callback_objs.items():
callback(root_obj, objs)
observer = PropertyObserver()
def observes(path, observer=observer):
"""
    Mark a method as a property observer for the given property path. Inside
    the transaction the observer gathers all changes made in the given property
    path and feeds the changed objects to the observer-marked method at the
    before-flush phase.
::
from sqlalchemy_utils import observes
class Catalog(Base):
__tablename__ = 'catalog'
id = sa.Column(sa.Integer, primary_key=True)
category_count = sa.Column(sa.Integer, default=0)
@observes('categories')
def category_observer(self, categories):
self.category_count = len(categories)
class Category(Base):
__tablename__ = 'category'
id = sa.Column(sa.Integer, primary_key=True)
catalog_id = sa.Column(sa.Integer, sa.ForeignKey('catalog.id'))
catalog = Catalog(categories=[Category(), Category()])
session.add(catalog)
session.commit()
catalog.category_count # 2
.. versionadded: 0.28.0
:param path: Dot-notated property path, eg. 'categories.products.price'
:param observer: :meth:`PropertyObserver` object
"""
observer.register_listeners()
def wraps(func):
def wrapper(self, *args, **kwargs):
return func(self, *args, **kwargs)
wrapper.__observes__ = path
return wrapper
return wraps
| bsd-3-clause | 1,791,508,742,861,565,000 | 30.803625 | 79 | 0.604826 | false | 4.527742 | false | false | false |
danielballan/dataportal | dataportal/broker/pims_readers.py | 1 | 1797 | """This module contains "PIMS readers" (see github.com/soft-matter/pims) that
take in headers and detector aliases and return a sliceable generator of arrays."""
from pims import FramesSequence, Frame
from . import get_events
from filestore.api import retrieve
def get_images(headers, name):
"""
Load images from a detector for given Header(s).
Parameters
----------
headers : Header or list of Headers
name : string
field name (data key) of a detector
Example
-------
>>> header = DataBroker[-1]
    >>> images = get_images(header, 'my_detector_lightfield')
>>> for image in images:
# do something
"""
return Images(headers, name)
class Images(FramesSequence):
def __init__(self, headers, name):
"""
Load images from a detector for given Header(s).
Parameters
----------
headers : Header or list of Headers
name : str
field name (data key) of a detector
Example
-------
>>> header = DataBroker[-1]
>>> images = Images(header, 'my_detector_lightfield')
>>> for image in images:
# do something
"""
events = get_events(headers, [name], fill=False)
self._datum_uids = [event.data[name] for event in events]
self._len = len(self._datum_uids)
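        # One frame is retrieved up front only to learn the dtype and shape;
        # all remaining frames are fetched lazily in get_frame.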
example_frame = retrieve(self._datum_uids[0])
self._dtype = example_frame.dtype
self._shape = example_frame.shape
@property
def pixel_type(self):
return self._dtype
@property
def frame_shape(self):
return self._shape
def __len__(self):
return self._len
def get_frame(self, i):
img = retrieve(self._datum_uids[i])
return Frame(img, frame_no=i)
| bsd-3-clause | 1,673,965,228,306,933,800 | 26.227273 | 83 | 0.585977 | false | 4.150115 | false | false | false |
simon-weber/gpsoauth | gpsoauth/google.py | 1 | 1618 | """Functions to work with Google authentication structures."""
from __future__ import annotations
import base64
import hashlib
from Cryptodome.Cipher import PKCS1_OAEP
from Cryptodome.PublicKey import RSA
from Cryptodome.PublicKey.RSA import RsaKey
from .util import bytes_to_int, int_to_bytes
def key_from_b64(b64_key: bytes) -> RsaKey:
"""Extract key from base64."""
binary_key = base64.b64decode(b64_key)
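    # Binary layout: 4-byte modulus length, modulus bytes, 4-byte exponent length, exponent bytes.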
i = bytes_to_int(binary_key[:4])
mod = bytes_to_int(binary_key[4 : 4 + i])
j = bytes_to_int(binary_key[i + 4 : i + 4 + 4])
exponent = bytes_to_int(binary_key[i + 8 : i + 8 + j])
key = RSA.construct((mod, exponent))
return key
def key_to_struct(key: RsaKey) -> bytes:
"""Convert key to struct."""
mod = int_to_bytes(key.n)
exponent = int_to_bytes(key.e)
return b"\x00\x00\x00\x80" + mod + b"\x00\x00\x00\x03" + exponent
def parse_auth_response(text: str) -> dict[str, str]:
"""Parse received auth response."""
response_data = {}
for line in text.split("\n"):
if not line:
continue
key, _, val = line.partition("=")
response_data[key] = val
return response_data
def construct_signature(email: str, password: str, key: RsaKey) -> bytes:
"""Construct signature."""
signature = bytearray(b"\x00")
struct = key_to_struct(key)
signature.extend(hashlib.sha1(struct).digest()[:4])
cipher = PKCS1_OAEP.new(key)
encrypted_login = cipher.encrypt((email + "\x00" + password).encode("utf-8"))
signature.extend(encrypted_login)
return base64.urlsafe_b64encode(signature)
| mit | 5,425,258,570,688,854,000 | 25.096774 | 81 | 0.645241 | false | 3.172549 | false | false | false |
PaulMcMillan/2014_defcon_timing | hue/vis10.py | 1 | 2970 | import matplotlib.pyplot as plt
from collections import defaultdict
from itertools import combinations
from pprint import pprint
from scipy import stats, signal
import random
from itertools import chain
class QueryResponse(object):
"""Class to make it easier to work with parsed data. Works with
everything natively in nanoseconds.
"""
# This offset is a convenience that makes it easier to avoid
# losing precision if we start using floats. Pick the right value
# for you.
OFFSET = 1405000000000000000
def __init__(self, *args):
if len(args) < 3:
print args
self.host = args[0]
self.path = args[1]
self.query = self._parse(args[2])
self.response = map(self._parse, args[3:])
def _parse(self, nano_time):
""" Parse a nansecond timestamp string into nanoseconds (integer) """
# If we accidentally mix microsecond time, fix it to nano.
seconds, nanoseconds = nano_time.split('.')
return int('{}{:<9}'.format(seconds, nanoseconds)) - self.OFFSET
def total(self):
""" Time from Request to complete response. """
return self.response[-1] - self.query
def first_response(self):
""" Time from request to first response. """
return self.response[0] - self.query
def total_response(self):
""" Delta first response packet to last. """
return self.response[-1] - self.response[0]
def last_delta(self):
""" Time from second to last packet, to last response packet. """
return self.response[-1] - self.response[-2]
def response_count(self):
""" How many packets were in the response? """
return len(self.response)
def _response_deltas(self):
for x in range(len(self.response) - 1):
yield self.response[x+1] - self.response[x]
data = defaultdict(list)
with open('data/out.parsed') as f:
for line in f:
qr = QueryResponse(*line.strip().split(','))
if qr.path.startswith('/api/'):
# if qr.response_count() > 7:
data[qr.path.replace('/api/', '')[5]].append(
qr)
common_params = dict(
# bins=1000,
# range=(2.86 * 10**7, 2.87 * 10**7),
# histtype='step',
# style="ro",
alpha=0.5,
#normed=True,
)
plt.plot(signal.medfilt([x.total() for x in data['3']], kernel_size=3))
plt.show()
exit()
for key, value in data.items()[:]:
# value = value[:10000]
# for rlen in range(10):
# value2 = [x for x in value if x.response_count() == rlen]
# if len(value2):
value2 = value
print signal.medfilt([[x.response[5] - x.response[4] for x in value2]], kernel_size=3),
plt.plot(#[x.response[0] for x in value2],
'.',
label=str(key) , **common_params)
plt.legend()
try:
plt.show()
except KeyboardInterrupt:
plt.close()
| bsd-2-clause | -1,070,419,007,787,929,100 | 30.595745 | 99 | 0.590236 | false | 3.735849 | false | false | false |
mjwestcott/chatroulette | twistedchat.py | 1 | 6644 | """
twistedchat.py
A TCP chat server in the style of 'Chatroulette' using Twisted.
Part of a study in concurrency and networking in Python:
https://github.com/mjwestcott/chatroulette in which I create versions of this
server using asyncio, gevent, Tornado, and Twisted.
Some 'features':
- on connection, clients are prompted for their name, which will prefix all
of their sent messages;
- the server will notify clients when they are matched with a partner or
their partner disconnects;
- clients whose partner disconnects will be put back in a waiting list to
be matched again;
- clients in the waiting list will periodically be sent a Nietzsche aphorism
to keep them busy;
- clients can issue the following commands:
/help -> describes the service, including the commands below
/quit -> close the connection
/next -> end current chat and wait for a new partner
- the /next command will remember the rejected partners so as not to match
them together again.
Clients are expected to connect via telnet.
"""
from twisted.protocols.basic import LineReceiver
from twisted.internet.protocol import Factory
from twisted.internet import reactor
import random
HOST = 'localhost'
PORT = 12345
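# Connect with e.g. `telnet localhost 12345` once the server is running.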
with open("nietzsche.txt", "r") as f:
# We'll use these to keep waiting clients busy.
aphorisms = list(filter(bool, f.read().split("\n")))
def main():
reactor.listenTCP(PORT, ChatFactory(), interface=HOST)
reactor.run()
class ChatProtocol(LineReceiver):
delimiter = b'\n'
def __init__(self):
self.name = None
self.partner = None
self.rejected = set()
def connectionMade(self):
# Prompt user for name. Answer will be picked up in lineReceived.
self.sendLine(
b'Server: Welcome to TCP chat roulette! '
b'You will be matched with a partner.')
self.sendLine(b'Server: What is your name?')
def connectionLost(self, exception):
if self.name in self.factory.clients:
del self.factory.clients[self.name]
# Notify the client's partner, if any.
other = self.partner
if other is not None:
other.partnerDisconnected()
def lineReceived(self, data):
if self.name is None: # First interaction with user; data is user's name.
self.handleName(data)
elif data.startswith(b'/'):
self.handleCmd(data)
else:
self.messagePartner(data)
def handleName(self, name):
if name in self.factory.clients:
self.sendLine(b'Server: Sorry, that name is taken. Please choose again.')
else:
self.name = name
self.factory.clients[self.name] = self
self.sendLine(b'Server: Hello, %b. Please wait for a partner.' % self.name)
# Successful onboarding; match client.
reactor.callLater(0, self.match)
def handleCmd(self, cmd):
if cmd.startswith(b'/help'):
self.sendLine(b'Server: Welcome to TCP chat roulette! You will be matched with a partner.')
self.sendLine(b'\t/help -> display this help message')
self.sendLine(b'\t/quit -> close the connection')
self.sendLine(b'\t/next -> end current chat and wait for a new random partner')
elif cmd.startswith(b'/quit'):
self.transport.loseConnection()
elif cmd.startswith(b'/next'):
other = self.partner
if other is None:
                # Command issued when not engaged in chat with a partner.
self.sendLine(b'Server: Sorry, no partner. Please wait.')
else:
self.sendLine(b'Server: Chat over. Please wait for a new partner.')
self.rejected.add(other)
self.partner = None
reactor.callLater(0, self.match)
# Let down the partner gently.
other.partnerDisconnected()
else:
self.sendLine(b'Server: Command not recognised.')
def partnerDisconnected(self):
self.partner = None
self.sendLine(b'Server: Partner disconnected. Please wait.')
reactor.callLater(0, self.match)
def messagePartner(self, msg):
"""Send msg from the sender to their partner. Prefix the message with the
sender's name."""
assert isinstance(msg, bytes)
partner = self.partner
if partner is None:
self.sendLine(b'Server: Sorry, no partner. Please wait.')
else:
partner.sendLine(b'%b: %b' % (self.name, msg))
def match(self, tries=1):
# The global clients dict and waiting set.
waiting = self.factory.waiting
clients = self.factory.clients
# Find any clients who do not have a partner and add them to the
# waiting set.
waiting.update(c for c in clients.values() if c.partner is None)
# Find any clients in the waiting set who have disconnected and remove
# them from the waiting set. (If they are disconnected they will have
# been removed from the client list.)
waiting.intersection_update(clients.values())
if self not in waiting:
# We've been matched by our partner or we've disconnected.
return
if len(waiting) >= 2:
# Attempt to match clients.
A = self
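            # A valid partner is any other waiting client that neither side has
            # previously skipped with /next.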
wanted = waiting - A.rejected
partners = [B for B in wanted if A not in B.rejected and A != B]
if partners:
# Match succeeded.
B = partners.pop()
waiting.remove(A)
waiting.remove(B)
A.partner = B
B.partner = A
A.sendLine(b'Server: Partner found! Say hello.')
B.sendLine(b'Server: Partner found! Say hello.')
return
# Match failed. Periodically send something interesting.
if tries % 5 == 0:
aphorism = random.choice(aphorisms)
self.transport.write(
b'Server: Thanks for waiting! Here\'s Nietzsche:\n'
b'\n%b\n\n' % aphorism.encode("utf-8"))
        # Back off quadratically ((tries**2)/4) up to a maximum sleep of 20 secs.
reactor.callLater(min(20, (tries**2)/4), self.match, tries+1)
class ChatFactory(Factory):
protocol = ChatProtocol
def __init__(self):
self.clients = {} # From names (bytes) to ChatProtocol instances.
self.waiting = set() # Clients without a partner i.e. waiting to chat.
if __name__ == '__main__':
main()
| mit | 1,644,161,619,297,750,800 | 36.325843 | 103 | 0.613034 | false | 4.157697 | false | false | false |
aayushKumarJarvis/Algorithm-Implementations | Rabin_Karp/Python/shivam5992/rabin_karp.py | 27 | 1502 | '''
Rabin Karp Algorithm for pattern matching. It is a string searching algorithm
that uses hashing to find any one of a set of pattern strings in a text.
'''
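# Average running time is O(N + M); hash collisions can degrade the worst case to O(N*M).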
def rabin_karp(pat,txt):
	q = 19 # a random prime number to calculate hash values
d = 256 # number of input characters
h = 1 # hash function initial value
flag = 0
M = len(pat)
N = len(txt)
# hash function
for i in range(0,M-1):
h = (h*d)%q
# initial hash values for pattern and text
p = 0
t = 0
# hash values of pattern and first window of text
for i in range(0,M):
p = (d*p + ord(pat[i]))%q
t = (d*t + ord(txt[i]))%q
# sliding the pattern over text one by one
for i in range(0,N - M + 1):
# If the hash values of current window and the pattern matches, then only check for characters on by one
if p == t:
			for j in range(0,M):
				if txt[i+j] != pat[j]:
					break
			else:
				# for-else: reached only if the loop did not break, i.e. every character matched
				print "Pattern found at index", i
				flag = 1
# Hash value for next window of text, Remove leading digit, add trailing digit
if i < N-M:
t = (d*(t - ord(txt[i])*h) + ord(txt[i+M]))%q;
if t < 0:
t = t + q
if flag != 1:
print "No pattern found"
if __name__ == '__main__':
txt = "Driver program to test above function"
pat = "test"
rabin_karp(pat, txt)
| mit | -6,190,843,875,669,887,000 | 27.884615 | 112 | 0.519973 | false | 3.567696 | false | false | false |
MobProgramming/MobTimer.Python | Infrastructure/TimeSettingsManager.py | 1 | 1455 | class TimeSettingsManager(object):
def __init__(self):
self.minutes = 10
self.seconds = 0
self.time_change_callbacks = []
def get_time_string(self):
return "{0:0>2}:{1:0>2}".format(self.minutes, self.seconds)
def increment_minutes(self):
self.minutes += 1
self.fire_time_change_callbacks()
def decrement_minutes(self):
self.minutes -= 1
if self.minutes < 0:
self.minutes = 0
self.fire_time_change_callbacks()
def increment_seconds(self, increment = 15):
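        # Seconds wrap around within the minute; the minutes value is not carried over.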
self.seconds = (self.seconds + increment) % 60
self.fire_time_change_callbacks()
def decrement_seconds(self, decrement=15):
self.seconds = ((self.seconds - decrement) % 60)
self.fire_time_change_callbacks()
def subscribe_to_timechange(self, time_change_callback):
self.time_change_callbacks.append(time_change_callback)
self.fire_time_change_callbacks()
def fire_time_change_callbacks(self, origin_station_name=None):
for time_change_callback in self.time_change_callbacks:
if time_change_callback:
time_change_callback(self.get_time_string(), self.minutes, self.seconds, origin_station_name)
def set_countdown_time(self, minutes, seconds, origin_station_name=None):
self.minutes = minutes
self.seconds = seconds
self.fire_time_change_callbacks(origin_station_name) | mit | -4,990,201,095,449,209,000 | 35.4 | 109 | 0.643299 | false | 3.789063 | false | false | false |
akhileshpillai/treeherder | treeherder/webapp/api/urls.py | 2 | 5273 | import copy
from django.conf.urls import (include,
url)
from rest_framework import routers
from treeherder.webapp.api import (artifact,
bug,
bugzilla,
classifiedfailure,
failureline,
job_log_url,
jobs,
logslice,
note,
performance_data,
refdata,
resultset,
runnable_jobs,
text_log_summary,
text_log_summary_line)
# router for views that are bound to a project
# i.e. all those views that don't involve reference data
project_bound_router = routers.SimpleRouter()
project_bound_router.register(
r'jobs',
jobs.JobsViewSet,
base_name='jobs',
)
project_bound_router.register(
r'runnable_jobs',
runnable_jobs.RunnableJobsViewSet,
base_name='runnable_jobs',
)
project_bound_router.register(
r'resultset',
resultset.ResultSetViewSet,
base_name='resultset',
)
project_bound_router.register(
r'artifact',
artifact.ArtifactViewSet,
base_name='artifact',
)
project_bound_router.register(
r'note',
note.NoteViewSet,
base_name='note',
)
project_bound_router.register(
r'bug-job-map',
bug.BugJobMapViewSet,
base_name='bug-job-map',
)
project_bound_router.register(
r'logslice',
logslice.LogSliceView,
base_name='logslice',
)
project_bound_router.register(
r'job-log-url',
job_log_url.JobLogUrlViewSet,
base_name='job-log-url',
)
project_bound_router.register(
r'performance/data',
performance_data.PerformanceDatumViewSet,
base_name='performance-data')
project_bound_router.register(
r'performance/signatures',
performance_data.PerformanceSignatureViewSet,
base_name='performance-signatures')
project_bound_router.register(
r'performance/platforms',
performance_data.PerformancePlatformViewSet,
base_name='performance-signatures-platforms')
# this is the default router for plain restful endpoints
class ExtendedRouter(routers.DefaultRouter):
routes = copy.deepcopy(routers.DefaultRouter.routes)
routes[0].mapping[u"put"] = u"update_many"
# refdata endpoints:
default_router = ExtendedRouter()
default_router.register(r'product', refdata.ProductViewSet)
default_router.register(r'machine', refdata.MachineViewSet)
default_router.register(r'machineplatform', refdata.MachinePlatformViewSet)
default_router.register(r'buildplatform', refdata.BuildPlatformViewSet)
default_router.register(r'jobgroup', refdata.JobGroupViewSet)
default_router.register(r'jobtype', refdata.JobTypeViewSet)
default_router.register(r'repository', refdata.RepositoryViewSet)
default_router.register(r'optioncollectionhash', refdata.OptionCollectionHashViewSet,
base_name='optioncollectionhash')
default_router.register(r'failureclassification', refdata.FailureClassificationViewSet)
default_router.register(r'user', refdata.UserViewSet, base_name='user')
default_router.register(r'exclusion-profile', refdata.ExclusionProfileViewSet)
default_router.register(r'job-exclusion', refdata.JobExclusionViewSet)
default_router.register(r'matcher', refdata.MatcherViewSet)
default_router.register(r'failure-line', failureline.FailureLineViewSet,
base_name='failure-line')
default_router.register(r'classified-failure',
classifiedfailure.ClassifiedFailureViewSet,
base_name='classified-failure')
default_router.register(r'text-log-summary',
text_log_summary.TextLogSummaryViewSet,
base_name='text-log-summary')
default_router.register(r'text-log-summary-line',
text_log_summary_line.TextLogSummaryLineViewSet,
base_name='text-log-summary-line')
default_router.register(r'performance/alertsummary',
performance_data.PerformanceAlertSummaryViewSet,
base_name='performance-alert-summaries')
default_router.register(r'performance/alert',
performance_data.PerformanceAlertViewSet,
base_name='performance-alerts')
default_router.register(r'performance/framework',
performance_data.PerformanceFrameworkViewSet,
base_name='performance-frameworks')
default_router.register(r'performance/bug-template',
performance_data.PerformanceBugTemplateViewSet,
base_name='performance-bug-template')
default_router.register(r'bugzilla', bugzilla.BugzillaViewSet,
base_name='bugzilla')
default_router.register(r'jobdetail', jobs.JobDetailViewSet,
base_name='jobdetail')
urlpatterns = [
url(r'^project/(?P<project>[\w-]{0,50})/',
include(project_bound_router.urls)),
url(r'^',
include(default_router.urls)),
]
| mpl-2.0 | -6,742,364,100,357,935,000 | 35.116438 | 87 | 0.638536 | false | 4.211661 | false | false | false |
pylada/pylada-light | src/pylada/misc/relativepath.py | 1 | 10211 | ###############################
# This file is part of PyLaDa.
#
# Copyright (C) 2013 National Renewable Energy Lab
#
# PyLaDa is a high throughput computational platform for Physics. It aims to make it easier to submit
# large numbers of jobs on supercomputers. It provides a python interface to physical input, such as
# crystal structures, as well as to a number of DFT (VASP, CRYSTAL) and atomic potential programs. It
# is able to organise and launch computational jobs on PBS and SLURM.
#
# PyLaDa is free software: you can redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foundation, either version 3 of the License, or (at
# your option) any later version.
#
# PyLaDa is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
# the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along with PyLaDa. If not, see
# <http://www.gnu.org/licenses/>.
###############################
""" Defines a path given relative to another.
The object is to make it easy to switch from one computer to another, using
environment variables defined in both.
"""
class RelativePath(object):
""" Directory property which is relative to the user's home.
The path which is returned (eg __get__) is always absolute. However,
it is stored relative to the user's home, and hence can be passed from
one computer system to the next.
Unless you know what you are doing, it is best to get and set using the
``path`` attribute, starting from the current working directory if a
relative path is given, and from the '/' root if an absolute path is
given.
>>> from os import getcwd, environ
>>> getcwd()
'/home/me/inhere/'
>>> relative_directory.path = 'use/this/attribute'
>>> relative_directory.path
'/home/me/inhere/use/this/attribute'
Other descriptors have somewhat more complex behaviors. ``envvar`` is the
root directory - aka the fixed point. Changing it will simply change the
root directory.
>>> environ["SCRATCH"]
'/scratch/me/'
>>> relative_directory.envvar = "$SCRATCH"
>>> relative_directory.envvar
'/scratch/me/use/this/attribute'
Modifying ``relative`` will change the second part of the relative
directory. If a relative path is given, that relative path is used as is,
without reference to the working directory. It is an error to give an
absolute directory.
>>> relative_directory.relative = "now/here"
'/scratch/me/now/here'
>>> relative_directory.relative = "/now/here"
ValueError: Cannot set relative with absolute path.
"""
def __init__(self, path=None, envvar=None, hook=None):
""" Initializes the relative directory.
:Parameters:
path : str or None
path to store here. It can be relative to the current working
            directory, include environment variables or shorthands for user
homes. If None, will be set to `envvar`.
envvar : str or None
Fixed point wich can be understood from system to system. It should
be a shorthand to a user homer directory ("~/") or use an
environment variable ("$SCRATCH"). If None, defaults to user's
home.
hook : callable or None
This function will be called if/when the directory is changed. Note
            that it may be lost during pickling if it is not itself picklable.
"""
super(RelativePath, self).__init__()
self._relative = None
""" Private path relative to fixed point. """
self._envvar = None
""" Private envvar variable. """
self._hook = None
""" Private hook variable. """
self.path = path
""" Relative path. """
self.envvar = envvar
""" Fixed point. """
self.hook = hook
""" An object to call when the path is changed.
Callable with at most one argument.
"""
@property
def relative(self):
""" Path relative to fixed point. """
return self._relative if self._relative is not None else ""
@relative.setter
def relative(self, value):
""" Path relative to fixed point. """
from os.path import expandvars, expanduser
if value is None:
value = ""
value = expandvars(expanduser(value.rstrip().lstrip()))
        assert not value or value[0] != '/', ValueError('Cannot set "relative" attribute with absolute path.')
self._relative = value if len(value) else None
self.hook(self.path)
@property
def envvar(self):
""" Fixed point for relative directory. """
from os.path import expanduser, expandvars, normpath
from . import local_path
from .. import global_root
if self._envvar is None:
if global_root is None:
return '/'
if '$' not in global_root and '~' not in global_root:
return normpath(global_root)
# Need to figure it out.
try:
local_path(global_root).ensure(dir=True)
return str(local_path(global_root))
except OSError as e:
raise IOError('Could not figure out directory {0}.\n'
'Caught error OSError {1.errno}: {1.message}'
.format(global_root, e))
return normpath(expandvars(expanduser(self._envvar)))
@envvar.setter
def envvar(self, value):
path = self.path if self._relative is not None else None
if value is None:
self._envvar = None
elif len(value.rstrip().lstrip()) == 0:
self._envvar = None
else:
self._envvar = value
if path is not None:
self.path = path
self.hook(self.path)
@property
def path(self):
""" Returns absolute path, including fixed-point. """
from os.path import join, normpath
if self._relative is None:
return self.envvar
return normpath(join(self.envvar, self._relative))
@path.setter
def path(self, value):
from os.path import relpath, expandvars, expanduser, abspath
from os import getcwd
if value is None:
value = getcwd()
if isinstance(value, tuple) and len(value) == 2:
self.envvar = value[0]
self.relative = value[1]
return
if len(value.rstrip().lstrip()) == 0:
value = getcwd()
# This is a python bug where things don't work out if the root path is '/'.
# Seems corrected after 2.7.2
if self.envvar == '/':
self._relative = abspath(expanduser(expandvars(value)))[1:]
else:
self._relative = relpath(expanduser(expandvars(value)), self.envvar)
self.hook(self.path)
@property
def unexpanded(self):
""" Unexpanded path (eg with envvar as is). """
from os.path import join
from .. import global_root
e = global_root if self._envvar is None else self._envvar
return e if self._relative is None else join(e, self._relative)
@property
def hook(self):
from inspect import ismethod
from sys import version_info
if version_info[0] < 3:
from inspect import getargspec
else:
from inspect import getfullargspec as getargspec
if self._hook is None:
return lambda x: None
N = len(getargspec(self._hook).args)
if ismethod(self._hook):
N -= 1
if N == 0:
return lambda x: self._hook()
return self._hook
@hook.setter
def hook(self, value):
from sys import version_info
        from inspect import ismethod, isfunction
if version_info[0] == 2:
from inspect import getargspec
else:
from inspect import getfullargspec as getargspec
if value is None:
self._hook = None
return
assert ismethod(value) or isfunction(value), \
TypeError("hook is not a function or bound method.")
N = len(getargspec(value)[0])
if ismethod(value):
if getattr(value, '__self__', getattr(value, 'im_self', None)) is None:
raise TypeError("hook callable cannot be an unbound method.")
N -= 1
assert N < 2, TypeError("hook callable cannot have more than one argument.")
self._hook = value
def __getstate__(self):
""" Saves state.
If hook was not pickleable, then it will not be saved appropriately.
"""
from pickle import dumps
try:
dumps(self._hook)
except:
return self._relative, self._envvar
else:
return self._relative, self._envvar, self._hook
def __setstate__(self, args):
""" Resets state.
If hook was not pickleable, then it will not be reset.
"""
if len(args) == 3:
self._relative, self._envvar, self._hook = args
else:
self._relative, self._envvar = args
def set(self, path=None, envvar=None):
""" Sets path and envvar.
Used by repr.
"""
hook = self._hook
self._hook = None
self.envvar = envvar
self.path = path
self._hook = hook
self.hook(self.path)
def repr(self):
""" Makes this instance somewhat representable.
Since hook cannot be represented in most cases, and is most-likely set
on initialization, this method uses ``set`` to get away with
representability.
"""
return "{0}, {1}".format(repr(self._envvar), repr(self._relative))
| gpl-3.0 | -7,389,663,857,729,356,000 | 36.40293 | 103 | 0.58956 | false | 4.420346 | false | false | false |
18F/regulations-parser | regparser/commands/versions.py | 2 | 4244 | import logging
import re
from collections import namedtuple
from operator import attrgetter, itemgetter
import click
from regparser.federalregister import fetch_notice_json
from regparser.history.versions import Version
from regparser.index import dependency, entry
logger = logging.getLogger(__name__)
def fetch_version_ids(cfr_title, cfr_part, notice_dir):
"""Returns a list of version ids after looking them up between the federal
register and the local filesystem"""
present_ids = [v.path[-1] for v in notice_dir.sub_entries()]
final_rules = fetch_notice_json(cfr_title, cfr_part, only_final=True)
version_ids = []
pair_fn = itemgetter('document_number', 'full_text_xml_url')
for fr_id, xml_url in map(pair_fn, final_rules):
if xml_url:
# Version_id concatenated with the date
regex = re.compile(re.escape(fr_id) + r"_\d{8}")
split_entries = [vid for vid in present_ids if regex.match(vid)]
# Add either the split entries or the original version_id
version_ids.extend(split_entries or [fr_id])
else:
logger.warning("No XML for %s; skipping", fr_id)
return version_ids
Delay = namedtuple('Delay', ['by', 'until'])
def delays(xmls):
"""Find all changes to effective dates. Return the latest change to each
version of the regulation"""
delay_map = {}
# Sort so that later modifications override earlier ones
for delayer in sorted(xmls, key=attrgetter('published')):
for delay in delayer.delays():
for delayed in filter(delay.modifies_notice_xml, xmls):
delay_map[delayed.version_id] = Delay(delayer.version_id,
delay.delayed_until)
return delay_map
def generate_dependencies(version_dir, version_ids, delays_by_version):
"""Creates a dependency graph and adds all dependencies for input xml and
delays between notices"""
notice_dir = entry.Notice()
deps = dependency.Graph()
for version_id in version_ids:
deps.add(version_dir / version_id, notice_dir / version_id)
for delayed, delay in delays_by_version.items():
deps.add(version_dir / delayed, notice_dir / delay.by)
return deps
def write_to_disk(xml, version_entry, delay=None):
"""Serialize a Version instance to disk"""
effective = xml.effective if delay is None else delay.until
if effective:
version = Version(xml.version_id, effective, xml.fr_citation)
version_entry.write(version)
else:
logger.warning("No effective date for this rule: %s. Skipping",
xml.version_id)
def write_if_needed(cfr_title, cfr_part, version_ids, xmls, delays_by_version):
"""All versions which are stale (either because they were never create or
because their dependency has been updated) are written to disk. If any
dependency is missing, an exception is raised"""
version_dir = entry.FinalVersion(cfr_title, cfr_part)
deps = generate_dependencies(version_dir, version_ids, delays_by_version)
for version_id in version_ids:
version_entry = version_dir / version_id
deps.validate_for(version_entry)
if deps.is_stale(version_entry):
write_to_disk(xmls[version_id], version_entry,
delays_by_version.get(version_id))
@click.command()
@click.argument('cfr_title', type=int)
@click.argument('cfr_part', type=int)
def versions(cfr_title, cfr_part):
"""Find all Versions for a regulation. Accounts for locally modified
notice XML and rules modifying the effective date of versions of a
regulation"""
cfr_title, cfr_part = str(cfr_title), str(cfr_part)
notice_dir = entry.Notice()
logger.info("Finding versions")
version_ids = fetch_version_ids(cfr_title, cfr_part, notice_dir)
logger.debug("Versions found: %r", version_ids)
version_entries = [notice_dir / version_id for version_id in version_ids]
# notices keyed by version_id
xmls = {e.path[-1]: e.read() for e in version_entries if e.exists()}
delays_by_version = delays(xmls.values())
write_if_needed(cfr_title, cfr_part, version_ids, xmls, delays_by_version)
| cc0-1.0 | 2,874,768,007,242,297,300 | 38.663551 | 79 | 0.671065 | false | 3.66494 | false | false | false |
bbfamily/abu | abupy/AlphaBu/ABuPickStockExecute.py | 1 | 1784 | # -*- encoding:utf-8 -*-
"""
    Wrap the stock-picking worker, taking care of the pre- and post-processing work around it
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from .ABuPickStockWorker import AbuPickStockWorker
from ..CoreBu.ABuEnvProcess import add_process_env_sig
from ..MarketBu.ABuMarket import split_k_market
from ..TradeBu.ABuKLManager import AbuKLManager
from ..CoreBu.ABuFixes import ThreadPoolExecutor
__author__ = '阿布'
__weixin__ = 'abu_quant'
@add_process_env_sig
def do_pick_stock_work(choice_symbols, benchmark, capital, stock_pickers):
"""
    Wrap AbuPickStockWorker to perform stock picking
    :param choice_symbols: initial sequence of candidate trading symbols
    :param benchmark: trading benchmark object, an AbuBenchmark instance
    :param capital: capital object, an AbuCapital instance
    :param stock_pickers: sequence of stock-picking factors
:return:
"""
kl_pd_manager = AbuKLManager(benchmark, capital)
stock_pick = AbuPickStockWorker(capital, benchmark, kl_pd_manager, choice_symbols=choice_symbols,
stock_pickers=stock_pickers)
stock_pick.fit()
return stock_pick.choice_symbols
@add_process_env_sig
def do_pick_stock_thread_work(choice_symbols, benchmark, capital, stock_pickers, n_thread):
"""包装AbuPickStockWorker启动线程进行选股"""
result = []
def when_thread_done(r):
result.extend(r.result())
with ThreadPoolExecutor(max_workers=n_thread) as pool:
thread_symbols = split_k_market(n_thread, market_symbols=choice_symbols)
for symbols in thread_symbols:
future_result = pool.submit(do_pick_stock_work, symbols, benchmark, capital, stock_pickers)
future_result.add_done_callback(when_thread_done)
return result
| gpl-3.0 | 3,025,747,498,661,473,300 | 31.392157 | 103 | 0.705206 | false | 2.848276 | false | false | false |
hmoco/osf.io | framework/auth/views.py | 1 | 41778 | # -*- coding: utf-8 -*-
import furl
import httplib as http
import urllib
import markupsafe
from django.utils import timezone
from flask import request
import uuid
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from modularodm.exceptions import ValidationError
from modularodm.exceptions import ValidationValueError
from framework import forms, sentry, status
from framework import auth as framework_auth
from framework.auth import exceptions
from framework.auth import cas, campaigns
from framework.auth import logout as osf_logout
from framework.auth import get_user
from framework.auth.exceptions import DuplicateEmailError, ExpiredTokenError, InvalidTokenError
from framework.auth.core import generate_verification_key
from framework.auth.decorators import block_bing_preview, collect_auth, must_be_logged_in
from framework.auth.forms import ResendConfirmationForm, ForgotPasswordForm, ResetPasswordForm
from framework.auth.utils import ensure_external_identity_uniqueness, validate_recaptcha
from framework.exceptions import HTTPError
from framework.flask import redirect # VOL-aware redirect
from framework.sessions.utils import remove_sessions_for_user, remove_session
from framework.sessions import get_session
from website import settings, mails, language
from website.models import User
from website.util import web_url_for
from website.util.time import throttle_period_expired
from website.util.sanitize import strip_html
@block_bing_preview
@collect_auth
def reset_password_get(auth, uid=None, token=None):
"""
View for user to land on the reset password page.
    HTTP Method: GET
:param auth: the authentication state
:param uid: the user id
:param token: the token in verification key
:return
:raises: HTTPError(http.BAD_REQUEST) if verification key for the user is invalid, has expired or was used
"""
# if users are logged in, log them out and redirect back to this page
if auth.logged_in:
return auth_logout(redirect_url=request.url)
# Check if request bears a valid pair of `uid` and `token`
user_obj = User.load(uid)
if not (user_obj and user_obj.verify_password_token(token=token)):
error_data = {
'message_short': 'Invalid Request.',
'message_long': 'The requested URL is invalid, has expired, or was already used',
}
raise HTTPError(http.BAD_REQUEST, data=error_data)
# refresh the verification key (v2)
user_obj.verification_key_v2 = generate_verification_key(verification_type='password')
user_obj.save()
return {
'uid': user_obj._id,
'token': user_obj.verification_key_v2['token'],
}
def reset_password_post(uid=None, token=None):
"""
View for user to submit reset password form.
HTTP Method: POST
:param uid: the user id
:param token: the token in verification key
:return:
:raises: HTTPError(http.BAD_REQUEST) if verification key for the user is invalid, has expired or was used
"""
form = ResetPasswordForm(request.form)
# Check if request bears a valid pair of `uid` and `token`
user_obj = User.load(uid)
if not (user_obj and user_obj.verify_password_token(token=token)):
error_data = {
'message_short': 'Invalid Request.',
'message_long': 'The requested URL is invalid, has expired, or was already used',
}
raise HTTPError(http.BAD_REQUEST, data=error_data)
if not form.validate():
# Don't go anywhere
forms.push_errors_to_status(form.errors)
else:
# clear verification key (v2)
user_obj.verification_key_v2 = {}
# new verification key (v1) for CAS
user_obj.verification_key = generate_verification_key(verification_type=None)
try:
user_obj.set_password(form.password.data)
user_obj.save()
except exceptions.ChangePasswordError as error:
for message in error.messages:
status.push_status_message(message, kind='warning', trust=False)
else:
status.push_status_message('Password reset', kind='success', trust=False)
# redirect to CAS and authenticate the user automatically with one-time verification key.
return redirect(cas.get_login_url(
web_url_for('user_account', _absolute=True),
username=user_obj.username,
verification_key=user_obj.verification_key
))
return {
'uid': user_obj._id,
'token': user_obj.verification_key_v2['token'],
}
@collect_auth
def forgot_password_get(auth):
"""
View for user to land on the forgot password page.
HTTP Method: GET
:param auth: the authentication context
:return
"""
# if users are logged in, log them out and redirect back to this page
if auth.logged_in:
return auth_logout(redirect_url=request.url)
return {}
def forgot_password_post():
"""
View for user to submit forgot password form.
HTTP Method: POST
:return {}
"""
form = ForgotPasswordForm(request.form, prefix='forgot_password')
if not form.validate():
# Don't go anywhere
forms.push_errors_to_status(form.errors)
else:
email = form.email.data
status_message = ('If there is an OSF account associated with {0}, an email with instructions on how to '
'reset the OSF password has been sent to {0}. If you do not receive an email and believe '
'you should have, please contact OSF Support. ').format(email)
kind = 'success'
# check if the user exists
user_obj = get_user(email=email)
if user_obj:
# rate limit forgot_password_post
if not throttle_period_expired(user_obj.email_last_sent, settings.SEND_EMAIL_THROTTLE):
status_message = 'You have recently requested to change your password. Please wait a few minutes ' \
'before trying again.'
kind = 'error'
else:
# TODO [OSF-6673]: Use the feature in [OSF-6998] for user to resend claim email.
# if the user account is not claimed yet
if (user_obj.is_invited and
user_obj.unclaimed_records and
not user_obj.date_last_login and
not user_obj.is_claimed and
not user_obj.is_registered):
status_message = 'You cannot reset password on this account. Please contact OSF Support.'
kind = 'error'
else:
# new random verification key (v2)
user_obj.verification_key_v2 = generate_verification_key(verification_type='password')
user_obj.email_last_sent = timezone.now()
user_obj.save()
reset_link = furl.urljoin(
settings.DOMAIN,
web_url_for(
'reset_password_get',
uid=user_obj._id,
token=user_obj.verification_key_v2['token']
)
)
mails.send_mail(
to_addr=email,
mail=mails.FORGOT_PASSWORD,
reset_link=reset_link
)
status.push_status_message(status_message, kind=kind, trust=False)
return {}
def login_and_register_handler(auth, login=True, campaign=None, next_url=None, logout=None):
"""
Non-view helper to handle `login` and `register` requests.
:param auth: the auth context
:param login: `True` if `GET /login`, `False` if `GET /register`
:param campaign: a target campaign defined in `auth.campaigns`
:param next_url: the service url for CAS login or redirect url for OSF
:param logout: used only for `claim_user_registered`
:return: data object that contains actions for `auth_register` and `auth_login`
:raises: http.BAD_REQUEST
"""
# Only allow redirects which are relative root or full domain. Disallows external redirects.
if next_url and not validate_next_url(next_url):
raise HTTPError(http.BAD_REQUEST)
data = {
'status_code': http.FOUND if login else http.OK,
'next_url': next_url,
'campaign': None,
'must_login_warning': False,
}
# login or register with campaign parameter
if campaign:
if validate_campaign(campaign):
# GET `/register` or '/login` with `campaign=institution`
# unlike other campaigns, institution login serves as an alternative for authentication
if campaign == 'institution':
next_url = web_url_for('dashboard', _absolute=True)
data['status_code'] = http.FOUND
if auth.logged_in:
data['next_url'] = next_url
else:
data['next_url'] = cas.get_login_url(next_url, campaign='institution')
# for non-institution campaigns
else:
destination = next_url if next_url else campaigns.campaign_url_for(campaign)
if auth.logged_in:
# if user is already logged in, go to the campaign landing page
data['status_code'] = http.FOUND
data['next_url'] = destination
else:
# if user is logged out, go to the osf register page with campaign context
if login:
# `GET /login?campaign=...`
data['next_url'] = web_url_for('auth_register', campaign=campaign, next=destination)
else:
# `GET /register?campaign=...`
data['campaign'] = campaign
if campaigns.is_proxy_login(campaign):
data['next_url'] = web_url_for(
'auth_login',
next=destination,
_absolute=True
)
else:
data['next_url'] = destination
else:
# invalid campaign, inform sentry and redirect to non-campaign sign up or sign in
redirect_view = 'auth_login' if login else 'auth_register'
data['status_code'] = http.FOUND
data['next_url'] = web_url_for(redirect_view, campaigns=None, next=next_url)
data['campaign'] = None
sentry.log_message(
'{} is not a valid campaign. Please add it if this is a new one'.format(campaign)
)
# login or register with next parameter
elif next_url:
if logout:
# handle `claim_user_registered`
data['next_url'] = next_url
if auth.logged_in:
# log user out and come back
data['status_code'] = 'auth_logout'
else:
# after logout, land on the register page with "must_login" warning
data['status_code'] = http.OK
data['must_login_warning'] = True
elif auth.logged_in:
# if user is already logged in, redirect to `next_url`
data['status_code'] = http.FOUND
data['next_url'] = next_url
elif login:
# `/login?next=next_url`: go to CAS login page with current request url as service url
data['status_code'] = http.FOUND
data['next_url'] = cas.get_login_url(request.url)
else:
# `/register?next=next_url`: land on OSF register page with request url as next url
data['status_code'] = http.OK
data['next_url'] = request.url
else:
# `/login/` or `/register/` without any parameter
if auth.logged_in:
data['status_code'] = http.FOUND
data['next_url'] = web_url_for('dashboard', _absolute=True)
return data
@collect_auth
def auth_login(auth):
"""
View (no template) for OSF Login.
Redirect user based on `data` returned from `login_and_register_handler`.
    `/login` only takes a valid campaign, a valid next, or no query parameter
    `login_and_register_handler()` handles the following cases:
        if campaign and logged in, go to campaign landing page (or valid next_url if present)
        if campaign and logged out, go to campaign register page (with next_url if present)
if next_url and logged in, go to next url
if next_url and logged out, go to cas login page with current request url as service parameter
if none, go to `/dashboard` which is decorated by `@must_be_logged_in`
:param auth: the auth context
:return: redirects
"""
campaign = request.args.get('campaign')
next_url = request.args.get('next')
data = login_and_register_handler(auth, login=True, campaign=campaign, next_url=next_url)
if data['status_code'] == http.FOUND:
return redirect(data['next_url'])
@collect_auth
def auth_register(auth):
"""
View for OSF register. Land on the register page, redirect or go to `auth_logout`
depending on `data` returned by `login_and_register_handler`.
`/register` only takes a valid campaign, a valid next, the logout flag or no query parameter
`login_and_register_handler()` handles the following cases:
        if campaign and logged in, go to campaign landing page (or valid next_url if present)
        if campaign and logged out, go to campaign register page (with next_url if present)
if next_url and logged in, go to next url
if next_url and logged out, go to cas login page with current request url as service parameter
if next_url and logout flag, log user out first and then go to the next_url
if none, go to `/dashboard` which is decorated by `@must_be_logged_in`
:param auth: the auth context
:return: land, redirect or `auth_logout`
:raise: http.BAD_REQUEST
"""
context = {}
# a target campaign in `auth.campaigns`
campaign = request.args.get('campaign')
# the service url for CAS login or redirect url for OSF
next_url = request.args.get('next')
# used only for `claim_user_registered`
logout = request.args.get('logout')
# logout must have next_url
if logout and not next_url:
raise HTTPError(http.BAD_REQUEST)
data = login_and_register_handler(auth, login=False, campaign=campaign, next_url=next_url, logout=logout)
# land on register page
if data['status_code'] == http.OK:
if data['must_login_warning']:
status.push_status_message(language.MUST_LOGIN, trust=False)
destination = cas.get_login_url(data['next_url'])
    # "Already have an account?" link
context['non_institution_login_url'] = destination
# "Sign In" button in navigation bar, overwrite the default value set in routes.py
context['login_url'] = destination
# "Login through your institution" link
context['institution_login_url'] = cas.get_login_url(data['next_url'], campaign='institution')
context['campaign'] = data['campaign']
return context, http.OK
# redirect to url
elif data['status_code'] == http.FOUND:
return redirect(data['next_url'])
# go to other views
elif data['status_code'] == 'auth_logout':
return auth_logout(redirect_url=data['next_url'])
raise HTTPError(http.BAD_REQUEST)
@collect_auth
def auth_logout(auth, redirect_url=None, next_url=None):
"""
Log out, delete current session and remove OSF cookie.
If next url is valid and auth is logged in, redirect to CAS logout endpoint with the current request url as service.
If next url is valid and auth is logged out, redirect directly to the next url.
Otherwise, redirect to CAS logout or login endpoint with redirect url as service.
    The CAS logout endpoint clears sessions and cookies for CAS and Shibboleth.
HTTP Method: GET
Note 1: OSF tells CAS where it wants to be redirected back after successful logout. However, CAS logout flow may not
respect this url if user is authenticated through remote identity provider.
Note 2: The name of the query parameter is `next`, `next_url` is used to avoid python reserved word.
:param auth: the authentication context
:param redirect_url: url to DIRECTLY redirect after CAS logout, default is `OSF/goodbye`
:param next_url: url to redirect after OSF logout, which is after CAS logout
:return: the response
"""
# For `?next=`:
# takes priority
# the url must be a valid OSF next url,
# the full request url is set to CAS service url,
# does not support `reauth`
# For `?redirect_url=`:
# the url must be valid CAS service url
# the redirect url is set to CAS service url.
# support `reauth`
# logout/?next=<an OSF verified next url>
next_url = next_url or request.args.get('next', None)
if next_url and validate_next_url(next_url):
cas_logout_endpoint = cas.get_logout_url(request.url)
if auth.logged_in:
resp = redirect(cas_logout_endpoint)
else:
resp = redirect(next_url)
# logout/ or logout/?redirect_url=<a CAS verified redirect url>
else:
redirect_url = redirect_url or request.args.get('redirect_url') or web_url_for('goodbye', _absolute=True)
# set redirection to CAS log out (or log in if `reauth` is present)
if 'reauth' in request.args:
cas_endpoint = cas.get_login_url(redirect_url)
else:
cas_endpoint = cas.get_logout_url(redirect_url)
resp = redirect(cas_endpoint)
# perform OSF logout
osf_logout()
# set response to delete OSF cookie
resp.delete_cookie(settings.COOKIE_NAME, domain=settings.OSF_COOKIE_DOMAIN)
return resp
def auth_email_logout(token, user):
"""
When a user is adding an email or merging an account, add the email to the user and log them out.
"""
redirect_url = cas.get_logout_url(service_url=cas.get_login_url(service_url=web_url_for('index', _absolute=True)))
try:
unconfirmed_email = user.get_unconfirmed_email_for_token(token)
except InvalidTokenError:
raise HTTPError(http.BAD_REQUEST, data={
'message_short': 'Bad token',
'message_long': 'The provided token is invalid.'
})
except ExpiredTokenError:
status.push_status_message('The private link you used is expired.')
raise HTTPError(http.BAD_REQUEST, data={
'message_short': 'Expired link',
'message_long': 'The private link you used is expired.'
})
try:
user_merge = User.find_one(Q('emails', 'eq', unconfirmed_email))
except NoResultsFound:
user_merge = False
if user_merge:
remove_sessions_for_user(user_merge)
user.email_verifications[token]['confirmed'] = True
user.save()
remove_sessions_for_user(user)
resp = redirect(redirect_url)
resp.delete_cookie(settings.COOKIE_NAME, domain=settings.OSF_COOKIE_DOMAIN)
return resp
@block_bing_preview
@collect_auth
def external_login_confirm_email_get(auth, uid, token):
"""
View for email confirmation links when user first login through external identity provider.
HTTP Method: GET
When users click the confirm link, they are expected not to be logged in. If not, they will be logged out first and
redirected back to this view. After OSF verifies the link and performs all actions, they will be automatically
logged in through CAS and redirected back to this view again being authenticated.
:param auth: the auth context
:param uid: the user's primary key
:param token: the verification token
"""
user = User.load(uid)
if not user:
raise HTTPError(http.BAD_REQUEST)
destination = request.args.get('destination')
if not destination:
raise HTTPError(http.BAD_REQUEST)
# if user is already logged in
if auth and auth.user:
# if it is a wrong user
if auth.user._id != user._id:
return auth_logout(redirect_url=request.url)
# if it is the expected user
new = request.args.get('new', None)
if destination in campaigns.get_campaigns():
# external domain takes priority
campaign_url = campaigns.external_campaign_url_for(destination)
if not campaign_url:
campaign_url = campaigns.campaign_url_for(destination)
return redirect(campaign_url)
if new:
status.push_status_message(language.WELCOME_MESSAGE, kind='default', jumbotron=True, trust=True)
return redirect(web_url_for('dashboard'))
# token is invalid
if token not in user.email_verifications:
raise HTTPError(http.BAD_REQUEST)
verification = user.email_verifications[token]
email = verification['email']
provider = verification['external_identity'].keys()[0]
provider_id = verification['external_identity'][provider].keys()[0]
# wrong provider
if provider not in user.external_identity:
raise HTTPError(http.BAD_REQUEST)
external_status = user.external_identity[provider][provider_id]
try:
ensure_external_identity_uniqueness(provider, provider_id, user)
except ValidationError as e:
raise HTTPError(http.FORBIDDEN, e.message)
if not user.is_registered:
user.register(email)
if email.lower() not in user.emails:
user.emails.append(email.lower())
user.date_last_logged_in = timezone.now()
user.external_identity[provider][provider_id] = 'VERIFIED'
user.social[provider.lower()] = provider_id
del user.email_verifications[token]
user.verification_key = generate_verification_key()
user.save()
service_url = request.url
if external_status == 'CREATE':
mails.send_mail(
to_addr=user.username,
mail=mails.WELCOME,
mimetype='html',
user=user
)
service_url += '&{}'.format(urllib.urlencode({'new': 'true'}))
elif external_status == 'LINK':
mails.send_mail(
user=user,
to_addr=user.username,
mail=mails.EXTERNAL_LOGIN_LINK_SUCCESS,
external_id_provider=provider,
)
# redirect to CAS and authenticate the user with the verification key
return redirect(cas.get_login_url(
service_url,
username=user.username,
verification_key=user.verification_key
))
@block_bing_preview
@collect_auth
def confirm_email_get(token, auth=None, **kwargs):
"""
View for email confirmation links. Authenticates and redirects to user settings page if confirmation is successful,
otherwise shows an "Expired Link" error.
HTTP Method: GET
"""
user = User.load(kwargs['uid'])
is_merge = 'confirm_merge' in request.args
is_initial_confirmation = not user.date_confirmed
log_out = request.args.get('logout', None)
if user is None:
raise HTTPError(http.NOT_FOUND)
# if the user is merging or adding an email (they already are an osf user)
if log_out:
return auth_email_logout(token, user)
if auth and auth.user and (auth.user._id == user._id or auth.user._id == user.merged_by._id):
if not is_merge:
# determine if the user registered through a campaign
campaign = campaigns.campaign_for_user(user)
if campaign:
return redirect(campaigns.campaign_url_for(campaign))
# go to home page with push notification
if len(auth.user.emails) == 1 and len(auth.user.email_verifications) == 0:
status.push_status_message(language.WELCOME_MESSAGE, kind='default', jumbotron=True, trust=True)
if token in auth.user.email_verifications:
status.push_status_message(language.CONFIRM_ALTERNATE_EMAIL_ERROR, kind='danger', trust=True)
return redirect(web_url_for('index'))
status.push_status_message(language.MERGE_COMPLETE, kind='success', trust=False)
return redirect(web_url_for('user_account'))
try:
user.confirm_email(token, merge=is_merge)
except exceptions.EmailConfirmTokenError as e:
raise HTTPError(http.BAD_REQUEST, data={
'message_short': e.message_short,
'message_long': e.message_long
})
if is_initial_confirmation:
user.update_date_last_login()
user.save()
# send out our welcome message
mails.send_mail(
to_addr=user.username,
mail=mails.WELCOME,
mimetype='html',
user=user
)
# new random verification key, allows CAS to authenticate the user w/o password one-time only.
user.verification_key = generate_verification_key()
user.save()
# redirect to CAS and authenticate the user with a verification key.
return redirect(cas.get_login_url(
request.url,
username=user.username,
verification_key=user.verification_key
))
@must_be_logged_in
def unconfirmed_email_remove(auth=None):
"""
Called at login if user cancels their merge or email add.
HTTP Method: DELETE
"""
user = auth.user
json_body = request.get_json()
try:
given_token = json_body['token']
except KeyError:
raise HTTPError(http.BAD_REQUEST, data={
'message_short': 'Missing token',
'message_long': 'Must provide a token'
})
user.clean_email_verifications(given_token=given_token)
user.save()
return {
'status': 'success',
'removed_email': json_body['address']
}, 200
@must_be_logged_in
def unconfirmed_email_add(auth=None):
"""
Called at login if user confirms their merge or email add.
HTTP Method: PUT
"""
user = auth.user
json_body = request.get_json()
try:
token = json_body['token']
except KeyError:
raise HTTPError(http.BAD_REQUEST, data={
'message_short': 'Missing token',
'message_long': 'Must provide a token'
})
try:
user.confirm_email(token, merge=True)
except exceptions.InvalidTokenError:
        raise HTTPError(http.BAD_REQUEST, data={
'message_short': 'Invalid user token',
'message_long': 'The user token is invalid'
})
except exceptions.EmailConfirmTokenError as e:
raise HTTPError(http.BAD_REQUEST, data={
'message_short': e.message_short,
'message_long': e.message_long
})
user.save()
return {
'status': 'success',
'removed_email': json_body['address']
}, 200
def send_confirm_email(user, email, renew=False, external_id_provider=None, external_id=None, destination=None):
"""
Sends `user` a confirmation to the given `email`.
:param user: the user
:param email: the email
:param renew: refresh the token
:param external_id_provider: user's external id provider
:param external_id: user's external id
:param destination: the destination page to redirect after confirmation
:return:
:raises: KeyError if user does not have a confirmation token for the given email.
"""
confirmation_url = user.get_confirmation_url(
email,
external=True,
force=True,
renew=renew,
external_id_provider=external_id_provider,
destination=destination
)
try:
merge_target = User.find_one(Q('emails', 'eq', email))
except NoResultsFound:
merge_target = None
campaign = campaigns.campaign_for_user(user)
branded_preprints_provider = None
    # Choose the appropriate email template to use, and add the existing_user flag if this is a merge or an email add.
if external_id_provider and external_id:
# First time login through external identity provider, link or create an OSF account confirmation
if user.external_identity[external_id_provider][external_id] == 'CREATE':
mail_template = mails.EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE
elif user.external_identity[external_id_provider][external_id] == 'LINK':
mail_template = mails.EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK
elif merge_target:
# Merge account confirmation
mail_template = mails.CONFIRM_MERGE
confirmation_url = '{}?logout=1'.format(confirmation_url)
elif user.is_active:
# Add email confirmation
mail_template = mails.CONFIRM_EMAIL
confirmation_url = '{}?logout=1'.format(confirmation_url)
elif campaign:
# Account creation confirmation: from campaign
mail_template = campaigns.email_template_for_campaign(campaign)
if campaigns.is_proxy_login(campaign) and campaigns.get_service_provider(campaign) != 'OSF':
branded_preprints_provider = campaigns.get_service_provider(campaign)
else:
# Account creation confirmation: from OSF
mail_template = mails.INITIAL_CONFIRM_EMAIL
mails.send_mail(
email,
mail_template,
'plain',
user=user,
confirmation_url=confirmation_url,
email=email,
merge_target=merge_target,
external_id_provider=external_id_provider,
branded_preprints_provider=branded_preprints_provider
)
def register_user(**kwargs):
"""
Register new user account.
HTTP Method: POST
:param-json str email1:
:param-json str email2:
:param-json str password:
:param-json str fullName:
:param-json str campaign:
:raises: HTTPError(http.BAD_REQUEST) if validation fails or user already exists
"""
    # Verify that the email addresses match.
    # Note: Both `landing.mako` and `register.mako` already have this check on the form. Users cannot submit the form
    # if the emails do not match. However, this check should not be removed since the raw api call may be used directly.
json_data = request.get_json()
if str(json_data['email1']).lower() != str(json_data['email2']).lower():
raise HTTPError(
http.BAD_REQUEST,
data=dict(message_long='Email addresses must match.')
)
# Verify that captcha is valid
if settings.RECAPTCHA_SITE_KEY and not validate_recaptcha(json_data.get('g-recaptcha-response'), remote_ip=request.remote_addr):
raise HTTPError(
http.BAD_REQUEST,
data=dict(message_long='Invalid Captcha')
)
try:
full_name = request.json['fullName']
full_name = strip_html(full_name)
campaign = json_data.get('campaign')
if campaign and campaign not in campaigns.get_campaigns():
campaign = None
user = framework_auth.register_unconfirmed(
request.json['email1'],
request.json['password'],
full_name,
campaign=campaign,
)
framework_auth.signals.user_registered.send(user)
except (ValidationValueError, DuplicateEmailError):
raise HTTPError(
http.BAD_REQUEST,
data=dict(
message_long=language.ALREADY_REGISTERED.format(
email=markupsafe.escape(request.json['email1'])
)
)
)
except ValidationError as e:
raise HTTPError(
http.BAD_REQUEST,
data=dict(message_long=e.message)
)
if settings.CONFIRM_REGISTRATIONS_BY_EMAIL:
send_confirm_email(user, email=user.username)
message = language.REGISTRATION_SUCCESS.format(email=user.username)
return {'message': message}
else:
return {'message': 'You may now log in.'}
@collect_auth
def resend_confirmation_get(auth):
"""
View for user to land on resend confirmation page.
HTTP Method: GET
"""
# If user is already logged in, log user out
if auth.logged_in:
return auth_logout(redirect_url=request.url)
form = ResendConfirmationForm(request.form)
return {
'form': form,
}
@collect_auth
def resend_confirmation_post(auth):
"""
View for user to submit resend confirmation form.
HTTP Method: POST
"""
# If user is already logged in, log user out
if auth.logged_in:
return auth_logout(redirect_url=request.url)
form = ResendConfirmationForm(request.form)
if form.validate():
clean_email = form.email.data
user = get_user(email=clean_email)
status_message = ('If there is an OSF account associated with this unconfirmed email {0}, '
'a confirmation email has been resent to it. If you do not receive an email and believe '
'you should have, please contact OSF Support.').format(clean_email)
kind = 'success'
if user:
if throttle_period_expired(user.email_last_sent, settings.SEND_EMAIL_THROTTLE):
try:
send_confirm_email(user, clean_email, renew=True)
except KeyError:
# already confirmed, redirect to dashboard
status_message = 'This email {0} has already been confirmed.'.format(clean_email)
kind = 'warning'
user.email_last_sent = timezone.now()
user.save()
else:
status_message = ('You have recently requested to resend your confirmation email. '
'Please wait a few minutes before trying again.')
kind = 'error'
status.push_status_message(status_message, kind=kind, trust=False)
else:
forms.push_errors_to_status(form.errors)
# Don't go anywhere
return {'form': form}
def external_login_email_get():
"""
Landing view for first-time oauth-login user to enter their email address.
HTTP Method: GET
"""
form = ResendConfirmationForm(request.form)
session = get_session()
if not session.is_external_first_login:
raise HTTPError(http.UNAUTHORIZED)
external_id_provider = session.data['auth_user_external_id_provider']
return {
'form': form,
'external_id_provider': external_id_provider
}
def external_login_email_post():
"""
View to handle email submission for first-time oauth-login user.
HTTP Method: POST
"""
form = ResendConfirmationForm(request.form)
session = get_session()
if not session.is_external_first_login:
raise HTTPError(http.UNAUTHORIZED)
external_id_provider = session.data['auth_user_external_id_provider']
external_id = session.data['auth_user_external_id']
fullname = session.data['auth_user_fullname']
service_url = session.data['service_url']
# TODO: @cslzchen use user tags instead of destination
destination = 'dashboard'
for campaign in campaigns.get_campaigns():
if campaign != 'institution':
# Handle different url encoding schemes between `furl` and `urlparse/urllib`.
            # OSF uses `furl` to parse the service url during service validation with CAS. However, `web_url_for()` uses
            # `urlparse/urllib` to generate the service url. `furl` handles `urlparse/urllib` generated urls, but
            # not vice versa.
campaign_url = furl.furl(campaigns.campaign_url_for(campaign)).url
external_campaign_url = furl.furl(campaigns.external_campaign_url_for(campaign)).url
if campaigns.is_proxy_login(campaign):
# proxy campaigns: OSF Preprints and branded ones
if check_service_url_with_proxy_campaign(str(service_url), campaign_url, external_campaign_url):
destination = campaign
# continue to check branded preprints even service url matches osf preprints
if campaign != 'osf-preprints':
break
elif service_url.startswith(campaign_url):
# osf campaigns: OSF Prereg and ERPC
destination = campaign
break
if form.validate():
clean_email = form.email.data
user = get_user(email=clean_email)
external_identity = {
external_id_provider: {
external_id: None,
},
}
try:
ensure_external_identity_uniqueness(external_id_provider, external_id, user)
except ValidationError as e:
raise HTTPError(http.FORBIDDEN, e.message)
if user:
# 1. update user oauth, with pending status
external_identity[external_id_provider][external_id] = 'LINK'
if external_id_provider in user.external_identity:
user.external_identity[external_id_provider].update(external_identity[external_id_provider])
else:
user.external_identity.update(external_identity)
# 2. add unconfirmed email and send confirmation email
user.add_unconfirmed_email(clean_email, external_identity=external_identity)
user.save()
send_confirm_email(
user,
clean_email,
external_id_provider=external_id_provider,
external_id=external_id,
destination=destination
)
# 3. notify user
message = language.EXTERNAL_LOGIN_EMAIL_LINK_SUCCESS.format(
external_id_provider=external_id_provider,
email=user.username
)
kind = 'success'
# 4. remove session and osf cookie
remove_session(session)
else:
# 1. create unconfirmed user with pending status
external_identity[external_id_provider][external_id] = 'CREATE'
user = User.create_unconfirmed(
username=clean_email,
password=str(uuid.uuid4()),
fullname=fullname,
external_identity=external_identity,
campaign=None
)
# TODO: [#OSF-6934] update social fields, verified social fields cannot be modified
user.save()
# 3. send confirmation email
send_confirm_email(
user,
user.username,
external_id_provider=external_id_provider,
external_id=external_id,
destination=destination
)
# 4. notify user
message = language.EXTERNAL_LOGIN_EMAIL_CREATE_SUCCESS.format(
external_id_provider=external_id_provider,
email=user.username
)
kind = 'success'
# 5. remove session
remove_session(session)
status.push_status_message(message, kind=kind, trust=False)
else:
forms.push_errors_to_status(form.errors)
# Don't go anywhere
return {
'form': form,
'external_id_provider': external_id_provider
}
def validate_campaign(campaign):
"""
Non-view helper function that validates `campaign`.
:param campaign: the campaign to validate
:return: True if valid, False otherwise
"""
return campaign and campaign in campaigns.get_campaigns()
def validate_next_url(next_url):
"""
Non-view helper function that checks `next_url`.
Only allow redirects which are relative root or full domain (CAS, OSF and MFR).
Disallows external redirects.
:param next_url: the next url to check
:return: True if valid, False otherwise
"""
# disable external domain using `//`: the browser allows `//` as a shortcut for non-protocol specific requests
# like http:// or https:// depending on the use of SSL on the page already.
if next_url.startswith('//'):
return False
# only OSF, MFR, CAS and Branded Preprints domains are allowed
if next_url[0] == '/' or next_url.startswith(settings.DOMAIN):
# OSF
return True
if next_url.startswith(settings.CAS_SERVER_URL) or next_url.startswith(settings.MFR_SERVER_URL):
# CAS or MFR
return True
for url in campaigns.get_external_domains():
# Branded Preprints Phase 2
if next_url.startswith(url):
return True
return False
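# Illustrative behaviour (an added sketch, assuming default settings; not part of
# the original view code): validate_next_url('/dashboard/') and
# validate_next_url(settings.DOMAIN + 'myprojects/') return True, while a
# protocol-relative '//evil.example.com/' or an unrelated external domain returns False.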
def check_service_url_with_proxy_campaign(service_url, campaign_url, external_campaign_url=None):
"""
Check if service url belongs to proxy campaigns: OSF Preprints and branded ones.
Both service_url and campaign_url are parsed using `furl` encoding scheme.
:param service_url: the `furl` formatted service url
:param campaign_url: the `furl` formatted campaign url
:param external_campaign_url: the `furl` formatted external campaign url
    :return: True if the service url matches either campaign prefix, False otherwise
"""
prefix_1 = settings.DOMAIN + 'login/?next=' + campaign_url
prefix_2 = settings.DOMAIN + 'login?next=' + campaign_url
valid = service_url.startswith(prefix_1) or service_url.startswith(prefix_2)
valid_external = False
if external_campaign_url:
prefix_3 = settings.DOMAIN + 'login/?next=' + external_campaign_url
prefix_4 = settings.DOMAIN + 'login?next=' + external_campaign_url
valid_external = service_url.startswith(prefix_3) or service_url.startswith(prefix_4)
return valid or valid_external
| apache-2.0 | 7,350,794,578,634,425,000 | 37.153425 | 132 | 0.626861 | false | 4.113222 | false | false | false |
porolakka/motioneye-jp | src/template.py | 4 | 1811 |
# Copyright (c) 2013 Calin Crisan
# This file is part of motionEye.
#
# motionEye is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from jinja2 import Environment, FileSystemLoader
import settings
import utils
_jinja_env = None
def _init_jinja():
global _jinja_env
_jinja_env = Environment(
loader=FileSystemLoader(settings.TEMPLATE_PATH),
trim_blocks=False)
# globals
_jinja_env.globals['settings'] = settings
# filters
_jinja_env.filters['pretty_date_time'] = utils.pretty_date_time
_jinja_env.filters['pretty_date'] = utils.pretty_date
_jinja_env.filters['pretty_time'] = utils.pretty_time
_jinja_env.filters['pretty_duration'] = utils.pretty_duration
def add_template_path(path):
global _jinja_env
if _jinja_env is None:
_init_jinja()
_jinja_env.loader.searchpath.append(path)
def add_context(name, value):
global _jinja_env
if _jinja_env is None:
_init_jinja()
_jinja_env.globals[name] = value
def render(template_name, **context):
global _jinja_env
if _jinja_env is None:
_init_jinja()
template = _jinja_env.get_template(template_name)
return template.render(**context)
| gpl-3.0 | -8,569,701,440,049,534,000 | 26.439394 | 72 | 0.690226 | false | 3.726337 | false | false | false |
luhn/johnhancock | johnhancock/__init__.py | 1 | 10990 |
import re
import hashlib
import hmac
import binascii
from datetime import datetime as DateTime
from urllib.parse import urlsplit, parse_qsl, urlencode
from collections import namedtuple
from collections.abc import MutableMapping, Mapping
class Headers(MutableMapping):
"""
A case-insensitive dictionary-like object, for use in storing the headers.
"""
def __init__(self, init):
self._map = {}
for key, value in init.items():
self[key] = value
def __getitem__(self, key):
return self._map[key.lower()]
def __setitem__(self, key, value):
self._map[key.lower()] = value
def __delitem__(self, key):
del self._map[key.lower()]
def __iter__(self):
for key in self._map:
yield key.lower()
def __len__(self):
return len(self._map)
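# Illustration (added note, not in the original module): lookups are
# case-insensitive, e.g. Headers({'Content-Type': 'text/plain'})['content-type']
# returns 'text/plain'; deletion and iteration behave the same way.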
class CanonicalRequest(object):
"""
An object representing an HTTP request to be made to AWS.
:param method: The HTTP method being used.
:type method: str
:param url: The full URL, including protocol, host, and optionally the
query string.
:type uri: str
:param query: The request query as a dictionary or a string. Can be
omitted if no query string or included in the URL.
:type query: str or dict or list of two-tuples
:param headers: A dictionary of headers.
:type headers: dict
:param payload: The request body.
:type payload: bytes-like object
"""
def __init__(
self,
method,
uri,
query=None,
headers=None,
payload=b'',
):
self.method = method
self._parts = urlsplit(uri)
if isinstance(query, Mapping):
self.query = list(query.items())
elif isinstance(query, str):
self.query = parse_qsl(query)
else:
self.query = query or []
self.headers = Headers(headers or {})
self.payload = payload
if self._parts[1] and 'host' not in self.headers:
self.headers['host'] = self._parts[1]
def __str__(self):
return '\n'.join([
self.method,
self._parts[2],
urlencode(self.query),
self.canonical_headers,
self.signed_headers,
self.hashed_payload,
])
@property
def hashed(self):
return hashlib.sha256(str(self).encode('ascii')).hexdigest()
@property
def payload(self):
raise NotImplementedError('Cannot directly access payload.')
@payload.setter
def payload(self, value):
self.hashed_payload = hashlib.sha256(value).hexdigest()
@property
def canonical_headers(self):
lines = []
for header, value in sorted(
self.headers.items(),
key=lambda x: x[0].lower(),
):
value = value.strip()
# Eliminate duplicate spaces in non-quoted headers
if not (len(value) >= 2 and value[0] == '"' and value[-1] == '"'):
value = re.sub(r' +', ' ', value)
lines.append('{}:{}'.format(header.lower(), value))
return '\n'.join(lines) + '\n'
@property
def signed_headers(self):
return ';'.join(sorted(self.headers.keys()))
def _datetime(self):
"""
Return the current UTC datetime.
"""
return DateTime.utcnow()
@property
def datetime(self):
"""
Extract the datetime from the request
"""
if 'x-amz-date' in self.headers:
datetime = self.headers['x-amz-date']
elif any(key == 'X-Amz-Date' for (key, _) in self.query):
datetime = dict(self.query)['X-Amz-Date']
else:
raise ValueError('No datetime is set in the request.')
return DateTime.strptime(
datetime,
'%Y%m%dT%H%M%SZ',
)
def set_date_header(self):
"""
Set the ``X-Amz-Date`` header to the current datetime, if not set.
:returns: The datetime from the ``X-Amz-Date`` header.
:rtype: :class:`datetime.datetime`
"""
if 'x-amz-date' not in self.headers:
datetime = self._datetime().strftime('%Y%m%dT%H%M%SZ')
self.headers['x-amz-date'] = datetime
return datetime
else:
return None
def set_date_param(self):
"""
Set the ``X-Amz-Date`` query parameter to the current datetime, if not
set.
:returns: The datetime from the ``X-Amz-Date`` parameter.
:rtype: :class:`datetime.datetime`
"""
if not any(key == 'X-Amz-Date' for (key, _) in self.query):
datetime = self._datetime().strftime('%Y%m%dT%H%M%SZ')
self.query.append(
('X-Amz-Date', datetime)
)
return datetime
else:
return None
#: A signed request. Does not include the request body.
SignedRequest = namedtuple('SignedRequest', [
'method', 'uri', 'headers',
])
class CredentialScope(
namedtuple('CredentialScope', ['region', 'service'])
):
"""
The credential scope, sans date.
:param region: The region the request is querying. See
`Regions and Endpoints`_ for a list of values.
:type region: str
:param service: The service the request is querying.
:type service: str
"""
def date(self, date):
"""
        Generate a :class:`DatedCredentialScope` from this object.
"""
return DatedCredentialScope(
self.region,
self.service,
date,
)
class DatedCredentialScope(
namedtuple('DatedCredentialScope', ['region', 'service', 'date'])
):
"""
The credential scope, generated from the region and service.
:param region: The region the request is querying. See
`Regions and Endpoints`_ for a list of values.
:type region: str
:param service: The service the request is querying.
:type service: str
:param date: The date for the credential scope.
:type date: :class:`datetime.date` or :class:`datetime.datetime`
.. _`Regions and Endpoints`:
http://docs.aws.amazon.com/general/latest/gr/rande.html
"""
def __str__(self):
"""
Calculate the credential scope for the given date.
"""
return '/'.join([
self.date.strftime('%Y%m%d'),
self.region,
self.service,
'aws4_request',
])
class SigningKey(object):
"""
A signing key from the secret and the credential scope.
:param secret: The AWS key secret.
:type secret: str
:param scope: The credential scope with date.
:type scope: :class:`DatedCredentialScope`
"""
#: The computed signing key as a bytes object
key = None
def __init__(self, secret, scope):
date = scope.date.strftime('%Y%m%d')
signed_date = self._sign(b'AWS4' + secret.encode('ascii'), date)
signed_region = self._sign(signed_date, scope.region)
signed_service = self._sign(signed_region, scope.service)
self.key = self._sign(signed_service, 'aws4_request')
def _sign(self, key, value):
return hmac.new(
key,
value.encode('ascii'),
hashlib.sha256,
).digest()
def sign(self, string):
"""
        Sign a string. Returns the hexadecimal digest.
"""
return binascii.hexlify(self._sign(self.key, string)).decode('ascii')
def generate_string_to_sign(date, scope, request):
"""
Generate a string which should be signed by the signing key.
:param date: The datetime of the request.
:type date: :class:`datetime.datetime`
:param scope: The credential scope.
:type scope: :class:`CredentialScope` or :class:`DatedCredentialScope`
:param request: The request to sign.
:type request: :class:`CanonicalRequest`
"""
if isinstance(scope, CredentialScope):
scope = scope.date(date)
return '\n'.join([
'AWS4-HMAC-SHA256',
date.strftime('%Y%m%dT%H%M%SZ'),
str(scope),
request.hashed,
])
class Credentials(object):
"""
An object that encapsulates all the necessary credentials to sign a
request.
"""
def __init__(self, key_id, key_secret, region, service):
self._key_id = key_id
self._key_secret = key_secret
self._scope = CredentialScope(region, service)
def scope(self, datetime):
return self._scope.date(datetime)
def signing_key(self, datetime):
return SigningKey(self._key_secret, self.scope(datetime))
def sign_via_headers(self, request):
"""
Generate the appropriate headers to sign the request
:param request: The request to sign.
:type request: :class:`CanonicalRequest`
:returns: A list of additional headers.
:rtype: list of two-tuples
"""
headers = []
datetime_str = request.set_date_header()
if datetime_str is not None:
headers.append(('X-Amz-Date', datetime_str))
datetime = request.datetime
scope = self.scope(datetime)
key = self.signing_key(datetime)
to_sign = generate_string_to_sign(datetime, scope, request)
auth = 'AWS4-HMAC-SHA256 ' + ', '.join([
'Credential={}/{}'.format(self._key_id, str(scope)),
'SignedHeaders={}'.format(request.signed_headers),
'Signature={}'.format(key.sign(to_sign)),
])
headers.append(('Authorization', auth))
return headers
def sign_via_query_string(self, request, expires=60):
"""
        Sign the given request by adding the appropriate query parameters
        (pre-signed URL style).
        :param request: The request to sign.
        :type request: :class:`CanonicalRequest`
        :param expires: How long, in seconds, the signature remains valid.
        :type expires: int
        :returns: The query parameters carrying the signature.
        :rtype: list of two-tuples
"""
params = []
datetime_str = request.set_date_param()
if datetime_str is not None:
params.append(('X-Amz-Date', datetime_str))
datetime = request.datetime
scope = self.scope(datetime)
key = self.signing_key(datetime)
to_append = [
('X-Amz-Algorithm', 'AWS4-HMAC-SHA256'),
('X-Amz-Credential', '{}/{}'.format(self._key_id, str(scope))),
('X-Amz-Expires', str(expires)),
('X-Amz-SignedHeaders', request.signed_headers),
]
request.query = request.query[:-1] + to_append[:2] + request.query[-1:] + to_append[2:]
params = to_append[:2] + params + to_append[2:]
to_sign = generate_string_to_sign(datetime, scope, request)
params.append(
('X-Amz-Signature', key.sign(to_sign))
)
return params
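# A minimal usage sketch (added for illustration; the key id, secret, region and
# object URL below are placeholders, not values from the original project).
if __name__ == '__main__':
    _creds = Credentials('AKIDEXAMPLE', 'example-secret', 'us-east-1', 's3')
    _req = CanonicalRequest('GET', 'https://examplebucket.s3.amazonaws.com/test.txt')
    # sign_via_headers sets X-Amz-Date if it is missing and returns the extra
    # headers, including Authorization, to attach to the outgoing request.
    for _name, _value in _creds.sign_via_headers(_req):
        print('{}: {}'.format(_name, _value))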
| mit | -2,921,721,250,942,728,000 | 28.702703 | 95 | 0.573157 | false | 4.003643 | false | false | false |
hycis/TensorGraph | examples/charcnn_text_classifier.py | 1 | 3857 |
import tensorflow as tf
import tensorgraph as tg
from tensorgraph.layers import Reshape, Embedding, Conv2D, RELU, Linear, Flatten, ReduceSum, Softmax
from nltk.tokenize import RegexpTokenizer
from nlpbox import CharNumberEncoder, CatNumberEncoder
from tensorgraph.utils import valid, split_df, make_one_hot
from tensorgraph.cost import entropy, accuracy
import pandas
import numpy as np
# character CNN
def model(word_len, sent_len, nclass):
unicode_size = 1000
ch_embed_dim = 20
X_ph = tf.placeholder('int32', [None, sent_len, word_len])
input_sn = tg.StartNode(input_vars=[X_ph])
charcnn_hn = tg.HiddenNode(prev=[input_sn],
layers=[Reshape(shape=(-1, word_len)),
Embedding(cat_dim=unicode_size,
encode_dim=ch_embed_dim,
zero_pad=True),
Reshape(shape=(-1, ch_embed_dim, word_len, 1)),
Conv2D(num_filters=20, padding='VALID',
kernel_size=(ch_embed_dim,5), stride=(1,1)),
RELU(),
Conv2D(num_filters=40, padding='VALID',
kernel_size=(1,5), stride=(1,1)),
RELU(),
Conv2D(num_filters=60, padding='VALID',
kernel_size=(1,5), stride=(1,2)),
RELU(),
Flatten(),
Linear(nclass),
Reshape((-1, sent_len, nclass)),
ReduceSum(1),
Softmax()
])
output_en = tg.EndNode(prev=[charcnn_hn])
graph = tg.Graph(start=[input_sn], end=[output_en])
y_train_sb = graph.train_fprop()[0]
y_test_sb = graph.test_fprop()[0]
return X_ph, y_train_sb, y_test_sb
def tweets(word_len, sent_len, train_valid_ratio=[5,1]):
df = pandas.read_csv('tweets_large.csv')
field = 'text'
label = 'label'
tokenizer = RegexpTokenizer(r'\w+')
# encode characters into numbers
encoder = CharNumberEncoder(df[field].values, tokenizer=tokenizer,
word_len=word_len, sent_len=sent_len)
encoder.build_char_map()
encode_X = encoder.make_char_embed()
# encode categories into one hot array
cat_encoder = CatNumberEncoder(df[label])
cat_encoder.build_cat_map()
encode_y = cat_encoder.make_cat_embed()
nclass = len(np.unique(encode_y))
encode_y = make_one_hot(encode_y, nclass)
return encode_X, encode_y, nclass
def train():
from tensorgraph.trainobject import train as mytrain
with tf.Session() as sess:
word_len = 20
sent_len = 50
# load data
X_train, y_train, nclass = tweets(word_len, sent_len)
# build model
X_ph, y_train_sb, y_test_sb = model(word_len, sent_len, nclass)
y_ph = tf.placeholder('float32', [None, nclass])
# set cost and optimizer
train_cost_sb = entropy(y_ph, y_train_sb)
optimizer = tf.train.AdamOptimizer(0.001)
test_accu_sb = accuracy(y_ph, y_test_sb)
# train model
mytrain(session=sess,
feed_dict={X_ph:X_train, y_ph:y_train},
train_cost_sb=train_cost_sb,
valid_cost_sb=-test_accu_sb,
optimizer=optimizer,
epoch_look_back=5, max_epoch=100,
percent_decrease=0, train_valid_ratio=[5,1],
batchsize=64, randomize_split=False)
if __name__ == '__main__':
train()
| apache-2.0 | -2,544,405,861,138,603,500 | 37.188119 | 100 | 0.513871 | false | 3.830189 | true | false | false |
jparyani/Mailpile | mailpile/plugins/keylookup/nicknym.py | 2 | 6296 |
#coding:utf-8
from mailpile.commands import Command
from mailpile.conn_brokers import Master as ConnBroker
from mailpile.plugins import PluginManager
from mailpile.plugins.search import Search
from mailpile.mailutils import Email
# from mailpile.crypto.state import *
from mailpile.crypto.gpgi import GnuPG
import httplib
import re
import socket
import sys
import urllib
import urllib2
import ssl
import json
# TODO:
# * SSL certificate validation
# * Check nicknym server for a given host
# * Store provider keys on first discovery
# * Verify provider key signature
class Nicknym:
def __init__(self, config):
self.config = config
def get_key(self, address, keytype="openpgp", server=None):
"""
Request a key for address.
"""
result, signature = self._nickserver_get_key(address, keytype, server)
if self._verify_result(result, signature):
return self._import_key(result, keytype)
return False
def refresh_keys(self):
"""
Refresh all known keys.
"""
for addr, keytype in self._get_managed_keys():
result, signature = self._nickserver_get_key(addr, keytype)
# TODO: Check whether it needs refreshing and is valid
if self._verify_result(result, signature):
self._import_key(result, keytype)
def send_key(self, address, public_key, type):
"""
Send a new key to the nickserver
"""
# TODO: Unimplemented. There is currently no authentication mechanism
# defined in Nicknym standard
raise NotImplementedError()
def _parse_result(self, result):
"""Parse the result into a JSON blob and a signature"""
# TODO: No signature implemented on server side yet.
# See https://leap.se/code/issues/5340
return json.loads(result), ""
def _nickserver_get_key(self, address, keytype="openpgp", server=None):
        if server is None: server = self._discover_server(address)
data = urllib.urlencode({"address": address})
with ConnBroker.context(need=[ConnBroker.OUTGOING_HTTP]):
r = urllib2.urlopen(server, data)
result = r.read()
result, signature = self._parse_result(result)
return result, signature
def _import_key(self, result, keytype):
if keytype == "openpgp":
g = GnuPG(self.config)
res = g.import_keys(result[keytype])
if len(res["updated"]):
self._managed_keys_add(result["address"], keytype)
return res
else:
# We currently only support OpenPGP keys
return False
def _get_providerkey(self, domain):
"""
Request a provider key for the appropriate domain.
This is equivalent to get_key() with address=domain,
except it should store the provider key in an
appropriate key store
"""
pass
def _verify_providerkey(self, domain):
"""
...
"""
pass
def _verify_result(self, result, signature):
"""
Verify that the JSON result blob is correctly signed,
and that the signature is from the correct provider key.
"""
# No signature. See https://leap.se/code/issues/5340
return True
def _discover_server(self, address):
"""
Automatically detect which nicknym server to query
based on the address.
"""
# TODO: Actually perform some form of lookup
addr = address.split("@")
addr.reverse()
domain = addr[0]
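        # For example (illustration only): "[email protected]" yields
        # "https://nicknym.example.org:6425/" as the server URL.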
return "https://nicknym.%s:6425/" % domain
def _audit_key(self, address, keytype, server):
"""
Ask an alternative server for a key to verify that
the same result is being provided.
"""
result, signature = self._nickserver_get_key(address, keytype, server)
if self._verify_result(result, signature):
# TODO: verify that the result is acceptable
pass
return True
def _managed_keys_add(self, address, keytype):
try:
data = self.config.load_pickle("nicknym.cache")
except IOError:
data = []
data.append((address, keytype))
data = list(set(data))
self.config.save_pickle(data, "nicknym.cache")
def _managed_keys_remove(self, address, keytype):
try:
data = self.config.load_pickle("nicknym.cache")
except IOError:
data = []
data.remove((address, keytype))
self.config.save_pickle(data, "nicknym.cache")
def _get_managed_keys(self):
try:
return self.config.load_pickle("nicknym.cache")
except IOError:
return []
class NicknymGetKey(Command):
"""Get a key from a nickserver"""
ORDER = ('', 0)
SYNOPSIS = (None, 'crypto/nicknym/getkey', 'crypto/nicknym/getkey',
'<address> [<keytype>] [<server>]')
HTTP_CALLABLE = ('POST',)
HTTP_QUERY_VARS = {
'address': 'The nick/address to fetch a key for',
'keytype': 'What type of key to import (defaults to OpenPGP)',
'server': 'The Nicknym server to use (defaults to autodetect)'}
def command(self):
address = self.data.get('address', self.args[0])
keytype = self.data.get('keytype', None)
server = self.data.get('server', None)
if len(self.args) > 1:
keytype = self.args[1]
else:
keytype = 'openpgp'
if len(self.args) > 2:
server = self.args[2]
n = Nicknym(self.session.config)
return n.get_key(address, keytype, server)
class NicknymRefreshKeys(Command):
"""Get a key from a nickserver"""
ORDER = ('', 0)
SYNOPSIS = (None, 'crypto/nicknym/refreshkeys',
'crypto/nicknym/refreshkeys', '')
HTTP_CALLABLE = ('POST',)
def command(self):
n = Nicknym(self.session.config)
n.refresh_keys()
return True
_plugins = PluginManager(builtin=__file__)
_plugins.register_commands(NicknymGetKey)
_plugins.register_commands(NicknymRefreshKeys)
if __name__ == "__main__":
n = Nicknym()
print n.get_key("[email protected]")
| apache-2.0 | -4,400,018,473,072,059,400 | 29.862745 | 78 | 0.601493 | false | 3.930087 | true | false | false |
christhekeele/mal | python/step8_macros.py | 42 | 4847 |
import sys, traceback
import mal_readline
import mal_types as types
import reader, printer
from env import Env
import core
# read
def READ(str):
return reader.read_str(str)
# eval
def is_pair(x):
return types._sequential_Q(x) and len(x) > 0
def quasiquote(ast):
if not is_pair(ast):
return types._list(types._symbol("quote"),
ast)
elif ast[0] == 'unquote':
return ast[1]
elif is_pair(ast[0]) and ast[0][0] == 'splice-unquote':
return types._list(types._symbol("concat"),
ast[0][1],
quasiquote(ast[1:]))
else:
return types._list(types._symbol("cons"),
quasiquote(ast[0]),
quasiquote(ast[1:]))
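# Worked example (explanatory note, not in the original file): quasiquoting the
# AST for `(1 ~a 3) expands to
#   (cons (quote 1) (cons a (cons (quote 3) (quote ())))),
# so only the unquoted `a` is evaluated when the expansion is run through EVAL.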
def is_macro_call(ast, env):
return (types._list_Q(ast) and
types._symbol_Q(ast[0]) and
env.find(ast[0]) and
hasattr(env.get(ast[0]), '_ismacro_'))
def macroexpand(ast, env):
while is_macro_call(ast, env):
mac = env.get(ast[0])
ast = macroexpand(mac(*ast[1:]), env)
return ast
def eval_ast(ast, env):
if types._symbol_Q(ast):
return env.get(ast)
elif types._list_Q(ast):
return types._list(*map(lambda a: EVAL(a, env), ast))
elif types._vector_Q(ast):
return types._vector(*map(lambda a: EVAL(a, env), ast))
elif types._hash_map_Q(ast):
keyvals = []
for k in ast.keys():
keyvals.append(EVAL(k, env))
keyvals.append(EVAL(ast[k], env))
return types._hash_map(*keyvals)
else:
return ast # primitive value, return unchanged
def EVAL(ast, env):
while True:
#print("EVAL %s" % printer._pr_str(ast))
if not types._list_Q(ast):
return eval_ast(ast, env)
# apply list
ast = macroexpand(ast, env)
if not types._list_Q(ast): return ast
if len(ast) == 0: return ast
a0 = ast[0]
if "def!" == a0:
a1, a2 = ast[1], ast[2]
res = EVAL(a2, env)
return env.set(a1, res)
elif "let*" == a0:
a1, a2 = ast[1], ast[2]
let_env = Env(env)
for i in range(0, len(a1), 2):
let_env.set(a1[i], EVAL(a1[i+1], let_env))
ast = a2
env = let_env
# Continue loop (TCO)
elif "quote" == a0:
return ast[1]
elif "quasiquote" == a0:
ast = quasiquote(ast[1]);
# Continue loop (TCO)
elif 'defmacro!' == a0:
func = EVAL(ast[2], env)
func._ismacro_ = True
return env.set(ast[1], func)
elif 'macroexpand' == a0:
return macroexpand(ast[1], env)
elif "do" == a0:
eval_ast(ast[1:-1], env)
ast = ast[-1]
# Continue loop (TCO)
elif "if" == a0:
a1, a2 = ast[1], ast[2]
cond = EVAL(a1, env)
if cond is None or cond is False:
if len(ast) > 3: ast = ast[3]
else: ast = None
else:
ast = a2
# Continue loop (TCO)
elif "fn*" == a0:
a1, a2 = ast[1], ast[2]
return types._function(EVAL, Env, a2, env, a1)
else:
el = eval_ast(ast, env)
f = el[0]
if hasattr(f, '__ast__'):
ast = f.__ast__
env = f.__gen_env__(el[1:])
else:
return f(*el[1:])
# print
def PRINT(exp):
return printer._pr_str(exp)
# repl
repl_env = Env()
def REP(str):
return PRINT(EVAL(READ(str), repl_env))
# core.py: defined using python
for k, v in core.ns.items(): repl_env.set(types._symbol(k), v)
repl_env.set(types._symbol('eval'), lambda ast: EVAL(ast, repl_env))
repl_env.set(types._symbol('*ARGV*'), types._list(*sys.argv[2:]))
# core.mal: defined using the language itself
REP("(def! not (fn* (a) (if a false true)))")
REP("(def! load-file (fn* (f) (eval (read-string (str \"(do \" (slurp f) \")\")))))")
REP("(defmacro! cond (fn* (& xs) (if (> (count xs) 0) (list 'if (first xs) (if (> (count xs) 1) (nth xs 1) (throw \"odd number of forms to cond\")) (cons 'cond (rest (rest xs)))))))")
REP("(defmacro! or (fn* (& xs) (if (empty? xs) nil (if (= 1 (count xs)) (first xs) `(let* (or_FIXME ~(first xs)) (if or_FIXME or_FIXME (or ~@(rest xs))))))))")
if len(sys.argv) >= 2:
REP('(load-file "' + sys.argv[1] + '")')
sys.exit(0)
# repl loop
while True:
try:
line = mal_readline.readline("user> ")
if line == None: break
if line == "": continue
print(REP(line))
except reader.Blank: continue
except Exception as e:
print("".join(traceback.format_exception(*sys.exc_info())))
| mpl-2.0 | -5,848,804,980,285,082,000 | 30.888158 | 183 | 0.502373 | false | 3.193017 | false | false | false |
facebook/chisel | commands/FBCounterCommands.py | 1 | 3920 |
#!/usr/bin/python
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# These set of commands provide a way to use counters in debug time. By using these counters,
# you can track how many times your program takes a specific path.
#
# Sample Use Case:
# Let's say you have a function that logs some messages from various parts of your code.
# And you want to learn how many times logMessage is called on startup.
#
# 1. Add a breakpoint to the entry point of your program (e.g. main).
# a. Add `zzz 10 printcounter` as an action.
# b. Check "Automatically continue after evaluating actions"
# 2. Add a breakpoint to the logMessage function.
# a. Add `incrementcounter log` as an action.
# b. Add `incrementcounter log_{} message` as an action.
# c. Check "Automatically continue after evaluating actions"
# 3. Run the program
#
# Format String:
# It uses Python's string.Formatter to format strings. You can use placeholders here as you can in Python:
# https://docs.python.org/3.4/library/string.html#string.Formatter.format
#
# Sample key_format_string:
# "key_{}" (int)5 -> Will build the key string as "key_5"
# Can be removed when Python 2 support is removed.
from __future__ import print_function
import fbchisellldbbase as fb
counters = {}
def lldbcommands():
return [
FBIncrementCounterCommand(),
FBPrintCounterCommand(),
FBPrintCountersCommand(),
FBResetCounterCommand(),
FBResetCountersCommand(),
]
def generateKey(arguments):
keyFormatString = arguments[1]
keyArgs = []
for argument in arguments[2:]:
if argument.startswith("("):
value = fb.evaluateExpression(argument)
else:
value = fb.evaluateExpressionValue(argument).GetObjectDescription()
if not value:
value = fb.evaluateExpression(argument)
keyArgs.append(value)
return keyFormatString.format(*keyArgs).strip()
# Increments the counter for the key.
# (lldb) incrementcounter key_format_string key_args
class FBIncrementCounterCommand(fb.FBCommand):
def name(self):
return "incrementcounter"
def description(self):
return "Increments the counter for the key."
def run(self, arguments, options):
key = generateKey(arguments)
counters[key] = counters.get(key, 0) + 1
# Prints the counter for the key.
# (lldb) printcounter key_format_string key_args
# 0
class FBPrintCounterCommand(fb.FBCommand):
def name(self):
return "printcounter"
def description(self):
return "Prints the counter for the key."
def run(self, arguments, options):
key = generateKey(arguments)
print(str(counters[key]))
# Prints all the counters sorted by the keys.
# (lldb) printcounters
# key_1: 0
class FBPrintCountersCommand(fb.FBCommand):
def name(self):
return "printcounters"
def description(self):
return "Prints all the counters sorted by the keys."
def run(self, arguments, options):
keys = sorted(counters.keys())
for key in keys:
print(key + ": " + str(counters[key]))
# Resets the counter for the key.
# (lldb) resetcounter key_format_string key_args
class FBResetCounterCommand(fb.FBCommand):
def name(self):
return "resetcounter"
def description(self):
return "Resets the counter for the key."
def run(self, arguments, options):
key = generateKey(arguments)
counters[key] = 0
# Resets all the counters.
# (lldb) resetcounters
class FBResetCountersCommand(fb.FBCommand):
def name(self):
return "resetcounters"
def description(self):
return "Resets all the counters."
def run(self, arguments, options):
counters.clear()
| mit | 5,923,686,988,107,759,000 | 27.613139 | 106 | 0.683418 | false | 3.862069 | false | false | false |
wt52571314/MyAutoTest | test_room/tools.py | 1 | 2296 |
# -*- coding: utf-8 -*-
# ===============================================================================
# @ Creator:Hainnan.Zhang
# @ Date:2016-3-24
# Simulates a browser
# ===============================================================================
from Stock_Interface_test.old.interface_test import *
from Stock_Interface_test.old.InterFace_List import *
class MyWeb():
"""
    Simulates a browser.
"""
def __init__(self):
self.header = {
"User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:45.0) Gecko/20100101 Firefox/45.0"
}
self.cookie = cookielib.CookieJar()
self.cookie_support = urllib2.HTTPCookieProcessor(self.cookie)
self.opener = urllib2.build_opener(self.cookie_support,
urllib2.HTTPHandler)
# urllib2.install_opener(self.opener)
def post(self, posturl, dictdata):
"""
        Simulate a POST request.
        :param string posturl: the URL to post to
        :param dict dictdata: the data to send
"""
request = urllib2.Request(posturl, dictdata, self.header)
try:
content = self.opener.open(request)
return content
except Exception, e:
print ("post:" + str(e))
return None
def get(self, url):
"""
        Simulate a GET request.
        :param url: the URL to request
        :return content: the response object; usually read via its read() method
:rtype : instance or None
"""
request = urllib2.Request(url, None, self.header)
try:
content = urllib2.urlopen(request)
return content
except Exception, e:
print ("open:" + str(e))
return None
if __name__ == "__main__":
web = MyWeb()
code_use = get_magic_code(url_result)
# print code_use
ticket_use = get_ticket(code_use, user_name, pwd)
# print ticket_use
date_use = 'ticket='+ticket_use
login = web.post('https://stock-api.jimustock.com/api/v1/security/login', date_use)
print login.read()
url = 'https://stock-api.jimu.com/api/v1/us/trade/validateBuy'
data = 'entrustAmount=1&entrustPrice=0.1&symbol=ACW&usAccountId=66&type=LIMIT&orderTimeInForce=DAY'
res = web.post(url, data)
    print res.read()
| gpl-2.0 | -6,695,613,872,900,800,000 | 31.043478 | 103 | 0.534389 | false | 3.426357 | false | false | false
kingmotley/SickRage | lib/hachoir_parser/image/photoshop_metadata.py | 84 | 7658 |
""" Photoshop metadata parser.
References:
- http://www.scribd.com/doc/32900475/Photoshop-File-Formats
"""
from hachoir_core.field import (FieldSet, ParserError,
UInt8, UInt16, UInt32, Float32, Enum,
SubFile, String, CString, PascalString8,
NullBytes, RawBytes)
from hachoir_core.text_handler import textHandler, hexadecimal
from hachoir_core.tools import alignValue, createDict
from hachoir_parser.image.iptc import IPTC
from hachoir_parser.common.win32 import PascalStringWin32
BOOL = {0: False, 1: True}
class Version(FieldSet):
def createFields(self):
yield UInt32(self, "version")
yield UInt8(self, "has_realm")
yield PascalStringWin32(self, "writer_name", charset="UTF-16-BE")
yield PascalStringWin32(self, "reader_name", charset="UTF-16-BE")
yield UInt32(self, "file_version")
size = (self.size - self.current_size) // 8
if size:
yield NullBytes(self, "padding", size)
class FixedFloat32(FieldSet):
def createFields(self):
yield UInt16(self, "int_part")
yield UInt16(self, "float_part")
def createValue(self):
return self["int_part"].value + float(self["float_part"].value) / (1<<16)
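# Note (illustrative, not from the original parser): this decodes a 16.16
# fixed-point value, e.g. int_part=72 and float_part=32768 give 72.5, which
# ResolutionInfo below reports in px/in or px/cm depending on the unit field.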
class ResolutionInfo(FieldSet):
def createFields(self):
yield FixedFloat32(self, "horiz_res")
yield Enum(UInt16(self, "horiz_res_unit"), {1:'px/in', 2:'px/cm'})
yield Enum(UInt16(self, "width_unit"), {1:'inches', 2:'cm', 3:'points', 4:'picas', 5:'columns'})
yield FixedFloat32(self, "vert_res")
yield Enum(UInt16(self, "vert_res_unit"), {1:'px/in', 2:'px/cm'})
yield Enum(UInt16(self, "height_unit"), {1:'inches', 2:'cm', 3:'points', 4:'picas', 5:'columns'})
class PrintScale(FieldSet):
def createFields(self):
yield Enum(UInt16(self, "style"), {0:'centered', 1:'size to fit', 2:'user defined'})
yield Float32(self, "x_location")
yield Float32(self, "y_location")
yield Float32(self, "scale")
class PrintFlags(FieldSet):
def createFields(self):
yield Enum(UInt8(self, "labels"), BOOL)
yield Enum(UInt8(self, "crop_marks"), BOOL)
yield Enum(UInt8(self, "color_bars"), BOOL)
yield Enum(UInt8(self, "reg_marks"), BOOL)
yield Enum(UInt8(self, "negative"), BOOL)
yield Enum(UInt8(self, "flip"), BOOL)
yield Enum(UInt8(self, "interpolate"), BOOL)
yield Enum(UInt8(self, "caption"), BOOL)
yield Enum(UInt8(self, "print_flags"), BOOL)
yield Enum(UInt8(self, "unknown"), BOOL)
def createValue(self):
return [field.name for field in self if field.value]
def createDisplay(self):
return ', '.join(self.value)
class PrintFlags2(FieldSet):
def createFields(self):
yield UInt16(self, "version")
yield UInt8(self, "center_crop_marks")
yield UInt8(self, "reserved")
yield UInt32(self, "bleed_width")
yield UInt16(self, "bleed_width_scale")
class GridGuides(FieldSet):
def createFields(self):
yield UInt32(self, "version")
yield UInt32(self, "horiz_cycle", "Horizontal grid spacing, in quarter inches")
yield UInt32(self, "vert_cycle", "Vertical grid spacing, in quarter inches")
yield UInt32(self, "guide_count", "Number of guide resource blocks (can be 0)")
class Thumbnail(FieldSet):
def createFields(self):
yield Enum(UInt32(self, "format"), {0:'Raw RGB', 1:'JPEG RGB'})
yield UInt32(self, "width", "Width of thumbnail in pixels")
yield UInt32(self, "height", "Height of thumbnail in pixels")
yield UInt32(self, "widthbytes", "Padded row bytes = (width * bits per pixel + 31) / 32 * 4")
yield UInt32(self, "uncompressed_size", "Total size = widthbytes * height * planes")
yield UInt32(self, "compressed_size", "Size after compression. Used for consistency check")
yield UInt16(self, "bits_per_pixel")
yield UInt16(self, "num_planes")
yield SubFile(self, "thumbnail", self['compressed_size'].value, "Thumbnail (JPEG file)", mime_type="image/jpeg")
class Photoshop8BIM(FieldSet):
TAG_INFO = {
0x03ed: ("res_info", ResolutionInfo, "Resolution information"),
0x03f3: ("print_flag", PrintFlags, "Print flags: labels, crop marks, colour bars, etc."),
0x03f5: ("col_half_info", None, "Colour half-toning information"),
0x03f8: ("color_trans_func", None, "Colour transfer function"),
0x0404: ("iptc", IPTC, "IPTC/NAA"),
0x0406: ("jpeg_qual", None, "JPEG quality"),
0x0408: ("grid_guide", GridGuides, "Grid guides informations"),
0x0409: ("thumb_res", Thumbnail, "Thumbnail resource (PS 4.0)"),
0x0410: ("watermark", UInt8, "Watermark"),
0x040a: ("copyright_flag", UInt8, "Copyright flag"),
0x040b: ("url", None, "URL"),
0x040c: ("thumb_res2", Thumbnail, "Thumbnail resource (PS 5.0)"),
0x040d: ("glob_angle", UInt32, "Global lighting angle for effects"),
0x0411: ("icc_tagged", None, "ICC untagged (1 means intentionally untagged)"),
0x0414: ("base_layer_id", UInt32, "Base value for new layers ID's"),
0x0416: ("indexed_colors", UInt16, "Number of colors in table that are actually defined"),
0x0417: ("transparency_index", UInt16, "Index of transparent color"),
0x0419: ("glob_altitude", UInt32, "Global altitude"),
0x041a: ("slices", None, "Slices"),
0x041e: ("url_list", None, "Unicode URLs"),
0x0421: ("version", Version, "Version information"),
0x0425: ("caption_digest", None, "16-byte MD5 caption digest"),
0x0426: ("printscale", PrintScale, "Printer scaling"),
0x2710: ("print_flag2", PrintFlags2, "Print flags (2)"),
}
TAG_NAME = createDict(TAG_INFO, 0)
CONTENT_HANDLER = createDict(TAG_INFO, 1)
TAG_DESC = createDict(TAG_INFO, 2)
def __init__(self, *args, **kw):
FieldSet.__init__(self, *args, **kw)
try:
self._name, self.handler, self._description = self.TAG_INFO[self["tag"].value]
except KeyError:
self.handler = None
size = self["size"]
self._size = size.address + size.size + alignValue(size.value, 2) * 8
def createFields(self):
yield String(self, "signature", 4, "8BIM signature", charset="ASCII")
if self["signature"].value != "8BIM":
raise ParserError("Stream doesn't look like 8BIM item (wrong signature)!")
yield textHandler(UInt16(self, "tag"), hexadecimal)
if self.stream.readBytes(self.absolute_address + self.current_size, 4) != "\0\0\0\0":
yield PascalString8(self, "name")
size = 2 + (self["name"].size // 8) % 2
yield NullBytes(self, "name_padding", size)
else:
yield String(self, "name", 4, strip="\0")
yield UInt16(self, "size")
size = alignValue(self["size"].value, 2)
if not size:
return
if self.handler:
if issubclass(self.handler, FieldSet):
yield self.handler(self, "content", size=size*8)
else:
yield self.handler(self, "content")
else:
yield RawBytes(self, "content", size)
class PhotoshopMetadata(FieldSet):
def createFields(self):
yield CString(self, "signature", "Photoshop version")
if self["signature"].value == "Photoshop 3.0":
while not self.eof:
yield Photoshop8BIM(self, "item[]")
else:
size = (self._size - self.current_size) / 8
yield RawBytes(self, "rawdata", size)
| gpl-3.0 | 5,261,888,109,530,542,000 | 43.783626 | 120 | 0.61883 | false | 3.474592 | false | false | false |
crccheck/vaulty | vaulty.py | 1 | 4556 | import logging
import os
import readline
import shlex
import sys
import hvac
LOG_FILENAME = '/tmp/vaulty-completer.log'
logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
class REPLState:
"""
Stores state for the user's session and also wraps `hvac`.
"""
_pwd = 'secret/' # pwd is wrapped to magically make `oldpwd` work
oldpwd = None
home = 'secret/'
# This is only used to help assist tab completion
_list_cache = {}
def __init__(self, vault_client):
self.vault = vault_client
def list(self, path):
try:
results = self.vault.list(path)['data']['keys']
self._list_cache[path] = results
return results
except TypeError:
# TODO don't fail silently
return []
@property
def pwd(self):
return self._pwd
@pwd.setter
def pwd(self, new_pwd):
self.oldpwd = self._pwd
self._pwd = new_pwd
def readline_completer(self, text, state):
logging.debug('readline text:%s state:%d', text, state)
if state > 5:
# Why does this happen?
logging.error('infinite loop detected, terminating')
return None
if state == 0:
if self.pwd not in self._list_cache:
self.list(self.pwd)
current_options = [x for x in self._list_cache[self.pwd] if x.startswith(text)]
in_cd = readline.get_line_buffer().startswith('cd ') # TODO this is awkward
if in_cd:
current_options = [x for x in current_options if x.endswith('/')]
if len(current_options) == 1:
return current_options[0]
if current_options:
print()
print('\n'.join(current_options))
# print(text, end='')
print(f'{self.pwd}> {readline.get_line_buffer()}', end='')
sys.stdout.flush()
return None
def cmd_cd(state, dir_path=None):
if dir_path is None:
state.pwd = state.home
return
if dir_path == '-':
new_pwd = state.oldpwd or state.pwd
else:
new_pwd = os.path.normpath(os.path.join(state.pwd, dir_path)) + '/'
if state.list(new_pwd):
state.pwd = new_pwd
return
return f'{new_pwd} is not a valid path'
def cmd_ls(state, path=None):
"""List secrets and paths in a path, defaults to PWD."""
if path is None:
target_path = state.pwd
else:
target_path = os.path.normpath(os.path.join(state.pwd, path)) + '/'
results = state.list(target_path)
if results:
return('\n'.join(results))
return f'{path} is not a valid path'
def cmd_rm(state, *paths):
return 'rm is not implemented yet'
def repl(state):
in_text = input(f'{state.pwd}> ')
bits = shlex.split(in_text)
if not bits:
return
if bits[0] == 'pwd':
print(state.pwd)
return
if bits[0] == 'ls' or bits[0] == 'l':
print(cmd_ls(state, *bits[1:]))
return
if bits[0] == 'cd':
out = cmd_cd(state, *bits[1:])
out and print(out)
return
if bits[0] == 'cat':
if len(bits) != 2:
return 'USAGE: cat <path>'
secret_path = os.path.normpath(os.path.join(state.pwd, bits[1]))
try:
for key, value in state.vault.read(secret_path)['data'].items():
print(f'{key}={value}')
except TypeError:
print(f'{bits[1]} does not exist')
return
if bits[0] == 'rm':
print(cmd_rm(state, *bits[1:]))
return
print('DEBUG:', in_text)
def main():
path = os.path.expanduser('~/.vault-token')
if os.path.isfile(path):
with open(path) as fh:
token = fh.read().strip()
client = hvac.Client(url=os.getenv('VAULT_ADDR'), token=token)
assert client.is_authenticated()
state = REPLState(client)
team = os.getenv('VAULT_TEAM', '')
state.home = state.pwd = os.path.join(state.pwd, team) + '/'
readline.set_completer(state.readline_completer)
readline.parse_and_bind('tab: complete')
# readline.get_completer_delims()
# readline.set_completer_delims('\n`~!@#$%^&*()-=+[{]}\|;:'",<>/? ')
readline.set_completer_delims('\n`~!@#$%^&*()=+[{]}\|;:\'",<>/? ')
try:
while True:
try:
repl(state)
except hvac.exceptions.Forbidden as e:
print(e)
except (KeyboardInterrupt, EOFError):
sys.exit(1)
if __name__ == "__main__":
main()
| apache-2.0 | -8,308,577,338,291,436,000 | 25.8 | 87 | 0.552458 | false | 3.540016 | false | false | false |
mxOBS/deb-pkg_trusty_chromium-browser | third_party/chromite/lib/commandline_unittest.py | 1 | 12676 | #!/usr/bin/python
# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Test the commandline module."""
from __future__ import print_function
import cPickle
import signal
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__)))))
from chromite.lib import commandline
from chromite.lib import cros_build_lib_unittest
from chromite.lib import cros_test_lib
from chromite.lib import gs
from chromite.lib import partial_mock
from chromite.cbuildbot import constants
# pylint: disable=W0212
class TestShutDownException(cros_test_lib.TestCase):
"""Test that ShutDownException can be pickled."""
def testShutDownException(self):
"""Test that ShutDownException can be pickled."""
ex = commandline._ShutDownException(signal.SIGTERM, 'Received SIGTERM')
ex2 = cPickle.loads(cPickle.dumps(ex))
self.assertEqual(ex.signal, ex2.signal)
self.assertEqual(ex.message, ex2.message)
class GSPathTest(cros_test_lib.TestCase):
"""Test type=gs_path normalization functionality."""
GS_REL_PATH = 'bucket/path/to/artifacts'
@staticmethod
def _ParseCommandLine(argv):
parser = commandline.OptionParser()
parser.add_option('-g', '--gs-path', type='gs_path',
help=('GS path that contains the chrome to deploy.'))
return parser.parse_args(argv)
def _RunGSPathTestCase(self, raw, parsed):
options, _ = self._ParseCommandLine(['--gs-path', raw])
self.assertEquals(options.gs_path, parsed)
def testNoGSPathCorrectionNeeded(self):
"""Test case where GS path correction is not needed."""
gs_path = '%s/%s' % (gs.BASE_GS_URL, self.GS_REL_PATH)
self._RunGSPathTestCase(gs_path, gs_path)
def testTrailingSlashRemoval(self):
"""Test case where GS path ends with /."""
gs_path = '%s/%s/' % (gs.BASE_GS_URL, self.GS_REL_PATH)
self._RunGSPathTestCase(gs_path, gs_path.rstrip('/'))
def testDuplicateSlashesRemoved(self):
"""Test case where GS path contains many / in a row."""
self._RunGSPathTestCase(
'%s/a/dir/with//////////slashes' % gs.BASE_GS_URL,
'%s/a/dir/with/slashes' % gs.BASE_GS_URL)
def testRelativePathsRemoved(self):
"""Test case where GS path contain /../ logic."""
self._RunGSPathTestCase(
'%s/a/dir/up/here/.././../now/down/there' % gs.BASE_GS_URL,
'%s/a/dir/now/down/there' % gs.BASE_GS_URL)
def testCorrectionNeeded(self):
"""Test case where GS path correction is needed."""
self._RunGSPathTestCase(
'%s/%s/' % (gs.PRIVATE_BASE_HTTPS_URL, self.GS_REL_PATH),
'%s/%s' % (gs.BASE_GS_URL, self.GS_REL_PATH))
def testInvalidPath(self):
"""Path cannot be normalized."""
with cros_test_lib.OutputCapturer():
self.assertRaises2(
SystemExit, self._RunGSPathTestCase, 'http://badhost.com/path', '',
check_attrs={'code': 2})
class DetermineCheckoutTest(cros_test_lib.MockTempDirTestCase):
"""Verify functionality for figuring out what checkout we're in."""
def setUp(self):
self.rc_mock = cros_build_lib_unittest.RunCommandMock()
self.StartPatcher(self.rc_mock)
self.rc_mock.SetDefaultCmdResult()
def RunTest(self, dir_struct, cwd, expected_root, expected_type,
expected_src):
"""Run a test with specific parameters and expected results."""
cros_test_lib.CreateOnDiskHierarchy(self.tempdir, dir_struct)
cwd = os.path.join(self.tempdir, cwd)
checkout_info = commandline.DetermineCheckout(cwd)
full_root = expected_root
if expected_root is not None:
full_root = os.path.join(self.tempdir, expected_root)
full_src = expected_src
if expected_src is not None:
full_src = os.path.join(self.tempdir, expected_src)
self.assertEquals(checkout_info.root, full_root)
self.assertEquals(checkout_info.type, expected_type)
self.assertEquals(checkout_info.chrome_src_dir, full_src)
def testGclientRepo(self):
dir_struct = [
'a/.gclient',
'a/b/.repo/',
'a/b/c/.gclient',
'a/b/c/d/somefile',
]
self.RunTest(dir_struct, 'a/b/c', 'a/b/c',
commandline.CHECKOUT_TYPE_GCLIENT,
'a/b/c/src')
self.RunTest(dir_struct, 'a/b/c/d', 'a/b/c',
commandline.CHECKOUT_TYPE_GCLIENT,
'a/b/c/src')
self.RunTest(dir_struct, 'a/b', 'a/b',
commandline.CHECKOUT_TYPE_REPO,
None)
self.RunTest(dir_struct, 'a', 'a',
commandline.CHECKOUT_TYPE_GCLIENT,
'a/src')
def testGitSubmodule(self):
"""Recognizes a chrome git submodule checkout."""
self.rc_mock.AddCmdResult(
partial_mock.In('config'), output=constants.CHROMIUM_GOB_URL)
dir_struct = [
'a/.gclient',
'a/.repo',
'a/b/.git/',
]
self.RunTest(dir_struct, 'a/b', 'a/b',
commandline.CHECKOUT_TYPE_SUBMODULE,
'a/b')
def testBadGit1(self):
""".git is not a directory."""
self.RunTest(['a/.git'], 'a', None,
commandline.CHECKOUT_TYPE_UNKNOWN, None)
def testBadGit2(self):
"""'git config' returns nothing."""
self.RunTest(['a/.repo/', 'a/b/.git/'], 'a/b', 'a',
commandline.CHECKOUT_TYPE_REPO, None)
def testBadGit3(self):
"""'git config' returns error."""
self.rc_mock.AddCmdResult(partial_mock.In('config'), returncode=5)
self.RunTest(['a/.git/'], 'a', None,
commandline.CHECKOUT_TYPE_UNKNOWN, None)
class CacheTest(cros_test_lib.MockTempDirTestCase):
"""Test cache dir specification and finding functionality."""
REPO_ROOT = '/fake/repo/root'
GCLIENT_ROOT = '/fake/gclient/root'
SUBMODULE_ROOT = '/fake/submodule/root'
CACHE_DIR = '/fake/cache/dir'
def setUp(self):
self.PatchObject(commandline.ArgumentParser, 'ConfigureCacheDir')
dir_struct = [
'repo/.repo/',
'gclient/.gclient',
'submodule/.git/',
]
cros_test_lib.CreateOnDiskHierarchy(self.tempdir, dir_struct)
self.repo_root = os.path.join(self.tempdir, 'repo')
self.gclient_root = os.path.join(self.tempdir, 'gclient')
self.submodule_root = os.path.join(self.tempdir, 'submodule')
self.nocheckout_root = os.path.join(self.tempdir, 'nothing')
self.rc_mock = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
self.rc_mock.AddCmdResult(
partial_mock.In('config'), output=constants.CHROMIUM_GOB_URL)
self.cwd_mock = self.PatchObject(os, 'getcwd')
self.parser = commandline.ArgumentParser(caching=True)
def _CheckCall(self, expected):
# pylint: disable=E1101
f = self.parser.ConfigureCacheDir
self.assertEquals(1, f.call_count)
self.assertTrue(f.call_args[0][0].startswith(expected))
def testRepoRoot(self):
"""Test when we are inside a repo checkout."""
self.cwd_mock.return_value = self.repo_root
self.parser.parse_args([])
self._CheckCall(self.repo_root)
def testGclientRoot(self):
"""Test when we are inside a gclient checkout."""
self.cwd_mock.return_value = self.gclient_root
self.parser.parse_args([])
self._CheckCall(self.gclient_root)
def testSubmoduleRoot(self):
"""Test when we are inside a git submodule Chrome checkout."""
self.cwd_mock.return_value = self.submodule_root
self.parser.parse_args([])
self._CheckCall(self.submodule_root)
def testTempdir(self):
"""Test when we are not in any checkout."""
self.cwd_mock.return_value = self.nocheckout_root
self.parser.parse_args([])
self._CheckCall('/tmp')
def testSpecifiedDir(self):
"""Test when user specifies a cache dir."""
self.cwd_mock.return_value = self.repo_root
self.parser.parse_args(['--cache-dir', self.CACHE_DIR])
self._CheckCall(self.CACHE_DIR)
class ParseArgsTest(cros_test_lib.TestCase):
"""Test parse_args behavior of our custom argument parsing classes."""
def _CreateOptionParser(self, cls):
"""Create a class of optparse.OptionParser with prepared config.
Args:
cls: Some subclass of optparse.OptionParser.
Returns:
The created OptionParser object.
"""
usage = 'usage: some usage'
parser = cls(usage=usage)
# Add some options.
parser.add_option('-x', '--xxx', action='store_true', default=False,
help='Gimme an X')
parser.add_option('-y', '--yyy', action='store_true', default=False,
help='Gimme a Y')
parser.add_option('-a', '--aaa', type='string', default='Allan',
help='Gimme an A')
parser.add_option('-b', '--bbb', type='string', default='Barry',
help='Gimme a B')
parser.add_option('-c', '--ccc', type='string', default='Connor',
help='Gimme a C')
return parser
def _CreateArgumentParser(self, cls):
"""Create a class of argparse.ArgumentParser with prepared config.
Args:
cls: Some subclass of argparse.ArgumentParser.
Returns:
The created ArgumentParser object.
"""
usage = 'usage: some usage'
parser = cls(usage=usage)
# Add some options.
parser.add_argument('-x', '--xxx', action='store_true', default=False,
help='Gimme an X')
parser.add_argument('-y', '--yyy', action='store_true', default=False,
help='Gimme a Y')
parser.add_argument('-a', '--aaa', type=str, default='Allan',
help='Gimme an A')
parser.add_argument('-b', '--bbb', type=str, default='Barry',
help='Gimme a B')
parser.add_argument('-c', '--ccc', type=str, default='Connor',
help='Gimme a C')
parser.add_argument('args', type=str, nargs='*', help='args')
return parser
def _TestParser(self, parser):
"""Test the given parser with a prepared argv."""
argv = ['-x', '--bbb', 'Bobby', '-c', 'Connor', 'foobar']
parsed = parser.parse_args(argv)
if isinstance(parser, commandline.OptionParser):
# optparse returns options and args separately.
options, args = parsed
self.assertEquals(['foobar'], args)
else:
# argparse returns just options. Options configured above to have the
# args stored at option "args".
options = parsed
self.assertEquals(['foobar'], parsed.args)
self.assertTrue(options.xxx)
self.assertFalse(options.yyy)
self.assertEquals('Allan', options.aaa)
self.assertEquals('Bobby', options.bbb)
self.assertEquals('Connor', options.ccc)
self.assertRaises(AttributeError, getattr, options, 'xyz')
# Now try altering option values.
options.aaa = 'Arick'
self.assertEquals('Arick', options.aaa)
# Now freeze the options and try altering again.
options.Freeze()
self.assertRaises(commandline.cros_build_lib.AttributeFrozenError,
setattr, options, 'aaa', 'Arnold')
self.assertEquals('Arick', options.aaa)
def testOptionParser(self):
self._TestParser(self._CreateOptionParser(commandline.OptionParser))
def testFilterParser(self):
self._TestParser(self._CreateOptionParser(commandline.FilteringParser))
def testArgumentParser(self):
self._TestParser(self._CreateArgumentParser(commandline.ArgumentParser))
class ScriptWrapperMainTest(cros_test_lib.MockTestCase):
"""Test the behavior of the ScriptWrapperMain function."""
def setUp(self):
self.PatchObject(sys, 'exit')
# pylint: disable=W0613
@staticmethod
def _DummyChrootTarget(args):
raise commandline.ChrootRequiredError()
DUMMY_CHROOT_TARGET_ARGS = ['cmd', 'arg1', 'arg2']
@staticmethod
def _DummyChrootTargetArgs(args):
args = ScriptWrapperMainTest.DUMMY_CHROOT_TARGET_ARGS
raise commandline.ChrootRequiredError(args)
def testRestartInChroot(self):
rc = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
rc.SetDefaultCmdResult()
ret = lambda x: ScriptWrapperMainTest._DummyChrootTarget
commandline.ScriptWrapperMain(ret)
rc.assertCommandContains(enter_chroot=True)
rc.assertCommandContains(self.DUMMY_CHROOT_TARGET_ARGS, expected=False)
def testRestartInChrootArgs(self):
rc = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
rc.SetDefaultCmdResult()
ret = lambda x: ScriptWrapperMainTest._DummyChrootTargetArgs
commandline.ScriptWrapperMain(ret)
rc.assertCommandContains(self.DUMMY_CHROOT_TARGET_ARGS, enter_chroot=True)
if __name__ == '__main__':
cros_test_lib.main()
| bsd-3-clause | 2,313,851,678,898,716,700 | 33.63388 | 78 | 0.655491 | false | 3.563677 | true | false | false |
whaleygeek/pyenergenie | setup.py | 1 | 1105 | import os
from setuptools import setup
here = lambda *a: os.path.join(os.path.dirname(__file__), *a)
# read the long description
with open(here('README.md'), 'r') as readme_file:
long_description = readme_file.read()
# read the requirements.txt
with open(here('requirements.txt'), 'r') as requirements_file:
requirements = [x.strip() for x in requirements_file.readlines()]
setup(
name='pyenergenie',
version='0.0.1',
description='A python interface to the Energenie line of products',
long_description=long_description,
author='whaleygeek',
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6'
],
packages=['pyenergenie', 'pyenergenie.energenie'],
package_dir={
'pyenergenie': 'src/',
'pyenergenie.energenie': 'src/energenie/'
},
install_requires=requirements,
package_data={
'pyenergenie': [
'energenie/drv/*'
]
},
entry_points={
'console_scripts': [
'pyenergenie=pyenergenie.setup_tool:main'
]
}
)
| mit | -1,834,221,109,040,907,500 | 25.95122 | 71 | 0.61991 | false | 3.46395 | false | false | false |
jcrocholl/nxdom | languages/utils.py | 1 | 1313 | VOWELS = 'aeiouy'
TRIPLE_SCORES = {}
def word_groups(word):
"""
>>> list(word_groups('weight'))
['w', 'ei', 'ght']
>>> list(word_groups('Eightyfive'))
['ei', 'ght', 'y', 'f', 'i', 'v', 'e']
"""
index = 0
word = word.lower()
while index < len(word):
# Find some consonants.
start = index
while index < len(word) and word[index] not in VOWELS:
index += 1
if index > start:
yield word[start:index]
# Find some vowels.
start = index
while index < len(word) and word[index] in VOWELS:
index += 1
if index > start:
yield word[start:index]
def word_triples(word):
"""
>>> list(word_triples('weight'))
['^wei', 'weight', 'eight$']
>>> list(word_triples('eightyfive'))
['^eight', 'eighty', 'ghtyf', 'yfi', 'fiv', 'ive', 've$']
"""
groups = ['^'] + list(word_groups(word)) + ['$']
for start in range(len(groups) - 2):
yield ''.join(groups[start:start + 3])
def word_score(word, triple_scores):
triples = list(word_triples(word))
result = 0.0
for triple in triples:
result += triple_scores.get(triple, 0.0)
return result / len(triples)
if __name__ == '__main__':
import doctest
doctest.testmod()
| mit | -5,848,024,604,033,005,000 | 24.745098 | 62 | 0.523991 | false | 3.179177 | false | false | false |
abhinavmoudgil95/root | interpreter/llvm/src/tools/clang/tools/scan-build-py/libscanbuild/shell.py | 53 | 2157 | # -*- coding: utf-8 -*-
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
""" This module implements basic shell escaping/unescaping methods. """
import re
import shlex
__all__ = ['encode', 'decode']
def encode(command):
""" Takes a command as list and returns a string. """
def needs_quote(word):
""" Returns true if arguments needs to be protected by quotes.
Previous implementation was shlex.split method, but that's not good
for this job. Currently is running through the string with a basic
state checking. """
reserved = {' ', '$', '%', '&', '(', ')', '[', ']', '{', '}', '*', '|',
'<', '>', '@', '?', '!'}
state = 0
for current in word:
if state == 0 and current in reserved:
return True
elif state == 0 and current == '\\':
state = 1
elif state == 1 and current in reserved | {'\\'}:
state = 0
elif state == 0 and current == '"':
state = 2
elif state == 2 and current == '"':
state = 0
elif state == 0 and current == "'":
state = 3
elif state == 3 and current == "'":
state = 0
return state != 0
def escape(word):
""" Do protect argument if that's needed. """
table = {'\\': '\\\\', '"': '\\"'}
escaped = ''.join([table.get(c, c) for c in word])
return '"' + escaped + '"' if needs_quote(word) else escaped
return " ".join([escape(arg) for arg in command])
def decode(string):
""" Takes a command string and returns as a list. """
def unescape(arg):
""" Gets rid of the escaping characters. """
if len(arg) >= 2 and arg[0] == arg[-1] and arg[0] == '"':
arg = arg[1:-1]
return re.sub(r'\\(["\\])', r'\1', arg)
return re.sub(r'\\([\\ $%&\(\)\[\]\{\}\*|<>@?!])', r'\1', arg)
return [unescape(arg) for arg in shlex.split(string)]
| lgpl-2.1 | 4,758,712,418,784,437,000 | 31.681818 | 79 | 0.491887 | false | 4.164093 | false | false | false |
yujikato/DIRAC | src/DIRAC/ResourceStatusSystem/Utilities/RSSCacheNoThread.py | 2 | 14414 | """ Cache
This module provides a generic Cache extended to be used on RSS, RSSCache.
This cache features a lazy update method. It will only be updated if it is
empty and there is a new query. If not, it will remain in its previous state.
However, Cache class internal cache: DictCache sets a validity to its entries.
After that, the cache is empty.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__RCSID__ = '$Id$'
import six
import itertools
import random
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.Core.Utilities.DictCache import DictCache
from DIRAC.Core.Utilities.LockRing import LockRing
from DIRAC.ResourceStatusSystem.Utilities.RssConfiguration import RssConfiguration
class Cache(object):
"""
Cache basic class.
WARNING: None of its methods is thread safe. Acquire / Release lock when
using them !
"""
def __init__(self, lifeTime, updateFunc):
"""
Constructor
:Parameters:
**lifeTime** - `int`
Lifetime of the elements in the cache ( seconds ! )
**updateFunc** - `function`
This function MUST return a S_OK | S_ERROR object. In the case of the first,
its value must be a dictionary.
"""
# We set a 20% of the lifetime randomly, so that if we have thousands of jobs
# starting at the same time, all the caches will not end at the same time.
randomLifeTimeBias = 0.2 * random.random()
self.log = gLogger.getSubLogger(self.__class__.__name__)
self.__lifeTime = int(lifeTime * (1 + randomLifeTimeBias))
self.__updateFunc = updateFunc
# The records returned from the cache must be valid at least 30 seconds.
self.__validSeconds = 30
# Cache
self.__cache = DictCache()
self.__cacheLock = LockRing()
self.__cacheLock.getLock(self.__class__.__name__)
# internal cache object getter
def cacheKeys(self):
"""
Cache keys getter
:returns: list with keys in the cache valid for at least twice the validity period of the element
"""
# Here we need to have more than the validity period because of the logic of the matching:
# * get all the keys with validity T
# * for each key K, get the element K with validity T
# This logic fails for elements just at the limit of the required time
return self.__cache.getKeys(validSeconds=self.__validSeconds * 2)
# acquire / release Locks
def acquireLock(self):
"""
Acquires Cache lock
"""
self.__cacheLock.acquire(self.__class__.__name__)
def releaseLock(self):
"""
Releases Cache lock
"""
self.__cacheLock.release(self.__class__.__name__)
# Cache getters
def get(self, cacheKeys):
"""
Gets values for cacheKeys given, if all are found ( present on the cache and
valid ), returns S_OK with the results. If any is not neither present not
valid, returns S_ERROR.
:Parameters:
**cacheKeys** - `list`
list of keys to be extracted from the cache
:return: S_OK | S_ERROR
"""
result = {}
for cacheKey in cacheKeys:
cacheRow = self.__cache.get(cacheKey, validSeconds=self.__validSeconds)
if not cacheRow:
return S_ERROR('Cannot get %s' % str(cacheKey))
result.update({cacheKey: cacheRow})
return S_OK(result)
def check(self, cacheKeys, vO):
"""
Modified get() method. Attempts to find keys with a vO value appended or 'all'
value appended. The cacheKeys passed in are 'flattened' cache keys (no vO)
Gets values for cacheKeys given, if all are found ( present on the cache and
valid ), returns S_OK with the results. If any is not neither present not
valid, returns S_ERROR.
:Parameters:
**cacheKeys** - `list`
list of keys to be extracted from the cache
:return: S_OK | S_ERROR
"""
result = {}
for cacheKey in cacheKeys:
longCacheKey = cacheKey + ('all',)
cacheRow = self.__cache.get(longCacheKey, validSeconds=self.__validSeconds)
if not cacheRow:
longCacheKey = cacheKey + (vO,)
cacheRow = self.__cache.get(longCacheKey, validSeconds=self.__validSeconds)
if not cacheRow:
return S_ERROR('Cannot get extended %s (neither for VO = %s nor for "all" Vos)' % (str(cacheKey), vO))
result.update({longCacheKey: cacheRow})
return S_OK(result)
# Cache refreshers
def refreshCache(self):
"""
Purges the cache and gets fresh data from the update function.
:return: S_OK | S_ERROR. If the first, its content is the new cache.
"""
self.log.verbose('refreshing...')
self.__cache.purgeAll()
newCache = self.__updateFunc()
if not newCache['OK']:
self.log.error(newCache['Message'])
return newCache
newCache = self.__updateCache(newCache['Value'])
self.log.verbose('refreshed')
return newCache
# Private methods
def __updateCache(self, newCache):
"""
Given the new cache dictionary, updates the internal cache with it. It sets
a duration to the entries of <self.__lifeTime> seconds.
:Parameters:
**newCache** - `dict`
dictionary containing a new cache
:return: dictionary. It is newCache argument.
"""
for cacheKey, cacheValue in newCache.items():
self.__cache.add(cacheKey, self.__lifeTime, value=cacheValue)
# We are assuming nothing will fail while inserting in the cache. There is
# no apparent reason to suspect from that piece of code.
return S_OK(newCache)
class RSSCache(Cache):
"""
The RSSCache is an extension of Cache in which the cache keys are pairs of the
form: ( elementName, statusType ).
When instantiating one object of RSSCache, we need to specify the RSS elementType
it applies, e.g. : StorageElement, CE, Queue, ...
It provides a unique public method `match` which is thread safe. All other
methods are not !!
"""
def __init__(self, lifeTime, updateFunc):
"""
Constructor
:Parameters:
**elementType** - `string`
RSS elementType, e.g.: StorageElement, CE, Queue... note that one RSSCache
can only hold elements of a single elementType to avoid issues while doing
the Cartesian product.
**lifeTime** - `int`
Lifetime of the elements in the cache ( seconds ! )
**updateFunc** - `function`
This function MUST return a S_OK | S_ERROR object. In the case of the first,
its value must follow the dict format: ( key, value ) being key ( elementName,
statusType ) and value status.
"""
super(RSSCache, self).__init__(lifeTime, updateFunc)
self.allStatusTypes = RssConfiguration().getConfigStatusType()
def match(self, elementNames, elementType, statusTypes, vO):
"""
In first instance, if the cache is invalid, it will request a new one from
the server.
It make the Cartesian product of elementNames x statusTypes to generate a key
set that will be compared against the cache set. If the first is included in
the second, we have a positive match and a dictionary will be returned. Otherwise,
we have a cache miss.
However, arguments ( elementNames or statusTypes ) can have a None value. If
that is the case, they are considered wildcards.
:Parameters:
**elementNames** - [ None, `string`, `list` ]
name(s) of the elements to be matched
**elementType** - [ `string` ]
type of the elements to be matched
**statusTypes** - [ None, `string`, `list` ]
name(s) of the statusTypes to be matched
:return: S_OK() || S_ERROR()
"""
self.acquireLock()
try:
return self._match(elementNames, elementType, statusTypes, vO)
finally:
# Release lock, no matter what !
self.releaseLock()
# Private methods: NOT THREAD SAFE !!
def _match(self, elementNames, elementType, statusTypes, vO):
"""
Method doing the actual work. It must be wrapped around locks to ensure no
disaster happens.
:Parameters:
**elementNames** - [ None, `string`, `list` ]
name(s) of the elements to be matched
**elementType** - [ `string` ]
type of the elements to be matched
**statusTypes** - [ None, `string`, `list` ]
name(s) of the statusTypes to be matched
:return: S_OK() || S_ERROR()
"""
# Gets the entire cache or a new one if it is empty / invalid
validCache = self.__getValidCache()
if not validCache['OK']:
return validCache
validCache = validCache['Value']
# Gets matched keys
try:
matchKeys = self.__match(validCache, elementNames, elementType, statusTypes, vO)
except IndexError:
return S_ERROR("RSS cache empty?")
if not matchKeys['OK']:
return matchKeys
# Gets objects for matched keys. It will return S_ERROR if the cache value
# has expired in between. It has 30 valid seconds, which means something was
# extremely slow above.
if matchKeys['CheckVO']:
cacheMatches = self.check(matchKeys['Value'], vO) # add an appropriate VO to the keys
else:
cacheMatches = self.get(matchKeys['Value'])
if not cacheMatches['OK']:
return cacheMatches
cacheMatches = cacheMatches['Value']
if not cacheMatches:
return S_ERROR('Empty cache for: %s, %s' % (elementNames, elementType))
# We undo the key into <elementName> and <statusType>
try:
cacheMatchesDict = self.__getDictFromCacheMatches(cacheMatches)
except ValueError:
cacheMatchesDict = cacheMatches
return S_OK(cacheMatchesDict)
def __getValidCache(self):
"""
Obtains the keys on the cache which are valid. If any, returns the complete
valid dictionary. If the list is empty, we assume the cache is invalid or
not filled, so we issue a cache refresh and return its data.
:return: { ( elementName, statusType, vO ) : status, ... }
"""
cacheKeys = self.cacheKeys()
# If cache is empty, we refresh it.
if not cacheKeys:
cache = self.refreshCache()
else:
cache = self.get(cacheKeys)
return cache
def __match(self, validCache, elementNames, elementType, statusTypes, vO):
"""
Obtains all keys on the cache ( should not be empty ! ).
Gets the sets ( no duplicates ) of elementNames and statusTypes. There is a
slight distinction. A priori we cannot know which are all the elementNames.
So, if elementNames is None, we will consider all elementNames in the cacheKeys.
However, if statusTypes is None, we will get the standard list from the
ResourceStatus configuration in the CS.
If the cartesian product of our sets is on the cacheKeys set, we have a
positive match.
:Parameters:
**validCache** - `dict`
cache dictionary
**elementNames** - [ None, `string`, `list` ]
name(s) of the elements to be matched
**elementType** - [ `string` ]
type of the elements to be matched
**statusTypes** - [ None, `string`, `list` ]
name(s) of the statusTypes to be matched
:return: S_OK() with a Vo check marker || S_ERROR()
"""
cacheKeys = list(validCache)
# flatten the cache. From our VO perspective we are only want to keep:
# 1) keys with vO tuple element equal to our vO,
# 2) keys with vO tuple element equal to 'all', but only if no element described in 1) exists.
# a resource key is set to have 3 elements to allow a comparison with a cartesian product.
checkVo = False
if len(cacheKeys[0]) == 4: # resource
checkVo = True
flattenedCache = {(key[0], key[1], key[2]):
value for key, value in validCache.items() if key[3] == "all"}
flattenedCache.update({(key[0], key[1], key[2]): value
for key, value in validCache.items() if key[3] == vO})
validCache = flattenedCache
else: # site, not VO specific in SiteStatus, eventually to be upgraded there to include the VO
pass
if isinstance(elementNames, six.string_types):
elementNames = [elementNames]
elif elementNames is None:
if isinstance(cacheKeys[0], (tuple, list)):
elementNames = [cacheKey[0] for cacheKey in cacheKeys]
else:
elementNames = cacheKeys
# Remove duplicates, makes Cartesian product faster
elementNamesSet = set(elementNames)
if isinstance(elementType, six.string_types):
if not elementType or elementType == 'Site':
elementType = []
else:
elementType = [elementType]
elif elementType is None:
elementType = [cacheKey[1] for cacheKey in cacheKeys]
# Remove duplicates, makes Cartesian product faster
elementTypeSet = set(elementType)
if isinstance(statusTypes, six.string_types):
if not statusTypes:
statusTypes = []
else:
statusTypes = [statusTypes]
elif statusTypes is None:
statusTypes = self.allStatusTypes
# Remove duplicates, makes Cartesian product faster
statusTypesSet = set(statusTypes)
if not elementTypeSet and not statusTypesSet:
cartesianProduct = elementNamesSet
else:
cartesianProduct = set(itertools.product(elementNamesSet, elementTypeSet, statusTypesSet))
# Some users find funny sending empty lists, which will make the cartesianProduct
# be []. Problem: [] is always subset, no matter what !
if not cartesianProduct:
self.log.warn('Empty cartesian product')
return S_ERROR('Empty cartesian product')
notInCache = list(cartesianProduct.difference(set(validCache)))
if notInCache:
self.log.warn('Cache misses: %s' % notInCache)
return S_ERROR('Cache misses: %s' % notInCache)
result = S_OK(cartesianProduct)
result['CheckVO'] = checkVo
return result
@staticmethod
def __getDictFromCacheMatches(cacheMatches):
"""
Formats the cacheMatches to a format expected by the RSS helpers clients.
:Parameters:
**cacheMatches** - `dict`
cache dictionary of the form { ( elementName, elementType, statusType, vO ) : status, ... }
:return: dict of the form { elementName : { statusType: status, ... }, ... }
"""
result = {}
for cacheKey, cacheValue in cacheMatches.items():
elementName, _elementType, statusType, vO = cacheKey
result.setdefault(elementName, {})[statusType] = cacheValue
return result
| gpl-3.0 | -3,477,067,965,024,068,600 | 31.61086 | 112 | 0.661024 | false | 4.000555 | false | false | false |
duaneloh/Dragonfly | utils/py_src/reademc.py | 1 | 8773 | '''Module containing EMCReader class to parse .emc files'''
from __future__ import print_function
import sys
import numpy as np
try:
import h5py
HDF5_MODE = True
except ImportError:
HDF5_MODE = False
class EMCReader(object):
"""EMC file reader
Provides access to assembled or raw frames given a list of .emc filenames
__init__ arguments:
photons_list - Path or sequence of paths to emc files. If single file, pass as [fname]
geom_list - Single or list of Detector objects.
geom_mapping (list, optional) - Mapping from photons_list to geom_list
If there is only one entry in geom_list, all emc files are assumed to use \
that detector. Otherwise, a mapping must be provided. \
The mapping is a list of the same length as photons_list with entries \
giving indices in geom_list for the corresponding emc file.
Methods:
get_frame(num, raw=False, sparse=False, zoomed=False, sym=False)
get_powder(raw=False, zoomed=False, sym=False)
"""
def __init__(self, photons_list, geom_list, geom_mapping=None):
if hasattr(photons_list, 'strip') or not hasattr(photons_list, '__getitem__'):
photons_list = [photons_list]
if not hasattr(geom_list, '__getitem__'):
geom_list = [geom_list]
self.flist = [{'fname': fname} for fname in photons_list]
num_files = len(photons_list)
self.multiple_geom = False
if len(geom_list) == 1:
for i in range(num_files):
self.flist[i]['geom'] = geom_list[0]
else:
try:
for i in range(num_files):
self.flist[i]['geom'] = geom_list[geom_mapping[i]]
self.multiple_geom = True
except TypeError:
print('Need mapping if multiple geometries are provided')
raise
self._parse_headers()
@staticmethod
def _test_h5file(fname):
if HDF5_MODE:
return h5py.is_hdf5(fname)
if os.path.splitext(fname)[1] == '.h5':
fheader = np.fromfile(fname, '=c', count=8)
if fheader == chr(137)+'HDF\r\n'+chr(26)+'\n':
return True
return False
def _parse_headers(self):
for i, pdict in enumerate(self.flist):
pdict['is_hdf5'] = self._test_h5file(pdict['fname'])
if pdict['is_hdf5'] and not HDF5_MODE:
print('Unable to parse HDF5 dataset')
raise IOError
elif not pdict['is_hdf5']:
self._parse_binaryheader(pdict)
else:
self._parse_h5header(pdict)
if pdict['num_pix'] != len(pdict['geom'].x):
sys.stderr.write(
'Warning: num_pix for %s is different (%d vs %d)\n' %
(pdict['fname'], pdict['num_pix'], len(pdict['geom'].x)))
if i > 0:
pdict['num_data'] += self.flist[i-1]['num_data']
self.num_frames = self.flist[-1]['num_data']
@staticmethod
def _parse_binaryheader(pdict):
with open(pdict['fname'], 'rb') as fptr:
num_data = np.fromfile(fptr, dtype='i4', count=1)[0]
pdict['num_pix'] = np.fromfile(fptr, dtype='i4', count=1)[0]
fptr.seek(1024, 0)
ones = np.fromfile(fptr, dtype='i4', count=num_data)
multi = np.fromfile(fptr, dtype='i4', count=num_data)
pdict['num_data'] = num_data
pdict['ones_accum'] = np.cumsum(ones)
pdict['multi_accum'] = np.cumsum(multi)
@staticmethod
def _parse_h5header(pdict):
with h5py.File(pdict['fname'], 'r') as fptr:
pdict['num_data'] = fptr['place_ones'].shape[0]
pdict['num_pix'] = np.prod(fptr['num_pix'][()])
def get_frame(self, num, **kwargs):
"""Get particular frame from file list
The method determines the file with that frame number and reads it
Arguments:
num (int) - Frame number
Keyword arguments:
raw (bool) - Whether to get unassembled frame (False)
sparse (bool) - Whether to return sparse data (False)
zoomed (bool) - Whether to zoom assembled frame to non-masked region (False)
sym (bool) - Whether to centro-symmetrize assembled frame (False)
Returns:
Assembled or unassembled frame as a dense array
"""
file_num = np.where(num < np.array([pdict['num_data'] for pdict in self.flist]))[0][0]
#file_num = np.where(num < self.num_data_list)[0][0]
if file_num == 0:
frame_num = num
else:
frame_num = num - self.flist[file_num-1]['num_data']
return self._read_frame(file_num, frame_num, **kwargs)
def get_powder(self, raw=False, **kwargs):
"""Get virtual powder sum of all frames in file list
Keyword arguments:
raw (bool) - Whether to return unassembled powder sum (False)
zoomed (bool) - Whether to zoom assembled frame to non-masked region (False)
sym (bool) - Whether to centro-symmetrize assembled frame (False)
Returns:
Assembled or unassembled powder sum as a dense array
"""
if self.multiple_geom:
raise ValueError('Powder sum unreasonable with multiple geometries')
powder = np.zeros((self.flist[0]['num_pix'],), dtype='f8')
for pdict in self.flist:
if pdict['is_hdf5']:
with h5py.File(pdict['fname'], 'r') as fptr:
place_ones = np.hstack(fptr['place_ones'][:])
place_multi = np.hstack(fptr['place_multi'][:])
count_multi = np.hstack(fptr['count_multi'][:])
else:
with open(pdict['fname'], 'rb') as fptr:
num_data = np.fromfile(fptr, dtype='i4', count=1)[0]
fptr.seek(1024, 0)
ones = np.fromfile(fptr, dtype='i4', count=num_data)
multi = np.fromfile(fptr, dtype='i4', count=num_data)
place_ones = np.fromfile(fptr, dtype='i4', count=ones.sum())
place_multi = np.fromfile(fptr, dtype='i4', count=multi.sum())
count_multi = np.fromfile(fptr, dtype='i4', count=multi.sum())
np.add.at(powder, place_ones, 1)
np.add.at(powder, place_multi, count_multi)
#powder *= self.flist[0]['geom'].unassembled_mask
if not raw:
powder = self.flist[0]['geom'].assemble_frame(powder, **kwargs)
return powder
def _read_frame(self, file_num, frame_num, raw=False, sparse=False, **kwargs):
pdict = self.flist[file_num]
if pdict['is_hdf5']:
po, pm, cm = self._read_h5frame(pdict, frame_num) # pylint: disable=invalid-name
else:
po, pm, cm = self._read_binaryframe(pdict, frame_num) # pylint: disable=invalid-name
if sparse:
return po, pm, cm
frame = np.zeros(pdict['num_pix'], dtype='i4')
np.add.at(frame, po, 1)
np.add.at(frame, pm, cm)
#frame *= pdict['geom'].unassembled_mask
if not raw:
frame = pdict['geom'].assemble_frame(frame, **kwargs)
return frame
@staticmethod
def _read_h5frame(pdict, frame_num):
with h5py.File(pdict['fname'], 'r') as fptr:
place_ones = fptr['place_ones'][frame_num]
place_multi = fptr['place_multi'][frame_num]
count_multi = fptr['count_multi'][frame_num]
return place_ones, place_multi, count_multi
@staticmethod
def _read_binaryframe(pdict, frame_num):
with open(pdict['fname'], 'rb') as fptr:
num_data = np.fromfile(fptr, dtype='i4', count=1)[0]
accum = [pdict['ones_accum'], pdict['multi_accum']]
offset = [0, 0]
size = [0, 0]
if frame_num == 0:
size = [accum[0][frame_num], accum[1][frame_num]]
else:
offset = [accum[0][frame_num-1], accum[1][frame_num-1]]
size[0] = accum[0][frame_num] - accum[0][frame_num - 1]
size[1] = accum[1][frame_num] - accum[1][frame_num - 1]
fptr.seek(1024 + num_data*8 + offset[0]*4, 0)
place_ones = np.fromfile(fptr, dtype='i4', count=size[0])
fptr.seek(1024 + num_data*8 + accum[0][-1]*4 + offset[1]*4, 0)
place_multi = np.fromfile(fptr, dtype='i4', count=size[1])
fptr.seek(1024 + num_data*8 + accum[0][-1]*4 + accum[1][-1]*4 + offset[1]*4, 0)
count_multi = np.fromfile(fptr, dtype='i4', count=size[1])
return place_ones, place_multi, count_multi
| gpl-3.0 | -5,664,773,635,451,429,000 | 40.578199 | 96 | 0.556138 | false | 3.479968 | false | false | false |
AakashRaina/radpress | radpress/templatetags/radpress_tags.py | 2 | 2295 | from django import template
from django.conf import settings
from django.core.urlresolvers import reverse, NoReverseMatch
from radpress import settings as radpress_settings, get_version
from radpress.compat import get_user_model
from radpress.models import Article
from radpress.readers import get_markup_choices, get_reader, trim
register = template.Library()
@register.inclusion_tag('radpress/tags/datetime.html')
def radpress_datetime(datetime):
"""
Time format that compatible with html5.
Arguments:
- `datetime`: datetime.datetime
"""
context = {'datetime': datetime}
return context
@register.inclusion_tag('radpress/tags/widget_latest_posts.html')
def radpress_widget_latest_posts():
"""
Receives latest posts.
"""
limit = radpress_settings.LIMIT
context = {
'object_list': Article.objects.all_published()[:limit]
}
return context
@register.simple_tag
def radpress_static_url(path):
"""
Receives Radpress static urls.
"""
version = get_version()
return '%sradpress/%s?ver=%s' % (settings.STATIC_URL, path, version)
@register.assignment_tag
def radpress_get_markup_descriptions():
"""
Provides markup options. It used for adding descriptions in admin and
zen mode.
:return: list
"""
result = []
for markup in get_markup_choices():
markup_name = markup[0]
result.append({
'name': markup_name,
'title': markup[1],
'description': trim(get_reader(markup=markup_name).description)
})
return result
@register.filter
def radpress_full_name(user):
if not isinstance(user, get_user_model()):
full_name = ''
else:
full_name = user.get_full_name()
if not full_name:
full_name = user.username
return full_name
@register.assignment_tag(takes_context=True)
def radpress_get_url(context, obj):
return '%s%s' % (context['DOMAIN'], obj.get_absolute_url())
@register.assignment_tag
def radpress_zen_mode_url(entry):
try:
if not isinstance(entry, Article):
url = reverse('radpress-zen-mode')
else:
url = reverse('radpress-zen-mode-update', args=[entry.pk])
except NoReverseMatch:
url = ''
return url | mit | 718,730,108,097,464,800 | 23.425532 | 75 | 0.656209 | false | 3.818636 | false | false | false |
DiegoCorrea/ouvidoMusical | apps/data/userSongRecommendation/migrations/0001_initial.py | 1 | 1313 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2018-01-28 22:56
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('users', '0001_initial'),
('songs', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='UserSongRecommendations',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('similarity', models.IntegerField(default=0)),
('iLike', models.BooleanField(default=False)),
('score', models.IntegerField(blank=True, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('song', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='songs.Song')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='users.User')),
],
),
migrations.AlterUniqueTogether(
name='usersongrecommendations',
unique_together=set([('user', 'song')]),
),
]
| mit | -318,944,554,887,429,250 | 35.472222 | 114 | 0.587966 | false | 4.155063 | false | false | false |
TheImagingSource/tiscamera | examples/python/03-live-stream.py | 1 | 2095 | #!/usr/bin/env python3
# Copyright 2019 The Imaging Source Europe GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This example will show you how to start a simply live stream
#
import time
import sys
import gi
gi.require_version("Tcam", "0.1")
gi.require_version("Gst", "1.0")
from gi.repository import Tcam, Gst
def main():
Gst.init(sys.argv) # init gstreamer
# this line sets the gstreamer default logging level
# it can be removed in normal applications
# gstreamer logging can contain verry useful information
# when debugging your application
# see https://gstreamer.freedesktop.org/documentation/tutorials/basic/debugging-tools.html
# for further details
Gst.debug_set_default_threshold(Gst.DebugLevel.WARNING)
serial = None
pipeline = Gst.parse_launch("tcambin name=bin "
" ! videoconvert"
" ! ximagesink")
# retrieve the bin element from the pipeline
camera = pipeline.get_by_name("bin")
# serial is defined, thus make the source open that device
if serial is not None:
camera.set_property("serial", serial)
pipeline.set_state(Gst.State.PLAYING)
print("Press Ctrl-C to stop.")
# We wait with this thread until a
# KeyboardInterrupt in the form of a Ctrl-C
# arrives. This will cause the pipline
# to be set to state NULL
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
pass
finally:
pipeline.set_state(Gst.State.NULL)
if __name__ == "__main__":
main()
| apache-2.0 | 9,140,207,896,390,596,000 | 27.310811 | 94 | 0.680668 | false | 3.901304 | false | false | false |
ESRF-BCU/emotion | emotion/controllers/PI_E517.py | 1 | 15654 | import time
from emotion import Controller
from emotion import log as elog
from emotion.controller import add_axis_method
from emotion.axis import AxisState
import pi_gcs
from emotion.comm import tcp
from emotion import event
"""
Emotion controller for ethernet PI E517 piezo controller.
Cyril Guilloud ESRF BLISS
Thu 13 Feb 2014 15:51:41
"""
class PI_E517(Controller):
def __init__(self, name, config, axes, encoders):
Controller.__init__(self, name, config, axes, encoders)
self.host = self.config.get("host")
def move_done_event_received(self, state):
if self.auto_gate_enabled:
if state is True:
elog.info("PI_E517.py : movement is finished")
self._set_gate(0)
elog.debug("mvt finished, gate set to 0")
else:
elog.info("PI_E517.py : movement is starting")
self._set_gate(1)
elog.debug("mvt started, gate set to 1")
def initialize(self):
"""
Opens a single socket for all 3 axes.
"""
self.sock = tcp.Socket(self.host, 50000)
def finalize(self):
"""
Closes the controller socket.
"""
self.sock.close()
def initialize_axis(self, axis):
"""
- Reads specific config
- Adds specific methods
- Switches piezo to ONLINE mode so that axis motion can be caused
by move commands.
Args:
- <axis>
Returns:
- None
"""
axis.channel = axis.config.get("channel", int)
axis.chan_letter = axis.config.get("chan_letter")
add_axis_method(axis, self.get_id, types_info=(None, str))
'''Closed loop'''
add_axis_method(axis, self.open_loop, types_info=(None, None))
add_axis_method(axis, self.close_loop, types_info=(None, None))
'''DCO'''
add_axis_method(axis, self.activate_dco, types_info=(None, None))
add_axis_method(axis, self.desactivate_dco, types_info=(None, None))
'''GATE'''
# to enable automatic gating (ex: zap)
add_axis_method(axis, self.enable_auto_gate, types_info=(bool, None))
# to trig gate from external device (ex: HPZ with setpoint controller)
add_axis_method(axis, self.set_gate, types_info=(bool, None))
if axis.channel == 1:
self.gate_axis = axis
self.ctrl_axis = axis
# NO automatic gating by default.
self.auto_gate_enabled = False
'''end of move event'''
event.connect(axis, "move_done", self.move_done_event_received)
# Enables the closed-loop.
# self.sock.write("SVO 1 1\n")
self.send_no_ans(axis, "ONL %d 1" % axis.channel)
# VCO for velocity control mode ?
# self.send_no_ans(axis, "VCO %d 1" % axis.channel)
# Updates cached value of closed loop status.
self.closed_loop = self._get_closed_loop_status(axis)
def initialize_encoder(self, encoder):
pass
def read_position(self, axis, last_read={"t": time.time(), "pos": [None, None, None]}):
"""
Returns position's setpoint.
Setpoint position is MOV? of VOL? or SVA? depending on closed-loop
mode is ON or OFF.
Args:
- <axis> : emotion axis.
Returns:
- <position> : float : piezo position in Micro-meters or in Volts.
"""
cache = last_read
if time.time() - cache["t"] < 0.005:
# print "en cache not meas %f" % time.time()
_pos = cache["pos"]
else:
# print "PAS encache not meas %f" % time.time()
_pos = self._get_target_pos(axis)
cache["pos"] = _pos
cache["t"] = time.time()
elog.debug("position setpoint read : %r" % _pos)
return _pos[axis.channel - 1]
def read_encoder(self, encoder, last_read={"t": time.time(), "pos": [None, None, None]}):
cache = last_read
if time.time() - cache["t"] < 0.005:
# print "encache meas %f" % time.time()
_pos = cache["pos"]
else:
# print "PAS encache meas %f" % time.time()
_pos = self._get_pos(axis)
cache["pos"] = _pos
cache["t"] = time.time()
elog.debug("position measured read : %r" % _pos)
return _pos[axis.channel - 1]
def read_velocity(self, axis):
"""
Args:
- <axis> : Emotion axis object.
Returns:
- <velocity> : float
"""
_ans = self.send(axis, "VEL? %s" % axis.chan_letter)
# _ans should looks like "A=+0012.0000"
# removes 'X=' prefix
_velocity = float(_ans[2:])
elog.debug("read_velocity : %g " % _velocity)
return _velocity
def set_velocity(self, axis, new_velocity):
self.send_no_ans(axis, "VEL %s %f" %
(axis.chan_letter, new_velocity))
elog.debug("velocity set : %g" % new_velocity)
return self.read_velocity(axis)
def read_acceleration(self, axis):
"""Returns axis current acceleration in steps/sec2"""
return 1
def set_acceleration(self, axis, new_acc):
"""Set axis acceleration given in steps/sec2"""
pass
def state(self, axis):
# if self._get_closed_loop_status(axis):
if self.closed_loop:
# elog.debug("CLOSED-LOOP is active")
if self._get_on_target_status(axis):
return AxisState("READY")
else:
return AxisState("MOVING")
else:
elog.debug("CLOSED-LOOP is not active")
return AxisState("READY")
def prepare_move(self, motion):
"""
- TODO for multiple move...
Args:
- <motion> : Emotion motion object.
Returns:
-
Raises:
- ?
"""
pass
def start_one(self, motion):
"""
- Sends 'MOV' or 'SVA' depending on closed loop mode.
Args:
- <motion> : Emotion motion object.
Returns:
- None
"""
if self.closed_loop:
# Command in position.
self.send_no_ans(motion.axis, "MOV %s %g" %
(motion.axis.chan_letter, motion.target_pos))
else:
# Command in voltage.
self.send_no_ans(motion.axis, "SVA %s %g" %
(motion.axis.chan_letter, motion.target_pos))
def stop(self, axis):
"""
* HLT -> stop smoothly
* STP -> stop asap
* 24 -> stop asap
* to check : copy of current position into target position ???
"""
self.send_no_ans(axis, "HLT %s" % axis.chan_letter)
# self.sock.write("STP\n")
"""
Communication
"""
def raw_write(self, cmd):
self.send_no_ans(self.ctrl_axis, cmd)
# def raw_write_read(self, cmd):
# return self.send(self.ctrl_axis, cmd)
def raw_write_read(self, cmd):
return self.send(self.ctrl_axis, cmd)
def send(self, axis, cmd):
"""
- Adds the 'newline' terminator character : "\\\\n"
- Sends command <cmd> to the PI E517 controller.
- Channel is defined in <cmd>.
- <axis> is passed for debugging purposes.
- Returns answer from controller.
Args:
- <axis> : passed for debugging purposes.
- <cmd> : GCS command to send to controller (Channel is already mentionned in <cmd>).
Returns:
- 1-line answer received from the controller (without "\\\\n" terminator).
"""
_cmd = cmd + "\n"
_t0 = time.time()
# PC
_ans = "toto"
_ans = self.sock.write_readline(_cmd)
_duration = time.time() - _t0
if _duration > 0.005:
elog.info("PI_E517.py : Received %r from Send %s (duration : %g ms) " % (_ans, _cmd, _duration * 1000))
return _ans
def send_no_ans(self, axis, cmd):
"""
- Adds the 'newline' terminator character : "\\\\n"
- Sends command <cmd> to the PI E517 controller.
- Channel is already defined in <cmd>.
- <axis> is passed for debugging purposes.
- Used for answer-less commands, thus returns nothing.
"""
_cmd = cmd + "\n"
self.sock.write(_cmd)
"""
E517 specific
"""
def _get_pos(self, axis):
"""
Args:
- <axis> :
Returns:
- <position> Returns real position (POS? command) read by capacitive sensor.
Raises:
?
"""
# _ans = self.send(axis, "POS? %s" % axis.chan_letter)
# _pos = float(_ans[2:])
_ans = self.sock.write_readlines("POS?\n", 3)
_pos = map(float, [x[2:] for x in _ans])
return _pos
def _get_target_pos(self, axis):
"""
Returns last target position (MOV?/SVA?/VOL? command) (setpoint value).
- SVA? : Query the commanded output voltage (voltage setpoint).
- VOL? : Query the current output voltage (real voltage).
- MOV? : Returns the last valid commanded target position.
Args:
- <>
Returns:
-
Raises:
?
"""
if self.closed_loop:
# _ans = self.send(axis, "MOV? %s" % axis.chan_letter)
_ans = self.sock.write_readlines("MOV?\n", 3)
else:
# _ans = self.send(axis, "SVA? %s" % axis.chan_letter)
_ans = self.sock.write_readlines("SVA?\n", 3)
# _pos = float(_ans[2:])
_pos = map(float, [x[2:] for x in _ans])
return _pos
def open_loop(self, axis):
self.send_no_ans(axis, "SVO %s 0" % axis.chan_letter)
def close_loop(self, axis):
self.send_no_ans(axis, "SVO %s 1" % axis.chan_letter)
"""
DCO : Drift Compensation Offset.
"""
def activate_dco(self, axis):
self.send_no_ans(axis, "DCO %s 1" % axis.chan_letter)
def desactivate_dco(self, axis):
self.send_no_ans(axis, "DCO %s 0" % axis.chan_letter)
"""
Voltage commands
"""
def _get_voltage(self, axis):
"""
Returns Voltage Of Output Signal Channel (VOL? command)
"""
_ans = self.send(axis, "VOL? %s" % axis.channel)
_vol = float(_ans.split("=+")[-1])
return _vol
def _get_closed_loop_status(self, axis):
"""
Returns Closed loop status (Servo state) (SVO? command)
-> True/False
"""
_ans = self.send(axis, "SVO? %s" % axis.chan_letter)
_status = float(_ans[2:])
if _status == 1:
return True
else:
return False
def _get_on_target_status(self, axis):
"""
Returns << On Target >> status (ONT? command).
True/False
"""
_ans = self.send(axis, "ONT? %s" % axis.chan_letter)
_status = float(_ans[2:])
if _status == 1:
return True
else:
return False
def enable_auto_gate(self, axis, value):
if value:
# auto gating
self.auto_gate_enabled = True
self.gate_axis = axis
elog.info("PI_E517.py : enable_gate " + value + "fro axis.channel " + axis.channel)
else:
self.auto_gate_enabled = False
# To keep external gating possible.
self.gate_axis = 1
def set_gate(self, axis, state):
"""
Method to wrap '_set_gate' to be exported to device server.
        <axis> parameter is required.
"""
self.gate_axis = axis
self._set_gate(state)
def _set_gate(self, state):
"""
CTO [<TrigOutID> <CTOPam> <Value>]+
- <TrigOutID> : {1, 2, 3}
- <CTOPam> :
- 3: trigger mode
- <Value> : {0, 2, 3, 4}
- 0 : position distance
- 2 : OnTarget
- 3 : MinMaxThreshold <----
- 4 : Wave Generator
- 5: min threshold
- 6: max threshold
- 7: polarity : 0 / 1
ex : CTO 1 3 3 1 5 0 1 6 100 1 7 1
Args:
- <state> : True / False
Returns:
-
Raises:
?
"""
if state:
_cmd = "CTO %d 3 3 1 5 0 1 6 100 1 7 1" % (self.gate_axis.channel)
else:
_cmd = "CTO %d 3 3 1 5 0 1 6 100 1 7 0" % (self.gate_axis.channel)
self.send_no_ans(self.gate_axis, _cmd)
def get_id(self, axis):
"""
Returns Identification information (\*IDN? command).
"""
return self.send(axis, "*IDN?")
def get_error(self, axis):
_error_number = self.send(axis, "ERR?")
_error_str = pi_gcs.get_error_str(_error_number)
return (_error_number, _error_str)
def get_info(self, axis):
"""
        Returns a set of useful information about the controller.
Helpful to tune the device.
Args:
<axis> : emotion axis
Returns:
None
Raises:
?
"""
_infos = [
("Identifier ", "*IDN?"),
("Serial Number ", "SSN?"),
("Com level ", "CCL?"),
("GCS Syntax version ", "CSV?"),
("Last error code ", "ERR?"),
("Real Position ", "POS? %s" % axis.chan_letter),
("Position low limit ", "NLM? %s" % axis.chan_letter),
("Position high limit ", "PLM? %s" % axis.chan_letter),
("Closed loop status ", "SVO? %s" % axis.chan_letter),
("Voltage output high limit ", "VMA? %s" % axis.channel),
("Voltage output low limit ", "VMI? %s" % axis.channel),
("Output Voltage ", "VOL? %s" % axis.channel),
("Setpoint Position ", "MOV? %s" % axis.chan_letter),
("Drift compensation Offset ", "DCO? %s" % axis.chan_letter),
("Online ", "ONL? %s" % axis.channel),
("On target ", "ONT? %s" % axis.chan_letter),
("ADC Value of input signal ", "TAD? %s" % axis.channel),
("Input Signal Position value", "TSP? %s" % axis.channel),
("Velocity control mode ", "VCO? %s" % axis.chan_letter),
("Velocity ", "VEL? %s" % axis.chan_letter),
("Osensor ", "SPA? %s 0x02000200" %
axis.channel),
("Ksensor ", "SPA? %s 0x02000300" %
axis.channel),
("Digital filter type ", "SPA? %s 0x05000000" %
axis.channel),
("Digital filter Bandwidth ", "SPA? %s 0x05000001" %
axis.channel),
("Digital filter order ", "SPA? %s 0x05000002" %
axis.channel),
]
_txt = ""
for i in _infos:
_txt = _txt + " %s %s\n" % \
(i[0], self.send(axis, i[1]))
_txt = _txt + " %s \n%s\n" % \
("Communication parameters",
"\n".join(self.sock.write_readlines("IFC?\n", 6)))
_txt = _txt + " %s \n%s\n" % \
("Firmware version",
"\n".join(self.sock.write_readlines("VER?\n", 3)))
return _txt
| gpl-2.0 | -8,448,193,370,937,811,000 | 30.183267 | 115 | 0.499744 | false | 3.681562 | true | false | false |
Yepoleb/gogdb | gogdb/updater/dataextractors.py | 1 | 7686 | import dateutil.parser
import collections
import re
import gogdb.core.model as model
from gogdb.core.normalization import normalize_system
def parse_datetime(date_str):
if date_str is None:
return None
else:
return dateutil.parser.isoparse(date_str)
IMAGE_RE = re.compile(r"\w{64}")
def extract_imageid(image_url):
if image_url is None:
return None
m = IMAGE_RE.search(image_url)
if m is None:
return None
else:
return m.group(0)
def extract_properties_v0(prod, v0_cont):
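    """ Fill the product model from a GOG API v0 response: basic metadata, images,
        links, screenshots, videos, dlcs and the bonus/installer/langpack/patch downloads. """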
prod.id = v0_cont["id"]
prod.access = 1
prod.title = v0_cont["title"]
prod.type = v0_cont["game_type"]
prod.slug = v0_cont["slug"]
prod.cs_systems = []
for cs_name in ["windows", "osx", "linux"]:
if v0_cont["content_system_compatibility"][cs_name]:
prod.cs_systems.append(normalize_system(cs_name))
prod.cs_systems.sort(reverse=True)
prod.store_date = parse_datetime(v0_cont["release_date"])
prod.is_in_development = v0_cont["in_development"]["active"]
prod.is_pre_order = v0_cont["is_pre_order"]
prod.image_logo = extract_imageid(v0_cont["images"]["logo"])
prod.image_background = extract_imageid(v0_cont["images"]["background"])
prod.image_icon = extract_imageid(v0_cont["images"]["sidebarIcon"])
prod.link_forum = v0_cont["links"]["forum"]
prod.link_store = v0_cont["links"]["product_card"]
prod.link_support = v0_cont["links"]["support"]
prod.screenshots = [x["image_id"] for x in v0_cont.get("screenshots", [])]
prod.videos = [
model.Video(
video_url=v["video_url"],
thumbnail_url=v["thumbnail_url"],
provider=v["provider"]
) for v in v0_cont.get("videos", [])
]
if v0_cont["dlcs"]:
prod.dlcs = [x["id"] for x in v0_cont["dlcs"]["products"]]
prod.changelog = v0_cont["changelog"] or None
def parse_file(file_cont):
return model.File(
id = str(file_cont["id"]),
size = file_cont["size"],
downlink = file_cont["downlink"]
)
def parse_bonusdls(bonus_cont):
return [
model.BonusDownload(
id = str(dl["id"]),
name = dl["name"],
total_size = dl["total_size"],
bonus_type = dl["type"],
count = dl["count"],
files = [parse_file(dlfile) for dlfile in dl["files"]]
) for dl in bonus_cont
]
prod.dl_bonus = parse_bonusdls(v0_cont["downloads"]["bonus_content"])
def parse_softwaredls(software_cont):
return [
model.SoftwareDownload(
id = dl["id"],
name = dl["name"],
total_size = dl["total_size"],
os = normalize_system(dl["os"]),
language = model.Language(dl["language"], dl["language_full"]),
version = dl["version"],
files = [parse_file(dlfile) for dlfile in dl["files"]]
) for dl in software_cont
]
prod.dl_installer = parse_softwaredls(v0_cont["downloads"]["installers"])
prod.dl_langpack = parse_softwaredls(v0_cont["downloads"]["language_packs"])
prod.dl_patch = parse_softwaredls(v0_cont["downloads"]["patches"])
PRODID_RE = re.compile(r"games/(\d+)")
def extract_prodid(apiv2_url):
m = PRODID_RE.search(apiv2_url)
return int(m.group(1))
def extract_properties_v2(prod, v2_cont):
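    """ Fill the product model from a GOG API v2 response: features, localizations,
        tags, supported systems, related products, series and description. """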
v2_embed = v2_cont["_embedded"]
v2_links = v2_cont["_links"]
prod.features = [
model.Feature(
id=x["id"],
name=x["name"]
) for x in v2_embed["features"]
]
localizations_map = collections.defaultdict(lambda: model.Localization())
for loc in v2_embed["localizations"]:
loc_embed = loc["_embedded"]
localization = localizations_map[loc_embed["language"]["code"]]
localization.code = loc_embed["language"]["code"]
localization.name = loc_embed["language"]["name"]
if loc_embed["localizationScope"]["type"] == "text":
localization.text = True
elif loc_embed["localizationScope"]["type"] == "audio":
localization.audio = True
prod.localizations = list(localizations_map.values())
prod.tags = [
model.Tag(
id=x["id"],
level=x["level"],
name=x["name"],
slug=x["slug"]
) for x in v2_embed["tags"]
]
prod.comp_systems = [
normalize_system(support_entry["operatingSystem"]["name"])
for support_entry in v2_embed["supportedOperatingSystems"]
]
prod.comp_systems.sort(reverse=True)
prod.is_using_dosbox = v2_cont["isUsingDosBox"]
prod.developers = [x["name"] for x in v2_embed["developers"]]
prod.publisher = v2_embed["publisher"]["name"]
prod.copyright = v2_cont["copyrights"] or None
prod.global_date = parse_datetime(v2_embed["product"].get("globalReleaseDate"))
if "galaxyBackgroundImage" in v2_links:
prod.image_galaxy_background = extract_imageid(v2_links["galaxyBackgroundImage"]["href"])
prod.image_boxart = extract_imageid(v2_links["boxArtImage"]["href"])
prod.image_icon_square = extract_imageid(v2_links["iconSquare"]["href"])
prod.editions = [
model.Edition(
id=ed["id"],
name=ed["name"],
has_product_card=ed["hasProductCard"]
) for ed in v2_embed["editions"]
]
prod.includes_games = [
extract_prodid(link["href"])
for link in v2_links.get("includesGames", [])
]
prod.is_included_in = [
extract_prodid(link["href"])
for link in v2_links.get("isIncludedInGames", [])
]
prod.required_by = [
extract_prodid(link["href"])
for link in v2_links.get("isRequiredByGames", [])
]
prod.requires = [
extract_prodid(link["href"])
for link in v2_links.get("requiresGames", [])
]
if v2_embed["series"]:
prod.series = model.Series(
id=v2_embed["series"]["id"],
name=v2_embed["series"]["name"]
)
prod.description = v2_cont["description"]
META_ID_RE = re.compile(r"v2/meta/.{2}/.{2}/(\w+)")
def extract_metaid(meta_url):
m = META_ID_RE.search(meta_url)
if m is None:
return None
else:
return m.group(1)
def extract_builds(prod, build_cont, system):
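    """ Merge the builds from build_cont into prod.builds for the given system:
        existing builds (matched by id) are updated, new ones are appended and
        builds no longer present in the API response are left marked as unlisted. """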
for build in prod.builds:
# Mark all builds as unlisted to relist them later
if build.os == system:
build.listed = False
for build_item in build_cont["items"]:
build_id = int(build_item["build_id"])
# Find existing build based on id and set `build` to it
for existing_build in prod.builds:
if existing_build.id == build_id:
build = existing_build
break
else: # No existing build found
build = model.Build()
prod.builds.append(build)
build.id = build_id
build.product_id = int(build_item["product_id"])
build.os = build_item["os"]
build.branch = build_item["branch"]
build.version = build_item["version_name"] or None
build.tags = build_item["tags"]
build.public = build_item["public"]
build.date_published = parse_datetime(build_item["date_published"])
build.generation = build_item["generation"]
build.legacy_build_id = build_item.get("legacy_build_id")
build.meta_id = extract_metaid(build_item["link"])
build.link = build_item["link"]
build.listed = True
prod.builds.sort(key=lambda b: b.date_published)
| agpl-3.0 | 4,909,974,780,406,582,000 | 33.00885 | 97 | 0.583268 | false | 3.498407 | false | false | false |
jiady/htdb | crawler/crawler/offlineTask/myTask.py | 1 | 3854 | import abc
import redis
import time
class Task:
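    """ Base class for redis backed offline tasks: pending, finished and failed
        ids live in the 'task-pending/<name>', 'task-finish/<name>' and
        'task-fail/<name>' sets; subclasses implement getInitQueue and taskOperation. """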
redisTaskOngoing = "task-meta/ongoing"
rclient = redis.StrictRedis(host="localhost", port=6379, db=0)
task_sleep = 0
def __init__(self, name, logger, mail, retryFail=False, pipeline_key=[]):
self.taskName = name
self.mail = mail
self.pipeline_key = pipeline_key
self.retryFail = retryFail
self.rclient = redis.StrictRedis(host="localhost", port=6379, db=0)
self.redisTaskPopKey = "task-pending/" + name
self.redisTaskPushKey = "task-finish/" + name
self.redisTaskFailKey = "task-fail/" + name
self.logger = logger
self.task_sleep = 0
def demon(self, time_sleep=10):
while True:
try:
self.go()
except Exception, e:
self.mail.send_timed(600, self.taskName + " exception", str(e))
finally:
time.sleep(time_sleep)
def getInitQueueBySetNames(self, set_names):
ret = []
for name in set_names:
            ret.extend(self.rclient.smembers(name))
return ret
@abc.abstractmethod
def getInitQueue(self):
pass
@abc.abstractmethod
def taskOperation(self, hash):
pass
def init(self):
pass
def _singleTask(self, hash_id):
try:
r = self.taskOperation(hash_id)
if r is not None:
self.rclient.smove(self.redisTaskPopKey, self.redisTaskPushKey, hash_id)
if r:
for key in self.pipeline_key:
self.rclient.sadd(key, hash_id)
except Exception, e:
self.rclient.smove(self.redisTaskPopKey, self.redisTaskFailKey, hash_id)
self.logger.warning("not success on:" + hash_id + ":" + str(e))
def _initTaskQueue(self):
if not self.rclient.sismember(self.redisTaskOngoing, self.taskName):
list_hash = self.getInitQueue()
for hash_id in list_hash:
self.rclient.sadd(self.redisTaskPopKey, hash_id)
self.logger.info("add person to pending:" + hash_id)
self.logger.info("task added")
self.rclient.sadd(self.redisTaskOngoing, self.taskName)
else:
self.logger.info("task already exists, try to resume")
if self.retryFail:
list_hash = self.rclient.smembers(self.redisTaskFailKey)
for hash_id in list_hash:
self.rclient.smove(self.redisTaskFailKey, self.redisTaskPopKey, hash_id)
self.logger.info("add person to pending:" + hash_id)
self.logger.info("task added")
def _summary(self):
fail = self.rclient.scard(self.redisTaskFailKey)
succ = self.rclient.scard(self.redisTaskPushKey)
total = fail + succ
self.logger.info("task finished:%d, fail:%d", total, fail)
def _valid(self, hash_id):
try:
if self.filter_out(hash_id):
self.rclient.smove(self.redisTaskPopKey, self.redisTaskFailKey, hash_id)
self.logger.warning("filtered:" + hash_id + ":")
return False
return True
except Exception, e:
self.rclient.smove(self.redisTaskPopKey, self.redisTaskFailKey, hash_id)
self.logger.warning("not success on filter:" + hash_id + ":" + str(e))
return False
def filter_out(self, hash_id):
return False
def go(self):
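        # (re)build the pending set, then keep picking random ids from it and
        # processing them until it is empty; each id is moved to the finish or fail set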
self.init()
self._initTaskQueue()
while True:
hash_id = self.rclient.srandmember(self.redisTaskPopKey)
if hash_id is not None:
if self._valid(hash_id):
self._singleTask(hash_id)
else:
break
self._summary()
| mit | -146,764,696,304,721,820 | 34.357798 | 92 | 0.571354 | false | 3.778431 | false | false | false |
jjdmol/LOFAR | LCU/checkhardware/rtsm.py | 1 | 30018 | #!/usr/bin/python
check_version = '0714'
from threading import Thread
import sys
import traceback
import os
import numpy as np
import time
#import datetime
import logging
mainPath = r'/opt/stationtest'
mainDataPath = r'/localhome/stationtest'
observationsPath = r'/opt/lofar/var/run'
beamletPath = r'/localhome/data/Beamlets'
libPath = os.path.join(mainPath, 'lib')
sys.path.insert(0, libPath)
confPath = os.path.join(mainDataPath, 'config')
logPath = os.path.join(mainDataPath, 'log')
rtsmPath = os.path.join(mainDataPath, 'rtsm_data')
from general_lib import *
from lofar_lib import *
from search_lib import *
from data_lib import *
os.umask(001)
os.nice(15)
# make path if not exists
if not os.access(logPath, os.F_OK):
os.mkdir(logPath)
if not os.access(rtsmPath, os.F_OK):
os.mkdir(rtsmPath)
logger = None
def lbaMode(mode):
if mode in (1, 2, 3, 4):
return (True)
return (False)
def lbaLowMode(mode):
if mode in (1, 2):
return (True)
return (False)
def lbaHighMode(mode):
if mode in (3, 4):
return (True)
return (False)
def hbaMode(mode):
if mode in (5, 6, 7):
return (True)
return (False)
def checkStr(key):
checks = dict({'OSC':"Oscillation", 'HN':"High-noise", 'LN':"Low-noise", 'J':"Jitter", 'SN':"Summator-noise",\
'CR':"Cable-reflection", 'M':"Modem-failure", 'DOWN':"Antenna-fallen", 'SHIFT':"Shifted-band"})
return (checks.get(key, 'Unknown'))
def printHelp():
print "----------------------------------------------------------------------------"
print "Usage of arguments"
print
print "Set logging level, can be: debug|info|warning|error"
print "-ls=debug : print all information on screen, default=info"
print "-lf=info : print debug|warning|error information to log file, default=debug"
print
print "----------------------------------------------------------------------------"
def getArguments():
args = dict()
key = ''
value = '-'
for arg in sys.argv[1:]:
if arg[0] == '-':
opt = arg[1:].upper()
valpos = opt.find('=')
if valpos != -1:
key, value = opt.strip().split('=')
else:
key, value = opt, '-'
if key in ('H','LS','LF'):
if value != '-':
args[key] = value
else:
args[key] = '-'
else:
sys.exit("Unknown key %s" %(key))
return (args)
# get and unpack configuration file
class cConfiguration:
def __init__(self):
self.conf = dict()
full_filename = os.path.join(confPath, 'checkHardware.conf')
f = open(full_filename, 'r')
data = f.readlines()
f.close()
for line in data:
if line[0] in ('#','\n',' '):
continue
if line.find('#') > 0:
line = line[:line.find('#')]
try:
key, value = line.strip().split('=')
key = key.replace('_','-')
self.conf[key] = value
except:
print "Not a valid configuration setting: %s" %(line)
def getInt(self,key, default=0):
return (int(self.conf.get(key, str(default))))
def getFloat(self,key, default=0.0):
return (float(self.conf.get(key, str(default))))
def getStr(self,key):
return (self.conf.get(key, ''))
# setup default python logging system
# logstream for screen output
# filestream for program log file
def init_logging(args):
log_levels = {'DEBUG' : logging.DEBUG,
'INFO' : logging.INFO,
'WARNING': logging.WARNING,
'ERROR' : logging.ERROR}
try:
screen_log_level = args.get('LS', 'INFO')
file_log_level = args.get('LF', 'DEBUG')
except:
print "Not a legal log level, try again"
sys.exit(-1)
station = getHostName()
# create logger
_logger = logging.getLogger()
_logger.setLevel(logging.DEBUG)
# create file handler
filename = '%s_rtsm.log' %(getHostName())
full_filename = os.path.join(logPath, filename)
file_handler = logging.FileHandler(full_filename, mode='w')
formatter = logging.Formatter('%(asctime)s %(levelname)-8s %(message)s')
file_handler.setFormatter(formatter)
file_handler.setLevel(log_levels[file_log_level])
_logger.addHandler(file_handler)
if (len(_logger.handlers) == 1) and ('LS' in args):
# create console handler
stream_handler = logging.StreamHandler()
fmt = '%s %%(levelname)-8s %%(message)s' %(station)
formatter = logging.Formatter(fmt)
stream_handler.setFormatter(formatter)
stream_handler.setLevel(log_levels[screen_log_level])
_logger.addHandler(stream_handler)
return (_logger)
def getRcuMode(n_rcus):
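    """ Parse 'rspctl --rcu' output and return (rcumode, rcu_info); rcu_info maps
        each rcu number to (state, mode), rcumode is -1 when no single observing
        mode can be determined. """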
# RCU[ 0].control=0x10337a9c => ON, mode:3, delay=28, att=06
rcumode = -1
rcu_info = {}
answer = rspctl("--rcu")
if answer.count('mode:') == n_rcus:
for line in answer.splitlines():
if line.find('mode:') == -1:
continue
rcu = line[line.find('[')+1 : line.find(']')].strip()
state = line[line.find('=>')+2 : line.find(',')].strip()
mode = line[line.find('mode:')+5]
if rcu.isdigit() and state in ("OFF", "ON") and mode.isdigit():
rcu_info[int(rcu)] = (state, int(mode))
for mode in range(8):
mode_cnt = answer.count("mode:%d" %(mode))
if mode == 0:
if mode_cnt == n_rcus:
logger.debug("Not observing")
elif mode_cnt > (n_rcus / 3) and answer.count("mode:0") == (n_rcus - mode_cnt):
logger.debug("Now observing in rcumode %d" %(mode))
rcumode = mode
return (rcumode, rcu_info)
def getAntPol(rcumode, rcu):
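    """ Translate an rcu number into (antenna number, polarisation); in rcumode 1
        the polarisations are swapped and the antenna number is offset by 48. """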
pol_str = ('X','Y')
ant = rcu / 2
if rcumode == 1:
pol_str = ('Y','X')
ant += 48
pol = pol_str[rcu % 2]
return (ant, pol)
class CSV:
station = ""
obs_id = ""
filename = ""
rcu_mode = 0
record_timestamp = 0
@staticmethod
def setObsID(obs_id):
CSV.station = getHostName()
CSV.obs_id = obs_id
CSV.filename = "%s_%s_open.dat" %(CSV.station, CSV.obs_id)
CSV.rcu_mode = 0
CSV.rec_timestamp = 0
CSV.writeHeader()
return
@staticmethod
def setRcuMode(rcumode):
CSV.rcu_mode = rcumode
return
@staticmethod
def setRecordTimestamp(timestamp):
CSV.record_timestamp = timestamp
return
@staticmethod
def writeHeader():
full_filename = os.path.join(rtsmPath, CSV.filename)
# write only if new file
if not os.path.exists(full_filename):
f = open(full_filename, 'w')
f.write('# SPECTRA-INFO=rcu,rcumode,obs-id,check,startfreq,stopfreq,rec-timestamp\n')
f.write('#\n')
f.flush()
f.close()
return
@staticmethod
def writeSpectra(data, rcu, check):
#dumpTime = time.gmtime(CSV.record_timestamp)
#date_str = time.strftime("%Y%m%d", dumpTime)
full_filename = os.path.join(rtsmPath, CSV.filename)
logger.debug("start dumping data to %s" %(full_filename))
f = open(full_filename, 'a')
if CSV.rcu_mode in (1, 2, 3, 4):
freq = (0 , 100)
elif CSV.rcu_mode in (5,):
freq = (100, 200)
elif CSV.rcu_mode in (6,):
freq = (160, 240)
elif CSV.rcu_mode in (7,):
freq = (200, 300)
spectra_info = "SPECTRA-INFO=%d,%d,%s,%s,%d,%d,%f\n" %\
(rcu, CSV.rcu_mode, CSV.obs_id, check, freq[0], freq[1], CSV.record_timestamp)
mean_spectra = "MEAN-SPECTRA=["
for i in np.nan_to_num(data.getMeanSpectra(rcu%2)):
mean_spectra += "%3.1f " %(i)
mean_spectra += "]\n"
bad_spectra = "BAD-SPECTRA=["
for i in np.nan_to_num(data.getSpectra(rcu)):
bad_spectra += "%3.1f " %(i)
bad_spectra += "]\n\n"
f.write(spectra_info)
f.write(mean_spectra)
f.write(bad_spectra)
f.close()
return
@staticmethod
def writeInfo(start_time, stop_time, obsid_samples):
full_filename = os.path.join(rtsmPath, CSV.filename)
logger.debug("add obs_info to %s" %(full_filename))
f = open(full_filename, 'a')
f.write('# OBS-ID-INFO=obs_id,start_time,stop_time,obsid_samples\n')
f.write('OBS-ID-INFO=%s,%5.3f,%5.3f,%d\n\n' %(CSV.obs_id, start_time, stop_time, obsid_samples))
f.flush()
f.close()
return
@staticmethod
def closeFile():
full_filename = os.path.join(rtsmPath, CSV.filename)
filename_new = CSV.filename.replace('open','closed')
full_filename_new = os.path.join(rtsmPath, filename_new)
logger.debug("rename file from %s to %s" %(full_filename, full_filename_new))
os.rename(full_filename, full_filename_new)
CSV.obs_id = ""
CSV.filename = ""
return
def checkForOscillation(data, rcumode, error_list, delta):
logger.debug("start oscillation check")
for pol_nr, pol in enumerate(('X', 'Y')):
#test_data = data.getAll()[:,:1,:]
result = search_oscillation(data, pol, delta)
if len(result) > 1:
# get mean values from all rcu's (rcu = -1)
bin_nr, ref_max_sum, ref_n_peaks, ref_rcu_low = result[0]
#rcu, max_sum, n_peaks, rcu_low = sorted(result[1:], reverse=True)[0]
if len(result) == 2:
bin_nr, max_sum, n_peaks, rcu_low = result[1]
else:
ref_low = result[0][3]
max_low_rcu = (-1, -1)
max_sum_rcu = (-1, -1)
for i in result[1:]:
bin_nr, max_sum, n_peaks, rcu_low = i
if max_sum > max_sum_rcu[0]: max_sum_rcu = (max_sum, bin_nr)
if (rcu_low - ref_low) > max_low_rcu[0]: max_low_rcu = (rcu_low, bin_nr)
rcu_low, bin_nr = max_low_rcu
rcu = (bin_nr * 2) + pol_nr
ant, pol = getAntPol(rcumode, rcu)
if lbaMode(rcumode):
logger.info("Mode-%d RCU-%03d Ant-%03d %c Oscillation, sum=%3.1f(%3.1f) peaks=%d(%d) low=%3.1fdB(%3.1f) (=ref)" %\
(rcumode, rcu, ant, pol, max_sum, ref_max_sum, n_peaks, ref_n_peaks, rcu_low, ref_rcu_low))
if rcu not in error_list:
error_list.append(rcu)
CSV.writeSpectra(data, rcu, "OSC")
if hbaMode(rcumode):
if ((max_sum > 5000.0) or (n_peaks > 40)):
logger.info("Mode-%d RCU-%03d Tile-%02d %c Oscillation, sum=%3.1f(%3.1f) peaks=%d(%d) low=%3.1fdB(%3.1f) ref=()" %\
(rcumode, rcu, ant, pol, max_sum, ref_max_sum, n_peaks, ref_n_peaks, rcu_low, ref_rcu_low))
if rcu not in error_list:
error_list.append(rcu)
CSV.writeSpectra(data, rcu, "OSC")
return
def checkForNoise(data, rcumode, error_list, low_deviation, high_deviation, max_diff):
logger.debug("start noise check")
for pol_nr, pol in enumerate(('X', 'Y')):
low_noise, high_noise, jitter = search_noise(data, pol, low_deviation, high_deviation*1.5, max_diff)
for err in high_noise:
bin_nr, val, bad_secs, ref, diff = err
rcu = (bin_nr * 2) + pol_nr
ant, pol = getAntPol(rcumode, rcu)
if lbaMode(rcumode):
logger.info("Mode-%d RCU-%03d Ant-%03d %c High-noise, value=%3.1fdB bad=%d(%d) limit=%3.1fdB diff=%3.1fdB" %\
(rcumode, rcu, ant, pol, val, bad_secs, data.frames, ref, diff))
if rcu not in error_list:
error_list.append(rcu)
CSV.writeSpectra(data, rcu, "HN")
if hbaMode(rcumode):
logger.info("Mode-%d RCU-%03d Tile-%02d %c High-noise, value=%3.1fdB bad=%d(%d) limit=%3.1fdB diff=%3.1fdB" %\
(rcumode, rcu, ant, pol, val, bad_secs, data.frames, ref, diff))
if rcu not in error_list:
error_list.append(rcu)
CSV.writeSpectra(data, rcu, "HN")
for err in low_noise:
bin_nr, val, bad_secs, ref, diff = err
rcu = (bin_nr * 2) + pol_nr
ant, pol = getAntPol(rcumode, rcu)
if lbaMode(rcumode):
logger.info("Mode-%d RCU-%03d Ant-%03d %c Low-noise, value=%3.1fdB bad=%d(%d) limit=%3.1fdB diff=%3.1fdB" %\
(rcumode, rcu, ant, pol, val, bad_secs, data.frames, ref, diff))
if rcu not in error_list:
error_list.append(rcu)
CSV.writeSpectra(data, rcu, "LN")
if hbaMode(rcumode):
logger.info("Mode-%d RCU-%03d Tile-%02d %c Low-noise, value=%3.1fdB bad=%d(%d) limit=%3.1fdB diff=%3.1fdB" %\
(rcumode, rcu, ant, pol, val, bad_secs, data.frames, ref, diff))
if rcu not in error_list:
error_list.append(rcu)
CSV.writeSpectra(data, rcu, "LN")
return
def checkForSummatorNoise(data, rcumode, error_list):
logger.debug("start summator-noise check")
for pol_nr, pol in enumerate(('X', 'Y')):
# sn=SummatorNoise cr=CableReflections
sn, cr = search_summator_noise(data=data, pol=pol, min_peak=2.0)
for msg in sn:
bin_nr, peaks, max_peaks = msg
rcu = (bin_nr * 2) + pol_nr
tile, pol = getAntPol(rcumode, rcu)
logger.info("Mode-%d RCU-%03d Tile-%02d %c Summator-noise, cnt=%d peaks=%d" %\
(rcumode, rcu, tile, pol, peaks, max_peaks))
if rcu not in error_list:
error_list.append(rcu)
CSV.writeSpectra(data, rcu, "SN")
for msg in cr:
bin_nr, peaks, max_peaks = msg
rcu = (bin_nr * 2) + pol_nr
tile, pol = getAntPol(rcumode, rcu)
logger.info("Mode-%d RCU-%03d Tile-%02d %c Cable-reflections, cnt=%d peaks=%d" %\
(rcumode, rcu, tile, pol, peaks, max_peaks))
#if rcu not in error_list:
#error_list.append(rcu)
#CSV.writeSpectra(data, rcu, "CR")
return
def checkForDown(data, rcumode, error_list, subband):
logger.debug("start down check")
down, shifted = searchDown(data, subband)
for msg in down:
ant, max_x_sb, max_y_sb, mean_max_sb = msg
rcu = ant * 2
max_x_offset = max_x_sb - mean_max_sb
max_y_offset = max_y_sb - mean_max_sb
ant, pol = getAntPol(rcumode, rcu)
logger.info("Mode-%d RCU-%02d/%02d Ant-%02d Down, x-offset=%d y-offset=%d" %\
(rcumode, rcu, (rcu+1), ant, max_x_offset, max_y_offset))
if rcu not in error_list:
error_list.append(rcu)
error_list.append(rcu+1)
CSV.writeSpectra(data, rcu, "DOWN")
CSV.writeSpectra(data, rcu+1, "DOWN")
return
def checkForFlat(data, rcumode, error_list):
logger.debug("start flat check")
flat = searchFlat(data)
for msg in flat:
rcu, mean_val = msg
ant, pol = getAntPol(rcumode, rcu)
logger.info("Mode-%d RCU-%02d Ant-%02d Flat, value=%5.1fdB" %\
(rcumode, rcu, ant, mean_val))
if rcu not in error_list:
error_list.append(rcu)
CSV.writeSpectra(data, rcu, "FLAT")
return
def checkForShort(data, rcumode, error_list):
logger.debug("start short check")
short = searchShort(data)
for msg in short:
rcu, mean_val = msg
ant, pol = getAntPol(rcumode, rcu)
logger.info("Mode-%d RCU-%02d Ant-%02d Short, value=%5.1fdB" %\
(rcumode, rcu, ant, mean_val))
if rcu not in error_list:
error_list.append(rcu)
CSV.writeSpectra(data, rcu, "SHORT")
return
def closeAllOpenFiles():
files = os.listdir(rtsmPath)
for filename in files:
if filename.find('open') > -1:
full_filename = os.path.join(rtsmPath, filename)
filename_new = filename.replace('open','closed')
full_filename_new = os.path.join(rtsmPath, filename_new)
os.rename(full_filename, full_filename_new)
return
class cDayInfo:
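    """ Keeps per-day sample counts for every rcumode plus per-obsid info and
        persists them to <hostname>_<date>_dayinfo.dat in the rtsm data directory. """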
def __init__(self):
self.date = time.strftime("%Y%m%d", time.gmtime(time.time()))
self.filename = "%s_%s_dayinfo.dat" %(getHostName(), self.date)
self.samples = [0,0,0,0,0,0,0] # RCU-mode 1..7
self.obs_info = list()
self.deleteOldDays()
self.readFile()
def addSample(self, rcumode=-1):
date = time.strftime("%Y%m%d", time.gmtime(time.time()))
# new day reset data and set new filename
if self.date != date:
self.date = date
self.reset()
if rcumode in range(1,8,1):
self.samples[rcumode-1] += 1
self.writeFile()
def addObsInfo(self, obs_id, start_time, stop_time, rcu_mode, samples):
self.obs_info.append([obs_id, start_time, stop_time, rcu_mode, samples])
def reset(self):
self.filename = "%s_%s_dayinfo.dat" %(getHostName(), self.date)
self.samples = [0,0,0,0,0,0,0] # RCU-mode 1..7
self.obs_info = list()
self.deleteOldDays()
# after a restart, earlier data is imported
def readFile(self):
full_filename = os.path.join(rtsmPath, self.filename)
if os.path.exists(full_filename):
f = open(full_filename, 'r')
lines = f.readlines()
f.close()
for line in lines:
if len(line.strip()) == 0 or line.strip()[0] == '#':
continue
key,data = line.split('=')
if key == 'DAY-INFO':
self.samples = [int(i) for i in data.split(',')[1:]]
if key == 'OBSID-INFO':
d = data.split(',')
self.obs_info.append([d[0],float(d[1]),float(d[2]),int(d[3]), int(d[4])])
# rewrite file every sample
def writeFile(self):
full_filename = os.path.join(rtsmPath, self.filename)
f = open(full_filename, 'w')
f.write('#DAY-INFO date,M1,M2,M3,M4,M5,M6,M7\n')
f.write('DAY-INFO=%s,%d,%d,%d,%d,%d,%d,%d\n' %\
(self.date, self.samples[0], self.samples[1], self.samples[2], self.samples[3], self.samples[4], self.samples[5], self.samples[6]))
f.write('\n#OBS-ID-INFO obs_id, start_time, stop_time, rcu_mode, samples\n')
for i in self.obs_info:
f.write('OBS-ID-INFO=%s,%5.3f,%5.3f,%d,%d\n' %\
(i[0],i[1],i[2],i[3],i[4]))
f.close()
def deleteOldDays(self):
files = os.listdir(rtsmPath)
backup = True
for filename in files:
if filename.find('closed') != -1:
backup = False
if backup == True:
for filename in files:
if filename.find('dayinfo') != -1:
if filename.split('.')[0].split('_')[1] != self.date:
full_filename = os.path.join(rtsmPath, filename)
os.remove(full_filename)
def getObsId():
#obs_start_str = ""
#obs_stop_str = ""
#obs_start_time = 0.0
#obs_stop_time = 0.0
obsids = ""
answer = sendCmd('swlevel')
if answer.find("ObsID") > -1:
s1 = answer.find("ObsID:")+6
s2 = answer.find("]")
obsids = answer[s1:s2].strip().split()
return (obsids)
def getObsIdInfo(obsid):
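    """ Read the Observation<obsid> file from the observations directory and
        return (obsid, start_time, stop_time) with the times as unix timestamps. """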
filename = "Observation%s" %(obsid.strip())
fullfilename = os.path.join(observationsPath, filename)
f = open(fullfilename, 'r')
obsinfo = f.read()
f.close()
m1 = obsinfo.find("Observation.startTime")
m2 = obsinfo.find("\n", m1)
obs_start_str = obsinfo[m1:m2].split("=")[1].strip()
obs_start_time = time.mktime(time.strptime(obs_start_str, "%Y-%m-%d %H:%M:%S"))
m1 = obsinfo.find("Observation.stopTime",m2)
m2 = obsinfo.find("\n", m1)
obs_stop_str = obsinfo[m1:m2].split("=")[1].strip()
obs_stop_time = time.mktime(time.strptime(obs_stop_str, "%Y-%m-%d %H:%M:%S"))
logger.debug("obsid %s %s .. %s" %(obsid, obs_start_str, obs_stop_str))
return(obsid, obs_start_time, obs_stop_time)
class RecordBeamletStatistics(Thread):
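    """ Background thread that records beamlet statistics with rspctl for the
        duration of an observation and triggers plot generation afterwards. """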
def __init__(self):
Thread.__init__(self)
self.running = False
self.reset()
def reset(self):
self.dump_dir = ''
self.obsid = ''
self.duration = 0
def set_obsid(self, obsid):
self.dump_dir = os.path.join(beamletPath, obsid)
try:
os.mkdir(self.dump_dir)
except:
pass
self.obsid = obsid
def set_duration(self, duration):
self.duration = duration
def is_running(self):
return self.running
def kill_recording(self):
if self.running:
logger.debug("kill recording beamlet statistics")
sendCmd(cmd='pkill', args='rspctl')
logger.debug("recording killed")
#self.running = False
#self.make_plots()
def make_plots(self):
if self.obsid:
try:
response = sendCmd(cmd='/home/fallows/inspect_bsts.bash', args=self.obsid)
                logger.debug('response "inspect_bsts.bash" = {%s}' % response)
            except:
                logger.debug('exception while running "inspect_bsts.bash"')
self.reset()
def run(self):
if self.duration:
self.running = True
logger.debug("start recording beamlet statistics for %d seconds" % self.duration)
rspctl('--statistics=beamlet --duration=%d --integration=1 --directory=%s' % (self.duration, self.dump_dir))
logger.debug("recording done")
self.make_plots()
self.running = False
def main():
global logger
obs_id = ""
active_obs_id = ""
rcumode = 0
#station = getHostName()
DI = cDayInfo()
args = getArguments()
if args.has_key('H'):
printHelp()
sys.exit()
logger = init_logging(args)
init_lofar_lib()
init_data_lib()
conf = cConfiguration()
#StID = getHostName()
logger.info('== Start rtsm (Real Time Station Monitor) ==')
removeAllDataFiles()
# Read in RemoteStation.conf
ID, nRSP, nTBB, nLBL, nLBH, nHBA, HBA_SPLIT = readStationConfig()
n_rcus = nRSP * 8
data = cRCUdata(n_rcus)
obs_start_time = 0
obs_stop_time = 0
obsid_samples = 0
beamlet_recording = RecordBeamletStatistics()
while True:
try:
# get active obsid from swlevel
obsids = getObsId()
time_now = time.time()
# stop if no more obsids or observation is stoped
if obs_stop_time > 0.0:
if active_obs_id not in obsids or len(obsids) == 0 or time_now > obs_stop_time:
logger.debug("save obs_id %s" %(obs_id))
DI.addObsInfo(obs_id, obs_start_time, obs_stop_time, rcumode, obsid_samples)
DI.writeFile()
CSV.writeInfo(obs_start_time, obs_stop_time, obsid_samples)
CSV.closeFile()
active_obs_id = ""
obs_start_time = 0.0
obs_stop_time = 0.0
# if still running kill recording
if beamlet_recording:
if beamlet_recording.is_running():
beamlet_recording.kill_recording()
beamlet_recording = 0
# if no active observation get obs info if obsid available
if active_obs_id == "":
# if still running kill recording
if beamlet_recording:
if beamlet_recording.is_running():
beamlet_recording.kill_recording()
beamlet_recording = 0
for id in obsids:
obsid, start, stop = getObsIdInfo(id)
if time_now >= (start - 60.0) and (time_now + 15) < stop:
active_obs_id = obsid
obs_start_time = start
obs_stop_time = stop
break
if time_now < obs_start_time:
logger.debug("waiting %d seconds for start of observation" %(int(obs_start_time - time_now)))
time.sleep((obs_start_time - time_now) + 1.0)
# start recording beamlets
if not beamlet_recording:
if obs_start_time > 0.0 and time.time() >= obs_start_time:
duration = obs_stop_time - time.time() - 10
if duration > 2:
beamlet_recording = RecordBeamletStatistics()
beamlet_recording.set_obsid(active_obs_id)
beamlet_recording.set_duration(duration)
beamlet_recording.start()
check_start = time.time()
# if new obs_id save data and reset settings
if obs_id != active_obs_id:
# start new file and set new obsid
obs_id = active_obs_id
obsid_samples = 0
CSV.setObsID(obs_id)
# it takes about 11 seconds to record data, for safety use 15
if (time.time() + 15.0) < obs_stop_time:
# observing, so check mode now
rcumode, rcu_info = getRcuMode(n_rcus)
if rcumode <= 0:
continue
active_rcus = []
for rcu in rcu_info:
state, mode = rcu_info[rcu]
if state == 'ON':
active_rcus.append(rcu)
data.setActiveRcus(active_rcus)
rec_timestamp = time.time()+3.0
data.record(rec_time=1, read=True, slow=True)
#data.fetch()
CSV.setRcuMode(rcumode)
CSV.setRecordTimestamp(rec_timestamp)
DI.addSample(rcumode)
obsid_samples += 1
logger.debug("do tests")
mask = extractSelectStr(conf.getStr('mask-rcumode-%d' %(rcumode)))
data.setMask(mask)
if len(mask) > 0:
logger.debug("mask=%s" %(str(mask)))
error_list = []
# do LBA tests
if lbaMode(rcumode):
checkForDown(data, rcumode, error_list,
conf.getInt('lbh-test-sb',301))
checkForShort(data, rcumode, error_list)
checkForFlat(data, rcumode, error_list)
checkForOscillation(data, rcumode, error_list, 6.0)
checkForNoise(data, rcumode, error_list,
conf.getFloat('lba-noise-min-deviation', -3.0),
conf.getFloat('lba-noise-max-deviation', 2.5),
conf.getFloat('lba-noise-max-difference', 1.5))
# do HBA tests
if hbaMode(rcumode):
checkForOscillation(data, rcumode, error_list, 9.0)
checkForSummatorNoise(data, rcumode, error_list)
checkForNoise(data, rcumode, error_list,
conf.getFloat('hba-noise-min-deviation', -3.0),
conf.getFloat('hba-noise-max-deviation', 2.5),
conf.getFloat('hba-noise-max-difference', 2.0))
else:
closeAllOpenFiles()
if active_obs_id == "":
# if not observing check every 30 seconds for observation start
sleeptime = 30.0
logger.debug("no observation, sleep %1.0f seconds" %(sleeptime))
else:
# if observing do check every 60 seconds
check_stop = time.time()
sleeptime = 60.0 - (check_stop - check_start)
logger.debug("sleep %1.0f seconds till next check" %(sleeptime))
while sleeptime > 0.0:
wait = min(1.0, sleeptime)
sleeptime -= wait
time.sleep(wait)
except KeyboardInterrupt:
logger.info("stopped by user")
sys.exit()
except:
logger.error('Caught %s', str(sys.exc_info()[0]))
logger.error(str(sys.exc_info()[1]))
logger.error('TRACEBACK:\n%s', traceback.format_exc())
logger.error('Aborting NOW')
sys.exit(0)
# do test and write result files to log directory
log_dir = conf.getStr('log-dir-local')
if os.path.exists(log_dir):
logger.info("write result data")
# write result
else:
logger.warn("not a valid log directory")
logger.info("Test ready.")
# if still running kill recording
if beamlet_recording:
if beamlet_recording.is_running():
beamlet_recording.kill_recording()
beamlet_recording = 0
# delete files from data directory
removeAllDataFiles()
sys.exit(0)
if __name__ == '__main__':
main()
| gpl-3.0 | -7,804,538,025,338,516,000 | 35.652015 | 146 | 0.523486 | false | 3.430237 | false | false | false |
UManPychron/pychron | pychron/dashboard/tasks/server/task.py | 2 | 1814 | # ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
from pyface.tasks.task_layout import TaskLayout, PaneItem
from traits.api import Instance
# ============= standard library imports ========================
# ============= local library imports ==========================
from pychron.dashboard.tasks.server.panes import DashboardDevicePane, DashboardCentralPane
from pychron.dashboard.server import DashboardServer
from pychron.envisage.tasks.base_task import BaseTask
class DashboardServerTask(BaseTask):
name = 'Dashboard Server'
server = Instance(DashboardServer)
def activated(self):
self.server.activate()
def create_central_pane(self):
return DashboardCentralPane(model=self.server)
def create_dock_panes(self):
panes = [DashboardDevicePane(model=self.server)]
return panes
def _default_layout_default(self):
return TaskLayout(left=PaneItem('pychron.dashboard.devices'))
# ============= EOF =============================================
| apache-2.0 | -4,264,338,825,760,189,000 | 39.311111 | 90 | 0.613561 | false | 4.523691 | false | false | false |
jbornschein/mca-genmodel | preproc/preproc-dog.py | 1 | 4641 | #!/usr/bin/env python
#
# Author: Jorg Bornschein <[email protected])
# Lincense: Academic Free License (AFL) v3.0
#
from __future__ import division
import sys
sys.path.insert(0, "lib/")
import numpy as np
import tables
from optparse import OptionParser
from scipy.signal import convolve2d
from pulp.utils.autotable import AutoTable
from pulp.utils.datalog import dlog
import pulp.utils.parallel as parallel
#from viz import *
def DoG(sigma_pos, sigma_neg, size):
""" Difference of gaussians kernel of (size, size)-shape.
The kernel is constructed to be mean free and to have a peak
amplitude of 1.
"""
s2 = size // 2
gy, gx = np.ogrid[ -s2:size-s2 , -s2:size-s2 ]
G1 = np.exp( -(gx*gx+gy*gy) / (2.*sigma_pos**2) ) / (2*np.pi*sigma_pos**2)
G2 = np.exp( -(gx*gx+gy*gy) / (2.*sigma_neg**2) ) / (2*np.pi*sigma_neg**2)
G2 = G2 / G2.sum()*G1.sum() # make DC free
G = G1-G2 # combine positive and negative Gaussians
    G = G / G.max() # normalize peak to 1.
return G
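# quick sanity check (illustrative): the returned kernel is mean free with peak 1
#   G = DoG(1., 3., 9); assert abs(G.sum()) < 1e-6 and G.max() == 1.0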
#=============================================================================
if __name__ == "__main__":
parser = OptionParser(usage="Usage: %prog [options] <patches.h5>")
parser.add_option("--mf", dest="mf", action="store_true",
help="make each patch individually mean-free")
parser.add_option("--norm", dest="norm", action="store_true",
help="normalize each patch to [-1 .. 1]")
parser.add_option("--varnorm", dest="varnorm", action="store_true",
help="normalize each patch to variance 1")
parser.add_option("-n", "--num-patches", type="int", dest="num_patches", default=None,
help="number of patches to generate")
options, args = parser.parse_args()
if len(args) != 1:
parser.print_help()
exit(1)
# Open input file
in_fname = args[0]
in_h5 = tables.openFile(in_fname, "r")
in_patches = in_h5.root.patches
in_oversized = in_h5.root.oversized
# Some asserts in the input data
assert in_patches.shape[0] == in_oversized.shape[0] # number of patches
assert in_patches.shape[1] == in_patches.shape[2] # sqare patches
assert in_oversized.shape[1] == in_oversized.shape[2] # square oversized
# Number of patches to extract
N_patches = in_patches.shape[0]
if options.num_patches is not None:
N_patches = min(N_patches, options.num_patches)
# Size of the patches
size = in_patches.shape[1]
oversize = in_oversized.shape[1]
# Output file name
out_fname = "patches-%d-dog" % size
if options.mf:
out_fname += "-mf"
if options.norm:
out_fname += "-norm"
if options.varnorm:
out_fname += "-varnorm"
#
print "Input file: %s" % in_fname
print "Output file: %s" % out_fname
print "# of patches: %d" % N_patches
print "Patch size : %d x %d" % (size, size)
# Create output file
tbl_out = AutoTable(out_fname+".h5")
    # Crop bounds that center the (size x size) patch inside the oversized patch
left = (oversize // 2)-(size //2)
right = left + size
#============================================================
# Start to do some real work
batch_size = 1000
dog = DoG(1., 3., 9)
for n in xrange(0, N_patches):
if n % batch_size == 0:
dlog.progress("Preprocessing...", n/N_patches)
P = in_oversized[n,:,:]
P_ = convolve2d(P, dog, 'same')
P_ = P_[left:right, left:right]
# Normalize and mean-free
if options.mf:
P_ -= P_.mean()
if options.norm:
P_max = max(P_.max(), -P_.min())
P_ /= (P_max+1e-5)
if options.varnorm:
P_var = np.var(P_)
P_ /= (np.sqrt(P_var)+1e-5)
tbl_out.append("patches", P_)
in_h5.close()
tbl_out.close()
exit(0)
#============================================================
    # Save debug-output (note: unreachable, kept after the exit(0) above)
zoom = 6
grid = U.transpose().reshape( (D, size, size) )
img = tiled_gfs(grid, sym_cm=False, global_cm=True)
img = img.resize( (zoom*img.size[0], zoom*img.size[1]) )
img.save(out_fname+"-components.png")
grid = P[:100,:,:]
img = tiled_gfs(grid, sym_cm=False, global_cm=False)
img = img.resize( (zoom*img.size[0], zoom*img.size[1]) )
img.save(out_fname+"-orig.png")
grid = P_[:100,:,:]
img = tiled_gfs(grid, sym_cm=True, global_cm=False)
img = img.resize( (zoom*img.size[0], zoom*img.size[1]) )
img.save(out_fname+"-patches.png")
| agpl-3.0 | 2,671,833,618,587,717,000 | 29.94 | 90 | 0.545357 | false | 3.213989 | false | false | false |
dataversioncontrol/dvc | dvc/system.py | 1 | 8636 | from __future__ import unicode_literals
from dvc.utils.compat import str, open
import os
import errno
class System(object):
@staticmethod
def is_unix():
return os.name != "nt"
@staticmethod
def hardlink(source, link_name):
import ctypes
from dvc.exceptions import DvcException
if System.is_unix():
try:
os.link(source, link_name)
return
except Exception as exc:
raise DvcException("link", cause=exc)
CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
CreateHardLink.argtypes = [
ctypes.c_wchar_p,
ctypes.c_wchar_p,
ctypes.c_void_p,
]
CreateHardLink.restype = ctypes.wintypes.BOOL
res = CreateHardLink(link_name, source, None)
if res == 0:
raise DvcException("CreateHardLinkW", cause=ctypes.WinError())
@staticmethod
def symlink(source, link_name):
import ctypes
from dvc.exceptions import DvcException
if System.is_unix():
try:
os.symlink(source, link_name)
return
except Exception as exc:
msg = "failed to symlink '{}' -> '{}': {}"
raise DvcException(msg.format(source, link_name, str(exc)))
flags = 0
if source is not None and os.path.isdir(source):
flags = 1
func = ctypes.windll.kernel32.CreateSymbolicLinkW
func.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32)
func.restype = ctypes.c_ubyte
if func(link_name, source, flags) == 0:
raise DvcException("CreateSymbolicLinkW", cause=ctypes.WinError())
@staticmethod
def _reflink_darwin(src, dst):
import ctypes
import dvc.logger as logger
LIBC = "libc.dylib"
LIBC_FALLBACK = "/usr/lib/libSystem.dylib"
try:
clib = ctypes.CDLL(LIBC)
except OSError as exc:
logger.debug(
"unable to access '{}' (errno '{}'). "
"Falling back to '{}'.".format(LIBC, exc.errno, LIBC_FALLBACK)
)
if exc.errno != errno.ENOENT:
raise
# NOTE: trying to bypass System Integrity Protection (SIP)
clib = ctypes.CDLL(LIBC_FALLBACK)
if not hasattr(clib, "clonefile"):
return -1
clonefile = clib.clonefile
clonefile.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int]
clonefile.restype = ctypes.c_int
return clonefile(
ctypes.c_char_p(src.encode("utf-8")),
ctypes.c_char_p(dst.encode("utf-8")),
ctypes.c_int(0),
)
@staticmethod
def _reflink_windows(src, dst):
return -1
@staticmethod
def _reflink_linux(src, dst):
import os
import fcntl
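        # FICLONE ioctl request code (linux/fs.h): asks the filesystem to share
        # the source file's data blocks with the destination (copy-on-write clone)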
FICLONE = 0x40049409
s = open(src, "r")
d = open(dst, "w+")
try:
ret = fcntl.ioctl(d.fileno(), FICLONE, s.fileno())
except IOError:
s.close()
d.close()
os.unlink(dst)
raise
s.close()
d.close()
if ret != 0:
os.unlink(dst)
return ret
@staticmethod
def reflink(source, link_name):
import platform
from dvc.exceptions import DvcException
system = platform.system()
try:
if system == "Windows":
ret = System._reflink_windows(source, link_name)
elif system == "Darwin":
ret = System._reflink_darwin(source, link_name)
elif system == "Linux":
ret = System._reflink_linux(source, link_name)
else:
ret = -1
except IOError:
ret = -1
if ret != 0:
raise DvcException("reflink is not supported")
@staticmethod
def getdirinfo(path):
import ctypes
from ctypes import c_void_p, c_wchar_p, Structure, WinError, POINTER
from ctypes.wintypes import DWORD, HANDLE, BOOL
# NOTE: use this flag to open symlink itself and not the target
# See https://docs.microsoft.com/en-us/windows/desktop/api/
# fileapi/nf-fileapi-createfilew#symbolic-link-behavior
FILE_FLAG_OPEN_REPARSE_POINT = 0x00200000
FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
FILE_SHARE_READ = 0x00000001
OPEN_EXISTING = 3
class FILETIME(Structure):
_fields_ = [("dwLowDateTime", DWORD), ("dwHighDateTime", DWORD)]
class BY_HANDLE_FILE_INFORMATION(Structure):
_fields_ = [
("dwFileAttributes", DWORD),
("ftCreationTime", FILETIME),
("ftLastAccessTime", FILETIME),
("ftLastWriteTime", FILETIME),
("dwVolumeSerialNumber", DWORD),
("nFileSizeHigh", DWORD),
("nFileSizeLow", DWORD),
("nNumberOfLinks", DWORD),
("nFileIndexHigh", DWORD),
("nFileIndexLow", DWORD),
]
flags = FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OPEN_REPARSE_POINT
func = ctypes.windll.kernel32.CreateFileW
func.argtypes = [
c_wchar_p,
DWORD,
DWORD,
c_void_p,
DWORD,
DWORD,
HANDLE,
]
func.restype = HANDLE
hfile = func(
path, 0, FILE_SHARE_READ, None, OPEN_EXISTING, flags, None
)
if hfile is None:
raise WinError()
func = ctypes.windll.kernel32.GetFileInformationByHandle
func.argtypes = [HANDLE, POINTER(BY_HANDLE_FILE_INFORMATION)]
func.restype = BOOL
info = BY_HANDLE_FILE_INFORMATION()
rv = func(hfile, info)
func = ctypes.windll.kernel32.CloseHandle
func.argtypes = [HANDLE]
func.restype = BOOL
func(hfile)
if rv == 0:
raise WinError()
return info
@staticmethod
def inode(path):
if System.is_unix():
import ctypes
inode = os.lstat(path).st_ino
# NOTE: See https://bugs.python.org/issue29619 and
# https://stackoverflow.com/questions/34643289/
# pythons-os-stat-is-returning-wrong-inode-value
inode = ctypes.c_ulong(inode).value
else:
# getdirinfo from ntfsutils works on both files and dirs
info = System.getdirinfo(path)
inode = abs(
hash(
(
info.dwVolumeSerialNumber,
info.nFileIndexHigh,
info.nFileIndexLow,
)
)
)
assert inode >= 0
assert inode < 2 ** 64
return inode
@staticmethod
def _wait_for_input_windows(timeout):
import sys
import ctypes
import msvcrt
from ctypes.wintypes import DWORD, HANDLE
# https://docs.microsoft.com/en-us/windows/desktop/api/synchapi/nf-synchapi-waitforsingleobject
WAIT_OBJECT_0 = 0
WAIT_TIMEOUT = 0x00000102
func = ctypes.windll.kernel32.WaitForSingleObject
func.argtypes = [HANDLE, DWORD]
func.restype = DWORD
rc = func(msvcrt.get_osfhandle(sys.stdin.fileno()), timeout * 1000)
if rc not in [WAIT_OBJECT_0, WAIT_TIMEOUT]:
raise RuntimeError(rc)
@staticmethod
def _wait_for_input_posix(timeout):
import sys
import select
try:
select.select([sys.stdin], [], [], timeout)
except select.error:
pass
@staticmethod
def wait_for_input(timeout):
if System.is_unix():
return System._wait_for_input_posix(timeout)
else:
return System._wait_for_input_windows(timeout)
@staticmethod
def is_symlink(path):
if System.is_unix():
return os.path.islink(path)
# https://docs.microsoft.com/en-us/windows/desktop/fileio/
# file-attribute-constants
FILE_ATTRIBUTE_REPARSE_POINT = 0x400
if os.path.lexists(path):
info = System.getdirinfo(path)
return info.dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT
return False
@staticmethod
def is_hardlink(path):
if System.is_unix():
return os.stat(path).st_nlink > 1
info = System.getdirinfo(path)
return info.nNumberOfLinks > 1
| apache-2.0 | 852,837,283,070,764,700 | 28.077441 | 103 | 0.546086 | false | 4.092891 | false | false | false |
whalerock/ella | test_ella/test_core/test_cache.py | 2 | 17751 | import time
from datetime import date
from django.core.cache import get_cache
from ella.core.cache.utils import normalize_key
from hashlib import md5
from test_ella.cases import RedisTestCase as TestCase
from django.test.client import RequestFactory
from django.contrib.sites.models import Site
from django.contrib.contenttypes.models import ContentType
from ella.core.cache import utils, redis
from ella.core.models import Listing, Publishable
from ella.core.views import ListContentType
from ella.core.managers import ListingHandler
from ella.articles.models import Article
from ella.utils.timezone import from_timestamp
from test_ella.test_core import create_basic_categories, create_and_place_a_publishable, \
create_and_place_more_publishables, list_all_publishables_in_category_by_hour
from nose import tools
class CacheTestCase(TestCase):
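    """ Swaps the module level cache used by ella.core.cache.utils for an
        in-memory locmem cache for the duration of each test. """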
def setUp(self):
self.old_cache = utils.cache
self.cache = get_cache('locmem://')
utils.cache = self.cache
super(CacheTestCase, self).setUp()
def tearDown(self):
super(CacheTestCase, self).tearDown()
utils.cache = self.old_cache
class TestCacheUtils(CacheTestCase):
def test_get_many_objects(self):
ct_ct = ContentType.objects.get_for_model(ContentType)
site_ct = ContentType.objects.get_for_model(Site)
objs = utils.get_cached_objects([(ct_ct.id, ct_ct.id), (ct_ct.id, site_ct.id), (site_ct.id, 1)])
tools.assert_equals([ct_ct, site_ct, Site.objects.get(pk=1)], objs)
def test_get_many_publishables_will_respect_their_content_type(self):
create_basic_categories(self)
create_and_place_a_publishable(self)
objs = utils.get_cached_objects([self.publishable.pk], Publishable)
tools.assert_true(isinstance(objs[0], Article))
def test_get_many_objects_raises_by_default(self):
ct_ct = ContentType.objects.get_for_model(ContentType)
site_ct = ContentType.objects.get_for_model(Site)
tools.assert_raises(Site.DoesNotExist, utils.get_cached_objects, [(ct_ct.id, ct_ct.id), (ct_ct.id, site_ct.id), (site_ct.id, 1), (site_ct.id, 100)])
def test_get_many_objects_can_replace_missing_with_none(self):
ct_ct = ContentType.objects.get_for_model(ContentType)
site_ct = ContentType.objects.get_for_model(Site)
objs = utils.get_cached_objects([(ct_ct.id, ct_ct.id), (ct_ct.id, site_ct.id), (site_ct.id, 1), (site_ct.id, 100)], missing=utils.NONE)
tools.assert_equals([ct_ct, site_ct, Site.objects.get(pk=1), None], objs)
def test_get_many_objects_can_skip(self):
ct_ct = ContentType.objects.get_for_model(ContentType)
site_ct = ContentType.objects.get_for_model(Site)
objs = utils.get_cached_objects([(ct_ct.id, ct_ct.id), (ct_ct.id, site_ct.id), (site_ct.id, 1), (site_ct.id, 100)], missing=utils.SKIP)
tools.assert_equals([ct_ct, site_ct, Site.objects.get(pk=1)], objs)
def test_get_publishable_returns_subclass(self):
create_basic_categories(self)
create_and_place_a_publishable(self)
tools.assert_equals(self.publishable, utils.get_cached_object(Publishable, pk=self.publishable.pk))
def test_get_article_uses_the_publishable_key_and_0_for_version(self):
tools.assert_equals(
':'.join((utils.KEY_PREFIX, str(ContentType.objects.get_for_model(Publishable).pk), '123', '0')),
utils._get_key(utils.KEY_PREFIX, ContentType.objects.get_for_model(Article), pk=123)
)
def test_get_article_uses_the_publishable_key_and_version_from_cache(self):
key = utils._get_key(utils.KEY_PREFIX, ContentType.objects.get_for_model(Article), pk=123, version_key=True)
self.cache.set(key, 3)
tools.assert_equals(
':'.join((utils.KEY_PREFIX, str(ContentType.objects.get_for_model(Publishable).pk), '123', '3')),
utils._get_key(utils.KEY_PREFIX, ContentType.objects.get_for_model(Article), pk=123)
)
class TestCacheInvalidation(CacheTestCase):
def test_save_invalidates_object(self):
self.ct = ContentType.objects.get_for_model(ContentType)
ct = utils.get_cached_object(self.ct, pk=self.ct.pk)
tools.assert_equals(ct, self.ct)
tools.assert_equals(self.ct, self.cache.get(utils._get_key(utils.KEY_PREFIX, self.ct, pk=self.ct.pk)))
self.ct.save()
        tools.assert_equals(None, self.cache.get(utils._get_key(utils.KEY_PREFIX, self.ct, pk=self.ct.pk)))
class TestRedisListings(TestCase):
def setUp(self):
super(TestRedisListings, self).setUp()
create_basic_categories(self)
create_and_place_more_publishables(self)
def test_access_to_individual_listings(self):
list_all_publishables_in_category_by_hour(self)
lh = Listing.objects.get_queryset_wrapper(category=self.category, children=ListingHandler.ALL, source='redis')
l = lh[0]
tools.assert_equals(l.publishable, self.listings[0].publishable)
def test_listings_dont_propagate_where_they_shouldnt(self):
self.category_nested.app_data = {'ella': {'propagate_listings': False}}
self.category_nested.save()
# small hack to remove the cached category on Publishable
for p in self.publishables:
del p._category_cache
list_all_publishables_in_category_by_hour(self)
ct_id = self.publishables[0].content_type_id
tools.assert_equals(['%d:1' % ct_id], redis.client.zrange('listing:d:1', 0, 100))
tools.assert_equals(['%d:1' % ct_id], redis.client.zrange('listing:c:1', 0, 100))
tools.assert_equals(['%d:2' % ct_id, '%d:3' % ct_id], redis.client.zrange('listing:c:2', 0, 100))
tools.assert_equals(['%d:2' % ct_id, '%d:3' % ct_id], redis.client.zrange('listing:d:2', 0, 100))
def test_listing_gets_removed_when_publishable_goes_unpublished(self):
list_all_publishables_in_category_by_hour(self)
p = self.publishables[0]
p.published = False
p.save()
ct_id = p.content_type_id
tools.assert_equals(set([
'listing:2',
'listing:3',
'listing:c:1',
'listing:c:2',
'listing:c:3',
'listing:d:1',
'listing:d:2',
'listing:d:3',
'listing:ct:%d' % ct_id,
]),
set(redis.client.keys())
)
tools.assert_equals(['%d:2' % ct_id, '%d:3' % ct_id], redis.client.zrange('listing:ct:%d' % ct_id, 0, 100))
tools.assert_equals(['%d:2' % ct_id, '%d:3' % ct_id], redis.client.zrange('listing:d:1', 0, 100))
tools.assert_equals(['%d:2' % ct_id], redis.client.zrange('listing:c:1', 0, 100))
def test_listing_save_adds_itself_to_relevant_zsets(self):
list_all_publishables_in_category_by_hour(self)
ct_id = self.publishables[0].content_type_id
tools.assert_equals(set([
'listing:1',
'listing:2',
'listing:3',
'listing:c:1',
'listing:c:2',
'listing:c:3',
'listing:d:1',
'listing:d:2',
'listing:d:3',
'listing:ct:%d' % ct_id,
]),
set(redis.client.keys())
)
tools.assert_equals(['%d:3' % ct_id], redis.client.zrange('listing:3', 0, 100))
tools.assert_equals(['%d:1' % ct_id, '%d:2' % ct_id, '%d:3' % ct_id], redis.client.zrange('listing:ct:%d' % ct_id, 0, 100))
tools.assert_equals(['%d:1' % ct_id, '%d:2' % ct_id, '%d:3' % ct_id], redis.client.zrange('listing:d:1', 0, 100))
def test_listing_delete_removes_itself_from_redis(self):
list_all_publishables_in_category_by_hour(self)
self.listings[1].delete()
ct_id = self.publishables[0].content_type_id
tools.assert_equals(set([
'listing:1',
'listing:3',
'listing:c:1',
'listing:c:2',
'listing:c:3',
'listing:d:1',
'listing:d:2',
'listing:d:3',
'listing:ct:%d' % ct_id,
]),
set(redis.client.keys())
)
tools.assert_equals(['%d:3' % ct_id], redis.client.zrange('listing:3', 0, 100))
tools.assert_equals(['%d:3' % ct_id], redis.client.zrange('listing:c:2', 0, 100))
tools.assert_equals(['%d:3' % ct_id], redis.client.zrange('listing:d:2', 0, 100))
tools.assert_equals(['%d:1' % ct_id, '%d:3' % ct_id], redis.client.zrange('listing:d:1', 0, 100))
tools.assert_equals(['%d:1' % ct_id], redis.client.zrange('listing:c:1', 0, 100))
tools.assert_equals(['%d:1' % ct_id, '%d:3' % ct_id], redis.client.zrange('listing:ct:%d' % ct_id, 0, 100))
def test_get_listing_uses_data_from_redis(self):
ct_id = self.publishables[0].content_type_id
t1, t2 = time.time()-90, time.time()-100
redis.client.zadd('listing:c:2', '%d:1' % ct_id, repr(t1))
redis.client.zadd('listing:c:2', '%d:3' % ct_id, repr(t2))
dt1, dt2 = from_timestamp(t1), from_timestamp(t2)
lh = Listing.objects.get_queryset_wrapper(category=self.category_nested, children=ListingHandler.IMMEDIATE, source='redis')
tools.assert_equals(2, lh.count())
l1, l2 = lh.get_listings(0, 10)
tools.assert_equals(l1.publishable, self.publishables[0])
tools.assert_equals(l2.publishable, self.publishables[2])
tools.assert_equals(l1.publish_from, dt1)
tools.assert_equals(l2.publish_from, dt2)
def test_get_listing_omits_excluded_publishable(self):
ct_id = self.publishables[0].content_type_id
t1, t2 = time.time()-90, time.time()-100
redis.client.zadd('listing:c:2', '%d:1' % ct_id, repr(t1))
redis.client.zadd('listing:c:2', '%d:3' % ct_id, repr(t2))
dt1, dt2 = from_timestamp(t1), from_timestamp(t2)
lh = Listing.objects.get_queryset_wrapper(category=self.category_nested, children=ListingHandler.IMMEDIATE, exclude=self.publishables[0], source='redis')
tools.assert_equals(1, lh.count())
l = lh.get_listings(0, 10)
tools.assert_equals(l[0].publishable, self.publishables[2])
tools.assert_equals(l[0].publish_from, dt2)
def test_redis_listing_handler_used_from_view_when_requested(self):
ct_id = self.publishables[0].content_type_id
t1, t2 = time.time()-90, time.time()-100
redis.client.zadd('listing:d:2', '%d:1' % ct_id, repr(t1))
redis.client.zadd('listing:d:2', '%d:3' % ct_id, repr(t2))
dt1, dt2 = from_timestamp(t1), from_timestamp(t2)
rf = RequestFactory()
request = rf.get(self.category_nested.get_absolute_url(), {'using': 'redis'})
lct = ListContentType()
context = lct.get_context(request, self.category_nested)
tools.assert_equals(2, len(context['listings']))
l1, l2 = context['listings']
tools.assert_equals(l1.publishable, self.publishables[0])
tools.assert_equals(l2.publishable, self.publishables[2])
tools.assert_equals(l1.publish_from, dt1)
tools.assert_equals(l2.publish_from, dt2)
def test_get_listing_uses_data_from_redis_correctly_for_pagination(self):
ct_id = self.publishables[0].content_type_id
t1, t2, t3 = time.time()-90, time.time()-100, time.time() - 110
redis.client.zadd('listing:c:2', '%d:1' % ct_id, repr(t1))
redis.client.zadd('listing:c:2', '%d:3' % ct_id, repr(t2))
redis.client.zadd('listing:c:2', '%d:2' % ct_id, repr(t3))
lh = Listing.objects.get_queryset_wrapper(category=self.category_nested, children=ListingHandler.IMMEDIATE, source='redis')
tools.assert_equals(3, lh.count())
l = lh.get_listings(2, 1)
tools.assert_equals(1, len(l))
tools.assert_equals(l[0].publishable, self.publishables[1])
def test_redis_lh_slicing(self):
list_all_publishables_in_category_by_hour(self)
# Instantiate the RedisListingHandler and have it fetch all children
lh = redis.RedisListingHandler(self.category, ListingHandler.ALL)
for offset, count in [(0, 10), (0, 1), (0, 2), (1, 2), (2, 3), (3, 3)]:
partial = lh.get_listings(offset=offset, count=count)
tools.assert_equals(
[l.publishable for l in partial],
[l.publishable for l in self.listings[offset:offset + count]]
)
def test_time_based_lh_slicing(self):
list_all_publishables_in_category_by_hour(self)
# Instantiate the TimeBasedListingHandler and have it fetch all children
lh = redis.TimeBasedListingHandler(self.category, ListingHandler.ALL)
for offset, count in [(0, 10), (0, 1), (0, 2), (1, 2), (2, 3), (3, 3)]:
partial = lh.get_listings(offset=offset, count=count)
tools.assert_equals(
[l.publishable for l in partial],
[l.publishable for l in self.listings[offset:offset + count]]
)
class TestAuthorLH(TestCase):
def setUp(self):
from ella.core.models import Author
super(TestAuthorLH, self).setUp()
create_basic_categories(self)
create_and_place_more_publishables(self)
self.author = Author.objects.create(slug='testauthor')
for p in self.publishables:
p.authors = [self.author]
p.save()
def test_listing_save_adds_itself_to_relevant_zsets(self):
list_all_publishables_in_category_by_hour(self)
ct_id = self.publishables[0].content_type_id
tools.assert_equals(set([
'listing:1',
'listing:2',
'listing:3',
'listing:c:1',
'listing:c:2',
'listing:c:3',
'listing:d:1',
'listing:d:2',
'listing:d:3',
'listing:a:%d' % self.author.pk,
'listing:ct:%d' % ct_id,
]),
set(redis.client.keys())
)
tools.assert_equals(['%d:1' % ct_id, '%d:2' % ct_id, '%d:3' % ct_id],
redis.client.zrange('listing:a:1', 0, 100))
class SlidingLH(redis.SlidingListingHandler):
PREFIX = 'sliding'
class TestSlidingListings(TestCase):
def setUp(self):
super(TestSlidingListings, self).setUp()
create_basic_categories(self)
create_and_place_more_publishables(self)
self.ct_id = self.publishables[0].content_type_id
def test_remove_publishable_clears_all_windows(self):
SlidingLH.add_publishable(self.category, self.publishables[0], 10)
SlidingLH.remove_publishable(self.category, self.publishables[0])
tools.assert_equals(set(['sliding:KEYS', 'sliding:WINDOWS']), set(redis.client.keys(SlidingLH.PREFIX + '*')))
def test_add_publishable_pushes_to_day_and_global_keys(self):
SlidingLH.add_publishable(self.category, self.publishables[0], 10)
day = date.today().strftime('%Y%m%d')
expected_base = [
'sliding:1',
'sliding:c:1',
'sliding:d:1',
'sliding:ct:%s' % self.ct_id,
]
expected = expected_base + [k + ':' + day for k in expected_base] + ['sliding:KEYS', 'sliding:WINDOWS']
tools.assert_equals(set(expected), set(redis.client.keys(SlidingLH.PREFIX + '*')))
tools.assert_equals(redis.client.zrange('sliding:d:1', 0, -1, withscores=True), redis.client.zrange('sliding:d:1' + ':' + day, 0, -1, withscores=True))
def test_slide_windows_regenerates_aggregates(self):
SlidingLH.add_publishable(self.category, self.publishables[0], 10)
# register the keys that should exist
redis.client.sadd('sliding:KEYS', 'sliding:1', 'sliding:c:1')
redis.client.zadd('sliding:1:20101010', **{'17:1': 10, '17:2': 1})
redis.client.zadd('sliding:1:20101009', **{'17:1': 9, '17:2': 2})
redis.client.zadd('sliding:1:20101007', **{'17:1': 8, '17:2': 3, '17:3': 11})
redis.client.zadd('sliding:1:20101001', **{'17:1': 8, '17:2': 3, '17:3': 11})
SlidingLH.regenerate(date(2010, 10, 10))
tools.assert_equals([('17:2', 6.0), ('17:3', 11.0), ('17:1', 27.0)], redis.client.zrange('sliding:1', 0, -1, withscores=True))
def test_regenerate_removes_old_slots(self):
redis.client.zadd('sliding:WINDOWS', **{
'sliding:1:20101010': 20101010,
'sliding:1:20101009': 20101009,
'sliding:1:20101007': 20101007,
'sliding:1:20101001': 20101001
})
redis.client.zadd('sliding:1:20101010', **{'17:1': 10, '17:2': 1})
redis.client.zadd('sliding:1:20101009', **{'17:1': 9, '17:2': 2})
redis.client.zadd('sliding:1:20101007', **{'17:1': 8, '17:2': 3, '17:3': 11})
redis.client.zadd('sliding:1:20101001', **{'17:1': 8, '17:2': 3, '17:3': 11})
SlidingLH.regenerate(date(2010, 10, 10))
tools.assert_false(redis.client.exists('sliding:1:20101001'))
tools.assert_true(redis.client.exists('sliding:1:20101007'))
tools.assert_equals([
('sliding:1:20101007', 20101007),
('sliding:1:20101009', 20101009),
('sliding:1:20101010', 20101010)
],
redis.client.zrange('sliding:WINDOWS', 0, -1, withscores=True)
)
def test_normalize_key_doesnt_touch_short_key():
key = "thisistest"
tools.assert_equals(key, normalize_key(key))
def test_normalize_key_md5s_long_key():
key = "0123456789" * 30
tools.assert_equals(md5(key).hexdigest(), normalize_key(key))
| bsd-3-clause | -2,129,175,163,393,120,300 | 42.507353 | 161 | 0.607966 | false | 3.131793 | true | false | false |
quandyfactory/sobidata | setup.py | 1 | 1079 | from distutils.core import setup
version = '0.5'
long_description = ''
try:
with open('README.md') as readme:
# load long_description into memory
long_description = readme.read()
# save README (no extension) for pypi
with open('README', 'w') as myfile:
myfile.write(long_description)
except IOError:
with open('README') as readme:
long_description = readme.read()
setup(
name = 'sobidata',
version = version,
description = 'Downloads your Social Bicycles route data.',
long_description = long_description,
author = 'Ryan McGreal',
author_email = '[email protected]',
license = 'LICENCE.txt',
url = 'https://github.com/quandyfactory/sobidata',
py_modules = ['sobidata'],
install_requires = [
'dicttoxml',
'openpyxl',
'requests'
],
download_url = 'https://pypi.python.org/packages/source/s/sobidata/sobidata-%s.tar.gz?raw=true' % (version),
platforms='Cross-platform',
classifiers=[
'Programming Language :: Python',
],
)
| gpl-2.0 | 1,312,043,398,835,676,400 | 27.394737 | 112 | 0.626506 | false | 3.491909 | false | false | false |
spivachuk/sovrin-node | indy_client/anon_creds/indy_issuer.py | 2 | 2173 | from anoncreds.protocol.issuer import Issuer
from anoncreds.protocol.repo.attributes_repo import AttributeRepo
from anoncreds.protocol.repo.public_repo import PublicRepo
from anoncreds.protocol.wallet.issuer_wallet import IssuerWalletInMemory
from indy_client.anon_creds.indy_public_repo import IndyPublicRepo
from indy_client.client.wallet.wallet import Wallet
class IndyIssuer(Issuer):
def __init__(self, client, wallet: Wallet, attrRepo: AttributeRepo,
publicRepo: PublicRepo = None):
publicRepo = publicRepo or IndyPublicRepo(client=client,
wallet=wallet)
issuerWallet = IndyIssuerWalletInMemory(wallet.name, publicRepo)
super().__init__(issuerWallet, attrRepo)
def prepareForWalletPersistence(self):
# TODO: If we don't set self.wallet._repo.client to None,
# it hangs during wallet persistence, based on findings, it seems,
# somewhere it hangs during persisting client._ledger and
# client.ledgerManager
self.wallet._repo.client = None
def restorePersistedWallet(self, issuerWallet):
curRepoClient = self.wallet._repo.client
self.wallet = issuerWallet
self._primaryIssuer._wallet = issuerWallet
self._nonRevocationIssuer._wallet = issuerWallet
self.wallet._repo.client = curRepoClient
class IndyIssuerWalletInMemory(IssuerWalletInMemory):
def __init__(self, name, pubRepo):
IssuerWalletInMemory.__init__(self, name, pubRepo)
# available claims to anyone whose connection is accepted by the agent
self.availableClaimsToAll = []
# available claims only for certain invitation (by nonce)
self.availableClaimsByNonce = {}
# available claims only for certain invitation (by internal id)
self.availableClaimsByInternalId = {}
# mapping between specific identifier and available claims which would
# have been available once they have provided requested information
# like proof etc.
self.availableClaimsByIdentifier = {}
self._proofRequestsSchema = {} # Dict[str, Dict[str, any]]
| apache-2.0 | -3,514,978,543,121,759,000 | 40 | 78 | 0.698113 | false | 4.009225 | false | false | false |
openspending/gobble | gobble/snapshot.py | 1 | 6331 | """This module has good intentions, like helping you debug API calls"""
from collections import OrderedDict
from copy import deepcopy
from datetime import datetime
from json import loads, dumps
from os import listdir
from os.path import join, isdir
from re import search
from click import command
import io
from json import JSONDecodeError
from gobble.config import ROOT_DIR
from gobble.logger import log
from gobble.config import settings
SNAPSHOTS_DIR = join(ROOT_DIR, 'assets', 'snapshots')
def to_json(response):
"""Safely extract the payload from the response object"""
try:
return loads(response.text)
except JSONDecodeError:
return {}
class SnapShot(OrderedDict):
"""A chatty wrapper around the API transaction"""
def __init__(self, endpoint, url, response, params,
headers=None, json=None, is_freeze=False):
"""Log, record and save before returning an instance"""
self.is_freeze = is_freeze
self.url = url
self.endpoint = endpoint
self.response = response
self.headers = headers
self.params = params
self.request_payload = json
super(SnapShot, self).__init__(self._template)
self.timestamp = str(datetime.now())
self._log()
self._record()
self._save()
def _log(self):
"""Is there such a thing as too much logging?"""
code = self.response.status_code
reason = self.response.reason
response_json = to_json(self.response)
begin = code, reason, self.endpoint, 'begin'
end = code, reason, self.endpoint, 'end'
transaction = ' [%s] %s - %s (%s) '
log.debug('{:*^100}'.format(transaction % begin))
messages = (
('Request endpoint: %s', self.endpoint.url),
('Request time: %s', self.response.elapsed),
('Request parameters: %s', self.params),
('Request payload: %s', self.request_payload),
('Request headers: %s', self.headers),
('Response headers: %s', self.response.headers),
('Response payload: %s', response_json),
('Response cookies: %s', self.response.cookies),
('Request full URL: %s', self.url),
)
for message in messages:
log.debug(*message)
indent = 4 if settings.EXPANDED_LOG_STYLE else None
log.debug(dumps(response_json, ensure_ascii=False, indent=indent))
log.debug('{:*^100}'.format(transaction % end))
def _record(self):
"""Store the transaction info"""
json = to_json(self.response)
duplicate_json = deepcopy(json)
self['timestamp'] = self.timestamp
self['url'] = self.url
self['query'] = self.params
self['request_json'] = self.request_payload
self['response_json'] = duplicate_json
self['request_headers'] = self.headers
self['response_headers'] = dict(self.response.headers)
self['cookies'] = dict(self.response.cookies)
@property
def _template(self):
return (
('timestamp', None),
('host', settings.OS_URL),
('url', None),
('method', self.endpoint.method),
('path', self.endpoint.path),
('query', None),
('request_json', None),
('response_json', None),
('request_headers', None),
('response_headers', None),
('cookies', None),
)
def _save(self):
"""Save the snapshot as JSON in the appropriate place"""
with io.open(self._filepath, 'w+', encoding='utf-8') as file:
file.write(dumps(self, ensure_ascii=False))
log.debug('Saved request + response to %s', self._filepath)
@property
def _folder(self):
return SNAPSHOTS_DIR if self.is_freeze else settings.USER_DIR
@property
def _filepath(self):
template = '{method}.{path}.json'
dot_path = '.'.join(self.endpoint._path).rstrip('/')
params = {'method': self.endpoint.method, 'path': dot_path}
filename = template.format(**params)
return join(self._folder, filename)
def __str__(self):
return str(self.endpoint) + ' at ' + self.timestamp
def __repr__(self):
return '<SnapShot %s>' % str(self)
@property
def json(self):
return dumps(self, ensure_ascii=False)
def freeze(json):
"""Recursively substitute unwanted strings inside a json-like object
Basically, remove anything in the substitution list below, even when
hidden inside query strings.
"""
subs = {
'jwt': r'jwt=([^&^"]+)',
"bucket_id": r'\/([\w]{32})\/',
'Signature': r'Signature=([^&^"]+)',
'AWSAccessKeyId': r'AWSAccessKeyId=([^&^"]+)',
'Expires': r'Expires=([^&^"]+)',
'Date': None,
"Set-Cookie": None,
'token': None,
}
def regex(dummy_, json_, key_, pattern_, value_):
match = search(pattern_, value_)
if match:
sub = match.group(1), dummy_
json_[key_] = value_.replace(*sub)
if isinstance(json, list):
for item in json:
freeze(item)
elif isinstance(json, dict):
for field, pattern in subs.items():
for key, value in json.items():
dummy = field.upper()
if key == field:
json[key] = dummy
elif isinstance(value, str):
if pattern:
regex(dummy, json, key, pattern, value)
elif isinstance(value, dict):
freeze(value)
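# Hypothetical illustration (not part of the original module): freeze() rewrites
# secret-bearing substrings in place, so a signed URL keeps its shape but loses
# its token values. The payload below is made up for this sketch.
def _freeze_example():
    payload = {'url': 'https://example.org/upload?jwt=abc123&Expires=99'}
    freeze(payload)
    # payload['url'] is now 'https://example.org/upload?jwt=JWT&Expires=EXPIRES'
    return payload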
@command
def archive(destination):
"""Freeze and move all snapshots to the destination folder."""
if not isdir(destination):
raise NotADirectoryError(destination)
for file in listdir(settings.USER_DIR):
verb = file.split('.')[0]
if verb in ['GET', 'POST', 'PUT']:
with io.open(file) as source:
snapshot = loads(source.read())
freeze(snapshot)
# Overwrite if necessary
output = join(destination, file)
with io.open(output, 'w+', encoding='utf-8') as target:
target.write(dumps(snapshot, ensure_ascii=False))
| mit | -5,760,641,882,880,226,000 | 30.187192 | 74 | 0.569578 | false | 4.129811 | false | false | false |
clone1612/appstore | nextcloudappstore/core/permissions.py | 2 | 1054 | from rest_framework.permissions import BasePermission
DELETE_METHODS = ('DELETE',)
UPDATE_METHODS = ('PUT', 'PATCH', 'POST')
READ_METHODS = ('GET', 'HEAD', 'OPTIONS')
class UpdateDeletePermission(BasePermission):
"""
Base permission that allows anyone to read resources, while letting
subclasses define separate update and delete checks for
authenticated users
"""
def has_update_obj_permission(self, user, obj):
return obj.can_update(user)
def has_delete_obj_permission(self, user, obj):
return obj.can_delete(user)
def has_object_permission(self, request, view, obj):
method = request.method
if method in READ_METHODS:
return True
elif request.user and request.user.is_authenticated:
user = request.user
if method in UPDATE_METHODS:
return self.has_update_obj_permission(user, obj)
elif method in DELETE_METHODS:
return self.has_delete_obj_permission(user, obj)
return False
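# Hypothetical sketch (not part of the app store code): a subclass that checks an
# assumed "owner" attribute instead of the model's can_update()/can_delete() hooks.
class OwnerOnlyPermission(UpdateDeletePermission):
    def has_update_obj_permission(self, user, obj):
        # assumes the protected model exposes an "owner" field
        return obj.owner == user

    def has_delete_obj_permission(self, user, obj):
        return obj.owner == user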
| agpl-3.0 | -806,280,233,992,908,400 | 31.9375 | 72 | 0.655598 | false | 4.337449 | false | false | false |
ahmad88me/tada | tadacode/explore/explore.py | 1 | 3550 |
import os
import pandas as pd
import numpy as np
import random
import matplotlib.pyplot as plt
import matplotlib.cm
from matplotlib.colors import rgb2hex
from matplotlib import colors as matplot_colors
import six
colors = list(six.iteritems(matplot_colors.cnames))
colors_hex = zip(*colors)[1]
#cmap = matplotlib.cm.get_cmap(name='viridis')
cmap = matplotlib.cm.get_cmap(name='hsv')
input_files = [
"badmintonplayers.csv",
"basketballplayers.csv",
"boxers.csv",
"cyclists.csv",
"golfplayers3.csv",
"gymnasts.csv",
"handballplayers.csv",
"Olympic Games.csv",
"rower.csv",
"soccerplayers4.csv",
"stadiums2.csv",
"swimmers.csv",
"tennisplayers.csv",
"volleyballplayers.csv",
"wrestlers.csv",
]
d = "clean_input"
def get_outliers(df, k=1.5):
q1 = df.quantile(q=0.25)
q3 = df.quantile(q=0.75)
return df[(df < q1 - k * (q3 - q1)) | (df > q3 + k * (q3 - q1))], df[(df >= q1 - k * (q3 - q1)) & (df <= q3 + k * (q3 - q1))]
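# Hypothetical sanity check (not part of the original analysis): get_outliers()
# applies Tukey's IQR fences, so the single extreme value in this toy series is
# the only point flagged.
def _demo_get_outliers():
    demo = pd.Series([1.0, 2.0, 3.0, 4.0, 100.0])
    outliers, rest = get_outliers(demo)
    print "demo outliers:", list(outliers)  # [100.0]
    print "demo non-outliers:", list(rest)  # [1.0, 2.0, 3.0, 4.0]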
def explore_input_files():
color_idx = 0
print "outliers in: "
for idx_inp, inpf in enumerate(input_files):
df = pd.read_csv(os.path.join(d, inpf)).select_dtypes(include=[np.number]).dropna(axis=1, how='any')
for idx, column in enumerate(df):
if df[column].size == 0:
continue
plt.plot(df[column], [column[0:6].lower() + "(" + inpf[0:6].lower() + ")"] * df[column].size, ".",
c=rgb2hex(cmap(color_idx % cmap.N)), alpha=0.5, label=column[0:6] + "(" + inpf[0:4] + ")")
outliers, _ = get_outliers(df[column])
if outliers.size != 0:
print " > "+column.lower() + "(" + inpf.lower() + ")" + " num of outliers is: "+str(outliers.size)
plt.plot(outliers, [column[0:6].lower() + "(" + inpf[0:6].lower() + ")"] * outliers.size, "X",
c=rgb2hex(cmap(color_idx % cmap.N)), alpha=1.0, label=column[0:6] + "(" + inpf[0:4] + ")")
color_idx += 15
def free_form_visualization():
color_idx = 0
for idx_inp, inpf in enumerate(input_files):
df = pd.read_csv(os.path.join(d, inpf)).select_dtypes(include=[np.number]).dropna(axis=1, how='any')
for idx, column in enumerate(df):
if df[column].size == 0:
continue
plt.plot(df[column], [column[0:6].lower() + "(" + inpf[0:6].lower() + ")"] * df[column].size, "1",
c=rgb2hex(cmap(color_idx % cmap.N)), alpha=0.3, label=column[0:6] + "(" + inpf[0:4] + ")")
# draw the mean
plt.plot([df[column].mean()], [column[0:6].lower() + "(" + inpf[0:6].lower() + ")"], "s",
c=rgb2hex(cmap(color_idx % cmap.N)), alpha=0.5, label=column[0:6] + "(" + inpf[0:4] + ")")
outliers, non_outliers = get_outliers(df[column])
# draw the mean without the outliers
plt.plot([non_outliers.mean()], [column[0:6].lower() + "(" + inpf[0:6].lower() + ")"], "D",
c=rgb2hex(cmap(color_idx % cmap.N)), alpha=0.5, label=column[0:6] + "(" + inpf[0:4] + ")")
color_idx += 15
line_up, = plt.plot([], [], "s", label='mean with outliers', c=rgb2hex(cmap(15 % cmap.N)))
line_down, = plt.plot([], [], "D", label='mean without outliers', c=rgb2hex(cmap(15*10 % cmap.N)))
plt.legend(handles=[line_up, line_down])
if raw_input("Enter:\n1) Data Exploration\n2) Free Form Visualization\n")=="1":
explore_input_files()
else:
free_form_visualization()
plt.show()
| mit | 5,235,105,585,080,472,000 | 34.5 | 129 | 0.552958 | false | 2.890879 | false | false | false |
CityGrid/arsenal | client/arsenalclient/interface/tags.py | 1 | 6469 | '''Arsenal client Tags class.'''
import logging
from arsenalclient.interface.arsenal_interface import ArsenalInterface
from arsenalclient.exceptions import NoResultFound
LOG = logging.getLogger(__name__)
class Tags(ArsenalInterface):
'''The arsenal client Tags class.'''
def __init__(self, **kwargs):
super(Tags, self).__init__(**kwargs)
self.uri = '/api/tags'
# Overridden methods
def search(self, params=None):
'''Search for tags.
Usage:
>>> params = {
... 'name': 'my_tag',
... 'exact_get': True,
... }
>>> Tags.search(params)
Args:
params (dict): a dictionary of url parameters for the request.
Returns:
A json response from ArsenalInterface.check_response_codes().
'''
return super(Tags, self).search(params)
def create(self, params):
'''Create a new tag.
Args:
params (dict): A dictionary with the following attributes:
tag_name : The name of the tag you wish to create.
tag_value: The value of the tag you wish to create.
Usage:
>>> params = {
... 'name': 'meaning',
... 'value': 42,
... }
>>> Tags.create(params)
<Response [200]>
'''
return super(Tags, self).create(params)
def update(self, params):
'''Update a tag. There is nothing to update with tags as every field
must be unique.'''
pass
def delete(self, params):
'''Delete a tag object from the server.
Args:
params: A tag dictionary to delete. Must contain the
tag id, name, and value.
Usage:
>>> params = {
... 'id': 1,
... 'name': 'my_tag',
... 'value': 'my_string',
... }
>>> Tags.delete(params)
'''
return super(Tags, self).delete(params)
def get_audit_history(self, results):
'''Get the audit history for tags.'''
return super(Tags, self).get_audit_history(results)
def get_by_name(self, name):
'''Get a single tag by its name. This is not possible as a tag's
uniqueness is determined by both its name and value. Use
Tags.get_by_name_value() instead.
'''
pass
# Custom methods
def get_by_name_value(self, name, value):
'''Get a tag from the server based on its name and value.'''
LOG.debug('Searching for tag name: {0} value: {1}'.format(name,
value))
data = {
'name': name,
'value': value,
'exact_get': True,
}
resp = self.api_conn('/api/tags', data, log_success=False)
LOG.debug('Results are: {0}'.format(resp))
try:
resource = resp['results'][0]
except IndexError:
msg = 'Tag not found: {0}={1}'.format(name, value)
LOG.info(msg)
raise NoResultFound(msg)
if len(resp['results']) > 1:
msg = 'More than one result found: {0}'.format(name)
LOG.error(msg)
raise RuntimeError(msg)
return resource
def _manage_assignments(self, name, value, object_type, results, api_method):
'''Assign or de-assign a tag to a list of node, node_group, or
data_center dictionaries.'''
action_names = []
action_ids = []
msg = 'Assigning'
if api_method == 'delete':
msg = 'De-assigning'
for action_object in results:
try:
action_names.append(action_object['name'])
except KeyError:
action_names.append(action_object['serial_number'])
action_ids.append(action_object['id'])
try:
this_tag = self.get_by_name_value(name, value)
except NoResultFound:
if api_method == 'delete':
LOG.debug('Tag not found, nothing to do.')
return
else:
params = {
'name': name,
'value': value,
}
resp = self.create(params)
this_tag = resp['results'][0]
LOG.info('{0} tag: {1}={2}'.format(msg,
this_tag['name'],
this_tag['value']))
for action_name in action_names:
LOG.info(' {0}: {1}'.format(object_type, action_name))
data = {
object_type: action_ids
}
try:
uri = '/api/tags/{0}/{1}'.format(this_tag['id'], object_type)
resp = self.api_conn(uri, data, method=api_method)
except:
raise
return resp
def assign(self, name, value, object_type, results):
'''Assign a tag to one or more nodes, node_groups, or data_centers.
Args:
name (str) : The name of the tag to assign to the <Class>.search() results.
value (str) : The value of the tag to assign to the <Class>.search() results.
object_type (str): A string representing the object_type to assign the
tag to. One of nodes, node_groups or data_centers.
results : The nodes, node_groups, or data_centers from the results
of <Class>.search() to assign the tag to.
Usage:
>>> Tags.assign('meaning', 42, 'nodes', <search results>)
<json>
'''
return self._manage_assignments(name, value, object_type, results, 'put')
def deassign(self, name, value, object_type, results):
'''De-assign a tag from one or more nodes, node_groups, or data_centers.
Args:
name (str) : The name of the tag to deassign from the <Class>.search() results.
value (str) : The value of the tag to deassign from the <Class>.search() results.
object_type (str): A string representing the object_type to deassign the
tag from. One of nodes, node_groups or data_centers.
results : The nodes, node_groups, or data_centers from the results
of <Class>.search() to deassign the tag from.
Usage:
>>> Tags.deassign('meaning', 42, 'nodes', <search results>)
<json>
'''
return self._manage_assignments(name, value, object_type, results, 'delete')
| apache-2.0 | 6,039,443,982,611,251,000 | 29.804762 | 88 | 0.529448 | false | 4.157455 | false | false | false |
vvoland/py3status | py3status/modules/thunderbird_calendar.py | 1 | 2920 | # -*- coding: utf-8 -*-
"""
Display tasks in thunderbird calendar.
Configuration parameters:
cache_timeout: how often we refresh this module in seconds (default 120)
err_exception: error message when an exception is raised
(default 'error: calendar parsing failed')
err_profile: error message regarding profile path and read access
(default 'error: profile not readable')
format: see placeholders below
(default 'tasks:[{due}] current:{current}')
profile_path: path to the user thunderbird profile (not optional)
(default '')
Format of status string placeholders:
{completed} completed tasks
{current} title of current running task (sorted by priority and stamp)
{due} due tasks
Make sure to configure profile_path in your i3status config using the full
path or this module will not be able to retrieve any information from your
calendar.
ex: profile_path = "/home/user/.thunderbird/1yawevtp.default"
@author mrt-prodz
SAMPLE OUTPUT
{'full_text': 'tasks[3] current: finish the birdhouse'}
"""
from sqlite3 import connect
from os import access, R_OK
from time import time
class Py3status:
# available configuration parameters
cache_timeout = 120
err_exception = 'error: calendar parsing failed'
err_profile = 'error: profile not readable'
format = 'tasks:[{due}] current:{current}'
profile_path = ''
def _response(self, text, color=None):
response = {
'cached_until': time() + self.cache_timeout,
'full_text': text,
}
if color is not None:
response['color'] = color
return response
# return calendar data
def get_calendar(self, i3s_output_list, i3s_config):
_err_color = i3s_config['color_bad']
db = self.profile_path + '/calendar-data/local.sqlite'
if not access(db, R_OK):
return self._response(self.err_profile, _err_color)
try:
con = connect(db)
cur = con.cursor()
cur.execute('SELECT title, todo_completed FROM cal_todos '
'ORDER BY priority DESC, todo_stamp DESC')
tasks = cur.fetchall()
con.close()
# task[0] is the task name, task[1] is the todo_completed column
duetasks = [task[0] for task in tasks if task[1] is None]
due = len(duetasks)
completed = len(tasks) - due
current = duetasks[0] if due else ''
return self._response(
self.format.format(
due=due, completed=completed, current=current))
except Exception:
return self._response(self.err_exception, _err_color)
if __name__ == "__main__":
x = Py3status()
config = {
'color_good': '#00FF00',
'color_degraded': '#00FFFF',
'color_bad': '#FF0000'
}
print(x.get_calendar([], config))
| bsd-3-clause | -2,663,873,493,278,860,300 | 30.73913 | 76 | 0.618151 | false | 4.010989 | true | false | false |
dcolombo/FilFinder | examples/paper_figures/ks_plots.py | 3 | 1672 | # Licensed under an MIT open source license - see LICENSE
'''
KS p-values for different properties.
'''
import numpy as np
from pandas import read_csv
import matplotlib.pyplot as p
import numpy as np
import seaborn as sn
sn.set_context('talk')
sn.set_style('ticks')
# sn.mpl.rc("figure", figsize=(7, 9))
# Widths
widths = read_csv("width_ks_table_pvals.csv")
widths.index = widths["Unnamed: 0"]
del widths["Unnamed: 0"]
widths_arr = np.asarray(widths)
widths_arr[np.arange(0, 14), np.arange(0, 14)] = 1.0
widths_arr = -np.log10(widths_arr)
# p.figure(figsize=(12, 10))
p.subplot(111)
# p.xlabel("Widths")
p.imshow(widths_arr, origin='lower', cmap='binary', interpolation='nearest')
p.xticks(np.arange(0, 14), widths.columns, rotation=90)
# p.xticks(np.arange(0, 14), [], rotation=90)
p.yticks(np.arange(0, 14), widths.columns)
# p.figtext(0.05, 0.95, "a)", fontsize=20)
cb = p.colorbar()
cb.set_label(r'$-\log_{10}$ p-value')
cb.solids.set_edgecolor("face")
p.tight_layout()
p.show()
# Curvature
# curve = read_csv("curvature_ks_table_pvals.csv")
# curve.index = curve["Unnamed: 0"]
# del curve["Unnamed: 0"]
# curve_arr = np.asarray(curve)
# curve_arr[np.arange(0, 14), np.arange(0, 14)] = 1.0
# curve_arr = -np.log10(curve_arr)
# # p.figure(figsize=(12, 10))
# p.subplot(212)
# # p.xlabel("Curvature")
# p.imshow(curve_arr, interpolation='nearest', origin='lower', cmap='binary')
# p.xticks(np.arange(0, 14), curve.columns, rotation=90)
# p.yticks(np.arange(0, 14), curve.columns)
# p.figtext(0.05, 0.55, "b)", fontsize=20)
# cb = p.colorbar()
# cb.set_label(r'$-\log_{10}$ p-value')
# cb.solids.set_edgecolor("face")
# p.tight_layout()
# p.show()
| mit | 9,140,832,659,395,175,000 | 21.90411 | 77 | 0.667464 | false | 2.518072 | false | false | false |
leprikon-cz/leprikon | leprikon/models/question.py | 1 | 2000 | from json import loads
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
from django.utils.translation import ugettext_lazy as _
from ..conf import settings
from ..utils import first_upper
class Question(models.Model):
name = models.CharField(_("name"), max_length=50, unique=True)
question = models.CharField(_("question"), max_length=50)
help_text = models.TextField(
_("help text"),
blank=True,
null=True,
help_text=_("This is help text. The help text is shown next to the form field."),
)
field = models.CharField(
_("field"),
max_length=150,
choices=((key, val["name"]) for key, val in settings.LEPRIKON_QUESTION_FIELDS.items()),
)
field_args = models.TextField(
_("field_args"),
blank=True,
default="{}",
help_text=_("Enter valid JSON structure representing field configuration."),
)
active = models.BooleanField(_("active"), default=True)
class Meta:
app_label = "leprikon"
verbose_name = _("additional question")
verbose_name_plural = _("additional questions")
def __str__(self):
return self.question
@cached_property
def field_class(self):
return import_string(settings.LEPRIKON_QUESTION_FIELDS[self.field]["class"])
@cached_property
def field_kwargs(self):
return loads(self.field_args)
@cached_property
def field_label(self):
return first_upper(self.question)
def get_field(self, initial=None):
return self.field_class(label=self.field_label, initial=initial, help_text=self.help_text, **self.field_kwargs)
def clean(self):
try:
self.get_field()
except Exception as e:
raise ValidationError({"field_args": [_("Failed to create field with given field args: {}").format(e)]})
| bsd-3-clause | 3,681,449,588,112,500,000 | 31.258065 | 119 | 0.6485 | false | 4.07332 | false | false | false |
jtraver/dev | python3/char/alt1.py | 1 | 1140 | #!/usr/local/bin/python3.7
# for mac?
#!/usr/bin/env python3
#!/usr/bin/python3
# https://en.wikipedia.org/wiki/Alt_key
alt_a = "å"
ord_alt_a = ord(alt_a)
ord_a = ord("a")
alt_diff = ord_alt_a - ord_a
print("alt_a = %s, ord_a = %s, ord_alt_a = %s, alt_diff = %s" % (str(alt_a), str(ord_a), str(ord_alt_a), str(alt_diff)))
# √∫˜µ≤Ω≈ß∂ƒ©˙∆˚¬ÅÍÎÏ˝ÓÔ
# ¡£º
# this is not ord_b: why mess up the mapping? didn't make much sense for the chars they wanted I suspect
# I should have known when alt_diff wasn't 128 I guess
ord_b = ord_a + 1
ord_alt_b = ord_b + alt_diff
alt_b = chr(ord_alt_b)
print("alt_b = %s, ord_b = %s, ord_alt_b = %s, alt_diff = %s" % (str(alt_b), str(ord_b), str(ord_alt_b), str(alt_diff)))
#for i1 in range(256):
# str2 = "%s %d 0x%x '%c'" % (str(i1), i1, i1, i1)
# print(("str2 = %s" % str2))
#
#for i1 in range(32, 127):
# c1 = chr(i1)
# print "%d %s" % (i1, c1)
# 1 ¡
# ! ⁄
# 2 ™
# @ €
# ¡™£¢∞§¶•ªº
# 1234567890
#
# ⁄€‹›fifl‡‡°·‚
# !@#$%^&*()
#
# œ∑´®†\¨ˆøπ
# qwertyuiop
#
# Œ„´‰ˇÁ¨ˆØ∏
# QWERTYUIOP
| mit | 4,733,199,907,475,030,000 | 19.431373 | 120 | 0.537428 | false | 1.672552 | false | false | false |
ahmedaljazzar/edx-platform | openedx/core/djangoapps/user_authn/views/tests/test_views.py | 2 | 39023 | # -*- coding: utf-8 -*-
""" Tests for user authn views. """
from http.cookies import SimpleCookie
import logging
import re
from unittest import skipUnless
from urllib import urlencode
import ddt
import mock
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.models import AnonymousUser
from django.contrib.messages.middleware import MessageMiddleware
from django.contrib.sessions.middleware import SessionMiddleware
from django.core import mail
from django.core.files.uploadedfile import SimpleUploadedFile
from django.urls import reverse
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django.utils.translation import ugettext as _
from edx_oauth2_provider.tests.factories import AccessTokenFactory, ClientFactory, RefreshTokenFactory
from oauth2_provider.models import AccessToken as dot_access_token
from oauth2_provider.models import RefreshToken as dot_refresh_token
from provider.oauth2.models import AccessToken as dop_access_token
from provider.oauth2.models import RefreshToken as dop_refresh_token
from testfixtures import LogCapture
from course_modes.models import CourseMode
from openedx.core.djangoapps.user_authn.views.login_form import login_and_registration_form
from openedx.core.djangoapps.oauth_dispatch.tests import factories as dot_factories
from openedx.core.djangoapps.site_configuration.tests.mixins import SiteMixin
from openedx.core.djangoapps.theming.tests.test_util import with_comprehensive_theme_context
from openedx.core.djangoapps.user_api.accounts.api import activate_account, create_account
from openedx.core.djangoapps.user_api.errors import UserAPIInternalError
from openedx.core.djangolib.js_utils import dump_js_escaped_json
from openedx.core.djangolib.markup import HTML, Text
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase, skip_unless_lms
from third_party_auth.tests.testutil import ThirdPartyAuthTestMixin, simulate_running_pipeline
from util.testing import UrlResetMixin
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
LOGGER_NAME = 'audit'
User = get_user_model() # pylint:disable=invalid-name
FEATURES_WITH_FAILED_PASSWORD_RESET_EMAIL = settings.FEATURES.copy()
FEATURES_WITH_FAILED_PASSWORD_RESET_EMAIL['ENABLE_PASSWORD_RESET_FAILURE_EMAIL'] = True
@skip_unless_lms
@ddt.ddt
class UserAccountUpdateTest(CacheIsolationTestCase, UrlResetMixin):
""" Tests for views that update the user's account information. """
USERNAME = u"heisenberg"
ALTERNATE_USERNAME = u"walt"
OLD_PASSWORD = u"ḅḷüëṡḳÿ"
NEW_PASSWORD = u"🄱🄸🄶🄱🄻🅄🄴"
OLD_EMAIL = u"[email protected]"
NEW_EMAIL = u"[email protected]"
INVALID_ATTEMPTS = 100
INVALID_KEY = u"123abc"
URLCONF_MODULES = ['student_accounts.urls']
ENABLED_CACHES = ['default']
def setUp(self):
super(UserAccountUpdateTest, self).setUp()
# Create/activate a new account
activation_key = create_account(self.USERNAME, self.OLD_PASSWORD, self.OLD_EMAIL)
activate_account(activation_key)
# Login
result = self.client.login(username=self.USERNAME, password=self.OLD_PASSWORD)
self.assertTrue(result)
@skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in LMS')
def test_password_change(self):
# Request a password change while logged in, simulating
# use of the password reset link from the account page
response = self._change_password()
self.assertEqual(response.status_code, 200)
# Check that an email was sent
self.assertEqual(len(mail.outbox), 1)
# Retrieve the activation link from the email body
email_body = mail.outbox[0].body
result = re.search(r'(?P<url>https?://[^\s]+)', email_body)
self.assertIsNot(result, None)
activation_link = result.group('url')
# Visit the activation link
response = self.client.get(activation_link)
self.assertEqual(response.status_code, 200)
# Submit a new password and follow the redirect to the success page
response = self.client.post(
activation_link,
# These keys are from the form on the current password reset confirmation page.
{'new_password1': self.NEW_PASSWORD, 'new_password2': self.NEW_PASSWORD},
follow=True
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Your password has been reset.")
# Log the user out to clear session data
self.client.logout()
# Verify that the new password can be used to log in
result = self.client.login(username=self.USERNAME, password=self.NEW_PASSWORD)
self.assertTrue(result)
# Try reusing the activation link to change the password again
# Visit the activation link again.
response = self.client.get(activation_link)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "This password reset link is invalid. It may have been used already.")
self.client.logout()
# Verify that the old password cannot be used to log in
result = self.client.login(username=self.USERNAME, password=self.OLD_PASSWORD)
self.assertFalse(result)
# Verify that the new password continues to be valid
result = self.client.login(username=self.USERNAME, password=self.NEW_PASSWORD)
self.assertTrue(result)
def test_password_change_failure(self):
with mock.patch('openedx.core.djangoapps.user_api.accounts.api.request_password_change',
side_effect=UserAPIInternalError):
self._change_password()
self.assertRaises(UserAPIInternalError)
@override_settings(FEATURES=FEATURES_WITH_FAILED_PASSWORD_RESET_EMAIL)
def test_password_reset_failure_email(self):
"""Test that a password reset failure email notification is sent, when enabled."""
# Log the user out
self.client.logout()
bad_email = '[email protected]'
response = self._change_password(email=bad_email)
self.assertEqual(response.status_code, 200)
# Check that an email was sent
self.assertEqual(len(mail.outbox), 1)
# Verify that the body contains the failed password reset message
sent_message = mail.outbox[0]
text_body = sent_message.body
html_body = sent_message.alternatives[0][0]
for email_body in [text_body, html_body]:
msg = 'However, there is currently no user account associated with your email address: {email}'.format(
email=bad_email
)
assert u'reset for your user account at {}'.format(settings.PLATFORM_NAME) in email_body
assert 'password_reset_confirm' not in email_body, 'The link should not be added if user was not found'
assert msg in email_body
@ddt.data(True, False)
def test_password_change_logged_out(self, send_email):
# Log the user out
self.client.logout()
# Request a password change while logged out, simulating
# use of the password reset link from the login page
if send_email:
response = self._change_password(email=self.OLD_EMAIL)
self.assertEqual(response.status_code, 200)
else:
# Don't send an email in the POST data, simulating
# its (potentially accidental) omission in the POST
# data sent from the login page
response = self._change_password()
self.assertEqual(response.status_code, 400)
def test_access_token_invalidation_logged_out(self):
self.client.logout()
user = User.objects.get(email=self.OLD_EMAIL)
self._create_dop_tokens(user)
self._create_dot_tokens(user)
response = self._change_password(email=self.OLD_EMAIL)
self.assertEqual(response.status_code, 200)
self.assert_access_token_destroyed(user)
def test_access_token_invalidation_logged_in(self):
user = User.objects.get(email=self.OLD_EMAIL)
self._create_dop_tokens(user)
self._create_dot_tokens(user)
response = self._change_password()
self.assertEqual(response.status_code, 200)
self.assert_access_token_destroyed(user)
def test_password_change_inactive_user(self):
# Log out the user created during test setup
self.client.logout()
# Create a second user, but do not activate it
create_account(self.ALTERNATE_USERNAME, self.OLD_PASSWORD, self.NEW_EMAIL)
# Send the view the email address tied to the inactive user
response = self._change_password(email=self.NEW_EMAIL)
# Expect that the activation email is still sent,
# since the user may have lost the original activation email.
self.assertEqual(response.status_code, 200)
self.assertEqual(len(mail.outbox), 1)
def test_password_change_no_user(self):
# Log out the user created during test setup
self.client.logout()
with LogCapture(LOGGER_NAME, level=logging.INFO) as logger:
# Send the view an email address not tied to any user
response = self._change_password(email=self.NEW_EMAIL)
self.assertEqual(response.status_code, 200)
logger.check((LOGGER_NAME, 'INFO', 'Invalid password reset attempt'))
def test_password_change_rate_limited(self):
# Log out the user created during test setup, to prevent the view from
# selecting the logged-in user's email address over the email provided
# in the POST data
self.client.logout()
# Make many consecutive bad requests in an attempt to trigger the rate limiter
for __ in xrange(self.INVALID_ATTEMPTS):
self._change_password(email=self.NEW_EMAIL)
response = self._change_password(email=self.NEW_EMAIL)
self.assertEqual(response.status_code, 403)
@ddt.data(
('post', 'password_change_request', []),
)
@ddt.unpack
def test_require_http_method(self, correct_method, url_name, args):
wrong_methods = {'get', 'put', 'post', 'head', 'options', 'delete'} - {correct_method}
url = reverse(url_name, args=args)
for method in wrong_methods:
response = getattr(self.client, method)(url)
self.assertEqual(response.status_code, 405)
def _change_password(self, email=None):
"""Request to change the user's password. """
data = {}
if email:
data['email'] = email
return self.client.post(path=reverse('password_change_request'), data=data)
def _create_dop_tokens(self, user=None):
"""Create dop access token for given user if user provided else for default user."""
if not user:
user = User.objects.get(email=self.OLD_EMAIL)
client = ClientFactory()
access_token = AccessTokenFactory(user=user, client=client)
RefreshTokenFactory(user=user, client=client, access_token=access_token)
def _create_dot_tokens(self, user=None):
"""Create dop access token for given user if user provided else for default user."""
if not user:
user = User.objects.get(email=self.OLD_EMAIL)
application = dot_factories.ApplicationFactory(user=user)
access_token = dot_factories.AccessTokenFactory(user=user, application=application)
dot_factories.RefreshTokenFactory(user=user, application=application, access_token=access_token)
def assert_access_token_destroyed(self, user):
"""Assert all access tokens are destroyed."""
self.assertFalse(dot_access_token.objects.filter(user=user).exists())
self.assertFalse(dot_refresh_token.objects.filter(user=user).exists())
self.assertFalse(dop_access_token.objects.filter(user=user).exists())
self.assertFalse(dop_refresh_token.objects.filter(user=user).exists())
@skip_unless_lms
@ddt.ddt
class LoginAndRegistrationTest(ThirdPartyAuthTestMixin, UrlResetMixin, ModuleStoreTestCase):
""" Tests for the student account views that update the user's account information. """
shard = 7
USERNAME = "bob"
EMAIL = "[email protected]"
PASSWORD = "password"
URLCONF_MODULES = ['openedx.core.djangoapps.embargo']
@mock.patch.dict(settings.FEATURES, {'EMBARGO': True})
def setUp(self): # pylint: disable=arguments-differ
super(LoginAndRegistrationTest, self).setUp()
# Several third party auth providers are created for these tests:
self.google_provider = self.configure_google_provider(enabled=True, visible=True)
self.configure_facebook_provider(enabled=True, visible=True)
self.configure_dummy_provider(
visible=True,
enabled=True,
icon_class='',
icon_image=SimpleUploadedFile('icon.svg', '<svg><rect width="50" height="100"/></svg>'),
)
self.hidden_enabled_provider = self.configure_linkedin_provider(
visible=False,
enabled=True,
)
self.hidden_disabled_provider = self.configure_azure_ad_provider()
@ddt.data(
("signin_user", "login"),
("register_user", "register"),
)
@ddt.unpack
def test_login_and_registration_form(self, url_name, initial_mode):
response = self.client.get(reverse(url_name))
expected_data = '"initial_mode": "{mode}"'.format(mode=initial_mode)
self.assertContains(response, expected_data)
@ddt.data("signin_user", "register_user")
def test_login_and_registration_form_already_authenticated(self, url_name):
# Create/activate a new account and log in
activation_key = create_account(self.USERNAME, self.PASSWORD, self.EMAIL)
activate_account(activation_key)
result = self.client.login(username=self.USERNAME, password=self.PASSWORD)
self.assertTrue(result)
# Verify that we're redirected to the dashboard
response = self.client.get(reverse(url_name))
self.assertRedirects(response, reverse("dashboard"))
@ddt.data(
(None, "signin_user"),
(None, "register_user"),
("edx.org", "signin_user"),
("edx.org", "register_user"),
)
@ddt.unpack
def test_login_and_registration_form_signin_not_preserves_params(self, theme, url_name):
params = [
('course_id', 'edX/DemoX/Demo_Course'),
('enrollment_action', 'enroll'),
]
# The response should not have a "Sign In" button with the URL
# that preserves the querystring params
with with_comprehensive_theme_context(theme):
response = self.client.get(reverse(url_name), params, HTTP_ACCEPT="text/html")
expected_url = '/login?{}'.format(self._finish_auth_url_param(params + [('next', '/dashboard')]))
self.assertNotContains(response, expected_url)
# Add additional parameters:
params = [
('course_id', 'edX/DemoX/Demo_Course'),
('enrollment_action', 'enroll'),
('course_mode', CourseMode.DEFAULT_MODE_SLUG),
('email_opt_in', 'true'),
('next', '/custom/final/destination')
]
# Verify that this parameter is also preserved
with with_comprehensive_theme_context(theme):
response = self.client.get(reverse(url_name), params, HTTP_ACCEPT="text/html")
expected_url = '/login?{}'.format(self._finish_auth_url_param(params))
self.assertNotContains(response, expected_url)
@mock.patch.dict(settings.FEATURES, {"ENABLE_THIRD_PARTY_AUTH": False})
@ddt.data("signin_user", "register_user")
def test_third_party_auth_disabled(self, url_name):
response = self.client.get(reverse(url_name))
self._assert_third_party_auth_data(response, None, None, [], None)
@mock.patch('openedx.core.djangoapps.user_authn.views.login_form.enterprise_customer_for_request')
@mock.patch('openedx.core.djangoapps.user_api.api.enterprise_customer_for_request')
@ddt.data(
("signin_user", None, None, None, False),
("register_user", None, None, None, False),
("signin_user", "google-oauth2", "Google", None, False),
("register_user", "google-oauth2", "Google", None, False),
("signin_user", "facebook", "Facebook", None, False),
("register_user", "facebook", "Facebook", None, False),
("signin_user", "dummy", "Dummy", None, False),
("register_user", "dummy", "Dummy", None, False),
(
"signin_user",
"google-oauth2",
"Google",
{
'name': 'FakeName',
'logo': 'https://host.com/logo.jpg',
'welcome_msg': 'No message'
},
True
)
)
@ddt.unpack
def test_third_party_auth(
self,
url_name,
current_backend,
current_provider,
expected_enterprise_customer_mock_attrs,
add_user_details,
enterprise_customer_mock_1,
enterprise_customer_mock_2
):
params = [
('course_id', 'course-v1:Org+Course+Run'),
('enrollment_action', 'enroll'),
('course_mode', CourseMode.DEFAULT_MODE_SLUG),
('email_opt_in', 'true'),
('next', '/custom/final/destination'),
]
if expected_enterprise_customer_mock_attrs:
expected_ec = {
'name': expected_enterprise_customer_mock_attrs['name'],
'branding_configuration': {
'logo': 'https://host.com/logo.jpg',
'welcome_message': expected_enterprise_customer_mock_attrs['welcome_msg']
}
}
else:
expected_ec = None
email = None
if add_user_details:
email = '[email protected]'
enterprise_customer_mock_1.return_value = expected_ec
enterprise_customer_mock_2.return_value = expected_ec
# Simulate a running pipeline
if current_backend is not None:
pipeline_target = "openedx.core.djangoapps.user_authn.views.login_form.third_party_auth.pipeline"
with simulate_running_pipeline(pipeline_target, current_backend, email=email):
response = self.client.get(reverse(url_name), params, HTTP_ACCEPT="text/html")
# Do NOT simulate a running pipeline
else:
response = self.client.get(reverse(url_name), params, HTTP_ACCEPT="text/html")
# This relies on the THIRD_PARTY_AUTH configuration in the test settings
expected_providers = [
{
"id": "oa2-dummy",
"name": "Dummy",
"iconClass": None,
"iconImage": settings.MEDIA_URL + "icon.svg",
"loginUrl": self._third_party_login_url("dummy", "login", params),
"registerUrl": self._third_party_login_url("dummy", "register", params)
},
{
"id": "oa2-facebook",
"name": "Facebook",
"iconClass": "fa-facebook",
"iconImage": None,
"loginUrl": self._third_party_login_url("facebook", "login", params),
"registerUrl": self._third_party_login_url("facebook", "register", params)
},
{
"id": "oa2-google-oauth2",
"name": "Google",
"iconClass": "fa-google-plus",
"iconImage": None,
"loginUrl": self._third_party_login_url("google-oauth2", "login", params),
"registerUrl": self._third_party_login_url("google-oauth2", "register", params)
},
]
self._assert_third_party_auth_data(
response,
current_backend,
current_provider,
expected_providers,
expected_ec,
add_user_details
)
def _configure_testshib_provider(self, provider_name, idp_slug):
"""
Enable and configure the TestShib SAML IdP as a third_party_auth provider.
"""
kwargs = {}
kwargs.setdefault('name', provider_name)
kwargs.setdefault('enabled', True)
kwargs.setdefault('visible', True)
kwargs.setdefault('slug', idp_slug)
kwargs.setdefault('entity_id', 'https://idp.testshib.org/idp/shibboleth')
kwargs.setdefault('metadata_source', 'https://mock.testshib.org/metadata/testshib-providers.xml')
kwargs.setdefault('icon_class', 'fa-university')
kwargs.setdefault('attr_email', 'dummy-email-attr')
kwargs.setdefault('max_session_length', None)
self.configure_saml_provider(**kwargs)
@mock.patch('django.conf.settings.MESSAGE_STORAGE', 'django.contrib.messages.storage.cookie.CookieStorage')
@mock.patch('openedx.core.djangoapps.user_authn.views.login_form.enterprise_customer_for_request')
@ddt.data(
(
'signin_user',
'tpa-saml',
'TestShib',
)
)
@ddt.unpack
def test_saml_auth_with_error(
self,
url_name,
current_backend,
current_provider,
enterprise_customer_mock,
):
params = []
request = RequestFactory().get(reverse(url_name), params, HTTP_ACCEPT='text/html')
SessionMiddleware().process_request(request)
request.user = AnonymousUser()
self.enable_saml()
dummy_idp = 'testshib'
self._configure_testshib_provider(current_provider, dummy_idp)
enterprise_customer_data = {
'uuid': '72416e52-8c77-4860-9584-15e5b06220fb',
'name': 'Dummy Enterprise',
'identity_provider': dummy_idp,
}
enterprise_customer_mock.return_value = enterprise_customer_data
dummy_error_message = 'Authentication failed: SAML login failed ' \
'["invalid_response"] [SAML Response must contain 1 assertion]'
# Add error message for error in auth pipeline
MessageMiddleware().process_request(request)
messages.error(request, dummy_error_message, extra_tags='social-auth')
# Simulate a running pipeline
pipeline_response = {
'response': {
'idp_name': dummy_idp
}
}
pipeline_target = 'openedx.core.djangoapps.user_authn.views.login_form.third_party_auth.pipeline'
with simulate_running_pipeline(pipeline_target, current_backend, **pipeline_response):
with mock.patch('edxmako.request_context.get_current_request', return_value=request):
response = login_and_registration_form(request)
expected_error_message = Text(_(
u'We are sorry, you are not authorized to access {platform_name} via this channel. '
u'Please contact your learning administrator or manager in order to access {platform_name}.'
u'{line_break}{line_break}'
u'Error Details:{line_break}{error_message}')
).format(
platform_name=settings.PLATFORM_NAME,
error_message=dummy_error_message,
line_break=HTML('<br/>')
)
self._assert_saml_auth_data_with_error(
response,
current_backend,
current_provider,
expected_error_message
)
def test_hinted_login(self):
params = [("next", "/courses/something/?tpa_hint=oa2-google-oauth2")]
response = self.client.get(reverse('signin_user'), params, HTTP_ACCEPT="text/html")
self.assertContains(response, '"third_party_auth_hint": "oa2-google-oauth2"')
tpa_hint = self.hidden_enabled_provider.provider_id
params = [("next", "/courses/something/?tpa_hint={0}".format(tpa_hint))]
response = self.client.get(reverse('signin_user'), params, HTTP_ACCEPT="text/html")
self.assertContains(response, '"third_party_auth_hint": "{0}"'.format(tpa_hint))
tpa_hint = self.hidden_disabled_provider.provider_id
params = [("next", "/courses/something/?tpa_hint={0}".format(tpa_hint))]
response = self.client.get(reverse('signin_user'), params, HTTP_ACCEPT="text/html")
self.assertNotIn(response.content, tpa_hint)
@ddt.data(
('signin_user', 'login'),
('register_user', 'register'),
)
@ddt.unpack
def test_hinted_login_dialog_disabled(self, url_name, auth_entry):
"""Test that the dialog doesn't show up for hinted logins when disabled. """
self.google_provider.skip_hinted_login_dialog = True
self.google_provider.save()
params = [("next", "/courses/something/?tpa_hint=oa2-google-oauth2")]
response = self.client.get(reverse(url_name), params, HTTP_ACCEPT="text/html")
expected_url = '/auth/login/google-oauth2/?auth_entry={}&next=%2Fcourses'\
'%2Fsomething%2F%3Ftpa_hint%3Doa2-google-oauth2'.format(auth_entry)
self.assertRedirects(
response,
expected_url,
target_status_code=302
)
@override_settings(FEATURES=dict(settings.FEATURES, THIRD_PARTY_AUTH_HINT='oa2-google-oauth2'))
@ddt.data(
'signin_user',
'register_user',
)
def test_settings_tpa_hinted_login(self, url_name):
"""
Ensure that settings.FEATURES['THIRD_PARTY_AUTH_HINT'] can set third_party_auth_hint.
"""
params = [("next", "/courses/something/")]
response = self.client.get(reverse(url_name), params, HTTP_ACCEPT="text/html")
self.assertContains(response, '"third_party_auth_hint": "oa2-google-oauth2"')
# THIRD_PARTY_AUTH_HINT can be overridden via the query string
tpa_hint = self.hidden_enabled_provider.provider_id
params = [("next", "/courses/something/?tpa_hint={0}".format(tpa_hint))]
response = self.client.get(reverse(url_name), params, HTTP_ACCEPT="text/html")
self.assertContains(response, '"third_party_auth_hint": "{0}"'.format(tpa_hint))
# Even disabled providers in the query string will override THIRD_PARTY_AUTH_HINT
tpa_hint = self.hidden_disabled_provider.provider_id
params = [("next", "/courses/something/?tpa_hint={0}".format(tpa_hint))]
response = self.client.get(reverse(url_name), params, HTTP_ACCEPT="text/html")
self.assertNotIn(response.content, tpa_hint)
@override_settings(FEATURES=dict(settings.FEATURES, THIRD_PARTY_AUTH_HINT='oa2-google-oauth2'))
@ddt.data(
('signin_user', 'login'),
('register_user', 'register'),
)
@ddt.unpack
def test_settings_tpa_hinted_login_dialog_disabled(self, url_name, auth_entry):
"""Test that the dialog doesn't show up for hinted logins when disabled via settings.THIRD_PARTY_AUTH_HINT. """
self.google_provider.skip_hinted_login_dialog = True
self.google_provider.save()
params = [("next", "/courses/something/")]
response = self.client.get(reverse(url_name), params, HTTP_ACCEPT="text/html")
expected_url = '/auth/login/google-oauth2/?auth_entry={}&next=%2Fcourses'\
'%2Fsomething%2F%3Ftpa_hint%3Doa2-google-oauth2'.format(auth_entry)
self.assertRedirects(
response,
expected_url,
target_status_code=302
)
@mock.patch('openedx.core.djangoapps.user_authn.views.login_form.enterprise_customer_for_request')
@ddt.data(
('signin_user', False, None, None),
('register_user', False, None, None),
('signin_user', True, 'Fake EC', 'http://logo.com/logo.jpg'),
('register_user', True, 'Fake EC', 'http://logo.com/logo.jpg'),
('signin_user', True, 'Fake EC', None),
('register_user', True, 'Fake EC', None),
)
@ddt.unpack
def test_enterprise_register(self, url_name, ec_present, ec_name, logo_url, mock_get_ec):
"""
Verify that when an EnterpriseCustomer is received on the login and register views,
the appropriate sidebar is rendered.
"""
if ec_present:
mock_get_ec.return_value = {
'name': ec_name,
'branding_configuration': {'logo': logo_url}
}
else:
mock_get_ec.return_value = None
response = self.client.get(reverse(url_name), HTTP_ACCEPT="text/html")
enterprise_sidebar_div_id = u'enterprise-content-container'
if not ec_present:
self.assertNotContains(response, text=enterprise_sidebar_div_id)
else:
self.assertContains(response, text=enterprise_sidebar_div_id)
welcome_message = settings.ENTERPRISE_SPECIFIC_BRANDED_WELCOME_TEMPLATE
expected_message = Text(welcome_message).format(
start_bold=HTML('<b>'),
end_bold=HTML('</b>'),
line_break=HTML('<br/>'),
enterprise_name=ec_name,
platform_name=settings.PLATFORM_NAME,
privacy_policy_link_start=HTML("<a href='{pp_url}' target='_blank'>").format(
pp_url=settings.MKTG_URLS.get('PRIVACY', 'https://www.edx.org/edx-privacy-policy')
),
privacy_policy_link_end=HTML("</a>"),
)
self.assertContains(response, expected_message)
if logo_url:
self.assertContains(response, logo_url)
def test_enterprise_cookie_delete(self):
"""
Test that enterprise cookies are deleted in login/registration views.
Cookies must be deleted in login/registration views so that *default* login/registration branding
is displayed to subsequent requests from non-enterprise customers.
"""
cookies = SimpleCookie()
cookies[settings.ENTERPRISE_CUSTOMER_COOKIE_NAME] = 'test-enterprise-customer'
response = self.client.get(reverse('signin_user'), HTTP_ACCEPT="text/html", cookies=cookies)
self.assertIn(settings.ENTERPRISE_CUSTOMER_COOKIE_NAME, response.cookies)
enterprise_cookie = response.cookies[settings.ENTERPRISE_CUSTOMER_COOKIE_NAME]
self.assertEqual(enterprise_cookie['domain'], settings.BASE_COOKIE_DOMAIN)
self.assertEqual(enterprise_cookie.value, '')
@override_settings(SITE_NAME=settings.MICROSITE_TEST_HOSTNAME)
def test_microsite_uses_old_login_page(self):
# Retrieve the login page from a microsite domain
# and verify that we're served the old page.
resp = self.client.get(
reverse("signin_user"),
HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME
)
self.assertContains(resp, "Log into your Test Site Account")
self.assertContains(resp, "login-form")
def test_microsite_uses_old_register_page(self):
# Retrieve the register page from a microsite domain
# and verify that we're served the old page.
resp = self.client.get(
reverse("register_user"),
HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME
)
self.assertContains(resp, "Register for Test Site")
self.assertContains(resp, "register-form")
def test_login_registration_xframe_protected(self):
resp = self.client.get(
reverse("register_user"),
{},
HTTP_REFERER="http://localhost/iframe"
)
self.assertEqual(resp['X-Frame-Options'], 'DENY')
self.configure_lti_provider(name='Test', lti_hostname='localhost', lti_consumer_key='test_key', enabled=True)
resp = self.client.get(
reverse("register_user"),
HTTP_REFERER="http://localhost/iframe"
)
self.assertEqual(resp['X-Frame-Options'], 'ALLOW')
def _assert_third_party_auth_data(self, response, current_backend, current_provider, providers, expected_ec,
add_user_details=False):
"""Verify that third party auth info is rendered correctly in a DOM data attribute. """
finish_auth_url = None
if current_backend:
finish_auth_url = reverse("social:complete", kwargs={"backend": current_backend}) + "?"
auth_info = {
"currentProvider": current_provider,
"providers": providers,
"secondaryProviders": [],
"finishAuthUrl": finish_auth_url,
"errorMessage": None,
"registerFormSubmitButtonText": "Create Account",
"syncLearnerProfileData": False,
"pipeline_user_details": {"email": "[email protected]"} if add_user_details else {}
}
if expected_ec is not None:
# If we set an EnterpriseCustomer, third-party auth providers ought to be hidden.
auth_info['providers'] = []
auth_info = dump_js_escaped_json(auth_info)
expected_data = '"third_party_auth": {auth_info}'.format(
auth_info=auth_info
)
self.assertContains(response, expected_data)
def _assert_saml_auth_data_with_error(
self, response, current_backend, current_provider, expected_error_message
):
"""
Verify that third party auth info is rendered correctly in a DOM data attribute.
"""
finish_auth_url = None
if current_backend:
finish_auth_url = reverse('social:complete', kwargs={'backend': current_backend}) + '?'
auth_info = {
'currentProvider': current_provider,
'providers': [],
'secondaryProviders': [],
'finishAuthUrl': finish_auth_url,
'errorMessage': expected_error_message,
'registerFormSubmitButtonText': 'Create Account',
'syncLearnerProfileData': False,
'pipeline_user_details': {'response': {'idp_name': 'testshib'}}
}
auth_info = dump_js_escaped_json(auth_info)
expected_data = '"third_party_auth": {auth_info}'.format(
auth_info=auth_info
)
self.assertContains(response, expected_data)
def _third_party_login_url(self, backend_name, auth_entry, login_params):
"""Construct the login URL to start third party authentication. """
return u"{url}?auth_entry={auth_entry}&{param_str}".format(
url=reverse("social:begin", kwargs={"backend": backend_name}),
auth_entry=auth_entry,
param_str=self._finish_auth_url_param(login_params),
)
def _finish_auth_url_param(self, params):
"""
Make the next=... URL parameter that indicates where the user should go next.
>>> _finish_auth_url_param([('next', '/dashboard')])
'/account/finish_auth?next=%2Fdashboard'
"""
return urlencode({
'next': '/account/finish_auth?{}'.format(urlencode(params))
})
def test_english_by_default(self):
response = self.client.get(reverse('signin_user'), [], HTTP_ACCEPT="text/html")
self.assertEqual(response['Content-Language'], 'en')
def test_unsupported_language(self):
response = self.client.get(reverse('signin_user'), [], HTTP_ACCEPT="text/html", HTTP_ACCEPT_LANGUAGE="ts-zx")
self.assertEqual(response['Content-Language'], 'en')
def test_browser_language(self):
response = self.client.get(reverse('signin_user'), [], HTTP_ACCEPT="text/html", HTTP_ACCEPT_LANGUAGE="es")
self.assertEqual(response['Content-Language'], 'es-419')
    def test_browser_language_dialect(self):
response = self.client.get(reverse('signin_user'), [], HTTP_ACCEPT="text/html", HTTP_ACCEPT_LANGUAGE="es-es")
self.assertEqual(response['Content-Language'], 'es-es')
@skip_unless_lms
@override_settings(SITE_NAME=settings.MICROSITE_LOGISTRATION_HOSTNAME)
class MicrositeLogistrationTests(TestCase):
"""
Test to validate that microsites can display the logistration page
"""
def test_login_page(self):
"""
Make sure that we get the expected logistration page on our specialized
microsite
"""
resp = self.client.get(
reverse('signin_user'),
HTTP_HOST=settings.MICROSITE_LOGISTRATION_HOSTNAME
)
self.assertEqual(resp.status_code, 200)
self.assertIn('<div id="login-and-registration-container"', resp.content)
def test_registration_page(self):
"""
Make sure that we get the expected logistration page on our specialized
microsite
"""
resp = self.client.get(
reverse('register_user'),
HTTP_HOST=settings.MICROSITE_LOGISTRATION_HOSTNAME
)
self.assertEqual(resp.status_code, 200)
self.assertIn('<div id="login-and-registration-container"', resp.content)
@override_settings(SITE_NAME=settings.MICROSITE_TEST_HOSTNAME)
def test_no_override(self):
"""
Make sure we get the old style login/registration if we don't override
"""
resp = self.client.get(
reverse('signin_user'),
HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME
)
self.assertEqual(resp.status_code, 200)
self.assertNotIn('<div id="login-and-registration-container"', resp.content)
resp = self.client.get(
reverse('register_user'),
HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME
)
self.assertEqual(resp.status_code, 200)
self.assertNotIn('<div id="login-and-registration-container"', resp.content)
@skip_unless_lms
class AccountCreationTestCaseWithSiteOverrides(SiteMixin, TestCase):
"""
Test cases for Feature flag ALLOW_PUBLIC_ACCOUNT_CREATION which when
turned off disables the account creation options in lms
"""
def setUp(self):
"""Set up the tests"""
super(AccountCreationTestCaseWithSiteOverrides, self).setUp()
# Set the feature flag ALLOW_PUBLIC_ACCOUNT_CREATION to False
self.site_configuration_values = {
'ALLOW_PUBLIC_ACCOUNT_CREATION': False
}
self.site_domain = 'testserver1.com'
self.set_up_site(self.site_domain, self.site_configuration_values)
def test_register_option_login_page(self):
"""
Navigate to the login page and check the Register option is hidden when
ALLOW_PUBLIC_ACCOUNT_CREATION flag is turned off
"""
response = self.client.get(reverse('signin_user'))
self.assertNotIn('<a class="btn-neutral" href="/register?next=%2Fdashboard">Register</a>',
response.content)
| agpl-3.0 | 4,609,709,022,389,832,000 | 40.970936 | 119 | 0.633403 | false | 3.988441 | true | false | false |
cjneasbi/mitmextract | pcaptomitm/extract.py | 1 | 12783 |
import sys
import re
import nids
from . import regex
DEBUG = False
#initialize on every call of extract_flows
ts = None # request timestamp
requestdata = None # buffer for requests from open connections
responsedata = None # buffer for responses from open connections
requestcounter = None
http_req = None # contains data from closed connections
NIDS_END_STATES = (nids.NIDS_CLOSE, nids.NIDS_TIMEOUT, nids.NIDS_RESET)
class FlowHeader(object):
def __init__(self, ts_request_start, ts_request_finish, ts_response_start,
ts_response_finish, srcip, sport, dstip, dport):
self.ts_request_start = ts_request_start
self.ts_request_finish = ts_request_finish
self.ts_response_start = ts_response_start
self.ts_response_finish = ts_response_finish
self.srcip = srcip
self.sport = sport
self.dstip = dstip
self.dport = dport
def __eq__(self, other):
return (self.ts_request_start, self.ts_request_finish,
self.ts_response_start, self.ts_response_finish,
self.srcip, self.sport, self.dstip, self.dport) == \
(other.ts_request_start, other.ts_request_finish,
other.ts_response_start, other.ts_response_finish,
other.srcip, other.sport, other.dstip, other.dport)
def __hash__(self):
return hash((self.ts_request_start, self.ts_request_finish,
self.ts_response_start, self.ts_response_finish,
self.srcip, self.sport, self.dstip, self.dport))
def __repr__(self):
return ("FlowHeader(ts_request_start=%r,ts_request_finish=%r,ts_response_start=%r"
",ts_response_finish=%r,srcip=%r,sport=%r,dstip=%r,dport=%r)") % \
(self.ts_request_start, self.ts_request_finish,
self.ts_response_start, self.ts_response_finish,
self.srcip, self.sport, self.dstip, self.dport)
def __str__(self):
return self.__repr__()
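# FlowHeader overrides __eq__ and __hash__ so instances behave as immutable
# value objects; extract_flows() relies on this to use them as dictionary keys
# for the reconstructed flows.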
#http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1
#finds the ending index of a chunked response starting at the index start
def find_chunk_end(h, start):
matches = re.finditer(regex.END_CHUNK_REGEX, responsedata[h])
end_size_line = -1
for m in matches:
if m.start() > start:
#we subtract 2 because if there is no trailer after the
            #last chunk the first CRLF of the ending double CRLF is
#the CRLF at the end of the regex
end_size_line = m.end() - 2
break
if end_size_line != -1:
matches = re.finditer('\r\n\r\n', responsedata[h])
for m in matches:
if m.start() >= end_size_line:
return m.end()
return None
def get_response_headers(h, start):
return get_headers(responsedata, h, start)
def get_request_headers(h, start):
return get_headers(requestdata, h, start)
def get_headers(buf, h, start):
header_start = None
header_end = None
    matches = re.finditer('\r\n\r\n', buf[h])
for m in matches:
if m.start() > start:
header_end = m.end()
break
    matches = re.finditer('\r\n', buf[h])
for m in matches:
if m.start() > start:
header_start = m.end()
break
if header_start is not None and header_end is not None:
return buf[h][header_start:header_end]
return None
def split_responses(h):
matches = re.finditer(regex.HTTP_RESP_REGEX, responsedata[h])
responses = list()
start = -1
for m in matches:
end = -1
if start != -1 and start < m.start():
headers = get_response_headers(h, start)
if "Transfer-Encoding: chunked" in headers:
end = find_chunk_end(h, start)
else :
end = m.start()
responses.append(responsedata[h][start : end])
else:
end = m.start()
start = end
responses.append(responsedata[h][start:])
return responses
def split_requests(h):
matches = re.finditer(regex.HTTP_REQ_REGEX, requestdata[h])
requests = list()
start = -1
for m in matches:
if start != -1:
requests.append(requestdata[h][start : m.start()])
start = m.start()
requests.append(requestdata[h][start:])
return requests
def is_http_response(data):
m = re.search(regex.HTTP_RESP_REGEX, data)
if m:
if m.start() == 0:
return True
return False
def is_http_request(data):
m = re.search(regex.HTTP_REQ_REGEX, data)
if m:
if m.start() == 0:
return True
return False
def num_requests(h):
return len(re.findall(regex.HTTP_REQ_REGEX, requestdata[h]))
def num_responses(h):
matches = re.finditer(regex.HTTP_RESP_REGEX, responsedata[h])
resp_count = 0
start = -1
for m in matches:
end = -1
if start != -1 and start < m.start():
headers = get_response_headers(h, start)
if "Transfer-Encoding: chunked" in headers:
end = find_chunk_end(h, start)
else:
end = m.start()
resp_count += 1
else:
end = m.start()
start = end
if len(responsedata[h][start:].strip()) > 0:
resp_count += 1
return resp_count
# returns a list of tuples; each tuple contains (count, request, response)
def add_reconstructed_flow(h):
retval = list()
requests = list()
responses = list()
if num_requests(h) > 1:
requests = split_requests(h)
else:
requests.append(requestdata[h])
if num_responses(h) > 1:
responses = split_responses(h)
else:
responses.append(responsedata[h])
maxlen = 0
if len(requests) > len(responses):
maxlen = len(requests)
else:
maxlen = len(responses)
if DEBUG and len(requests) != len(responses):
print "Unequal number of requests and responses. " + str(h)
print(str(len(requests)) + " " + str(len(responses)) + "\n")
for i in range(maxlen):
countval = None
reqval = None
respval = None
if i < len(requests) and len(requests[i].strip()) > 0 and is_http_request(requests[i]):
reqval = requests[i]
if i < len(responses) and len(responses[i].strip()) > 0 and is_http_response(responses[i]):
respval = responses[i]
if reqval or respval:
countval = requestcounter[h]
requestcounter[h] = requestcounter[h] + 1
        if countval is not None:
if DEBUG:
print "Appending request " + str(countval) + " to " + str(h)
retval.append((countval, reqval, respval))
requestdata[h] = ''
responsedata[h] = ''
if DEBUG:
print "Tuples in list for " + str(h) + " = " + str(len(retval))
return retval
def handle_tcp_stream(tcp):
global DEBUG
# print "tcps -", str(tcp.addr), " state:", tcp.nids_state
if tcp.nids_state == nids.NIDS_JUST_EST:
# new tcp flow
((srcip, sport), (dstip, dport)) = tcp.addr
h = (srcip, sport, dstip, dport)
#(req_start, req_stop, resp_start, resp_stop)
ts[h] = [nids.get_pkt_ts(), 0, 0 ,0]
requestcounter[h] = 0
requestdata[h] = ''
responsedata[h] = ''
if DEBUG: print "Reconstructing TCP flow:", tcp.addr
tcp.client.collect = 1 # collects server -> client data
tcp.server.collect = 1 # collects client -> server data
elif tcp.nids_state == nids.NIDS_DATA:
# keep all of the stream's new data
tcp.discard(0)
((srcip, sport), (dstip, dport)) = tcp.addr
h = (srcip, sport, dstip, dport)
if requestdata.has_key(h):
client2server_data = tcp.server.data[tcp.server.count-tcp.server.count_new:tcp.server.count]
server2client_data = tcp.client.data[tcp.client.count-tcp.client.count_new:tcp.client.count]
#this if statement is necessary to ensure proper ordering of request/response pairs in the output
if is_http_request(client2server_data):
if len(requestdata[h]) > 0:
if DEBUG: print "Added request/response..."
k = FlowHeader(ts[h][0], ts[h][1], ts[h][2], ts[h][3], h[0], h[1], h[2], h[3])
http_req[k] = add_reconstructed_flow(h)
ts[h] = [nids.get_pkt_ts(), 0, 0 ,0]
if len(client2server_data) > 0:
#sets the start timestamp for request
if(requestdata[h] == ''):
ts[h][0] = nids.get_pkt_ts()
requestdata[h] = requestdata[h] + client2server_data
#sets the end timestamp for request
ts[h][1] = nids.get_pkt_ts()
if len(server2client_data) > 0:
#sets the start timestamp for response
if(responsedata[h] == ''):
ts[h][2] = nids.get_pkt_ts()
responsedata[h] = responsedata[h] + server2client_data
#sets the end timestamp for response
ts[h][3] = nids.get_pkt_ts()
elif tcp.nids_state in NIDS_END_STATES:
((srcip, sport), (dstip, dport)) = tcp.addr
if DEBUG: print "End of flow:", tcp.addr
h = (srcip, sport, dstip, dport)
if requestdata.has_key(h) and is_http_request(requestdata[h]) and is_http_response(responsedata[h]):
k = FlowHeader(ts[h][0], ts[h][1], ts[h][2], ts[h][3], h[0], h[1], h[2], h[3])
http_req[k] = add_reconstructed_flow(h)
else:
if DEBUG:
print "Failed to add flow"
print str(h)
print "has_key? " + str(requestdata.has_key(h))
print "is_http_request? " + str(is_http_request(requestdata[h]))
print "is_http_response? " + str(is_http_response(responsedata[h]))
del ts[h]
del requestdata[h]
del responsedata[h]
del requestcounter[h]
# adds the remaining open connections to the http_req dictionary
def finalize_http_flows():
for h in requestdata.keys():
finalize_http_flow_header(ts[h])
k = FlowHeader(ts[h][0], ts[h][1], ts[h][2], ts[h][3], h[0], h[1], h[2], h[3])
if DEBUG:
print "Finalizing flow", k
http_req[k] = add_reconstructed_flow(h)
for h in http_req.keys():
if len(http_req[h]) < 1:
del http_req[h]
if DEBUG: print "Num of flows " + str(len(http_req.keys()))
# sets the empty timestamp values for the remaining open connections
def finalize_http_flow_header(header):
for i in range(len(header)):
if header[i] == 0:
header[i] = nids.get_pkt_ts()
# prints flow headers in timestamp order
def print_flows(http_req):
    for fh in sorted(http_req.keys(), key=lambda x: x.ts_request_start):
print str(fh) + " " + str(len(http_req[fh]))
# if DEBUG:
# for tup in http_req[fh]:
# print tup
# extracts the http flows from a pcap file
# returns a dictionary of the reconstructed flows, keys are FlowHeader objects
# values are lists of tuples of the form (count, request, response)
def extract_flows(pcap_file):
global ts, requestdata, responsedata, requestcounter, http_req
ts, requestdata, responsedata, requestcounter, http_req = \
dict([]), dict([]), dict([]), dict([]), dict([])
nids.param("tcp_workarounds", 1)
nids.param("pcap_filter", "tcp") # bpf restrict to TCP only, note
nids.param("scan_num_hosts", 0) # disable portscan detection
nids.chksum_ctl([('0.0.0.0/0', False)]) # disable checksumming
nids.param("filename", pcap_file)
nids.init()
nids.register_tcp(handle_tcp_stream)
# print "pid", os.getpid()
if DEBUG: print "Reading from pcap file:", pcap_file
try:
nids.run()
except nids.error, e:
print "nids/pcap error: ", pcap_file + " ", e
except KeyboardInterrupt:
print "Control C!"
sys.exit(0)
except Exception, e:
print "Exception (runtime error in user callback?): ", pcap_file + " ", e
finalize_http_flows()
if DEBUG: print "Done!\n"
return http_req
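# Illustrative usage of extract_flows(); this is a sketch for clarity and the
# pcap filename is a placeholder, not part of the original module:
#
#     from pcaptomitm import extract
#     flows = extract.extract_flows("capture.pcap")
#     for header in sorted(flows, key=lambda fh: fh.ts_request_start):
#         for count, request, response in flows[header]:
#             print header.srcip, header.dstip, count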
| gpl-3.0 | -3,504,836,389,444,445,000 | 33.273458 | 109 | 0.55785 | false | 3.5867 | false | false | false |
247687009/mcrouter | mcrouter/test/test_wch3.py | 2 | 3091 | # Copyright (c) 2014, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from collections import defaultdict
from functools import reduce
from mcrouter.test.mock_servers import MockServer
from mcrouter.test.McrouterTestCase import McrouterTestCase
class EchoServer(MockServer):
"""A server that responds to get requests with its port number.
"""
def runServer(self, client_socket, client_address):
while not self.is_stopped():
cmd = client_socket.recv(1000)
if not cmd:
return
if cmd.startswith('get'):
client_socket.send('VALUE hit 0 %d\r\n%s\r\nEND\r\n' %
(len(str(self.port)), str(self.port)))
class TestWCH3(McrouterTestCase):
config = './mcrouter/test/test_wch3.json'
extra_args = []
def setUp(self):
for i in range(8):
self.add_server(EchoServer())
self.mcrouter = self.add_mcrouter(
self.config,
'/test/A/',
extra_args=self.extra_args)
def test_wch3(self):
valid_ports = []
for i in [1, 2, 4, 5, 6, 7]:
valid_ports.append(self.get_open_ports()[i])
invalid_ports = []
for i in [0, 3]:
invalid_ports.append(self.get_open_ports()[i])
request_counts = defaultdict(int)
n = 20000
for i in range(0, n):
key = 'someprefix:{}:|#|id=123'.format(i)
resp = int(self.mcrouter.get(key))
respB = int(self.mcrouter.get('/test/B/' + key))
respC = int(self.mcrouter.get('/test/C/' + key))
self.assertEqual(resp, respB)
self.assertEqual(resp, respC)
request_counts[resp] += 1
self.assertTrue(resp in valid_ports)
self.assertTrue(resp not in invalid_ports)
        # Make sure that the fraction of keys sent to each server is what we
        # expect, within a tolerance
expected_fractions = {
0: 0,
1: 1,
2: 1,
3: 0.0,
4: 0.5,
5: 1,
6: 0.3,
7: 0.5
}
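        # Illustrative arithmetic: these weights sum to 4.3, so a weight-1 server
        # should receive roughly 1 / 4.3 ~= 23% of the keys and the weight-0.3
        # server roughly 7%, which the tolerance below allows for.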
tolerance = 0.075
total_weight = reduce(lambda x, y: x + y,
map(lambda x: x[1], expected_fractions.items()))
for i, weight in expected_fractions.items():
expected_frac = weight / total_weight
port = int(self.get_open_ports()[i])
measured_frac = request_counts[port] / float(n)
if expected_frac > 0:
delta = measured_frac - expected_frac
self.assertTrue(abs(delta) <= tolerance)
else:
self.assertEqual(measured_frac, 0.0)
| bsd-3-clause | 7,331,603,049,058,234,000 | 34.125 | 78 | 0.563572 | false | 3.801968 | true | false | false |
xapple/seqenv | seqenv/common/color.py | 3 | 2374 | ################################################################################
class Color:
"""Shortcuts for the ANSI escape sequences to control
formatting, color, etc. on text terminals. Use it like this:
print Color.red + "Hello world" + Color.end
"""
# Special #
end = '\033[0m'
# Regular #
blk = '\033[0;30m' # Black
red = '\033[0;31m' # Red
grn = '\033[0;32m' # Green
ylw = '\033[0;33m' # Yellow
blu = '\033[0;34m' # Blue
pur = '\033[0;35m' # Purple
cyn = '\033[0;36m' # Cyan
wht = '\033[0;37m' # White
# Bold #
bold = '\033[1m'
b_blk = '\033[1;30m' # Black
b_red = '\033[1;31m' # Red
b_grn = '\033[1;32m' # Green
b_ylw = '\033[1;33m' # Yellow
b_blu = '\033[1;34m' # Blue
b_pur = '\033[1;35m' # Purple
b_cyn = '\033[1;36m' # Cyan
b_wht = '\033[1;37m' # White
# Light #
light = '\033[2m'
l_blk = '\033[2;30m' # Black
l_red = '\033[2;31m' # Red
l_grn = '\033[2;32m' # Green
l_ylw = '\033[2;33m' # Yellow
l_blu = '\033[2;34m' # Blue
l_pur = '\033[2;35m' # Purple
l_cyn = '\033[2;36m' # Cyan
l_wht = '\033[2;37m' # White
# Italic #
    italic = '\033[3m'
i_blk = '\033[3;30m' # Black
i_red = '\033[3;31m' # Red
i_grn = '\033[3;32m' # Green
i_ylw = '\033[3;33m' # Yellow
i_blu = '\033[3;34m' # Blue
i_pur = '\033[3;35m' # Purple
i_cyn = '\033[3;36m' # Cyan
i_wht = '\033[3;37m' # White
# Underline #
underline = '\033[4m'
u_blk = '\033[4;30m' # Black
u_red = '\033[4;31m' # Red
u_grn = '\033[4;32m' # Green
u_ylw = '\033[4;33m' # Yellow
u_blu = '\033[4;34m' # Blue
u_pur = '\033[4;35m' # Purple
u_cyn = '\033[4;36m' # Cyan
u_wht = '\033[4;37m' # White
# Glitter #
flash = '\033[5m'
g_blk = '\033[5;30m' # Black
g_red = '\033[5;31m' # Red
g_grn = '\033[5;32m' # Green
g_ylw = '\033[5;33m' # Yellow
g_blu = '\033[5;34m' # Blue
g_pur = '\033[5;35m' # Purple
g_cyn = '\033[5;36m' # Cyan
g_wht = '\033[5;37m' # White
# Fill #
f_blk = '\033[40m' # Black
f_red = '\033[41m' # Red
f_grn = '\033[42m' # Green
f_ylw = '\033[43m' # Yellow
f_blu = '\033[44m' # Blue
f_pur = '\033[45m' # Purple
f_cyn = '\033[46m' # Cyan
f_wht = '\033[47m' # White | mit | -4,178,673,585,366,318,600 | 29.844156 | 80 | 0.466302 | false | 2.307094 | false | false | false |
phufbv/journal-stats | script.py | 1 | 1445 | import sys
import parameters as pars
import html
from article import Article
from file_writer import FileWriter
def run(journal, num_articles):
# Setup output file, get input parameters, and use brief run if testing
writer = FileWriter(pars.filename)
journal = journal # journal name
num_articles = num_articles # number of articles to use from each issue
num_volumes = 18 # 18 volumes per year
issue = 1 # sample issue for each volume
# if len(sys.argv) > 1:
# print "Testing....."
# num_articles = 10
# num_volumes = 1
# Sample papers accepted in previous year
date = html.detect_start_volume()
start_volume = date[0]
acceptance_year = date[1]
volumes = range(start_volume-num_volumes+1, start_volume+1)
# for volume in reversed(volumes):
# # Go to volume/issue contents page, and extract URLs of articles
# articles = html.build_urls(journal, volume, issue)
# for num in range(1, num_articles+1):
# # For first 'num_articles' in this volume/issue, try to extract date string from article webpage
# url = articles[num]
# try:
# date_string = html.get_date_div(url)
# except:
# print "Some error occurred (URL '",url,"' not available?). Skipping."
# break
# article = Article(date_string)
# if article.get_year() == acceptance_year:
# writer.write_to_file(article)
writer.close_file()
if __name__ == "__main__":
run(pars.journal, pars.num_articles)
| mit | 2,807,207,387,406,766,000 | 24.350877 | 101 | 0.680969 | false | 3.048523 | false | false | false |
steny138/ShyGiProg | webapi/prog_api.py | 1 | 4041 | #coding=utf-8
from flask import Flask, request, jsonify
from flask import g, Response
from flask_restful.reqparse import RequestParser
from flask.views import MethodView
import time, datetime
from Crypto.Cipher import AES
from Crypto import Random
import base64
app = Flask(__name__)
ENCRYPT_KEY = "AUJJSLSPDVMDSSJSODSLIDmlcxsxslin"
@app.before_request
def option_autoreply():
""" Always reply 200 on OPTIONS request """
if request.method == 'OPTIONS':
resp = app.make_default_options_response()
headers = None
        if 'Access-Control-Request-Headers' in request.headers:
            headers = request.headers['Access-Control-Request-Headers']
h = resp.headers
# Allow the origin which made the XHR
h['Access-Control-Allow-Origin'] = request.headers['Origin']
# Allow the actual method
h['Access-Control-Allow-Methods'] = request.headers['Access-Control-Request-Method']
# Allow for 10 seconds
h['Access-Control-Max-Age'] = "10"
# We also keep current headers
if headers is not None:
h['Access-Control-Allow-Headers'] = headers
return resp
@app.after_request
def set_allow_origin(resp):
""" Set origin for GET, POST, PUT, DELETE requests """
h = resp.headers
# Allow crossdomain for other HTTP Verbs
if request.method != 'OPTIONS' and 'Origin' in request.headers:
h['Access-Control-Allow-Origin'] = request.headers['Origin']
return resp
def encrypt(data, encrypt_key):
bs = AES.block_size
pad = lambda s: s + (bs - len(s) % bs) * chr(bs - len(s) % bs)
iv = Random.new().read(bs)
cipher = AES.new(encrypt_key, AES.MODE_CBC, iv)
data = cipher.encrypt(pad(data))
data = iv + data
data = base64.encodestring(data).strip()
return data
def decrypt(data, encrypt_key):
data = base64.decodestring(data)
bs = AES.block_size
if len(data) <= bs:
return data
unpad = lambda s : s[0:-ord(s[-1])]
iv = data[:bs]
cipher = AES.new(encrypt_key, AES.MODE_CBC, iv)
data = unpad(cipher.decrypt(data[bs:]))
return data
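# Illustrative round trip (an added sketch, not part of the original handlers):
# encrypt() prepends a random IV and base64-encodes the result, so the same
# plaintext produces different ciphertexts, while decrypt() always recovers it.
#
#     token = encrypt("account" + "password" + "20160410001020", ENCRYPT_KEY)
#     assert decrypt(token, ENCRYPT_KEY) == "account" + "password" + "20160410001020"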
class Login(MethodView):
@classmethod
def login(cls):
p_token = "123123"
strs = request.json['account'] + request.json['password'] + datetime.datetime.now().strftime('%Y%m%d%H%M%S')
enrypt_data = encrypt(strs, ENCRYPT_KEY)
res = {'account': request.json['account'],
'p_token': enrypt_data
# ,"decrypt_data" : decrypt(enrypt_data, ENCRYPT_KEY)
}
return jsonify(res)
app.add_url_rule('/prog/api/login', 'login', Login.login, methods=['POST'])
class Profile(MethodView):
@classmethod
def get(cls):
res = {
"member": {
"name":"bill yang",
"age": 26,
"id_number": "R123123123123",
"token": "GJKLGFSAS",
"create_time": "2016/04/10 00:10:20"
},
"alerts": [
{
"event": "Morning Call",
"level": 1,
"disabled": False
}, {
"event": "study",
"level": 2,
"disabled": True
}, {
"event": "Go to taipei",
"level": 3,
"disabled": False
}
],
"messages": [
{
"sender": "Shyshyhao",
"send_time": "2016/03/25 10:10:20",
"message": "Hello, yuchen Nice to meet you."
}, {
"sender": "Adela Lin",
"send_time": "2016/03/25 07:10:20",
"message": "Get up!! Now is your time."
}
]
}
return jsonify(res)
app.add_url_rule('/prog/api/profile', 'user_profile', Profile.get, methods=['GET'])
if __name__ == '__main__':
app.debug = True
# or add debug=True in param
app.run(host='0.0.0.0') | apache-2.0 | 2,760,284,153,073,627,600 | 27.871429 | 116 | 0.542935 | false | 3.657014 | false | false | false |
pymfony/pymfony | src/pymfony/component/system/reflection.py | 2 | 11433 | # -*- coding: utf-8 -*-
# This file is part of the pymfony package.
#
# (c) Alexandre Quercia <[email protected]>
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
from __future__ import absolute_import;
import sys;
import inspect;
from pymfony.component.system import Object;
from pymfony.component.system import ClassLoader;
from pymfony.component.system import Tool;
from pymfony.component.system.oop import final;
from pymfony.component.system.types import String;
from pymfony.component.system.exception import StandardException;
from pymfony.component.system.oop import abstract;
"""
"""
class ReflectionException(StandardException):
pass;
class ReflectionParameter(Object):
def __init__(self, function, parameter):
"""Constructor
@param: The function to reflect parameters from.
@param: The parameter.
"""
self.__name = str(parameter);
self.__defaultValue = None;
self.__isDefaultValueAvailable = None;
self.__isOptional = None;
self.__position = None;
args, varargs, varkw, defaults = inspect.getargspec(function);
offset = -1 if inspect.ismethod(function) else 0;
self.__position = list(args).index(self.__name) + offset;
defaults = defaults if defaults else tuple();
firstOptional = len(args) + offset - len(defaults);
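        # Worked example (illustrative): reflecting "def f(self, a, b=1)" through
        # ReflectionMethod gives args = ['self', 'a', 'b'], offset = -1 and
        # defaults = (1,), so firstOptional = 3 - 1 - 1 = 1; "a" gets position 0
        # (required) and "b" gets position 1 (optional, default value 1).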
if self.__position >= firstOptional:
self.__isOptional = True;
self.__isDefaultValueAvailable = True;
self.__defaultValue = defaults[self.__position - firstOptional];
else:
self.__isOptional = False;
self.__isDefaultValueAvailable = False;
def __str__(self):
return self.__name;
@final
def __clone__(self):
raise TypeError();
def getName(self):
"""Gets the name of the parameter
@return: string The name of the reflected parameter.
"""
return self.__name;
def getDefaultValue(self):
"""Gets the default value of the parameter for a user-defined function
or method. If the parameter is not optional a ReflectionException will
        be raised.
@return: mixed The parameters default value.
"""
if not self.isOptional():
raise ReflectionException("The parameter {0} is not optional".format(
self.__name
));
return self.__defaultValue;
def isDefaultValueAvailable(self):
"""Checks if a default value for the parameter is available.
@return: Boolean TRUE if a default value is available, otherwise FALSE
"""
return self.__isDefaultValueAvailable;
def isOptional(self):
"""Checks if the parameter is optional.
@return: Boolean TRUE if the parameter is optional, otherwise FALSE
"""
return self.__isOptional;
def getPosition(self):
"""Gets the position of the parameter.
@return: int The position of the parameter, left to right, starting at position #0.
"""
return self.__position;
@abstract
class AbstractReflectionFunction(Object):
@final
def __clone__(self):
"""The clone method prevents an object from being cloned.
Reflection objects cannot be cloned.
"""
raise TypeError("Reflection objects cannot be cloned.");
@abstract
def getParameters(self):
"""Get the parameters as a list of ReflectionParameter.
@return: list A list of Parameters, as a ReflectionParameter object.
"""
pass;
class ReflectionFunction(AbstractReflectionFunction):
def __init__(self, function):
"""Constructs a ReflectionFunction object.
@param: string|function The name of the function to reflect or a closure.
@raise ReflectionException: When the name parameter does not contain a valid function.
"""
if isinstance(function, String):
try:
function = ClassLoader.load(function);
except ImportError:
function = False;
if not inspect.isfunction(function):
raise ReflectionException(
"The {0} parameter is not a valid function.".format(
function
))
self._name = function.__name__;
self._parameters = None;
self._function = function;
def __str__(self):
return self._name;
def getName(self):
return self._name;
def getParameters(self):
"""Get the parameters as a list of ReflectionParameter.
@return: list A list of Parameters, as a ReflectionParameter object.
"""
if self._parameters is None:
self._parameters = list();
args = inspect.getargspec(self._function)[0];
for arg in args:
self._parameters.append(ReflectionParameter(self._function, arg));
return self._parameters;
class ReflectionMethod(AbstractReflectionFunction):
IS_STATIC = 1;
IS_ABSTRACT = 2;
IS_FINAL = 4;
IS_PUBLIC = 256;
IS_PROTECTED = 512;
IS_PRIVATE = 1024;
def __init__(self, method):
"""Constructs a ReflectionFunction object.
@param: method The method to reflect.
@raise ReflectionException: When the name parameter does not contain a valid method.
"""
if not inspect.ismethod(method):
raise ReflectionException(
"The {0} parameter is not a valid method.".format(
method
))
self._className = None;
self._parameters = None;
self._mode = None;
self._name = method.__name__;
self._method = method;
def __str__(self):
return self._name;
def getName(self):
return self._name;
def getClassName(self):
if self._className is None:
if sys.version_info < (2, 7):
cls = self._method.im_class;
else:
cls = self._method.__self__.__class__;
self._className = ReflectionClass(cls).getName();
return self._className;
def getMode(self):
if self._mode is None:
if self._name.startswith('__') and self._name.endswith('__'):
self._mode = self.IS_PUBLIC;
elif self._name.startswith('__'):
self._mode = self.IS_PRIVATE;
elif self._name.startswith('_'):
self._mode = self.IS_PROTECTED;
else:
self._mode = self.IS_PUBLIC;
if getattr(self._method, '__isabstractmethod__', False):
self._mode = self._mode | self.IS_ABSTRACT;
if getattr(self._method, '__isfinalmethod__', False):
self._mode = self._mode | self.IS_FINAL;
if isinstance(self._method, classmethod):
self._mode = self._mode | self.IS_STATIC;
return self._mode;
def getParameters(self):
"""Get the parameters as a list of ReflectionParameter.
@return: list A list of Parameters, as a ReflectionParameter object.
"""
if self._parameters is None:
self._parameters = list();
args = inspect.getargspec(self._method)[0];
for arg in args[1:]:
self._parameters.append(ReflectionParameter(self._method, arg));
return self._parameters;
class ReflectionClass(Object):
def __init__(self, argument):
if isinstance(argument, String):
qualClassName = argument;
try:
argument = ClassLoader.load(argument);
except ImportError:
argument = False;
if argument is not False:
assert issubclass(argument, object);
self.__exists = True;
self._class = argument;
self._fileName = None;
self._mro = None;
self._namespaceName = None;
self._parentClass = None;
self._name = None;
else:
self.__exists = False;
self._name = qualClassName;
self._fileName = '';
self._mro = tuple();
self._namespaceName = Tool.split(qualClassName)[0];
self._parentClass = False;
self._class = None;
self._methods = None;
def getFileName(self):
if self._fileName is not None:
return self._fileName;
try:
self._fileName = inspect.getabsfile(self._class);
except TypeError:
self._fileName = False;
return self._fileName;
def getParentClass(self):
"""
        @return: ReflectionClass|False
"""
if self._parentClass is None:
if len(self.getmro()) > 1:
self._parentClass = ReflectionClass(self.getmro()[1]);
else:
self._parentClass = False;
return self._parentClass;
def getmro(self):
if self._mro is None:
self._mro = inspect.getmro(self._class);
return self._mro;
def getNamespaceName(self):
if self._namespaceName is None:
self._namespaceName = str(self._class.__module__);
return self._namespaceName;
def getName(self):
if self._name is None:
self._name = self.getNamespaceName()+'.'+str(self._class.__name__);
return self._name;
def exists(self):
return self.__exists;
def newInstance(self, *args, **kargs):
return self._class(*args, **kargs);
class ReflectionObject(ReflectionClass):
def __init__(self, argument):
assert isinstance(argument, object);
ReflectionClass.__init__(self, argument.__class__);
self.__object = argument;
def getMethod(self, name):
"""Gets a ReflectionMethod for a class method.
@param: string The method name to reflect.
@return: ReflectionMethod A ReflectionMethod.
@raise: ReflectionException When the method does not exist.
"""
if hasattr(self.__object, name):
return ReflectionMethod(getattr(self.__object, name));
raise ReflectionException("The method {0} of class {1} does not exist.".format(
name,
self.getName(),
));
def getMethods(self, flag = 0):
"""Gets a list of methods for the class.
@param: int flag Filter the results to include only methods with certain
attributes. Defaults to no filtering.
Any combination of ReflectionMethod.IS_STATIC,
ReflectionMethod.IS_PUBLIC,
ReflectionMethod.IS_PROTECTED,
ReflectionMethod.IS_PRIVATE,
ReflectionMethod.IS_ABSTRACT,
ReflectionMethod.IS_FINAL.
@return: list A list of ReflectionMethod objects reflecting each method.
"""
if self._methods is None:
self._methods = list();
for name, method in inspect.getmembers(self.__object, inspect.ismethod):
refMethod = ReflectionMethod(method);
if flag == flag & refMethod.getMode():
self._methods.append(refMethod);
return self._methods;
| mit | 7,362,900,226,967,280,000 | 29.733871 | 94 | 0.581037 | false | 4.562251 | false | false | false |