Dataset schema (one row per source file; ⌀ marks nullable columns):

| column | dtype | range / values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 5 to 2.06M |
| ext | string | 11 distinct values |
| lang | string | 1 distinct value |
| max_stars_repo_path | string | length 3 to 251 |
| max_stars_repo_name | string | length 4 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | sequence | length 1 to 10 |
| max_stars_count | int64 | 1 to 191k, nullable (⌀) |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable (⌀) |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable (⌀) |
| max_issues_repo_path | string | length 3 to 251 |
| max_issues_repo_name | string | length 4 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | sequence | length 1 to 10 |
| max_issues_count | int64 | 1 to 116k, nullable (⌀) |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable (⌀) |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable (⌀) |
| max_forks_repo_path | string | length 3 to 251 |
| max_forks_repo_name | string | length 4 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | sequence | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k, nullable (⌀) |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable (⌀) |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable (⌀) |
| content | string | length 1 to 1.05M |
| avg_line_length | float64 | 1 to 1.02M |
| max_line_length | int64 | 3 to 1.04M |
| alphanum_fraction | float64 | 0 to 1 |

Each data row below lists these fields pipe-separated; the `content` field is printed verbatim across multiple lines, followed by the three per-file statistics.
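These columns match the auto-generated dataset card of a Hugging Face code corpus. As a minimal sketch of how rows with this schema can be read (the dataset id `bigcode/the-stack` and the `data_dir` below are assumptions, not stated on this page; only the column names are taken from the table above):

```python
# Hypothetical loading sketch -- substitute the corpus this page was actually
# rendered from. Streaming avoids downloading the full dataset.
from datasets import load_dataset

ds = load_dataset("bigcode/the-stack", data_dir="data/python",
                  split="train", streaming=True)
for row in ds.take(3):
    print(row["hexsha"], row["size"], row["max_stars_repo_name"])
    print(row["content"][:200])  # first 200 characters of the file
```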
6a849b7bcd124ad715f2ce345cebb1f79d3397f0 | 1,132 | py | Python | discovery-infra/test_infra/helper_classes/config/controller_config.py | lranjbar/assisted-test-infra | 89cd4e16744afa646af88975f8038ca1774bcfa4 | ["Apache-2.0"] | null | null | null | discovery-infra/test_infra/helper_classes/config/controller_config.py | lranjbar/assisted-test-infra | 89cd4e16744afa646af88975f8038ca1774bcfa4 | ["Apache-2.0"] | 30 | 2021-11-15T07:10:49.000Z | 2022-03-28T07:10:26.000Z | discovery-infra/test_infra/helper_classes/config/controller_config.py | lranjbar/assisted-test-infra | 89cd4e16744afa646af88975f8038ca1774bcfa4 | ["Apache-2.0"] | null | null | null |
from abc import ABC
from pathlib import Path
from typing import Any
from dataclasses import dataclass
from test_infra import consts
from test_infra.utils.global_variables import GlobalVariables
from .base_config import _BaseConfig
global_variables = GlobalVariables()
| 26.325581 | 63 | 0.719965 |
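The three trailing numbers on every row are the per-file statistics `avg_line_length`, `max_line_length`, and `alphanum_fraction` from the schema above. A small sketch of their conventional definitions (whether the corpus uses exactly these formulas is an assumption):

```python
# Mean line length, longest line, and fraction of alphanumeric characters.
def file_stats(content: str):
    lines = content.splitlines()
    avg_line_length = sum(len(line) for line in lines) / max(len(lines), 1)
    max_line_length = max((len(line) for line in lines), default=0)
    alphanum_fraction = sum(ch.isalnum() for ch in content) / max(len(content), 1)
    return avg_line_length, max_line_length, alphanum_fraction
```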
6a84b5159878bc48cef9594078edc989fb798f13 | 952 | py | Python | bitcoinpy/mempool.py | obulpathi/bitcoinpy | 8f41e0221f2ff2d35697b6d4e5397deb7de09c3d | ["MIT"] | 21 | 2016-01-03T14:52:07.000Z | 2021-08-09T18:05:08.000Z | bitcoinpy/mempool.py | obulpathi/bitcoinpy | 8f41e0221f2ff2d35697b6d4e5397deb7de09c3d | ["MIT"] | null | null | null | bitcoinpy/mempool.py | obulpathi/bitcoinpy | 8f41e0221f2ff2d35697b6d4e5397deb7de09c3d | ["MIT"] | 15 | 2015-02-07T20:08:11.000Z | 2019-10-03T04:45:45.000Z |
# MemPool.py
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import logging
from lib.serialize import uint256_to_shortstr
| 23.8 | 74 | 0.703782 |
6a84b6d6b4154a68e4e0a9485928b636bf10d1b0 | 13,530 | py | Python | bashspy/parser.py | sarvi/bashspy | 088f2cdfd00d29b4c7a98ec311f6e6c382ba4749 | ["MIT"] | null | null | null | bashspy/parser.py | sarvi/bashspy | 088f2cdfd00d29b4c7a98ec311f6e6c382ba4749 | ["MIT"] | 1 | 2021-06-12T12:47:44.000Z | 2021-06-12T12:47:44.000Z | bashspy/parser.py | sarvi/bashspy | 088f2cdfd00d29b4c7a98ec311f6e6c382ba4749 | ["MIT"] | 1 | 2020-05-18T08:55:14.000Z | 2020-05-18T08:55:14.000Z |
'''
Created on Jun 13, 2019
@author: sarvi
'''
from sly import Parser
from .lexer import BashLexer
if __name__ == '__main__':
lexer = BashLexer()
parser = BashParser()
while True:
try:
text = input('Command:>')
result = parser.parse(lexer.tokenize(text))
print(result)
except EOFError:
            break
 | 32.760291 | 135 | 0.602882 |
6a84df704829c829063b750bd4cbc4f7f7261e8a | 1,555 | py | Python | Module03/pregnancy_wheel.py | biomed-bioinformatics-bootcamp/bmes-t580-2019-coursework-charrison620 | 4f53e9290b456f582464c86d114c794c1448b995 | ["MIT"] | null | null | null | Module03/pregnancy_wheel.py | biomed-bioinformatics-bootcamp/bmes-t580-2019-coursework-charrison620 | 4f53e9290b456f582464c86d114c794c1448b995 | ["MIT"] | null | null | null | Module03/pregnancy_wheel.py | biomed-bioinformatics-bootcamp/bmes-t580-2019-coursework-charrison620 | 4f53e9290b456f582464c86d114c794c1448b995 | ["MIT"] | null | null | null |
import datetime
#avg pregnancy length is 281 days
main()
 | 27.767857 | 87 | 0.65209 |
6a863c2235ddfde035ee8193728b86e170dd1480 | 1,471 | py | Python | costcalculator/forms.py | connor-c/Trip-Gas-Cost-Calculator | 6101093ffd48b6cb6c4f847b8c1f40351617750b | ["MIT"] | null | null | null | costcalculator/forms.py | connor-c/Trip-Gas-Cost-Calculator | 6101093ffd48b6cb6c4f847b8c1f40351617750b | ["MIT"] | 8 | 2020-02-11T23:59:35.000Z | 2022-02-10T07:16:43.000Z | costcalculator/forms.py | connor-c/Trip-Gas-Cost-Calculator | 6101093ffd48b6cb6c4f847b8c1f40351617750b | ["MIT"] | null | null | null |
from django import forms
from django.core.validators import MinValueValidator, MinLengthValidator
| 70.047619 | 220 | 0.747791 |
6a86a5adbb68bba6dc2b067c07b59de722a8d5ca | 1,940 | py | Python | tasks/storm_raffle_handler.py | Ayouuuu/bili2.0 | 1108e39208e56f129fb5eb6605a5b3f1aadc0d8f | ["MIT"] | 2 | 2020-01-03T09:27:53.000Z | 2020-04-07T05:06:36.000Z | tasks/storm_raffle_handler.py | Ayouuuu/bili2.0 | 1108e39208e56f129fb5eb6605a5b3f1aadc0d8f | ["MIT"] | null | null | null | tasks/storm_raffle_handler.py | Ayouuuu/bili2.0 | 1108e39208e56f129fb5eb6605a5b3f1aadc0d8f | ["MIT"] | 1 | 2019-08-23T07:43:21.000Z | 2019-08-23T07:43:21.000Z |
import bili_statistics
from reqs.storm_raffle_handler import StormRaffleHandlerReq
from tasks.utils import UtilsTask
from .base_class import Forced, DontWait, Multi
| 41.276596 | 83 | 0.635567 |
6a86acff1cb947e60b02c94c6dbdcc5c7b79e9bf | 4,767 | py | Python | u24_lymphocyte/third_party/treeano/sandbox/nodes/gradnet.py | ALSM-PhD/quip_classification | 7347bfaa5cf11ae2d7a528fbcc43322a12c795d3 | ["BSD-3-Clause"] | 45 | 2015-04-26T04:45:51.000Z | 2022-01-24T15:03:55.000Z | u24_lymphocyte/third_party/treeano/sandbox/nodes/gradnet.py | ALSM-PhD/quip_classification | 7347bfaa5cf11ae2d7a528fbcc43322a12c795d3 | ["BSD-3-Clause"] | 8 | 2018-07-20T20:54:51.000Z | 2020-06-12T05:36:04.000Z | u24_lymphocyte/third_party/treeano/sandbox/nodes/gradnet.py | ALSM-PhD/quip_classification | 7347bfaa5cf11ae2d7a528fbcc43322a12c795d3 | ["BSD-3-Clause"] | 22 | 2018-05-21T23:57:20.000Z | 2022-02-21T00:48:32.000Z |
import theano
import theano.tensor as T
import treeano
import treeano.nodes as tn
fX = theano.config.floatX
def GradNetOptimizerInterpolationNode(name,
children,
early,
late,
**kwargs):
"""
interpolates updates from 2 optimizers nodes
NOTE: this is a hack to take in node constructors as arguments
"""
assert set(children.keys()) == {"subtree", "cost"}
subtree = children["subtree"]
cost = children["cost"]
cost_ref = tn.ReferenceNode(name + "_costref", reference=cost.name)
late_subtree = tn.UpdateScaleNode(name + "_late_update_scale", subtree)
late_node = late(name + "_late", {"subtree": late_subtree, "cost": cost})
early_subtree = tn.UpdateScaleNode(name + "_early_update_scale", late_node)
early_node = early(name + "_early",
{"subtree": early_subtree, "cost": cost_ref})
# NOTE: need separate node to forward hyperparameter
return _GradNetOptimizerInterpolationNode(name, early_node, **kwargs)
def GradualSimpleBatchNormalizationNode(name):
from treeano.sandbox.nodes import batch_normalization as bn
return GradNetInterpolationNode(
name,
{"early": bn.SimpleBatchNormalizationNode(name + "_bn"),
"late": tn.IdentityNode(name + "_identity")})
GradualBNNode = GradualSimpleBatchNormalizationNode
| 36.389313 | 79 | 0.594084 |
6a88dea060f9ea00f0bb7c465137c71904b3c14f | 662 | py | Python | minus80/RawFile.py | brohammer/Minus80 | 3cd5b61a7349b9fa6d35ed192d8a4f38523f92bb | ["MIT"] | null | null | null | minus80/RawFile.py | brohammer/Minus80 | 3cd5b61a7349b9fa6d35ed192d8a4f38523f92bb | ["MIT"] | null | null | null | minus80/RawFile.py | brohammer/Minus80 | 3cd5b61a7349b9fa6d35ed192d8a4f38523f92bb | ["MIT"] | null | null | null |
import gzip #pragma: no cover
import bz2 #pragma: no cover
import lzma #pragma: no cover
| 33.1 | 50 | 0.601208 |
6a89b2893b587e6d66f6aa207ca89999bce84710 | 846 | py | Python | utils/config.py | jtr109/Alpha2kindle | a411d05cafa9036a732eeb75fa13f68963f254e3 | ["MIT"] | null | null | null | utils/config.py | jtr109/Alpha2kindle | a411d05cafa9036a732eeb75fa13f68963f254e3 | ["MIT"] | null | null | null | utils/config.py | jtr109/Alpha2kindle | a411d05cafa9036a732eeb75fa13f68963f254e3 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
import os
CURCONF = TestConf
| 27.290323 | 75 | 0.51773 |
6a89d65e11282b8c81495e2795c8364f65d2114c | 4,724 | py | Python | framework/database/__init__.py | fabmiz/osf.io | 8d86af3f0a6e5388bd5b18383e68e27b65a66247 | ["Apache-2.0"] | null | null | null | framework/database/__init__.py | fabmiz/osf.io | 8d86af3f0a6e5388bd5b18383e68e27b65a66247 | ["Apache-2.0"] | null | null | null | framework/database/__init__.py | fabmiz/osf.io | 8d86af3f0a6e5388bd5b18383e68e27b65a66247 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
import functools
import httplib as http
import markupsafe
from django.core.paginator import Paginator
from django.db.models import Q, QuerySet
from framework.exceptions import HTTPError
def get_or_http_error(Model, pk_or_query, allow_deleted=False, display_name=None):
"""Load an instance of Model by primary key or modularodm.Q query. Raise an appropriate
HTTPError if no record is found or if the query fails to find a unique record
:param type Model: StoredObject subclass to query
:param pk_or_query:
:type pk_or_query: either
- a <basestring> representation of the record's primary key, e.g. 'abcdef'
- a <QueryBase> subclass query to uniquely select a record, e.g.
Q('title', 'eq', 'Entitled') & Q('version', 'eq', 1)
:param bool allow_deleted: allow deleleted records?
:param basestring display_name:
:raises: HTTPError(404) if the record does not exist
:raises: HTTPError(400) if no unique record is found
:raises: HTTPError(410) if the resource is deleted and allow_deleted = False
:return: Model instance
"""
display_name = display_name or ''
# FIXME: Not everything that uses this decorator needs to be markupsafe, but OsfWebRenderer error.mako does...
safe_name = markupsafe.escape(display_name)
if isinstance(pk_or_query, Q):
try:
instance = Model.objects.get(pk_or_query)
except Model.DoesNotExist:
raise HTTPError(http.NOT_FOUND, data=dict(
message_long='No {name} record matching that query could be found'.format(name=safe_name)
))
except Model.MultipleObjectsReturned:
raise HTTPError(http.BAD_REQUEST, data=dict(
message_long='The query must match exactly one {name} record'.format(name=safe_name)
))
else:
instance = Model.load(pk_or_query)
if not instance:
raise HTTPError(http.NOT_FOUND, data=dict(
message_long='No {name} record with that primary key could be found'.format(name=safe_name)
))
if getattr(instance, 'is_deleted', False) and getattr(instance, 'suspended', False):
raise HTTPError(451, data=dict( # 451 - Unavailable For Legal Reasons
message_short='Content removed',
message_long='This content has been removed'
))
if not allow_deleted and getattr(instance, 'is_deleted', False):
raise HTTPError(http.GONE)
return instance
def autoload(Model, extract_key, inject_key, func):
"""Decorator to autoload a StoredObject instance by primary key and inject into kwargs. Raises
an appropriate HTTPError (see #get_or_http_error)
:param type Model: database collection model to query (should be a subclass of StoredObject)
:param basestring extract_key: named URL field containing the desired primary key to be fetched
from the database
:param basestring inject_key: name the instance will be accessible as when it's injected as an
argument to the function
Example usage: ::
def get_node(node_id):
node = Node.load(node_id)
...
becomes
import functools
autoload_node = functools.partial(autoload, Node, 'node_id', 'node')
@autoload_node
def get_node(node):
...
"""
return wrapper
| 38.406504 | 114 | 0.67591 |
6a8b93de9ef615a88be0dad5abda769599f3cf01 | 2,886 | py | Python | neptune/internal/client_library/job_development_api/image.py | jiji-online/neptune-cli | 50cf680a80d141497f9331ab7cdaee49fcb90b0c | ["Apache-2.0"] | null | null | null | neptune/internal/client_library/job_development_api/image.py | jiji-online/neptune-cli | 50cf680a80d141497f9331ab7cdaee49fcb90b0c | ["Apache-2.0"] | null | null | null | neptune/internal/client_library/job_development_api/image.py | jiji-online/neptune-cli | 50cf680a80d141497f9331ab7cdaee49fcb90b0c | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016, deepsense.io
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from future import standard_library
standard_library.install_aliases()
# pylint: disable=wrong-import-position
from future.builtins import object
import base64
import io
import PIL.Image
from neptune.generated.swagger_client import InputImage
from neptune.internal.common.models.parameters_validation import (
of_type_validator,
text_conv,
validate
)
| 26.722222 | 93 | 0.647263 |
6a8bca56c22e3267d252604b1793381dda94c58e | 626 | py | Python | src/picome/hukeyboard.py | guibohnert91/picome | 9b91c28f7e83a2b730cafbda1a97205672f3e676 | ["MIT"] | null | null | null | src/picome/hukeyboard.py | guibohnert91/picome | 9b91c28f7e83a2b730cafbda1a97205672f3e676 | ["MIT"] | null | null | null | src/picome/hukeyboard.py | guibohnert91/picome | 9b91c28f7e83a2b730cafbda1a97205672f3e676 | ["MIT"] | null | null | null |
from adafruit_hid.keyboard import Keyboard
from adafruit_hid.keyboard_layout_us import KeyboardLayoutUS
from adafruit_hid.keycode import Keycode
import usb_hid
import time
 | 28.454545 | 73 | 0.702875 |
6a8c272cd22c6193695ebfa5fa34ff4d88d4565d | 579 | py | Python | src/solutions/part2/q104_max_bi_tree_depth.py | hychrisli/PyAlgorithms | 71e537180f3b371d0d2cc47b11cb68ec13a8ac68 | ["Apache-2.0"] | null | null | null | src/solutions/part2/q104_max_bi_tree_depth.py | hychrisli/PyAlgorithms | 71e537180f3b371d0d2cc47b11cb68ec13a8ac68 | ["Apache-2.0"] | null | null | null | src/solutions/part2/q104_max_bi_tree_depth.py | hychrisli/PyAlgorithms | 71e537180f3b371d0d2cc47b11cb68ec13a8ac68 | ["Apache-2.0"] | null | null | null |
from src.base.solution import Solution
from src.tests.part2.q104_test_max_bi_tree_depth import MaxBiTreeDepthTestCases
if __name__ == '__main__':
sol = MaxBiTreeDepth()
    sol.run_tests()
 | 23.16 | 79 | 0.661485 |
6a8c916961dcdf5b4bdd11f085941afc268401f1 | 771 | py | Python | inventory/admin.py | shakyasaijal/businessAnalytics | 9312bae79709387c6eadd50f87f6be85bd52c396 | ["BSD-3-Clause"] | null | null | null | inventory/admin.py | shakyasaijal/businessAnalytics | 9312bae79709387c6eadd50f87f6be85bd52c396 | ["BSD-3-Clause"] | 8 | 2021-03-30T13:03:11.000Z | 2022-03-12T00:20:13.000Z | inventory/admin.py | shakyasaijal/businessAnalytics | 9312bae79709387c6eadd50f87f6be85bd52c396 | ["BSD-3-Clause"] | null | null | null |
from django.contrib import admin
from . import models
admin.site.register(models.Suppliers, SupplierAdmin)
admin.site.register(models.InventoryUser, InventoryUserAdmin)
admin.site.register(models.Product, ProductsAdmin)
| 32.125 | 72 | 0.731518 |
6a8e0d766c7cdfdc409946fd3a6196d6981baf1d | 55 | py | Python | python/testData/resolve/TryExceptElse.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | ["Apache-2.0"] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/resolve/TryExceptElse.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | ["Apache-2.0"] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/resolve/TryExceptElse.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | ["Apache-2.0"] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z |
try:
name = ""
except:
pass
else:
    print na<ref>me
 | 9.166667 | 17 | 0.6 |
6a8e67d20152ca7acb56c8c6199b1999e7f99406 | 500 | py | Python | pybyte/session.py | ms7m/py-byte | c5872ff5b8536160d8cbd7f88406ed593113e77d | ["MIT"] | 4 | 2020-01-26T17:22:05.000Z | 2020-08-15T12:23:31.000Z | pybyte/session.py | ms7m/py-byte | c5872ff5b8536160d8cbd7f88406ed593113e77d | ["MIT"] | 3 | 2020-01-27T18:10:06.000Z | 2020-03-31T10:56:03.000Z | pybyte/session.py | ms7m/py-byte | c5872ff5b8536160d8cbd7f88406ed593113e77d | ["MIT"] | 2 | 2020-01-27T17:59:45.000Z | 2020-02-01T16:43:53.000Z |
import requests
 | 26.315789 | 91 | 0.594 |
6a8e7fcaf4ca3d67de4aab013987d7db788188b5 | 252 | py | Python | pyqtgraph/examples/template.py | secantsquared/pyqtgraph | 3ef7f5b91639543e43bcd66a84290fb9bc18fc5c | ["MIT"] | null | null | null | pyqtgraph/examples/template.py | secantsquared/pyqtgraph | 3ef7f5b91639543e43bcd66a84290fb9bc18fc5c | ["MIT"] | null | null | null | pyqtgraph/examples/template.py | secantsquared/pyqtgraph | 3ef7f5b91639543e43bcd66a84290fb9bc18fc5c | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""
Description of example
"""
import pyqtgraph as pg
from pyqtgraph.Qt import QtCore, QtGui, mkQApp
import numpy as np
app = mkQApp()
# win.setWindowTitle('pyqtgraph example: ____')
if __name__ == '__main__':
pg.exec()
| 15.75 | 47 | 0.68254 |
6a8f3e25920be24fb569cc55eff90ae879efa647 | 73,328 | py | Python | ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py | cas-packone/ambari-chs | 68033fbd4b810b6642853f2ad9128cbbd4e0cb7b | ["Apache-2.0"] | 3 | 2019-06-20T11:49:36.000Z | 2020-12-11T10:44:29.000Z | ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py | cas-packone/ambari-chs | 68033fbd4b810b6642853f2ad9128cbbd4e0cb7b | ["Apache-2.0"] | null | null | null | ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py | cas-packone/ambari-chs | 68033fbd4b810b6642853f2ad9128cbbd4e0cb7b | ["Apache-2.0"] | 1 | 2019-03-20T08:36:17.000Z | 2019-03-20T08:36:17.000Z |
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import socket
from unittest import TestCase
from mock.mock import patch, MagicMock
| 36.13997 | 166 | 0.551836 |
6a8f6efd4560f4b302cef1f8c07a3c86f509c35d | 642 | py | Python | debugging/code/multiprocess_main.py | awesome-archive/python-debugging-skills | 69af455302a805d6f198a06ea934f79d5913cb3e | ["MIT"] | null | null | null | debugging/code/multiprocess_main.py | awesome-archive/python-debugging-skills | 69af455302a805d6f198a06ea934f79d5913cb3e | ["MIT"] | null | null | null | debugging/code/multiprocess_main.py | awesome-archive/python-debugging-skills | 69af455302a805d6f198a06ea934f79d5913cb3e | ["MIT"] | null | null | null |
# -*- encoding: utf-8 -*-
import multiprocessing as mp
import time
from pudb.remote import set_trace
def worker(worker_id):
""" Simple worker process"""
i = 0
while i < 10:
if worker_id == 1: # debug process with id 1
set_trace(term_size=(80, 24))
time.sleep(1) # represents some work
print('In Process {}, i:{}'.format(worker_id, i))
i = i + 1
if __name__ == '__main__':
processes = []
for p_id in range(2): # 2 worker processes
p = mp.Process(target=worker, args=(p_id,))
p.start()
processes.append(p)
for p in processes:
p.join()
| 22.928571 | 57 | 0.573209 |
6a8fb5db5439c528b72c62e081f71f595115b9ad | 1,317 | py | Python | lldb/packages/Python/lldbsuite/test/expression_command/anonymous-struct/TestCallUserAnonTypedef.py | bytesnake/Enzyme | 247606c279920d476645d2e319e574bf8be10fc9 | ["Apache-2.0"] | null | null | null | lldb/packages/Python/lldbsuite/test/expression_command/anonymous-struct/TestCallUserAnonTypedef.py | bytesnake/Enzyme | 247606c279920d476645d2e319e574bf8be10fc9 | ["Apache-2.0"] | null | null | null | lldb/packages/Python/lldbsuite/test/expression_command/anonymous-struct/TestCallUserAnonTypedef.py | bytesnake/Enzyme | 247606c279920d476645d2e319e574bf8be10fc9 | ["Apache-2.0"] | null | null | null |
"""
Test calling user defined functions using expression evaluation.
This test checks that typesystem lookup works correctly for typedefs of
untagged structures.
Ticket: https://llvm.org/bugs/show_bug.cgi?id=26790
"""
from __future__ import print_function
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
| 28.021277 | 85 | 0.658314 |
6a8fddf8511ca7d429d8644119f475536d5dae17 | 2,486 | py | Python | main.py | ThomasDLi/simple-photo-editor | f8b3f1025155e2542b93b94c12d607b9b5e45731 | ["MIT"] | 1 | 2021-05-21T19:21:26.000Z | 2021-05-21T19:21:26.000Z | main.py | ThomasDLi/simple-photo-editor | f8b3f1025155e2542b93b94c12d607b9b5e45731 | ["MIT"] | null | null | null | main.py | ThomasDLi/simple-photo-editor | f8b3f1025155e2542b93b94c12d607b9b5e45731 | ["MIT"] | null | null | null |
from PIL import Image, ImageEnhance
user_account_name = "Thomas.Li26"
if __name__ == "__main__":
main()
| 38.84375 | 104 | 0.650442 |
6a90540c04aa6d7b6d0419ad6f3c05ff1c4aa797 | 377 | py | Python | scripts/field/Curbrock_Summon1.py | G00dBye/YYMS | 1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb | [
"MIT"
] | 54 | 2019-04-16T23:24:48.000Z | 2021-12-18T11:41:50.000Z | scripts/field/Curbrock_Summon1.py | G00dBye/YYMS | 1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb | [
"MIT"
] | 3 | 2019-05-19T15:19:41.000Z | 2020-04-27T16:29:16.000Z | scripts/field/Curbrock_Summon1.py | G00dBye/YYMS | 1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb | [
"MIT"
] | 49 | 2020-11-25T23:29:16.000Z | 2022-03-26T16:20:24.000Z | # Curbrock Summon 2
CURBROCK2 = 9400930 # MOD ID
CURBROCKS_ESCAPE_ROUTE_VER2 = 600050040 # MAP ID
CURBROCKS_ESCAPE_ROUTE_VER3 = 600050050 # MAP ID 2
sm.spawnMob(CURBROCK2, 190, -208, False)
sm.createClock(1800)
sm.addEvent(sm.invokeAfterDelay(1800000, "warp", CURBROCKS_ESCAPE_ROUTE_VER3, 0))
sm.waitForMobDeath(CURBROCK2)
sm.warp(CURBROCKS_ESCAPE_ROUTE_VER2)
sm.stopEvents()
 | 31.416667 | 81 | 0.806366 |
6a9096da46f0e3f90b72b1a819bf62ce11ddc6bd | 786 | py | Python | trainloops/listeners/cluster_killswitch.py | Gerryflap/master_thesis | 5dc16e21b23837fee8a4532679bb5cb961af0b7c | ["MIT"] | null | null | null | trainloops/listeners/cluster_killswitch.py | Gerryflap/master_thesis | 5dc16e21b23837fee8a4532679bb5cb961af0b7c | ["MIT"] | null | null | null | trainloops/listeners/cluster_killswitch.py | Gerryflap/master_thesis | 5dc16e21b23837fee8a4532679bb5cb961af0b7c | ["MIT"] | null | null | null |
"""
Cancelling jobs on the University cluster forces programs to instantly quit,
which sometimes crashes cluster nodes.
As a remedy, this killswitch listener will stop the experiment in a nicer way to prevent this from happening.
The experiment will be stopped if a file named "stop" is encountered in the results folder of the experiment.
The existence of this file is checked after each epoch.
"""
import os
from trainloops.listeners.listener import Listener
| 32.75 | 113 | 0.704835 |
6a920dacf31156e85a0fcebb52765a1d1ca683fe | 2,255 | py | Python | authors/apps/notifications/views.py | andela/ah-backend-spaces- | 58e031a96a6b9555f1a4133cf8cb688c236d3f3b | ["BSD-3-Clause"] | 2 | 2018-08-17T15:47:36.000Z | 2018-09-13T13:58:34.000Z | authors/apps/notifications/views.py | andela/ah-backend-spaces- | 58e031a96a6b9555f1a4133cf8cb688c236d3f3b | ["BSD-3-Clause"] | 35 | 2018-07-24T11:42:53.000Z | 2021-06-10T20:34:41.000Z | authors/apps/notifications/views.py | andela/ah-backend-spaces- | 58e031a96a6b9555f1a4133cf8cb688c236d3f3b | ["BSD-3-Clause"] | 3 | 2018-07-17T13:05:35.000Z | 2018-09-06T16:03:52.000Z |
from rest_framework import status
from rest_framework.generics import (
RetrieveUpdateAPIView, CreateAPIView,
RetrieveUpdateDestroyAPIView
)
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from ..authentication.backends import JWTAuthentication
from ..authentication.models import User
from .models import Notifications
from .renderers import (
NotificationsJSONRenderer
)
from .serializers import (
NotificationsAPIViewSerializer, GetNotificationsAPIViewSerializer
)
| 34.692308 | 93 | 0.73082 |
6a921ec9df90e9d0bc4821cbf3d19c03f4f29792 | 1,882 | py | Python | scripts/common/frozendict.py | bopopescu/build | 4e95fd33456e552bfaf7d94f7d04b19273d1c534 | ["BSD-3-Clause"] | null | null | null | scripts/common/frozendict.py | bopopescu/build | 4e95fd33456e552bfaf7d94f7d04b19273d1c534 | ["BSD-3-Clause"] | null | null | null | scripts/common/frozendict.py | bopopescu/build | 4e95fd33456e552bfaf7d94f7d04b19273d1c534 | ["BSD-3-Clause"] | 1 | 2020-07-23T11:05:06.000Z | 2020-07-23T11:05:06.000Z |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Implements a frozen dictionary-like object"""
import collections
import copy
import common.memo as memo
| 26.507042 | 79 | 0.698193 |
6a92b244776a352d3c8cb2387f8e203d0ce669c3 | 22 | py | Python | avatar/__init__.py | yogeshkheri/geonode-avatar | 293474f814117ae680278223c8cdf8d59c67862d | ["BSD-3-Clause"] | 3 | 2021-10-17T20:37:40.000Z | 2022-03-17T10:29:14.000Z | avatar/__init__.py | yogeshkheri/geonode-avatar | 293474f814117ae680278223c8cdf8d59c67862d | ["BSD-3-Clause"] | 4 | 2021-09-02T13:26:11.000Z | 2022-03-16T12:26:36.000Z | avatar/__init__.py | yogeshkheri/geonode-avatar | 293474f814117ae680278223c8cdf8d59c67862d | ["BSD-3-Clause"] | null | null | null |
__version__ = '5.0.2'
| 11 | 21 | 0.636364 |
6a9450d0c04cca0024a14e15346193d320a70e58 | 50 | py | Python | __init__.py | mmanganel/neurecon | 5e86324675985f1fedfc4d5d3ac2e750f480538f | ["MIT"] | null | null | null | __init__.py | mmanganel/neurecon | 5e86324675985f1fedfc4d5d3ac2e750f480538f | ["MIT"] | null | null | null | __init__.py | mmanganel/neurecon | 5e86324675985f1fedfc4d5d3ac2e750f480538f | ["MIT"] | null | null | null |
from neurecon.reconstruction import reconstruct
| 12.5 | 47 | 0.86 |
6a95707999c6dd8d718ea3549fc898fb5e496ed8 | 965 | py | Python | python/flexflow/keras/datasets/cifar.py | zmxdream/FlexFlow | 7ea50d71a02e853af7ae573d88c911511b3e82e0 | ["Apache-2.0"] | 455 | 2018-12-09T01:57:46.000Z | 2022-03-22T01:56:47.000Z | python/flexflow/keras/datasets/cifar.py | zmxdream/FlexFlow | 7ea50d71a02e853af7ae573d88c911511b3e82e0 | ["Apache-2.0"] | 136 | 2019-04-19T08:24:27.000Z | 2022-03-28T01:39:19.000Z | python/flexflow/keras/datasets/cifar.py | zmxdream/FlexFlow | 7ea50d71a02e853af7ae573d88c911511b3e82e0 | ["Apache-2.0"] | 102 | 2018-12-22T07:38:05.000Z | 2022-03-30T06:04:39.000Z |
# -*- coding: utf-8 -*-
"""Utilities common to CIFAR10 and CIFAR100 datasets.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from six.moves import cPickle
def load_batch(fpath, label_key='labels'):
"""Internal utility for parsing CIFAR data.
# Arguments
fpath: path the file to parse.
label_key: key for label data in the retrieve
dictionary.
# Returns
A tuple `(data, labels)`.
"""
with open(fpath, 'rb') as f:
if sys.version_info < (3,):
d = cPickle.load(f)
else:
d = cPickle.load(f, encoding='bytes')
# decode utf8
d_decoded = {}
for k, v in d.items():
d_decoded[k.decode('utf8')] = v
d = d_decoded
data = d['data']
labels = d[label_key]
data = data.reshape(data.shape[0], 3, 32, 32)
    return data, labels
 | 26.081081 | 53 | 0.586528 |
6a96000d55f22510def511958af1f99b01dba806 | 523 | py | Python | day7/p2.py | Seralpa/Advent-of-code | 9633624e4ff48c50d8be3deac54c83059e9c3b04 | ["MIT"] | 1 | 2020-12-18T16:06:25.000Z | 2020-12-18T16:06:25.000Z | day7/p2.py | Seralpa/Advent-of-code | 9633624e4ff48c50d8be3deac54c83059e9c3b04 | ["MIT"] | null | null | null | day7/p2.py | Seralpa/Advent-of-code | 9633624e4ff48c50d8be3deac54c83059e9c3b04 | ["MIT"] | null | null | null |
with open('day7/input.txt') as f:
rules=dict([l.split(' contain') for l in f.read().replace(' bags', '').replace(' bag', '').replace('.', '').replace(' no other', '0 ').splitlines()])
for key in rules:
rules[key]=[(d[2:].strip(), int(d[:2].strip())) for d in rules[key].split(', ')]
print(getNumBags('shiny gold')-1) #-1 cause shiny bag not included
 | 40.230769 | 153 | 0.596558 |
6a9611abd97c536f926ca250cbefadb44ebcbbc2 | 471 | py | Python | adv/luther.py | 6tennis/dl | 69eb7e71da9fabe9e7ec40c461b525b4f967f345 | ["Apache-2.0"] | null | null | null | adv/luther.py | 6tennis/dl | 69eb7e71da9fabe9e7ec40c461b525b4f967f345 | ["Apache-2.0"] | null | null | null | adv/luther.py | 6tennis/dl | 69eb7e71da9fabe9e7ec40c461b525b4f967f345 | ["Apache-2.0"] | null | null | null |
from core.advbase import *
from slot.d import *
if __name__ == '__main__':
from core.simulate import test_with_argv
    test_with_argv(None, *sys.argv)
 | 20.478261 | 44 | 0.552017 |
6a9615693970b5561756763d7533ebc0f325ce0c | 21,646 | py | Python | wandb/sdk/data_types/image.py | macio232/client | 295380c99b1a0946470672d40348b17a674ad17f | ["MIT"] | null | null | null | wandb/sdk/data_types/image.py | macio232/client | 295380c99b1a0946470672d40348b17a674ad17f | ["MIT"] | null | null | null | wandb/sdk/data_types/image.py | macio232/client | 295380c99b1a0946470672d40348b17a674ad17f | ["MIT"] | null | null | null |
import hashlib
from io import BytesIO
import logging
import os
from typing import Any, cast, Dict, List, Optional, Sequence, Type, TYPE_CHECKING, Union
from pkg_resources import parse_version
import wandb
from wandb import util
from ._private import MEDIA_TMP
from .base_types.media import BatchableMedia, Media
from .helper_types.bounding_boxes_2d import BoundingBoxes2D
from .helper_types.classes import Classes
from .helper_types.image_mask import ImageMask
if TYPE_CHECKING: # pragma: no cover
import matplotlib # type: ignore
import numpy as np # type: ignore
import PIL # type: ignore
import torch # type: ignore
from wandb.apis.public import Artifact as PublicArtifact
from ..wandb_artifacts import Artifact as LocalArtifact
from ..wandb_run import Run as LocalRun
ImageDataType = Union[
"matplotlib.artist.Artist", "PIL.Image", "TorchTensorType", "np.ndarray"
]
ImageDataOrPathType = Union[str, "Image", ImageDataType]
TorchTensorType = Union["torch.Tensor", "torch.Variable"]
def guess_mode(self, data: "np.ndarray") -> str:
"""
Guess what type of image the np.array is representing
"""
# TODO: do we want to support dimensions being at the beginning of the array?
if data.ndim == 2:
return "L"
elif data.shape[-1] == 3:
return "RGB"
elif data.shape[-1] == 4:
return "RGBA"
else:
raise ValueError(
"Un-supported shape for image conversion %s" % list(data.shape)
)
def __ne__(self, other: object) -> bool:
return not self.__eq__(other)
def __eq__(self, other: object) -> bool:
if not isinstance(other, Image):
return False
else:
self_image = self.image
other_image = other.image
if self_image is not None:
self_image = list(self_image.getdata())
if other_image is not None:
other_image = list(other_image.getdata())
return (
self._grouping == other._grouping
and self._caption == other._caption
and self._width == other._width
and self._height == other._height
and self_image == other_image
and self._classes == other._classes
)
def to_data_array(self) -> List[Any]:
res = []
if self.image is not None:
data = list(self.image.getdata())
for i in range(self.image.height):
res.append(data[i * self.image.width : (i + 1) * self.image.width])
self._free_ram()
return res
def _free_ram(self) -> None:
if self._path is not None:
self._image = None
| 36.688136 | 119 | 0.572438 |
6a961a7708b268c3d81ea73ab8b93515bd578d6c | 669 | py | Python | src/ACC_Backend_Utils.py | skostic14/isda-racing-backend | 41b5f9760dc17a29aa8ab5e4cc1894a27496a72c | ["Apache-2.0"] | 1 | 2021-07-29T05:29:06.000Z | 2021-07-29T05:29:06.000Z | src/ACC_Backend_Utils.py | skostic14/isda-racing-backend | 41b5f9760dc17a29aa8ab5e4cc1894a27496a72c | ["Apache-2.0"] | null | null | null | src/ACC_Backend_Utils.py | skostic14/isda-racing-backend | 41b5f9760dc17a29aa8ab5e4cc1894a27496a72c | ["Apache-2.0"] | null | null | null |
import datetime
# Gets time from milliseconds
# Returns string formatted as HH:MM:SS:mmm, MM:SS:mmm or S:mmm, depending on the time.
# Returns a string formatted as YYYY-MM-DD
 | 35.210526 | 87 | 0.651719 |
6a961b6b72524b941aa7777c8c1e4c9ea87f76f0 | 2,721 | py | Python | examples/advanced/pidigits.py | ovolve/sympy | 0a15782f20505673466b940454b33b8014a25c13 | ["BSD-3-Clause"] | 3 | 2015-01-17T23:15:04.000Z | 2015-05-26T14:11:44.000Z | examples/advanced/pidigits.py | ovolve/sympy | 0a15782f20505673466b940454b33b8014a25c13 | ["BSD-3-Clause"] | 7 | 2015-03-23T23:33:02.000Z | 2019-02-09T00:19:41.000Z | examples/advanced/pidigits.py | ovolve/sympy | 0a15782f20505673466b940454b33b8014a25c13 | ["BSD-3-Clause"] | 1 | 2019-10-18T12:39:41.000Z | 2019-10-18T12:39:41.000Z |
#!/usr/bin/env python
"""Pi digits example
Example shows arbitrary precision using mpmath with the
computation of the digits of pi.
"""
from mpmath import libmp, pi
from mpmath import functions as mpf_funs
import math
from time import clock
import sys
def display_fraction(digits, skip=0, colwidth=10, columns=5):
"""Pretty printer for first n digits of a fraction"""
perline = colwidth * columns
printed = 0
for linecount in range((len(digits) - skip) // (colwidth * columns)):
line = digits[skip + linecount*perline:skip + (linecount + 1)*perline]
for i in range(columns):
print(line[i*colwidth: (i + 1)*colwidth],)
print(":", (linecount + 1)*perline)
if (linecount + 1) % 10 == 0:
print
printed += colwidth*columns
rem = (len(digits) - skip) % (colwidth * columns)
if rem:
buf = digits[-rem:]
s = ""
for i in range(columns):
s += buf[:colwidth].ljust(colwidth + 1, " ")
buf = buf[colwidth:]
print(s + ":", printed + colwidth*columns)
def calculateit(func, base, n, tofile):
"""Writes first n base-digits of a mpmath function to file"""
prec = 100
intpart = libmp.numeral(3, base)
if intpart == 0:
skip = 0
else:
skip = len(intpart)
print("Step 1 of 2: calculating binary value...")
prec = int(n*math.log(base, 2)) + 10
t = clock()
a = func(prec)
step1_time = clock() - t
print("Step 2 of 2: converting to specified base...")
t = clock()
d = libmp.bin_to_radix(a.man, -a.exp, base, n)
d = libmp.numeral(d, base, n)
step2_time = clock() - t
print("\nWriting output...\n")
if tofile:
out_ = sys.stdout
sys.stdout = tofile
print("%i base-%i digits of pi:\n" % (n, base))
print(intpart, ".\n")
display_fraction(d, skip, colwidth=10, columns=5)
if tofile:
sys.stdout = out_
print("\nFinished in %f seconds (%f calc, %f convert)" % \
((step1_time + step2_time), step1_time, step2_time))
def interactive():
"""Simple function to interact with user"""
print("Compute digits of pi with SymPy\n")
base = input("Which base? (2-36, 10 for decimal) \n> ")
digits = input("How many digits? (enter a big number, say, 10000)\n> ")
tofile = raw_input("Output to file? (enter a filename, or just press enter\nto print directly to the screen) \n> ")
if tofile:
tofile = open(tofile, "w")
calculateit(pi, base, digits, tofile)
def main():
"""A non-interactive runner"""
base = 16
digits = 500
tofile = None
calculateit(pi, base, digits, tofile)
if __name__ == "__main__":
interactive()
| 30.233333 | 119 | 0.602352 |
6a975a412e5d4fce44f6c3c23326742e66d78cbe | 1,272 | py | Python | authserver/mailauth/migrations/0011_mnserviceuser.py | yopiti/authserver | 0a1f7f5a83d03963d1ecfb5199be8e05d3068dfd | ["MIT"] | 8 | 2017-07-04T10:07:32.000Z | 2022-01-02T10:31:43.000Z | authserver/mailauth/migrations/0011_mnserviceuser.py | yopiti/authserver | 0a1f7f5a83d03963d1ecfb5199be8e05d3068dfd | ["MIT"] | 14 | 2020-02-11T21:42:38.000Z | 2022-03-28T16:00:55.000Z | authserver/mailauth/migrations/0011_mnserviceuser.py | yopiti/authserver | 0a1f7f5a83d03963d1ecfb5199be8e05d3068dfd | ["MIT"] | 1 | 2020-03-01T10:39:28.000Z | 2020-03-01T10:39:28.000Z |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-03-13 00:16
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import mailauth.models
import uuid
| 36.342857 | 118 | 0.633648 |
6a97a004a7c418b0d32aaf5764a1c6b24a50f26a | 10,580 | py | Python | tempest/hacking/checks.py | rishabh20111990/tempest | df15531cd4231000b0da016f5cd8641523ce984e | ["Apache-2.0"] | 2 | 2015-08-13T00:07:49.000Z | 2020-08-07T06:38:44.000Z | tempest/hacking/checks.py | rishabh20111990/tempest | df15531cd4231000b0da016f5cd8641523ce984e | ["Apache-2.0"] | null | null | null | tempest/hacking/checks.py | rishabh20111990/tempest | df15531cd4231000b0da016f5cd8641523ce984e | ["Apache-2.0"] | 3 | 2016-08-30T06:53:54.000Z | 2021-03-22T16:54:39.000Z |
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
from hacking import core
import pycodestyle
PYTHON_CLIENTS = ['cinder', 'glance', 'keystone', 'nova', 'swift', 'neutron',
'ironic', 'heat', 'sahara']
PYTHON_CLIENT_RE = re.compile('import (%s)client' % '|'.join(PYTHON_CLIENTS))
TEST_DEFINITION = re.compile(r'^\s*def test.*')
SETUP_TEARDOWN_CLASS_DEFINITION = re.compile(r'^\s+def (setUp|tearDown)Class')
SCENARIO_DECORATOR = re.compile(r'\s*@.*services\((.*)\)')
RAND_NAME_HYPHEN_RE = re.compile(r".*rand_name\(.+[\-\_][\"\']\)")
mutable_default_args = re.compile(r"^\s*def .+\((.+=\{\}|.+=\[\])")
TESTTOOLS_SKIP_DECORATOR = re.compile(r'\s*@testtools\.skip\((.*)\)')
METHOD = re.compile(r"^ def .+")
METHOD_GET_RESOURCE = re.compile(r"^\s*def (list|show)\_.+")
METHOD_DELETE_RESOURCE = re.compile(r"^\s*def delete_.+")
CLASS = re.compile(r"^class .+")
EX_ATTRIBUTE = re.compile(r'(\s+|\()(e|ex|exc|exception).message(\s+|\))')
NEGATIVE_TEST_DECORATOR = re.compile(
r'\s*@decorators\.attr\(type=.*negative.*\)')
_HAVE_NEGATIVE_DECORATOR = False
def _common_service_clients_check(logical_line, physical_line, filename,
ignored_list_file=None):
if not re.match('tempest/(lib/)?services/.*', filename):
return False
if ignored_list_file is not None:
ignored_list = []
with open('tempest/hacking/' + ignored_list_file) as f:
for line in f:
ignored_list.append(line.strip())
if filename in ignored_list:
return False
if not METHOD.match(physical_line):
return False
if pycodestyle.noqa(physical_line):
return False
return True
| 31.963746 | 79 | 0.636578 |
6a98547230e4cc83fa248137ca0fde09ebb67dcf | 1,018 | py | Python | data/train/python/6a98547230e4cc83fa248137ca0fde09ebb67dcfController.py | harshp8l/deep-learning-lang-detection | 2a54293181c1c2b1a2b840ddee4d4d80177efb33 | ["MIT"] | 84 | 2017-10-25T15:49:21.000Z | 2021-11-28T21:25:54.000Z | data/train/python/6a98547230e4cc83fa248137ca0fde09ebb67dcfController.py | vassalos/deep-learning-lang-detection | cbb00b3e81bed3a64553f9c6aa6138b2511e544e | ["MIT"] | 5 | 2018-03-29T11:50:46.000Z | 2021-04-26T13:33:18.000Z | data/train/python/6a98547230e4cc83fa248137ca0fde09ebb67dcfController.py | vassalos/deep-learning-lang-detection | cbb00b3e81bed3a64553f9c6aa6138b2511e544e | ["MIT"] | 24 | 2017-11-22T08:31:00.000Z | 2022-03-27T01:22:31.000Z |
import SimpleXMLRPCServer
import sys
import logging
from K8055Controller import K8055Controller
logging.basicConfig()
controller_log = logging.getLogger("Controller")
controller = Controller()
server = SimpleXMLRPCServer.SimpleXMLRPCServer(("d6349.mysql.zone.ee", 7000))
server.register_instance(controller)
server.serve_forever()
 | 24.829268 | 77 | 0.634578 |
6a98f5590ec68008681144a3cad2ee8a6d9f0359 | 55 | py | Python | model/__init__.py | sun1638650145/CRNN | 485157e5803b9be861a63ebb04f04fccb16ef5f1 | ["Apache-2.0"] | 11 | 2020-09-18T02:35:48.000Z | 2022-02-26T21:31:55.000Z | model/__init__.py | sun1638650145/CRNN | 485157e5803b9be861a63ebb04f04fccb16ef5f1 | ["Apache-2.0"] | null | null | null | model/__init__.py | sun1638650145/CRNN | 485157e5803b9be861a63ebb04f04fccb16ef5f1 | ["Apache-2.0"] | null | null | null |
from .crnn import CRNN
from .crnn import CRNN_Attention
 | 27.5 | 32 | 0.836364 |
6a9907c6e19624e9a00da0b3cff99ba87e746680 | 3,206 | py | Python | models2.py | Lydia-Tan/MindLife | 644f1a3834f337d51c99650c3924df99c5200d06 | ["MIT"] | 1 | 2020-01-20T19:49:07.000Z | 2020-01-20T19:49:07.000Z | models2.py | lindaweng/Mindlife | 30be070b39728fb3fe149d4c95e5bce280a3b6a7 | ["MIT"] | null | null | null | models2.py | lindaweng/Mindlife | 30be070b39728fb3fe149d4c95e5bce280a3b6a7 | ["MIT"] | null | null | null |
import nltk
import re
import sys
from sys import argv
from nltk.sentiment.vader import SentimentIntensityAnalyzer
| 51.709677 | 98 | 0.585153 |
6a9a4141ccd8a77a2a296371f9b8eb6510494db4 | 1,487 | py | Python | tokendito/tool.py | pcmxgti/tokendito | c1672917b1b95e463c5bdf8e9c3c039189da8e42 | ["Apache-2.0"] | 40 | 2019-07-31T03:21:03.000Z | 2022-03-29T23:57:19.000Z | tokendito/tool.py | pcmxgti/tokendito | c1672917b1b95e463c5bdf8e9c3c039189da8e42 | ["Apache-2.0"] | 27 | 2019-08-07T06:40:15.000Z | 2022-03-21T18:46:49.000Z | tokendito/tool.py | pcmxgti/tokendito | c1672917b1b95e463c5bdf8e9c3c039189da8e42 | ["Apache-2.0"] | 16 | 2019-07-31T14:22:04.000Z | 2022-02-16T12:55:27.000Z |
# vim: set filetype=python ts=4 sw=4
# -*- coding: utf-8 -*-
"""This module retrieves AWS credentials after authenticating with Okta."""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
from future import standard_library
from tokendito import aws_helpers
from tokendito import helpers
from tokendito import okta_helpers
from tokendito import settings
standard_library.install_aliases()
def cli(args):
"""Tokendito retrieves AWS credentials after authenticating with Okta."""
# Set some required initial values
args = helpers.setup(args)
logging.debug("tokendito retrieves AWS credentials after authenticating with Okta.")
# Collect and organize user specific information
helpers.process_options(args)
# Authenticate okta and AWS also use assumerole to assign the role
logging.debug("Authenticate user with Okta and AWS.")
secret_session_token = okta_helpers.authenticate_user(
settings.okta_org, settings.okta_username, settings.okta_password
)
saml_response_string, saml_xml = aws_helpers.authenticate_to_roles(
secret_session_token, settings.okta_aws_app_url
)
assume_role_response, role_name = aws_helpers.select_assumeable_role(
saml_response_string, saml_xml
)
aws_helpers.ensure_keys_work(assume_role_response)
helpers.set_local_credentials(
assume_role_response, role_name, settings.aws_region, settings.aws_output
)
| 31.638298 | 88 | 0.774042 |
6a9b47904f23c1124cf4fbc27654a8fe5f3b7493 | 42 | py | Python | resources/__init__.py | Boryslavq/UHMI_Chalenge | 4b7df902c0a0901c727a6fb26347dabca1067494 | ["MIT"] | null | null | null | resources/__init__.py | Boryslavq/UHMI_Chalenge | 4b7df902c0a0901c727a6fb26347dabca1067494 | ["MIT"] | null | null | null | resources/__init__.py | Boryslavq/UHMI_Chalenge | 4b7df902c0a0901c727a6fb26347dabca1067494 | ["MIT"] | null | null | null |
from . import rest
from . import helpers
| 14 | 22 | 0.738095 |
6a9c07074d315021100d6322a18c6bc3087be1db | 15,833 | py | Python | ir_datasets/formats/trec.py | cakiki/ir_datasets | 7f9f8e9ff62e49d40383220ecc2daa250695d267 | ["Apache-2.0"] | null | null | null | ir_datasets/formats/trec.py | cakiki/ir_datasets | 7f9f8e9ff62e49d40383220ecc2daa250695d267 | ["Apache-2.0"] | null | null | null | ir_datasets/formats/trec.py | cakiki/ir_datasets | 7f9f8e9ff62e49d40383220ecc2daa250695d267 | ["Apache-2.0"] | null | null | null |
import io
import codecs
import tarfile
import re
import gzip
import xml.etree.ElementTree as ET
from fnmatch import fnmatch
from pathlib import Path
from typing import NamedTuple
import ir_datasets
from ir_datasets.indices import PickleLz4FullStore
from .base import GenericDoc, GenericQuery, GenericScoredDoc, BaseDocs, BaseQueries, BaseScoredDocs, BaseQrels
# Default content tags from Anserini's TrecCollection
CONTENT_TAGS = 'TEXT HEADLINE TITLE HL HEAD TTL DD DATE LP LEADPARA'.split()
DEFAULT_QTYPE_MAP = {
'<num> *(Number:)?': 'query_id',
'<title> *(Topic:)?': 'title',
'<desc> *(Description:)?': 'description',
'<narr> *(Narrative:)?': 'narrative'
}
| 38.429612 | 284 | 0.55978 |
6a9c552700ad0a75cac33278ee8dc5a5139c2432 | 844 | py | Python | textpand/download.py | caufieldjh/textpand-for-kgs | 42853c53c5a4cc06fbd745c147d02fe7916690fa | ["BSD-3-Clause"] | 3 | 2021-12-10T21:13:47.000Z | 2021-12-10T23:36:18.000Z | textpand/download.py | caufieldjh/textpand-for-kgs | 42853c53c5a4cc06fbd745c147d02fe7916690fa | ["BSD-3-Clause"] | 1 | 2022-01-06T20:59:07.000Z | 2022-01-06T20:59:07.000Z | textpand/download.py | caufieldjh/textpand-for-kgs | 42853c53c5a4cc06fbd745c147d02fe7916690fa | ["BSD-3-Clause"] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .utils import download_from_yaml
def download(output_dir: str, snippet_only: bool, ignore_cache: bool = False) -> None:
"""Downloads data files from list of URLs (default: download.yaml) into data directory (default: data/).
Args:
output_dir: A string pointing to the location to download data to.
snippet_only: Downloads only the first 5 kB of the source, for testing and file checks.
ignore_cache: Ignore cache and download files even if they exist [false]
Returns:
None.
"""
download_from_yaml(yaml_file="download.yaml",
output_dir=output_dir,
snippet_only=snippet_only,
ignore_cache=ignore_cache,
verbose=True)
return None
| 31.259259 | 108 | 0.625592 |
6a9cb003c79f63e5985173912dffc928314248d4 | 6,770 | py | Python | venv/lib/python3.6/site-packages/ansible_collections/netapp/ontap/plugins/modules/na_ontap_autosupport_invoke.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | ["MIT"] | 1 | 2020-01-22T13:11:23.000Z | 2020-01-22T13:11:23.000Z | venv/lib/python3.6/site-packages/ansible_collections/netapp/ontap/plugins/modules/na_ontap_autosupport_invoke.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | ["MIT"] | 12 | 2020-02-21T07:24:52.000Z | 2020-04-14T09:54:32.000Z | venv/lib/python3.6/site-packages/ansible_collections/netapp/ontap/plugins/modules/na_ontap_autosupport_invoke.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | ["MIT"] | null | null | null |
#!/usr/bin/python
# (c) 2020, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
'''
na_ontap_autosupport_invoke
'''
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'
}
DOCUMENTATION = '''
module: na_ontap_autosupport_invoke
author: NetApp Ansible Team (@carchi8py) <[email protected]>
short_description: NetApp ONTAP send AutoSupport message
extends_documentation_fragment:
- netapp.ontap.netapp.na_ontap
version_added: '20.4.0'
description:
- Send an AutoSupport message from a node
options:
name:
description:
- The name of the node to send the message to.
- Not specifying this option invokes AutoSupport on all nodes in the cluster.
type: str
autosupport_message:
description:
- Text sent in the subject line of the AutoSupport message.
type: str
aliases:
- message
version_added: 20.8.0
type:
description:
- Type of AutoSupport Collection to Issue.
choices: ['test', 'performance', 'all']
default: 'all'
type: str
uri:
description:
- send the AutoSupport message to the destination you specify instead of the configured destination.
type: str
'''
EXAMPLES = '''
- name: Send message
na_ontap_autosupport_invoke:
name: node1
message: invoked test autosupport rest
uri: http://1.2.3.4/delivery_uri
type: test
hostname: "{{ hostname }}"
username: "{{ username }}"
password: "{{ password }}"
'''
RETURN = '''
'''
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible_collections.netapp.ontap.plugins.module_utils.netapp_module import NetAppModule
from ansible_collections.netapp.ontap.plugins.module_utils.netapp import OntapRestAPI
import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
HAS_NETAPP_LIB = netapp_utils.has_netapp_lib()
if __name__ == '__main__':
main()
| 34.365482 | 109 | 0.644018 |
6a9cd0c545ed5aa451bbc0bc26a2e800d471ecd0 | 304 | py | Python | tests/api/serializer/test_user.py | armandomeeuwenoord/freight | 31ae2fa9252ab0b25385abd04742475e6671e3b1 | ["Apache-2.0"] | 562 | 2015-02-20T08:25:24.000Z | 2021-11-12T19:58:44.000Z | tests/api/serializer/test_user.py | armandomeeuwenoord/freight | 31ae2fa9252ab0b25385abd04742475e6671e3b1 | ["Apache-2.0"] | 129 | 2015-02-20T07:41:14.000Z | 2022-02-17T21:14:40.000Z | tests/api/serializer/test_user.py | armandomeeuwenoord/freight | 31ae2fa9252ab0b25385abd04742475e6671e3b1 | ["Apache-2.0"] | 54 | 2015-02-28T01:12:23.000Z | 2021-03-02T11:14:52.000Z |
from freight.api.serializer import serialize
from freight.testutils import TestCase
| 25.333333 | 44 | 0.680921 |
6a9cdc6c74a18d65dd44c9480dd5e3953a78dd18 | 1,639 | py | Python | binning/pozo_5m_class_dem.py | UP-RS-ESP/GEW-DAP04-WS201819 | 18341620d9168e1eec476af1d8f568cf0017bf56 | ["MIT"] | 2 | 2020-10-12T11:33:00.000Z | 2021-12-20T06:33:54.000Z | binning/pozo_5m_class_dem.py | UP-RS-ESP/GEW-DAP04-WS201819 | 18341620d9168e1eec476af1d8f568cf0017bf56 | ["MIT"] | null | null | null | binning/pozo_5m_class_dem.py | UP-RS-ESP/GEW-DAP04-WS201819 | 18341620d9168e1eec476af1d8f568cf0017bf56 | ["MIT"] | null | null | null |
import sys
import numpy as np
from matplotlib import pyplot as pl
from rw import WriteGTiff
fn = '../pozo-steep-vegetated-pcl.npy'
pts = np.load(fn)
x, y, z, c = pts[:, 0], pts[:, 1], pts[:, 2], pts[:, 5]
ix = (0.2 * (x - x.min())).astype('int')
iy = (0.2 * (y - y.min())).astype('int')
shape = (100, 100)
xb = np.arange(shape[1]+1)
yb = np.arange(shape[0]+1)
fg, ax = pl.subplots(ncols = 2, nrows = 2,
figsize = (10.24, 10.24),
sharex = True, sharey = True)
uc = (2, 5)
for j in range(len(uc)):
print('Class %i' % uc[j])
b = c == uc[j]
cx, cy, cz = ix[b], iy[b], z[b]
mean = np.zeros(shape)
stdr = np.zeros(shape)
for i in range(shape[0]):
print('% 3d%%' % i)
for k in range(shape[1]):
b = (cy == i) * (cx == k)
mean[i, k] = cz[b].mean()
stdr[i, k] = cz[b].std()
fname = 'pozo_5m_dem_mean_cl%i.tif' % uc[j]
WriteGTiff(fname, mean, x.min(), y.min()+500, step = 5)
np.save('pozo_5m_dem_mean_cl%i.npy' % uc[j], mean)
np.save('pozo_5m_dem_stdr_cl%i.npy' % uc[j], stdr)
ax[0, j].set_title('Class %i' % uc[j])
im = ax[0, j].pcolormesh(xb, yb,
np.ma.masked_invalid(mean),
cmap = pl.cm.viridis_r)
cb = fg.colorbar(im, ax = ax[0, j])
cb.set_label('Mean elevation [m]')
im = ax[1, j].pcolormesh(xb, yb,
np.ma.masked_invalid(stdr),
cmap = pl.cm.magma_r)
cb = fg.colorbar(im, ax = ax[1, j])
cb.set_label('Elevation STD')
ax[0, j].set_aspect('equal')
ax[1, j].set_aspect('equal')
pl.savefig('%s.png' % sys.argv[0][:-3])
| 30.351852 | 59 | 0.52349 |
6a9cfc593e93acc1f1c0f3afda04be08e714940c | 2,228 | py | Python | comtypes/_meta.py | phuslu/pyMSAA | 611bc4c31e0d6ba36f0f0bebdc6e6be14b994eb0 | ["MIT"] | 23 | 2015-05-28T15:31:35.000Z | 2022-02-16T07:51:34.000Z | comtypes/_meta.py | kar98kar/pyMSAA | 611bc4c31e0d6ba36f0f0bebdc6e6be14b994eb0 | ["MIT"] | 3 | 2020-05-19T03:00:52.000Z | 2020-11-03T09:22:51.000Z | comtypes/_meta.py | kar98kar/pyMSAA | 611bc4c31e0d6ba36f0f0bebdc6e6be14b994eb0 | ["MIT"] | 13 | 2016-08-26T23:00:40.000Z | 2022-03-03T09:58:36.000Z |
# comtypes._meta helper module
from ctypes import POINTER, c_void_p, cast
import comtypes
################################################################
# metaclass for CoClass (in comtypes/__init__.py)
#
# The mro() of a POINTER(App) type, where class App is a subclass of CoClass:
#
# POINTER(App)
# App
# CoClass
# c_void_p
# _SimpleCData
# _CData
# object
# will not work if we change the order of the two base classes!
| 35.935484 | 85 | 0.601436 |
6a9d299ac035789dcfbdc5b67b56e5ebe19176e2 | 33,321 | py | Python | bin/ADFRsuite/CCSBpckgs/mglutil/gui/BasicWidgets/Tk/Dial.py | AngelRuizMoreno/Jupyter_Dock_devel | 6d23bc174d5294d1e9909a0a1f9da0713042339e | ["MIT"] | null | null | null | bin/ADFRsuite/CCSBpckgs/mglutil/gui/BasicWidgets/Tk/Dial.py | AngelRuizMoreno/Jupyter_Dock_devel | 6d23bc174d5294d1e9909a0a1f9da0713042339e | ["MIT"] | null | null | null | bin/ADFRsuite/CCSBpckgs/mglutil/gui/BasicWidgets/Tk/Dial.py | AngelRuizMoreno/Jupyter_Dock_devel | 6d23bc174d5294d1e9909a0a1f9da0713042339e | ["MIT"] | 1 | 2021-11-04T21:48:14.000Z | 2021-11-04T21:48:14.000Z |
################################################################################
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation; either
## version 2.1 of the License, or (at your option) any later version.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this library; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
##
## (C) Copyrights Dr. Michel F. Sanner and TSRI 2016
##
################################################################################
#########################################################################
#
# Date: Mai 2001 Authors: Michel Sanner, Daniel Stoffler
#
# [email protected]
# [email protected]
#
# Copyright: Michel Sanner, Daniel Stoffler and TSRI
#
#########################################################################
import Tkinter
import math
import types
import sys
import os
from mglutil.util.callback import CallbackManager
from mglutil.util.misc import ensureFontCase
from optionsPanel import OptionsPanel
from KeyboardEntry import KeyboardEntry
if __name__ == '__main__':
d = Dial(size=50)
d.configure(showLabel=1)
d.callbacks.AddCallback(foo)
| 36.376638 | 87 | 0.547643 |
6a9d42bd307c1507375c76e403f46b3901bbf76d | 3,560 | py | Python | qt-creator-opensource-src-4.6.1/scripts/checkInstalledFiles.py | kevinlq/Qt-Creator-Opensource-Study | b8cadff1f33f25a5d4ef33ed93f661b788b1ba0f | ["MIT"] | 5 | 2018-12-22T14:49:13.000Z | 2022-01-13T07:21:46.000Z | qt-creator-opensource-src-4.6.1/scripts/checkInstalledFiles.py | kevinlq/Qt-Creator-Opensource-Study | b8cadff1f33f25a5d4ef33ed93f661b788b1ba0f | ["MIT"] | null | null | null | qt-creator-opensource-src-4.6.1/scripts/checkInstalledFiles.py | kevinlq/Qt-Creator-Opensource-Study | b8cadff1f33f25a5d4ef33ed93f661b788b1ba0f | ["MIT"] | 8 | 2018-07-17T03:55:48.000Z | 2021-12-22T06:37:53.000Z |
#!/usr/bin/env python
############################################################################
#
# Copyright (C) 2016 The Qt Company Ltd.
# Contact: https://www.qt.io/licensing/
#
# This file is part of Qt Creator.
#
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and The Qt Company. For licensing terms
# and conditions see https://www.qt.io/terms-conditions. For further
# information use the contact form at https://www.qt.io/contact-us.
#
# GNU General Public License Usage
# Alternatively, this file may be used under the terms of the GNU
# General Public License version 3 as published by the Free Software
# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
# included in the packaging of this file. Please review the following
# information to ensure the GNU General Public License requirements will
# be met: https://www.gnu.org/licenses/gpl-3.0.html.
#
############################################################################
import os
import sys
import stat
import difflib
import inspect
import getopt
if __name__ == "__main__":
main()
| 31.504425 | 136 | 0.608989 |
6a9da9d2fe8534cba2998ec7d3c2190abe55abec | 5,190 | py | Python | deep_sdf/workspace.py | huajian1069/non-convex_optimisation | cf4cd5070524c3f7e6b814fe9b85a15a06e7b8db | [
"MIT"
] | 2 | 2020-10-12T19:22:50.000Z | 2021-08-21T21:48:27.000Z | deep_sdf/workspace.py | huajian1069/non-convex_optimisation | cf4cd5070524c3f7e6b814fe9b85a15a06e7b8db | [
"MIT"
] | 13 | 2020-04-17T09:07:06.000Z | 2020-07-25T19:43:44.000Z | deep_sdf/workspace.py | huajian1069/non-convex-optimisation | cf4cd5070524c3f7e6b814fe9b85a15a06e7b8db | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright 2004-present Facebook. All Rights Reserved.
import json
import os
import torch
model_params_subdir = "ModelParameters"
optimizer_params_subdir = "OptimizerParameters"
latent_codes_subdir = "LatentCodes"
logs_filename = "Logs.pth"
reconstructions_subdir = "Reconstructions"
reconstruction_meshes_subdir = "Meshes"
reconstruction_codes_subdir = "Codes"
optimizations_subdir = "Optimizations"
optimizations_meshes_subdir = "Meshes"
optimizations_codes_subdir = "Codes"
specifications_filename = "specs.json"
data_source_map_filename = ".datasources.json"
evaluation_subdir = "Evaluation"
sdf_samples_subdir = "SdfSamples"
renders_subdir = "Renders"
surface_samples_subdir = "SurfaceSamples"
normalization_param_subdir = "NormalizationParameters"
training_meshes_subdir = "TrainingMeshes"
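# The module's helper functions were not included in this copy; a minimal
# sketch of the usual specs loader built on the constants above (assumption:
# it mirrors the DeepSDF workspace API):
def load_experiment_specifications(experiment_directory):
    filename = os.path.join(experiment_directory, specifications_filename)
    if not os.path.isfile(filename):
        raise Exception(
            "The experiment directory ({}) does not include specifications "
            'file "specs.json"'.format(experiment_directory)
        )
    return json.load(open(filename))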
| 25.566502 | 82 | 0.7158 |
6a9e779e59feb114fa5c597307d0f0ba536c3a82 | 1,571 | py | Python | EmoPy/EmoPy/examples/convolutional_dropout_model.py | Rahmatullina/FinalYearProject | 326f521b9f600dbbc7ace2223bd5aafc79b2267c | [
"Apache-2.0"
] | null | null | null | EmoPy/EmoPy/examples/convolutional_dropout_model.py | Rahmatullina/FinalYearProject | 326f521b9f600dbbc7ace2223bd5aafc79b2267c | [
"Apache-2.0"
] | 9 | 2020-09-26T01:09:35.000Z | 2022-02-10T01:32:30.000Z | EmoPy/EmoPy/examples/convolutional_dropout_model.py | Rahmatullina/FinalYearProject | 326f521b9f600dbbc7ace2223bd5aafc79b2267c | [
"Apache-2.0"
] | null | null | null | from EmoPy.src.fermodel import FERModel
from EmoPy.src.directory_data_loader import DirectoryDataLoader
from EmoPy.src.csv_data_loader import CSVDataLoader
from EmoPy.src.data_generator import DataGenerator
from EmoPy.src.neuralnets import ConvolutionalNNDropout
from sklearn.model_selection import train_test_split
import numpy as np
from pkg_resources import resource_filename,resource_exists
validation_split = 0.15
target_dimensions = (48, 48)
channels = 1
verbose = True
print('--------------- Convolutional Dropout Model -------------------')
print('Loading data...')
directory_path = resource_filename('EmoPy.examples','image_data/sample_image_directory')
data_loader = DirectoryDataLoader(datapath=directory_path, validation_split=validation_split)
dataset = data_loader.load_data()
if verbose:
dataset.print_data_details()
print('Preparing training/testing data...')
train_images, train_labels = dataset.get_training_data()
train_gen = DataGenerator().fit(train_images, train_labels)
test_images, test_labels = dataset.get_test_data()
test_gen = DataGenerator().fit(test_images, test_labels)
print('Training net...')
model = ConvolutionalNNDropout(target_dimensions, channels, dataset.get_emotion_index_map(), verbose=True)
model.fit_generator(train_gen.generate(target_dimensions, batch_size=5),
test_gen.generate(target_dimensions, batch_size=5),
epochs=15)
# Save model configuration
# model.export_model('output/conv2d_model.json','output/conv2d_weights.h5',"output/conv2d_emotion_map.json", emotion_map)
| 38.317073 | 121 | 0.789306 |
6a9f3ae293c23801e109a6d38cb1d3e6cf12238a | 189 | py | Python | ENV/lib/python3.6/site-packages/pyramid_jinja2/tests/extensions.py | captain-c00keys/pyramid-stocks | 0acf3363a6a7ee61cd41b855f43c9d6f9582ae6a | [
"MIT"
] | null | null | null | ENV/lib/python3.6/site-packages/pyramid_jinja2/tests/extensions.py | captain-c00keys/pyramid-stocks | 0acf3363a6a7ee61cd41b855f43c9d6f9582ae6a | [
"MIT"
] | null | null | null | ENV/lib/python3.6/site-packages/pyramid_jinja2/tests/extensions.py | captain-c00keys/pyramid-stocks | 0acf3363a6a7ee61cd41b855f43c9d6f9582ae6a | [
"MIT"
] | null | null | null | from jinja2 import nodes
from jinja2.ext import Extension
| 27 | 73 | 0.746032 |
6a9f71d63576d36e576c5ed1a561ba09b6a33e88 | 45,622 | py | Python | deepstream_ignition_usb_yolo.py | valdivj/Deepstream-IGN-Maker-YOLO | f38ece731e9797a525da932c3da2de77e48f45af | [
"Unlicense"
] | 18 | 2021-02-09T11:07:57.000Z | 2022-03-16T12:35:34.000Z | deepstream_ignition_usb_yolo.py | valdivj/Deepstream-IGN-Maker-YOLO | f38ece731e9797a525da932c3da2de77e48f45af | [
"Unlicense"
] | null | null | null | deepstream_ignition_usb_yolo.py | valdivj/Deepstream-IGN-Maker-YOLO | f38ece731e9797a525da932c3da2de77e48f45af | [
"Unlicense"
] | 3 | 2021-02-11T00:23:56.000Z | 2021-11-16T02:15:37.000Z | #!/usr/bin/env python3
################################################################################
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
################################################################################
import sys
sys.path.append('../')
sys.path.insert(0, "../../../client_libraries/python/")
import paho.mqtt.client as mqtt
import sparkplug_b as sparkplug
import time
import time, threading
import random
import string
import gi
gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst
from common.is_aarch_64 import is_aarch64
from common.bus_call import bus_call
from sparkplug_b import *
import pyds
# Application Variables
serverUrl = "localhost"
myGroupId = "Sparkplug B Devices"
myNodeName = "NVIDIA"
myDeviceName = "XavierNX"
publishPeriod = 5000
myUsername = "admin"
myPassword = "changeme"
client = mqtt.Client(serverUrl, 1883, 60)
WAIT_SECONDS = 1
frame_numberx = 0
num_rectsx = 0
counter1 = 0
counter2 = 0
Object1 = 0
Object2 = 0
Object3 = 0
Object4 = 0
Object5 = 0
Object6 = 0
Object7 = 0
Object8 = 0
Object9 = 0
Object10 = 0
newValue1 = 0
newValue2 = 0
newValue3 = 0
newValue4 = 0
newValue5 = 0
newValue6 = 0
newValue7 = 0
newValue8 = 0
newValue9 = 0
newValue10 = 0
MAX_DISPLAY_LEN=64
PGIE_CLASS_ID_TOOTHBRUSH = 79
PGIE_CLASS_ID_HAIR_DRYER = 78
PGIE_CLASS_ID_TEDDY_BEAR = 77
PGIE_CLASS_ID_SCISSORS = 76
PGIE_CLASS_ID_VASE = 75
PGIE_CLASS_ID_CLOCK = 74
PGIE_CLASS_ID_BOOK = 73
PGIE_CLASS_ID_REFRIGERATOR = 72
PGIE_CLASS_ID_SINK = 71
PGIE_CLASS_ID_TOASTER = 70
PGIE_CLASS_ID_OVEN = 69
PGIE_CLASS_ID_MICROWAVE = 68
PGIE_CLASS_ID_CELL_PHONE = 67
PGIE_CLASS_ID_KEYBOARD = 66
PGIE_CLASS_ID_REMOTE = 65
PGIE_CLASS_ID_MOUSE = 64
PGIE_CLASS_ID_LAPTOP = 63
PGIE_CLASS_ID_TVMONITOR = 62
PGIE_CLASS_ID_TOILET = 61
PGIE_CLASS_ID_DININGTABLE= 60
PGIE_CLASS_ID_BED = 59
PGIE_CLASS_ID_POTTEDPLANT = 58
PGIE_CLASS_ID_SOFA = 57
PGIE_CLASS_ID_CHAIR = 56
PGIE_CLASS_ID_CAKE = 55
PGIE_CLASS_ID_DONUT = 54
PGIE_CLASS_ID_PIZZA = 53
PGIE_CLASS_ID_HOT_DOG = 52
PGIE_CLASS_ID_CARROT = 51
PGIE_CLASS_ID_BROCCOLI = 50
PGIE_CLASS_ID_ORANGE = 49
PGIE_CLASS_ID_SANDWICH = 48
PGIE_CLASS_ID_APPLE = 47
PGIE_CLASS_ID_BANANA = 46
PGIE_CLASS_ID_BOWL = 45
PGIE_CLASS_ID_SPOON = 44
PGIE_CLASS_ID_KNIFE = 43
PGIE_CLASS_ID_FORK = 42
PGIE_CLASS_ID_CUP = 41
PGIE_CLASS_ID_WINE_GLASS = 40
PGIE_CLASS_ID_BOTTLE = 39
PGIE_CLASS_ID_TENNIS_RACKET = 38
PGIE_CLASS_ID_SURFBOARD = 37
PGIE_CLASS_ID_SKATEBOARD = 36
PGIE_CLASS_ID_BASEBALL_GLOVE = 35
PGIE_CLASS_ID_BASEBALL_BAT = 34
PGIE_CLASS_ID_KITE = 33
PGIE_CLASS_ID_SPORTS_BALL = 32
PGIE_CLASS_ID_SNOWBOARD = 31
PGIE_CLASS_ID_SKIS = 30
PGIE_CLASS_ID_FRISBEE = 29
PGIE_CLASS_ID_SUITCASE = 28
PGIE_CLASS_ID_TIE = 27
PGIE_CLASS_ID_HANDBAG = 26
PGIE_CLASS_ID_UMBRELLA = 25
PGIE_CLASS_ID_BACKPACK = 24
PGIE_CLASS_ID_GIRAFFE = 23
PGIE_CLASS_ID_ZEBRA = 22
PGIE_CLASS_ID_BEAR = 21
PGIE_CLASS_ID_ELEPHANT = 20
PGIE_CLASS_ID_COW = 19
PGIE_CLASS_ID_SHEEP = 18
PGIE_CLASS_ID_HORSE = 17
PGIE_CLASS_ID_DOG = 16
PGIE_CLASS_ID_CAT = 15
PGIE_CLASS_ID_BIRD = 14
PGIE_CLASS_ID_BENCH = 13
PGIE_CLASS_ID_PARKING_METER = 12
PGIE_CLASS_ID_STOP_SIGN = 11
PGIE_CLASS_ID_FIRE_HYDRANT = 10
PGIE_CLASS_ID_TRAFFIC_LIGHT = 9
PGIE_CLASS_ID_BOAT = 8
PGIE_CLASS_ID_TRUCK = 7
PGIE_CLASS_ID_TRAIN = 6
PGIE_CLASS_ID_BUS = 5
PGIE_CLASS_ID_AEROPLANE = 4
PGIE_CLASS_ID_MOTORBIKE = 3
PGIE_CLASS_ID_VEHICLE = 2
PGIE_CLASS_ID_BICYCLE = 1
PGIE_CLASS_ID_PERSON = 0
pgie_classes_str= ["Toothbrush", "Hair dryer", "Teddy bear","Scissors","Vase", "Clock", "Book","Refrigerator", "Sink", "Toaster","Oven","Microwave", "Cell phone", "Keyboard","Remote", "Mouse", "Laptop","Tvmonitor","Toilet", "Diningtable", "Bed","Pottedplant", "Sofa", "Chair","Cake","Donut", "Pizza", "Hot dog","Carrot", "Broccli", "Orange","Sandwich","Apple", "Banana", "Bowl","Spoon", "Knife", "Fork","Cup","Wine Glass", "Bottle", "Tennis racket","Surfboard", "Skateboard", "Baseball glove","Baseball bat","Kite", "Sports ball", "Snowboard","Skis", "Frisbee", "Suitcase","Tie","Handbag", "Umbrella", "Backpack","Giraffe", "Zebra", "Bear","Elephant","Cow", "Sheep", "Horse","Dog", "Cat", "Bird","Bench","Parking meter", "Stop sign", "Fire hydrant","Traffic light", "Boat", "Truck","Train","Bus", "Areoplane", "Motorbike","Car", "Bicycle", "Person"]
######################################################################
# The callback for when the client receives a CONNACK response from the server.
######################################################################
######################################################################
######################################################################
# The callback for when a PUBLISH message is received from the server.
######################################################################
#####################################################################
######################################################################
######################################################################
# Publish the BIRTH certificates
######################################################################
######################################################################
######################################################################
# Publish the NBIRTH certificate
######################################################################
######################################################################
######################################################################
# Publish the DBIRTH certificate
######################################################################
######################################################################
######################################################################
if __name__ == '__main__':
sys.exit(main(sys.argv))
| 46.410987 | 849 | 0.645829 |
6a9fcd8ecc089595a2ffc3a48b4ee67000ac218d | 799 | py | Python | src/pyams_i18n/tests/__init__.py | Py-AMS/pyams-i18n | dbb3953302311977653145385af02e4d1ae41431 | [
"ZPL-2.1"
] | null | null | null | src/pyams_i18n/tests/__init__.py | Py-AMS/pyams-i18n | dbb3953302311977653145385af02e4d1ae41431 | [
"ZPL-2.1"
] | null | null | null | src/pyams_i18n/tests/__init__.py | Py-AMS/pyams-i18n | dbb3953302311977653145385af02e4d1ae41431 | [
"ZPL-2.1"
] | null | null | null | #
# Copyright (c) 2015-2019 Thierry Florac <tflorac AT ulthar.net>
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
"""
Generic test cases for pyams_i18n doctests
"""
__docformat__ = 'restructuredtext'
import os
import sys
def get_package_dir(value):
"""Get package directory"""
package_dir = os.path.split(value)[0]
if package_dir not in sys.path:
sys.path.append(package_dir)
return package_dir
| 26.633333 | 75 | 0.740926 |
6a9fd697438b33b3c4d1fb77bd83291ab92dfcca | 1,811 | py | Python | tests/test_custom_experts.py | protagohhz/hivemind | 487fb68feea4d27ede0afdef211f6edc889b1a9e | [
"MIT"
] | 1,026 | 2020-06-04T05:02:30.000Z | 2022-03-29T13:05:04.000Z | tests/test_custom_experts.py | mryab/hivemind | 595b831bcaac6b4d8da215de70b8138ac548c562 | [
"MIT"
] | 301 | 2020-04-04T14:26:49.000Z | 2022-03-19T15:25:39.000Z | tests/test_custom_experts.py | learning-at-home/tesseract | c6b2b2d84ccfc890314a2bfece8eef238372d410 | [
"MIT"
] | 89 | 2020-04-16T19:39:29.000Z | 2022-03-25T17:32:43.000Z | import os
import pytest
import torch
from hivemind import RemoteExpert
from hivemind.moe.server import background_server
CUSTOM_EXPERTS_PATH = os.path.join(os.path.dirname(__file__), "test_utils", "custom_networks.py")
| 27.439394 | 97 | 0.600773 |
6aa02482ee4345f8d62c98b8785e029ed85945dd | 1,639 | py | Python | tqsdk/demo/example/momentum.py | boyscout2008/tqsdk-python | 79496a938a44f79ea9164569637509d0cc7db70a | [
"Apache-2.0"
] | null | null | null | tqsdk/demo/example/momentum.py | boyscout2008/tqsdk-python | 79496a938a44f79ea9164569637509d0cc7db70a | [
"Apache-2.0"
] | null | null | null | tqsdk/demo/example/momentum.py | boyscout2008/tqsdk-python | 79496a938a44f79ea9164569637509d0cc7db70a | [
"Apache-2.0"
] | 1 | 2020-11-20T01:19:11.000Z | 2020-11-20T01:19:11.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = "Ringo"
'''
()
: https://www.shinnytech.com/blog/momentum-strategy/
: , /
'''
from tqsdk import TqAccount, TqApi, TargetPosTask
# ,NK
SYMBOL = "SHFE.au1912"
N = 15
api = TqApi()
klines = api.get_kline_serial(SYMBOL, 60*60*24, N)
quote = api.get_quote(SYMBOL)
target_pos = TargetPosTask(api, SYMBOL)
position = api.get_position(SYMBOL)
# ARN-1Kar
ar = AR(klines)
print("")
while True:
api.wait_update()
# Kar
if api.is_changing(klines.iloc[-1], "datetime"):
ar = AR(klines)
print("", ar)
#
if api.is_changing(quote, "last_price"):
#
if position.pos_long == 0 and position.pos_short == 0:
# ar110150
if 110 < ar < 150:
print("110150")
target_pos.set_target_volume(100)
# ar5090
elif 50 < ar < 90:
print("5090")
target_pos.set_target_volume(-100)
# ar90ar110
elif (position.pos_long > 0 and ar < 90) or (position.pos_short > 0 and ar > 110):
print("")
target_pos.set_target_volume(0)
| 26.015873 | 90 | 0.621721 |
6aa1d7c9f54267d6e42717a153600f7e111a7f9f | 10,323 | py | Python | color_transfer/__init__.py | AdamSpannbauer/color_transfer | 155e0134615f35bf19bf32f4cacf056603604914 | [
"MIT"
] | null | null | null | color_transfer/__init__.py | AdamSpannbauer/color_transfer | 155e0134615f35bf19bf32f4cacf056603604914 | [
"MIT"
] | null | null | null | color_transfer/__init__.py | AdamSpannbauer/color_transfer | 155e0134615f35bf19bf32f4cacf056603604914 | [
"MIT"
] | 1 | 2020-11-05T17:35:14.000Z | 2020-11-05T17:35:14.000Z | # import the necessary packages
import numpy as np
import cv2
import imutils
def color_transfer(source, target, clip=True, preserve_paper=True):
"""
Transfers the color distribution from the source to the target
image using the mean and standard deviations of the L*a*b*
color space.
This implementation is (loosely) based on to the "Color Transfer
between Images" paper by Reinhard et al., 2001.
Parameters:
-------
source: NumPy array
OpenCV image in BGR color space (the source image)
target: NumPy array
OpenCV image in BGR color space (the target image)
clip: Should components of L*a*b* image be scaled by np.clip before
converting back to BGR color space?
If False then components will be min-max scaled appropriately.
Clipping will keep target image brightness truer to the input.
Scaling will adjust image brightness to avoid washed out portions
in the resulting color transfer that can be caused by clipping.
preserve_paper: Should color transfer strictly follow methodology
laid out in original paper? The method does not always produce
aesthetically pleasing results.
If False then L*a*b* components will scaled using the reciprocal of
the scaling factor proposed in the paper. This method seems to produce
more consistently aesthetically pleasing results
Returns:
-------
transfer: NumPy array
OpenCV image (w, h, 3) NumPy array (uint8)
"""
# convert the images from the RGB to L*ab* color space, being
# sure to utilizing the floating point data type (note: OpenCV
# expects floats to be 32-bit, so use that instead of 64-bit)
source = cv2.cvtColor(source, cv2.COLOR_BGR2LAB).astype("float32")
target = cv2.cvtColor(target, cv2.COLOR_BGR2LAB).astype("float32")
# compute color statistics for the source and target images
(lMeanSrc, lStdSrc, aMeanSrc, aStdSrc, bMeanSrc, bStdSrc) = image_stats(source)
(lMeanTar, lStdTar, aMeanTar, aStdTar, bMeanTar, bStdTar) = image_stats(target)
# subtract the means from the target image
(l, a, b) = cv2.split(target)
l -= lMeanTar
a -= aMeanTar
b -= bMeanTar
if preserve_paper:
# scale by the standard deviations using paper proposed factor
l = (lStdTar / lStdSrc) * l
a = (aStdTar / aStdSrc) * a
b = (bStdTar / bStdSrc) * b
else:
# scale by the standard deviations using reciprocal of paper proposed factor
l = (lStdSrc / lStdTar) * l
a = (aStdSrc / aStdTar) * a
b = (bStdSrc / bStdTar) * b
# add in the source mean
l += lMeanSrc
a += aMeanSrc
b += bMeanSrc
# clip/scale the pixel intensities to [0, 255] if they fall
# outside this range
l = _scale_array(l, clip=clip)
a = _scale_array(a, clip=clip)
b = _scale_array(b, clip=clip)
# merge the channels together and convert back to the RGB color
# space, being sure to utilize the 8-bit unsigned integer data
# type
transfer = cv2.merge([l, a, b])
transfer = cv2.cvtColor(transfer.astype("uint8"), cv2.COLOR_LAB2BGR)
# return the color transferred image
return transfer
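# chi2_distance() is used below but was not included in this copy; a minimal
# sketch of the usual symmetric chi-squared histogram distance (assumption:
# it matches the helper the repository defines):
def chi2_distance(hist_a, hist_b, eps=1e-10):
    # small eps avoids division by zero on empty histogram bins
    return 0.5 * np.sum(((hist_a - hist_b) ** 2) / (hist_a + hist_b + eps))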
def auto_color_transfer(source, target):
"""Pick color_transfer result truest to source image color
Applies color_transfer with all possible combinations of the clip & preserve_paper arguments.
    A chi-squared distance is computed between the HSV histograms of each result and the source image.
    The best_result that minimizes this distance is returned, as well as a montage of all candidate results.
Parameters:
-------
source: NumPy array
OpenCV image in BGR color space (the source image)
target: NumPy array
OpenCV image in BGR color space (the target image)
Returns:
-------
tuple: (best_result, comparison)
best_result: NumPy array
        result whose HSV histogram has the smallest chi-squared distance to the source image's histogram
comparison: NumPy array
image showing the results of all combinations of color_transfer options
"""
    # compute the HSV histogram of the source image for comparison
hsv_source = cv2.cvtColor(source, cv2.COLOR_BGR2HSV)
hsv_hist_src = cv2.calcHist([hsv_source], [0, 1, 2], None,
[8, 8, 8], [0, 256, 0, 256, 0, 256])
# iterate through all 4 options for toggling color transfer
bools = [True, False]
candidates = []
best_result = None
best_dist = float('inf')
for clip in bools:
for preserve_paper in bools:
# create candidate image from options of this iteration
candidate = color_transfer(source, target, clip, preserve_paper)
            # compute the HSV histogram of the candidate image for comparison
hsv_candidate = cv2.cvtColor(candidate, cv2.COLOR_BGR2HSV)
hsv_hist_cand = cv2.calcHist([hsv_candidate], [0, 1, 2], None,
[8, 8, 8], [0, 256, 0, 256, 0, 256])
# calc chi square dist
chi2_dist = chi2_distance(hsv_hist_src, hsv_hist_cand)
            # propose new truest result if we found a new smallest distance
            if chi2_dist < best_dist:
                best_dist = chi2_dist
                best_result = candidate[:]
            candidates.append(candidate)
# build 2 by 2 image matrix of all candidates for comparison
comparison = np.hstack((np.vstack(candidates[:2]),
np.vstack(candidates[2:])))
# add border annotations showing values of params for each output
comparison = _bool_matrix_border(comparison)
return best_result, comparison
def _bool_matrix_border(comparison_image):
"""Apply table formatting for comparison of color_transfer options
Parameters:
-------
target: NumPy array
OpenCV image in BGR color space (the comparison image produced in auto_color_transfer)
Returns:
-------
comparison: NumPy array
OpenCV image in BGR color space with borders applied to easily compare the different
results of the auto_color_transfer
"""
# 200 seems to work well as border size
border_size = 200
# put black border on top and left of input image
h, w = comparison_image.shape[:2]
top = np.zeros(w * border_size, dtype='uint8').reshape(border_size, w)
left = np.zeros((h + border_size) * border_size, dtype='uint8').reshape(h + border_size, border_size)
top = cv2.cvtColor(top, cv2.COLOR_GRAY2BGR)
left = cv2.cvtColor(left, cv2.COLOR_GRAY2BGR)
bordered_comparison_image = np.vstack((top, comparison_image))
bordered_comparison_image = np.hstack((left, bordered_comparison_image))
# add text for clip arg options to top border
top_title_loc = (border_size, 75)
top_true_loc = (border_size, 190)
top_false_loc = (int(border_size + w / 2), 190)
cv2.putText(bordered_comparison_image, 'Clip', top_title_loc,
cv2.FONT_HERSHEY_SIMPLEX, 3, (255, 255, 255), 2)
cv2.putText(bordered_comparison_image, 'True', top_true_loc,
cv2.FONT_HERSHEY_SIMPLEX, 2, (255, 255, 255), 2)
cv2.putText(bordered_comparison_image, 'False', top_false_loc,
cv2.FONT_HERSHEY_SIMPLEX, 2, (255, 255, 255), 2)
# rotate 90 degrees for writing text to left border
bordered_comparison_image = imutils.rotate_bound(bordered_comparison_image, 90)
# add text for preserve paper arg options to left border
top_title_loc = (5, 75)
top_true_loc = (5 + int(h / 2), 190)
top_false_loc = (5, 190)
cv2.putText(bordered_comparison_image, 'Preserve Paper', top_title_loc,
cv2.FONT_HERSHEY_SIMPLEX, 3, (255, 255, 255), 2)
cv2.putText(bordered_comparison_image, 'True', top_true_loc,
cv2.FONT_HERSHEY_SIMPLEX, 2, (255, 255, 255), 2)
cv2.putText(bordered_comparison_image, 'False', top_false_loc,
cv2.FONT_HERSHEY_SIMPLEX, 2, (255, 255, 255), 2)
# rotate -90 degrees to return image in correct orientation
bordered_comparison_image = imutils.rotate_bound(bordered_comparison_image, -90)
return bordered_comparison_image
def image_stats(image):
"""
Parameters:
-------
image: NumPy array
OpenCV image in L*a*b* color space
Returns:
-------
Tuple of mean and standard deviations for the L*, a*, and b*
channels, respectively
"""
# compute the mean and standard deviation of each channel
(l, a, b) = cv2.split(image)
(lMean, lStd) = (l.mean(), l.std())
(aMean, aStd) = (a.mean(), a.std())
(bMean, bStd) = (b.mean(), b.std())
# return the color statistics
return lMean, lStd, aMean, aStd, bMean, bStd
def _min_max_scale(arr, new_range=(0, 255)):
"""
Perform min-max scaling to a NumPy array
Parameters:
-------
arr: NumPy array to be scaled to [new_min, new_max] range
new_range: tuple of form (min, max) specifying range of
transformed array
Returns:
-------
NumPy array that has been scaled to be in
[new_range[0], new_range[1]] range
"""
# get array's current min and max
mn = arr.min()
mx = arr.max()
# check if scaling needs to be done to be in new_range
if mn < new_range[0] or mx > new_range[1]:
# perform min-max scaling
scaled = (new_range[1] - new_range[0]) * (arr - mn) / (mx - mn) + new_range[0]
else:
# return array if already in range
scaled = arr
return scaled
def _scale_array(arr, clip=True):
"""
Trim NumPy array values to be in [0, 255] range with option of
clipping or scaling.
Parameters:
-------
arr: array to be trimmed to [0, 255] range
clip: should array be scaled by np.clip? if False then input
array will be min-max scaled to range
[max([arr.min(), 0]), min([arr.max(), 255])]
Returns:
-------
NumPy array that has been scaled to be in [0, 255] range
"""
if clip:
scaled = np.clip(arr, 0, 255)
else:
scale_range = (max([arr.min(), 0]), min([arr.max(), 255]))
scaled = _min_max_scale(arr, new_range=scale_range)
return scaled
| 36.477032 | 105 | 0.657173 |
6aa21222a53d441e6c157bad6965004f0771b6e4 | 250 | py | Python | Python/Tree/TestCreateTreeLibraryImport.py | zseen/hackerrank-challenges | c154f039f58073ee3d94d012462c7055e68784b2 | [
"MIT"
] | null | null | null | Python/Tree/TestCreateTreeLibraryImport.py | zseen/hackerrank-challenges | c154f039f58073ee3d94d012462c7055e68784b2 | [
"MIT"
] | null | null | null | Python/Tree/TestCreateTreeLibraryImport.py | zseen/hackerrank-challenges | c154f039f58073ee3d94d012462c7055e68784b2 | [
"MIT"
] | null | null | null | from Library.CreateATree import CreateATree
tree = CreateATree.BinarySearchTree()
nodesList = [4, 5, 1, 3, 2]
for value in nodesList:
    tree.insert(value)
#tree.printInorder()
tree.printPreorder()
#tree.printPostorder()
| 19.230769 | 43 | 0.732 |
6aa25f7e4d64679c81ca1e60dffb6ddf922f9c4c | 522 | py | Python | application/siteApp/urls.py | Marcelotsvaz/vaz-projects | 8ccc0bf8d25f9276714e1e5ecb0a4e80f07442b4 | [
"Unlicense"
] | null | null | null | application/siteApp/urls.py | Marcelotsvaz/vaz-projects | 8ccc0bf8d25f9276714e1e5ecb0a4e80f07442b4 | [
"Unlicense"
] | null | null | null | application/siteApp/urls.py | Marcelotsvaz/vaz-projects | 8ccc0bf8d25f9276714e1e5ecb0a4e80f07442b4 | [
"Unlicense"
] | null | null | null | #
# VAZ Projects
#
#
# Author: Marcelo Tellier Sartori Vaz <[email protected]>
from django.urls import path
from . import views
app_name = 'siteApp'
urlpatterns = [
path( '', views.Home.as_view(), name = 'home' ),
path( 'about-me', views.About_me.as_view(), name = 'about_me' ),
path( 'search', views.Search.as_view(), name = 'search' ),
path( 'search/page/<int:page>', views.Search.as_view(), name = 'search' ),
path( 'sitemap.xml', views.Sitemap.as_view(), name = 'sitemap' ),
] | 23.727273 | 77 | 0.628352 |
6aa2cafc9ca0f9283336142e3b81fea44a3587b3 | 1,286 | py | Python | Classes/ServiceBase.py | tkeske/SMS-Fetcher | 7b3ec0ea4517ad11164b8e2d7ee2c60d2a9f0ed2 | [
"BSD-3-Clause"
] | null | null | null | Classes/ServiceBase.py | tkeske/SMS-Fetcher | 7b3ec0ea4517ad11164b8e2d7ee2c60d2a9f0ed2 | [
"BSD-3-Clause"
] | null | null | null | Classes/ServiceBase.py | tkeske/SMS-Fetcher | 7b3ec0ea4517ad11164b8e2d7ee2c60d2a9f0ed2 | [
"BSD-3-Clause"
] | null | null | null | '''
@author Tom Keske
@since 10.8.2019
'''
import sys
from jnius import autoclass
from Conf.Conf import * | 32.15 | 98 | 0.667963 |
6aa359b860399eb8f9859835f9d9ac0f53b4de56 | 723 | py | Python | api/queue/__init__.py | sofia008/api-redis-queue | 8d65665c8a9f44990565baa8c7ba43d7f01425d3 | [
"Apache-2.0"
] | null | null | null | api/queue/__init__.py | sofia008/api-redis-queue | 8d65665c8a9f44990565baa8c7ba43d7f01425d3 | [
"Apache-2.0"
] | null | null | null | api/queue/__init__.py | sofia008/api-redis-queue | 8d65665c8a9f44990565baa8c7ba43d7f01425d3 | [
"Apache-2.0"
] | null | null | null | # api/queue/__init__.py
import os
from flask import Flask
from flask_bootstrap import Bootstrap
# instantiate the extensions
bootstrap = Bootstrap()
| 19.026316 | 51 | 0.697095 |
6aa447f55a379751c7664d4eb5818450b99462c4 | 2,183 | py | Python | tests/test_engine.py | Foxboron/python-adblock | 50b2ddba9f7b237b38c848c7d4a1637917444924 | [
"Apache-2.0",
"MIT"
] | 35 | 2020-06-26T21:06:13.000Z | 2022-03-19T10:50:35.000Z | tests/test_engine.py | Foxboron/python-adblock | 50b2ddba9f7b237b38c848c7d4a1637917444924 | [
"Apache-2.0",
"MIT"
] | 34 | 2020-04-27T02:59:40.000Z | 2022-03-06T20:55:00.000Z | tests/test_engine.py | Foxboron/python-adblock | 50b2ddba9f7b237b38c848c7d4a1637917444924 | [
"Apache-2.0",
"MIT"
] | 6 | 2020-12-22T21:56:02.000Z | 2022-02-16T02:13:21.000Z | import adblock
import pytest
SMALL_FILTER_LIST = """
||wikipedia.org^
||old.reddit.com^
||lobste.rs^
"""
| 29.106667 | 78 | 0.724233 |
6aa4b1f3d6675f767aaa7329c04a4c62bcde0e63 | 232 | py | Python | v1/status_updates/urls.py | DucPhamTV/Bank | 4905ec7d63ef4daafe2119bf6b32928d4db2d4f2 | [
"MIT"
] | 94 | 2020-07-12T23:08:47.000Z | 2022-03-05T14:00:01.000Z | v1/status_updates/urls.py | DucPhamTV/Bank | 4905ec7d63ef4daafe2119bf6b32928d4db2d4f2 | [
"MIT"
] | 84 | 2020-07-13T23:30:50.000Z | 2022-03-15T15:47:46.000Z | v1/status_updates/urls.py | DucPhamTV/Bank | 4905ec7d63ef4daafe2119bf6b32928d4db2d4f2 | [
"MIT"
] | 63 | 2020-07-13T02:46:51.000Z | 2021-11-26T09:29:29.000Z | from rest_framework.routers import SimpleRouter
from .views.upgrade_notice import UpgradeNoticeViewSet
router = SimpleRouter(trailing_slash=False)
router.register('upgrade_notice', UpgradeNoticeViewSet, basename='upgrade_notice')
| 33.142857 | 82 | 0.857759 |
6aa55947380a65f7c24093ff3b3feee2ac3b5948 | 1,048 | py | Python | data_structures/stack/largest_rectangle_area_in_histogram.py | ruler30cm/python-ds | f84605c5b746ea1d46de3d00b86f5fba399445c7 | [
"MIT"
] | 1,723 | 2019-07-30T07:06:22.000Z | 2022-03-31T15:22:22.000Z | data_structures/stack/largest_rectangle_area_in_histogram.py | ruler30cm/python-ds | f84605c5b746ea1d46de3d00b86f5fba399445c7 | [
"MIT"
] | 213 | 2019-10-06T08:07:47.000Z | 2021-10-04T15:38:36.000Z | data_structures/stack/largest_rectangle_area_in_histogram.py | ruler30cm/python-ds | f84605c5b746ea1d46de3d00b86f5fba399445c7 | [
"MIT"
] | 628 | 2019-10-06T10:26:25.000Z | 2022-03-31T01:41:00.000Z | '''
Largest rectangle area in a histogram:
Find the largest rectangular area possible in a given histogram, where the rectangle can be made of a number of contiguous bars.
For simplicity, assume that all bars have the same width and that the width is 1 unit.
'''
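# The implementation was not included in this copy; below is a standard
# stack-based O(n) solution (assumption: equivalent to the original
# max_area_histogram, which was elided).
def max_area_histogram(histogram):
    stack = []      # indices of bars, heights non-decreasing from bottom to top
    max_area = 0
    index = 0
    while index < len(histogram):
        if not stack or histogram[stack[-1]] <= histogram[index]:
            stack.append(index)
            index += 1
        else:
            top = stack.pop()
            # the popped bar stretches from just after the new stack top
            # up to (but not including) the current index
            width = index if not stack else index - stack[-1] - 1
            max_area = max(max_area, histogram[top] * width)
    while stack:
        top = stack.pop()
        width = index if not stack else index - stack[-1] - 1
        max_area = max(max_area, histogram[top] * width)
    return max_area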
hist = [4, 7, 1, 8, 4, 9, 5]
print("Maximum area is",
max_area_histogram(hist))
| 28.324324 | 136 | 0.603053 |
6aa62343269180c72e1026d8bfdc9d3fa9196b1e | 7,448 | py | Python | gluon/contrib/pbkdf2_ctypes.py | Cwlowe/web2py | 6ae4c3c274be1026cbc45b0fcd8d1180c74b9070 | [
"BSD-3-Clause"
] | 9 | 2018-04-19T05:08:30.000Z | 2021-11-23T07:36:58.000Z | gluon/contrib/pbkdf2_ctypes.py | mohit3011/Quiz-Mate | 17988a623abde439aef2b43fc8dc3162b5cae15e | [
"BSD-3-Clause"
] | 98 | 2017-11-02T19:00:44.000Z | 2022-03-22T16:15:39.000Z | gluon/contrib/pbkdf2_ctypes.py | mohit3011/Quiz-Mate | 17988a623abde439aef2b43fc8dc3162b5cae15e | [
"BSD-3-Clause"
] | 9 | 2017-10-24T21:53:36.000Z | 2021-11-23T07:36:59.000Z | # -*- coding: utf-8 -*-
"""
pbkdf2_ctypes
~~~~~~
Fast pbkdf2.
This module implements pbkdf2 for Python using crypto lib from
openssl or commoncrypto.
Note: This module is intended as a plugin replacement of pbkdf2.py
by Armin Ronacher.
Git repository:
$ git clone https://github.com/michele-comitini/pbkdf2_ctypes.git
:copyright: Copyright (c) 2013: Michele Comitini <[email protected]>
:license: LGPLv3
"""
import ctypes
import ctypes.util
import hashlib
import platform
import os.path
import binascii
import sys
__all__ = ['pkcs5_pbkdf2_hmac', 'pbkdf2_bin', 'pbkdf2_hex']
__version__ = '0.99.3'
def _commoncrypto_pbkdf2(data, salt, iterations, digest, keylen):
"""Common Crypto compatibile wrapper
"""
c_hashfunc = ctypes.c_uint32(_commoncrypto_hashlib_to_crypto_map_get(digest))
c_pass = ctypes.c_char_p(data)
c_passlen = ctypes.c_size_t(len(data))
c_salt = ctypes.c_char_p(salt)
c_saltlen = ctypes.c_size_t(len(salt))
c_iter = ctypes.c_uint(iterations)
c_keylen = ctypes.c_size_t(keylen)
c_buff = ctypes.create_string_buffer(keylen)
crypto.CCKeyDerivationPBKDF.restype = ctypes.c_int
crypto.CCKeyDerivationPBKDF.argtypes = [ctypes.c_uint32,
ctypes.c_char_p,
ctypes.c_size_t,
ctypes.c_char_p,
ctypes.c_size_t,
ctypes.c_uint32,
ctypes.c_uint,
ctypes.c_char_p,
ctypes.c_size_t]
ret = crypto.CCKeyDerivationPBKDF(2, # hardcoded 2-> PBKDF2
c_pass, c_passlen,
c_salt, c_saltlen,
c_hashfunc,
c_iter,
c_buff,
c_keylen)
return (1 - ret, c_buff)
def _openssl_pbkdf2(data, salt, iterations, digest, keylen):
"""OpenSSL compatibile wrapper
"""
c_hashfunc = ctypes.c_void_p(_openssl_hashlib_to_crypto_map_get(digest))
c_pass = ctypes.c_char_p(data)
c_passlen = ctypes.c_int(len(data))
c_salt = ctypes.c_char_p(salt)
c_saltlen = ctypes.c_int(len(salt))
c_iter = ctypes.c_int(iterations)
c_keylen = ctypes.c_int(keylen)
c_buff = ctypes.create_string_buffer(keylen)
# PKCS5_PBKDF2_HMAC(const char *pass, int passlen,
# const unsigned char *salt, int saltlen, int iter,
# const EVP_MD *digest,
# int keylen, unsigned char *out);
crypto.PKCS5_PBKDF2_HMAC.argtypes = [ctypes.c_char_p, ctypes.c_int,
ctypes.c_char_p, ctypes.c_int,
ctypes.c_int, ctypes.c_void_p,
ctypes.c_int, ctypes.c_char_p]
crypto.PKCS5_PBKDF2_HMAC.restype = ctypes.c_int
err = crypto.PKCS5_PBKDF2_HMAC(c_pass, c_passlen,
c_salt, c_saltlen,
c_iter,
c_hashfunc,
c_keylen,
c_buff)
return (err, c_buff)
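# The digest-mapping helpers and the public wrappers declared in __all__ were
# not included in this copy; minimal sketches follow (assumption: compatible
# with the ctypes wrappers above, not the verbatim originals).
def _openssl_hashlib_to_crypto_map_get(hashfunc):
    # map a hashlib constructor to the matching EVP_* digest pointer
    hashlib_to_crypto_map = {hashlib.md5: 'EVP_md5',
                             hashlib.sha1: 'EVP_sha1',
                             hashlib.sha224: 'EVP_sha224',
                             hashlib.sha256: 'EVP_sha256',
                             hashlib.sha384: 'EVP_sha384',
                             hashlib.sha512: 'EVP_sha512'}
    name = hashlib_to_crypto_map.get(hashfunc)
    if name is None:
        raise ValueError('Unknown digest %s' % hashfunc)
    evp = getattr(crypto, name)
    evp.restype = ctypes.c_void_p
    return evp()
def _commoncrypto_hashlib_to_crypto_map_get(hashfunc):
    # kCCPRFHmacAlg* constants from CommonKeyDerivation.h
    hashlib_to_crypto_map = {hashlib.sha1: 1,
                             hashlib.sha224: 2,
                             hashlib.sha256: 3,
                             hashlib.sha384: 4,
                             hashlib.sha512: 5}
    prf = hashlib_to_crypto_map.get(hashfunc)
    if prf is None:
        raise ValueError('Unknown digest %s' % hashfunc)
    return prf
def pkcs5_pbkdf2_hmac(data, salt, iterations=1000, keylen=24, hashfunc=None):
    if hashfunc is None:
        hashfunc = hashlib.sha1
    err, c_buff = _pbkdf2_hmac(data, salt, iterations, hashfunc, keylen)
    if err != 0:
        raise ValueError('wrong parameters')
    return c_buff.raw[:keylen]
def pbkdf2_hex(data, salt, iterations=1000, keylen=24, hashfunc=None):
    return binascii.hexlify(pkcs5_pbkdf2_hmac(data, salt, iterations,
                                              keylen, hashfunc))
# binary output is exactly what pkcs5_pbkdf2_hmac returns
pbkdf2_bin = pkcs5_pbkdf2_hmac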
try: # check that we have proper OpenSSL or Common Crypto on the system.
system = platform.system()
if system == 'Windows':
if platform.architecture()[0] == '64bit':
libname = ctypes.util.find_library('libeay64')
if not libname:
raise OSError('Library not found')
crypto = ctypes.CDLL(libname)
else:
libname = ctypes.util.find_library('libeay32')
if not libname:
raise OSError('Library libeay32 not found.')
crypto = ctypes.CDLL(libname)
_pbkdf2_hmac = _openssl_pbkdf2
crypto.PKCS5_PBKDF2_HMAC # test compatibility
elif system == 'Darwin': # think different(TM)! i.e. break things!
if [int(x) for x in platform.mac_ver()[0].split('.')] < [10, 7, 0]:
raise OSError('OS X Version too old %s < 10.7.0' % platform.mac_ver()[0])
libname = ctypes.util.find_library('System')
if not libname:
raise OSError('Library not found')
crypto = ctypes.CDLL(os.path.basename(libname))
_pbkdf2_hmac = _commoncrypto_pbkdf2
else:
libname = ctypes.util.find_library('crypto')
if not libname:
raise OSError('Library crypto not found.')
crypto = ctypes.CDLL(os.path.basename(libname))
_pbkdf2_hmac = _openssl_pbkdf2
crypto.PKCS5_PBKDF2_HMAC # test compatibility
except (OSError, AttributeError):
_, e, _ = sys.exc_info()
raise ImportError('Cannot find a compatible cryptographic library '
'on your system. %s' % e)
if __name__ == '__main__':
try:
crypto.SSLeay_version.restype = ctypes.c_char_p
print(crypto.SSLeay_version(0))
except:
pass
import platform
if platform.python_version_tuple() < ('3', '0', '0'):
for h in [hashlib.sha1, hashlib.sha224, hashlib.sha256,
hashlib.sha384, hashlib.sha512]:
print(binascii.hexlify(pkcs5_pbkdf2_hmac(bytes('secret', 'utf-8') * 11,
bytes('salt', 'utf-8'),
hashfunc=h)))
| 38.194872 | 88 | 0.569683 |
6aa72ed7ab8eb40be3928ae652b97a0368992b42 | 2,389 | py | Python | auth_backend/src/key_op.py | cispa/bitahoy | ffc2004930a033cfb94d13671bc6068b473ce226 | [
"MIT"
] | null | null | null | auth_backend/src/key_op.py | cispa/bitahoy | ffc2004930a033cfb94d13671bc6068b473ce226 | [
"MIT"
] | null | null | null | auth_backend/src/key_op.py | cispa/bitahoy | ffc2004930a033cfb94d13671bc6068b473ce226 | [
"MIT"
] | 2 | 2021-12-30T16:48:15.000Z | 2022-01-14T14:21:15.000Z | import sys
import os
import psycopg2
import base64
from cryptography.hazmat.primitives import serialization, hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa
from cryptography.hazmat.backends import default_backend
import time
if len(sys.argv) < 2:
print("Please enter either create or remove as a argv[1]")
sys.exit(0)
with psycopg2.connect("dbname='auth_db' user='auth_db' host='authdb' [redacted-2]") as conn:
with conn.cursor() as cursor:
if sys.argv[1] == "generate":
#Load the key or generate a new one:
cursor.execute("CREATE TABLE IF NOT EXISTS key (key varchar(4096),time bigint UNIQUE PRIMARY KEY)")
privkey = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend())
pem = privkey.private_bytes(encoding=serialization.Encoding.PEM,format=serialization.PrivateFormat.TraditionalOpenSSL,encryption_algorithm=serialization.NoEncryption())
cursor.execute("INSERT INTO key (key,time) VALUES('"+str(pem.decode("utf-8"))+"',"+str(int(time.time()))+")")
conn.commit()
print("New key generated!")
elif sys.argv[1] == "generate_if_needed":
#Load the key or generate a new one:
cursor.execute("CREATE TABLE IF NOT EXISTS key (key varchar(4096),time bigint UNIQUE PRIMARY KEY)")
cursor.execute("SELECT * FROM key")
res = cursor.fetchall()
if len(res) == 0:
privkey = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend())
pem = privkey.private_bytes(encoding=serialization.Encoding.PEM,format=serialization.PrivateFormat.TraditionalOpenSSL,encryption_algorithm=serialization.NoEncryption())
cursor.execute("INSERT INTO key (key,time) VALUES('"+str(pem.decode("utf-8"))+"',"+str(int(time.time()))+")")
conn.commit()
print("New key generated, as database was empty!")
else:
print("Database has key ready!")
elif sys.argv[1] == "drop":
cursor.execute("DROP TABLE key")
conn.commit()
print("Dropped old keys")
else:
print("Invalid option! Try 'drop', 'generate' or 'generate_if_needed'...") | 45.942308 | 184 | 0.637505 |
6aa73c2ad6102f4cce8d4e90a98090c3f6fe5363 | 342 | py | Python | src/tools/types/obj.py | loongson-zn/build | d4bedebfa046b763c316e31c98b48ed2779741b9 | [
"BSL-1.0"
] | 215 | 2015-01-10T17:16:34.000Z | 2022-02-23T15:22:08.000Z | src/tools/types/obj.py | loongson-zn/build | d4bedebfa046b763c316e31c98b48ed2779741b9 | [
"BSL-1.0"
] | 594 | 2015-01-22T16:17:55.000Z | 2022-02-26T22:11:01.000Z | src/tools/types/obj.py | loongson-zn/build | d4bedebfa046b763c316e31c98b48ed2779741b9 | [
"BSL-1.0"
] | 302 | 2015-02-03T01:20:29.000Z | 2022-02-12T07:01:28.000Z | # Copyright David Abrahams 2004. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt)
from b2.build import type
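# The body of register() was not included in this copy; a minimal sketch
# (assumption: it registers the OBJ object-file type with its usual suffixes):
def register():
    type.register_type('OBJ', ['obj'], None, ['NT', 'CYGWIN'])
    type.register_type('OBJ', ['o'])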
register ()
| 28.5 | 69 | 0.692982 |
6aa7fd8436efabe5593a8174e9772f897fb7aec0 | 4,465 | py | Python | sympy/polys/tests/test_sqfreetools.py | eriknw/sympy | b7544e2bb74c011f6098a7e886fd77f41776c2c4 | [
"BSD-3-Clause"
] | 7 | 2015-01-14T06:55:33.000Z | 2018-08-11T14:43:52.000Z | sympy/polys/tests/test_sqfreetools.py | pbeltran/sympy-1 | 94f92b36731c2bebe6de1037c063c2a258a8a399 | [
"BSD-3-Clause"
] | 1 | 2018-02-19T04:56:04.000Z | 2018-02-19T04:56:04.000Z | sympy/polys/tests/test_sqfreetools.py | pbeltran/sympy-1 | 94f92b36731c2bebe6de1037c063c2a258a8a399 | [
"BSD-3-Clause"
] | 1 | 2016-04-24T14:39:22.000Z | 2016-04-24T14:39:22.000Z | """Tests for square-free decomposition algorithms and related tools. """
from sympy.polys.rings import ring
from sympy.polys.domains import FF, ZZ, QQ
from sympy.polys.polyclasses import DMP
from sympy.polys.specialpolys import f_polys
from sympy.utilities.pytest import raises
f_0, f_1, f_2, f_3, f_4, f_5, f_6 = f_polys()
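# The test functions were not included in this copy; a minimal sketch of the
# kind of check this module performs (assumption: illustrative rather than
# one of the original tests):
def test_dup_sqf_part():
    R, x = ring("x", ZZ)
    # x**3 - 3*x - 2 == (x + 1)**2 * (x - 2), so its square-free part
    # is (x + 1)*(x - 2) == x**2 - x - 2
    assert R.dup_sqf_part(x**3 - 3*x - 2) == x**2 - x - 2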
| 29.569536 | 100 | 0.519821 |
6aa848925fe885025486d711e7226e473656a954 | 1,377 | py | Python | ezno_convert/enums.py | ofersadan85/ezno_convert | 4c5cf7d41c72698e5486068673f170d968a9de27 | [
"MIT"
] | 2 | 2021-02-07T21:27:04.000Z | 2021-03-13T06:47:25.000Z | ezno_convert/enums.py | ofersadan85/ezno_convert | 4c5cf7d41c72698e5486068673f170d968a9de27 | [
"MIT"
] | 1 | 2021-02-10T05:45:00.000Z | 2021-02-10T05:45:00.000Z | ezno_convert/enums.py | ofersadan85/ezno_convert | 4c5cf7d41c72698e5486068673f170d968a9de27 | [
"MIT"
] | null | null | null | import enum
from typing import Union
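# The enum bodies were not included in this copy; a minimal sketch using the
# standard Office interop export constants (the member names are assumptions):
class PPT(enum.IntEnum):
    PDF = 32   # ppSaveAsPDF
    XPS = 33   # ppSaveAsXPS
class WORD(enum.IntEnum):
    PDF = 17   # wdFormatPDF
    XPS = 18   # wdFormatXPS
class XL(enum.IntEnum):
    PDF = 0    # xlTypePDF
    XPS = 1    # xlTypeXPS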
enum_types = Union[PPT, WORD, XL]
| 20.863636 | 114 | 0.611474 |
6aa897704d8b8b96376b6c78aa9de27ecec18071 | 378 | py | Python | app/django_first/news/migrations/0002_movies_year.py | vvuri/flask_pipeline | d3f283b8a6a6239e56d85e67dbe3edce55bcb980 | [
"MIT"
] | null | null | null | app/django_first/news/migrations/0002_movies_year.py | vvuri/flask_pipeline | d3f283b8a6a6239e56d85e67dbe3edce55bcb980 | [
"MIT"
] | null | null | null | app/django_first/news/migrations/0002_movies_year.py | vvuri/flask_pipeline | d3f283b8a6a6239e56d85e67dbe3edce55bcb980 | [
"MIT"
] | null | null | null | # Generated by Django 4.0.1 on 2022-01-19 23:58
from django.db import migrations, models
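# The migration body was not included in this copy; a minimal sketch
# consistent with the migration name 0002_movies_year (the field type and
# default below are assumptions):
class Migration(migrations.Migration):
    dependencies = [
        ('news', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='movies',
            name='year',
            field=models.IntegerField(default=2022),
        ),
    ]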
| 19.894737 | 60 | 0.582011 |
6aa93bd0cfbc5bae7eaa0365dd95b7de863c0e17 | 653 | py | Python | scripts/issue_param_value.py | Jhsmit/awesome-panel-extensions | 41eba7cf84caa911be4ed0df2a96e16fc1e70263 | [
"CC-BY-4.0"
] | 3 | 2020-07-16T07:28:45.000Z | 2020-07-17T12:53:56.000Z | scripts/issue_param_value.py | MarcSkovMadsen/panel-extensions-template | f41ad8d8fb8502f87de3a4992917cbffb6299012 | [
"CC-BY-4.0"
] | null | null | null | scripts/issue_param_value.py | MarcSkovMadsen/panel-extensions-template | f41ad8d8fb8502f87de3a4992917cbffb6299012 | [
"CC-BY-4.0"
] | null | null | null | import panel as pn
import param
from awesome_panel_extensions.frameworks.fast import FastTemplate, FastTextInput
WIDGETS = {
"some_text": {"type": FastTextInput, "readonly": True, "sizing_mode": "fixed", "width": 400}
}
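# The ParameterizedApp class was not included in this copy; a minimal sketch
# that matches the WIDGETS mapping above (the class body is an assumption):
class ParameterizedApp(param.Parameterized):
    some_text = param.String(default="Hello FastTextInput", label="Some Text")
    def __init__(self, **params):
        super().__init__(**params)
        # render the parameter with the FastTextInput widget configured above
        self.view = pn.Param(self, parameters=["some_text"], widgets=WIDGETS)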
parameterized_app = ParameterizedApp()
parameterized_template = FastTemplate(main=[parameterized_app.view])
parameterized_template.servable()
| 27.208333 | 96 | 0.735069 |
6aaa29259fb6e01655aa91ee60654bb2eceee036 | 1,271 | py | Python | gjqyxyxxcxxt/gjqyxyxxcxxt/queue_companies.py | AisinoPythonTeam/PythonAiniso | 983a29962752679d8cc26a2c3cdb0ba8fcfa3f02 | [
"Apache-2.0"
] | null | null | null | gjqyxyxxcxxt/gjqyxyxxcxxt/queue_companies.py | AisinoPythonTeam/PythonAiniso | 983a29962752679d8cc26a2c3cdb0ba8fcfa3f02 | [
"Apache-2.0"
] | null | null | null | gjqyxyxxcxxt/gjqyxyxxcxxt/queue_companies.py | AisinoPythonTeam/PythonAiniso | 983a29962752679d8cc26a2c3cdb0ba8fcfa3f02 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import pymysql
import sys, os, json, time, pymongo
app_dir = os.path.abspath("../")
sys.path.append(app_dir)
from gjqyxyxxcxxt import settings
from gjqyxyxxcxxt.database.my_redis import QueueRedis
conn = None
if __name__ == '__main__':
main()
| 24.921569 | 125 | 0.650669 |
6aab26683b9b2a063b1ca8928d6b0655775e0f6b | 86,132 | py | Python | model/entity_quotes.py | tkuculo/QuoteKG | a7b7d323679624a9cd3805e866028fad0a5a5408 | [
"MIT"
] | null | null | null | model/entity_quotes.py | tkuculo/QuoteKG | a7b7d323679624a9cd3805e866028fad0a5a5408 | [
"MIT"
] | null | null | null | model/entity_quotes.py | tkuculo/QuoteKG | a7b7d323679624a9cd3805e866028fad0a5a5408 | [
"MIT"
] | null | null | null | #main_section > lines > line > text
#main_section > lines > line > sub_line > text
#main_section > sub_sections
#main_section > templates > type
#main_section > templates > empty_values
#main_section > templates > values
#main_section > templates > sub_templates
#main_section > title > line > text
from transformers.models.auto import configuration_auto
from model.quote import *
import collections
languages_with_templates=["fr","da","nl","be","is","ca","bg","da","ka"]
hybrid_languages = ["uk","ru","sv","et"] + ["ko","fa","cs","fi", "hy"]
misattributed = {
'ar': ['', ' ', ' ', ' ', ' ', ' ', '', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', '', '', ' ', ' ', ' ', ' ', ' ', ' ', ' '],\
'az': ['zif', 'mbahisli', 'yanl', 'yanl kild aid olduunu sylmk', 'shv yanna aiddir', 'Tyin olunmu sitatlar', 'yanlsaq', 'shv aiddir', 'shv baldr', 'shv aiddir', 'mbahisli', 'shv tyin olunur', 'shv tyin olunmudur', 'shv hesablanr', 'bhli', 'zif', 'shv', 'shv hesablanr', 'tsdiqlnmmi', 'shv lav olunur', 'shv hesablanr', 'yanl', 'shvn aiddir', 'bhli'],\
'be': ['', '', '', ' ', ' ', ' ', 'misatributed', ' ', ' ', ' ', '', ' ', ' ', ' ', '', '', '', '', ' ', '', ' ', ' ', '', ' ', ''],\
'bg': ['', '', '', ' ', ' ', 'Misattributed.', ' ', ' ', '', '', ' ', ' ', ' ', '', '', '', '', ' ', '', ' ', ' ', '', ' ', ''],\
'bs': ['slab', 'sporan', 'pogreno', 'govorei pogreno pripisano', 'pogreno se pripisuje', 'Citati dodijeljene', 'misao', 'Netano pripisan', 'Nepravilno povezani', 'pogreno pripisan', 'kontroverzan', 'pogreno je dodeljen', 'pogreno dodijeljeno', 'pripisuju pogreno', 'sumnjiv', 'maltretiran', 'slabo', 'pogreno', 'pogreno pripisan', 'neprovjeren', 'pogreno priloen', 'pogreno pripisan', 'netaan', 'pripisuje se pogreno', 'sumnjiv'], \
'ca': ['feble', 'en disputa', 'incorrectament', 'dient incorrectament atribut', "s'atribueix incorrectament a", 'Cotitzacions assignades', 'Misattributed', 'atributs incorrectament', 'connectat incorrectament', 'atribut incorrectament a', 'controvertit', 'est assignat incorrectament', 'assignat incorrectament', 'acreditat incorrectament', 'dubts', 'maltractat', 'pobrament', 'mal', 'acreditat incorrectament', 'no verificat', 'incorrectament adjunt', 'acreditat incorrectament', 'incorrecte', "s'atribueix a errniament", 'sospits'], \
'co': ['debuli', 'disputa', 'sbagliatu', 'dicendu attribuitu sbagliatu', 'sbagliatu h attribuita ', 'Quotes assignati', 'misattribuitu', 'attribuitu sbagliatu', 'cunnessu sbagliatu', 'attribuitu sbagliatu ', 'cuntruversuale', 'h incorrectamente assignatu', 'assignatu sbagliatu', 'creditu sbagliatu', 'dubbitu', 'MISTORATU', 'Poviru', 'sbagliatu', 'sbagliatu creditu', 'Unvererazionatu', 'sbagliatu attaccatu', 'incorrectamente creditu', 'sbagliatu', 'h attribuita sbagli', 'suspicosu'],\
"cs": ['pochybn', 'nesprvn je pipisovn', 'je pitn omylem', 'neosgejavan.', 'k se nesprvn piazen', 'sporn', 'je nesprvn piazen', 'patn', 'nesprvn pipojeno', 'nesprvn', 'nezbytn', 'nesprvn piazeno', 'nesprvn pisuzovno', 'patn zachzen', 'slab', 'nesprvn', 'nesprvn pipsny', 'nesprvn pipsan', 'pidlen nabdky', 'podezel', 'neoven'],\
'da': ['svag', 'bestridt', 'forkert', 'siger fejlagtigt tilskrevet', 'fejlagtigt tilskrives', 'citater tildelt', 'misattributed.', 'forkert tilskrevet', 'forkert forbundet', 'forkert tilskrives', 'kontroversielt', 'er forkert tildelt', 'forkert tildelt', 'krediteret forkert', 'tvivlsom', 'mishandlet', 'Drlig', 'forkert', 'fejlagtigt krediteret', 'unverified.', 'forkert vedhftet', 'forkert krediteret', 'ukorrekt', 'er tilskrevet fejlagtigt', 'mistnksom'], \
"de": ['falsch verbunden', 'falsch angebracht', 'falsch zugewiesen', 'wird fehlerhaft zurckgefhrt', 'schwach', 'flschlich zugeschrieben', 'falsch zugerechnet', 'falsch wird zugeschrieben', 'falsch', 'falsch angeschlossen', 'misshandelt', 'unrecht zugeschrieben werden', 'misstrauisch', 'falsch gutgeschrieben', 'zweifelhaft', 'ist falsch zugewiesen', 'notwendig', 'zitate zugewiesen', 'nicht verifiziert'],\
'el': ['', '', '', ' ', ' ', '', '', ' ', ' ', ' ', '', ' ', ' ', ' ', '', '', '', '', ' ', '', ' ', ' ', '', ' ', ''],\
"en": ['weak', 'disputed', 'incorrectly', 'saying wrongly attributed', 'wrongly is attributed to', 'quotes assigned', 'misattributed', 'incorrectly attributed', 'incorrectly connected', 'incorrectly attributed to', 'controversial', 'is incorrectly assigned', 'incorrectly assigned', 'credited incorrectly', 'doubtful', 'mistreated', 'poorly', 'wrong', 'wrongly credited', 'unverified', 'incorrectly attached', 'incorrectly credited', 'incorrect', 'is attributed to mistakenly', 'suspicious'],\
"es": ['dbil', 'disputado', 'incorrectamente', 'decir atribuido incorrectamente', 'atribuido incorrectamente a', 'citas asignadas', 'atribuido incorrectamente', 'atribuido incorrectamente', 'conectado incorrectamente', ' atribuido incorrectamente a ',' controvertido ',' asignado incorrectamente ',' asignado incorrectamente ',' acreditado incorrectamente ',' dudoso ',' maltratado ',' mal ',' incorrecto ',' acreditado incorrectamente ',' no verificado ', 'adjunto incorrectamente', 'acreditado incorrectamente', 'incorrecto', 'atribuido errneamente', 'sospechoso'],\
'et': ['nrk', 'vaidlustatud', 'valesti', 'eldes valesti omistatud', 'valesti omistatakse', 'mratud hinnapakkumisi', 'eksima', 'valesti omistatud', 'valesti hendatud', 'valesti omistatud', 'vastuoluline', 'on valesti mratud', 'valesti mratud', 'krediteeritud valesti', 'kahtlane', 'vrkohtlemine', 'halvasti', 'vale', 'valesti krediteeritud', 'vastamata jtmine', 'valesti kinnitatud', 'valesti krediteeritud', 'vale', 'omistatakse ekslikult', 'kahtlane'],\
'eu': ['ahul', 'jokatu', 'gaizki', 'gaizki egozten esanda', 'gaizki egozten zaio', 'esleitutako aipuak', 'Misattributatua', 'oker egotzi', 'Gaizki konektatuta', 'oker egotzita', 'Polemika', 'gaizki esleitzen da', 'gaizki esleituta', 'oker kreditua', 'zalantzazko', 'tratu txarrak', 'txarto', 'okerreko', 'gaizki kreditatu', 'irentetu gabe', 'oker erantsita', 'Gaizki kreditatu', 'ez zuzen', 'oker egozten zaio', 'goganbehartsu'],\
'fa': ['', '', '', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', '', '', '', '', ' ', ' ', ' ', ' ', '', ' ', ''],\
'fi': ['heikko', 'kiistanalainen', 'vrin', 'sanomalla vrin', 'virheellisesti johtuu', 'Lainaukset', 'huonosti', 'virheellisesti', 'Vrin kytketty', 'virheellisesti', 'kiistanalainen', 'on asetettu virheellisesti', 'Virheellisesti mritetty', 'hyvitetn vrin', 'epilyttv', 'kohteliaisuus', 'huonosti', 'vr', 'Vrin hyvitetty', 'vahvistettu', 'Virheellisesti kiinnitetty', 'Virheellisesti hyvitetty', 'vr', 'johtuu virheellisesti', 'epilyttv'],\
'fr': ['faible', 'contest', 'incorrectement', 'dire attribu tort', 'est attribu tort ', 'citations attribues', 'mal attribu', 'mal attribu', 'incorrectement connect', ' attribu tort ', 'controvers', 'est attribu de manire incorrecte', 'attribu de manire incorrecte', 'crdit de manire incorrecte', 'douteux', 'maltrait', 'mal', 'mauvais', 'crdit tort', 'non vrifi', 'incorrectement joint', 'mal crdit', 'incorrect', 'est attribu tort', 'suspect'],\
'he': ['', '', ' ', ' ', ' ', ' ', 'misattributed', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', '', '', '', ' ', 'unverified', ' ', ' ', ' ', ' ', ''], 'hi': ['', '', ' ', ' ', ' ', ' ', '', ' ', ' ', ' ', '', ' ', ' ', ' ', '', '', '', '', ' ', '', ' ', ' ', '', ' ', ''],\
'hr': ['slab', 'osporen', 'nepravilno', 'govorei pogreno pripisuje se', 'pogreno se pripisuje', 'dodijeljeni citati', 'pogrean', 'Neispravno se pripisuje', 'pogreno povezan', 'pogreno pripisuje', 'kontroverzno', 'je pogreno dodijeljen', 'pogreno dodijeljen', 'pogreno pripisano', 'sumnjiv', 'maltretiran', 'slabo', 'pogreno', 'pogreno pripisano', 'neveritian', 'pogreno privren', 'pogreno pripisano', 'netono', 'se pripisuje pogreno', 'sumnjiav'],\
'hu': ['gyenge', 'vitatott', 'tvesen', 'rosszul mondvn', 'helytelenl tulajdonthat', 'Idzetek hozzrendeltek', 'flrerthetetlen', 'helytelenl tulajdonthat', 'Helytelenl csatlakoztatva van', 'helytelenl tulajdonthat', 'vitatott', 'helytelenl hozzrendelt', 'Helytelenl hozzrendelt', 'helytelenl jvrjk', 'ktsges', 'rosszul kezelt', 'rosszul', 'rossz', 'tvesen jvrta', 'ellenrizetlen', 'Helytelenl csatolt', 'helytelenl jvrta', 'helytelen', 'tvesen tulajdonthat', 'gyans'],\
'hy': ['', '', '', ' , ', ' ', ' ', ' ', ' ', ' ', ' ', '', ' ', ' ', ' ', '', '', '', '', ' ', '', ' ', ' ', '', ' ', ''],\
'id': ['lemah', 'diperdebatkan', 'salah', 'mengatakan salah dikaitkan.', 'salah dikaitkan dengan', 'Kutipan ditugaskan', 'salah penyibaran', 'salah dikaitkan', 'salah terhubung', 'salah dikaitkan dengannya', 'kontroversial', 'salah ditugaskan', 'salah ditugaskan', 'dikreditkan secara salah', 'diragukan lagi', 'Dianiaya', 'buruk', 'salah', 'salah dikreditkan', 'tidak diverifikasi', 'salah melekat', 'salah dikreditkan', 'salah', 'dikaitkan dengan keliru', 'mencurigakan'],\
'is': ['veik', 'umdeildur', 'rangt', 'segja a ranglega rekja til', 'rangt stafar af', 'Tilvitnanir thluta', 'misertributed.', 'rangt rekja m', 'rangt tengt', 'rangt rekja til', 'umdeild', 'er rangt thluta', 'rangt thluta', 'lg rangt', 'efast', 'mistreated.', 'illa', 'rangt', 'ranglega lg inn', 'unverfied.', 'rangt fylgir', 'Rangt viurkennt', 'rangt', 'er rekja til ranglega', 'grunsamlegt'],\
'it': ['debole', 'disputato', 'erroneamente', 'detto erroneamente attribuito', 'erroneamente attribuito a', 'virgolette assegnate', 'erroneamente attribuito', 'erroneamente attribuito', 'erroneamente connesso', ' erroneamente attribuito a', 'controverso', ' assegnato in modo errato', 'assegnato in modo errato', 'accreditato in modo errato', 'dubbio', 'maltrattato', 'male', 'sbagliato', 'accreditato erroneamente', 'non verificato', 'erroneamente allegato', 'erroneamente accreditato', 'errato', ' attribuito a erroneamente', 'sospetto'],\
'ja': ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''],\
'ka': ['', '', '', ' ', ' ', '', 'misattributed', ' ', ' ', ' ', '', ' ', ' ', ' ', '', 'mistreated', '', '', ' ', '', ' ', ' ', '', ' ', ''],\
'ko': ['', '', '', ' ', ' ', ' ', '', ' ', ' ', ' ', ' ', ' ', ' ', ' ', '', '', ' ', '', ' ', ' ', ' ', ' ', '', ' ', ''],\
'lt': ['Silpnas', 'ginijama', 'Neteisingai', 'sakydamas neteisingai priskirtas', 'neteisingai priskiriama', 'Citatos', 'nesuderinta', 'neteisingai priskiriama', 'neteisingai prijungta', 'neteisingai priskirta', 'prietaringas', 'yra neteisingai priskirtas', 'neteisingai priskirtas', 'neteisingai skaityta', 'abejotina', 'netinkamai elgiamasi', 'blogai', 'neteisingas', 'neteisingai skaityta', 'nepatvirtinta', 'neteisingai prijungtas', 'neteisingai skaityta', 'Neteisinga', 'priskiriama klaidingai', 'tartinas'],\
'nl': ['zwak', 'twijfelachtig', 'onjuist', 'Samenstellen ten onrechte toegeschreven', 'ten onrechte wordt toegeschreven aan', 'Citaten toegewezen', 'verkeerd ingesteld', 'Onjuist toegeschreven', 'Onjuist aangesloten', 'onjuist toegeschreven aan', 'controverseel', 'is verkeerd toegewezen', 'Onjuist toegewezen', 'verkeerd gecrediteerd', 'twijfelachtig', 'mishandeld', 'slecht', 'mis', 'ten onrechte gecrediteerd', 'ongehroken', 'verkeerd bevestigd', 'onjuist gecrediteerd', 'niet correct', 'wordt toegeschreven aan ten onrechte', 'verdacht'],\
'no': ['svak', 'omstridt', 'feil', 'sier feilaktig tilskrives det', 'feil er tilskrevet', 'Sitater tildelt', 'misattributed.', 'feilaktig tilskrives det', 'feil tilkoblet', 'feilaktig tilskrives', 'kontroversiell', 'er feil tildelt', 'feilaktig tildelt', 'krediteres feil', 'tvilsom', 'feilbehandlet', 'drlig', 'feil', 'feil kreditert', 'unverified.', 'feil festet', 'feil kreditert', 'stemmer ikke', 'er tilskrevet feilaktig', 'mistenkelig'],\
'ro': ['slab', 'contestat', 'incorect', 'spunnd atribuit greit', 'este atribuit n mod greit', 'Citate atribuite', 'misattribuit', 'incorect atribuit', 'incorect conectat', 'incorect atribuit', 'controversat', 'este atribuit incorect', 'incorect atribuite', 'creditat incorect', 'ndoielnic', 'maltratat', 'slab', 'gresit', 'creditat greit', 'neveriectificat', 'n mod incorect ataat', 'incorect creditate', 'incorect', 'este atribuit n mod eronat', 'suspicios'],\
'ru': ['', '', '', ' ', ' ', ' ', '', ' ', ' ', ' ', '', ' ', ' ', ' ', '', ' ', '', '', ' ', '', ' ', ' ', '', ' ', ''],\
'sk': ['slab', 'sporn', 'nesprvne', 'hovor nesprvne pripisovan', 'nesprvne sa pripisuje', 'Pridelen citcie', 'nesprvny', 'Nesprvne pripsan', 'Nesprvne pripojen', 'nesprvne pripsan', 'kontroverzn', 'je nesprvne priraden', 'Nesprvne priraden', 'nesprvne pripsan', 'pochybn', 'nespokojn', 'boho', 'vhodn', 'nesprvne pripsan', 'neoveren', 'Nesprvne pripojen', 'Nesprvne pripsan', 'nesprvny', 'sa pripisuje mylne', 'podozriv'],\
"sl": ["neozdrojovan"'napano prijavljeno', 'rekel napano pripisano', 'napano nakazana', 'napano povezan', 'slabo', 'sumljivega', 'nepravilno dodeljena', 'neosgejavan.', 'dodeljeni citati', 'sporno', 'nepravilno pritrjena', 'nepreverjeno', 'napano', 'je nepravilno dodeljen', 'nepravilno', 'napano pripisano', 'se pripisuje pomotoma', 'in pavipe.', 'napano pripisuje', 'dvomljiv', 'ibko', 'narobe', 'nepravilno pripisana'],\
"sq": ['i diskutueshm', 'atribuohet gabimisht', 'i keqtrajtuar', 'i atribuohet gabimisht', 'i pasakt', 'kredituar gabimisht', 'caktohet gabimisht', 'i lidhur gabimisht', 'i dyshimt', 'i pavepi', 'i gabuar', 'thnie t atribuara gabimisht', 'bashkangjitur gabimisht', 'dobet'],\
"pl": ['zo', 'bdny', 'misattriruted.', 'le traktowa', 'sabo', 'wtpliwy', 'nieprawidowo przymocowany', 'nieprawidowo przypisany do', 'niepoprawnie przypisany', 'niepoprawnie poczony', 'mwic bdnie przypisany', 'kwestionowa', 'cytaty przypisywane', 'niesprawdzony', 'bdnie przypisany', 'nieprawidowo przypisany'], \
'pt': ['fraca', 'contestada', 'incorretamente', 'dizendo atribuda incorretamente', 'atribuda incorretamente a', 'citaes atribudas', 'atribuda incorretamente', 'atribuda incorretamente', 'conectada incorretamente', ' atribudo incorretamente a ',' controverso ',' atribudo incorretamente ',' atribudo incorretamente ',' creditado incorretamente ',' duvidoso ',' maltratado ',' mal ',' errado ',' creditado incorretamente ',' no verificado ', 'incorretamente anexado', 'incorretamente creditado', 'incorreto', 'atribudo a incorretamente', 'suspeito'], \
'ta': ['', '', '', ' ', ' ', ' ', 'misattributed.', ' ', ' ', ' ', '', ' ', ' ', ' ', '', ' ', '', '', ' ', '', ' ', ' ', '', ' ', ''],\
'te': ['', '', '', ' ', ' ', ' ', 'myatattributed', ' ', ' ', ' ', '', ' ', ' ', ' ', '', '', '', '', ' ', '', ' ', ' ', '', ' ', ''],\
'uk': ['', '', '', ' ', ' ', ' ', '', ' ', ' ', ' ', '', ' ', ' ', ' ', '', '', '', '', ' ', '', ' ', ' ', '', ' ', ''],\
'ur': ['', '', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', '', ' ', ' ', ' ', '', '', '', '', ' ', ' ', ' ', ' ', '', ' ', ''],\
'vi': ['Yu', 'tranh chp', 'khng chnh xc', 'ni sai quy kt', 'sai c quy cho', 'Bo gi c giao', 'sai lch', 'quy cho khng chnh xc', 'kt ni khng chnh xc', 'quy cho khng chnh xc cho.', 'gy tranh ci', 'c giao khng chnh xc', 'ch nh khng chnh xc', 'ghi c khng chnh xc', 'nghi ng', 'ngc i', 'km', 'Sai lm', 'Tn dng sai', 'cha c xc minh', 'nh km khng chnh xc', 'Credited khng chnh xc', 'khng ng', 'c quy cho nhm', 'kh nghi'],\
'zh': ['', '', '', '', '', '', '', '', '', ' ', '', '', '','','','','','','','', '','','','','']
}
#attributed? Neoven disputed
# to be checked: Djela, Obras, Povedali o
forbidden_by_language = {
"ar" : [" "," "," "," "],\
"az" : ["stinadlar","Mnb","Xarici keidlr","Haqqnda deyilnlr","istinadlar"],\
"be":[" ", "",""],\
"bg":[" "," ","",""," ",""],\
"bs":["Drugi o njemu","Djela","Takoer pogledajte","Vanjski linkovi","Izdanja"],\
"ca":["citacions sobre","Referncies","Bibliografia","Enllaos externs","referncies"],\
"co":["daveo"],\
"cs":["ve vrocch","Reference","Extern odkazy","Souvisejc"],\
"da":["Eksterne henvisninger","Kilder"],\
"de":["zitate mit bezug auf", ],\
"el":[" "],\
"es":["sobre", "Obras", "Vase tambin", "Bibliografa","referencias"],\
"et":["vlislingid"],\
"en":["quotes about", "filmography", "footnote", "sources", "resources", "other projects","external links","links",\
"notes", "note", "weblinks", "bibliogprahy", "related items","works", "references","literature","see","see also",\
"footnote","other projects"],\
"eu":["Kanpo loturak","Erreferentziak"],\
"fa":[" "," "," ","", ""," "],\
"fi":["sanottua","lhteet"],\
"fr":["sur "],\
"he":[" ", " ",""," "," "," "],\
"hi":[" "],\
"hr":["vanjske poveznice"],\
"hu":["rla mondtk","kls hivatkozsok","Mvei"],\
"hy":["","",""],\
"is":["tenglar"],\
"id":["pranala luar"],\
"it":["citazioni su","Doppiaggio","film","filmografia","altri progetti","voci correlate"], \
"ja":[""],\
"ka":[" "],\
"ko":[""," "],\
"lt":["nuorodos"],\
"nl":["over "], \
"no":["eksterne lenker","referanser"],\
"pl":["zobacz te","o "],\
"pt":["obras", "sobre","Ligaes externas"],\
"ro":["legturi externe","despre"],\
"ru":["","","","", ". "],\
"sk":["Povedali o","in projekty","referencie"],\
"sl":["viri","sklici"],\
"sq":["Thnie pr t","Referimet","Shiko edhe","lidhje t jashtme","referime"],\
"ta":[" ",""],\
"te":[""],\
"tr":["Hakknda","kaynaka"],\
"uk":["","","",""],\
"ur":[" "],\
"vi":["Lin kt ngoi","notenno"],\
"zh":["","",""]
}
forbidden_by_language["ar"] = [" ", " ", " ", " ", " ", "", "", "", "", " ", " ", " ", ".", "", "", " ", " ", "", " ", "", " ", " ", " ", ":", "", " ", "", "", ".", "", "", " ", "", "", " ", "", "", "", " ", "", "", "", " ", "", "", " ", "", " ", " ", "", ":", "", "", "", " - ", "", "", " ", " ", "daveo.", " ", "", " ", "", "", "", " O.", " ", " ", " ", "", " ", "", "", " ", "", " ", " ", " ", " ", "", "", " ", " ", ".", "", "", "", " ", " ", "", "", "", " ", "", " ", "", "", "", "", " ", "", " "]
forbidden_by_language["az"] = ["stinadlar", "Mnb", "Xarici keidlr", "Haqqnda deyilnlr", "istinadlar", "D baxmaq", "Biblioqrafiya", "sr", "lamtdar", "istinad", "Bu bard baqa bir ey", "Yubileyd", "Bu bard dedilr", "Filmoqrafiya", "Deyn", "linklr", "Onun haqqnda dedilr", "Haqqnda deyilir", "sitat gtirn", "Birldirmk", "refertral", "n mhur kitablar", "Xarici TADS", "Xarici laqlr", "Mnblr:", "onun haqqnda", "suallar asl idi", "stinad", "Dublying", "filmoqrafiya", "onun n", "O", "xarici linklr", "pyes", "izdihaml", "Onun haqqnda deyildi", "sr", "Pyes", "stn", "Digr layihlr", "Haqqnda", "onun haqqnda", "Resurslar", "Xarici laq", "araylar", "Mnblr", "Onun haqqnda deyildi", "izahat", "Xarici stinadlar", "Oxar yalar", "Mnbyi", "Qeydlr:", "Linklr", "Onun n", "Buraxl", "hrtli", "Qeydlr", "rz etmk", "nternetdki mnblr", "Hminin bax", "daveo", "Xarici il laq", "Onun haqqnda", "hminin bax", "film", "yan", "Araylar", "Dedilr.", "Bahal", "xarici laq", "Haqqnda ifadlr", "haqqnda", "Yuxardak sitatlar", "mnblr", "Sfir", "Ona deyilir", "dbiyyat", "haqqnda z haqqnda", "xarici linklr", "laqdar ttbiqlr", "Hrmtl sitatlar", "Grmk", "artq", "Hddindn artq balantlar", "haqqnda sitatlar", "filmoqrafiya", "izdihaml", "mnblr", "resurslar", "Digr layihlr", "xarici linklr", "linklr", "qeydlr", "Qeyd", "vaklar", "biblioqrafiya", "Oxar yalar", "sr", "araylar", "dbiyyat", "grmk", "hminin bax", "izdihaml", "Digr layihlr"]
forbidden_by_language["be"] = [" ", "", "", " ", "", "", "", "", " ", " ", " ", "", "", "", " ", " ", "", " ", "", " ", " ", " ", ":", " ", " ", "", "", "", " ", "", " ", "'", "", " ", "", "'", "", " ", "", " ", "", " ", "", "", " ", "", " ", " ", "", ":", "", " ", "", "", "", "", " ", " ", " ", " ", " ", "", "", "", " , .", "", " ", " ", "", " ", "", "", " ", " ", " ", "", "", " ", " ", "", "", "", " ", " ", "", "", "", "weblinks", "", " ", "", "", "", "", " ", " "]
forbidden_by_language["bg"] = [" ", " ", "", "", " ", "", " ", "", " ", "", "", " ", " ", " ", "", " ", "", " ", " ", " ", "", "- ", " ", " ", ":", " ", "", "", "", "", " ", "", " ", "", " ", " ", " ", "", "", " ", "", " ", "", " ", "", "", " ", " ", " ", " ", "", ":", "", " ", "", "", "", "", " ", " ", "", " ", " ", " ", "", "", "", " .", "", " ", " ", "", " ", "", "", " ", "", " ", " ", " ", " ", "", "", " ", " ", "", " ", "", "", " ", " ", "", "", "", "WeBlinks.", "", " ", " ", "", "", "", " ", " ", " "]
forbidden_by_language["bs"] = ["Drugi o njemu", "Djela", "Takoer pogledajte", "Vanjski linkovi", "Izdanja", "Takoe pogledajte", "Bibliografija", "radovi", "Primijetan", "referenca", "Jo jedan o tome", "u godinjici", "Rekli su o tome", "Filmografija", "Govorei", "linkove", "Rekli su o njemu", "Su rekli o", "citati", "Link na", "preporuke", "Najpoznatije knjige", "Vanjski tads", "Vanjske veze", "Izvori:", "o njemu", "Zavito upiti", "Referenca", "Presnimav", "Filmografija", "za njega", "O", "Vanjske veze", "igra", "fusnota", "Reeno je o njemu", "Radovi", "Igra", "na", "Ostali projekti", "O", "o njoj", "Resursi", "Vanjska veza", "reference", "Izvori", "Reeno je o njoj", "fusnote", "Vanjske reference", "Srodni predmeti", "Izvor", "Napomene:", "Linkove", "Za nju", "Izdanja", "Testimonials", "Biljeke", "izgovoriti", "Resursi na Internetu", "Vidjeti i", "Daveo", "Veza sa spolja", "O njemu", "vidjeti i", "film", "na", "Reference", "Rekli su O.", "Povezani", "Vanjska veza", "Izjave o", "o", "Citati gore", "izvori", "Ambasador", "Kae mu", "literatura", "o sebi", "Vanjske veze", "Srodne aplikacije", "Citati u odnosu na", "Vidjeti", "preko", "Viak veze", "citati o", "Filmografija", "fusnota", "izvori", "resursi", "Ostali projekti", "Vanjske veze", "linkove", "biljeke", "Biljeka", "Webliks", "bibliografija", "Srodni predmeti", "radovi", "reference", "literatura", "vidjeti", "vidjeti i", "fusnota", "Ostali projekti"]
forbidden_by_language["ca"] = ["citacions sobre", "Referncies", "Bibliografia", "Enllaos externs", "referncies", "Tamb mireu", "Bibliografia", "treballa", "Notable", "referncia", "Un altre sobre aix", "En l'aniversari", "Van dir sobre aix", "Filtrografia", "Dient", "enlla", "Van dir sobre ell", "Es diu sobre", "cites", "Enlla a", "referncies", "Els llibres ms famosos", "Tads exteriors", "Connexions externes", "Fonts:", "sobre ell", "Consultes dependents", "Referncia", "% De comportament", "filtrografia", "per ell", "O a", "Enllaos externs", "obert", "Nota al peu", "Es va dir sobre ell", "Treballa", "Obert", "a sobre de", "Altres projectes", "Sobre", "sobre ella", "Recursos", "Enlla extern", "referncies", "Fonts", "Es va dir sobre ella", "Notes al peu de pgina", "Referncies externes", "Articles relacionats", "Font", "NOTES:", "Enlla", "Per ella", "Llanaments", "Testimonis", "Notes", "dir", "Recursos a Internet", "Vegeu tamb", "daveo", "Enlla a l'exterior", "Sobre ell", "Vegeu tamb", "pellcula", "conectada", "Referncies", "Van dir O.", "Relacionada", "Enlla extern", "Declaracions sobre", "Sobre", "Cites anteriors", "fonts", "Ambaixador", "Se li diu", "literatura", "sobre ella mateixa", "Enllaos externs", "Aplicacions relacionades", "Cites respecte a", "Veure", "sobrar", "Enllaos d'excs", "cites sobre", "filtrografia", "Nota al peu", "fonts", "recursos", "Altres projectes", "Enllaos externs", "enlla", "notes", "nota", "Weblinks", "bibliografia", "Articles relacionats", "treballa", "referncies", "literatura", "veure", "Vegeu tamb", "Nota al peu", "Altres projectes"]
forbidden_by_language["co"] = ["daveo", "Fighj ancu", "Bibliografia", "FUNZIONI", "Notabile", "Riferimentu", "Un altru nantu questu", "In l'anniversariu", "Anu dettu di questu", "Filmografia", "Dicendu ", "Ligami", "Anu dettu di ellu", "S dettu di circa", "Ligame c", "I referenze", "I libri pi famosi", "Tadri esterni", "Cunnessioni esterni", "FONTI:", "circa ellu", "Quistioni dipendenti", "Riferimentu", "Dubaghju", "Filmografia", "per ellu", "O", "Ligami esterni", "Ghjuc", "nota di nota", "si dicia di ellu", "FUNZIONI", "Ghjuc", "", "Altri prughjetti", "Circa ", "circa ella", "Risorse", "Link esternu", "Riferimenti", "Fonti", "Si dicia di ella", "Testrootes", "Riferimenti esterni", "Oggetti Relativi", "Fonte", "NOTI:", "Ligami", "Per ella", "Release", "Testimonianza", "Note", "d", "Risorse in Internet", "Vede ancu", "daveo", "Ligame l'esterno", "Circa ellu", "vede ancu", "film", "avanti", "Riferimenti", "Anu dettu O.", "Ligatu", "Link esternu", "Dichjarazioni circa", "circa ", "Citazioni sopra", "fonti", "Ambasciatore", "Si dice ellu", "Letteratura", "circa ella stessu", "ligami esterni", "Applicazioni ligate", "Quotes cun rispettu ", "Vede", "finitu", "Ligami d'uccasioni", "citazioni circa", "Filmografia", "nota di nota", "fonti", "Risorse", "altri prughjetti", "ligami esterni", "Ligami", "Note", "Nota", "weblinks", "bibliografia", "Oggetti Relativi", "FUNZIONI", "Riferimenti", "Letteratura", "vede", "vede ancu", "nota di nota", "altri prughjetti"]
forbidden_by_language["cs"] = ["ve vrocch", "Reference", "Extern odkazy", "Souvisejc", "Tak se podvejte na", "Bibliografie", "prce", "Pozoruhodn", "odkaz", "Dal o tom", "v vro", "ekli o tom", "Filmografie", "kat", "Odkazy", "ekli o nm", "kaj se asi", "citty", "Odkaz na", "odkazy", "Nejznmj knihy", "Vnj Tads.", "Extern pipojen", "Prameny:", "o nm", "Zvisl dotazy", "Odkaz", "Dabing", "filmografie", "pro nj", "", "extern odkazy", "hra", "poznmka pod arou", "ekl to o nm", "Prce", "Hra", "na", "Dal projekty", "O", "o n", "Zdroje", "Extern odkaz", "Reference", "Prameny", "ekl to o n", "poznmky pod arou", "Extern odkazy", "Souvisejc zbo", "Zdroj", "Poznmky:", "Odkazy", "Pro ni", "Releases", "Svdectv", "Poznmky", "ci", "Zdroje v Internetu", "Viz tak", "daveo.", "Odkaz na vnj stranu", "O nm", "viz tak", "film", "na", "Reference", "ekli O.", "Pbuzn", "Extern odkaz", "Vkazy", "o", "Citace ve", "prameny", "Velvyslanec", "k se mu", "literatura", "o sob", "extern odkazy", "Souvisejc aplikace", "S ohledem na", "Vidt", "pes", "Pebyten odkazy", "cituje", "filmografie", "poznmka pod arou", "prameny", "zdroje", "Dal projekty", "extern odkazy", "Odkazy", "poznmky", "Poznmka", "webov odkazy", "bibliografie", "Souvisejc zbo", "prce", "Reference", "literatura", "vidt", "viz tak", "poznmka pod arou", "Dal projekty"]
forbidden_by_language["da"] = ["Eksterne henvisninger", "Kilder", "Se ogs p", "Bibliografi.", "arbejder", "Bemrkelsesvrdig", "reference", "En anden om det", "i jubilet.", "de sagde om det", "Filmografi.", "Siger til", "links.", "De sagde om ham", "Er sagt omkring", "citater", "Link til", "henvisninger.", "De mest bermte bger", "Ydre tads.", "Eksterne forbindelser", "Kilder:", "om ham", "Afhngige foresprgsler", "Reference", "Dubbing.", "Filmografi.", "For ham", "O.", "eksterne links", "spiller.", "fodnote.", "Det blev sagt om ham", "Arbejder", "Spiller.", "p", "Andre projekter", "Om", "om hende", "Ressourcer.", "Eksternt link", "Referencer.", "Kilder.", "Det blev sagt om hende", "fodnoter.", "Eksterne referencer.", "Relaterede elementer.", "Kilde", "Noter:", "Links.", "For hende", "Udgivelser.", "Testimonials.", "Noter.", "sige", "Ressourcer p internettet", "Se ogs", "daveo.", "Link til ydersiden", "Om ham", "se ogs", "film", "p", "Referencer.", "De sagde O.", "Relaterede", "Eksternt link", "Udsagn om", "om", "Citater ovenfor", "Kilder.", "Ambassadr", "Det siges til ham", "litteratur", "om sig selv.", "eksterne links", "Relaterede applikationer", "Citater med hensyn til", "Se", "over", "Overskydende links.", "citater om", "Filmografi.", "fodnote.", "Kilder.", "ressourcer.", "andre projekter", "eksterne links", "links.", "noter.", "Bemrk", "Weblinks.", "bibliografi", "relaterede elementer.", "arbejder", "Referencer.", "litteratur", "se", "se ogs", "fodnote.", "andre projekter"]
forbidden_by_language["de"] = ["Zitate ber", "Filmografie", "Funote", "Quellen", "Ressourcen", "andere Projekte", "externe Links", "Links", "Notizen", "Hinweis", "Weblinks", "Literaturverzeichnis", "verwandte Artikel", "Werke", "Referenzen", "Literatur", "sehen", "siehe auch", "Funote", "andere Projekte", "Auch anschauen", "Bibliographie", "Werke", "Bemerkenswert", "Referenz", "Noch einer darber", "im Jubilum", "Sie sagten darber", "Filmografie", "Sagen zu", "Links", "Sie sagten ber ihn", "Sind sagte ber", "Zitate", "Link zu", "Empfehlungen", "Die berhmtesten Bcher", "Outer tads", "Externe Verbindungen", "Quellen:", "ber ihn", "Abhngige Anfragen", " Referenz", "Synchronisation", "Filmografie", "fr ihn", "O", "Externe Links", "Spiele", "Funote", "es wurde ber ihn gesagt", "Werke", "Spiele", " auf", "Andere Projekte", "ber", "ber sie", "Ressourcen", "Externer Link", "Referenzen", "Quellen", "Es wurde ber sie gesagt", "Funoten", "Externe Verweise", "Verwandte Artikel", "Quelle", "Notizen:", "Links", "Fr sie", "Verffentlichungen", "Testimonials", "Nicht es", "sagen", "Ressourcen im Internet", "Siehe auch", "daveo", "Link nach auen", "ber ihn", "Siehe auch", "Film", "on", "Referenzen", "Sie sagten O.", "Verwandte", "externer Link", "Aussagen ber", "ber", "Zitate oben", "Quellen", "Botschafter", "Es wird ihm gesagt", "Literatur", "ber sich selbst", "externe Links", "Verwandte Anwendungen", "Zitate in Bezug auf", "Siehe", "ber", "berzhlige Links", "Zitate ber", "Filmografie", "Funote", " Quellen", "Ressourcen", "andere Projekte", "externe Links", "Links", "Notizen", "Hinweis", "Weblinks", "Bibliographie", "Verwandte Artikel", "Werke", "Referenzen", "Literatur", "sehen", "siehe auch", "Funote", "andere Projekte"]
forbidden_by_language["el"] = [" ", " ", "", "", "", "", " ", " ", " '", "", "", "", " '", "", "", " ", "", " ", " ", " ", ":", " ", " ", "", "", "", " ", "O", " ", "", "", " '", "", "", " ", " ", " ", " ", "", " ", " ", "", " '", "", " ", " ", "", ":", "", " ", "", "", "", "", " ", " ", "daveo", " ", " ", " ", "", "", " ", " .", " ", " ", " ", " ", " ", "", "", " ", "", " ", " ", " ", " ", "", " ", " ", " ", "", "", "", "", " ", " ", "", "", "", " ", "", " ", "", " ", "", "", " ", "", " "]
forbidden_by_language["et"] = ["vlislingid", "Vaata ka", "Bibliograafia", "ttama", "Mrkimisvrne", "viide", "Teine sellest", "aastapeval", "Nad tlesid sellest", "Filmograafia", "eldes", "lingid", "Nad tlesid temast", "eldakse", "tsitaat", "Link", "viited", "Kige kuulsamad raamatud", "Outer Tads", "Vlised hendused", "Allikad:", "temast", "sltus pringutest", "Viide", "Dubleerimine", "filmograafia", "tema jaoks", "O", "Vlised lingid", "mngima", "joonealune mrkus", "Ta tles temast", "Ttama", "Mngima", "peale", "Muud projektid", "Umbes", "temast", "Vahendid", "Vline link", "viited", "Allikad", "Tema kohta eldi", "joonealused mrkused", "Vlised viited", "Seotud ksused", "Allikas", "Mrkused:", "Lingid", "Temale", "Vljaanded", "Iseloomustused", "Mrgib", "tlema", "Ressursid Internetis", "Vaata ka", "daveo", "Link vljastpoolt", "Temast", "Vaata ka", "film", "peal", "Viited", "Nad tlesid O.", "Seotud", "Vline link", "Avaldused", "umbes", "Valitud tsitaadid", "allikad", "Suursaadik", "See on talle eldud", "kirjandus", "ennast", "Vlised lingid", "Seotud rakendused", "Hinnapakkumisi", "Ngema", "le", "Liigne lingid", "hinnapakkumisi", "filmograafia", "joonealune mrkus", "allikad", "vahendid", "Muud projektid", "Vlised lingid", "lingid", "mrgib", "Mrge", "weblinks", "bibliograafia", "Seotud ksused", "ttama", "viited", "kirjandus", "ngema", "Vaata ka", "joonealune mrkus", "Muud projektid"]
forbidden_by_language["en"] = ["quotes about", "filmography", "footnote", "sources", "resources", "other projects", "external links", "links", "notes", "note", "weblinks", "bibliography", "related items", "works", "references", "literature", "see", "see also", "footnote", "other projects", "Also look at", "Bibliography", "works", "Notable", "reference", "Another about it", "in the anniversary", "they said about it", "Filmography", "Saying to", "links", "They said about him", "Are said about", "Link to", "referrals", "The most famous books", "Outer tads", "External connections", "Sources:", "about him", "depended queries", "Reference", "Dubbing", "filmography", "for him", "O", "External links", "plays", "footnote", "it was said about him", "Works", "Plays", "upon", "Other projects", "About", "about her", "Resources", "External link", "references", "Sources", "It was said about her", "footnotes", "External references", "Related items", "Source", "Notes:", "Links", "For her", "Releases", "Testimonials", "Notes", "say", "resources in Internet", "See also", "daveo", "Link to the outside", "About him", "see also", "film", "on", "References", "They said O.", "Related", "external link", "Statements about", "about", "Citations above", "sources", "Ambassador", "It is said to him", "literature", "about herself", "external links", "Related Applications", "Quotes with respect to", "See", "over", "Excess links", "quotes about", "filmography", "footnote", "sources", "resources", "other projects", "external links", "links", "notes", "note", "weblinks", "bibliography", "related items", "works", "references", "literature", "see", "see also", "footnote", "other projects"]
forbidden_by_language["eu"] = ["Kanpo loturak", "Erreferentziak", "Begira ere", "Bibliografia", "zeregin", "Nabarmen", "kontsulta", "Horri buruz", "Urteurrenean", "Esan zuten", "Filmografia", "Esanda", "estekak", "Berari buruz esan zuten", "Esaten da", "aipamen", "Esteka", "ikuskapen", "Liburu ospetsuenak", "Kanpoko Tads", "Kanpoko konexioak", "Iturriak:", "Berari buruz", "Dependatutako kontsultak", "Kontsulta", "Bosbing", "Filmografia", "harentzat", "O", "Kanpoko estekak", "Plays", "oharra", "Berari buruz esan zen", "Zeregin", "Plays", "-en gainean", "Beste proiektu batzuk", "Ei buruz", "haren inguruan", "Baliabide", "Kanpoko esteka", "erreferentziak", "Iturriak", "Berari buruz esan zen", "Oharrak", "Kanpoko erreferentziak", "Lotutako elementuak", "Iturri", "Oharrak:", "Estekak", "Berarentzat", "Oheratu", "Testigantzak", "Ohar", "esan", "Baliabideak Interneten", "Ikusi ere", "Daveo", "Kanpotik estekatu", "Berari buruz", "ikusi ere", "mintz", "-en gainean", "Erreferentziak", "Esan zuten O.", "Lotinduta", "Kanpoko esteka", "Adierazpenak", "ei buruz", "Goiko aipuak", "iturriak", "Enbaxadore", "Esan dio", "literatura", "bere buruari buruz", "Kanpoko estekak", "Lotutako aplikazioak", "Aipamenak", "Ikusi", "-en gainetik", "Gehiegizko estekak", "aipamenak buruz", "Filmografia", "oharra", "iturriak", "baliabide", "Beste proiektu batzuk", "Kanpoko estekak", "estekak", "ohar", "ohar", "Weblinkak", "Bibliografia", "Lotutako elementuak", "zeregin", "erreferentziak", "literatura", "ikusi", "ikusi ere", "oharra", "Beste proiektu batzuk"]
forbidden_by_language["fa"] = [" ", " ", " ", "\u200c", "", " \u200c", " ", " - ", "", " ", "", " ", " ", " ", " ", " ", "", " ", " ", " ", " ", "", " ", "", " ", ":", " ", " ", "", "", " ", " ", "o", " ", "", "", " ", "", "", "", " ", " ", " ", "", " ", "", "", " ", " ", " ", " ", "", ":", "", " ", " ", "", "", "", " ", " ", "daveo", " ", " ", " ", "", "", "", " O.", "", " ", " ", " ", " ", "", "", " ", "", " ", " ", " ", " ", "", " ", " ", " ", " ", "", "", "", " ", " ", "", "", " ", "weblinks", " - ", " ", "", "", "", "", " ", "", " "]
forbidden_by_language["es"] = ["citas sobre", "filmografa", "nota al pie", "fuentes", "recursos", "otros proyectos", "enlaces externos", "enlaces", "notas", "nota", "enlaces web", "bibliografa"," artculos relacionados"," obras"," referencias"," literatura"," ver"," ver tambin"," nota al pie"," otros proyectos"," Mirar tambin"," Bibliografa"," obras", "Notable", "referencia", "Otro sobre eso", "en el aniversario", "Ellos dijeron al respecto", "Filmografa", "Diciendo a", "Enlaces", "Ellos dijeron sobre l", "Son dijo sobre"," citas"," Enlace a"," referencias"," Los libros ms famosos"," Tads externos"," Conexiones externas"," Fuentes:"," sobre l"," consultas dependientes"," Referencia"," Doblaje"," filmografa"," para l"," O"," Enlaces externos"," obras de teatro"," nota al pie"," se dijo sobre l"," Obras"," Obras de teatro"," sobre"," Otros proyectos"," Acerca de"," Acerca de ella"," Recursos"," Enlace externo"," Referencias"," Fuentes"," Se dijo sobre ella"," Notas al pie"," Referencias externas", "Artculos relacionados", "Fuente", "Notas:", "Enlaces", "Para ella", "Lanzamientos", "Testimonios", "No es"," decir"," recursos en Internet"," Ver tambin"," daveo"," Enlace con el exterior"," Acerca de l"," ver tambin"," pelcula"," sobre"," Referencias", "Dijeron O.", "Relacionado", "Enlace externo", "Declaraciones sobre", "Sobre", "Citas arriba", "Fuentes", "Embajador", "Se le dice a l", "Literatura", "sobre ella", "enlaces externos", "Aplicaciones relacionadas", "Citas con respecto a", "Ver", "sobre", "Enlaces en exceso", "Citas sobre", "filmografa", "nota al pie", " fuentes"," recursos"," otros proyectos"," enlaces externos"," enlaces"," notas"," nota"," enlaces web"," bibliografa"," artculos relacionados"," obras"," referencias", "literatura", "ver", "ver tambin", "nota al pie", "otros proyectos"]
forbidden_by_language["fi"] = ["lainaukset aiheesta","Aiheesta muualla" , "filmografia", "alaviite", "lhteet", "resurssit", "muut projektit", "ulkoiset linkit", "linkit", "muistiinpanot", "huomautus", "weblinks", "bibliografia", "liittyvt kohteet", "teokset", "viitteet", "kirjallisuus", "katso", "katso mys", "alaviite", "muut projektit", "katso mys", "Bibliografia", "teokset", "Huomattava", "viite", "Toinen siit", "juhlapivn", "he sanoivat siit", "Filmografia", "Sanominen", "linkit", "He sanoivat hnest", "Ovatko sanoi aiheesta", "lainaukset", "Linkki", "viittaukset", "kuuluisimmat kirjat", "Ulkoiset", "Ulkoiset yhteydet", "Lhteet:", "Hnest", "riippuvaiset kyselyt", " Viite", "Kopiointi", "filmografia", "hnelle", "O", "ulkoiset linkit", "nytelmt", "alaviite", "hnest sanottiin", "teokset", "nytelmt", " upon", "Muut projektit", "Tietoja", "Hnest", "Resurssit", "Ulkoinen linkki", "viitteet", "Lhteet", "Hnest sanottiin", "alaviitteet", "Ulkoiset viitteet", "Aiheeseen liittyvt kohteet", "Lhde", "Huomautukset:", "Linkit", "Hnelle", "Julkaisut", "Lausunnot", "Ei es", "sano", "resurssit Internetiss", "Katso mys", "daveo", "Linkki ulkopuolelta", "Tietoa hnest", "katso mys", "elokuva", "pll", "viitteet", "He sanoivat O.", "Aiheeseen liittyv", "ulkoinen linkki", "Lausunnot aiheesta", "Tietoja", "Yll olevat lainaukset", "lhteet", "suurlhettils", "Hnelle sanotaan", "kirjallisuus", "itsestn", "ulkoiset linkit", "Aiheeseen liittyvt sovellukset", "Lainaukset suhteessa", "Katso", "yli", "Ylimriset linkit", "lainauksia", "filmografia", "alaviite", " lhteet", "resurssit", "muut projektit", "ulkoiset linkit", "linkit", "muistiinpanot", "huomautus", "verkkolinkit", "bibliografia", "liittyvt kohteet", "teokset", "viitteet", "kirjallisuus", "katso", "katso mys", "alaviite", "muut hankkeet"]
forbidden_by_language["fr"] = ["citations sur", "filmographie", "note de bas de page", "sources", "ressources", "autres projets", "liens externes", "liens", "notes", "note", "liens web", "bibliogprahie", "lments lis", "uvres", "rfrences", "littrature", "voir", "voir aussi", "note de bas de page", "autres projets", "Regarder aussi", "Bibliographie", "uvres", "Remarquable", "rfrence", "Un autre ce sujet", " l'anniversaire", "ils en ont dit", "Filmographie", "En disant ", "liens", "Ils ont dit propos de lui", "Sont dit propos de", "citations", "Lien vers", "rfrences", "Les livres les plus clbres", "Tads externes", "Connexions externes", "Sources :", " propos de lui", "requtes dpendantes", " Rfrence", "Doublage", "filmographie", "pour lui", "O", "Liens externes", "pices", "note de bas de page", "on a dit de lui", "Travaux", "Joues", " sur", "Autres projets", " propos", " propos d'elle", "Ressources", "Lien externe", "Rfrences", "Sources", "On a dit d'elle", "Notes de bas de page", "Rfrences externes", "Articles associs", "Source", "Notes :", "Liens", "Pour elle", "Releases", "Tmoignages", "Non es", "dire", "ressources sur Internet", "Voir aussi", "daveo", "Lien vers l'extrieur", "A propos de lui", "voir aussi", "film", "sur", "Rfrences", "Ils ont dit O.", "Connexe", "lien externe", "Dclarations sur", " propos", "Citations ci-dessus", "sources", "Ambassadeur", "On lui dit", "littrature", " propos d'elle-mme", "liens externes", "Applications associes", "Citations concernant", "Voir", "over", "Liens excdentaires", "Citations sur", "filmographie", "note de bas de page", " sources", "ressources", "autres projets", "liens externes", "liens", "notes", "note", "liens web", "bibliographie", "lments associs", "ouvrages", "rfrences", "littrature", "voir", "voir aussi", "note de bas de page", "autres projets"]
forbidden_by_language["he"] = [" ", "", " ", "", "", " ", " ", "", "", "", " ", "'", " ", "", "", "", "", " ", " ", " ", " ", "", "", " ", "", " ", " ", " ", "", " ", "", " ", " ", "", " ", "", "' ", " ", " ", ":", "", " ", " ", "", "", "", "O", " ", "", " ", " ", "", "", " ", " ", "", "", "", " ", "", "", " ", " ", " ", " ", "", ":", "", "", "", "", " es", "", " ", " ", "daveo", " ", "", " ", "", "", "", " O", "", " ", " ", "", " ", "", "", " ", "", " ", " ", " ", " ", "", "", " ", " ", "", " ", " ", "", " ", " ", "", "", "", " ", "", " ", "", "", "", "", " ", " ", " "]
forbidden_by_language["hi"] = [" ", "", "", "", "", " ", " ", "", "", "", "", " ", " ", "", "", "", "", " ", "", " ", " ", " ", " ", "", "", " ", " ", " ", "", " ", "", " ", " ", "", " ", "", " ", " ", " ", ":", " ", " ", " ", "", "", " ", "", " ", "", "", " ", " ", "", " ", "", " ", "", " ", "", "", " ", "", " ", " ", "", ":", "", " ", "", "", " es", " ", " ", " ", "", " ", " ", " ", "", "", "", " ", "", " ", " ", " ", " ", "", "", " ", "", " ", " ", " ", " ", "", "", " ", " ", "", "", " ", "", " ", " ", "", "", "", "", " ", " ", "", "", "", "", " ", "", " "]
forbidden_by_language["hr"] = ["navodnici o", "filmografija", "fusnota", "izvori", "izvori", "drugi projekti", "vanjske veze", "veze", "biljeke", "napomena", "weblinks", "bibliografija", "srodne stavke", "radovi", "reference", "literatura", "vidi", "vidi takoer", "fusnota", "drugi projekti", "takoer pogledajte", "Bibliografija", "radovi", "Zapaeno", "referenca", "Jo jedan o tome", "u obljetnici", "rekli su o tome", "Filmografija", "Kae se", "linkovi", "Rekli su o njemu", "Jesu li rekao o", "citati", "Veza na", "preporuke", "Najpoznatije knjige", "Vanjski tad", "Vanjske veze", "Izvori:", "o njemu", "ovisni upiti", " Referenca", "Sinhronizacija", "filmografija", "za njega", "O", "Vanjske veze", "predstave", "fusnota", "reeno je o njemu", "Djela", "Predstave", " na", "Drugi projekti", "O njoj", "O njoj", "Resursi", "Vanjski link", "reference", "Izvori", "Reeno je o njoj", "fusnote", "Vanjske reference", "Povezane stavke", "Izvor", "Napomene:", "Veze", "Za nju", "Izdanja", "Izjave", "Ne es", "recimo", "resursi na Internetu", "Vidi takoer", "daveo", "Veza prema van", "O njemu", "vidi takoer", "film", "on", "Reference", "Rekli su O.", "Povezano", "vanjska veza", "Izjave o", "o", "Navodi gore", "izvori", "Ambasador", "Reeno mu je", "knjievnost", "o sebi", "vanjske veze", "Povezane aplikacije", "Citati s obzirom na", "Vidi", "preko", "Viak veza", "citati o", "filmografija", "fusnota", " izvori", "resursi", "ostali projekti", "vanjske veze", "veze", "biljeke", "biljeka", "web-veze", "bibliografija", "srodne stavke", "radovi", "reference", "knjievnost", "vidi", "vidi takoer", "fusnota", "drugi projekti"]
forbidden_by_language["is"] = ["tilvitnanir um", "kvikmyndafri", "neanmlsgrein", "heimildir", "aulindir", "nnur verkefni", "ytri tenglar", "tenglar", "aths", "ath", "weblinks", "heimildaskr", "tengd atrii", "verk", "tilvsanir", "bkmenntir", "sj", "sj einnig", "neanmlsgrein", "nnur verkefni", "Skoau lka", "Heimildaskr", "verk", "Athyglisvert", "tilvsun", "Anna um a", " afmlinu", "eir sgu um a", "Kvikmyndataka", "Seggja vi", "tenglar", "eir sgu um hann", "Eru sagi um", "tilvitnanir", "Tengill ", "tilvsanir", "Frgustu bkurnar", "Ytri tads", "Ytri tengingar", "Heimildir:", "um hann", "har fyrirspurnir", " Tilvsun", "talsetning", "kvikmyndataka", "fyrir hann", "O", "Ytri hlekkir", "leikrit", "neanmlsgrein", "a var sagt um hann", "verk", "leikrit", " ", "nnur verkefni", "Um", "um hana", "Aulindir", "Ytri tengill", "tilvsanir", "Heimildir", "a var sagt um hana", "neanmlsgrein", "Ytri tilvsanir", "Tengd atrii", "Heimild", "Athugasemdir:", "Tenglar", "Fyrir hana", "tgfur", "Vitnisburur", "Ekki es", "segja", "tilfng internetinu", "Sj lka", "daveo", "Tengill a utan", "Um hann", "sj lka", "kvikmynd", "on", "Tilvsanir", "eir sgu O.", "Tengd", "ytri tengill", "Yfirlsingar um", "um", "Tilvitnanir a ofan", "heimildir", "sendiherra", "a er sagt vi hann", "bkmenntir", "um sjlfa sig", "ytri tenglar", "Tengd forrit", "Tilvitnanir me tilliti til", "Sj", "yfir", "Umframtenglar", "tilvitnanir um", "kvikmyndafri", "neanmls", " heimildir", "tilfng", "nnur verkefni", "ytri hlekkir", "tenglar", "athugasemdir", "aths", "veftenglar", "heimildaskr", "tengd atrii", "verk", "tilvsanir", "bkmenntir", "sj", "sj einnig", "neanmlsgrein", "nnur verkefni"]
forbidden_by_language["it"] = ["citazioni su", "filmografia", "nota", "fonti", "risorse", "altri progetti", "link esterni", "link", "note", "nota", "link web", "bibliografia", "articoli correlati", "opere", "riferimenti", "letteratura", "vedi", "vedi anche", "nota a pi di pagina", "altri progetti", "guarda anche", "bibliografia", "lavori", "Notevole", "riferimento", "Un altro a riguardo", "nell'anniversario", "hanno detto a riguardo", "Filmografia", "Detto a", "link", "Hanno detto di lui", "Sono ha detto su", "citazioni", "Link a", "riferimenti", "I libri pi famosi", "Schede esterne", "Connessioni esterne", "Fonti:", "su di lui", "domande dipendenti", " Riferimento", "Doppiaggio", "filmografia", "per lui", "O", "Link esterni", "ascolta", "nota a pi di pagina", "si diceva di lui", "Lavori", "Riproduzioni", " su", "Altri progetti", "Su", "su di lei", "Risorse", "Link esterno", "riferimenti", "Fonti", "Si diceva di lei", "note a pi di pagina", "Riferimenti esterni", "Articoli correlati", "Fonte", "Note:", "Link", "Per lei", "Pubblicazioni", "Testimonianze", "Non es", "say", "risorse in Internet", "Vedi anche", "daveo", "Link all'esterno", "Su di lui", "vedi anche", "film", "on", "Riferimenti", "Hanno detto O.", "Correlato", "link esterno", "Dichiarazioni su", "su", "Citazioni sopra", "fonti", "Ambasciatore", "Si dice a lui", "letteratura", "su di s", "link esterni", "applicazioni correlate", "citazioni rispetto a", "vedere", "sopra", "collegamenti in eccesso", "citazioni su", "filmografia", "nota a pi di pagina", " fonti", "risorse", "altri progetti", "link esterni", "link", "note", "nota", "link web", "bibliografia", "articoli correlati", "lavori", "riferimenti", "letteratura", "vedi", "vedi anche", "nota a pi di pagina", "altri progetti"]
forbidden_by_language["ja"] = ["' '' '' '' '' '' '' '' '' '' '' ' '' '' '' '' '' '' '' '' '' '' ' '' '' '' '' '' '' '' '' '' ' '' '' '' '' '' '' '' '' '' '' '' '' '' '' O '' '' '' '' '' '' '' '' '' '' '' '' '' '' '' '' '' ' '' '' '' '' '' '' '","' '' '' '' '' '' '' '' '' 'O ''' '' '' '' '' '' '' '' ' '' '' '' '' '' '' "]
forbidden_by_language["ka"] = ["", "", "", "", "", " ", " ", "", "", "", "", "'", " ", "", "", "", "", " ", "", " ", " ", "", "' , , , , , , , , , , . ", "", "", "", " ", " ", " ", ":", " ", " ", " , , , , , , , , , , , ", " ", "", " ", "", " ", "", "", " ", "", " ", " ", "", ":", "", "", "", "", "N ", "", " ", " ", "", " ", " ", " ", "", "", "", " .", "", " ", "", "", " ", "", "", " ", "", " ", " ", " ", " ", "", "", " ", "", "", "", " ", "", " ", " ", "", "", "", "", "", " ", "", "", "", "", " ", "", " "]
forbidden_by_language["pl"] = ['zobacz te', 'o ', 'Zasoby', 'Wydanie', 'o nim', 'Link do zewntrz', 'Cytaty w odniesieniu do', 'Bibliografia', 'Najbardziej znane ksiki', 'powiedzieli o tym', 'Powiedziane s o', 'Powizane przedmioty', 'na', 'spinki do mankietw', 'Powizane zastosowania', 'referencja', 'Powiedzieli o nim', 'Rwnie patrze', 'Pracuje', 'literatura', 'Link zewntrzny', 'Referencje.', 'Bibliografia', 'zaleao zapytania', 'Daveo.', 'Powiedzia o niej', 'Spinki do mankietw', 'Pracuje', 'Uwagi:', 'Dubbing.', 'przypisy', 'Widzie', 'Mwiono o nim', 'o niej', 'Ambasador', 'cytaty', 'bawi si', 'film', 'O.', 'Filmografia', 'O nim', 'Zwizane z', 'Zewntrzne odniesienia', 'Cytaty powyej', 'link zewntrzny', 'Bibliografia', 'Inne projekty', 'Filmografia', 'Outer Tads.', 'rdo', 'Zewntrzne linki', 'Zasoby w Internecie.', 'notatka', 'Zobacz te', 'Referencja', 'Powiedzieli O.', 'Notatki', 'Dla niej', 'Znaczny', 'nad', 'Mwi si mu', 'Nadmiarowe linki', 'o', 'O sobie', 'Bawi si', 'RDA', 'mowi', 'Inny o tym', 'Mwic do', 'Poczenia zewntrzne', 'Zobacz te', 'od', 'O', 'w rocznicy.', 'czy z', 'skierowania', 'dla niego', 'rda:', 'Owiadczenia o', 'RDA', 'Zewntrzne linki', 'cytaty', 'Filmografia', 'notatka', 'RDA', 'Surowce', 'inne projekty', 'Zewntrzne linki', 'spinki do mankietw', 'notatki', 'Notatka', 'linki internetowe', 'bibliografia', 'powizane przedmioty', 'Pracuje', 'Bibliografia', 'literatura', 'zobaczy', 'Zobacz te', 'notatka', 'inne projekty']
forbidden_by_language["pt"] = ["Ligaes externas","citaes sobre ele", "citaes sobre ela", "filmografia", "nota de rodap", "fontes", "recursos", "outros projetos", "links externos", "links", "notas", "nota", "links da web", "bibliografia", "itens relacionados", "obras", "referncias", "literatura", "ver", "ver tambm", "nota de rodap", "outros projetos" , "Veja tambm", "Bibliografia", "obras", "Notvel", "Referncia", "Outra sobre isso", "no aniversrio", "foi dito sobre ela", "Filmografia", "Dizendo a "," links "," Disseram sobre ele "," Dizem sobre "," Link para "," referncias "," Os livros mais famosos "," Meninos de fora "," Conexes externas "," Fontes: ", "sobre ele", "consultas dependentes", "Referncia", "Dublagem", "filmografia", "para ele", "O", "Ligaes externas", "peas", "nota de rodap", "foi falado sobre ele "," Funciona "," Joga "," sobre "," Outros projetos "," Sobre "," sobre ela "," Recursos "," Link externo "," Referncias "," Fontes "," Foi dito sobre ela "," notas de rodap "," Referncias externas "," Itens relacionados "," Fonte "," Notas: "," Link s "," Releases "," Notes "," resources in Internet "," See also "," daveo "," Link to the outside "," About him "," see also "," film ", "Referncias", "Disseram sobre ele", "Relacionadas", "link externo", "Declaraes sobre" , "Citaes acima", "fontes", "Embaixador", "Diz-se sobre ele", "literatura "," Disseram sobre ela "," links externos "," Aplicativos relacionados "," Citaes a respeito de "," Ver ", " sobre "," Excesso de links "," citaes sobre "," filmografia "," nota de rodap "," fontes "," recursos "," outros projetos "," links externos "," links "," notas "," nota "," links da web "," bibliografia "," itens relacionados "," trabalhos "," referncias "," literatura "," ver "," ver tambm "," nota de rodap "," outros projetos "]
forbidden_by_language["ro"] = ['legturi externe', 'despre', 'NOTE:', 'literatur', 'sa spus despre el', 'despre el', 'Dobbing.', 'Pentru ea', 'Se spune despre', 'Articole conexe', 'Notabil', 'Notele de subsol', 'Aplicaii nrudite', 'Filmografie', 'Surse:', 'depinde de interogri', 'Referine externe', 'Au spus despre el', 'Alte proiecte', 'Vedea', 'Uitai de asemenea la', 'Filmografie', 'Despre', 'pe', 'Legate de', 'O.', 'Ambasador', 'joac', 'referin', 'pentru el', 'TADS OUTER.', 'Bibliografie', 'linkuri externe', 'n aniversare', 'Link-uri', 'Releases.', 'despre ea nsi', 'Link-uri', 'lucrri', 'Referin', 'Declaraii despre', 'Vezi si', 'Cele mai cunoscute cri', 'Lucrri', 'Sa spus despre ea', 'Link-uri excesive', 'citate', 'Link-ul la exterior', 'Surs', 'Altul despre el', 'Spunnd', 'film', 'Citate cu privire la', 'Spune', 'Daveo.', 'Link extern', 'Citri de mai sus', 'Vezi si', 'peste', 'Surse.', 'i se spune', 'Au spus O.', 'Referine', 'despre', 'peste', 'Legtura cu', 'Joac', 'Referine', 'despre ea', 'Surse.', 'linkuri externe', 'Au spus despre asta', 'Link extern', 'Mrturii', 'not de subsol', 'Referine', 'Note', 'Resurse pe Internet', 'Despre el', 'Resurse', 'Conexiuni externe', 'Citate despre', 'Filmografie', 'not de subsol', 'Surse.', 'resurse', 'Alte proiecte', 'linkuri externe', 'Link-uri', 'note', 'Not', 'Link-uri web', 'bibliografie', 'Articole conexe', 'lucrri', 'Referine', 'literatur', 'vedea', 'Vezi si', 'not de subsol', 'Alte proiecte']
forbidden_by_language["ru"] = [' ', '', '', ' ', '. ', ':', '', ' ', ' ', '', ' ', ' ', ' ', '', '', ' ', '', ':', ' ', ' ', ' ', ' ', '', ' ', '', '', '', '', '', '', '', '', ' ', ' ', '', ' ', ' ', '', '', ' ', '', '', '', ' ', ' ', ' ', '', ' ', ' ', ' ', '', ' ', '', '', ' ', '', 'Daveo.', ' ', ' ', ' ', '', '', ' ', ' .', ' ', '', '', ' ', '', '', ' ', '', ' ', ' ', ' ', '', '', ' ', '', ' ', ' ', '', ' ', ' ', '', '', '', '', ' ', ' ', '', '', '', ' ', '', ' ', '', ' ', '', '', ' ', '', ' ']
forbidden_by_language["sk"] = ['Povedali o', 'in projekty', 'referencie', 'Poznmky:', 'literatra', 'Hovorilo sa o om', 'o om', 'Dabovanie', 'Pre u', 'Hovoria', 'Svisiace poloky', 'Pozoruhodn', 'poznmky pod iarou', 'Svisiace aplikcie', 'Filmograf', 'Zdroje:', 'zvisl dotazy', 'Extern referencie', 'Povedali o om', 'Ostatn projekty', 'Pozrie sa', 'Pozrite sa aj na', 'filmograf', 'O', 'zapnut', 'Svisiaci', 'O', 'Vevyslanec', 'hra', 'referencia', 'pre neho', 'Vonkajie tads', 'Bibliografia', 'vonkajie odkazy', 'v vronom', 'Spojenie', 'Vydania', 'o sebe', 'spojenie', 'Tvorba', 'Referencia', 'Vyhlsenia', 'pozri tie', 'Najznmejie knihy', 'Tvorba', 'Povedala sa o om', 'Prebyton odkazy', 'citcie', 'Odkaz na vonkajiu stranu', 'Zdroj', 'O tom', 'Hovori', 'film', 'Citty s ohadom na', 'poveda', 'daveo', 'Extern odkaz', 'Vyie uveden citcie', 'Pozri tie', 'nad', 'Zdroje', 'Hovor sa mu', 'Povedali o.', 'Referencie', 'o', 'na', 'Odkaz na', 'Hra', 'referencie', 'o nej', 'zdroje', 'vonkajie odkazy', 'Povedali o tom', 'extern odkaz', 'Referencie', 'poznmka pod iarou', 'referencie', 'Poznmky', 'Zdroje na internete', 'O om', 'Prostriedky', 'Extern pripojenia', 'cituje', 'filmograf', 'poznmka pod iarou', 'zdroje', 'prostriedky', 'Ostatn projekty', 'vonkajie odkazy', 'spojenie', 'poznmky', 'Poznmka', 'weblinks', 'Bibliografia', 'Svisiace poloky', 'Tvorba', 'referencie', 'literatra', 'pozrie sa', 'pozri tie', 'poznmka pod iarou', 'Ostatn projekty']
forbidden_by_language["sl"] = ['viri', 'sklici', 'Opombe:', 'Literatura.', 'Reeno je bilo o njem', 'o njem', 'Dubbing.', 'Za njo', 'Reeno', 'Podobni elementi', 'Opazno', 'Opombe', 'Povezane aplikacije', 'Filmografija', 'Viri:', 'odvisne poizvedbe', 'Zunanje reference', 'Rekli so o njem', 'Drugi projekti', 'Glejte', 'Oglejte si tudi', 'filmografija', 'Priblino', 'On.', 'Povezano', 'O.', 'Veleposlanik', 'igra', 'Referenca', 'zanj', 'Zunanji tads.', 'Bibliografija', 'Zunanje povezave', 'V obletnici', 'Povezave', 'Sprosti', 'o sebi', 'Povezave', 'dela', 'Referenca', 'Izjave', 'Poglej tudi', 'Najbolj znane knjige', 'Dela', 'Reeno je bilo o njej', 'Presene povezave', 'citate', 'Povezava na zunanjost', 'Vir.', 'Drugo o tem', 'Rekel', 'film', 'Citati v zvezi s tem', 'rei.', 'daveo.', 'Zunanja povezava', 'Zgoraj', 'Poglej tudi', 'nad', 'Viri', 'Reeno mu je', 'Rekli so O.', 'Reference', 'priblino', 'AN.', 'Povezava do', 'Igra', 'napotitve', 'o njej', 'Viri', 'Zunanje povezave', 'Rekli so o tem', 'Zunanja povezava', 'Prievanja', 'opomba', 'Reference', 'Opombe', 'Viri na internetu', 'O njem', 'Viri', 'Zunanje povezave', 'navaja', 'filmografija', 'opomba', 'Viri', 'Viri', 'Drugi projekti', 'Zunanje povezave', 'Povezave', 'Opombe', 'Opomba', 'weblinks.', 'Bibliografija', 'Podobni elementi', 'dela', 'Reference', 'Literatura.', 'Glejte', 'Poglej tudi', 'opomba', 'Drugi projekti']
forbidden_by_language["sq"] = ['Thnie pr t', 'Referimet', 'Shiko edhe', 'lidhje t jashtme', 'referime', 'Shnime:', 'letrsi', 'U tha pr t', 'pr t', 'Dublim', 'Pr t', 'Jan thn', 'Artikuj t ngjashm', 'I dukshm', 'fusnotat', 'Aplikime t ngjashme', 'Film', 'Burimet:', 'Pyetje t varura', 'Referencat e jashtme', 'Ata than pr t', 'Projekte t tjera', 'Shiko', 'Gjithashtu shikoni', 'film', 'Rreth', 'n', 'I lidhur', 'O', 'Ambasador', 'luaj', 'referim', 'per at', 'Tads e jashtme', 'Bibliografi', 'Linqe te jashtme', 'N prvjetorin', 'Lidhje', 'Liron', 'pr veten', 'lidhje', 'vepron', 'Referim', 'Deklaratat rreth', 'Shiko gjithashtu', 'Librat m t famshm', 'Vepron', 'U tha pr t', 'Lidhje t teprta', 'kuotat', 'Lidhje me pjesn e jashtme', 'Burim', 'Nj tjetr pr kt', 'Duke thn', 'film', 'Kuotat n lidhje me', 'thua', 'daveo', 'Lidhje e jashtme', 'Citimet e msiprme', 'Shiko gjithashtu', 'mbi', 'Burime', 'sht thn atij', 'Ata than O.', 'Referencat', 'rreth', 'n', 'Lidh me', 'Luaj', 'referime', 'pr t', 'burime', 'Linqe te jashtme', 'ata than pr kt', 'lidhje e jashtme', 'Dshmi', 'shnim shnim', 'referencat', 'Shnim', 'Burimet n Internet', 'Pr t', 'Burime', 'Lidhjet e jashtme', 'citon rreth', 'film', 'shnim shnim', 'burime', 'burime', 'Projekte t tjera', 'Linqe te jashtme', 'lidhje', 'shnim', 'shnim', 'weblinks', 'bibliografi', 'Artikuj t ngjashm', 'vepron', 'referencat', 'letrsi', 'Shiko', 'Shiko gjithashtu', 'shnim shnim', 'Projekte t tjera']
forbidden_by_language["ta"] = [' ', '', ':', '', ' ', ' ', '', '', ' ', ' ', '', '', ' ', '', ':', ' ', ' ', ' ', ' ', '', ' ', '', '', '', '', '', '', '', '', '', ' tads.', '', ' ', ' ', '', '', ' ', '', '', '', ' ', ' ', ' ', '', ' ', ' ', '', ' ', '', ' ', '', '', ' ', '', 'daveo.', ' ', ' ', ' ', '', '', ' ', ' .', '', '', '', '', '', '', ' ', '', ' ', ' ', ' ', '', '', '', '', ' ', ' ', '', ' ', ' ', '', '', '', '', ' ', ' ', '', '', '', 'weblinks.', '', ' ', '', '', '', '', ' ', '', ' ']
forbidden_by_language["te"] = ['', ':', '', ' ', ' ', '', ' ', ' ', ' ', '', '', ' ', '', ':', ' ', ' ', ' ', ' ', '', ' ', '', '', '', '', 'O.', '', '', '', '', ' tads.', '', ' ', '', '', '', ' ', '', '', '', ' ', ' ', ' ', '', ' ', ' ', '', ' ', '', ' ', '', '', ' ', '', 'daveo.', ' ', ' ', ' ', '', '', ' ', ' ', '', '', ' ', '', '', '', ' ', '', ' ', ' ', ' ', '', '', '', '', ' ', ' ', '', ' ', ' ', '', '', '', '', ' ', ' ', '', '', '', 'weblinks.', '', ' ', '', '', '', '', ' ', '', ' ']
forbidden_by_language["tr"] = ['Hakknda', 'kaynaka', 'Notlar:', 'Edebiyat', 'Onun hakknda sylendi', 'onun hakknda', 'Dublaj', 'Onun iin', 'Hakknda syleniyor', 'lgili eler', 'Dikkate deer', 'dipnotlar', 'lgili uygulamalar', 'Filmografi', 'Kaynaklar:', 'SORUMLULUKLAR', 'D referanslar', 'Onun hakknda sylediler', 'Dier projeler', 'Grmek', 'Ayrca bak', 'filmografi', 'Hakknda', 'zerinde', 'lgili', '', 'Bykeli', 'oynar', 'referans', 'onun iin', 'D tads', 'Bibliyografya', 'D balantlar', 'yldnmnde', 'Linkler', 'Salverme', 'kendisi hakknda', 'linkler', 'ler', 'Referans', 'Hakknda aklamalar', 'Ayrca baknz', 'En nl kitaplar', 'ler', 'Onun hakknda sylendi', 'Ar balantlar', 'alnt', 'Da balant', 'Kaynak', 'Bunun hakknda baka', 'Syleyerek', 'film', 'le ilgili alntlar', 'sylemek', 'Daveo', 'Harici balant', 'Yukardaki alntlar', 'Ayrca baknz', 'zerinde', 'Kaynaklar', 'Ona syleniyor', 'O dediler.', 'Referanslar', 'hakknda', 'zerine', 'Balamak', 'Oynar', 'ynlendirmeler', 'Onun hakknda', 'kaynaklar', 'D balantlar', 'Bunun hakknda sylediler', 'harici balant', 'Tanklk', 'dipnot', 'Referanslar', 'Notlar', 'nternetteki kaynaklar', 'Onun hakknda', 'Kaynaklar', 'Harici Balantlar', 'hakknda alntlar', 'filmografi', 'dipnot', 'kaynaklar', 'Kaynaklar', 'dier projeler', 'D balantlar', 'linkler', 'notalar', 'Not', 'nternet linkleri', 'bibliyografya', 'ilgili eler', 'ler', 'Referanslar', 'Edebiyat', 'grmek', 'Ayrca baknz', 'dipnot', 'dier projeler']
forbidden_by_language["uk"] = [' ', '', '', '', ':', '', ' ', ' ', '', ' ', '', "' ", '', '', "' ", '', ':', ' ', ' ', ' ', ' ', '', ' ', '', '', '', '', 'O', '', '', '', ' ', ' tads', '', ' ', ' ', '', '', ' ', '', '', '', ' ', ' ', ' ', '', ' ', " '", '', ' ', '', ' ', '', '', ' ', '', '', ' ', ' ', ' ', '', '', ' ', ' .', '', '', '', ' ', '', '', ' ', '', ' ', ' ', ' ', '', '', '', '', ' ', ' ', '', " '", '', '', '', '', ' ', ' ', '', '', '', 'weblinks', '', "' ", '', '', '', '', ' ', '', ' ']
forbidden_by_language["ur"] = [' ', ':', '', ' ', ' ', '', ' ', ' ', ' ', ' ', '', ' ', '', ':', ' ', ' ', ' ', ' ', '', ' ', '', ' ', '', '', '', '', ' ', '', ' ', ' ', '', ' ', ' ', '', '', ' ', '', '', '', ' ', ' ', ' ', '', ' ', ' ', ' ', ' ', '', ' ', ' ', '', ' ', ' ', '', ' ', ' ', ' ', '', '', ' ', ' ', ' ', ' ', '', ' ', ' ', ' ', ' ', '', ' ', ' ', ' ', '', '', ' ', '', ' ', ' ', ' ', ' ', ' ', '', '', '', ' ', ' ', ' ', '', '', '', ' ', '', ' ', '', ' ', '', '', ' ', '', ' ']
forbidden_by_language["zh"] = ["","","","","","","","","","","","","","","","","","","","","","","", "","","","","","","","","","","","","","","","",""]
forbidden = [f.lower() for l in list(forbidden_by_language.values()) for f in l]
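# Illustrative sketch (not part of the original script): one plausible use of the
# lists above is to skip page sections whose title is a known non-quote heading
# such as "references" or "external links". The helper name below is hypothetical.
def is_forbidden_section(title, lang=None):
    title = title.strip().lower()
    candidates = forbidden_by_language.get(lang) if lang else None
    if candidates is None:
        candidates = forbidden
    return any(title == entry.strip().lower() for entry in candidates)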
| 337.772549 | 1,878 | 0.652603 |
6aab6b3ba732d64220b4fb1bf6b4cc739254d1fe | 1,019 | py | Python | tests/pm/update_sla.py | supsi-dacd-isaac/parity-sidechain-interface | b64a5fb724955332afb4998344081d1b93ac216a | ["MIT"] | null | null | null | tests/pm/update_sla.py | supsi-dacd-isaac/parity-sidechain-interface | b64a5fb724955332afb4998344081d1b93ac216a | ["MIT"] | null | null | null | tests/pm/update_sla.py | supsi-dacd-isaac/parity-sidechain-interface | b64a5fb724955332afb4998344081d1b93ac216a | ["MIT"] | null | null | null |
# Importing section
import json
import requests
import argparse
import hashlib
import time
from http import HTTPStatus
# Main
if __name__ == "__main__":
arg_parser = argparse.ArgumentParser()
args = arg_parser.parse_args()
set_cmd = 'updateSla'
params = {
'idx': 'sla04',
'start': 3000,
'end': 3900
}
cmd_url = 'http://localhost:9119/%s' % set_cmd
headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
print('COMMAND: %s' % cmd_url)
print('PARAMS: %s' % params)
r = requests.post(cmd_url, headers=headers, json=params)
data = json.loads(r.text)
print('RESPONSE: %s\n' % data)
# Wait some seconds to be sure that the transaction has been handled
time.sleep(5)
check_tx_url = 'http://localhost:9119/checkTx/%s' % data['tx_hash']
print('CHECK TX: %s' % check_tx_url)
r = requests.get(check_tx_url)
data = json.loads(r.text)
print('RESPONSE: %s\n' % data)
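# Hedged sketch (not part of the original test): a polling variant of the check
# above -- poll the assumed checkTx endpoint until it answers OK, instead of a
# fixed 5-second sleep. Endpoint, port, and response shape mirror the code above.
def wait_for_tx(tx_hash, timeout=30, poll=2):
    deadline = time.time() + timeout
    while time.time() < deadline:
        resp = requests.get('http://localhost:9119/checkTx/%s' % tx_hash)
        if resp.status_code == HTTPStatus.OK:
            return json.loads(resp.text)
        time.sleep(poll)
    raise TimeoutError('transaction %s was not confirmed in time' % tx_hash)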
| 24.261905 | 80 | 0.617272 |
6aac1f4d092634d65b03e7c6699787370a84bac7 | 498 | py | Python | array/python3/5_move_all_negative_elements.py | jitendragangwar123/cp | 8d9da1abd841784da8304e7ebb64a6b94cb804bb | ["MIT"] | null | null | null | array/python3/5_move_all_negative_elements.py | jitendragangwar123/cp | 8d9da1abd841784da8304e7ebb64a6b94cb804bb | ["MIT"] | 1 | 2020-12-12T19:09:01.000Z | 2020-12-12T19:09:01.000Z | array/python3/5_move_all_negative_elements.py | jitendragangwar123/cp | 8d9da1abd841784da8304e7ebb64a6b94cb804bb | ["MIT"] | 1 | 2020-12-12T18:36:24.000Z | 2020-12-12T18:36:24.000Z |
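# The definition of sort() is truncated in this dump; the reconstruction below is
# an assumption based on the file name ("move all negative elements"): it moves
# every negative value to the front of the list in place with a write pointer.
def sort(arr):
    j = 0
    for i in range(len(arr)):
        if arr[i] < 0:
            arr[i], arr[j] = arr[j], arr[i]
            j += 1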
if __name__ == "__main__":
arr = [-1, 2, -3, 4, 5, 6, -7, 8, 9]
sort(arr)
print(arr)
| 23.714286 | 74 | 0.5 |
6aac551e77cffa8d22df81867eace49a7797fd1d | 1,199 | py | Python | misc.py | hldai/wikiprocesspy | 788ccb6f0e0e54a7322863d5a13332635afc240d | [
"MIT"
] | null | null | null | misc.py | hldai/wikiprocesspy | 788ccb6f0e0e54a7322863d5a13332635afc240d | [
"MIT"
] | null | null | null | misc.py | hldai/wikiprocesspy | 788ccb6f0e0e54a7322863d5a13332635afc240d | [
"MIT"
] | null | null | null | import json
wiki19_anchor_sents_file = 'd:/data/res/wiki/anchor/enwiki-20190101-anchor-sents.txt'
anchor_sent_texts_file = 'd:/data/res/wiki/anchor/enwiki-20190101-anchor-sents-tok-texts.txt'
# __text_from_anchor_sents_file(wiki19_anchor_sents_file, anchor_sent_texts_file)
part_pos_tag_files = [f'd:/data/res/wiki/anchor/enwiki-20190101-anchor-sents-tok-texts-pos-{i}.txt' for i in range(4)]
pos_tag_file = 'd:/data/res/wiki/anchor/enwiki-20190101-anchor-sents-tok-texts-pos.txt'
# merge_files(part_pos_tag_files, pos_tag_file)
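# merge_files() is not defined in this excerpt; a minimal sketch of what it
# presumably does (concatenate the part files, in order, into one output file):
def merge_files(part_files, output_file):
    with open(output_file, 'w', encoding='utf-8') as fout:
        for part_file in part_files:
            with open(part_file, encoding='utf-8') as fin:
                for line in fin:
                    fout.write(line)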
| 35.264706 | 118 | 0.686405 |
6aad34cdb7b95e79e68448f602b65f3d09cae50a | 1,106 | py | Python | test/smptest.py | myrtam/CANNR | b966a873ec60264b0fd42b81edadb5495237d7ea | [
"Apache-2.0"
] | null | null | null | test/smptest.py | myrtam/CANNR | b966a873ec60264b0fd42b81edadb5495237d7ea | [
"Apache-2.0"
] | null | null | null | test/smptest.py | myrtam/CANNR | b966a873ec60264b0fd42b81edadb5495237d7ea | [
"Apache-2.0"
] | 1 | 2021-05-14T08:37:37.000Z | 2021-05-14T08:37:37.000Z | """
Test harness for smp.py
"""
import sys
import os
sys.path.append('/Users/ptendick/open-source-workspace/cannr Image/source/cannr/lib')
os.environ['PATH'] = '/Library/Frameworks/Python.framework/Versions/3.7/bin:' + os.environ['PATH']
import cannr
import smp
# Test openProcess by opening a Flask process
# Test openProcess by opening a Plumber process
# Test countPorts
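# Illustrative scaffold for the three tests named above (the original bodies are
# truncated in this dump, and the smp/cannr call signatures are not shown, so
# the bodies are left as explicit skips rather than invented API calls).
import unittest
class SmpTest(unittest.TestCase):
    def test_open_flask_process(self):
        self.skipTest('original test body not shown in this excerpt')
    def test_open_plumber_process(self):
        self.skipTest('original test body not shown in this excerpt')
    def test_count_ports(self):
        self.skipTest('original test body not shown in this excerpt')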
| 32.529412 | 108 | 0.705244 |
6aad4ce5dfa92a930b5b7dfb6e85c80cb8498743 | 2,833 | py | Python | neural_toolbox/inception.py | ibrahimSouleiman/GuessWhat | 60d140de1aae5ccda27e7d3eef2b9fb9548f0854 | [
"Apache-2.0"
] | null | null | null | neural_toolbox/inception.py | ibrahimSouleiman/GuessWhat | 60d140de1aae5ccda27e7d3eef2b9fb9548f0854 | [
"Apache-2.0"
] | null | null | null | neural_toolbox/inception.py | ibrahimSouleiman/GuessWhat | 60d140de1aae5ccda27e7d3eef2b9fb9548f0854 | [
"Apache-2.0"
] | null | null | null | import tensorflow as tf
import tensorflow.contrib.slim as slim
import tensorflow.contrib.slim.python.slim.nets.resnet_v1 as resnet_v1
import tensorflow.contrib.slim.python.slim.nets.inception_v1 as inception_v1
import tensorflow.contrib.slim.python.slim.nets.resnet_utils as slim_utils
from tensorflow.contrib import layers as layers_lib
from tensorflow.contrib.framework.python.ops import arg_scope
import os
def get_resnet_arg_scope(bn_fn):
"""
    Trick to apply CBN from a pretrained tf network. It overrides the batchnorm constructor with cbn
:param bn_fn: cbn factory
:return: tensorflow scope
"""
with arg_scope(
[layers_lib.conv2d],
activation_fn=tf.nn.relu,
normalizer_fn=bn_fn,
normalizer_params=None) as arg_sc:
return arg_sc
def create_inception(image_input, is_training, scope="", inception_out="Mixed_5c", resnet_version=50, cbn=None):
"""
    Create an Inception-v1 network by overriding the classic batchnorm with conditional batchnorm
:param image_input: placeholder with image
:param is_training: are you using the resnet at training_time or test_time
:param scope: tensorflow scope
    :param resnet_version: 50/101/152 (legacy parameter; unused in this Inception variant)
:param cbn: the cbn factory
:return: the resnet output
"""
# assert False, "\n" \
# "There is a bug with classic batchnorm with slim networks (https://github.com/tensorflow/tensorflow/issues/4887). \n" \
# "Please use the following config -> 'cbn': {'use_cbn':true, 'excluded_scope_names': ['*']}"
# arg_sc = slim_utils.resnet_arg_scope(is_training=is_training)
# print("--- 1")
arg_sc = inception_v1.inception_v1_arg_scope()
# Pick the correct version of the resnet
# if resnet_version == 50:
# current_resnet = resnet_v1.resnet_v1_50
# elif resnet_version == 101:
# current_resnet = resnet_v1.resnet_v1_101
# elif resnet_version == 152:
# current_resnet = resnet_v1.resnet_v1_152
# else:
# raise ValueError("Unsupported resnet version")
# inception_scope = os.path.join('InceptionV1/InceptionV1', inception_out)
# print("--- 2")
inception_scope = inception_out
# print(" resnet_out = {} , resnet_scope = {}".format(resnet_out,resnet_scope))
# print("--- 3")
with slim.arg_scope(arg_sc):
        net, end_points = inception_v1.inception_v1(image_input, 1001)  # 1001 outputs = 1000 softmax classes + background
print("Net = ",net)
# print("--- 4")
if len(scope) > 0 and not scope.endswith("/"):
scope += "/"
# print("--- 5")
# print(end_points)
print(" Batch ",inception_scope)
out = end_points[scope + inception_scope]
print("-- out Use: {},output = {}".format(inception_scope,out))
return out,end_points
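# Hedged usage sketch (not in the original): wiring create_inception into a
# TF1-style graph. The placeholder shape and session setup are assumptions.
# image_ph = tf.placeholder(tf.float32, [None, 224, 224, 3])
# features, end_points = create_inception(image_ph, is_training=False)
# with tf.Session() as sess:
#     sess.run(tf.global_variables_initializer())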
| 36.320513 | 143 | 0.676668 |
6aad74ee52655f68220f799efaffcbccdd0748ad | 6,133 | py | Python | timm/utils/checkpoint_saver.py | Robert-JunWang/pytorch-image-models | 7c67d6aca992f039eece0af5f7c29a43d48c00e4 | ["Apache-2.0"] | 17,769 | 2019-05-02T08:08:25.000Z | 2022-03-31T22:14:44.000Z | timm/utils/checkpoint_saver.py | jonychoi/pytorch-image-models | e4360e6125bb0bb4279785810c8eb33b40af3ebd | ["Apache-2.0"] | 556 | 2019-05-26T16:31:37.000Z | 2022-03-30T04:21:07.000Z | timm/utils/checkpoint_saver.py | jonychoi/pytorch-image-models | e4360e6125bb0bb4279785810c8eb33b40af3ebd | ["Apache-2.0"] | 3,029 | 2019-05-14T01:18:28.000Z | 2022-03-31T20:09:50.000Z |
""" Checkpoint Saver
Track top-n training checkpoints and maintain recovery checkpoints on specified intervals.
Hacked together by / Copyright 2020 Ross Wightman
"""
import glob
import operator
import os
import logging
import torch
from .model import unwrap_model, get_state_dict
_logger = logging.getLogger(__name__)
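# The CheckpointSaver class body is truncated in this dump; see timm for the real
# implementation. For illustration only, a minimal top-n tracker in the spirit of
# the docstring could look like this (class and method names are hypothetical):
class _TopNCheckpointSketch:
    def __init__(self, max_history=10, decreasing=False):
        self.max_history = max_history
        self.decreasing = decreasing  # True when a lower metric (e.g. loss) is better
        self.checkpoint_files = []    # list of (path, metric), best first

    def save_checkpoint(self, model, metric, save_path):
        torch.save({'state_dict': get_state_dict(model)}, save_path)
        self.checkpoint_files.append((save_path, metric))
        self.checkpoint_files.sort(key=lambda x: x[1], reverse=not self.decreasing)
        # Drop checkpoints that fell out of the top-n window
        for path, _ in self.checkpoint_files[self.max_history:]:
            if os.path.exists(path):
                os.remove(path)
        del self.checkpoint_files[self.max_history:]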
| 40.615894 | 104 | 0.626121 |
6aad9dd74183fdbafeb45c7c06a4bb4ab92534aa | 292 | py | Python | AGC004/AGC004a.py | VolgaKurvar/AtCoder | 21acb489f1594bbb1cdc64fbf8421d876b5b476d | ["Unlicense"] | null | null | null | AGC004/AGC004a.py | VolgaKurvar/AtCoder | 21acb489f1594bbb1cdc64fbf8421d876b5b476d | ["Unlicense"] | null | null | null | AGC004/AGC004a.py | VolgaKurvar/AtCoder | 21acb489f1594bbb1cdc64fbf8421d876b5b476d | ["Unlicense"] | null | null | null |
# AGC004a
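# def main() is truncated in this dump. AGC004 problem A ("Divide a Cuboid") is
# commonly solved as below; this reconstruction is an assumption, not the
# original author's code.
def main():
    a, b, c = map(int, input().split())
    if a % 2 == 0 or b % 2 == 0 or c % 2 == 0:
        print(0)
    else:
        print(min(a * b, b * c, c * a))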
if __name__ == '__main__':
main()
| 18.25 | 46 | 0.506849 |
6aae27568c85842fa9dbea1ace5c81d9190ab20e | 12,603 | py | Python | glance/tests/functional/test_api.py | arvindn05/glance | 055d15a6ba5d132f649156eac0fc91f4cd2813e4 | ["Apache-2.0"] | null | null | null | glance/tests/functional/test_api.py | arvindn05/glance | 055d15a6ba5d132f649156eac0fc91f4cd2813e4 | ["Apache-2.0"] | null | null | null | glance/tests/functional/test_api.py | arvindn05/glance | 055d15a6ba5d132f649156eac0fc91f4cd2813e4 | ["Apache-2.0"] | null | null | null |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Version-independent api tests"""
import httplib2
from oslo_serialization import jsonutils
from six.moves import http_client
from glance.tests import functional
# TODO(rosmaita): all the EXPERIMENTAL stuff in this file can be ripped out
# when v2.6 becomes CURRENT in Queens
| 39.261682 | 78 | 0.614933 |
6ab135f81cd0354b89240b44a37bacfa732bfab3 | 13,664 | py | Python | qcore/asserts.py | corey-sobel/qcore | 719a44617789e3cc384ce860031d9479ee0877e4 | [
"Apache-2.0"
] | 1 | 2022-01-31T23:15:48.000Z | 2022-01-31T23:15:48.000Z | qcore/asserts.py | corey-sobel/qcore | 719a44617789e3cc384ce860031d9479ee0877e4 | [
"Apache-2.0"
] | null | null | null | qcore/asserts.py | corey-sobel/qcore | 719a44617789e3cc384ce860031d9479ee0877e4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2016 Quora, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module with assertion helpers.
The advantages of using a method like
assert_eq(expected, actual)
instead of
assert expected == actual
include:
1 - On failures, assert_eq prints an informative message of the actual
values compared (e.g. AssertionError: 1 != 2) for free, which makes it
faster and easier to iterate on tests.
2 - In the context of refactors, basic asserts incorrectly shift the burden of
adding printouts and writing good test code to people refactoring code
rather than the person who initially wrote the code.
"""
__all__ = [
"assert_is",
"assert_is_not",
"assert_is_instance",
"assert_eq",
"assert_dict_eq",
"assert_ne",
"assert_gt",
"assert_ge",
"assert_lt",
"assert_le",
"assert_in",
"assert_not_in",
"assert_in_with_tolerance",
"assert_unordered_list_eq",
"assert_raises",
"AssertRaises",
# Strings
"assert_is_substring",
"assert_is_not_substring",
"assert_startswith",
"assert_endswith",
]
# The unittest.py testing framework checks for this variable in a module to
# filter out stack frames from that module from the test output, in order to
# make the output more concise.
# __unittest = 1
import traceback
from .inspection import get_full_name
_number_types = (int, float, complex)
def assert_is(expected, actual, message=None, extra=None):
"""Raises an AssertionError if expected is not actual."""
assert expected is actual, _assert_fail_message(
message, expected, actual, "is not", extra
)
def assert_is_not(expected, actual, message=None, extra=None):
"""Raises an AssertionError if expected is actual."""
assert expected is not actual, _assert_fail_message(
message, expected, actual, "is", extra
)
def assert_is_instance(value, types, message=None, extra=None):
"""Raises an AssertionError if value is not an instance of type(s)."""
assert isinstance(value, types), _assert_fail_message(
message, value, types, "is not an instance of", extra
)
def assert_eq(expected, actual, message=None, tolerance=None, extra=None):
"""Raises an AssertionError if expected != actual.
If tolerance is specified, raises an AssertionError if either
- expected or actual isn't a number, or
- the difference between expected and actual is larger than the tolerance.
"""
if tolerance is None:
assert expected == actual, _assert_fail_message(
message, expected, actual, "!=", extra
)
else:
assert isinstance(tolerance, _number_types), (
"tolerance parameter to assert_eq must be a number: %a" % tolerance
)
assert isinstance(expected, _number_types) and isinstance(
actual, _number_types
), "parameters must be numbers when tolerance is specified: %a, %a" % (
expected,
actual,
)
diff = abs(expected - actual)
assert diff <= tolerance, _assert_fail_message(
message, expected, actual, "is more than %a away from" % tolerance, extra
)
def assert_dict_eq(expected, actual, number_tolerance=None, dict_path=[]):
"""Asserts that two dictionaries are equal, producing a custom message if they are not."""
assert_is_instance(expected, dict)
assert_is_instance(actual, dict)
expected_keys = set(expected.keys())
actual_keys = set(actual.keys())
assert expected_keys <= actual_keys, "Actual dict at %s is missing keys: %a" % (
_dict_path_string(dict_path),
expected_keys - actual_keys,
)
assert actual_keys <= expected_keys, "Actual dict at %s has extra keys: %a" % (
_dict_path_string(dict_path),
actual_keys - expected_keys,
)
for k in expected_keys:
key_path = dict_path + [k]
assert_is_instance(
actual[k],
type(expected[k]),
extra="Types don't match for %s" % _dict_path_string(key_path),
)
assert_is_instance(
expected[k],
type(actual[k]),
extra="Types don't match for %s" % _dict_path_string(key_path),
)
if isinstance(actual[k], dict):
assert_dict_eq(
expected[k],
actual[k],
number_tolerance=number_tolerance,
dict_path=key_path,
)
elif isinstance(actual[k], _number_types):
assert_eq(
expected[k],
actual[k],
extra="Value doesn't match for %s" % _dict_path_string(key_path),
tolerance=number_tolerance,
)
else:
assert_eq(
expected[k],
actual[k],
extra="Value doesn't match for %s" % _dict_path_string(key_path),
)
def assert_ne(expected, actual, message=None, tolerance=None, extra=None):
"""Raises an AssertionError if expected == actual.
If tolerance is specified, raises an AssertionError if either
- expected or actual isn't a number, or
- the difference between expected and actual is smaller than the tolerance.
"""
if tolerance is None:
assert expected != actual, _assert_fail_message(
message, expected, actual, "==", extra
)
else:
assert isinstance(tolerance, _number_types), (
"tolerance parameter to assert_eq must be a number: %a" % tolerance
)
assert isinstance(expected, _number_types) and isinstance(
actual, _number_types
), "parameters must be numbers when tolerance is specified: %a, %a" % (
expected,
actual,
)
diff = abs(expected - actual)
assert diff > tolerance, _assert_fail_message(
message, expected, actual, "is less than %a away from" % tolerance, extra
)
def assert_gt(left, right, message=None, extra=None):
"""Raises an AssertionError if left_hand <= right_hand."""
assert left > right, _assert_fail_message(message, left, right, "<=", extra)
def assert_ge(left, right, message=None, extra=None):
"""Raises an AssertionError if left_hand < right_hand."""
assert left >= right, _assert_fail_message(message, left, right, "<", extra)
def assert_lt(left, right, message=None, extra=None):
"""Raises an AssertionError if left_hand >= right_hand."""
assert left < right, _assert_fail_message(message, left, right, ">=", extra)
def assert_le(left, right, message=None, extra=None):
"""Raises an AssertionError if left_hand > right_hand."""
assert left <= right, _assert_fail_message(message, left, right, ">", extra)
def assert_in(obj, seq, message=None, extra=None):
"""Raises an AssertionError if obj is not in seq."""
assert obj in seq, _assert_fail_message(message, obj, seq, "is not in", extra)
def assert_not_in(obj, seq, message=None, extra=None):
"""Raises an AssertionError if obj is in iter."""
# for very long strings, provide a truncated error
if isinstance(seq, str) and obj in seq and len(seq) > 200:
index = seq.find(obj)
start_index = index - 50
if start_index > 0:
truncated = "(truncated) ..."
else:
truncated = ""
start_index = 0
end_index = index + len(obj) + 50
truncated += seq[start_index:end_index]
if end_index < len(seq):
truncated += "... (truncated)"
assert False, _assert_fail_message(message, obj, truncated, "is in", extra)
assert obj not in seq, _assert_fail_message(message, obj, seq, "is in", extra)
def assert_in_with_tolerance(obj, seq, tolerance, message=None, extra=None):
"""Raises an AssertionError if obj is not in seq using assert_eq cmp."""
for i in seq:
try:
assert_eq(obj, i, tolerance=tolerance, message=message, extra=extra)
return
except AssertionError:
pass
assert False, _assert_fail_message(message, obj, seq, "is not in", extra)
def assert_unordered_list_eq(expected, actual, message=None):
"""Raises an AssertionError if the objects contained
in expected are not equal to the objects contained
in actual without regard to their order.
This takes quadratic time in the umber of elements in actual; don't use it for very long lists.
"""
missing_in_actual = []
missing_in_expected = list(actual)
for x in expected:
try:
missing_in_expected.remove(x)
except ValueError:
missing_in_actual.append(x)
if missing_in_actual or missing_in_expected:
if not message:
message = (
"%a not equal to %a; missing items: %a in expected, %a in actual."
% (expected, actual, missing_in_expected, missing_in_actual)
)
assert False, message
def assert_raises(fn, *expected_exception_types):
"""Raises an AssertionError if calling fn does not raise one of the expected_exception-types."""
with AssertRaises(*expected_exception_types):
fn()
# ===================================================
# Strings
# ===================================================
def assert_is_substring(substring, subject, message=None, extra=None):
"""Raises an AssertionError if substring is not a substring of subject."""
assert (
(subject is not None)
and (substring is not None)
and (subject.find(substring) != -1)
), _assert_fail_message(message, substring, subject, "is not in", extra)
def assert_is_not_substring(substring, subject, message=None, extra=None):
"""Raises an AssertionError if substring is a substring of subject."""
assert (
(subject is not None)
and (substring is not None)
and (subject.find(substring) == -1)
), _assert_fail_message(message, substring, subject, "is in", extra)
def assert_startswith(prefix, subject, message=None, extra=None):
"""Raises an AssertionError if the subject string does not start with prefix."""
assert (
(type(subject) is str)
and (type(prefix) is str)
and (subject.startswith(prefix))
), _assert_fail_message(message, subject, prefix, "does not start with", extra)
def assert_endswith(suffix, subject, message=None, extra=None):
"""Raises an AssertionError if the subject string does not end with suffix."""
assert (
(type(subject) is str) and (type(suffix) is str) and (subject.endswith(suffix))
), _assert_fail_message(message, subject, suffix, "does not end with", extra)
| 34.592405 | 100 | 0.640222 |
6ab1bd9218aece261b575574072df1d919112085 | 1,108 | py | Python | lib/galaxy/web/__init__.py | rikeshi/galaxy | c536a877e4a9b3d12aa0d00fd4d5e705109a0d0a | [
"CC-BY-3.0"
] | 4 | 2015-05-12T20:36:41.000Z | 2017-06-26T15:34:02.000Z | lib/galaxy/web/__init__.py | rikeshi/galaxy | c536a877e4a9b3d12aa0d00fd4d5e705109a0d0a | [
"CC-BY-3.0"
] | 52 | 2015-03-16T14:02:14.000Z | 2021-12-24T09:50:23.000Z | lib/galaxy/web/__init__.py | rikeshi/galaxy | c536a877e4a9b3d12aa0d00fd4d5e705109a0d0a | [
"CC-BY-3.0"
] | 1 | 2016-03-21T12:54:06.000Z | 2016-03-21T12:54:06.000Z | """
The Galaxy web application framework
"""
from .framework import url_for
from .framework.base import httpexceptions
from .framework.decorators import (
do_not_cache,
error,
expose,
expose_api,
expose_api_anonymous,
expose_api_anonymous_and_sessionless,
expose_api_raw,
expose_api_raw_anonymous,
expose_api_raw_anonymous_and_sessionless,
format_return_as_json,
json,
json_pretty,
legacy_expose_api,
legacy_expose_api_anonymous,
legacy_expose_api_raw,
legacy_expose_api_raw_anonymous,
require_admin,
require_login,
)
__all__ = ('FormBuilder', 'do_not_cache', 'error', 'expose', 'expose_api',
'expose_api_anonymous', 'expose_api_anonymous_and_sessionless',
'expose_api_raw', 'expose_api_raw_anonymous',
'expose_api_raw_anonymous_and_sessionless', 'form',
'format_return_as_json', 'httpexceptions', 'json', 'json_pretty',
'legacy_expose_api', 'legacy_expose_api_anonymous',
'legacy_expose_api_raw', 'legacy_expose_api_raw_anonymous',
'require_admin', 'require_login', 'url_for')
| 30.777778 | 74 | 0.737365 |
6ab21446cecd0d46b1a47275470353f326cec4d7 | 6,318 | py | Python | src/python/pants/core/goals/check_test.py | yoav-orca/pants | 995448e9add343975844c7a43d5d64618fc4e4d9 | [
"Apache-2.0"
] | 1,806 | 2015-01-05T07:31:00.000Z | 2022-03-31T11:35:41.000Z | src/python/pants/core/goals/check_test.py | yoav-orca/pants | 995448e9add343975844c7a43d5d64618fc4e4d9 | [
"Apache-2.0"
] | 9,565 | 2015-01-02T19:01:59.000Z | 2022-03-31T23:25:16.000Z | src/python/pants/core/goals/check_test.py | riisi/pants | b33327389fab67c47b919710ea32f20ca284b1a6 | [
"Apache-2.0"
] | 443 | 2015-01-06T20:17:57.000Z | 2022-03-31T05:28:17.000Z | # Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from abc import ABCMeta, abstractmethod
from pathlib import Path
from textwrap import dedent
from typing import ClassVar, Iterable, List, Optional, Tuple, Type
from pants.core.goals.check import Check, CheckRequest, CheckResult, CheckResults, check
from pants.core.util_rules.distdir import DistDir
from pants.engine.addresses import Address
from pants.engine.fs import Workspace
from pants.engine.target import FieldSet, MultipleSourcesField, Target, Targets
from pants.engine.unions import UnionMembership
from pants.testutil.option_util import create_options_bootstrapper
from pants.testutil.rule_runner import MockGet, RuleRunner, mock_console, run_rule_with_mocks
from pants.util.logging import LogLevel
def make_target(address: Optional[Address] = None) -> Target:
if address is None:
address = Address("", target_name="tests")
return MockTarget({}, address)
def run_typecheck_rule(
*, request_types: List[Type[CheckRequest]], targets: List[Target]
) -> Tuple[int, str]:
union_membership = UnionMembership({CheckRequest: request_types})
with mock_console(create_options_bootstrapper()) as (console, stdio_reader):
rule_runner = RuleRunner()
result: Check = run_rule_with_mocks(
check,
rule_args=[
console,
Workspace(rule_runner.scheduler, _enforce_effects=False),
Targets(targets),
DistDir(relpath=Path("dist")),
union_membership,
],
mock_gets=[
MockGet(
output_type=CheckResults,
input_type=CheckRequest,
mock=lambda field_set_collection: field_set_collection.check_results,
),
],
union_membership=union_membership,
)
assert not stdio_reader.get_stdout()
return result.exit_code, stdio_reader.get_stderr()
def test_invalid_target_noops() -> None:
exit_code, stderr = run_typecheck_rule(request_types=[InvalidRequest], targets=[make_target()])
assert exit_code == 0
assert stderr == ""
def test_summary() -> None:
good_address = Address("", target_name="good")
bad_address = Address("", target_name="bad")
exit_code, stderr = run_typecheck_rule(
request_types=[
ConditionallySucceedsRequest,
FailingRequest,
SkippedRequest,
SuccessfulRequest,
],
targets=[make_target(good_address), make_target(bad_address)],
)
assert exit_code == FailingRequest.exit_code([bad_address])
assert stderr == dedent(
"""\
ConditionallySucceedsChecker failed.
FailingChecker failed.
- SkippedChecker skipped.
SuccessfulChecker succeeded.
"""
)
def test_streaming_output_skip() -> None:
results = CheckResults([], checker_name="typechecker")
assert results.level() == LogLevel.DEBUG
assert results.message() == "typechecker skipped."
def test_streaming_output_success() -> None:
results = CheckResults([CheckResult(0, "stdout", "stderr")], checker_name="typechecker")
assert results.level() == LogLevel.INFO
assert results.message() == dedent(
"""\
typechecker succeeded.
stdout
stderr
"""
)
def test_streaming_output_failure() -> None:
results = CheckResults([CheckResult(18, "stdout", "stderr")], checker_name="typechecker")
assert results.level() == LogLevel.ERROR
assert results.message() == dedent(
"""\
typechecker failed (exit code 18).
stdout
stderr
"""
)
def test_streaming_output_partitions() -> None:
results = CheckResults(
[
CheckResult(21, "", "", partition_description="ghc8.1"),
CheckResult(0, "stdout", "stderr", partition_description="ghc9.2"),
],
checker_name="typechecker",
)
assert results.level() == LogLevel.ERROR
assert results.message() == dedent(
"""\
typechecker failed (exit code 21).
Partition #1 - ghc8.1:
Partition #2 - ghc9.2:
stdout
stderr
"""
)
| 28.459459 | 99 | 0.650681 |
6ab219191a7ea6ce5d831e0b7655a8775e4ac26e | 9,851 | py | Python | data-processing/entities/definitions/model/utils.py | alexkreidler/scholarphi | 86d26d0bfa5ded00760fba1a9c6891a94a3dd6d2 | [
"Apache-2.0"
] | null | null | null | data-processing/entities/definitions/model/utils.py | alexkreidler/scholarphi | 86d26d0bfa5ded00760fba1a9c6891a94a3dd6d2 | [
"Apache-2.0"
] | null | null | null | data-processing/entities/definitions/model/utils.py | alexkreidler/scholarphi | 86d26d0bfa5ded00760fba1a9c6891a94a3dd6d2 | [
"Apache-2.0"
] | 1 | 2020-10-23T12:36:11.000Z | 2020-10-23T12:36:11.000Z | import os
import random
from typing import Any, Dict, List, Union
import numpy as np
import torch
from colorama import Fore, Style
from sklearn.metrics import f1_score
from sklearn.metrics import precision_recall_fscore_support as score
from sklearn.metrics import precision_score, recall_score
def get_partial_match_metrics(
preds: List[List[str]], labels: List[List[str]]
) -> Dict[Any, Any]:
"""
Suppose there are N such pairs in the gold data and the system predicts M such pairs. Say a partial match happens when the system predicts a pair <term,defn> and there is some overlap (at least one token) between the predicted and gold term spans AND there is some overlap between the predicted and gold definition spans. Let X be the number of partial matches. What are
Partial match precision = P/M
Partial match recall = P/N
"""
assert len(preds) == len(labels)
both_in_preds, both_in_labels = [], []
partial_matches, exact_matches = [], []
for pred_sent, label_sent in zip(preds, labels):
simple_pred_sent = simplify_tokens(pred_sent)
simple_label_sent = simplify_tokens(label_sent)
# check whether term/def exist together
both_in_pred = "TERM" in simple_pred_sent and "DEF" in simple_pred_sent
both_in_label = "TERM" in simple_label_sent and "DEF" in simple_label_sent
both_in_preds.append(both_in_pred)
both_in_labels.append(both_in_label)
partial_match = False
exact_match = False
match: List[Union[str, bool]] = []
if both_in_pred and both_in_label:
for p, l in zip(simple_pred_sent, simple_label_sent):
if p == l:
match.append(p)
else:
match.append(False)
if "TERM" in match and "DEF" in match:
partial_match = True
if False not in match:
exact_match = True
partial_matches.append(partial_match)
exact_matches.append(exact_match)
count_both_in_preds = sum(both_in_preds) # N
count_both_in_labels = sum(both_in_labels) # M
count_partial_matches = sum(partial_matches) # P
count_exact_matches = sum(exact_matches) # E
partial_precision = count_partial_matches / count_both_in_preds
partial_recall = count_partial_matches / count_both_in_labels
partial_fscore = (
2 * partial_precision * partial_recall / (partial_precision + partial_recall)
)
exact_precision = count_exact_matches / count_both_in_preds
exact_recall = count_exact_matches / count_both_in_labels
exact_fscore = 2 * exact_precision * exact_recall / (exact_precision + exact_recall)
return {
"partial_match_precision": partial_precision,
"partial_match_recall": partial_recall,
"partial_match_f1": partial_fscore,
"exact_match_precision": exact_precision,
"excat_match_recall": exact_recall,
"excat_match_f1": exact_fscore,
}
def get_slot_simple_metrics(
preds: List[List[str]], labels: List[List[str]]
) -> Dict[Any, Any]:
"""
Conceptually, define the following new types of virtual tags
TERM = B-term OR I-Term (ie the union of those two tags)
DEF = B-Def OR I-Def
Now, what are the P,R & F1 numbers for TERM and DEF? (I think these matter because users may just care about accuracy of term and defn matching and the macro averaged scores conflate other things like recall on these metrics and precision on O. Likewise the current macro average treats missing the first word in a definition differently from skipping the last word.
"""
assert len(preds) == len(labels)
# flatten
preds_flattened = [p for ps in preds for p in ps]
labels_flattened = [l for ls in labels for l in ls]
# simplify by replacing {B,I}-TERM to TERM and {B,I}-DEF to DEF
simple_preds = simplify_tokens(preds_flattened)
simple_labels = simplify_tokens(labels_flattened)
assert len(simple_preds) == len(simple_labels)
label_names = ["O", "TERM", "DEF"]
p, r, f, s = score(simple_labels, simple_preds, average=None, labels=label_names)
s = [int(si) for si in s]
p = [round(float(pi), 3) for pi in p]
r = [round(float(pi), 3) for pi in r]
f = [round(float(pi), 3) for pi in f]
per_class = {"p": list(p), "r": list(r), "f": list(f), "s": list(s)}
# pprint(per_class)
return {
"slot_merged_TERM_precision": per_class["p"][1],
"slot_merged_TERM_recall": per_class["r"][1],
"slot_merged_TERM_f1": per_class["f"][1],
"slot_merged_DEFINITION_precision": per_class["p"][2],
"slot_merged_DEFINITION_recall": per_class["r"][2],
"slot_merged_DEFINITION_f1": per_class["f"][2],
}
def get_sentence_frame_acc(
intent_preds: List[str],
intent_labels: List[str],
slot_preds: List[List[str]],
slot_labels: List[List[str]],
) -> Dict[Any, Any]:
"""For the cases that intent and all the slots are correct (in one sentence)"""
# Get the intent comparison result
intent_result = intent_preds == intent_labels
# Get the slot comparision result
slot_result = []
for preds, labels in zip(slot_preds, slot_labels):
assert len(preds) == len(labels)
one_sent_result = True
for p, l in zip(preds, labels):
if p != l:
one_sent_result = False
break
slot_result.append(one_sent_result)
slot_result = np.array(slot_result)
sementic_acc = np.multiply(intent_result, slot_result).mean()
return {"sementic_frame_acc": sementic_acc}
| 35.952555 | 376 | 0.66308 |
6ab2ef53d9a0815c477ae2435981a3a0029d019b | 11,463 | py | Python | fire/trace.py | nvhoang55/python-fire | b78287f6d68208732ca4d91e57f4678e6c4747c7 | [
"Apache-2.0"
] | null | null | null | fire/trace.py | nvhoang55/python-fire | b78287f6d68208732ca4d91e57f4678e6c4747c7 | [
"Apache-2.0"
] | null | null | null | fire/trace.py | nvhoang55/python-fire | b78287f6d68208732ca4d91e57f4678e6c4747c7 | [
"Apache-2.0"
] | 1 | 2022-01-17T08:35:09.000Z | 2022-01-17T08:35:09.000Z | # Copyright (C) 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module has classes for tracing the execution of a Fire execution.
A FireTrace consists of a sequence of FireTraceElement objects. Each element
represents an action taken by Fire during a single Fire execution. An action may
be instantiating a class, calling a routine, or accessing a property.
Each action consumes args and results in a new component. The final component
is serialized to stdout by Fire as well as returned by the Fire method. If
a Fire usage error occurs, such as insufficient arguments being provided to call
a function, then that error will be captured in the trace and the final
component will be None.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pipes
from fire import inspectutils
INITIAL_COMPONENT = 'Initial component'
INSTANTIATED_CLASS = 'Instantiated class'
CALLED_ROUTINE = 'Called routine'
CALLED_CALLABLE = 'Called callable'
ACCESSED_PROPERTY = 'Accessed property'
COMPLETION_SCRIPT = 'Generated completion script'
INTERACTIVE_MODE = 'Entered interactive mode'
| 36.275316 | 84 | 0.640932 |
6ab36187809db9e1ba202abbdfa4e21a0d5b6dfb | 33,549 | py | Python | test/unit/__init__.py | thiagodasilva/swift | 0553d9333ed0045c4d209065b315533a33e5d7d7 | [
"Apache-2.0"
] | null | null | null | test/unit/__init__.py | thiagodasilva/swift | 0553d9333ed0045c4d209065b315533a33e5d7d7 | [
"Apache-2.0"
] | null | null | null | test/unit/__init__.py | thiagodasilva/swift | 0553d9333ed0045c4d209065b315533a33e5d7d7 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Swift tests """
from __future__ import print_function
import os
import copy
import logging
import errno
from six.moves import range
import sys
from contextlib import contextmanager, closing
from collections import defaultdict, Iterable
import itertools
from numbers import Number
from tempfile import NamedTemporaryFile
import time
import eventlet
from eventlet.green import socket
from tempfile import mkdtemp
from shutil import rmtree
from swift.common.utils import Timestamp, NOTICE
from test import get_config
from swift.common import swob, utils
from swift.common.ring import Ring, RingData
from hashlib import md5
import logging.handlers
from six.moves.http_client import HTTPException
from swift.common import storage_policy
from swift.common.storage_policy import (StoragePolicy, ECStoragePolicy,
VALID_EC_TYPES)
import functools
import six.moves.cPickle as pickle
from gzip import GzipFile
import mock as mocklib
import inspect
EMPTY_ETAG = md5().hexdigest()
# try not to import this module from swift
if not os.path.basename(sys.argv[0]).startswith('swift'):
# never patch HASH_PATH_SUFFIX AGAIN!
utils.HASH_PATH_SUFFIX = 'endcap'
EC_TYPE_PREFERENCE = [
'liberasurecode_rs_vand',
'jerasure_rs_vand',
]
for eclib_name in EC_TYPE_PREFERENCE:
if eclib_name in VALID_EC_TYPES:
break
else:
raise SystemExit('ERROR: unable to find suitable PyECLib type'
' (none of %r found in %r)' % (
EC_TYPE_PREFERENCE,
VALID_EC_TYPES,
))
DEFAULT_TEST_EC_TYPE = eclib_name
def write_fake_ring(path, *devs):
"""
Pretty much just a two node, two replica, 2 part power ring...
"""
dev1 = {'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
'port': 6000}
dev2 = {'id': 0, 'zone': 0, 'device': 'sdb1', 'ip': '127.0.0.1',
'port': 6000}
dev1_updates, dev2_updates = devs or ({}, {})
dev1.update(dev1_updates)
dev2.update(dev2_updates)
replica2part2dev_id = [[0, 1, 0, 1], [1, 0, 1, 0]]
devs = [dev1, dev2]
part_shift = 30
with closing(GzipFile(path, 'wb')) as f:
pickle.dump(RingData(replica2part2dev_id, devs, part_shift), f)
def readuntil2crlfs(fd):
rv = ''
lc = ''
crlfs = 0
while crlfs < 2:
c = fd.read(1)
if not c:
raise ValueError("didn't get two CRLFs; just got %r" % rv)
rv = rv + c
if c == '\r' and lc != '\n':
crlfs = 0
if lc == '\r' and c == '\n':
crlfs += 1
lc = c
return rv
def connect_tcp(hostport):
rv = socket.socket()
rv.connect(hostport)
return rv
xattr_data = {}
import xattr
xattr.setxattr = _setxattr
xattr.getxattr = _getxattr
def with_tempdir(f):
"""
Decorator to give a single test a tempdir as argument to test method.
"""
return wrapped
# logging.LogRecord.__init__ calls time.time
logging.time = UnmockTimeModule()
def debug_logger(name='test'):
"""get a named adapted debug logger"""
return DebugLogAdapter(DebugLogger(), name)
original_syslog_handler = logging.handlers.SysLogHandler
if utils.config_true_value(
get_config('unit_test').get('fake_syslog', 'False')):
fake_syslog_handler()
def fake_http_connect(*code_iter, **kwargs):
timestamps_iter = iter(kwargs.get('timestamps') or ['1'] * len(code_iter))
etag_iter = iter(kwargs.get('etags') or [None] * len(code_iter))
if isinstance(kwargs.get('headers'), (list, tuple)):
headers_iter = iter(kwargs['headers'])
else:
headers_iter = iter([kwargs.get('headers', {})] * len(code_iter))
if isinstance(kwargs.get('expect_headers'), (list, tuple)):
expect_headers_iter = iter(kwargs['expect_headers'])
else:
expect_headers_iter = iter([kwargs.get('expect_headers', {})] *
len(code_iter))
x = kwargs.get('missing_container', [False] * len(code_iter))
if not isinstance(x, (tuple, list)):
x = [x] * len(code_iter)
container_ts_iter = iter(x)
code_iter = iter(code_iter)
conn_id_and_code_iter = enumerate(code_iter)
static_body = kwargs.get('body', None)
body_iter = kwargs.get('body_iter', None)
if body_iter:
body_iter = iter(body_iter)
connect.code_iter = code_iter
return connect
| 32.104306 | 79 | 0.592924 |
6ab3a62e50f821717cc617bcae69096621bae1d3 | 10,138 | py | Python | fairseq/models/bart/model.py | samsontmr/fairseq | 1d50b6dcd961faaa74ee32e9d7a02ff76f16ab87 | [
"MIT"
] | 172 | 2019-08-22T14:20:25.000Z | 2022-02-16T07:38:12.000Z | fairseq/models/bart/model.py | samsontmr/fairseq | 1d50b6dcd961faaa74ee32e9d7a02ff76f16ab87 | [
"MIT"
] | 3 | 2019-08-30T11:56:15.000Z | 2020-10-02T13:57:49.000Z | fairseq/models/bart/model.py | samsontmr/fairseq | 1d50b6dcd961faaa74ee32e9d7a02ff76f16ab87 | [
"MIT"
] | 8 | 2019-10-15T04:36:43.000Z | 2020-10-21T01:50:09.000Z | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
BART: Denoising Sequence-to-Sequence Pre-training for
Natural Language Generation, Translation, and Comprehension
"""
import torch.nn as nn
from fairseq import utils
from fairseq.models import (
register_model,
register_model_architecture,
)
from fairseq.models.transformer import TransformerModel
from fairseq.modules.transformer_sentence_encoder import init_bert_params
from .hub_interface import BARTHubInterface
| 41.044534 | 111 | 0.641251 |
6ab43a62d7fe9c3851fb93216f017948e9264012 | 4,665 | py | Python | vagrant/optraj.istic.univ-rennes1.fr/src/system/Assignment.py | gpierre42/optraj | 53beb81c669093b866a786f2c1df9c663bbd7224 | [
"Apache-2.0"
] | null | null | null | vagrant/optraj.istic.univ-rennes1.fr/src/system/Assignment.py | gpierre42/optraj | 53beb81c669093b866a786f2c1df9c663bbd7224 | [
"Apache-2.0"
] | null | null | null | vagrant/optraj.istic.univ-rennes1.fr/src/system/Assignment.py | gpierre42/optraj | 53beb81c669093b866a786f2c1df9c663bbd7224 | [
"Apache-2.0"
] | null | null | null | # coding=utf8
'''
Created on 29 Oct 2013
@author: Nicolas Poirey
'''
from Worker import Worker
from Phase import Phase
| 34.555556 | 1,463 | 0.520043 |
6ab44cf57a381d44d95045fd0ee5b06ba9aa8eed | 2,257 | py | Python | tracker/view/error.py | cmm1107/arch-security-tracker | 7d5f7d69f2b02c056c3888a9b70132c29432a468 | [
"MIT"
] | null | null | null | tracker/view/error.py | cmm1107/arch-security-tracker | 7d5f7d69f2b02c056c3888a9b70132c29432a468 | [
"MIT"
] | null | null | null | tracker/view/error.py | cmm1107/arch-security-tracker | 7d5f7d69f2b02c056c3888a9b70132c29432a468 | [
"MIT"
] | null | null | null | from binascii import hexlify
from functools import wraps
from logging import error
from os import urandom
from random import randint
from flask import make_response
from flask import render_template
from werkzeug.exceptions import BadRequest
from werkzeug.exceptions import Forbidden
from werkzeug.exceptions import Gone
from werkzeug.exceptions import InternalServerError
from werkzeug.exceptions import MethodNotAllowed
from werkzeug.exceptions import NotFound
from config import get_debug_flag
from tracker import tracker
from tracker.symbol import smileys_sad
error_handlers = []
| 29.311688 | 116 | 0.717767 |
6ab4adb92969b15eb2c974889f086f66aef842c0 | 3,351 | py | Python | tb_plugin/torch_tb_profiler/profiler/trace.py | azhou-determined/kineto | 46ed0ce917c1515db29c39cd87b0c5430f5be94e | [
"BSD-3-Clause"
] | null | null | null | tb_plugin/torch_tb_profiler/profiler/trace.py | azhou-determined/kineto | 46ed0ce917c1515db29c39cd87b0c5430f5be94e | [
"BSD-3-Clause"
] | null | null | null | tb_plugin/torch_tb_profiler/profiler/trace.py | azhou-determined/kineto | 46ed0ce917c1515db29c39cd87b0c5430f5be94e | [
"BSD-3-Clause"
] | 2 | 2021-08-12T08:00:41.000Z | 2021-08-20T03:41:03.000Z | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# --------------------------------------------------------------------------
from enum import IntEnum
from .. import utils
__all__ = ["EventTypes", "create_event"]
logger = utils.get_logger()
DeviceType = IntEnum('DeviceType', ['CPU', 'CUDA'], start=0)
Supported_EventTypes = [v for k, v in vars(EventTypes).items() if not k.startswith("_") and v != EventTypes.PROFILER_STEP]
def create_event(event):
try:
type = event.get("ph")
if type == "X":
return create_trace_event(event)
elif type == "i" and event.get('s') == 't':
return MemoryEvent(EventTypes.MEMORY, event)
else:
return None
except Exception as ex:
logger.warning("Failed to parse profile event. Exception=%s. Event=%s", ex, event, exc_info=True)
raise
def create_trace_event(event):
category = event.get("cat")
if category == "Operator":
name = event.get("name")
if name and name.startswith("ProfilerStep#"):
return ProfilerStepEvent(event)
if category in Supported_EventTypes:
return TraceEvent(category, event)
else:
return None
| 27.925 | 122 | 0.578036 |
6ab4b36734b64370a49309c3224c455e6990233d | 59,130 | gyp | Python | base/base.gyp | eval1749/elang | 5208b386ba3a3e866a5c0f0271280f79f9aac8c4 | [
"Apache-2.0"
] | 1 | 2018-01-27T22:40:53.000Z | 2018-01-27T22:40:53.000Z | base/base.gyp | eval1749/elang | 5208b386ba3a3e866a5c0f0271280f79f9aac8c4 | [
"Apache-2.0"
] | 1 | 2016-01-29T00:54:49.000Z | 2016-01-29T00:54:49.000Z | base/base.gyp | eval1749/elang | 5208b386ba3a3e866a5c0f0271280f79f9aac8c4 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'includes': [
'../build/win_precompile.gypi',
'base.gypi',
],
'targets': [
{
'target_name': 'base',
'type': '<(component)',
'toolsets': ['host', 'target'],
'variables': {
'base_target': 1,
'enable_wexit_time_destructors': 1,
'optimize': 'max',
},
'dependencies': [
'base_static',
'allocator/allocator.gyp:allocator_extension_thunks',
'../testing/gtest.gyp:gtest_prod',
'../third_party/modp_b64/modp_b64.gyp:modp_b64',
'third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
],
# TODO(gregoryd): direct_dependent_settings should be shared with the
# 64-bit target, but it doesn't work due to a bug in gyp
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'conditions': [
['desktop_linux == 1 or chromeos == 1', {
'conditions': [
['chromeos==1', {
'sources/': [ ['include', '_chromeos\\.cc$'] ]
}],
],
'dependencies': [
'symbolize',
'xdg_mime',
],
'defines': [
'USE_SYMBOLIZE',
],
}, { # desktop_linux == 0 and chromeos == 0
'sources/': [
['exclude', '/xdg_user_dirs/'],
['exclude', '_nss\\.cc$'],
],
}],
['use_glib==1', {
'dependencies': [
'../build/linux/system.gyp:glib',
],
'export_dependent_settings': [
'../build/linux/system.gyp:glib',
],
}],
['OS == "android" and _toolset == "host"', {
# Always build base as a static_library for host toolset, even if
# we're doing a component build. Specifically, we only care about the
# target toolset using components since that's what developers are
# focusing on. In theory we should do this more generally for all
# targets when building for host, but getting the gyp magic
# per-toolset for the "component" variable is hard, and we really only
# need base on host.
'type': 'static_library',
# Base for host support is the minimum required to run the
# ssl false start blacklist tool. It requires further changes
# to generically support host builds (and tests).
# Note: when building for host, gyp has OS == "android",
# hence the *_android.cc files are included but the actual code
# doesn't have OS_ANDROID / ANDROID defined.
'conditions': [
['host_os == "mac"', {
'sources/': [
['exclude', '^native_library_linux\\.cc$'],
['exclude', '^process_util_linux\\.cc$'],
['exclude', '^sys_info_linux\\.cc$'],
['exclude', '^sys_string_conversions_linux\\.cc$'],
['exclude', '^worker_pool_linux\\.cc$'],
],
}],
],
}],
['OS == "android" and _toolset == "target"', {
'dependencies': [
'base_java',
'base_jni_headers',
'../build/android/ndk.gyp:cpu_features',
'../third_party/ashmem/ashmem.gyp:ashmem',
],
'link_settings': {
'libraries': [
'-llog',
],
},
'sources!': [
'debug/stack_trace_posix.cc',
],
}],
['os_bsd==1', {
'include_dirs': [
'/usr/local/include',
],
'link_settings': {
'libraries': [
'-L/usr/local/lib -lexecinfo',
],
},
}],
['OS == "linux"', {
'link_settings': {
'libraries': [
# We need rt for clock_gettime().
'-lrt',
# For 'native_library_linux.cc'
'-ldl',
],
},
'conditions': [
['use_allocator!="tcmalloc"', {
'defines': [
'NO_TCMALLOC',
],
'direct_dependent_settings': {
'defines': [
'NO_TCMALLOC',
],
},
}],
],
}],
['OS == "win"', {
# Specify delayload for base.dll.
'msvs_settings': {
'VCLinkerTool': {
'DelayLoadDLLs': [
'cfgmgr32.dll',
'powrprof.dll',
'setupapi.dll',
],
'AdditionalDependencies': [
'cfgmgr32.lib',
'powrprof.lib',
'setupapi.lib',
],
},
},
# Specify delayload for components that link with base.lib.
'all_dependent_settings': {
'msvs_settings': {
'VCLinkerTool': {
'DelayLoadDLLs': [
'cfgmgr32.dll',
'powrprof.dll',
'setupapi.dll',
],
'AdditionalDependencies': [
'cfgmgr32.lib',
'powrprof.lib',
'setupapi.lib',
],
},
},
},
'copies': [
{
'destination': '<(PRODUCT_DIR)/',
'files': [
'../build/win/dbghelp_xp/dbghelp.dll',
],
},
],
'dependencies': [
'trace_event/etw_manifest/etw_manifest.gyp:etw_manifest',
],
}],
['OS == "mac" or (OS == "ios" and _toolset == "host")', {
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/AppKit.framework',
'$(SDKROOT)/System/Library/Frameworks/ApplicationServices.framework',
'$(SDKROOT)/System/Library/Frameworks/Carbon.framework',
'$(SDKROOT)/System/Library/Frameworks/CoreFoundation.framework',
'$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
'$(SDKROOT)/System/Library/Frameworks/IOKit.framework',
'$(SDKROOT)/System/Library/Frameworks/Security.framework',
],
},
}],
['OS == "ios" and _toolset != "host"', {
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/CoreFoundation.framework',
'$(SDKROOT)/System/Library/Frameworks/CoreGraphics.framework',
'$(SDKROOT)/System/Library/Frameworks/CoreText.framework',
'$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
'$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
],
},
}],
['OS != "win" and (OS != "ios" or _toolset == "host")', {
'dependencies': ['../third_party/libevent/libevent.gyp:libevent'],
},],
['component=="shared_library"', {
'conditions': [
['OS=="win"', {
'sources!': [
'debug/debug_on_start_win.cc',
],
}],
],
}],
['OS=="ios"', {
'sources!': [
'sync_socket.h',
'sync_socket_posix.cc',
]
}],
],
'sources': [
'auto_reset.h',
'linux_util.cc',
'linux_util.h',
'message_loop/message_pump_android.cc',
'message_loop/message_pump_android.h',
'message_loop/message_pump_glib.cc',
'message_loop/message_pump_glib.h',
'message_loop/message_pump_io_ios.cc',
'message_loop/message_pump_io_ios.h',
'message_loop/message_pump_libevent.cc',
'message_loop/message_pump_libevent.h',
'message_loop/message_pump_mac.h',
'message_loop/message_pump_mac.mm',
'metrics/field_trial.cc',
'metrics/field_trial.h',
'posix/file_descriptor_shuffle.cc',
'posix/file_descriptor_shuffle.h',
'sync_socket.h',
'sync_socket_posix.cc',
'sync_socket_win.cc',
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
'third_party/xdg_user_dirs/xdg_user_dir_lookup.h',
],
'includes': [
'../build/android/increase_size_for_speed.gypi',
],
},
{
'target_name': 'base_i18n',
'type': '<(component)',
'variables': {
'enable_wexit_time_destructors': 1,
'optimize': 'max',
'base_i18n_target': 1,
},
'dependencies': [
'base',
'third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
'../third_party/icu/icu.gyp:icui18n',
'../third_party/icu/icu.gyp:icuuc',
],
'conditions': [
['OS == "win"', {
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [
4267,
],
}],
['icu_use_data_file_flag==1', {
'defines': ['ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_FILE'],
}, { # else icu_use_data_file_flag !=1
'conditions': [
['OS=="win"', {
'defines': ['ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_SHARED'],
}, {
'defines': ['ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_STATIC'],
}],
],
}],
['OS == "ios"', {
'toolsets': ['host', 'target'],
}],
],
'export_dependent_settings': [
'base',
'../third_party/icu/icu.gyp:icuuc',
'../third_party/icu/icu.gyp:icui18n',
],
'includes': [
'../build/android/increase_size_for_speed.gypi',
],
},
{
'target_name': 'base_message_loop_tests',
'type': 'static_library',
'dependencies': [
'base',
'../testing/gtest.gyp:gtest',
],
'sources': [
'message_loop/message_loop_test.cc',
'message_loop/message_loop_test.h',
],
},
{
'target_name': 'base_prefs',
'type': '<(component)',
'variables': {
'enable_wexit_time_destructors': 1,
'optimize': 'max',
},
'dependencies': [
'base',
],
'export_dependent_settings': [
'base',
],
'defines': [
'BASE_PREFS_IMPLEMENTATION',
],
'sources': [
'prefs/base_prefs_export.h',
'prefs/default_pref_store.cc',
'prefs/default_pref_store.h',
'prefs/json_pref_store.cc',
'prefs/json_pref_store.h',
'prefs/overlay_user_pref_store.cc',
'prefs/overlay_user_pref_store.h',
'prefs/persistent_pref_store.h',
'prefs/pref_change_registrar.cc',
'prefs/pref_change_registrar.h',
'prefs/pref_filter.h',
'prefs/pref_member.cc',
'prefs/pref_member.h',
'prefs/pref_notifier.h',
'prefs/pref_notifier_impl.cc',
'prefs/pref_notifier_impl.h',
'prefs/pref_observer.h',
'prefs/pref_registry.cc',
'prefs/pref_registry.h',
'prefs/pref_registry_simple.cc',
'prefs/pref_registry_simple.h',
'prefs/pref_service.cc',
'prefs/pref_service.h',
'prefs/pref_service_factory.cc',
'prefs/pref_service_factory.h',
'prefs/pref_store.cc',
'prefs/pref_store.h',
'prefs/pref_value_map.cc',
'prefs/pref_value_map.h',
'prefs/pref_value_store.cc',
'prefs/pref_value_store.h',
'prefs/scoped_user_pref_update.cc',
'prefs/scoped_user_pref_update.h',
'prefs/value_map_pref_store.cc',
'prefs/value_map_pref_store.h',
'prefs/writeable_pref_store.h',
],
'includes': [
'../build/android/increase_size_for_speed.gypi',
],
},
{
'target_name': 'base_prefs_test_support',
'type': 'static_library',
'dependencies': [
'base',
'base_prefs',
'../testing/gmock.gyp:gmock',
],
'sources': [
'prefs/mock_pref_change_callback.cc',
'prefs/pref_store_observer_mock.cc',
'prefs/pref_store_observer_mock.h',
'prefs/testing_pref_service.cc',
'prefs/testing_pref_service.h',
'prefs/testing_pref_store.cc',
'prefs/testing_pref_store.h',
],
},
{
# This is the subset of files from base that should not be used with a
# dynamic library. Note that this library cannot depend on base because
# base depends on base_static.
'target_name': 'base_static',
'type': 'static_library',
'variables': {
'enable_wexit_time_destructors': 1,
'optimize': 'max',
},
'toolsets': ['host', 'target'],
'sources': [
'base_switches.cc',
'base_switches.h',
'win/pe_image.cc',
'win/pe_image.h',
],
'include_dirs': [
'..',
],
'includes': [
'../build/android/increase_size_for_speed.gypi',
],
},
# Include this target for a main() function that simply instantiates
# and runs a base::TestSuite.
{
'target_name': 'run_all_unittests',
'type': 'static_library',
'dependencies': [
'test_support_base',
],
'sources': [
'test/run_all_unittests.cc',
],
},
{
'target_name': 'base_unittests',
'type': '<(gtest_target_type)',
'sources': [
'android/application_status_listener_unittest.cc',
'android/content_uri_utils_unittest.cc',
'android/jni_android_unittest.cc',
'android/jni_array_unittest.cc',
'android/jni_string_unittest.cc',
'android/library_loader/library_prefetcher_unittest.cc',
'android/path_utils_unittest.cc',
'android/scoped_java_ref_unittest.cc',
'android/sys_utils_unittest.cc',
'at_exit_unittest.cc',
'atomicops_unittest.cc',
'barrier_closure_unittest.cc',
'base64_unittest.cc',
'base64url_unittest.cc',
'big_endian_unittest.cc',
'bind_unittest.cc',
'bind_unittest.nc',
'bits_unittest.cc',
'build_time_unittest.cc',
'callback_helpers_unittest.cc',
'callback_list_unittest.cc',
'callback_list_unittest.nc',
'callback_unittest.cc',
'callback_unittest.nc',
'cancelable_callback_unittest.cc',
'command_line_unittest.cc',
'containers/adapters_unittest.cc',
'containers/hash_tables_unittest.cc',
'containers/linked_list_unittest.cc',
'containers/mru_cache_unittest.cc',
'containers/scoped_ptr_hash_map_unittest.cc',
'containers/small_map_unittest.cc',
'containers/stack_container_unittest.cc',
'cpu_unittest.cc',
'debug/crash_logging_unittest.cc',
'debug/debugger_unittest.cc',
'debug/leak_tracker_unittest.cc',
'debug/proc_maps_linux_unittest.cc',
'debug/stack_trace_unittest.cc',
'debug/task_annotator_unittest.cc',
'deferred_sequenced_task_runner_unittest.cc',
'environment_unittest.cc',
'feature_list_unittest.cc',
'file_version_info_unittest.cc',
'files/dir_reader_posix_unittest.cc',
'files/file_path_unittest.cc',
'files/file_path_watcher_unittest.cc',
'files/file_proxy_unittest.cc',
'files/file_unittest.cc',
'files/file_util_proxy_unittest.cc',
'files/file_util_unittest.cc',
'files/important_file_writer_unittest.cc',
'files/memory_mapped_file_unittest.cc',
'files/scoped_temp_dir_unittest.cc',
'gmock_unittest.cc',
'guid_unittest.cc',
'hash_unittest.cc',
'i18n/break_iterator_unittest.cc',
'i18n/case_conversion_unittest.cc',
'i18n/char_iterator_unittest.cc',
'i18n/file_util_icu_unittest.cc',
'i18n/icu_string_conversions_unittest.cc',
'i18n/message_formatter_unittest.cc',
'i18n/number_formatting_unittest.cc',
'i18n/rtl_unittest.cc',
'i18n/streaming_utf8_validator_unittest.cc',
'i18n/string_search_unittest.cc',
'i18n/time_formatting_unittest.cc',
'i18n/timezone_unittest.cc',
'id_map_unittest.cc',
'ios/crb_protocol_observers_unittest.mm',
'ios/device_util_unittest.mm',
'ios/weak_nsobject_unittest.mm',
'json/json_parser_unittest.cc',
'json/json_reader_unittest.cc',
'json/json_value_converter_unittest.cc',
'json/json_value_serializer_unittest.cc',
'json/json_writer_unittest.cc',
'json/string_escape_unittest.cc',
'lazy_instance_unittest.cc',
'logging_unittest.cc',
'mac/bind_objc_block_unittest.mm',
'mac/call_with_eh_frame_unittest.mm',
'mac/dispatch_source_mach_unittest.cc',
'mac/foundation_util_unittest.mm',
'mac/libdispatch_task_runner_unittest.cc',
'mac/mac_util_unittest.mm',
'mac/objc_property_releaser_unittest.mm',
'mac/scoped_nsobject_unittest.mm',
'mac/scoped_objc_class_swizzler_unittest.mm',
'mac/scoped_sending_event_unittest.mm',
'md5_unittest.cc',
'memory/aligned_memory_unittest.cc',
'memory/discardable_shared_memory_unittest.cc',
'memory/linked_ptr_unittest.cc',
'memory/memory_pressure_listener_unittest.cc',
'memory/memory_pressure_monitor_chromeos_unittest.cc',
'memory/memory_pressure_monitor_mac_unittest.cc',
'memory/memory_pressure_monitor_win_unittest.cc',
'memory/ref_counted_memory_unittest.cc',
'memory/ref_counted_unittest.cc',
'memory/scoped_ptr_unittest.cc',
'memory/scoped_ptr_unittest.nc',
'memory/scoped_vector_unittest.cc',
'memory/shared_memory_unittest.cc',
'memory/shared_memory_mac_unittest.cc',
'memory/singleton_unittest.cc',
'memory/weak_ptr_unittest.cc',
'memory/weak_ptr_unittest.nc',
'message_loop/message_loop_task_runner_unittest.cc',
'message_loop/message_loop_unittest.cc',
'message_loop/message_pump_glib_unittest.cc',
'message_loop/message_pump_io_ios_unittest.cc',
'message_loop/message_pump_libevent_unittest.cc',
'metrics/bucket_ranges_unittest.cc',
'metrics/field_trial_unittest.cc',
'metrics/histogram_base_unittest.cc',
'metrics/histogram_delta_serialization_unittest.cc',
'metrics/histogram_macros_unittest.cc',
'metrics/histogram_snapshot_manager_unittest.cc',
'metrics/histogram_unittest.cc',
'metrics/metrics_hashes_unittest.cc',
'metrics/sample_map_unittest.cc',
'metrics/sample_vector_unittest.cc',
'metrics/sparse_histogram_unittest.cc',
'metrics/statistics_recorder_unittest.cc',
'native_library_unittest.cc',
'numerics/safe_numerics_unittest.cc',
'observer_list_unittest.cc',
'os_compat_android_unittest.cc',
'path_service_unittest.cc',
'pickle_unittest.cc',
'posix/file_descriptor_shuffle_unittest.cc',
'posix/unix_domain_socket_linux_unittest.cc',
'power_monitor/power_monitor_unittest.cc',
'prefs/default_pref_store_unittest.cc',
'prefs/json_pref_store_unittest.cc',
'prefs/mock_pref_change_callback.h',
'prefs/overlay_user_pref_store_unittest.cc',
'prefs/pref_change_registrar_unittest.cc',
'prefs/pref_member_unittest.cc',
'prefs/pref_notifier_impl_unittest.cc',
'prefs/pref_service_unittest.cc',
'prefs/pref_value_map_unittest.cc',
'prefs/pref_value_store_unittest.cc',
'prefs/scoped_user_pref_update_unittest.cc',
'process/memory_unittest.cc',
'process/memory_unittest_mac.h',
'process/memory_unittest_mac.mm',
'process/process_metrics_unittest.cc',
'process/process_metrics_unittest_ios.cc',
'process/process_unittest.cc',
'process/process_util_unittest.cc',
'profiler/stack_sampling_profiler_unittest.cc',
'profiler/tracked_time_unittest.cc',
'rand_util_unittest.cc',
'scoped_clear_errno_unittest.cc',
'scoped_generic_unittest.cc',
'scoped_native_library_unittest.cc',
'security_unittest.cc',
'sequence_checker_unittest.cc',
'sha1_unittest.cc',
'stl_util_unittest.cc',
'strings/nullable_string16_unittest.cc',
'strings/pattern_unittest.cc',
'strings/safe_sprintf_unittest.cc',
'strings/string16_unittest.cc',
'strings/string_number_conversions_unittest.cc',
'strings/string_piece_unittest.cc',
'strings/string_split_unittest.cc',
'strings/string_tokenizer_unittest.cc',
'strings/string_util_unittest.cc',
'strings/stringize_macros_unittest.cc',
'strings/stringprintf_unittest.cc',
'strings/sys_string_conversions_mac_unittest.mm',
'strings/sys_string_conversions_unittest.cc',
'strings/utf_offset_string_conversions_unittest.cc',
'strings/utf_string_conversions_unittest.cc',
'supports_user_data_unittest.cc',
'sync_socket_unittest.cc',
'synchronization/cancellation_flag_unittest.cc',
'synchronization/condition_variable_unittest.cc',
'synchronization/lock_unittest.cc',
'synchronization/waitable_event_unittest.cc',
'synchronization/waitable_event_watcher_unittest.cc',
'sys_info_unittest.cc',
'system_monitor/system_monitor_unittest.cc',
'task/cancelable_task_tracker_unittest.cc',
'task_runner_util_unittest.cc',
'template_util_unittest.cc',
'test/histogram_tester_unittest.cc',
'test/test_pending_task_unittest.cc',
'test/test_reg_util_win_unittest.cc',
'test/trace_event_analyzer_unittest.cc',
'test/user_action_tester_unittest.cc',
'threading/non_thread_safe_unittest.cc',
'threading/platform_thread_unittest.cc',
'threading/sequenced_worker_pool_unittest.cc',
'threading/sequenced_task_runner_handle_unittest.cc',
'threading/simple_thread_unittest.cc',
'threading/thread_checker_unittest.cc',
'threading/thread_collision_warner_unittest.cc',
'threading/thread_id_name_manager_unittest.cc',
'threading/thread_local_storage_unittest.cc',
'threading/thread_local_unittest.cc',
'threading/thread_unittest.cc',
'threading/watchdog_unittest.cc',
'threading/worker_pool_posix_unittest.cc',
'threading/worker_pool_unittest.cc',
'time/pr_time_unittest.cc',
'time/time_unittest.cc',
'time/time_win_unittest.cc',
'timer/hi_res_timer_manager_unittest.cc',
'timer/mock_timer_unittest.cc',
'timer/timer_unittest.cc',
'tools_sanity_unittest.cc',
'tracked_objects_unittest.cc',
'tuple_unittest.cc',
'values_unittest.cc',
'version_unittest.cc',
'vlog_unittest.cc',
'win/dllmain.cc',
'win/enum_variant_unittest.cc',
'win/event_trace_consumer_unittest.cc',
'win/event_trace_controller_unittest.cc',
'win/event_trace_provider_unittest.cc',
'win/i18n_unittest.cc',
'win/iunknown_impl_unittest.cc',
'win/message_window_unittest.cc',
'win/object_watcher_unittest.cc',
'win/pe_image_unittest.cc',
'win/registry_unittest.cc',
'win/scoped_bstr_unittest.cc',
'win/scoped_comptr_unittest.cc',
'win/scoped_handle_unittest.cc',
'win/scoped_process_information_unittest.cc',
'win/scoped_variant_unittest.cc',
'win/shortcut_unittest.cc',
'win/startup_information_unittest.cc',
'win/win_util_unittest.cc',
'win/wrapped_window_proc_unittest.cc',
'<@(trace_event_test_sources)',
],
'dependencies': [
'base',
'base_i18n',
'base_message_loop_tests',
'base_prefs',
'base_prefs_test_support',
'base_static',
'run_all_unittests',
'test_support_base',
'third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
'../third_party/icu/icu.gyp:icui18n',
'../third_party/icu/icu.gyp:icuuc',
],
'includes': ['../build/nocompile.gypi'],
'variables': {
# TODO(ajwong): Is there a way to autodetect this?
'module_dir': 'base'
},
'conditions': [
['OS == "android"', {
'dependencies': [
'android/jni_generator/jni_generator.gyp:jni_generator_tests',
'../testing/android/native_test.gyp:native_test_native_code',
],
}],
['OS == "ios" and _toolset != "host"', {
'sources/': [
# iOS does not support FilePathWatcher.
['exclude', '^files/file_path_watcher_unittest\\.cc$'],
# Only test the iOS-meaningful portion of memory and process_utils.
['exclude', '^memory/discardable_shared_memory_unittest\\.cc$'],
['exclude', '^memory/shared_memory_unittest\\.cc$'],
['exclude', '^process/memory_unittest'],
['exclude', '^process/process_unittest\\.cc$'],
['exclude', '^process/process_util_unittest\\.cc$'],
['include', '^process/process_util_unittest_ios\\.cc$'],
# iOS does not use message_pump_libevent.
['exclude', '^message_loop/message_pump_libevent_unittest\\.cc$'],
],
'actions': [
{
'action_name': 'copy_test_data',
'variables': {
'test_data_files': [
'test/data',
],
'test_data_prefix': 'base',
},
'includes': [ '../build/copy_test_data_ios.gypi' ],
},
],
}],
['desktop_linux == 1 or chromeos == 1', {
'defines': [
'USE_SYMBOLIZE',
],
'sources!': [
'file_version_info_unittest.cc',
],
'conditions': [
[ 'desktop_linux==1', {
'sources': [
'nix/xdg_util_unittest.cc',
],
}],
],
}],
['use_glib == 1', {
'dependencies': [
'../build/linux/system.gyp:glib',
],
}, { # use_glib == 0
'sources!': [
'message_loop/message_pump_glib_unittest.cc',
]
}],
['use_ozone == 1', {
'sources!': [
'message_loop/message_pump_glib_unittest.cc',
]
}],
['OS == "linux"', {
'dependencies': [
'malloc_wrapper',
],
'conditions': [
['use_allocator!="none"', {
'dependencies': [
'allocator/allocator.gyp:allocator',
],
}],
]},
],
[ 'OS == "win" and target_arch == "x64"', {
'sources': [
'profiler/win32_stack_frame_unwinder_unittest.cc',
],
'dependencies': [
'base_profiler_test_support_library',
],
}],
['OS == "win"', {
'sources!': [
'file_descriptor_shuffle_unittest.cc',
'files/dir_reader_posix_unittest.cc',
'message_loop/message_pump_libevent_unittest.cc',
'threading/worker_pool_posix_unittest.cc',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [
4267,
],
'conditions': [
# This is needed so base_unittests uses the allocator shim, as
# SecurityTest.MemoryAllocationRestriction* tests are dependent
# on tcmalloc.
# TODO(wfh): crbug.com/246278 Move tcmalloc specific tests into
# their own test suite.
['win_use_allocator_shim==1', {
'dependencies': [
'allocator/allocator.gyp:allocator',
],
}],
['icu_use_data_file_flag==0', {
# This is needed to trigger the dll copy step on windows.
# TODO(mark): This should not be necessary.
'dependencies': [
'../third_party/icu/icu.gyp:icudata',
],
}],
],
}, { # OS != "win"
'dependencies': [
'../third_party/libevent/libevent.gyp:libevent'
],
}],
], # conditions
'target_conditions': [
['OS == "ios" and _toolset != "host"', {
'sources/': [
# Pull in specific Mac files for iOS (which have been filtered out
# by file name rules).
['include', '^mac/bind_objc_block_unittest\\.mm$'],
['include', '^mac/foundation_util_unittest\\.mm$',],
['include', '^mac/objc_property_releaser_unittest\\.mm$'],
['include', '^mac/scoped_nsobject_unittest\\.mm$'],
['include', '^sys_string_conversions_mac_unittest\\.mm$'],
],
}],
['OS == "android"', {
'sources/': [
['include', '^debug/proc_maps_linux_unittest\\.cc$'],
],
}],
# Enable more direct string conversions on platforms with native utf8
# strings
['OS=="mac" or OS=="ios" or <(chromeos)==1 or <(chromecast)==1', {
'defines': ['SYSTEM_NATIVE_UTF8'],
}],
# SyncSocket isn't used on iOS
['OS=="ios"', {
'sources!': [
'sync_socket_unittest.cc',
],
}],
], # target_conditions
},
{
# GN: //base:base_perftests
'target_name': 'base_perftests',
'type': '<(gtest_target_type)',
'dependencies': [
'base',
'test_support_base',
'../testing/gtest.gyp:gtest',
],
'sources': [
'message_loop/message_pump_perftest.cc',
'test/run_all_unittests.cc',
'threading/thread_perftest.cc',
'../testing/perf/perf_test.cc'
],
'conditions': [
['OS == "android"', {
'dependencies': [
'../testing/android/native_test.gyp:native_test_native_code',
],
}],
],
},
{
# GN: //base:base_i18n_perftests
'target_name': 'base_i18n_perftests',
'type': '<(gtest_target_type)',
'dependencies': [
'test_support_base',
'test_support_perf',
'../testing/gtest.gyp:gtest',
'base_i18n',
'base',
],
'sources': [
'i18n/streaming_utf8_validator_perftest.cc',
],
},
{
# GN: //base/test:test_support
'target_name': 'test_support_base',
'type': 'static_library',
'dependencies': [
'base',
'base_static',
'base_i18n',
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
'../third_party/icu/icu.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
],
'export_dependent_settings': [
'base',
],
'conditions': [
['os_posix==0', {
'sources!': [
'test/scoped_locale.cc',
'test/scoped_locale.h',
],
}],
['os_bsd==1', {
'sources!': [
'test/test_file_util_linux.cc',
],
}],
['OS == "android"', {
'dependencies': [
'base_unittests_jni_headers',
'base_java_unittest_support',
],
}],
['OS == "ios"', {
'toolsets': ['host', 'target'],
}],
],
'sources': [
'test/gtest_util.cc',
'test/gtest_util.h',
'test/gtest_xml_unittest_result_printer.cc',
'test/gtest_xml_unittest_result_printer.h',
'test/gtest_xml_util.cc',
'test/gtest_xml_util.h',
'test/histogram_tester.cc',
'test/histogram_tester.h',
'test/icu_test_util.cc',
'test/icu_test_util.h',
'test/ios/wait_util.h',
'test/ios/wait_util.mm',
'test/launcher/test_launcher.cc',
'test/launcher/test_launcher.h',
'test/launcher/test_result.cc',
'test/launcher/test_result.h',
'test/launcher/test_results_tracker.cc',
'test/launcher/test_results_tracker.h',
'test/launcher/unit_test_launcher.cc',
'test/launcher/unit_test_launcher.h',
'test/launcher/unit_test_launcher_ios.cc',
'test/mock_chrome_application_mac.h',
'test/mock_chrome_application_mac.mm',
'test/mock_devices_changed_observer.cc',
'test/mock_devices_changed_observer.h',
'test/mock_entropy_provider.cc',
'test/mock_entropy_provider.h',
'test/mock_log.cc',
'test/mock_log.h',
'test/multiprocess_test.cc',
'test/multiprocess_test.h',
'test/multiprocess_test_android.cc',
'test/null_task_runner.cc',
'test/null_task_runner.h',
'test/opaque_ref_counted.cc',
'test/opaque_ref_counted.h',
'test/perf_log.cc',
'test/perf_log.h',
'test/perf_test_suite.cc',
'test/perf_test_suite.h',
'test/perf_time_logger.cc',
'test/perf_time_logger.h',
'test/power_monitor_test_base.cc',
'test/power_monitor_test_base.h',
'test/scoped_locale.cc',
'test/scoped_locale.h',
'test/scoped_path_override.cc',
'test/scoped_path_override.h',
'test/sequenced_task_runner_test_template.cc',
'test/sequenced_task_runner_test_template.h',
'test/sequenced_worker_pool_owner.cc',
'test/sequenced_worker_pool_owner.h',
'test/simple_test_clock.cc',
'test/simple_test_clock.h',
'test/simple_test_tick_clock.cc',
'test/simple_test_tick_clock.h',
'test/task_runner_test_template.cc',
'test/task_runner_test_template.h',
'test/test_discardable_memory_allocator.cc',
'test/test_discardable_memory_allocator.h',
'test/test_file_util.cc',
'test/test_file_util.h',
'test/test_file_util_android.cc',
'test/test_file_util_linux.cc',
'test/test_file_util_mac.cc',
'test/test_file_util_posix.cc',
'test/test_file_util_win.cc',
'test/test_io_thread.cc',
'test/test_io_thread.h',
'test/test_listener_ios.h',
'test/test_listener_ios.mm',
'test/test_mock_time_task_runner.cc',
'test/test_mock_time_task_runner.h',
'test/test_pending_task.cc',
'test/test_pending_task.h',
'test/test_reg_util_win.cc',
'test/test_reg_util_win.h',
'test/test_shortcut_win.cc',
'test/test_shortcut_win.h',
'test/test_simple_task_runner.cc',
'test/test_simple_task_runner.h',
'test/test_suite.cc',
'test/test_suite.h',
'test/test_support_android.cc',
'test/test_support_android.h',
'test/test_support_ios.h',
'test/test_support_ios.mm',
'test/test_switches.cc',
'test/test_switches.h',
'test/test_timeouts.cc',
'test/test_timeouts.h',
'test/test_ui_thread_android.cc',
'test/test_ui_thread_android.h',
'test/thread_test_helper.cc',
'test/thread_test_helper.h',
'test/trace_event_analyzer.cc',
'test/trace_event_analyzer.h',
'test/trace_to_file.cc',
'test/trace_to_file.h',
'test/user_action_tester.cc',
'test/user_action_tester.h',
'test/values_test_util.cc',
'test/values_test_util.h',
],
'target_conditions': [
['OS == "ios"', {
'sources/': [
# Pull in specific Mac files for iOS (which have been filtered out
# by file name rules).
['include', '^test/test_file_util_mac\\.cc$'],
],
}],
['OS == "ios" and _toolset == "target"', {
'sources!': [
# iOS uses its own unit test launcher.
'test/launcher/unit_test_launcher.cc',
],
}],
['OS == "ios" and _toolset == "host"', {
'sources!': [
'test/launcher/unit_test_launcher_ios.cc',
'test/test_support_ios.h',
'test/test_support_ios.mm',
],
}],
], # target_conditions
},
{
'target_name': 'test_support_perf',
'type': 'static_library',
'dependencies': [
'base',
'test_support_base',
'../testing/gtest.gyp:gtest',
],
'sources': [
'test/run_all_perftests.cc',
],
'direct_dependent_settings': {
'defines': [
'PERF_TEST',
],
},
},
{
'target_name': 'test_launcher_nacl_nonsfi',
'conditions': [
['disable_nacl==0 and disable_nacl_untrusted==0 and enable_nacl_nonsfi_test==1', {
'type': 'static_library',
'sources': [
'test/launcher/test_launcher_nacl_nonsfi.cc',
],
'dependencies': [
'test_support_base',
],
}, {
'type': 'none',
}],
],
},
],
'conditions': [
['OS=="ios" and "<(GENERATOR)"=="ninja"', {
'targets': [
{
'target_name': 'test_launcher',
'toolsets': ['host'],
'type': 'executable',
'dependencies': [
'test_support_base',
],
'sources': [
'test/launcher/test_launcher_ios.cc',
],
},
],
}],
['OS!="ios"', {
'targets': [
{
# GN: //base:check_example
'target_name': 'check_example',
'type': 'executable',
'sources': [
'check_example.cc',
],
'dependencies': [
'base',
],
},
{
'target_name': 'build_utf8_validator_tables',
'type': 'executable',
'toolsets': ['host'],
'dependencies': [
'base',
'../third_party/icu/icu.gyp:icuuc',
],
'sources': [
'i18n/build_utf8_validator_tables.cc'
],
},
],
}],
['OS == "win" and target_arch=="ia32"', {
'targets': [
# The base_win64 target here allows us to use base for Win64 targets
# (the normal build is 32 bits).
{
'target_name': 'base_win64',
'type': '<(component)',
'variables': {
'base_target': 1,
},
'dependencies': [
'base_static_win64',
'allocator/allocator.gyp:allocator_extension_thunks_win64',
'../third_party/modp_b64/modp_b64.gyp:modp_b64_win64',
'third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations_win64',
'trace_event/etw_manifest/etw_manifest.gyp:etw_manifest',
],
# TODO(gregoryd): direct_dependent_settings should be shared with the
# 32-bit target, but it doesn't work due to a bug in gyp
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'defines': [
'BASE_WIN64',
'<@(nacl_win64_defines)',
],
'configurations': {
'Common_Base': {
'msvs_target_platform': 'x64',
},
},
'conditions': [
['component == "shared_library"', {
'sources!': [
'debug/debug_on_start_win.cc',
],
}],
],
# Specify delayload for base_win64.dll.
'msvs_settings': {
'VCLinkerTool': {
'DelayLoadDLLs': [
'cfgmgr32.dll',
'powrprof.dll',
'setupapi.dll',
],
'AdditionalDependencies': [
'cfgmgr32.lib',
'powrprof.lib',
'setupapi.lib',
],
},
},
# Specify delayload for components that link with base_win64.lib.
'all_dependent_settings': {
'msvs_settings': {
'VCLinkerTool': {
'DelayLoadDLLs': [
'cfgmgr32.dll',
'powrprof.dll',
'setupapi.dll',
],
'AdditionalDependencies': [
'cfgmgr32.lib',
'powrprof.lib',
'setupapi.lib',
],
},
},
},
# TODO(rvargas): Bug 78117. Remove this.
'msvs_disabled_warnings': [
4244,
4996,
4267,
],
'sources': [
'auto_reset.h',
'linux_util.cc',
'linux_util.h',
'md5.cc',
'md5.h',
'message_loop/message_pump_libevent.cc',
'message_loop/message_pump_libevent.h',
'metrics/field_trial.cc',
'metrics/field_trial.h',
'posix/file_descriptor_shuffle.cc',
'posix/file_descriptor_shuffle.h',
'sync_socket.h',
'sync_socket_posix.cc',
'sync_socket_win.cc',
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
'third_party/xdg_user_dirs/xdg_user_dir_lookup.h',
],
},
{
'target_name': 'base_i18n_nacl_win64',
'type': '<(component)',
# TODO(gregoryd): direct_dependent_settings should be shared with the
# 32-bit target, but it doesn't work due to a bug in gyp
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'defines': [
'<@(nacl_win64_defines)',
'BASE_I18N_IMPLEMENTATION',
],
'include_dirs': [
'..',
],
'sources': [
'i18n/icu_util_nacl_win64.cc',
],
'configurations': {
'Common_Base': {
'msvs_target_platform': 'x64',
},
},
},
{
# TODO(rvargas): Remove this when gyp finally supports a clean model.
# See bug 36232.
'target_name': 'base_static_win64',
'type': 'static_library',
'sources': [
'base_switches.cc',
'base_switches.h',
'win/pe_image.cc',
'win/pe_image.h',
],
'sources!': [
# base64.cc depends on modp_b64.
'base64.cc',
],
'include_dirs': [
'..',
],
'configurations': {
'Common_Base': {
'msvs_target_platform': 'x64',
},
},
'defines': [
'<@(nacl_win64_defines)',
],
# TODO(rvargas): Bug 78117. Remove this.
'msvs_disabled_warnings': [
4244,
],
},
],
}],
['OS == "win" and target_arch=="x64"', {
'targets': [
{
'target_name': 'base_profiler_test_support_library',
# Must be a shared library so that it can be unloaded during testing.
'type': 'shared_library',
'include_dirs': [
'..',
],
'sources': [
'profiler/test_support_library.cc',
],
},
]
}],
['os_posix==1 and OS!="mac" and OS!="ios"', {
'targets': [
{
'target_name': 'symbolize',
'type': 'static_library',
'toolsets': ['host', 'target'],
'variables': {
'chromium_code': 0,
},
'conditions': [
['OS == "solaris"', {
'include_dirs': [
'/usr/gnu/include',
'/usr/gnu/include/libelf',
],
},],
],
'cflags': [
'-Wno-sign-compare',
],
'cflags!': [
'-Wextra',
],
'defines': [
'GLOG_BUILD_CONFIG_INCLUDE="build/build_config.h"',
],
'sources': [
'third_party/symbolize/config.h',
'third_party/symbolize/demangle.cc',
'third_party/symbolize/demangle.h',
'third_party/symbolize/glog/logging.h',
'third_party/symbolize/glog/raw_logging.h',
'third_party/symbolize/symbolize.cc',
'third_party/symbolize/symbolize.h',
'third_party/symbolize/utilities.h',
],
'include_dirs': [
'..',
],
'includes': [
'../build/android/increase_size_for_speed.gypi',
],
},
{
'target_name': 'xdg_mime',
'type': 'static_library',
'toolsets': ['host', 'target'],
'variables': {
'chromium_code': 0,
},
'cflags!': [
'-Wextra',
],
'sources': [
'third_party/xdg_mime/xdgmime.c',
'third_party/xdg_mime/xdgmime.h',
'third_party/xdg_mime/xdgmimealias.c',
'third_party/xdg_mime/xdgmimealias.h',
'third_party/xdg_mime/xdgmimecache.c',
'third_party/xdg_mime/xdgmimecache.h',
'third_party/xdg_mime/xdgmimeglob.c',
'third_party/xdg_mime/xdgmimeglob.h',
'third_party/xdg_mime/xdgmimeicon.c',
'third_party/xdg_mime/xdgmimeicon.h',
'third_party/xdg_mime/xdgmimeint.c',
'third_party/xdg_mime/xdgmimeint.h',
'third_party/xdg_mime/xdgmimemagic.c',
'third_party/xdg_mime/xdgmimemagic.h',
'third_party/xdg_mime/xdgmimeparent.c',
'third_party/xdg_mime/xdgmimeparent.h',
],
'includes': [
'../build/android/increase_size_for_speed.gypi',
],
},
],
}],
['OS == "linux"', {
'targets': [
{
'target_name': 'malloc_wrapper',
'type': 'shared_library',
'dependencies': [
'base',
],
'sources': [
'test/malloc_wrapper.cc',
],
}
],
}],
['OS == "android"', {
'targets': [
{
# GN: //base:base_jni_headers
'target_name': 'base_jni_headers',
'type': 'none',
'sources': [
'android/java/src/org/chromium/base/ApkAssets.java',
'android/java/src/org/chromium/base/ApplicationStatus.java',
'android/java/src/org/chromium/base/AnimationFrameTimeHistogram.java',
'android/java/src/org/chromium/base/BuildInfo.java',
'android/java/src/org/chromium/base/CommandLine.java',
'android/java/src/org/chromium/base/ContentUriUtils.java',
'android/java/src/org/chromium/base/ContextUtils.java',
'android/java/src/org/chromium/base/CpuFeatures.java',
'android/java/src/org/chromium/base/EventLog.java',
'android/java/src/org/chromium/base/FieldTrialList.java',
'android/java/src/org/chromium/base/ImportantFileWriterAndroid.java',
'android/java/src/org/chromium/base/JNIUtils.java',
'android/java/src/org/chromium/base/JavaHandlerThread.java',
'android/java/src/org/chromium/base/LocaleUtils.java',
'android/java/src/org/chromium/base/MemoryPressureListener.java',
'android/java/src/org/chromium/base/PathService.java',
'android/java/src/org/chromium/base/PathUtils.java',
'android/java/src/org/chromium/base/PowerMonitor.java',
'android/java/src/org/chromium/base/SysUtils.java',
'android/java/src/org/chromium/base/SystemMessageHandler.java',
'android/java/src/org/chromium/base/ThreadUtils.java',
'android/java/src/org/chromium/base/TraceEvent.java',
'android/java/src/org/chromium/base/library_loader/LibraryLoader.java',
'android/java/src/org/chromium/base/metrics/RecordHistogram.java',
'android/java/src/org/chromium/base/metrics/RecordUserAction.java',
],
'variables': {
'jni_gen_package': 'base',
},
'dependencies': [
'android_runtime_jni_headers',
],
'includes': [ '../build/jni_generator.gypi' ],
},
{
# GN: //base:android_runtime_jni_headers
'target_name': 'android_runtime_jni_headers',
'type': 'none',
'variables': {
'jni_gen_package': 'base',
'input_java_class': 'java/lang/Runtime.class',
},
'includes': [ '../build/jar_file_jni_generator.gypi' ],
},
{
# GN: //base:base_unittests_jni_headers
'target_name': 'base_unittests_jni_headers',
'type': 'none',
'sources': [
'test/android/java/src/org/chromium/base/ContentUriTestUtils.java',
'test/android/java/src/org/chromium/base/TestUiThread.java',
],
'variables': {
'jni_gen_package': 'base',
},
'includes': [ '../build/jni_generator.gypi' ],
},
{
# GN: //base:base_native_libraries_gen
'target_name': 'base_native_libraries_gen',
'type': 'none',
'sources': [
'android/java/templates/NativeLibraries.template',
],
'variables': {
'package_name': 'org/chromium/base/library_loader',
'template_deps': [],
},
'includes': [ '../build/android/java_cpp_template.gypi' ],
},
{
# GN: //base:base_multidex_gen
'target_name': 'base_multidex_gen',
'type': 'none',
'sources': [
'android/java/templates/ChromiumMultiDex.template',
],
'variables': {
'package_name': 'org/chromium/base/multidex',
'template_deps': [],
'additional_gcc_preprocess_options': [
'--defines', 'MULTIDEX_CONFIGURATION_<(CONFIGURATION_NAME)',
],
},
'includes': ['../build/android/java_cpp_template.gypi'],
},
{
# GN: //base:base_android_java_enums_srcjar
'target_name': 'base_java_library_process_type',
'type': 'none',
'variables': {
'source_file': 'android/library_loader/library_loader_hooks.h',
},
'includes': [ '../build/android/java_cpp_enum.gypi' ],
},
{
# GN: //base:base_java
'target_name': 'base_java',
'type': 'none',
'variables': {
'java_in_dir': 'android/java',
'jar_excluded_classes': [ '*/NativeLibraries.class' ],
},
'dependencies': [
'base_java_application_state',
'base_java_library_load_from_apk_status_codes',
'base_java_library_process_type',
'base_java_memory_pressure_level',
'base_multidex_gen',
'base_native_libraries_gen',
'../third_party/android_tools/android_tools.gyp:android_support_multidex_javalib',
'../third_party/jsr-305/jsr-305.gyp:jsr_305_javalib',
],
'includes': [ '../build/java.gypi' ],
},
{
# GN: //base:base_java_unittest_support
'target_name': 'base_java_unittest_support',
'type': 'none',
'dependencies': [
'base_java',
],
'variables': {
'java_in_dir': '../base/test/android/java',
},
'includes': [ '../build/java.gypi' ],
},
{
# GN: //base:base_android_java_enums_srcjar
'target_name': 'base_java_application_state',
'type': 'none',
'variables': {
'source_file': 'android/application_status_listener.h',
},
'includes': [ '../build/android/java_cpp_enum.gypi' ],
},
{
# GN: //base:base_android_java_enums_srcjar
'target_name': 'base_java_library_load_from_apk_status_codes',
'type': 'none',
'variables': {
'source_file': 'android/library_loader/library_load_from_apk_status_codes.h'
},
'includes': [ '../build/android/java_cpp_enum.gypi' ],
},
{
# GN: //base:base_android_java_enums_srcjar
'target_name': 'base_java_memory_pressure_level',
'type': 'none',
'variables': {
'source_file': 'memory/memory_pressure_listener.h',
},
'includes': [ '../build/android/java_cpp_enum.gypi' ],
},
{
# GN: //base:base_java_test_support
'target_name': 'base_java_test_support',
'type': 'none',
'dependencies': [
'base_java',
'../testing/android/on_device_instrumentation.gyp:reporter_java',
],
'variables': {
'java_in_dir': '../base/test/android/javatests',
},
'includes': [ '../build/java.gypi' ],
},
{
# TODO(jbudorick): Remove this once we roll to robolectric 3.0 and pull
# in the multidex shadow library. crbug.com/522043
# GN: //base:base_junit_test_support
'target_name': 'base_junit_test_support',
'type': 'none',
'dependencies': [
'../testing/android/junit/junit_test.gyp:junit_test_support',
'../third_party/android_tools/android_tools.gyp:android_support_multidex_javalib',
],
'variables': {
'src_paths': [
'../base/test/android/junit/',
],
},
'includes': [ '../build/host_jar.gypi' ]
},
{
# GN: //base:base_junit_tests
'target_name': 'base_junit_tests',
'type': 'none',
'dependencies': [
'base_java',
'base_java_test_support',
'base_junit_test_support',
'../testing/android/junit/junit_test.gyp:junit_test_support',
],
'variables': {
'main_class': 'org.chromium.testing.local.JunitTestMain',
'src_paths': [
'../base/android/junit/',
],
},
'includes': [ '../build/host_jar.gypi' ],
},
{
# GN: //base:base_javatests
'target_name': 'base_javatests',
'type': 'none',
'dependencies': [
'base_java',
'base_java_test_support',
],
'variables': {
'java_in_dir': '../base/android/javatests',
},
'includes': [ '../build/java.gypi' ],
},
{
# GN: //base/android/linker:chromium_android_linker
'target_name': 'chromium_android_linker',
'type': 'shared_library',
'sources': [
'android/linker/android_dlext.h',
'android/linker/legacy_linker_jni.cc',
'android/linker/legacy_linker_jni.h',
'android/linker/linker_jni.cc',
'android/linker/linker_jni.h',
'android/linker/modern_linker_jni.cc',
'android/linker/modern_linker_jni.h',
],
# The crazy linker is never instrumented.
'cflags!': [
'-finstrument-functions',
],
'dependencies': [
# The NDK contains the crazy_linker here:
# '<(android_ndk_root)/crazy_linker.gyp:crazy_linker'
# However, we use our own fork. See bug 384700.
'../third_party/android_crazy_linker/crazy_linker.gyp:crazy_linker',
],
},
{
# GN: //base:base_perftests_apk
'target_name': 'base_perftests_apk',
'type': 'none',
'dependencies': [
'base_perftests',
],
'variables': {
'test_suite_name': 'base_perftests',
},
'includes': [ '../build/apk_test.gypi' ],
},
{
# GN: //base:base_unittests_apk
'target_name': 'base_unittests_apk',
'type': 'none',
'dependencies': [
'base_java',
'base_unittests',
],
'variables': {
'test_suite_name': 'base_unittests',
'isolate_file': 'base_unittests.isolate',
},
'includes': [ '../build/apk_test.gypi' ],
},
],
'conditions': [
['test_isolation_mode != "noop"',
{
'targets': [
{
'target_name': 'base_unittests_apk_run',
'type': 'none',
'dependencies': [
'base_unittests_apk',
],
'includes': [
'../build/isolate.gypi',
],
'sources': [
'base_unittests_apk.isolate',
],
},
]
}
],
],
}],
['OS == "win"', {
'targets': [
{
# Target to manually rebuild pe_image_test.dll which is checked into
# base/test/data/pe_image.
'target_name': 'pe_image_test',
'type': 'shared_library',
'sources': [
'win/pe_image_test.cc',
],
'msvs_settings': {
'VCLinkerTool': {
'SubSystem': '2', # Set /SUBSYSTEM:WINDOWS
'DelayLoadDLLs': [
'cfgmgr32.dll',
'shell32.dll',
],
'AdditionalDependencies': [
'cfgmgr32.lib',
'shell32.lib',
],
},
},
},
],
}],
['test_isolation_mode != "noop"', {
'targets': [
{
'target_name': 'base_unittests_run',
'type': 'none',
'dependencies': [
'base_unittests',
],
'includes': [
'../build/isolate.gypi',
],
'sources': [
'base_unittests.isolate',
],
},
],
}],
],
}
| 34.278261 | 96 | 0.532589 |
6ab5293b9595b159942c1bb0c1e2bfcef5e08aec | 1,029 | py | Python | solutions/PE4.py | KerimovEmil/ProjectEuler | bc9cb682181c1ac7889ee57c36d32beae7b441a8 | ["MIT"] | 1 | 2022-01-22T19:48:44.000Z | 2022-01-22T19:48:44.000Z | solutions/PE4.py | KerimovEmil/ProjectEuler | bc9cb682181c1ac7889ee57c36d32beae7b441a8 | ["MIT"] | null | null | null | solutions/PE4.py | KerimovEmil/ProjectEuler | bc9cb682181c1ac7889ee57c36d32beae7b441a8 | ["MIT"] | null | null | null
"""
PROBLEM
A palindromic number reads the same both ways. The largest palindrome made from the product of two 2-digit numbers
is 9009 = 91 × 99.
Find the largest palindrome made from the product of two 3-digit numbers.
ANSWER:
906609
Solve time ~ 0.760 seconds
"""
from itertools import product
import unittest
from util.utils import timeit
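# NOTE: the solution class between the imports and the test runner was lost
# in extraction. A hedged sketch that reproduces the stated answer (906609);
# the class and test names below are assumptions, not the original code:
class Problem4:
    """Largest palindrome made from the product of two num_digits-digit numbers."""

    def __init__(self, num_digits):
        self.lower = 10 ** (num_digits - 1)
        self.upper = 10 ** num_digits

    @timeit
    def solve(self):
        # Enumerate all pairwise products and keep the largest palindrome.
        candidates = (a * b for a, b in
                      product(range(self.lower, self.upper), repeat=2))
        return max(p for p in candidates if str(p) == str(p)[::-1])


class Solution4(unittest.TestCase):
    def test_solution(self):
        self.assertEqual(906609, Problem4(num_digits=3).solve())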
if __name__ == '__main__':
unittest.main()
| 21.4375 | 114 | 0.640428 |
6ab57006541d451df413f174cce8e16652508bf7 | 1,399 | py | Python | indexclient/parsers/info.py | uc-cdis/indexclient | 5d61bdb2cb9c0104f173d7bba43d92449a093c6d | ["Apache-2.0"] | 2 | 2020-02-19T15:52:13.000Z | 2021-10-30T12:06:22.000Z | indexclient/parsers/info.py | uc-cdis/indexclient | 5d61bdb2cb9c0104f173d7bba43d92449a093c6d | ["Apache-2.0"] | 20 | 2017-11-30T18:15:53.000Z | 2021-08-20T16:14:17.000Z | indexclient/parsers/info.py | uc-cdis/indexclient | 5d61bdb2cb9c0104f173d7bba43d92449a093c6d | ["Apache-2.0"] | 1 | 2019-01-31T21:07:50.000Z | 2019-01-31T21:07:50.000Z
import sys
import json
import logging
import argparse
import warnings
import requests
from indexclient import errors
# DEPRECATED 11/2019 -- interacts with old `/alias/` endpoint.
# For creating aliases for indexd records, prefer using
# the `add_alias` function, which interacts with the new
# `/index/{GUID}/aliases` endpoint.
def info(host, port, name, **kwargs):
"""
Retrieve info by name.
"""
warnings.warn(
(
"This function is deprecated. For creating aliases for indexd "
"records, prefer using the `add_alias_for_did` function, which "
"interacts with the new `/index/{GUID}/aliases` endpoint."
),
DeprecationWarning,
)
resource = "http://{host}:{port}/alias/{name}".format(
host=host, port=port, name=name
)
res = requests.get(resource)
try:
res.raise_for_status()
except Exception as err:
raise errors.BaseIndexError(res.status_code, res.text)
try:
doc = res.json()
except ValueError as err:
reason = json.dumps({"error": "invalid json payload returned"})
raise errors.BaseIndexError(res.status_code, reason)
sys.stdout.write(json.dumps(doc))
def config(parser):
"""
Configure the info command.
"""
parser.set_defaults(func=info)
parser.add_argument("name", help="name of information to retrieve")
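# Hedged usage sketch (not in the original): `config` is meant to be wired
# into an argparse sub-command, after which the dispatched `info` call
# prints the alias record as JSON. Host/port values are assumptions:
#
#   parser = argparse.ArgumentParser()
#   subparsers = parser.add_subparsers()
#   config(subparsers.add_parser("info"))
#   args = parser.parse_args(["info", "my-alias"])
#   args.func(host="localhost", port=8080, name=args.name)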
| 25.436364 | 76 | 0.653324 |
6ab5f637fa16cf262bdd8660fa9b73c3bc7980b2 | 1,151 | py | Python | email-worker-compose/app/sender.py | guilhermebc/docker-playground | e614c314ed2f5ab54835a8c45b4b3eec1ac4c57b | ["MIT"] | 1 | 2019-08-31T11:03:33.000Z | 2019-08-31T11:03:33.000Z | email-worker-compose/app/sender.py | guilhermebc/docker-playground | e614c314ed2f5ab54835a8c45b4b3eec1ac4c57b | ["MIT"] | 6 | 2020-09-07T03:12:42.000Z | 2022-03-02T05:25:57.000Z | email-worker-compose/app/sender.py | guilhermebc/docker-playground | e614c314ed2f5ab54835a8c45b4b3eec1ac4c57b | ["MIT"] | null | null | null
import psycopg2
import redis
import json
from bottle import Bottle, request
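# NOTE: the Sender class body was lost in extraction. A hedged sketch based
# on the surviving imports -- a Bottle app that records the message in
# Postgres (psycopg2) and enqueues it in Redis for a worker to deliver.
# Route, host, queue and column names below are assumptions:
class Sender(Bottle):
    def __init__(self):
        super().__init__()
        self.route('/', method='POST', callback=self.send)
        self.fila = redis.StrictRedis(host='queue', port=6379, db=0)

    def register_message(self, assunto, mensagem):
        # Persist the message, then hand it off to the worker via a Redis list.
        SQL = 'INSERT INTO emails (assunto, mensagem) VALUES (%s, %s)'
        conn = psycopg2.connect(dbname='email_sender', user='postgres', host='db')
        cur = conn.cursor()
        cur.execute(SQL, (assunto, mensagem))
        conn.commit()
        cur.close()
        self.fila.rpush('sender', json.dumps({'assunto': assunto, 'mensagem': mensagem}))

    def send(self):
        assunto = request.forms.get('assunto')
        mensagem = request.forms.get('mensagem')
        self.register_message(assunto, mensagem)
        return 'Mensagem enfileirada! Assunto: {}'.format(assunto)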
if __name__ == '__main__':
sender = Sender()
sender.run(host='0.0.0.0', port=8080, debug=True)
| 30.289474 | 70 | 0.617724 |
6ab5f90a0a47ff5a1e41ea47789a04eed55d4b77 | 8,949 | py | Python | tests/ximpl.py | zsimic/sandbox | 3d1571ca723d1a5e80ddecae0ad912160334fee9 | ["MIT"] | null | null | null | tests/ximpl.py | zsimic/sandbox | 3d1571ca723d1a5e80ddecae0ad912160334fee9 | ["MIT"] | null | null | null | tests/ximpl.py | zsimic/sandbox | 3d1571ca723d1a5e80ddecae0ad912160334fee9 | ["MIT"] | null | null | null
import click
import poyo
import ruamel.yaml
import runez
import strictyaml
import yaml as pyyaml
from zyaml import load_path, load_string, tokens_from_path, tokens_from_string
from zyaml.marshal import decode, default_marshal, represented_scalar
from . import TestSettings
def ruamel_passthrough_tags(loader, tag, node):
name = node.__class__.__name__
if "Seq" in name:
result = []
for v in node.value:
result.append(ruamel_passthrough_tags(loader, tag, v))
return result
if "Map" in name:
result = {}
for k, v in node.value:
k = ruamel_passthrough_tags(loader, tag, k)
v = ruamel_passthrough_tags(loader, tag, v)
result[k] = v
return result
return default_marshal(node.value)
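# Hedged usage sketch (not in the original): register the passthrough as a
# catch-all multi-constructor (empty tag prefix) so documents carrying
# unknown tags still deserialize to plain Python values:
#
#   yaml = ruamel.yaml.YAML(typ="safe")
#   yaml.constructor.add_multi_constructor("", ruamel_passthrough_tags)
#   data = yaml.load("!some/custom-tag {a: 1}")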
| 30.233108 | 124 | 0.598391 |
6ab606d6bade1bb254f8ee2b1905c9d3d07e2051 | 11,447 | py | Python | ai_analysis.py | kwangilkimkenny/chatbot_seq2seq_flask | f2f3bda9311c5f2930aebc8ae4a6497597b190e1 | ["MIT"] | null | null | null | ai_analysis.py | kwangilkimkenny/chatbot_seq2seq_flask | f2f3bda9311c5f2930aebc8ae4a6497597b190e1 | ["MIT"] | null | null | null | ai_analysis.py | kwangilkimkenny/chatbot_seq2seq_flask | f2f3bda9311c5f2930aebc8ae4a6497597b190e1 | ["MIT"] | null | null | null
import pandas as pd
import numpy as np
import re
import pickle
# plotting
import seaborn as sns
import matplotlib.pyplot as plt
# Tune learning_rate
from numpy import loadtxt
from xgboost import XGBClassifier
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import StratifiedKFold
# First XGBoost model for MBTI dataset
from numpy import loadtxt
from xgboost import XGBClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
##### Compute list of subject with Type | list of comments
from nltk.stem import PorterStemmer, WordNetLemmatizer
from nltk.corpus import stopwords
from nltk import word_tokenize
import nltk
nltk.download('wordnet')
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.manifold import TSNE
#
#
b_Pers = {'I':0, 'E':1, 'N':0, 'S':1, 'F':0, 'T':1, 'J':0, 'P':1}
#
b_Pers_list = [{0:'I', 1:'E'}, {0:'N', 1:'S'}, {0:'F', 1:'T'}, {0:'J', 1:'P'}]
# We want to remove these from the psosts
unique_type_list = ['INFJ', 'ENTP', 'INTP', 'INTJ', 'ENTJ', 'ENFJ', 'INFP', 'ENFP',
'ISFP', 'ISTP', 'ISFJ', 'ISTJ', 'ESTP', 'ESFP', 'ESTJ', 'ESFJ']
unique_type_list = [x.lower() for x in unique_type_list]
# Lemmatize
stemmer = PorterStemmer()
lemmatiser = WordNetLemmatizer()
# Cache the stop words for speed
cachedStopWords = stopwords.words("english")
# read data
# data = pd.read_csv('/Users/jongphilkim/Desktop/Django_WEB/essayfitaiproject_2020_12_09/essayai/mbti_1.csv')
data = pd.read_csv('./mbti/mbti_1.csv')
# get_types
data = data.join(data.apply (lambda row: get_types (row),axis=1))
# load
with open('./mbti/list_posts.pickle', 'rb') as f:
list_posts = pickle.load(f)
# load
with open('./mbti/list_personality.pickle', 'rb') as f:
list_personality = pickle.load(f)
# # Posts to a matrix of token counts
cntizer = CountVectorizer(analyzer="word",
max_features=1500,
tokenizer=None,
preprocessor=None,
stop_words=None,
max_df=0.7,
min_df=0.1)
# Learn the vocabulary dictionary and return term-document matrix
print("CountVectorizer...")
X_cnt = cntizer.fit_transform(list_posts)
#################################################
#save!!! model X_cnt
import pickle
# save
# with open('./essayai/ai_character/mbti/data_X_cnt.pickle', 'wb') as f:
# pickle.dump(X_cnt, f, pickle.HIGHEST_PROTOCOL)
# load
with open('./mbti/data_X_cnt.pickle', 'rb') as f:
X_cnt = pickle.load(f)
#################################################
# Transform the count matrix to a normalized tf or tf-idf representation
tfizer = TfidfTransformer()
print("Tf-idf...")
# Learn the idf vector (fit) and transform a count matrix to a tf-idf representation
X_tfidf = tfizer.fit_transform(X_cnt).toarray()
# load
with open('./mbti/data.pickle', 'rb') as f:
X_tfidf = pickle.load(f)
# my_posts = """Describe a place or environment where you are perfectly content. What do you do or experience there, and why is it meaningful to you? 644 words out of 650 Gettysburg, a small town in the middle of Pennsylvania, was the sight of the largest, bloodiest battle in the Civil War. Something about these hallowed grounds draws me back every year for a three day camping trip with my family over Labor Day weekend. Every year, once school starts, I count the days until I take that three and half hour drive from Pittsburgh to Gettysburg. Each year, we leave after school ends on Friday and arrive in Gettysburg with just enough daylight to pitch the tents and cook up a quick dinner on the campfire. As more of the extended family arrives, we circle around the campfire and find out what is new with everyone. The following morning, everyone is up by nine and helping to make breakfast which is our best meal of the day while camping. Breakfast will fuel us for the day as we hike the vast battlefields. My Uncle Mark, my twin brother, Andrew, and I like to take charge of the family tour since we have the most passion and knowledge about the battle. I have learned so much from the stories Mark tells us while walking on the tours. Through my own research during these last couple of trips, I did some of the explaining about the events that occurred during the battle 150 years ago. My fondest experience during one trip was when we decided to go off of the main path to find a carving in a rock from a soldier during the battle. Mark had read about the carving in one of his books about Gettysburg, and we were determined to locate it. After almost an hour of scanning rocks in the area, we finally found it with just enough daylight to read what it said. After a long day of exploring the battlefield, we went back to the campsite for some 'civil war' stew. There is nothing special about the stew, just meat, vegetables and gravy, but for whatever reason, it is some of the best stew I have ever eaten. For the rest of the night, we enjoy the company of our extended family. My cousins, my brother and I listen to the stories from Mark and his friends experiences' in the military. After the parents have gone to bed, we stay up talking with each other, inching closer and closer to the fire as it gets colder. Finally, we creep back into our tents, trying to be as quiet as possible to not wake our parents. The next morning we awake red-eyed from the lack of sleep and cook up another fantastic breakfast. Unfortunately, after breakfast we have to pack up and head back to Pittsburgh. It will be another year until I visit Gettysburg again. There is something about that time I spend in Gettysburg that keeps me coming back to visit. For one, it is just a fun, relaxing time I get to spend with my family. This trip also fulfills my love for the outdoors. From sitting by the campfire and falling asleep to the chirp of the crickets, that is my definition of a perfect weekend. Gettysburg is also an interesting place to go for Civil War buffs like me. While walking down the Union line or walking Pickett's Charge, I imagine how the battle would have been played out around me. Every year when I visit Gettysburg, I learn more facts and stories about the battle, soldiers and generally about the Civil War. While I am in Gettysburg, I am perfectly content, passionate about the history and just enjoying the great outdoors with my family. 
This drive to learn goes beyond just my passion for history but applies to all of the math, science and business classes I have taken and clubs I am involved in at school. Every day, I am genuinely excited to learn.
# """
# test = mbti_classify(my_posts)
# print ('check')
# test
# print ('check2')
| 41.625455 | 3,675 | 0.653621 |
6ab6c4226f6262d47cafb69a5403744916d50994 | 17,464 | py | Python | mummi_ras/online/aa/aa_get_tiltrot_z_state.py | mummi-framework/mummi-ras | 7f4522aad36661e4530e39c830ab8c2a6f134060 | ["MIT"] | 4 | 2021-11-16T07:16:36.000Z | 2022-02-16T23:33:46.000Z | mummi_ras/online/aa/aa_get_tiltrot_z_state.py | mummi-framework/mummi-ras | 7f4522aad36661e4530e39c830ab8c2a6f134060 | ["MIT"] | 1 | 2021-11-23T20:23:28.000Z | 2021-12-03T09:08:34.000Z | mummi_ras/online/aa/aa_get_tiltrot_z_state.py | mummi-framework/mummi-ras | 7f4522aad36661e4530e39c830ab8c2a6f134060 | ["MIT"] | 2 | 2021-11-23T19:54:59.000Z | 2022-02-16T23:32:17.000Z
###############################################################################
# @todo add Pilot2-splash-app disclaimer
###############################################################################
""" Get's KRAS states """
import MDAnalysis as mda
from MDAnalysis.analysis import align
from MDAnalysis.lib.mdamath import make_whole
import os
import numpy as np
import math
############## Below section needs to be uncommented ############
import mummi_core
import mummi_ras
from mummi_core.utils import Naming
# # Logger has to be initialized the first thing in the script
from logging import getLogger
LOGGER = getLogger(__name__)
# # Initialize MuMMI if it has not been done before
# MUMMI_ROOT = mummi.init(True)
# This is needed so the Naming works below
#@TODO fix this so we don't have these on import; make them an init step
mummi_core.init()
dirKRASStates = Naming.dir_res('states')
dirKRASStructures = Naming.dir_res('structures')
# #RAS_ONLY_macrostate = np.loadtxt(os.path.join(dirKRASStates, "RAS-ONLY.microstates.txt"))
RAS_ONLY_macrostate = np.loadtxt(os.path.join(dirKRASStates, "ras-states.txt"),comments='#')
# #RAS_RAF_macrostate = np.loadtxt(os.path.join(dirKRASStates, "RAS-RAF.microstates.txt"))
RAS_RAF_macrostate = np.loadtxt(os.path.join(dirKRASStates, "ras-raf-states.txt"),comments='#') # Note diffrent number of columns so index change below
# TODO: CS, my edits to test
# RAS_ONLY_macrostate = np.loadtxt('ras-states.txt')
# RAS_RAF_macrostate = np.loadtxt('ras-raf-states.txt')
############## above section needs to be uncommented ############
# TODO: CS, my edits to test
# TODO: TSC, The reference structure has to currently be set as the 'RAS-ONLY-reference-structure.gro'
# TODO: TSC, path to the reference structure is: mummi_resources/structures/
kras_ref_universe = mda.Universe(os.path.join(dirKRASStructures, "RAS-ONLY-reference-structure.gro"))
# kras_ref_universe = mda.Universe("RAS-ONLY-reference-structure.gro")
# kras_ref_universe = mda.Universe('AA_pfpatch_000000004641_RAS_RAF2_411.gro')
# TODO: CS, not using these for x4 proteins; instead using protein_systems below to set num_res
######### Below hard codes the number of residues within RAS-only and RAS-RAF ##########
RAS_only_num_res = 184
RAS_RAF_num_res = 320
######### Above hard codes the number of residues within RAS-only and RAS-RAF ##########
####### This can be removed
# def get_kras(syst, kras_start):
# """Gets all atoms for a KRAS protein starting at 'kras_start'."""
# return syst.atoms[kras_start:kras_start+428]
####### This can be removed
def get_segids(u):
"""Identifies the list of segments within the system. Only needs to be called x1 time"""
segs = u.segments
segs = segs.segids
ras_segids = []
rasraf_segids = []
for i in range(len(segs)):
# print(segs[i])
if segs[i][-3:] == 'RAS':
ras_segids.append(segs[i])
if segs[i][-3:] == 'RAF':
rasraf_segids.append(segs[i])
return ras_segids, rasraf_segids
def get_protein_info(u,tag):
"""Uses the segments identified in get_segids to make a list of all proteins in the systems.\
Outputs a list of the first residue number of the protein, and whether it is 'RAS-ONLY', or 'RAS-RAF'.\
The 'tag' input defines what is used to identify the first residue of the protein. i.e. 'resname ACE1 and name BB'.\
Only needs to be called x1 time"""
ras_segids, rasraf_segids = get_segids(u)
if len(ras_segids) > 0:
RAS = u.select_atoms('segid '+ras_segids[0]+' and '+str(tag))
else:
RAS = []
if len(rasraf_segids) > 0:
RAF = u.select_atoms('segid '+rasraf_segids[0]+' and '+str(tag))
else:
RAF = []
protein_info = []#np.empty([len(RAS)+len(RAF),2])
for i in range(len(RAS)):
protein_info.append((RAS[i].resid,'RAS-ONLY'))
for i in range(len(RAF)):
protein_info.append((RAF[i].resid,'RAS-RAF'))
######## sort protein info
protein_info = sorted(protein_info)
######## sort protein info
return protein_info
def get_ref_kras():
"""Gets the reference KRAS struct. Only called x1 time when class is loaded"""
start_of_g_ref = kras_ref_universe.residues[0].resid
ref_selection = 'resid '+str(start_of_g_ref)+':'+str(start_of_g_ref+24)+' ' +\
str(start_of_g_ref+38)+':'+str(start_of_g_ref+54)+' ' +\
str(start_of_g_ref+67)+':'+str(start_of_g_ref+164)+' ' +\
'and (name CA or name BB)'
r2_26r40_56r69_166_ref = kras_ref_universe.select_atoms(str(ref_selection))
return kras_ref_universe.select_atoms(str(ref_selection)).positions - kras_ref_universe.select_atoms(str(ref_selection)).center_of_mass()
# Load initial ref frames (only need to do this once)
ref0 = get_ref_kras()
def getKRASstates(u,kras_indices):
"""Gets states for all KRAS proteins in path."""
# res_shift = 8
# all_glycine = u.select_atoms("resname GLY")
# kras_indices = []
# for i in range(0, len(all_glycine), 26):
# kras_indices.append(all_glycine[i].index)
########## Below is taken out of the function so it is only done once #########
# kras_indices = get_protein_info(u,'resname ACE1 and name BB')
########## Above is taken out of the function so it is only done once #########
# CS, for x4 cases:
# [{protein_x4: (protein_type, num_res)}]
protein_systems = [{'ras4a': ('RAS-ONLY', 185),
'ras4araf': ('RAS-RAF', 321),
'ras': ('RAS-ONLY', 184),
'rasraf': ('RAS-RAF', 320)}]
ALLOUT = []
for k in range(len(kras_indices)):
start_of_g = kras_indices[k][0]
protein_x4 = str(kras_indices[k][1])
try:
protein_type = [item[protein_x4] for item in protein_systems][0][0] # 'RAS-ONLY' OR 'RAS-RAF'
num_res = [item[protein_x4] for item in protein_systems][0][1]
except:
LOGGER.error('Check KRas naming between modules')
raise Exception('Error: unknown KRas name')
# TODO: CS, replacing this comment section with the above, to handle x4 protein types
# ---------------------------------------
# ALLOUT = []
# for k in range(len(kras_indices)):
# start_of_g = kras_indices[k][0]
# protein_type = str(kras_indices[k][1])
# ########## BELOW SECTION TO DETERMINE WHICH RESIDUES ARE PART OF THE PROTEIN GROUP - NEEDED FOR PBC REMOVAL ##############
# ########## POTENTIALLY REDO WITH A 'HARD-CODED' NUMBER OF RESIDUES PER PROTEIN GROUP (WHETHER RAS-ONLY OR RAS-RAF) #######
# ########## HAS BEEN REDONE WITH A 'HARD-CODED' NUMBER OF RESIDUES PER PROTEIN GROUP (WHETHER RAS-ONLY OR RAS-RAF) ########
# # if len(kras_indices) == 1:
# # krases0_BB = u.select_atoms('resid '+str(start_of_g)+':'+str(len(u.residues))+' and name BB') ####### HAS TO BE FIXED FOR BACKBONE ATOMS FOR SPECIFIC PROTEIN
# # elif len(kras_indices) > 1:
# # if k == len(kras_indices)-1:
# # krases0_BB = u.select_atoms('resid '+str(start_of_g)+':'+str(len(u.residues))+' and name BB')
# # else:
# # krases0_BB = u.select_atoms('resid '+str(start_of_g)+':'+str(kras_indices[k+1][0])+' and name BB')
# ########## ABOVE SECTION TO DETERMINE WHICH RESIDUES ARE PART OF THE PROTEIN GROUP - NEEDED FOR PBC REMOVAL ##############
#
# ########## Below hard codes the number of residues/beads in the RAS-ONLY and RAS-RAF simulations #########################
# if protein_type == 'RAS-ONLY':
# num_res = RAS_only_num_res
# elif protein_type == 'RAS-RAF':
# num_res = RAS_RAF_num_res
# ########## Above hard codes the number of residues/beads in the RAS-ONLY and RAS-RAF simulations #########################
# ---------------------------------------
# TODO: TSC, I changed the selection below, which can be used for the make_whole...
# krases0_BB = u.select_atoms('resid '+str(start_of_g)+':'+str(start_of_g+num_res)+' and (name CA or name BB)')
krases0_BB = u.select_atoms('resid '+str(start_of_g)+':'+str(start_of_g+num_res))
krases0_BB.guess_bonds()
r2_26r40_56r69_166 = u.select_atoms('resid '+str(start_of_g)+':'+str(start_of_g+24)+' ' +\
str(start_of_g+38)+':'+str(start_of_g+54)+' ' +\
str(start_of_g+67)+':'+str(start_of_g+164)+\
' and (name CA or name BB)')
u_selection = \
'resid '+str(start_of_g)+':'+str(start_of_g+24)+' '+str(start_of_g+38)+':'+str(start_of_g+54)+' ' +\
str(start_of_g+67)+':'+str(start_of_g+164)+' and (name CA or name BB)'
mobile0 = u.select_atoms(str(u_selection)).positions - u.select_atoms(str(u_selection)).center_of_mass()
# TODO: CS, something wrong with ref0 from get_kras_ref()
# just making ref0 = mobile0 to test for now
# ref0 = mobile0
# TSC removed this
R, RMSD_junk = align.rotation_matrix(mobile0, ref0)
######## TODO: TSC, Adjusted for AA lipid names ########
# lipids = u.select_atoms('resname POPX POPC PAPC POPE DIPE DPSM PAPS PAP6 CHOL')
lipids = u.select_atoms('resname POPC PAPC POPE DIPE SSM PAPS SAPI CHL1')
coords = ref0
RotMat = []
OS = []
r152_165 = krases0_BB.select_atoms('resid '+str(start_of_g+150)+':'+str(start_of_g+163)+' and (name CA or name BB)')
r65_74 = krases0_BB.select_atoms('resid '+str(start_of_g+63)+':'+str(start_of_g+72)+' and (name CA or name BB)')
timeframes = []
# TODO: CS, for AA need bonds to run make_whole()
# krases0_BB.guess_bonds()
# TODO: CS, turn off for now to test beyond this point
''' *** for AA, need to bring that back on once all else runs ***
'''
# @Tim and Chris S. this was commented out - please check.
#make_whole(krases0_BB)
j, rmsd_junk = mda.analysis.align.rotation_matrix((r2_26r40_56r69_166.positions-r2_26r40_56r69_166.center_of_mass()), coords)
RotMat.append(j)
OS.append(r65_74.center_of_mass()-r152_165.center_of_mass())
timeframes.append(u.trajectory.time)
if protein_type == 'RAS-RAF':
z_pos = []
############### NEED TO CONFIRM THE SELECTION OF THE RAF LOOP RESIDUES BELOW ####################
############### TODO: TSC, zshifting is set to -1 (instead of -2), as there are ACE caps that are separate residues in AA
#zshifting=-1
if protein_x4 == 'rasraf':
zshifting = -1
elif protein_x4 == 'ras4araf':
zshifting = 0
else:
zshifting = 0
LOGGER.error('Found unsupported protein_x4 type')
raf_loops_selection = u.select_atoms('resid '+str(start_of_g+zshifting+291)+':'+str(start_of_g+zshifting+294)+' ' +\
str(start_of_g+zshifting+278)+':'+str(start_of_g+zshifting+281)+' ' +\
' and (name CA or name BB)')
############### NEED TO CONFIRM THE SELECTION OF THE RAF LOOP RESIDUES ABOVE ####################
diff = (lipids.center_of_mass()[2]-raf_loops_selection.center_of_mass(unwrap=True)[2])/10
if diff < 0:
diff = diff+(u.dimensions[2]/10)
z_pos.append(diff)
z_pos = np.array(z_pos)
RotMatNP = np.array(RotMat)
OS = np.array(OS)
OA = RotMatNP[:, 2, :]/(((RotMatNP[:, 2, 0]**2)+(RotMatNP[:, 2, 1]**2)+(RotMatNP[:, 2, 2]**2))**0.5)[:, None]
OWAS = np.arccos(RotMatNP[:, 2, 2])*180/math.pi
OC_temp = np.concatenate((OA, OS), axis=1)
t = ((OC_temp[:, 0]*OC_temp[:, 3])+(OC_temp[:, 1]*OC_temp[:, 4]) +
(OC_temp[:, 2]*OC_temp[:, 5]))/((OC_temp[:, 0]**2)+(OC_temp[:, 1]**2)+(OC_temp[:, 2]**2))
OC = OA*t[:, None]
ORS_tp = np.concatenate((OC, OS), axis=1)
ORS_norm = (((ORS_tp[:, 3]-ORS_tp[:, 0])**2)+((ORS_tp[:, 4]-ORS_tp[:, 1])**2)+((ORS_tp[:, 5]-ORS_tp[:, 2])**2))**0.5
ORS = (OS - OC)/ORS_norm[:, None]
OACRS = np.cross(OA, ORS)
OZCA = OA * OA[:, 2][:, None]
Z_unit = np.full([len(OZCA), 3], 1)
Z_adjust = np.array([0, 0, 1])
Z_unit = Z_unit*Z_adjust
Z_OZCA = Z_unit-OZCA
OZPACB = Z_OZCA/((Z_OZCA[:, 0]**2+Z_OZCA[:, 1]**2+Z_OZCA[:, 2]**2)**0.5)[:, None]
OROTNOTSIGNED = np.zeros([len(ORS)])
for i in range(len(ORS)):
OROTNOTSIGNED[i] = np.arccos(np.dot(OZPACB[i, :], ORS[i, :]) /
(np.sqrt(np.dot(OZPACB[i, :], OZPACB[i, :]))) *
(np.sqrt(np.dot(ORS[i, :], ORS[i, :]))))*180/math.pi
OZPACBCRS_cross = np.cross(OZPACB, ORS)
OZPACBCRS = OZPACBCRS_cross/((OZPACBCRS_cross[:, 0]**2+OZPACBCRS_cross[:, 1]**2+OZPACBCRS_cross[:, 2]**2)**0.5)[:, None]
OFORSIGN_temp = (OA - OZPACBCRS)**2
OFORSIGN = OFORSIGN_temp[:, 0]+OFORSIGN_temp[:, 1]+OFORSIGN_temp[:, 2]
OROT = OROTNOTSIGNED
for i in range(len(OROT)):
if OROT[i] < 0:
OROT[i] = -(OROT[i])
for i in range(len(OROT)):
if OFORSIGN[i] < 0.25:
OROT[i] = -(OROT[i])
###### Below introduces new shift to account for upper vs. lower leaflet #####
for i in range(len(OWAS)):
OWAS[i] = abs(-(OWAS[i])+180) # made this an absolute value so that the tilt remains positive
for i in range(len(OROT)):
if OROT[i] < 0:
OROT[i] = OROT[i]+180
elif OROT[i] > 0:
OROT[i] = OROT[i]-180
###### Above introduces new shift to account for upper vs. lower leaflet #####
###### Below might have to be updated to take into account the periodic nature of the rotation ######
if protein_type == 'RAS-ONLY':
states = np.zeros(len(OROT))
for j in range(len(OROT)):
diff0 = []
for i in range(len(RAS_ONLY_macrostate)):
#diff0.append([((RAS_ONLY_macrostate[i,0]-OWAS[j])**2+(RAS_ONLY_macrostate[i,1]-OROT[j])**2)**0.5, RAS_ONLY_macrostate[i,6]])
diff0.append([((RAS_ONLY_macrostate[i,1]-OWAS[j])**2+(RAS_ONLY_macrostate[i,0]-OROT[j])**2)**0.5, RAS_ONLY_macrostate[i,5]])
diff0.sort()
states[j] = diff0[0][1]
elif protein_type == 'RAS-RAF':
states = np.zeros(len(OROT))
for j in range(len(OROT)):
### below: adding in the requirements for the 'high-z' state ###
if (OROT[j] < -45 or OROT[j] > 140) and z_pos[j] > 4.8:
states[j] = 3
else:
### above: adding in the requirements for the 'high-z' state ###
diff0 = []
for i in range(len(RAS_RAF_macrostate)):
#diff0.append([((RAS_RAF_macrostate[i,0]-OWAS[j])**2+(RAS_RAF_macrostate[i,1]-OROT[j])**2)**0.5, RAS_RAF_macrostate[i,6]])
diff0.append([((RAS_RAF_macrostate[i,1]-OWAS[j])**2+(RAS_RAF_macrostate[i,0]-OROT[j])**2)**0.5, RAS_RAF_macrostate[i,4]])
diff0.sort()
states[j] = diff0[0][1]
###### Above might have to be updated to take into account the periodic nature of the rotation ######
###### Assume we want to remove this? Where is the code that reads this information? i.e. will there be knock-on effects? ######
###### If feedback code needs index 5 (two_states) from the output, deleting this four_states will shift that to index 4 #######
# four_states = np.zeros(len(OROT))
# for j in range(len(OROT)):
# diff0 = []
# for i in range(len(macrostate4)):
# diff0.append([((macrostate4[i,0]-OWAS[j])**2+(macrostate4[i,1]-OROT[j])**2)**0.5, macrostate4[i,6]])
# diff0.sort()
# four_states[j] = diff0[0][1]+1
###### below: old output details.... ######################################
###### Updated - RAS-only to NOT HAVE the Z-distance ######################
###### Updated - Added in the protein 'tag', i.e. RAS-ONLY or RAS-RAF #####
# OUTPUT = np.zeros([len(OROT), 6])
# for i in range(len(OROT)):
# OUTPUT[i] = timeframes[i], OWAS[i], OROT[i], z_pos[i], four_states[i], two_states[i]
###### above: old output details.... ######################################
###### below: NEW output details.... ######################################
if protein_type == 'RAS-ONLY':
OUTPUT = np.zeros([len(OROT), 6]).astype(object)
for i in range(len(OROT)):
OUTPUT[i] = str(protein_type), timeframes[i], OWAS[i], OROT[i], 'n/a', int(states[i])
elif protein_type == 'RAS-RAF':
OUTPUT = np.zeros([len(OROT), 6]).astype(object)
for i in range(len(OROT)):
OUTPUT[i] = str(protein_type), timeframes[i], OWAS[i], OROT[i], z_pos[i], int(states[i])
ALLOUT.append(OUTPUT)
return np.asarray(ALLOUT)
#np.savetxt(str(tpr)+"_tilt_rot_z_state.KRAS_"+str(k+1)+".txt", OUTPUT, fmt=['%i','%10.3f','%10.3f','%10.3f','%i','%i'], delimiter=' ')
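# Hedged usage sketch (not in the original): identify the proteins once,
# then evaluate tilt/rotation/z states frame by frame. The file names and
# selection tag below are assumptions:
#
#   u = mda.Universe("patch.gro", "patch.xtc")
#   kras_indices = get_protein_info(u, 'resname ACE1 and name BB')
#   for ts in u.trajectory:
#       states = getKRASstates(u, kras_indices)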
| 47.072776 | 173 | 0.57129 |
6ab82df482a2f9c96080ef85619210a77bdeb9a0 | 3,676 | py | Python | homeassistant/components/switch/hikvisioncam.py | maddox/home-assistant | 6624cfefd6ea81b559085779173b91a3dc6bd349 | ["MIT"] | 1 | 2015-09-13T21:10:09.000Z | 2015-09-13T21:10:09.000Z | homeassistant/components/switch/hikvisioncam.py | michaelarnauts/home-assistant | 7d905e6c0c99a4454de26d63af0581b454f01ca1 | ["MIT"] | null | null | null | homeassistant/components/switch/hikvisioncam.py | michaelarnauts/home-assistant | 7d905e6c0c99a4454de26d63af0581b454f01ca1 | ["MIT"] | 1 | 2020-05-07T08:48:36.000Z | 2020-05-07T08:48:36.000Z
"""
homeassistant.components.switch.hikvision
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Support turning on/off motion detection on Hikvision cameras.
Note: Currently works using default https port only.
CGI API Guide: http://bit.ly/1RuyUuF
Configuration:
To use the Hikvision motion detection switch you will need to add something
like the following to your config/configuration.yaml
switch:
platform: hikvisioncam
name: Hikvision Cam 1 Motion Detection
host: 192.168.1.32
username: YOUR_USERNAME
password: YOUR_PASSWORD
Variables:
host
*Required
This is the IP address of your Hikvision camera. Example: 192.168.1.32
username
*Required
Your Hikvision camera username.
password
*Required
Your Hikvision camera password.
name
*Optional
The name to use when displaying this switch instance.
"""
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.const import STATE_ON, STATE_OFF
from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD
import logging
try:
import hikvision.api
from hikvision.error import HikvisionError, MissingParamError
except ImportError:
hikvision.api = None
_LOGGING = logging.getLogger(__name__)
REQUIREMENTS = ['hikvision==0.4']
# pylint: disable=too-many-arguments
# pylint: disable=too-many-instance-attributes
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
""" Setup Hikvision Camera config. """
host = config.get(CONF_HOST, None)
port = config.get('port', "80")
name = config.get('name', "Hikvision Camera Motion Detection")
username = config.get(CONF_USERNAME, "admin")
password = config.get(CONF_PASSWORD, "12345")
if hikvision.api is None:
_LOGGING.error((
"Failed to import hikvision. Did you maybe not install the "
"'hikvision' dependency?"))
return False
try:
hikvision_cam = hikvision.api.CreateDevice(
host, port=port, username=username,
password=password, is_https=False)
except MissingParamError as param_err:
_LOGGING.error("Missing required param: %s", param_err)
return False
except HikvisionError as conn_err:
_LOGGING.error("Unable to connect: %s", conn_err)
return False
add_devices_callback([
HikvisionMotionSwitch(name, hikvision_cam)
])
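# NOTE: the class statement and constructor were lost in extraction; the
# `turn_off`/`update` methods below belong to a ToggleEntity subclass. A
# hedged reconstruction of the missing part (attribute names inferred from
# the surviving methods):
class HikvisionMotionSwitch(ToggleEntity):
    """ Provides a switch to toggle on/off motion detection. """

    def __init__(self, name, hikvision_cam):
        self._name = name
        self._hikvision_cam = hikvision_cam
        self._state = STATE_OFF

    @property
    def name(self):
        """ Returns the name of the device. """
        return self._name

    @property
    def is_on(self):
        """ True if the device is on. """
        return self._state == STATE_ON

    def turn_on(self, **kwargs):
        """ Turn the device on. """
        _LOGGING.info("Turning on Motion Detection ")
        self._hikvision_cam.enable_motion_detection()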
def turn_off(self, **kwargs):
""" Turn the device off. """
_LOGGING.info("Turning off Motion Detection ")
self._hikvision_cam.disable_motion_detection()
def update(self):
""" Update Motion Detection state """
enabled = self._hikvision_cam.is_motion_detection_enabled()
_LOGGING.info('enabled: %s', enabled)
self._state = STATE_ON if enabled else STATE_OFF
| 27.029412 | 76 | 0.681991 |
6ab942559c3b1955ee240520738d5bee4d16cc10 | 5,689 | py | Python | src/richie/apps/search/filter_definitions/mixins.py | leduong/richie | bf7ed379b7e2528cd790dadcec10ac2656efd189 | ["MIT"] | null | null | null | src/richie/apps/search/filter_definitions/mixins.py | leduong/richie | bf7ed379b7e2528cd790dadcec10ac2656efd189 | ["MIT"] | null | null | null | src/richie/apps/search/filter_definitions/mixins.py | leduong/richie | bf7ed379b7e2528cd790dadcec10ac2656efd189 | ["MIT"] | null | null | null
"""Define mixins to easily compose custom FilterDefinition classes."""
| 41.830882 | 97 | 0.4746 |
6ab9a4f6d4ca5cd5f443cec5fb87c6e2f96318a3 | 12,830 | py | Python | electrumsv/gui/qt/receive_view.py | AustEcon/electrumsv | db924efc69f091f39e7d02e7f2d7a71350f4e6af | ["MIT"] | 1 | 2019-07-04T03:35:32.000Z | 2019-07-04T03:35:32.000Z | electrumsv/gui/qt/receive_view.py | AustEcon/electrumsv | db924efc69f091f39e7d02e7f2d7a71350f4e6af | ["MIT"] | null | null | null | electrumsv/gui/qt/receive_view.py | AustEcon/electrumsv | db924efc69f091f39e7d02e7f2d7a71350f4e6af | ["MIT"] | 1 | 2019-07-04T03:35:32.000Z | 2019-07-04T03:35:32.000Z
from typing import List, Optional, TYPE_CHECKING
import weakref
from PyQt5.QtCore import QEvent, Qt
from PyQt5.QtWidgets import (QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit,
QVBoxLayout, QWidget)
from electrumsv.app_state import app_state
from electrumsv.bitcoin import script_template_to_string
from electrumsv.constants import PaymentFlag, RECEIVING_SUBPATH
from electrumsv.i18n import _
from electrumsv.logs import logs
from electrumsv.wallet_database.tables import KeyInstanceRow
from electrumsv import web
from .amountedit import AmountEdit, BTCAmountEdit
from .constants import expiration_values
if TYPE_CHECKING:
from .main_window import ElectrumWindow
from .qrcodewidget import QRCodeWidget
from .qrwindow import QR_Window
from .request_list import RequestList
from .table_widgets import TableTopButtonLayout
from .util import ButtonsLineEdit, EnterButton, HelpLabel
| 41.928105 | 98 | 0.676695 |