Dataset schema (column, dtype, observed range):

column                                      dtype            range
------------------------------------------  ---------------  ----------------
hexsha                                      stringlengths    40 to 40
size                                        int64            5 to 2.06M
ext                                         stringclasses    11 values
lang                                        stringclasses    1 value
max_stars_repo_path                         stringlengths    3 to 251
max_stars_repo_name                         stringlengths    4 to 130
max_stars_repo_head_hexsha                  stringlengths    40 to 78
max_stars_repo_licenses                     sequencelengths  1 to 10
max_stars_count                             int64            1 to 191k
max_stars_repo_stars_event_min_datetime     stringlengths    24 to 24
max_stars_repo_stars_event_max_datetime     stringlengths    24 to 24
max_issues_repo_path                        stringlengths    3 to 251
max_issues_repo_name                        stringlengths    4 to 130
max_issues_repo_head_hexsha                 stringlengths    40 to 78
max_issues_repo_licenses                    sequencelengths  1 to 10
max_issues_count                            int64            1 to 116k
max_issues_repo_issues_event_min_datetime   stringlengths    24 to 24
max_issues_repo_issues_event_max_datetime   stringlengths    24 to 24
max_forks_repo_path                         stringlengths    3 to 251
max_forks_repo_name                         stringlengths    4 to 130
max_forks_repo_head_hexsha                  stringlengths    40 to 78
max_forks_repo_licenses                     sequencelengths  1 to 10
max_forks_count                             int64            1 to 105k
max_forks_repo_forks_event_min_datetime     stringlengths    24 to 24
max_forks_repo_forks_event_max_datetime     stringlengths    24 to 24
content                                     stringlengths    1 to 1.05M
avg_line_length                             float64          1 to 1.02M
max_line_length                             int64            3 to 1.04M
alphanum_fraction                           float64          0 to 1
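The last three columns are statistics derived from `content`. Below is a minimal sketch of how they could be recomputed for one record; it assumes `avg_line_length` and `max_line_length` are taken over newline-split lines and `alphanum_fraction` is the share of alphanumeric characters, since the exact definitions are not stated on this page.

    def derived_stats(content):
        """Recompute the derived columns for one record's `content` string."""
        lines = content.split("\n")
        lengths = [len(line) for line in lines]
        return {
            "avg_line_length": sum(lengths) / len(lengths),  # mean characters per line
            "max_line_length": max(lengths),                 # longest single line
            # fraction of all characters that are alphanumeric
            "alphanum_fraction": (sum(c.isalnum() for c in content) / len(content)
                                  if content else 0.0),
        }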
----------------------------------------
hexsha: 07915aac7804509d8daa27f0a06836f1ec42314d
size: 739 | ext: py | lang: Python
max_stars_repo:  src/sentry/api/endpoints/project_tags.py | seukjung/sentry-custom @ c5f6bb2019aef3caff7f3e2b619f7a70f2b9b963 | licenses: ["BSD-3-Clause"] | count: 1 | events: 2021-08-10T06:07:13.000Z to 2021-08-10T06:07:13.000Z
max_issues_repo: src/sentry/api/endpoints/project_tags.py | fotinakis/sentry @ c5cfa5c5e47475bf5ef41e702548c2dfc7bb8a7c | licenses: ["BSD-3-Clause"] | count: 8 | events: 2019-12-28T23:49:55.000Z to 2022-03-02T04:34:18.000Z
max_forks_repo:  src/sentry/api/endpoints/project_tags.py | fotinakis/sentry @ c5cfa5c5e47475bf5ef41e702548c2dfc7bb8a7c | licenses: ["BSD-3-Clause"] | count: 1 | events: 2017-04-08T04:09:18.000Z to 2017-04-08T04:09:18.000Z
content:

from __future__ import absolute_import

import six

from rest_framework.response import Response

from sentry.api.bases.project import ProjectEndpoint
from sentry.models import TagKey, TagKeyStatus

avg_line_length: 26.392857 | max_line_length: 64 | alphanum_fraction: 0.626522
----------------------------------------
hexsha: 07919b5c6e89a71c84a3a15315c4e1b2767495ce
size: 871 | ext: py | lang: Python
max_stars_repo:  examples/02 - callbacks/callbacks.py | TensorTom/async-Eel @ d6484b6c5c9f89b64f5119d908fcdf29b173bd57 | licenses: ["MIT"] | count: 9 | events: 2019-06-17T22:11:01.000Z to 2021-08-30T17:36:13.000Z
max_issues_repo: examples/02 - callbacks/callbacks.py | TensorTom/async-Eel @ d6484b6c5c9f89b64f5119d908fcdf29b173bd57 | licenses: ["MIT"] | count: 4 | events: 2020-08-17T18:39:21.000Z to 2021-08-29T02:54:23.000Z
max_forks_repo:  examples/02 - callbacks/callbacks.py | TensorTom/async-Eel @ d6484b6c5c9f89b64f5119d908fcdf29b173bd57 | licenses: ["MIT"] | count: 3 | events: 2020-02-27T03:40:05.000Z to 2021-03-09T11:52:32.000Z
content:

from __future__ import print_function  # For Py2/3 compatibility

import async_eel
import random
import asyncio

loop = asyncio.get_event_loop()


async def print_num(n):
    """callback of js_random"""
    print('Got this from Javascript:', n)


if __name__ == '__main__':
    asyncio.run_coroutine_threadsafe(main(), loop)
    loop.run_forever()

avg_line_length: 22.921053 | max_line_length: 85 | alphanum_fraction: 0.686567
----------------------------------------
hexsha: 079304ab36eaaab974694a2714de62dfde9010a0
size: 1,578 | ext: py | lang: Python
max_stars_repo:  datacube/index/_api.py | AMA-AC/datacube-core @ 0d2fe0792cb9298cc93d1a97bbb921cfa59d6f2d | licenses: ["Apache-2.0"] | count: 2 | events: 2019-10-24T15:29:54.000Z to 2019-10-24T15:29:58.000Z
max_issues_repo: datacube/index/_api.py | AMA-AC/datacube-core @ 0d2fe0792cb9298cc93d1a97bbb921cfa59d6f2d | licenses: ["Apache-2.0"] | count: 2 | events: 2021-03-26T00:37:36.000Z to 2021-03-31T20:05:01.000Z
max_forks_repo:  datacube/index/_api.py | PhilipeRLeal/datacube-core @ 81bed714f2e5cb30a2492f1b0cf3397b79141c3a | licenses: ["Apache-2.0"] | count: null | events: null
content:

# coding=utf-8
"""
Access methods for indexing datasets & products.
"""

import logging

from datacube.config import LocalConfig
from datacube.drivers import index_driver_by_name, index_drivers
from .index import Index

_LOG = logging.getLogger(__name__)


def index_connect(local_config=None, application_name=None, validate_connection=True):
    # type: (LocalConfig, str, bool) -> Index
    """
    Create a Data Cube Index that can connect to a PostgreSQL server

    It contains all the required connection parameters, but doesn't actually
    check that the server is available.

    :param application_name: A short, alphanumeric name to identify this application.
    :param datacube.config.LocalConfig local_config: Config object to use. (optional)
    :param validate_connection: Validate database connection and schema immediately
    :rtype: datacube.index.index.Index
    :raises datacube.drivers.postgres._connections.IndexSetupError:
    """
    if local_config is None:
        local_config = LocalConfig.find()

    driver_name = local_config.get('index_driver', 'default')
    index_driver = index_driver_by_name(driver_name)
    if not index_driver:
        raise RuntimeError(
            "No index driver found for %r. %s available: %s" % (
                driver_name, len(index_drivers()), ', '.join(index_drivers())
            )
        )

    return index_driver.connect_to_index(local_config,
                                         application_name=application_name,
                                         validate_connection=validate_connection)

avg_line_length: 35.863636 | max_line_length: 86 | alphanum_fraction: 0.697085
----------------------------------------
hexsha: 079480b6c82dd74c78bc4155e46e5d2906c6b673
size: 6,560 | ext: py | lang: Python
max_stars_repo:  pgarchives/loader/load_message.py | WeilerWebServices/PostgreSQL @ ae594ed077bebbad1be3c1d95c38b7c2c2683e8c | licenses: ["PostgreSQL"] | count: null | events: null
max_issues_repo: pgarchives/loader/load_message.py | WeilerWebServices/PostgreSQL @ ae594ed077bebbad1be3c1d95c38b7c2c2683e8c | licenses: ["PostgreSQL"] | count: null | events: null
max_forks_repo:  pgarchives/loader/load_message.py | WeilerWebServices/PostgreSQL @ ae594ed077bebbad1be3c1d95c38b7c2c2683e8c | licenses: ["PostgreSQL"] | count: null | events: null
content:

#!/usr/bin/env python3
#
# load_message.py - takes a single email or mbox formatted
# file on stdin or in a file and reads it into the database.
#

import os
import sys

from optparse import OptionParser
from configparser import ConfigParser

import psycopg2

from lib.storage import ArchivesParserStorage
from lib.mbox import MailboxBreakupParser
from lib.exception import IgnorableException
from lib.log import log, opstatus
from lib.varnish import VarnishPurger

if __name__ == "__main__":
    optparser = OptionParser()
    optparser.add_option('-l', '--list', dest='list', help='Name of list to load message for')
    optparser.add_option('-d', '--directory', dest='directory', help='Load all messages in directory')
    optparser.add_option('-m', '--mbox', dest='mbox', help='Load all messages in mbox')
    optparser.add_option('-i', '--interactive', dest='interactive', action='store_true', help='Prompt after each message')
    optparser.add_option('-v', '--verbose', dest='verbose', action='store_true', help='Verbose output')
    optparser.add_option('--force-date', dest='force_date', help='Override date (used for dates that can\'t be parsed)')
    optparser.add_option('--filter-msgid', dest='filter_msgid', help='Only process message with given msgid')

    (opt, args) = optparser.parse_args()

    if (len(args)):
        print("No bare arguments accepted")
        optparser.print_usage()
        sys.exit(1)

    if not opt.list:
        print("List must be specified")
        optparser.print_usage()
        sys.exit(1)

    if opt.directory and opt.mbox:
        print("Can't specify both directory and mbox!")
        optparser.print_usage()
        sys.exit(1)

    if opt.force_date and (opt.directory or opt.mbox) and not opt.filter_msgid:
        print("Can't use force_date with directory or mbox - only individual messages")
        optparser.print_usage()
        sys.exit(1)

    if opt.filter_msgid and not (opt.directory or opt.mbox):
        print("filter_msgid makes no sense without directory or mbox!")
        optparser.print_usage()
        sys.exit(1)

    log.set(opt.verbose)

    cfg = ConfigParser()
    cfg.read('%s/archives.ini' % os.path.realpath(os.path.dirname(sys.argv[0])))
    try:
        connstr = cfg.get('db', 'connstr')
    except Exception:
        connstr = 'need_connstr'

    conn = psycopg2.connect(connstr)
    curs = conn.cursor()

    # Take an advisory lock to force serialization.
    # We could do this "properly" by reordering operations and using ON CONFLICT,
    # but concurrency is not that important and this is easier...
    try:
        curs.execute("SET statement_timeout='30s'")
        curs.execute("SELECT pg_advisory_xact_lock(8059944559669076)")
    except Exception as e:
        print(("Failed to wait on advisory lock: %s" % e))
        sys.exit(1)

    # Get the listid we're working on
    curs.execute("SELECT listid FROM lists WHERE listname=%(list)s", {
        'list': opt.list
    })
    r = curs.fetchall()
    if len(r) != 1:
        log.error("List %s not found" % opt.list)
        conn.close()
        sys.exit(1)
    listid = r[0][0]

    purges = set()

    if opt.directory:
        # Parse all files in directory
        for x in os.listdir(opt.directory):
            log.status("Parsing file %s" % x)
            with open(os.path.join(opt.directory, x)) as f:
                ap = ArchivesParserStorage()
                ap.parse(f)
                if opt.filter_msgid and not ap.is_msgid(opt.filter_msgid):
                    continue
                try:
                    ap.analyze(date_override=opt.force_date)
                except IgnorableException as e:
                    log_failed_message(listid, "directory", os.path.join(opt.directory, x), ap, e)
                    opstatus.failed += 1
                    continue
                ap.store(conn, listid)
                purges.update(ap.purges)
            if opt.interactive:
                print("Interactive mode, committing transaction")
                conn.commit()
                print("Proceed to next message with Enter, or input a period (.) to stop processing")
                x = input()
                if x == '.':
                    print("Ok, aborting!")
                    break
                print("---------------------------------")
    elif opt.mbox:
        if not os.path.isfile(opt.mbox):
            print("File %s does not exist" % opt.mbox)
            sys.exit(1)
        mboxparser = MailboxBreakupParser(opt.mbox)
        while not mboxparser.EOF:
            ap = ArchivesParserStorage()
            msg = next(mboxparser)
            if not msg:
                break
            ap.parse(msg)
            if opt.filter_msgid and not ap.is_msgid(opt.filter_msgid):
                continue
            try:
                ap.analyze(date_override=opt.force_date)
            except IgnorableException as e:
                log_failed_message(listid, "mbox", opt.mbox, ap, e)
                opstatus.failed += 1
                continue
            ap.store(conn, listid)
            purges.update(ap.purges)
        if mboxparser.returncode():
            log.error("Failed to parse mbox:")
            log.error(mboxparser.stderr_output())
            sys.exit(1)
    else:
        # Parse single message on stdin
        ap = ArchivesParserStorage()
        ap.parse(sys.stdin.buffer)
        try:
            ap.analyze(date_override=opt.force_date)
        except IgnorableException as e:
            log_failed_message(listid, "stdin", "", ap, e)
            conn.close()
            sys.exit(1)
        ap.store(conn, listid)
        purges.update(ap.purges)
        if opstatus.stored:
            log.log("Stored message with message-id %s" % ap.msgid)

    conn.commit()
    conn.close()
    opstatus.print_status()

    VarnishPurger(cfg).purge(purges)

avg_line_length: 35.846995 | max_line_length: 150 | alphanum_fraction: 0.594817
----------------------------------------
hexsha: 079486c9e4c55ef02a54afabc6964be8635f9540
size: 860 | ext: py | lang: Python
max_stars_repo:  shop/migrations/0009_auto_20200310_1430.py | manson800819/test @ 6df7d92eababe76a54585cb8102a00a6d79ca467 | licenses: ["MIT"] | count: null | events: null
max_issues_repo: shop/migrations/0009_auto_20200310_1430.py | manson800819/test @ 6df7d92eababe76a54585cb8102a00a6d79ca467 | licenses: ["MIT"] | count: null | events: null
max_forks_repo:  shop/migrations/0009_auto_20200310_1430.py | manson800819/test @ 6df7d92eababe76a54585cb8102a00a6d79ca467 | licenses: ["MIT"] | count: null | events: null
content:

# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-03-10 14:30
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion

avg_line_length: 27.741935 | max_line_length: 123 | alphanum_fraction: 0.612791
----------------------------------------
hexsha: 079607c3763787747e103898caecb5035367fd71
size: 1,351 | ext: py | lang: Python
max_stars_repo:  lib/dataset/iNaturalist.py | jrcai/ACE @ 1e2b04d1cf4bb517f107664ac489a1a96e95a4c1 | licenses: ["MIT"] | count: 18 | events: 2021-08-06T01:15:32.000Z to 2022-03-14T07:09:39.000Z
max_issues_repo: lib/dataset/iNaturalist.py | jrcai/BagofTricks-LT @ d75b195367e3d535d316d134ec4bbef4bb7fcbdd | licenses: ["MIT"] | count: 2 | events: 2021-09-24T03:29:17.000Z to 2021-11-22T19:18:58.000Z
max_forks_repo:  lib/dataset/iNaturalist.py | jrcai/BagofTricks-LT @ d75b195367e3d535d316d134ec4bbef4bb7fcbdd | licenses: ["MIT"] | count: 2 | events: 2021-10-17T18:09:20.000Z to 2021-11-08T04:19:19.000Z
content:

from dataset.baseset import BaseSet
import random, cv2
import numpy as np

avg_line_length: 32.95122 | max_line_length: 104 | alphanum_fraction: 0.623982
----------------------------------------
hexsha: 079659eddea08dcb5566544e99167b54c9eb8c33
size: 27,964 | ext: py | lang: Python
max_stars_repo:  tests/test_conferences.py | mattclark/osf.io @ 7a362ceb6af3393d3d0423aafef336ee13277303 | licenses: ["Apache-2.0"] | count: null | events: null
max_issues_repo: tests/test_conferences.py | mattclark/osf.io @ 7a362ceb6af3393d3d0423aafef336ee13277303 | licenses: ["Apache-2.0"] | count: 80 | events: 2015-02-25T15:12:15.000Z to 2015-06-11T18:44:55.000Z
max_forks_repo:  tests/test_conferences.py | mattclark/osf.io @ 7a362ceb6af3393d3d0423aafef336ee13277303 | licenses: ["Apache-2.0"] | count: null | events: null
content:

# -*- coding: utf-8 -*-

import mock
from nose.tools import *  # noqa (PEP8 asserts)

import hmac
import hashlib
from StringIO import StringIO

from django.core.exceptions import ValidationError
from django.db import IntegrityError

import furl

from framework.auth import get_or_create_user
from framework.auth.core import Auth

from osf.models import OSFUser, AbstractNode
from addons.wiki.models import WikiVersion
from osf.exceptions import BlacklistedEmailError
from website import settings
from website.conferences import views
from website.conferences import utils, message
from website.util import api_url_for, web_url_for

from tests.base import OsfTestCase, fake
from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory


class TestProvisionNode(ContextTestCase):

avg_line_length: 36.506527 | max_line_length: 114 | alphanum_fraction: 0.611357
----------------------------------------
hexsha: 079711a016ea4fa9510f792cc6a93c56f3a684e5
size: 4,060 | ext: py | lang: Python
max_stars_repo:  socket_tentacles/__init__.py | innovationgarage/socket-tentacles @ 1cfbf7649017493fafacfcbc96cd05f3c4c5d6b6 | licenses: ["MIT"] | count: null | events: null
max_issues_repo: socket_tentacles/__init__.py | innovationgarage/socket-tentacles @ 1cfbf7649017493fafacfcbc96cd05f3c4c5d6b6 | licenses: ["MIT"] | count: null | events: null
max_forks_repo:  socket_tentacles/__init__.py | innovationgarage/socket-tentacles @ 1cfbf7649017493fafacfcbc96cd05f3c4c5d6b6 | licenses: ["MIT"] | count: 1 | events: 2019-11-01T12:38:20.000Z to 2019-11-01T12:38:20.000Z
content:

import socketserver
import socket
import sys
import threading
import json
import queue
import time
import datetime
import traceback


def run(config, handlers):
    server = Server(handlers)
    server.configure(config)
    return server

avg_line_length: 31.472868 | max_line_length: 107 | alphanum_fraction: 0.580049
----------------------------------------
hexsha: 0797199eb44c9067c6481782c2b094efbd8e10a6
size: 3,917 | ext: py | lang: Python
max_stars_repo:  G5/DerivedData/ParameterProbing/checkme.py | shooking/ZoomPedalFun @ 7b9f5f4441cfe42e988e06cf6b98603c21ac2466 | licenses: ["CC0-1.0"] | count: 9 | events: 2021-02-15T00:05:32.000Z to 2022-01-24T14:01:46.000Z
max_issues_repo: G5/DerivedData/ParameterProbing/checkme.py | shooking/ZoomPedalFun @ 7b9f5f4441cfe42e988e06cf6b98603c21ac2466 | licenses: ["CC0-1.0"] | count: 13 | events: 2021-08-23T02:07:26.000Z to 2022-02-16T16:55:00.000Z
max_forks_repo:  G5/DerivedData/ParameterProbing/checkme.py | shooking/ZoomPedalFun @ 7b9f5f4441cfe42e988e06cf6b98603c21ac2466 | licenses: ["CC0-1.0"] | count: null | events: null
content:

# -*- coding: ascii -*-
import sys
import json

# handles a zoom firmware
if __name__ == "__main__":
    if len(sys.argv) == 2:
        f = open(sys.argv[1], "rb")
        data = f.read()
        f.close()
        check(data)

avg_line_length: 40.381443 | max_line_length: 129 | alphanum_fraction: 0.451876
----------------------------------------
hexsha: 0797927e3d7bde5eb907276662251361ce156da5
size: 475 | ext: py | lang: Python
max_stars_repo:  leehao/learn63.py | pilihaotian/pythonlearning @ e84b7766cc9ea8131e9720fb1f06761c9581d0da | licenses: ["Apache-2.0"] | count: 1 | events: 2020-02-26T14:52:17.000Z to 2020-02-26T14:52:17.000Z
max_issues_repo: leehao/learn63.py | pilihaotian/pythonlearning @ e84b7766cc9ea8131e9720fb1f06761c9581d0da | licenses: ["Apache-2.0"] | count: null | events: null
max_forks_repo:  leehao/learn63.py | pilihaotian/pythonlearning @ e84b7766cc9ea8131e9720fb1f06761c9581d0da | licenses: ["Apache-2.0"] | count: null | events: null
content:

# 6 a-zA-Z0-9
import random

source = ''
lower_char = [chr(x) for x in range(ord('a'), ord('z') + 1)]
upper_char = [chr(x) for x in range(ord('A'), ord('Z') + 1)]
number_char = [chr(x) for x in range(ord('0'), ord('9') + 1)]
source += "".join(lower_char)
source += "".join(upper_char)
source += "".join(number_char)
source += "_"
print(source)

# 20
while True:
    s = "".join(random.sample(source, 20))
    if '_' in s:
        print(s)
        break

avg_line_length: 23.75 | max_line_length: 61 | alphanum_fraction: 0.591579
----------------------------------------
hexsha: 0799d6264b46d419f66659960f9e80e3239bdd1c
size: 1,237 | ext: py | lang: Python
max_stars_repo:  gbic/tests.py | fga-eps-mds/2017.2-SiGI-Op_API @ 4532019c15414fd17e06bb3aa78501886e00da1d | licenses: ["BSD-3-Clause"] | count: 6 | events: 2017-08-24T13:18:21.000Z to 2017-10-03T18:06:13.000Z
max_issues_repo: gbic/tests.py | fga-gpp-mds/2017.2-Grupo9 @ 4532019c15414fd17e06bb3aa78501886e00da1d | licenses: ["BSD-3-Clause"] | count: 173 | events: 2017-08-31T15:29:01.000Z to 2017-12-14T13:40:13.000Z
max_forks_repo:  gbic/tests.py | fga-gpp-mds/2017.2-SiGI-Op_API @ 4532019c15414fd17e06bb3aa78501886e00da1d | licenses: ["BSD-3-Clause"] | count: 2 | events: 2018-11-19T10:33:00.000Z to 2019-06-19T22:35:43.000Z
content:

from django.test import TestCase
from rest_framework.test import APIRequestFactory
from .models import GBIC, GBICType
from .views import GBICListViewSet

# Create your tests here.

avg_line_length: 35.342857 | max_line_length: 74 | alphanum_fraction: 0.672595
----------------------------------------
hexsha: 079a7acf6ad24d3b711dca5d0ee7ae05fdfe00c0
size: 23,239 | ext: py | lang: Python
max_stars_repo:  fruits/core/fruit.py | alienkrieg/fruits @ b3b4b6afd7f97d2d4060909689f9811dc97981ed | licenses: ["MIT"] | count: 4 | events: 2021-10-08T11:14:54.000Z to 2021-12-30T13:56:32.000Z
max_issues_repo: fruits/core/fruit.py | alienkrieg/fruits @ b3b4b6afd7f97d2d4060909689f9811dc97981ed | licenses: ["MIT"] | count: null | events: null
max_forks_repo:  fruits/core/fruit.py | alienkrieg/fruits @ b3b4b6afd7f97d2d4060909689f9811dc97981ed | licenses: ["MIT"] | count: null | events: null
content:

import inspect
from typing import List, Union, Set, Any

import numpy as np

from fruits.cache import Cache, CoquantileCache
from fruits.scope import force_input_shape, FitTransform
from fruits.core.callback import AbstractCallback
from fruits.signature.iss import SignatureCalculator, CachePlan
from fruits.words.word import Word
from fruits.sieving.abstract import FeatureSieve
from fruits.preparation.abstract import DataPreparateur


def branch(self, index: int = None):
    """Returns the currently selected branch or the branch with the
    given index.

    :rtype: FruitBranch
    """
    if index is None:
        return self._branches[self._cbi]
    return self._branches[index]


def branches(self) -> list:
    """Returns all branches of this Fruit object.

    :rtype: list
    """
    return self._branches


def switch_branch(self, index: int):
    """Switches to the branch with the given index.

    :param index: Integer in ``[0, 1, ..., len(self.branches())-1]``
    :type index: int
    """
    if not (0 <= index < len(self._branches)):
        raise IndexError("Index has to be in [0, len(self.branches()))")
    self._cbi = index


def add(self, *objects: Union[FitTransform, Word, type]):
    """Adds one or multiple object(s) to the currently selected branch.

    :param objects: One or more objects of the following types:

        - :class:`~fruits.preparation.abstract.DataPreparateur`
        - :class:`~fruits.words.word.Word`
        - :class:`~fruits.sieving.abstract.FeatureSieve`
    :type objects: Union[FitTransform, Word]
    """
    if len(self._branches) == 0:
        self.fork()
    self._branches[self._cbi].add(*objects)
    self._fitted = False


def nfeatures(self) -> int:
    """Returns the total number of features of all branches combined.

    :rtype: int
    """
    return sum([branch.nfeatures() for branch in self._branches])


def configure(self, **kwargs: Any):
    """Makes changes to the default configuration of all branches
    if arguments differ from ``None``.

    :param kwargs: For possible options, have a look at
        :meth:`fruits.core.fruit.FruitBranch.configure`.
    :type kwargs: Any
    """
    for branch in self._branches:
        branch.configure(**kwargs)


def fit(self, X: np.ndarray):
    """Fits all branches to the given data.

    :param X: (Multidimensional) time series dataset as an array
        of three dimensions. Have a look at
        :meth:`~fruits.scope.force_input_shape`.
    :type X: np.ndarray
    """
    for branch in self._branches:
        branch.fit(X)
    self._fitted = True


def transform(self, X: np.ndarray,
              callbacks: List[AbstractCallback] = []) -> np.ndarray:
    """Returns a two dimensional array of all features from all
    branches this Fruit object contains.

    :param X: (Multidimensional) time series dataset as an array
        of three dimensions. Have a look at
        :meth:`~fruits.scope.force_input_shape`.
    :type X: np.ndarray
    :param callbacks: List of callbacks. To write your own callback,
        override the class
        :class:`~fruits.core.callback.AbstractCallback`., defaults to None
    :type callbacks: List[AbstractCallback], optional
    :rtype: np.ndarray
    :raises: RuntimeError if Fruit.fit wasn't called
    """
    if not self._fitted:
        raise RuntimeError("Missing call of self.fit")
    result = np.zeros((X.shape[0], self.nfeatures()))
    index = 0
    for branch in self._branches:
        for callback in callbacks:
            callback.on_next_branch()
        k = branch.nfeatures()
        result[:, index:index+k] = branch.transform(X, callbacks)
        index += k
    result = np.nan_to_num(result, copy=False, nan=0.0)
    return result


def fit_transform(self, X: np.ndarray) -> np.ndarray:
    """Fits all branches to the given dataset and returns the
    transformed results of X from all branches.

    :param X: (Multidimensional) time series dataset
    :type X: np.ndarray
    :returns: Two dimensional feature array
    :rtype: np.ndarray
    """
    self.fit(X)
    return self.transform(X)


def summary(self) -> str:
    """Returns a summary of this object. The summary contains a summary
    for each FruitBranch in this Fruit object.

    :rtype: str
    """
    summary = "{:=^80}".format(f"Summary of fruits.Fruit: '{self.name}'")
    summary += f"\nBranches: {len(self.branches())}"
    summary += f"\nFeatures: {self.nfeatures()}"
    for branch in self.branches():
        summary += "\n\n" + branch.summary()
    summary += "\n{:=^80}".format(f"End of Summary")
    return summary


def copy(self) -> "Fruit":
    """Creates a shallow copy of this Fruit object.
    This also creates shallow copies of all branches in this object.

    :rtype: Fruit
    """
    copy_ = Fruit(self.name+" (Copy)")
    for branch in self._branches:
        copy_.fork(branch.copy())
    return copy_


def deepcopy(self) -> "Fruit":
    """Creates a deep copy of this Fruit object.
    This also creates deep copies of all branches in this object.

    :rtype: Fruit
    """
    copy_ = Fruit(self.name+" (Copy)")
    for branch in self._branches:
        copy_.fork(branch.deepcopy())
    return copy_


class FruitBranch:
    """One branch of a Fruit object.

    A FruitBranch object extracts values from time series data that are
    somehow representative of the input data.
    The user can customize any of the following three steps.

    - Preparing data: Apply functions at the start of the extraction
      procedure. There are many so called
      :class:`~fruits.preparation.abstract.DataPreparateur` objects in
      fruits available for preprocessing. The preparateurs will be
      applied sequentially to the input data.
    - Calculating Iterated Sums: The preprocessed data is now used to
      calculate the iterated sums signature for different
      :class:`~fruits.words.word.Word` objects the user can specify.
    - Extracting Features: Each
      :class:`~fruits.sieving.abstract.FeatureSieve` added to the
      branch will be fitted on the iterated sums from the previous
      step. The branch then returns an array of numbers (the
      transformed results from those sieves), i.e. the features for
      each time series.
    """

    def configure(self,
                  mode: str = None,
                  batch_size: int = None,
                  fit_sample_size: Union[float, int] = None):
        """Makes changes to the default configuration of a fruit branch
        if arguments differ from ``None``.

        :param mode: See
            :meth:`fruits.signature.iss.SignatureCalculator.transform`,
            defaults to None
        :type mode: str, optional
        :param batch_size: See
            :meth:`~fruits.signature.iss.SignatureCalculator.transform`,
            defaults to None
        :type batch_size: int, optional
        :param fit_sample_size: Size of the random time series sample
            that is used for fitting. This is represented as a float
            which will be multiplied by ``X.shape[0]`` or ``1`` for one
            random time series., defaults to 1
        :type fit_sample_size: Union[float, int]
        """
        if mode is not None:
            self._calculator_options["mode"] = mode
        if batch_size is not None:
            self._calculator_options["batch_size"] = batch_size
        if fit_sample_size is not None:
            self._fit_sample_size = fit_sample_size

    def add_preparateur(self, preparateur: DataPreparateur):
        """Adds a preparateur to the branch.

        :type preparateur: DataPreparateur
        """
        if not isinstance(preparateur, DataPreparateur):
            raise TypeError
        self._preparateurs.append(preparateur)
        self._fitted = False

    def get_preparateurs(self) -> List[DataPreparateur]:
        """Returns a list of all preparateurs added to the branch.

        :rtype: List[DataPreparateur]
        """
        return self._preparateurs

    def clear_preparateurs(self):
        """Removes all preparateurs that were added to this branch."""
        self._preparateurs = []
        self._fitted = False

    def add_word(self, word: Word):
        """Adds a word to the branch.

        :type word: Word
        """
        if not isinstance(word, Word):
            raise TypeError
        self._words.append(word)
        self._fitted = False

    def get_words(self) -> List[Word]:
        """Returns a list of all words in the branch.

        :rtype: List[Word]
        """
        return self._words

    def clear_words(self):
        """Removes all words that were added to this branch."""
        self._words = []
        self._sieves_extended = []
        self._fitted = False

    def add_sieve(self, sieve: FeatureSieve):
        """Appends a new feature sieve to the FruitBranch.

        :type sieve: FeatureSieve
        """
        if not isinstance(sieve, FeatureSieve):
            raise TypeError
        self._sieves.append(sieve)
        self._fitted = False

    def get_sieves(self) -> List[FeatureSieve]:
        """Returns a list of all feature sieves added to the branch.

        :rtype: List[FeatureSieve]
        """
        return self._sieves

    def clear_sieves(self):
        """Removes all feature sieves that were added to this branch."""
        self._sieves = []
        self._sieve_prerequisites = None
        self._sieves_extended = []
        self._fitted = False

    def add(self, *objects: Union[FitTransform, Word, type]):
        """Adds one or multiple object(s) to the branch.

        :type objects: One or more objects of the following types:

            - :class:`~fruits.preparation.abstract.DataPreparateur`
            - :class:`~fruits.words.word.Word`
            - :class:`~fruits.sieving.abstract.FeatureSieve`
        """
        objects_flattened = np.array(objects, dtype=object).flatten()
        for obj in objects_flattened:
            if inspect.isclass(obj):
                obj = obj()
            if isinstance(obj, DataPreparateur):
                self.add_preparateur(obj)
            elif isinstance(obj, Word):
                self.add_word(obj)
            elif isinstance(obj, FeatureSieve):
                self.add_sieve(obj)
            else:
                raise TypeError("Cannot add variable of type"+str(type(obj)))

    def clear(self):
        """Clears all settings, configurations and calculated results
        the branch has.

        After the branch is cleared, it has the same settings as a
        newly created FruitBranch object.
        """
        self.clear_preparateurs()
        self.clear_words()
        self.clear_sieves()
        self._calculator_options = {"batch_size": 1, "mode": "single"}

    def nfeatures(self) -> int:
        """Returns the total number of features the current
        configuration produces.

        :rtype: int
        """
        if self._calculator_options["mode"] == "extended":
            return (
                sum([s.nfeatures() for s in self._sieves])
                * CachePlan(self._words).n_iterated_sums(
                    list(range(len(self._words)))
                )
            )
        else:
            return (
                sum([s.nfeatures() for s in self._sieves])
                * len(self._words)
            )

    def fit(self, X: np.ndarray):
        """Fits the branch to the given dataset. What this action
        explicitly does depends on the FruitBranch configuration.

        :param X: (Multidimensional) time series dataset as an array
            of three dimensions. Have a look at
            :meth:`~fruits.scope.force_input_shape`.
        :type X: np.ndarray
        """
        self._compile()
        self._get_cache(X)

        prepared_data = self._select_fit_sample(X)
        for prep in self._preparateurs:
            prep.fit(prepared_data)
            prepared_data = prep.transform(prepared_data, cache=self._cache)

        self._sieves_extended = []
        iss_calculations = SignatureCalculator().transform(
            prepared_data,
            words=self._words,
            **self._calculator_options
        )[0]
        for iterated_data in iss_calculations:
            iterated_data = iterated_data.reshape(iterated_data.shape[0]
                                                  * iterated_data.shape[1],
                                                  iterated_data.shape[2])
            sieves_copy = [sieve.copy() for sieve in self._sieves]
            for sieve in sieves_copy:
                sieve.fit(iterated_data[:, :])
            self._sieves_extended.append(sieves_copy)
        self._fitted = True

    def transform(self, X: np.ndarray,
                  callbacks: List[AbstractCallback] = []) -> np.ndarray:
        """Transforms the given time series dataset. The results are
        the calculated features for the different time series.

        :param X: (Multidimensional) time series dataset as an array
            of three dimensions. Have a look at
            :meth:`~fruits.scope.force_input_shape`.
        :type X: np.ndarray
        :param callbacks: List of callbacks. To write your own callback,
            override the class
            :class:`~fruits.core.callback.AbstractCallback`.,
            defaults to []
        :type callbacks: List[AbstractCallback], optional
        :rtype: np.ndarray
        :raises: RuntimeError if ``self.fit`` wasn't called
        """
        if not self._fitted:
            raise RuntimeError("Missing call of self.fit")

        self._get_cache(X)
        prepared_data = force_input_shape(X)
        for prep in self._preparateurs:
            prepared_data = prep.transform(prepared_data, cache=self._cache)
            for callback in callbacks:
                callback.on_preparateur(prepared_data)
        for callback in callbacks:
            callback.on_preparation_end(prepared_data)

        sieved_data = np.zeros((prepared_data.shape[0],
                                self.nfeatures()))
        k = 0
        iss_calculations = SignatureCalculator().transform(
            prepared_data,
            words=self._words,
            **self._calculator_options
        )[0]
        for i, iterated_data in enumerate(iss_calculations):
            for callback in callbacks:
                callback.on_iterated_sum(iterated_data)
            for sieve in self._sieves_extended[i]:
                nf = sieve.nfeatures()
                new_features = nf * iterated_data.shape[1]
                for it in range(iterated_data.shape[1]):
                    sieved_data[:, k+it*nf:k+(it+1)*nf] = sieve.transform(
                        iterated_data[:, it, :],
                        cache=self._cache,
                    )
                for callback in callbacks:
                    callback.on_sieve(sieved_data[k:k+new_features])
                k += new_features
        for callback in callbacks:
            callback.on_sieving_end(sieved_data)
        return sieved_data

    def fit_transform(self, X: np.ndarray) -> np.ndarray:
        """This function does the same that calling ``self.fit(X)`` and
        ``self.transform(X)`` consecutively does.

        :param X: (Multidimensional) time series dataset as an array
            of three dimensions. Have a look at
            :meth:`~fruits.scope.force_input_shape`.
        :type X: np.ndarray
        :returns: Array of features.
        :rtype: np.ndarray
        """
        self.fit(X)
        return self.transform(X)

    def summary(self) -> str:
        """Returns a summary of this object. The summary contains all
        added preparateurs, words and sieves.

        :rtype: str
        """
        summary = "{:-^80}".format("fruits.FruitBranch")
        summary += f"\nNumber of features: {self.nfeatures()}"
        summary += f"\n\nPreparateurs ({len(self._preparateurs)}): "
        if len(self._preparateurs) == 0:
            summary += "-"
        else:
            summary += "\n\t+ " + \
                       "\n\t+ ".join([str(x) for x in self._preparateurs])
        summary += f"\nIterators ({len(self._words)}): "
        if len(self._words) == 0:
            summary += "-"
        elif len(self._words) > 10:
            summary += "\n\t+ " + \
                       "\n\t+ ".join([str(x) for x in self._words[:9]])
            summary += "\n\t..."
        else:
            summary += "\n\t+ " + \
                       "\n\t+ ".join([str(x) for x in self._words])
        summary += f"\nSieves ({len(self._sieves)}): "
        if len(self._sieves) == 0:
            summary += "-"
        else:
            for x in self._sieves:
                lines = x.summary().split("\n")
                summary += "\n\t+ " + lines[0]
                summary += "\n\t  "
                summary += "\n\t  ".join(lines[1:])
        return summary

    def copy(self) -> "FruitBranch":
        """Returns a shallow copy of this FruitBranch object.

        :returns: Copy of the branch with same settings but all
            calculations done erased.
        :rtype: FruitBranch
        """
        copy_ = FruitBranch()
        for preparateur in self._preparateurs:
            copy_.add(preparateur)
        for iterator in self._words:
            copy_.add(iterator)
        for sieve in self._sieves:
            copy_.add(sieve)
        return copy_

    def deepcopy(self) -> "FruitBranch":
        """Returns a deep copy of this FruitBranch object.

        :returns: Deepcopy of the branch with same settings but all
            calculations done erased.
        :rtype: FruitBranch
        """
        copy_ = FruitBranch()
        for preparateur in self._preparateurs:
            copy_.add(preparateur.copy())
        for iterator in self._words:
            copy_.add(iterator.copy())
        for sieve in self._sieves:
            copy_.add(sieve.copy())
        copy_._calculator_options = self._calculator_options.copy()
        return copy_

avg_line_length: 35.752308 | max_line_length: 77 | alphanum_fraction: 0.588235
----------------------------------------
hexsha: 079c6865019ab91124c781c2644155172081ba8b
size: 9,863 | ext: py | lang: Python
max_stars_repo:  workoutizer/__main__.py | pa3kDaWae/workoutizer @ 15501d0060711bbd8308642bc89b45c1442d4d0f | licenses: ["MIT"] | count: null | events: null
max_issues_repo: workoutizer/__main__.py | pa3kDaWae/workoutizer @ 15501d0060711bbd8308642bc89b45c1442d4d0f | licenses: ["MIT"] | count: null | events: null
max_forks_repo:  workoutizer/__main__.py | pa3kDaWae/workoutizer @ 15501d0060711bbd8308642bc89b45c1442d4d0f | licenses: ["MIT"] | count: null | events: null
content:

import os
import argparse
import subprocess
import socket
import sys

import click
from django.core.management import execute_from_command_line

from workoutizer.settings import WORKOUTIZER_DIR, WORKOUTIZER_DB_PATH, TRACKS_DIR
from workoutizer import __version__

BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SETUP_DIR = os.path.join(BASE_DIR, 'setup')
os.environ["DJANGO_SETTINGS_MODULE"] = "workoutizer.settings"

example_rpi_cmd = "wkz --setup_rpi vendor_id=091e product_id=4b48"
url_help = 'specify ip address and port pair, like: address:port'

cli.add_command(upgrade)
cli.add_command(version)
cli.add_command(init)
cli.add_command(setup_rpi)
cli.add_command(run)
cli.add_command(manage)
cli.add_command(wkz_as_service)

if __name__ == '__main__':
    cli()

avg_line_length: 38.678431 | max_line_length: 120 | alphanum_fraction: 0.667343
----------------------------------------
hexsha: 079ca32a831e991e4272d56b55460b5a2a9b338e
size: 8,807 | ext: py | lang: Python
max_stars_repo:  bcbio/bam/trim.py | matanhofree/bcbio-nextgen @ e6938cedb20ff3b7632165105941d71189e46aac | licenses: ["MIT"] | count: 1 | events: 2015-04-08T17:43:39.000Z to 2015-04-08T17:43:39.000Z
max_issues_repo: bcbio/bam/trim.py | matanhofree/bcbio-nextgen @ e6938cedb20ff3b7632165105941d71189e46aac | licenses: ["MIT"] | count: null | events: null
max_forks_repo:  bcbio/bam/trim.py | matanhofree/bcbio-nextgen @ e6938cedb20ff3b7632165105941d71189e46aac | licenses: ["MIT"] | count: null | events: null
content:

"""Provide trimming of input reads from Fastq or BAM files.
"""
import os
import sys
import tempfile

from bcbio.utils import (file_exists, safe_makedir,
                         replace_suffix, append_stem, is_pair,
                         replace_directory, map_wrap)
from bcbio.log import logger
from bcbio.bam import fastq
from bcbio.provenance import do
from Bio.Seq import Seq
from itertools import izip, repeat
from bcbio.distributed.transaction import file_transaction
from bcbio.pipeline import config_utils

SUPPORTED_ADAPTERS = {
    "illumina": ["AACACTCTTTCCCT", "AGATCGGAAGAGCG"],
    "truseq": ["AGATCGGAAGAG"],
    "polya": ["AAAAAAAAAAAAA"],
    "nextera": ["AATGATACGGCGA", "CAAGCAGAAGACG"]}

QUALITY_FLAGS = {5: ['"E"', '"&"'],
                 20: ['"T"', '"5"']}


def trim_read_through(fastq_files, dirs, lane_config):
    """
    for small insert sizes, the read length can be longer than the insert
    resulting in the reverse complement of the 3' adapter being sequenced.
    this takes adapter sequences and trims the only the reverse complement
    of the adapter

    MYSEQUENCEAAAARETPADA -> MYSEQUENCEAAAA (no polyA trim)
    """
    quality_format = _get_quality_format(lane_config)
    to_trim = _get_sequences_to_trim(lane_config)
    out_files = _get_read_through_trimmed_outfiles(fastq_files, dirs)
    fixed_files = append_stem(out_files, ".fixed")
    if all(map(file_exists, fixed_files)):
        return fixed_files
    logger.info("Trimming %s from the 3' end of reads in %s using "
                "cutadapt." % (", ".join(to_trim),
                               ", ".join(fastq_files)))
    cores = lane_config["algorithm"].get("num_cores", 1)
    out_files = _cutadapt_trim(fastq_files, quality_format,
                               to_trim, out_files, cores)

    fixed_files = remove_short_reads(out_files, dirs, lane_config)
    return fixed_files


def remove_short_reads(fastq_files, dirs, lane_config):
    """
    remove reads from a single or pair of fastq files which fall below
    a length threshold (30 bases)
    """
    min_length = int(lane_config["algorithm"].get("min_read_length", 20))
    supplied_quality_format = _get_quality_format(lane_config)
    if supplied_quality_format == "illumina":
        quality_format = "fastq-illumina"
    else:
        quality_format = "fastq-sanger"

    if is_pair(fastq_files):
        fastq1, fastq2 = fastq_files
        out_files = fastq.filter_reads_by_length(fastq1, fastq2,
                                                 quality_format, min_length)
    else:
        out_files = [fastq.filter_single_reads_by_length(fastq_files[0],
                                                         quality_format,
                                                         min_length)]
    map(os.remove, fastq_files)
    return out_files


def _cutadapt_trim(fastq_files, quality_format, adapters, out_files, cores):
    """Trimming with cutadapt, using version installed with bcbio-nextgen.

    Uses the system executable to find the version next to our Anaconda Python.
    TODO: Could we use cutadapt as a library to avoid this?
    """
    if quality_format == "illumina":
        quality_base = "64"
    else:
        quality_base = "33"

    # --times=2 tries twice remove adapters which will allow things like:
    # realsequenceAAAAAAadapter to remove both the poly-A and the adapter
    # this behavior might not be what we want; we could also do two or
    # more passes of cutadapt
    cutadapt = os.path.join(os.path.dirname(sys.executable), "cutadapt")
    base_cmd = [cutadapt, "--times=" + "2", "--quality-base=" + quality_base,
                "--quality-cutoff=5", "--format=fastq", "--minimum-length=0"]
    adapter_cmd = map(lambda x: "--adapter=" + x, adapters)
    base_cmd.extend(adapter_cmd)

    if all(map(file_exists, out_files)):
        return out_files

    with file_transaction(out_files) as tmp_out_files:
        if isinstance(tmp_out_files, basestring):
            tmp_out_files = [tmp_out_files]
        map(_run_cutadapt_on_single_file, izip(repeat(base_cmd), fastq_files,
                                               tmp_out_files))
    return out_files

avg_line_length: 40.962791 | max_line_length: 92 | alphanum_fraction: 0.65641
----------------------------------------
hexsha: 079cc8e3750a72f04c77d4be145f9892cc269784
size: 13,419 | ext: py | lang: Python
max_stars_repo:  FEniCSUI/AnalysesHub/views.py | nasserarbabi/FEniCSUI-dev @ f8f161e1b49932843e01301212e7d031fff4f6c8 | licenses: ["MIT"] | count: null | events: null
max_issues_repo: FEniCSUI/AnalysesHub/views.py | nasserarbabi/FEniCSUI-dev @ f8f161e1b49932843e01301212e7d031fff4f6c8 | licenses: ["MIT"] | count: 8 | events: 2021-03-10T21:59:52.000Z to 2021-09-22T19:12:57.000Z
max_forks_repo:  FEniCSUI/AnalysesHub/views.py | nasserarbabi/FEniCSUI @ f8f161e1b49932843e01301212e7d031fff4f6c8 | licenses: ["MIT"] | count: null | events: null
content:

from rest_framework.response import Response
from rest_framework.views import APIView
from django.shortcuts import get_object_or_404
from dashboard.models import projects
from .models import AnalysisConfig, SolverResults, SolverProgress, DockerLogs
from rest_framework.parsers import FormParser, JSONParser, MultiPartParser, FileUploadParser
from rest_framework import status
import docker
import os
import json
from zipfile import ZipFile
from django.http import HttpResponse
from threading import Thread
from time import sleep
from datetime import datetime


def streamDockerLog(container, project):
    for line in container.logs(stream=True):
        logs = get_object_or_404(DockerLogs, project=project)
        now = datetime.now()
        current_time = now.strftime("[%H:%M:%S]: ")
        logs.log = current_time + str(line.strip(), 'utf-8') + "\n" + logs.log
        logs.save()

avg_line_length: 42.735669 | max_line_length: 158 | alphanum_fraction: 0.634474
----------------------------------------
hexsha: 079d837b78456fbc7dc57dde76430c107de0f8b2
size: 1,627 | ext: py | lang: Python
max_stars_repo:  fs/opener/appfs.py | EnjoyLifeFund/macHighSierra-py36-pkgs @ 5668b5785296b314ea1321057420bcd077dba9ea | licenses: ["BSD-3-Clause", "BSD-2-Clause", "MIT"] | count: null | events: null
max_issues_repo: fs/opener/appfs.py | EnjoyLifeFund/macHighSierra-py36-pkgs @ 5668b5785296b314ea1321057420bcd077dba9ea | licenses: ["BSD-3-Clause", "BSD-2-Clause", "MIT"] | count: null | events: null
max_forks_repo:  fs/opener/appfs.py | EnjoyLifeFund/macHighSierra-py36-pkgs @ 5668b5785296b314ea1321057420bcd077dba9ea | licenses: ["BSD-3-Clause", "BSD-2-Clause", "MIT"] | count: null | events: null
content:

# coding: utf-8
"""``AppFS`` opener definition.
"""

from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

from .base import Opener
from .errors import OpenerError
from ..subfs import ClosingSubFS
from .. import appfs

avg_line_length: 24.283582 | max_line_length: 68 | alphanum_fraction: 0.566687
----------------------------------------
hexsha: 079e4e7b1369d32d29e2b328e164f3c7bb362e85
size: 23,180 | ext: py | lang: Python
max_stars_repo:  scripts/modeling_toolbox/evaluation.py | cyberj0g/verification-classifier @ efb19a3864e27a7f149a1c27ee8e13eaa19f96eb | licenses: ["MIT"] | count: 8 | events: 2019-06-06T08:16:45.000Z to 2021-06-26T11:53:48.000Z
max_issues_repo: scripts/modeling_toolbox/evaluation.py | cyberj0g/verification-classifier @ efb19a3864e27a7f149a1c27ee8e13eaa19f96eb | licenses: ["MIT"] | count: 95 | events: 2019-03-27T08:36:01.000Z to 2022-02-10T00:15:20.000Z
max_forks_repo:  scripts/modeling_toolbox/evaluation.py | cyberj0g/verification-classifier @ efb19a3864e27a7f149a1c27ee8e13eaa19f96eb | licenses: ["MIT"] | count: 8 | events: 2019-02-28T11:21:46.000Z to 2022-03-21T07:34:20.000Z
content:

import numpy as np
from sklearn.metrics import fbeta_score, roc_curve, auc, confusion_matrix
from sklearn.decomposition import PCA
from sklearn import random_projection
from sklearn import svm
from sklearn.ensemble import IsolationForest
import matplotlib.pyplot as plt

from keras.layers import Dense, Input, Dropout
from keras.models import Model
from keras import regularizers
from keras.models import Sequential
from keras.optimizers import Adam
from keras.regularizers import l2

from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import AdaBoostClassifier
import xgboost as xgb

avg_line_length: 44.83559 | max_line_length: 122 | alphanum_fraction: 0.519586
----------------------------------------
hexsha: 07a0528abcb6666cfcae1ee1c6cb300b86de98df
size: 1,896 | ext: py | lang: Python
max_stars_repo:  tests/test_notifo_message.py | mrtazz/notifo.py @ 26079db3b40c26661155af20a9f16a0eca06dbde | licenses: ["MIT"] | count: 3 | events: 2015-11-05T11:49:34.000Z to 2016-07-17T18:06:15.000Z
max_issues_repo: tests/test_notifo_message.py | mrtazz/notifo.py @ 26079db3b40c26661155af20a9f16a0eca06dbde | licenses: ["MIT"] | count: null | events: null
max_forks_repo:  tests/test_notifo_message.py | mrtazz/notifo.py @ 26079db3b40c26661155af20a9f16a0eca06dbde | licenses: ["MIT"] | count: null | events: null
content:

# encoding: utf-8

import unittest
import os
import sys

sys.path.append(os.getcwd())

from notifo import Notifo, send_message


if __name__ == '__main__':
    unittest.main()

avg_line_length: 38.693878 | max_line_length: 79 | alphanum_fraction: 0.655591
----------------------------------------
hexsha: 07a0beb6aad78f79be93a859fb255e52020dee2b
size: 1,931 | ext: py | lang: Python
max_stars_repo:  geoist/cattools/Smoothing.py | wqqpp007/geoist @ 116b674eae3da4ee706902ce7f5feae1f61f43a5 | licenses: ["MIT"] | count: 1 | events: 2020-06-04T01:09:24.000Z to 2020-06-04T01:09:24.000Z
max_issues_repo: geoist/cattools/Smoothing.py | wqqpp007/geoist @ 116b674eae3da4ee706902ce7f5feae1f61f43a5 | licenses: ["MIT"] | count: null | events: null
max_forks_repo:  geoist/cattools/Smoothing.py | wqqpp007/geoist @ 116b674eae3da4ee706902ce7f5feae1f61f43a5 | licenses: ["MIT"] | count: null | events: null
content:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import numpy as np

# note: the flattened source reads "import .Selection as Sel" etc., which is
# not valid Python; the equivalent relative-import form is used here
from . import Selection as Sel
from . import Exploration as Exp
from . import CatUtils as CU

#-----------------------------------------------------------------------------------------

#-----------------------------------------------------------------------------------------

avg_line_length: 23.26506 | max_line_length: 90 | alphanum_fraction: 0.483169
----------------------------------------
hexsha: 07a14721d51acde99d2ac0e4f6c8f439015ceb27
size: 2,759 | ext: py | lang: Python
max_stars_repo:  hebsafeharbor/identifier/signals/lexicon_based_recognizer.py | dkarmon/HebSafeHarbor @ fdad7481c74feb78f8c3265c327eae7712cf16ce | licenses: ["MIT"] | count: 3 | events: 2022-03-10T14:41:54.000Z to 2022-03-28T11:18:56.000Z
max_issues_repo: hebsafeharbor/identifier/signals/lexicon_based_recognizer.py | 8400TheHealthNetwork/HebSafeHarbor @ f618ca76d995e6fb74c0cb2f01478c7efd2d8836 | licenses: ["MIT"] | count: 2 | events: 2022-03-06T10:39:27.000Z to 2022-03-07T12:42:13.000Z
max_forks_repo:  hebsafeharbor/identifier/signals/lexicon_based_recognizer.py | dkarmon/HebSafeHarbor @ fdad7481c74feb78f8c3265c327eae7712cf16ce | licenses: ["MIT"] | count: 3 | events: 2022-02-15T09:50:08.000Z to 2022-02-22T08:43:26.000Z
content:

from typing import List

from presidio_analyzer import EntityRecognizer, RecognizerResult, AnalysisExplanation
from presidio_analyzer.nlp_engine import NlpArtifacts

from hebsafeharbor.common.terms_recognizer import TermsRecognizer

avg_line_length: 43.793651 | max_line_length: 117 | alphanum_fraction: 0.697717
----------------------------------------
hexsha: 07a42ef8c15bd224503fc4a06fa31dab2756c605
size: 35,966 | ext: py | lang: Python
max_stars_repo:  my_plugins/YouCompleteMe/third_party/ycmd/ycmd/tests/clangd/subcommands_test.py | cyx233/vim_config @ f09c9206344c17df20a05dd2c08a02f098a7e873 | licenses: ["MIT"] | count: null | events: null
max_issues_repo: my_plugins/YouCompleteMe/third_party/ycmd/ycmd/tests/clangd/subcommands_test.py | cyx233/vim_config @ f09c9206344c17df20a05dd2c08a02f098a7e873 | licenses: ["MIT"] | count: null | events: null
max_forks_repo:  my_plugins/YouCompleteMe/third_party/ycmd/ycmd/tests/clangd/subcommands_test.py | cyx233/vim_config @ f09c9206344c17df20a05dd2c08a02f098a7e873 | licenses: ["MIT"] | count: null | events: null
content:

# encoding: utf-8
#
# Copyright (C) 2018 ycmd contributors
#
# This file is part of ycmd.
#
# ycmd is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ycmd is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ycmd.  If not, see <http://www.gnu.org/licenses/>.

from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division

from hamcrest.core.base_matcher import BaseMatcher
from hamcrest import ( assert_that,
                       contains,
                       contains_string,
                       equal_to,
                       has_entries,
                       has_entry,
                       matches_regexp )
from pprint import pprint
import requests
import os.path

from ycmd.tests.clangd import ( IsolatedYcmd,
                                SharedYcmd,
                                PathToTestFile,
                                RunAfterInitialized )
from ycmd.tests.test_utils import ( BuildRequest,
                                    ChunkMatcher,
                                    CombineRequest,
                                    LineColMatcher,
                                    LocationMatcher,
                                    ErrorMatcher,
                                    WithRetry,
                                    WaitUntilCompleterServerReady )
from ycmd.utils import ReadFile


# This test is isolated to trigger objcpp hooks, rather than fetching completer
# from cache.
def Subcommands_GoTo_all_test():
  tests = [
    # Local::x -> definition/declaration of x
    { 'req': ( 'goto.cc', 23, 21 ), 'res': ( 'goto.cc', 4, 9 ) },
    # Local::in_line -> definition/declaration of Local::in_line
    { 'req': ( 'goto.cc', 24, 26 ), 'res': ( 'goto.cc', 6, 10 ) },
    # Local -> definition/declaration of Local
    { 'req': ( 'goto.cc', 24, 16 ), 'res': ( 'goto.cc', 2, 11 ) },
    # Local::out_of_line -> definition of Local::out_of_line
    { 'req': ( 'goto.cc', 25, 27 ), 'res': ( 'goto.cc', 14, 13 ) },
    # GoToDeclaration alternates between definition and declaration
    { 'req': ( 'goto.cc', 14, 13 ), 'res': ( 'goto.cc', 11, 10 ) },
    { 'req': ( 'goto.cc', 11, 10 ), 'res': ( 'goto.cc', 14, 13 ) },
    # test -> definition and declaration of test
    { 'req': ( 'goto.cc', 21, 5 ), 'res': ( 'goto.cc', 19, 5 ) },
    { 'req': ( 'goto.cc', 19, 5 ), 'res': ( 'goto.cc', 21, 5 ) },
    # Unicde
    { 'req': ( 'goto.cc', 34, 9 ), 'res': ( 'goto.cc', 32, 26 ) },
    # Another_Unicde
    { 'req': ( 'goto.cc', 36, 17 ), 'res': ( 'goto.cc', 32, 54 ) },
    { 'req': ( 'goto.cc', 36, 25 ), 'res': ( 'goto.cc', 32, 54 ) },
    { 'req': ( 'goto.cc', 38, 3 ), 'res': ( 'goto.cc', 36, 28 ) },
    # Expected failures
    { 'req': ( 'goto.cc', 13, 1 ), 'res': 'Cannot jump to location' },
    { 'req': ( 'goto.cc', 16, 6 ), 'res': 'Cannot jump to location' },
  ]

  for test in tests:
    for cmd in [ 'GoToDefinition', 'GoTo', 'GoToImprecise' ]:
      yield RunGoToTest_all, '', cmd, test


def FixIt_Check_cpp11_Ins( results ):
  # First fixit
  #   switch(A()) { // expected-error{{explicit conversion to}}
  assert_that( results, has_entries( {
    'fixits': contains( has_entries( {
      'chunks': contains(
        has_entries( {
          'replacement_text': equal_to( 'static_cast<int>(' ),
          'range': has_entries( {
            'start': has_entries( { 'line_num': 16, 'column_num': 10 } ),
            'end' : has_entries( { 'line_num': 16, 'column_num': 10 } ),
          } ),
        } ),
        has_entries( {
          'replacement_text': equal_to( ')' ),
          'range': has_entries( {
            'start': has_entries( { 'line_num': 16, 'column_num': 13 } ),
            'end' : has_entries( { 'line_num': 16, 'column_num': 13 } ),
          } ),
        } ) ),
      'location': has_entries( { 'line_num': 16, 'column_num': 0 } )
    } ) )
  } ) )


def FixIt_Check_cpp11_InsMultiLine( results ):
  # Similar to FixIt_Check_cpp11_1 but inserts split across lines
  #
  assert_that( results, has_entries( {
    'fixits': contains( has_entries( {
      'chunks': contains(
        has_entries( {
          'replacement_text': equal_to( 'static_cast<int>(' ),
          'range': has_entries( {
            'start': has_entries( { 'line_num': 26, 'column_num': 7 } ),
            'end' : has_entries( { 'line_num': 26, 'column_num': 7 } ),
          } ),
        } ),
        has_entries( {
          'replacement_text': equal_to( ')' ),
          'range': has_entries( {
            'start': has_entries( { 'line_num': 28, 'column_num': 2 } ),
            'end' : has_entries( { 'line_num': 28, 'column_num': 2 } ),
          } ),
        } ) ),
      'location': has_entries( { 'line_num': 25, 'column_num': 14 } )
    } ) )
  } ) )


def FixIt_Check_cpp11_Del( results ):
  # Removal of ::
  assert_that( results, has_entries( {
    'fixits': contains( has_entries( {
      'chunks': contains(
        has_entries( {
          'replacement_text': equal_to( '' ),
          'range': has_entries( {
            'start': has_entries( { 'line_num': 35, 'column_num': 7 } ),
            'end' : has_entries( { 'line_num': 35, 'column_num': 9 } ),
          } ),
        } ) ),
      'location': has_entries( { 'line_num': 35, 'column_num': 7 } )
    } ) )
  } ) )


def FixIt_Check_cpp11_Repl( results ):
  assert_that( results, has_entries( {
    'fixits': contains( has_entries( {
      'chunks': contains(
        has_entries( {
          'replacement_text': equal_to( 'foo' ),
          'range': has_entries( {
            'start': has_entries( { 'line_num': 40, 'column_num': 6 } ),
            'end' : has_entries( { 'line_num': 40, 'column_num': 9 } ),
          } ),
        } ) ),
      'location': has_entries( { 'line_num': 40, 'column_num': 6 } )
    } ) )
  } ) )


def FixIt_Check_cpp11_DelAdd( results ):
  assert_that( results, has_entries( {
    'fixits': contains(
      has_entries( {
        'chunks': contains(
          has_entries( {
            'replacement_text': equal_to( '' ),
            'range': has_entries( {
              'start': has_entries( { 'line_num': 48, 'column_num': 3 } ),
              'end' : has_entries( { 'line_num': 48, 'column_num': 4 } ),
            } ),
          } ),
          has_entries( {
            'replacement_text': equal_to( '~' ),
            'range': has_entries( {
              'start': has_entries( { 'line_num': 48, 'column_num': 9 } ),
              'end' : has_entries( { 'line_num': 48, 'column_num': 9 } ),
            } ),
          } ),
        ),
        'location': has_entries( { 'line_num': 48, 'column_num': 3 } )
      } ),
      has_entries( {
        'chunks': contains(
          has_entries( {
            'replacement_text': equal_to( '= default;' ),
            'range': has_entries( {
              'start': has_entries( { 'line_num': 48, 'column_num': 15 } ),
              'end' : has_entries( { 'line_num': 48, 'column_num': 17 } ),
            } ),
          } ),
        ),
        'location': has_entries( { 'line_num': 48, 'column_num': 3 } )
      } ),
    )
  } ) )


def FixIt_Check_objc( results ):
  assert_that( results, has_entries( {
    'fixits': contains( has_entries( {
      'chunks': contains(
        has_entries( {
          'replacement_text': equal_to( 'id' ),
          'range': has_entries( {
            'start': has_entries( { 'line_num': 5, 'column_num': 3 } ),
            'end' : has_entries( { 'line_num': 5, 'column_num': 3 } ),
          } ),
        } ) ),
      'location': has_entries( { 'line_num': 5, 'column_num': 3 } )
    } ) )
  } ) )


def FixIt_Check_objc_NoFixIt( results ):
  # and finally, a warning with no fixits
  assert_that( results, equal_to( { 'fixits': [] } ) )


def FixIt_Check_cpp11_MultiFirst( results ):
  assert_that( results, has_entries( {
    'fixits': contains(
      # first fix-it at 54,16
      has_entries( {
        'chunks': contains(
          has_entries( {
            'replacement_text': equal_to( 'foo' ),
            'range': has_entries( {
              'start': has_entries( { 'line_num': 54, 'column_num': 16 } ),
              'end' : has_entries( { 'line_num': 54, 'column_num': 19 } ),
            } ),
          } ) ),
        'location': has_entries( { 'line_num': 54, 'column_num': 15 } )
      } ),
      # second fix-it at 54,52
      has_entries( {
        'chunks': contains(
          has_entries( {
            'replacement_text': equal_to( '' ),
            'range': has_entries( {
              'start': has_entries( { 'line_num': 54, 'column_num': 52 } ),
              'end' : has_entries( { 'line_num': 54, 'column_num': 53 } ),
            } ),
          } ),
          has_entries( {
            'replacement_text': equal_to( '~' ),
            'range': has_entries( {
              'start': has_entries( { 'line_num': 54, 'column_num': 58 } ),
              'end' : has_entries( { 'line_num': 54, 'column_num': 58 } ),
            } ),
          } ),
        ),
        'location': has_entries( { 'line_num': 54, 'column_num': 15 } )
      } ),
      has_entries( {
        'chunks': contains(
          has_entries( {
            'replacement_text': equal_to( '= default;' ),
            'range': has_entries( {
              'start': has_entries( { 'line_num': 54, 'column_num': 64 } ),
              'end' : has_entries( { 'line_num': 54, 'column_num': 67 } ),
            } ),
          } ) ),
        'location': has_entries( { 'line_num': 54, 'column_num': 15 } )
      } ),
    )
  } ) )


def FixIt_Check_cpp11_MultiSecond( results ):
  assert_that( results, has_entries( {
    'fixits': contains(
      # first fix-it at 54,16
      has_entries( {
        'chunks': contains(
          has_entries( {
            'replacement_text': equal_to( 'foo' ),
            'range': has_entries( {
              'start': has_entries( { 'line_num': 54, 'column_num': 16 } ),
              'end' : has_entries( { 'line_num': 54, 'column_num': 19 } ),
            } ),
          } ) ),
        'location': has_entries( { 'line_num': 54, 'column_num': 51 } )
      } ),
      # second fix-it at 54,52
      has_entries( {
        'chunks': contains(
          has_entries( {
            'replacement_text': equal_to( '' ),
            'range': has_entries( {
              'start': has_entries( { 'line_num': 54, 'column_num': 52 } ),
              'end' : has_entries( { 'line_num': 54, 'column_num': 53 } ),
            } ),
          } ),
          has_entries( {
            'replacement_text': equal_to( '~' ),
            'range': has_entries( {
              'start': has_entries( { 'line_num': 54, 'column_num': 58 } ),
              'end' : has_entries( { 'line_num': 54, 'column_num': 58 } ),
            } ),
          } ),
        ),
        'location': has_entries( { 'line_num': 54, 'column_num': 51 } )
      } ),
      has_entries( {
        'chunks': contains(
          has_entries( {
            'replacement_text': equal_to( '= default;' ),
            'range': has_entries( {
              'start': has_entries( { 'line_num': 54, 'column_num': 64 } ),
              'end' : has_entries( { 'line_num': 54, 'column_num': 67 } ),
            } ),
          } ) ),
        'location': has_entries( { 'line_num': 54, 'column_num': 51 } )
      } ),
    )
  } ) )


def FixIt_Check_unicode_Ins( results ):
  assert_that( results, has_entries( {
    'fixits': contains( has_entries( {
      'chunks': contains(
        has_entries( {
          'replacement_text': equal_to( '=' ),
          'range': has_entries( {
            'start': has_entries( { 'line_num': 21, 'column_num': 9 } ),
            'end' : has_entries( { 'line_num': 21, 'column_num': 11 } ),
          } ),
        } ) ),
      'location': has_entries( { 'line_num': 21, 'column_num': 16 } )
    } ) )
  } ) )


def FixIt_Check_cpp11_Note( results ):
  assert_that( results, has_entries( {
    'fixits': contains(
      # First note: put parens around it
      has_entries( {
        'text': contains_string( 'parentheses around the assignment' ),
        'chunks': contains(
          ChunkMatcher( '(',
                        LineColMatcher( 59, 8 ),
                        LineColMatcher( 59, 8 ) ),
          ChunkMatcher( ')',
                        LineColMatcher( 61, 12 ),
                        LineColMatcher( 61, 12 ) ) ),
        'location': LineColMatcher( 60, 1 ),
      } ),
      # Second note: change to ==
      has_entries( {
        'text': contains_string( '==' ),
        'chunks': contains(
          ChunkMatcher( '==',
                        LineColMatcher( 60, 8 ),
                        LineColMatcher( 60, 9 ) ) ),
        'location': LineColMatcher( 60, 1 ),
      } ),
      # Unresolved, requires /resolve_fixit request
      has_entries( {
        'text': 'Extract subexpression to variable',
        'resolve': True,
        'command': has_entries( { 'command': 'clangd.applyTweak' } )
      } ) )
  } ) )


def FixIt_Check_cpp11_SpellCheck( results ):
  assert_that( results, has_entries( {
    'fixits': contains(
      # Change to SpellingIsNotMyStrongPoint
      has_entries( {
        'text': contains_string( "change 'SpellingIsNotMyStringPiont' to "
                                 "'SpellingIsNotMyStrongPoint'" ),
        'chunks': contains(
          ChunkMatcher( 'SpellingIsNotMyStrongPoint',
                        LineColMatcher( 72, 9 ),
                        LineColMatcher( 72, 35 ) ) ),
        'location': LineColMatcher( 72, 9 ),
      } ) )
  } ) )


def FixIt_Check_cuda( results ):
  assert_that( results, has_entries( {
    'fixits': contains(
      has_entries( {
        'text': contains_string( "change 'int' to 'void'" ),
        'chunks': contains(
          ChunkMatcher( 'void',
                        LineColMatcher( 3, 12 ),
                        LineColMatcher( 3, 15 ) ) ),
        'location': LineColMatcher( 3, 12 ),
      } ) )
  } ) )


def FixIt_Check_SubexprExtract_Resolved( results ):
  assert_that( results, has_entries( {
    'fixits': contains(
      has_entries( {
        'text': 'Extract subexpression to variable',
        'chunks': contains(
          ChunkMatcher( 'auto dummy = foo(i + 3);\n ',
                        LineColMatcher( 84, 3 ),
                        LineColMatcher( 84, 3 ) ),
          ChunkMatcher( 'dummy',
                        LineColMatcher( 84, 10 ),
                        LineColMatcher( 84, 22 ) ),
        )
      } ) )
  } ) )


def FixIt_Check_RawStringReplace_Resolved( results ):
  assert_that( results, has_entries( {
    'fixits': contains(
      has_entries( {
        'text': 'Convert to raw string',
        'chunks': contains(
          ChunkMatcher( 'R"(\\\\r\\asd\n\\v)"',
                        LineColMatcher( 80, 19 ),
                        LineColMatcher( 80, 36 ) ),
        )
      } ) )
  } ) )


def FixIt_Check_MacroExpand_Resolved( results ):
  assert_that( results, has_entries( {
    'fixits': contains(
      has_entries( {
        'text': "Expand macro 'DECLARE_INT'",
        'chunks': contains(
          ChunkMatcher( 'int i',
                        LineColMatcher( 83, 3 ),
                        LineColMatcher( 83, 17 ) ),
        )
      } ) )
  } ) )


def FixIt_Check_AutoExpand_Resolved( results ):
  assert_that( results, has_entries( {
    'fixits': contains(
      has_entries( {
        'text': "Expand auto type",
        'chunks': contains(
          ChunkMatcher( 'const char *',
                        LineColMatcher( 80, 1 ),
                        LineColMatcher( 80, 6 ) ),
        )
      } ) )
  } ) )

avg_line_length: 34.783366 | max_line_length: 79 | alphanum_fraction: 0.508564
----------------------------------------
hexsha: 07a4651e3c80a36d3030fc52e231138dba1e43c1
size: 1,032 | ext: py | lang: Python
max_stars_repo:  sentiment/config.py | TheRensselaerIDEA/covid19_tweet_ids @ fee7d951b11cf2650e48668614c30672179ab3af | licenses: ["MIT"] | count: null | events: null
max_issues_repo: sentiment/config.py | TheRensselaerIDEA/covid19_tweet_ids @ fee7d951b11cf2650e48668614c30672179ab3af | licenses: ["MIT"] | count: null | events: null
max_forks_repo:  sentiment/config.py | TheRensselaerIDEA/covid19_tweet_ids @ fee7d951b11cf2650e48668614c30672179ab3af | licenses: ["MIT"] | count: null | events: null
content:

"""
Config class containing all the settings for running sentiment scoring tool
"""
import jsonpickle

avg_line_length: 27.157895 | max_line_length: 75 | alphanum_fraction: 0.622093
----------------------------------------
hexsha: 07a4d5bc6f7dc36e1d1fd72f273d7cc3af846e4b
size: 9,184 | ext: py | lang: Python
max_stars_repo:  tests/ut/python/dataset/test_invert.py | GuoSuiming/mindspore @ 48afc4cfa53d970c0b20eedfb46e039db2a133d5 | licenses: ["Apache-2.0"] | count: 4 | events: 2021-01-26T09:14:01.000Z to 2021-01-26T09:17:24.000Z
max_issues_repo: tests/ut/python/dataset/test_invert.py | forwhat461/mindspore @ 59a277756eb4faad9ac9afcc7fd526e8277d4994 | licenses: ["Apache-2.0"] | count: null | events: null
max_forks_repo:  tests/ut/python/dataset/test_invert.py | forwhat461/mindspore @ 59a277756eb4faad9ac9afcc7fd526e8277d4994 | licenses: ["Apache-2.0"] | count: null | events: null
content:

# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
Testing Invert op in DE
"""
import numpy as np

import mindspore.dataset as ds
import mindspore.dataset.transforms.py_transforms
import mindspore.dataset.vision.py_transforms as F
import mindspore.dataset.vision.c_transforms as C
from mindspore import log as logger
from util import visualize_list, save_and_check_md5, diff_mse

DATA_DIR = "../data/dataset/testImageNetData/train/"

GENERATE_GOLDEN = False


def test_invert_py(plot=False):
    """
    Test Invert python op
    """
    logger.info("Test Invert Python op")

    # Original Images
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)

    transforms_original = mindspore.dataset.transforms.py_transforms.Compose(
        [F.Decode(),
         F.Resize((224, 224)),
         F.ToTensor()])

    ds_original = data_set.map(operations=transforms_original, input_columns="image")

    ds_original = ds_original.batch(512)

    for idx, (image, _) in enumerate(ds_original):
        if idx == 0:
            images_original = np.transpose(image.asnumpy(), (0, 2, 3, 1))
        else:
            images_original = np.append(images_original,
                                        np.transpose(image.asnumpy(), (0, 2, 3, 1)),
                                        axis=0)

    # Color Inverted Images
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)

    transforms_invert = mindspore.dataset.transforms.py_transforms.Compose(
        [F.Decode(),
         F.Resize((224, 224)),
         F.Invert(),
         F.ToTensor()])

    ds_invert = data_set.map(operations=transforms_invert, input_columns="image")

    ds_invert = ds_invert.batch(512)

    for idx, (image, _) in enumerate(ds_invert):
        if idx == 0:
            images_invert = np.transpose(image.asnumpy(), (0, 2, 3, 1))
        else:
            images_invert = np.append(images_invert,
                                      np.transpose(image.asnumpy(), (0, 2, 3, 1)),
                                      axis=0)

    num_samples = images_original.shape[0]
    mse = np.zeros(num_samples)
    for i in range(num_samples):
        mse[i] = np.mean((images_invert[i] - images_original[i]) ** 2)
    logger.info("MSE= {}".format(str(np.mean(mse))))

    if plot:
        visualize_list(images_original, images_invert)


def test_invert_c(plot=False):
    """
    Test Invert Cpp op
    """
    logger.info("Test Invert cpp op")

    # Original Images
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)

    transforms_original = [C.Decode(), C.Resize(size=[224, 224])]

    ds_original = data_set.map(operations=transforms_original, input_columns="image")

    ds_original = ds_original.batch(512)

    for idx, (image, _) in enumerate(ds_original):
        if idx == 0:
            images_original = image.asnumpy()
        else:
            images_original = np.append(images_original, image.asnumpy(), axis=0)

    # Invert Images
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)

    transform_invert = [C.Decode(), C.Resize(size=[224, 224]), C.Invert()]

    ds_invert = data_set.map(operations=transform_invert, input_columns="image")

    ds_invert = ds_invert.batch(512)

    for idx, (image, _) in enumerate(ds_invert):
        if idx == 0:
            images_invert = image.asnumpy()
        else:
            images_invert = np.append(images_invert, image.asnumpy(), axis=0)

    if plot:
        visualize_list(images_original, images_invert)

    num_samples = images_original.shape[0]
    mse = np.zeros(num_samples)
    for i in range(num_samples):
        mse[i] = diff_mse(images_invert[i], images_original[i])
    logger.info("MSE= {}".format(str(np.mean(mse))))


def test_invert_py_c(plot=False):
    """
    Test Invert Cpp op and python op
    """
    logger.info("Test Invert cpp and python op")

    # Invert Images in cpp
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
    data_set = data_set.map(operations=[C.Decode(), C.Resize((224, 224))],
                            input_columns=["image"])

    ds_c_invert = data_set.map(operations=C.Invert(), input_columns="image")

    ds_c_invert = ds_c_invert.batch(512)

    for idx, (image, _) in enumerate(ds_c_invert):
        if idx == 0:
            images_c_invert = image.asnumpy()
        else:
            images_c_invert = np.append(images_c_invert, image.asnumpy(), axis=0)

    # invert images in python
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
    data_set = data_set.map(operations=[C.Decode(), C.Resize((224, 224))],
                            input_columns=["image"])

    transforms_p_invert = mindspore.dataset.transforms.py_transforms.Compose(
        [lambda img: img.astype(np.uint8),
         F.ToPIL(),
         F.Invert(),
         np.array])

    ds_p_invert = data_set.map(operations=transforms_p_invert, input_columns="image")

    ds_p_invert = ds_p_invert.batch(512)

    for idx, (image, _) in enumerate(ds_p_invert):
        if idx == 0:
            images_p_invert = image.asnumpy()
        else:
            images_p_invert = np.append(images_p_invert, image.asnumpy(), axis=0)

    num_samples = images_c_invert.shape[0]
    mse = np.zeros(num_samples)
    for i in range(num_samples):
        mse[i] = diff_mse(images_p_invert[i], images_c_invert[i])
    logger.info("MSE= {}".format(str(np.mean(mse))))

    if plot:
        visualize_list(images_c_invert, images_p_invert, visualize_mode=2)


def test_invert_one_channel():
    """
    Test Invert cpp op with one channel image
    """
    logger.info("Test Invert C Op With One Channel Images")

    c_op = C.Invert()

    try:
        data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
        data_set = data_set.map(operations=[C.Decode(), C.Resize((224, 224)),
                                            lambda img: np.array(img[:, :, 0])],
                                input_columns=["image"])

        data_set.map(operations=c_op, input_columns="image")

    except RuntimeError as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "The shape" in str(e)


def test_invert_md5_py():
    """
    Test Invert python op with md5 check
    """
    logger.info("Test Invert python op with md5 check")

    # Generate dataset
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)

    transforms_invert = mindspore.dataset.transforms.py_transforms.Compose(
        [F.Decode(),
         F.Invert(),
         F.ToTensor()])

    data = data_set.map(operations=transforms_invert, input_columns="image")
    # Compare with expected md5 from images
    filename = "invert_01_result_py.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)


def test_invert_md5_c():
    """
    Test Invert cpp op with md5 check
    """
    logger.info("Test Invert cpp op with md5 check")

    # Generate dataset
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)

    transforms_invert = [C.Decode(),
                         C.Resize(size=[224, 224]),
                         C.Invert(),
                         F.ToTensor()]

    data = data_set.map(operations=transforms_invert, input_columns="image")
    # Compare with expected md5 from images
    filename = "invert_01_result_c.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)


if __name__ == "__main__":
    test_invert_py(plot=False)
    test_invert_c(plot=False)
    test_invert_py_c(plot=False)
    test_invert_one_channel()
    test_invert_md5_py()
    test_invert_md5_c()

avg_line_length: 35.875 | max_line_length: 111 | alphanum_fraction: 0.579377
07a4fdcadf45ca16af301e1936af3cd96dd967fd
748
py
Python
puzzle/tests/test_candy.py
aliciawyy/dmining
513f6f036f8f258281e1282fef052a74bf9cc3d3
[ "Apache-2.0" ]
null
null
null
puzzle/tests/test_candy.py
aliciawyy/dmining
513f6f036f8f258281e1282fef052a74bf9cc3d3
[ "Apache-2.0" ]
9
2017-10-25T10:03:36.000Z
2018-06-12T22:49:22.000Z
puzzle/tests/test_candy.py
aliciawyy/dmining
513f6f036f8f258281e1282fef052a74bf9cc3d3
[ "Apache-2.0" ]
null
null
null
from parameterized import parameterized
from numpy.testing import TestCase

from .. import candy
35.619048
73
0.481283
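The test_candy.py record above retains only its imports; the test class itself was not recovered in extraction, so the gap is left as-is. Purely as an illustration of how those imports are conventionally combined, a parameterized TestCase might look like the sketch below. The function name `min_moves`, the case names, and the expected values are hypothetical, not recovered from the source.

class TestCandy(TestCase):
    # Hypothetical cases: the real inputs and expected outputs were not recovered.
    @parameterized.expand([
        ("even_piles", [1, 1], 0),
        ("uneven_piles", [3, 1], 1),
    ])
    def test_distribute(self, _name, piles, expected):
        # Assumes `candy` exposes a module-level function; the name is illustrative.
        self.assertEqual(candy.min_moves(piles), expected)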
07a6ea2e95247eb4360055919661bfab2c787424
442
py
Python
audio.py
fernandoq/quiz-show
6e130db7923d14cf1976e1c522c58f848e48f2af
[ "MIT" ]
null
null
null
audio.py
fernandoq/quiz-show
6e130db7923d14cf1976e1c522c58f848e48f2af
[ "MIT" ]
null
null
null
audio.py
fernandoq/quiz-show
6e130db7923d14cf1976e1c522c58f848e48f2af
[ "MIT" ]
null
null
null
import time
import subprocess
import os

print(os.uname())

if not os.uname()[0].startswith("Darw"):
    import pygame
    pygame.mixer.init()

# Plays a song
24.555556
69
0.708145
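The audio.py record above ends at the "# Plays a song" comment; the function that followed was lost. A minimal sketch of what such a helper could look like, assuming the module's intent of using pygame everywhere except macOS ("Darwin"): the function name, the sleep interval, and the `afplay` fallback are assumptions, not recovered code.

def play_song(path):
    # Illustrative only: the original function body was not recovered.
    if not os.uname()[0].startswith("Darw"):
        # pygame path: load the file and block until playback finishes.
        pygame.mixer.music.load(path)
        pygame.mixer.music.play()
        while pygame.mixer.music.get_busy():
            time.sleep(0.1)
    else:
        # macOS fallback using the system `afplay` utility (an assumption).
        subprocess.call(["afplay", path])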
07a74f63c4315c5c81ce34abcbe9eb4483cc9b1f
4,783
py
Python
tests/test_dynamodbHandler.py
unfoldingWord-dev/python-aws-tools
8e856697ab07c5c33e60cde2d82ac805dec3ddf3
[ "MIT" ]
1
2017-08-23T22:31:23.000Z
2017-08-23T22:31:23.000Z
tests/test_dynamodbHandler.py
unfoldingWord-dev/python-aws-tools
8e856697ab07c5c33e60cde2d82ac805dec3ddf3
[ "MIT" ]
null
null
null
tests/test_dynamodbHandler.py
unfoldingWord-dev/python-aws-tools
8e856697ab07c5c33e60cde2d82ac805dec3ddf3
[ "MIT" ]
null
null
null
from __future__ import absolute_import, unicode_literals, print_function
import mock
import unittest
import d43_aws_tools as aws_tools
from boto3.dynamodb.conditions import Attr
40.533898
104
0.59147
07a75032397b40d87190a129b70b4805e05f8c42
799
py
Python
app.py
sbustamante/heroku_app
6c8ff0b570750f3fe53ec67e24b71641167d53ce
[ "MIT" ]
null
null
null
app.py
sbustamante/heroku_app
6c8ff0b570750f3fe53ec67e24b71641167d53ce
[ "MIT" ]
null
null
null
app.py
sbustamante/heroku_app
6c8ff0b570750f3fe53ec67e24b71641167d53ce
[ "MIT" ]
null
null
null
from dash import Dash, html, dcc
import plotly.express as px
import pandas as pd

app = Dash(__name__)
server = app.server

# assume you have a "long-form" data frame
# see https://plotly.com/python/px-arguments/ for more options
df = pd.DataFrame({
    "Fruit": ["Apples", "Oranges", "Bananas", "Apples", "Oranges", "Bananas"],
    "Amount": [4, 1, 2, 2, 4, 5],
    "City": ["SF", "SF", "SF", "Montreal", "Montreal", "Montreal"]
})

fig = px.bar(df, x="Fruit", y="Amount", color="City", barmode="group")

app.layout = html.Div(children=[
    html.H1(children='Hello Dash'),

    html.Div(children='''
        Dash: A web application framework for your data.
    '''),

    dcc.Graph(
        id='example-graph',
        figure=fig
    )
])

if __name__ == '__main__':
    app.run_server(debug=True)
24.96875
78
0.617021
07a86aa8bb5513e6175425568029999ed308c9e2
23,759
py
Python
peter_sslers/web/lib/form_utils.py
aptise/peter_sslers
1dcae3fee0c1f4c67ae8a614aed7e2a3121e88b0
[ "MIT" ]
35
2016-04-21T18:55:31.000Z
2022-03-30T08:22:43.000Z
peter_sslers/web/lib/form_utils.py
aptise/peter_sslers
1dcae3fee0c1f4c67ae8a614aed7e2a3121e88b0
[ "MIT" ]
8
2018-05-23T13:38:49.000Z
2021-03-19T21:05:44.000Z
peter_sslers/web/lib/form_utils.py
aptise/peter_sslers
1dcae3fee0c1f4c67ae8a614aed7e2a3121e88b0
[ "MIT" ]
2
2016-08-18T21:07:11.000Z
2017-01-11T09:47:40.000Z
# pypi import six # local from ...lib import db as lib_db from ...lib import utils from ...model import objects as model_objects from ...model import utils as model_utils from . import formhandling # ============================================================================== def decode_args(getcreate_args): """ support for Python2/3 """ if six.PY3: for (k, v) in list(getcreate_args.items()): if isinstance(v, bytes): getcreate_args[k] = v.decode("utf8") return getcreate_args # standardized mapping for `model_utils.DomainsChallenged` to a formStash DOMAINS_CHALLENGED_FIELDS = { "http-01": "domain_names_http01", "dns-01": "domain_names_dns01", } def parse_AcmeAccountSelection( request, formStash, account_key_option=None, allow_none=None, require_contact=None, ): """ :param formStash: an instance of `pyramid_formencode_classic.FormStash` :param account_key_option: :param allow_none: :param require_contact: ``True`` if required; ``False`` if not; ``None`` for conditional logic """ account_key_pem = None account_key_pem_md5 = None dbAcmeAccount = None is_global_default = None # handle the explicit-option acmeAccountSelection = _AcmeAccountSelection() if account_key_option == "account_key_file": # this will handle form validation and raise errors. parser = AcmeAccountUploadParser(formStash) # this will have: `contact`, `private_key_cycle`, `private_key_technology` parser.require_upload(require_contact=require_contact) # update our object acmeAccountSelection.selection = "upload" acmeAccountSelection.upload_parsed = parser return acmeAccountSelection else: if account_key_option == "account_key_global_default": acmeAccountSelection.selection = "global_default" account_key_pem_md5 = formStash.results["account_key_global_default"] is_global_default = True elif account_key_option == "account_key_existing": acmeAccountSelection.selection = "existing" account_key_pem_md5 = formStash.results["account_key_existing"] elif account_key_option == "account_key_reuse": acmeAccountSelection.selection = "reuse" account_key_pem_md5 = formStash.results["account_key_reuse"] elif account_key_option == "none": if not allow_none: # `formStash.fatal_form()` will raise `FormInvalid()` formStash.fatal_form( "This form does not support no AcmeAccount selection." 
) # note the lowercase "none"; this is an explicit "no item" selection # only certain routes allow this acmeAccountSelection.selection = "none" account_key_pem_md5 = None return acmeAccountSelection else: formStash.fatal_form( message="Invalid `account_key_option`", ) if not account_key_pem_md5: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field=account_key_option, message="You did not provide a value" ) dbAcmeAccount = lib_db.get.get__AcmeAccount__by_pemMd5( request.api_context, account_key_pem_md5, is_active=True ) if not dbAcmeAccount: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field=account_key_option, message="The selected AcmeAccount is not enrolled in the system.", ) if is_global_default and not dbAcmeAccount.is_global_default: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field=account_key_option, message="The selected AcmeAccount is not the current default.", ) acmeAccountSelection.AcmeAccount = dbAcmeAccount return acmeAccountSelection # `formStash.fatal_form()` will raise `FormInvalid()` formStash.fatal_form("There was an error validating your form.") def parse_PrivateKeySelection(request, formStash, private_key_option=None): private_key_pem = None private_key_pem_md5 = None PrivateKey = None # :class:`model.objects.PrivateKey` # handle the explicit-option privateKeySelection = _PrivateKeySelection() if private_key_option == "private_key_file": # this will handle form validation and raise errors. parser = _PrivateKeyUploadParser(formStash) parser.require_upload() # update our object privateKeySelection.selection = "upload" privateKeySelection.upload_parsed = parser privateKeySelection.private_key_strategy__requested = ( model_utils.PrivateKeySelection_2_PrivateKeyStrategy["upload"] ) return privateKeySelection else: if private_key_option == "private_key_existing": privateKeySelection.selection = "existing" privateKeySelection.private_key_strategy__requested = ( model_utils.PrivateKeySelection_2_PrivateKeyStrategy["existing"] ) private_key_pem_md5 = formStash.results["private_key_existing"] elif private_key_option == "private_key_reuse": privateKeySelection.selection = "reuse" privateKeySelection.private_key_strategy__requested = ( model_utils.PrivateKeySelection_2_PrivateKeyStrategy["reuse"] ) private_key_pem_md5 = formStash.results["private_key_reuse"] elif private_key_option in ( "private_key_generate", "private_key_for_account_key", ): dbPrivateKey = lib_db.get.get__PrivateKey__by_id(request.api_context, 0) if not dbPrivateKey: formStash.fatal_field( field=private_key_option, message="Could not load the placeholder PrivateKey.", ) privateKeySelection.PrivateKey = dbPrivateKey if private_key_option == "private_key_generate": privateKeySelection.selection = "generate" privateKeySelection.private_key_strategy__requested = ( model_utils.PrivateKeySelection_2_PrivateKeyStrategy["generate"] ) elif private_key_option == "private_key_for_account_key": privateKeySelection.selection = "private_key_for_account_key" privateKeySelection.private_key_strategy__requested = ( model_utils.PrivateKeySelection_2_PrivateKeyStrategy[ "private_key_for_account_key" ] ) return privateKeySelection else: # `formStash.fatal_form()` will raise `FormInvalid()` formStash.fatal_form("Invalid `private_key_option`") if not private_key_pem_md5: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field=private_key_option, 
message="You did not provide a value" ) dbPrivateKey = lib_db.get.get__PrivateKey__by_pemMd5( request.api_context, private_key_pem_md5, is_active=True ) if not dbPrivateKey: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field=private_key_option, message="The selected PrivateKey is not enrolled in the system.", ) privateKeySelection.PrivateKey = dbPrivateKey return privateKeySelection # `formStash.fatal_form()` will raise `FormInvalid()` formStash.fatal_form("There was an error validating your form.") def form_key_selection(request, formStash, require_contact=None): """ :param formStash: an instance of `pyramid_formencode_classic.FormStash` :param require_contact: ``True`` if required; ``False`` if not; ``None`` for conditional logic """ acmeAccountSelection = parse_AcmeAccountSelection( request, formStash, account_key_option=formStash.results["account_key_option"], require_contact=require_contact, ) if acmeAccountSelection.selection == "upload": key_create_args = acmeAccountSelection.upload_parsed.getcreate_args key_create_args["event_type"] = "AcmeAccount__insert" key_create_args[ "acme_account_key_source_id" ] = model_utils.AcmeAccountKeySource.from_string("imported") (dbAcmeAccount, _is_created,) = lib_db.getcreate.getcreate__AcmeAccount( request.api_context, **key_create_args ) acmeAccountSelection.AcmeAccount = dbAcmeAccount privateKeySelection = parse_PrivateKeySelection( request, formStash, private_key_option=formStash.results["private_key_option"], ) if privateKeySelection.selection == "upload": key_create_args = privateKeySelection.upload_parsed.getcreate_args key_create_args["event_type"] = "PrivateKey__insert" key_create_args[ "private_key_source_id" ] = model_utils.PrivateKeySource.from_string("imported") key_create_args["private_key_type_id"] = model_utils.PrivateKeyType.from_string( "standard" ) ( dbPrivateKey, _is_created, ) = lib_db.getcreate.getcreate__PrivateKey__by_pem_text( request.api_context, **key_create_args ) privateKeySelection.PrivateKey = dbPrivateKey elif privateKeySelection.selection == "generate": dbPrivateKey = lib_db.get.get__PrivateKey__by_id(request.api_context, 0) if not dbPrivateKey: formStash.fatal_field( field="private_key_option", message="Could not load the placeholder PrivateKey for autogeneration.", ) privateKeySelection.PrivateKey = dbPrivateKey return (acmeAccountSelection, privateKeySelection) def form_domains_challenge_typed(request, formStash, http01_only=False): domains_challenged = model_utils.DomainsChallenged() domain_names_all = [] try: # 1: iterate over the submitted domains by segment for (target_, source_) in DOMAINS_CHALLENGED_FIELDS.items(): submitted_ = formStash.results.get(source_) if submitted_: # this function checks the domain names match a simple regex # it will raise a `ValueError("invalid domain")` on the first invalid domain submitted_ = utils.domains_from_string(submitted_) if submitted_: domain_names_all.extend(submitted_) domains_challenged[target_] = submitted_ # 2: ensure there are domains if not domain_names_all: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field="Error_Main", message="no domain names submitted", ) # 3: ensure there is no overlap domain_names_all_set = set(domain_names_all) if len(domain_names_all) != len(domain_names_all_set): # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field="Error_Main", message="a domain name can only be associated to one challenge 
type", ) # 4: maybe we only want http01 domains submitted? if http01_only: for (k, v) in domains_challenged.items(): if k == "http-01": continue if v: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field="Error_Main", message="only http-01 domains are accepted by this form", ) except ValueError as exc: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field="Error_Main", message="invalid domain names detected" ) return domains_challenged def form_single_domain_challenge_typed(request, formStash, challenge_type="http-01"): domains_challenged = model_utils.DomainsChallenged() # this function checks the domain names match a simple regex domain_names = utils.domains_from_string(formStash.results["domain_name"]) if not domain_names: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field(field="domain_name", message="Found no domain names") if len(domain_names) != 1: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field="domain_name", message="This endpoint currently supports only 1 domain name", ) domains_challenged[challenge_type] = domain_names return domains_challenged
39.271074
151
0.638032
07a919ed87f13258649cbf2c9c6e2971a4de419e
5,568
py
Python
AI_Engine_Development/Feature_Tutorials/07-AI-Engine-Floating-Point/Utils/GenerationLib.py
jlamperez/Vitis-Tutorials
9a5b611caabb5656bbb2879116e032227b164bfd
[ "Apache-2.0" ]
1
2022-03-09T06:15:43.000Z
2022-03-09T06:15:43.000Z
AI_Engine_Development/Feature_Tutorials/07-AI-Engine-Floating-Point/Utils/GenerationLib.py
jlamperez/Vitis-Tutorials
9a5b611caabb5656bbb2879116e032227b164bfd
[ "Apache-2.0" ]
null
null
null
AI_Engine_Development/Feature_Tutorials/07-AI-Engine-Floating-Point/Utils/GenerationLib.py
jlamperez/Vitis-Tutorials
9a5b611caabb5656bbb2879116e032227b164bfd
[ "Apache-2.0" ]
null
null
null
#
# Copyright 2020-2021 Xilinx, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import numpy as np
from math import *
import random
33.341317
119
0.541667
07aa6fe145a48bc2b3e22c3ee731584817a386ba
357
py
Python
Object Oriented Programming/Lecture 01/Intro.py
ashish-ad/Python-Projects
7f49476b6945189165d536629109030f10603556
[ "Unlicense" ]
1
2021-10-08T06:52:12.000Z
2021-10-08T06:52:12.000Z
Object Oriented Programming/Lecture 01/Intro.py
ashish-ad/Python-Projects
7f49476b6945189165d536629109030f10603556
[ "Unlicense" ]
null
null
null
Object Oriented Programming/Lecture 01/Intro.py
ashish-ad/Python-Projects
7f49476b6945189165d536629109030f10603556
[ "Unlicense" ]
null
null
null
item1 = 'phone'
item1_price = 100
item1_quantity = 5
item1_price_total = item1_price * item1_quantity

print(type(item1))              # str
print(type(item1_price))        # int
print(type(item1_quantity))     # int
print(type(item1_price_total))  # int

# output:
# <class 'str'>
# <class 'int'>
# <class 'int'>
# <class 'int'>
21
48
0.59944
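Since the Intro.py record above is the first lecture of an OOP course and still tracks each item with parallel variables, a short sketch of the natural class-based refactor may help; the `Item` class and its method names are illustrative, not taken from the course material.

class Item:
    def __init__(self, name, price, quantity):
        self.name = name
        self.price = price
        self.quantity = quantity

    def total_price(self):
        # One object now carries the state the parallel variables held.
        return self.price * self.quantity


item1 = Item('phone', 100, 5)
print(item1.total_price())  # 500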
07ab77d7eb12a86186be6ca7c7efa3f2eb65e7be
342
py
Python
configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_16_36WoodBlock.py
THU-DA-6D-Pose-Group/self6dpp
c267cfa55e440e212136a5e9940598720fa21d16
[ "Apache-2.0" ]
33
2021-12-15T07:11:47.000Z
2022-03-29T08:58:32.000Z
configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_16_36WoodBlock.py
THU-DA-6D-Pose-Group/self6dpp
c267cfa55e440e212136a5e9940598720fa21d16
[ "Apache-2.0" ]
3
2021-12-15T11:39:54.000Z
2022-03-29T07:24:23.000Z
configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_16_36WoodBlock.py
THU-DA-6D-Pose-Group/self6dpp
c267cfa55e440e212136a5e9940598720fa21d16
[ "Apache-2.0" ]
null
null
null
_base_ = "./FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_01_02MasterChefCan.py" OUTPUT_DIR = "output/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/16_36WoodBlock" DATASETS = dict(TRAIN=("ycbv_036_wood_block_train_pbr",))
85.5
154
0.903509
07abd88c1750bfa23ce141be4914e78e9e578d95
316
py
Python
sqlakeyset/__init__.py
jhihruei/sqlakeyset
0aa0f6e041dc37bc5f918303578875ad334cad6c
[ "Unlicense" ]
null
null
null
sqlakeyset/__init__.py
jhihruei/sqlakeyset
0aa0f6e041dc37bc5f918303578875ad334cad6c
[ "Unlicense" ]
null
null
null
sqlakeyset/__init__.py
jhihruei/sqlakeyset
0aa0f6e041dc37bc5f918303578875ad334cad6c
[ "Unlicense" ]
null
null
null
from .columns import OC
from .paging import get_page, select_page, process_args
from .results import serialize_bookmark, unserialize_bookmark, Page, Paging

__all__ = [
    'OC',
    'get_page',
    'select_page',
    'serialize_bookmark',
    'unserialize_bookmark',
    'Page',
    'Paging',
    'process_args'
]
19.75
75
0.693038
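The sqlakeyset __init__.py record above re-exports the package's keyset-pagination helpers. A usage sketch based on the library's documented API, assuming an existing SQLAlchemy `session` and a `Book` model (both assumptions, not part of this record):

# Sketch only: `session` and `Book` are assumed to exist elsewhere.
from sqlakeyset import get_page, serialize_bookmark, unserialize_bookmark

q = session.query(Book).order_by(Book.author, Book.id)  # a total ordering is required
page = get_page(q, per_page=20)                         # first page
bookmark = serialize_bookmark(page.paging.bookmark_next)  # URL-safe string
next_page = get_page(q, per_page=20, page=unserialize_bookmark(bookmark))

Keyset pagination keeps page fetches O(page size) regardless of offset, which is the point of passing a bookmark instead of an OFFSET.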
07abdc1f2ef1ad7ab554d9cccaa9f73782091369
6,609
py
Python
low_rank_local_connectivity/models/simple_model.py
shaun95/google-research
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
[ "Apache-2.0" ]
1
2022-03-13T21:48:52.000Z
2022-03-13T21:48:52.000Z
low_rank_local_connectivity/models/simple_model.py
shaun95/google-research
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
[ "Apache-2.0" ]
null
null
null
low_rank_local_connectivity/models/simple_model.py
shaun95/google-research
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
[ "Apache-2.0" ]
1
2022-03-30T07:20:29.000Z
2022-03-30T07:20:29.000Z
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Simple model for image classification.

The model is multiple
conv/locally_connected/wide_conv/low_rank_locally_connected layers
followed by a fully connected layer.

Changes to the model architecture can be made by modifying
simple_model_config.py file.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import copy
import os

import tensorflow.compat.v1 as tf

from low_rank_local_connectivity import layers
from low_rank_local_connectivity import utils

MOMENTUM = 0.9
EPS = 1e-5
37.982759
80
0.681192
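The simple_model.py record above keeps the docstring and imports but loses the model class. As a minimal sketch of the architecture the docstring describes (a stack of convolutional layers followed by a fully connected readout), written against tf.compat.v1 like the module itself; the layer sizes are illustrative, since the real model reads them from simple_model_config.py:

def simple_model_sketch(images, num_classes):
    # Two conv/pool blocks, then a dense classifier head.
    net = images
    for filters in (32, 64):
        net = tf.layers.conv2d(net, filters, kernel_size=3, padding="same",
                               activation=tf.nn.relu)
        net = tf.layers.max_pooling2d(net, pool_size=2, strides=2)
    net = tf.layers.flatten(net)
    return tf.layers.dense(net, num_classes)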
07ad506b318b0e47d29c2d5e943847cb809ea5ef
721
py
Python
adafruit_circuitpython_libs/adafruit-circuitpython-bundle-py-20210214/examples/icm20x_icm20948_gyro_data_rate_test.py
jacoblb64/pico_rgb_keypad_hid
3251ca6a98ef86d9f98c54f639c4d61810601a0b
[ "MIT" ]
47
2021-02-15T23:02:36.000Z
2022-03-04T21:30:03.000Z
adafruit_circuitpython_libs/adafruit-circuitpython-bundle-py-20210214/examples/icm20x_icm20948_gyro_data_rate_test.py
jacoblb64/pico_rgb_keypad_hid
3251ca6a98ef86d9f98c54f639c4d61810601a0b
[ "MIT" ]
7
2021-02-19T20:00:08.000Z
2022-01-14T10:51:12.000Z
adafruit_circuitpython_libs/adafruit-circuitpython-bundle-py-20210214/examples/icm20x_icm20948_gyro_data_rate_test.py
jacoblb64/pico_rgb_keypad_hid
3251ca6a98ef86d9f98c54f639c4d61810601a0b
[ "MIT" ]
14
2021-02-20T17:40:56.000Z
2022-01-01T19:53:38.000Z
# SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries
# SPDX-License-Identifier: MIT

import time
import board
import busio
from adafruit_icm20x import ICM20948

cycles = 200
i2c = busio.I2C(board.SCL, board.SDA)
icm = ICM20948(i2c)

# Cycle between two data rates
# Best viewed in the Mu serial plotter where you can see how
# the data rate affects the resolution of the data
while True:
    icm.gyro_data_rate_divisor = 0  # minimum
    print("Data Rate:", icm.gyro_data_rate)
    time.sleep(2)
    for i in range(cycles):
        print(icm.gyro)

    icm.gyro_data_rate_divisor = 255  # maximum
    print("Data Rate:", icm.gyro_data_rate)
    time.sleep(2)
    for i in range(cycles):
        print(icm.gyro)
25.75
62
0.718447
07afa89bd7f7b951972777c2099542dd2fbf96da
304
py
Python
1186.py
TheLurkingCat/TIOJ
077e1cd22239d8f6bc1cd7561f27c68143e80263
[ "MIT" ]
null
null
null
1186.py
TheLurkingCat/TIOJ
077e1cd22239d8f6bc1cd7561f27c68143e80263
[ "MIT" ]
null
null
null
1186.py
TheLurkingCat/TIOJ
077e1cd22239d8f6bc1cd7561f27c68143e80263
[ "MIT" ]
null
null
null
a = int(input())
while a:
    for x in range(a-1):
        out = '*' + ' ' * (a-x-2) + '*' + ' ' * (a-x-2) + '*'
        print(out.center(2*a-1))
    print('*' * (2 * a - 1))
    for x in range(a-1):
        out = '*' + ' ' * x + '*' + ' ' * x + '*'
        print(out.center(2*a-1))
    a = int(input())
27.636364
61
0.348684
07afdc77d12fe88ab48cead7329435544528b522
8,032
py
Python
utils/common.py
initialed85/eds-cctv-system
fcdb7e7e23327bf3a901d23d506b3915833027d1
[ "MIT" ]
null
null
null
utils/common.py
initialed85/eds-cctv-system
fcdb7e7e23327bf3a901d23d506b3915833027d1
[ "MIT" ]
null
null
null
utils/common.py
initialed85/eds-cctv-system
fcdb7e7e23327bf3a901d23d506b3915833027d1
[ "MIT" ]
null
null
null
import datetime import json import os from pathlib import Path from types import SimpleNamespace from typing import List from typing import NamedTuple, Union, Optional, Callable from uuid import uuid3, NAMESPACE_DNS from dateutil.parser import parse _VIDEO_SUFFIXES = [".mkv", ".mp4"] _IMAGE_SUFFIXES = [".jpg"] _PERMITTED_EXTENSIONS = _VIDEO_SUFFIXES + _IMAGE_SUFFIXES def get_sorted_paths(path: Path) -> List[Path]: return sorted(Path(path).iterdir(), key=os.path.getmtime) def format_timestamp_for_go(timestamp: Union[datetime.datetime, str]) -> str: if isinstance(timestamp, str): timestamp = parse(timestamp) us = timestamp.strftime("%f") tz_raw = timestamp.strftime("%z") tz = "{}:{}".format(tz_raw[0:3], tz_raw[3:]) return timestamp.strftime(f"%Y-%m-%dT%H:%M:%S.{us}00{tz}") def parse_paths(paths: List[Path], tzinfo: datetime.tzinfo, parse_method: Callable) -> List[PathDetails]: return [ y for y in [parse_method(path=x, tzinfo=tzinfo) for x in paths if x is not None] if y is not None ] def build_event_for_some_path_details(some_path_details: List[PathDetails], path: Path): if len(some_path_details) != 4: raise ValueError( f"expected some_path_details to be 4 long (and related); instead it was {len(some_path_details)} long" ) event_ids = list(set([x.event_id for x in some_path_details])) if len(event_ids) != 1: raise ValueError( f"expected all PathDetails to have a common event_id; instead they were {event_ids}" ) camera_ids = list(set([x.camera_id for x in some_path_details])) if len(camera_ids) != 1: raise ValueError( f"expected all PathDetails to have a common camera_id; instead they were {camera_ids}" ) camera_names = list(set([x.camera_name for x in some_path_details])) if len(camera_names) != 1: raise ValueError( f"expected all PathDetails to have a common camera_name; instead they were {camera_names}" ) high_res_image_paths = list( set([x.path for x in some_path_details if x.is_image and not x.is_lowres]) ) if len(high_res_image_paths) != 1: raise ValueError( f"expected to find 1 high_res_image_path from PathDetails; instead found {high_res_image_paths}" ) low_res_image_paths = list( set([x.path for x in some_path_details if x.is_image and x.is_lowres]) ) if len(low_res_image_paths) != 1: raise ValueError( f"expected to find 1 low_res_image_path from PathDetails; instead found {low_res_image_paths}" ) high_res_video_paths = list( set([x.path for x in some_path_details if not x.is_image and not x.is_lowres]) ) if len(high_res_video_paths) != 1: raise ValueError( f"expected to find 1 high_res_video_path from PathDetails; instead found {high_res_video_paths}" ) low_res_video_paths = list( set([x.path for x in some_path_details if not x.is_image and x.is_lowres]) ) if len(low_res_video_paths) != 1: raise ValueError( f"expected to find 1 low_res_video_path from PathDetails; instead found {low_res_video_paths}" ) timestamp = sorted([x.timestamp for x in some_path_details])[0] high_res_image_path = high_res_image_paths[0] low_res_image_path = low_res_image_paths[0] high_res_video_path = high_res_video_paths[0] low_res_video_path = low_res_video_paths[0] # in Go: # eventId := uuid.NewSHA1( # uuid.NameSpaceDNS, # []byte(fmt.Sprintf("%v, %v, %v, %v, %v", timestamp, highResImagePath, lowResImagePath, highResVideoPath, lowResVideoPath)), # ) event_id = uuid3( NAMESPACE_DNS, f"{format_timestamp_for_go(timestamp)}, {high_res_image_path}, {low_res_image_path}, {high_res_video_path}, {low_res_video_path}", ) return Event( event_id=str(event_id), timestamp=timestamp, camera_name=camera_names[0], 
high_res_image_path=str(path / high_res_image_path), low_res_image_path=str(path / low_res_image_path), high_res_video_path=str(path / high_res_video_path), low_res_video_path=str(path / low_res_video_path), ) def relate_path_details( some_path_details: List[PathDetails], get_key_methods: List[Callable] ) -> List[List[PathDetails]]: some_path_details_by_key = {} for path_details in some_path_details: keys = [x(path_details) for x in get_key_methods] for key in keys: some_path_details_by_key.setdefault(key, []) some_path_details_by_key[key] += [path_details] viable_some_path_details_by_key = { k: v for k, v in some_path_details_by_key.items() if len(v) == 4 } deduplicated_path_details = [] for some_path_details in viable_some_path_details_by_key.values(): if some_path_details not in deduplicated_path_details: deduplicated_path_details += [some_path_details] return deduplicated_path_details def build_events_for_related_path_details( related_path_details: List[List[PathDetails]], path: Path ) -> List[Event]: events: List[Event] = [] for some_path_details in related_path_details: events += [ build_event_for_some_path_details( some_path_details=some_path_details, path=path ) ] sorted_events = sorted(events, key=lambda x: x.timestamp) for event in sorted_events: event.timestamp = format_timestamp_for_go(timestamp=event.timestamp) return sorted_events def build_json_lines_from_events(events: List[Event]) -> str: return "\n".join( [ json.dumps( { "event_id": x.event_id, "timestamp": x.timestamp, "camera_name": x.camera_name, "high_res_image_path": x.high_res_image_path, "low_res_image_path": x.low_res_image_path, "high_res_video_path": x.high_res_video_path, "low_res_video_path": x.low_res_video_path, } ) for x in events ] ) def write_to_file(path: Path, data: str): with open(str(path), "w") as f: f.write(data) def rebuild_event_store(root_path: Path, tzinfo: datetime.tzinfo, json_path: Path, parse_method: Callable, get_key_methods: List[Callable]): print(f"getting sorted paths from {root_path}...") sorted_paths = get_sorted_paths(path=root_path) print(f"got {len(sorted_paths)} sorted paths") print("parsing sorted paths...") some_path_details = parse_paths(paths=sorted_paths, tzinfo=tzinfo, parse_method=parse_method) print(f"got {len(some_path_details)} parsed paths") print("relating parsed paths...") related_path_details = relate_path_details(some_path_details=some_path_details, get_key_methods=get_key_methods) print(f"got {len(related_path_details)} related paths") print("building events...") events = build_events_for_related_path_details( related_path_details=related_path_details, path=root_path ) print(f"built {len(events)} events") print("building json lines...") json_lines = build_json_lines_from_events(events=events) print(f"built {len(json_lines)} bytes") print(f"writing to {json_path}") write_to_file(path=json_path, data=json_lines) print("done.")
33.606695
140
0.671564
07b0911062690c0f062a5b15420ab76dd3a0ddb5
807
py
Python
schoolio/migrations/0005_auto_20190927_1423.py
schoolio-co/schoolio_site
a616807c504c7a7cab3b9f7c3dab42f827cb0580
[ "MIT", "Unlicense" ]
null
null
null
schoolio/migrations/0005_auto_20190927_1423.py
schoolio-co/schoolio_site
a616807c504c7a7cab3b9f7c3dab42f827cb0580
[ "MIT", "Unlicense" ]
null
null
null
schoolio/migrations/0005_auto_20190927_1423.py
schoolio-co/schoolio_site
a616807c504c7a7cab3b9f7c3dab42f827cb0580
[ "MIT", "Unlicense" ]
null
null
null
# Generated by Django 2.2.1 on 2019-09-27 14:23

from django.db import migrations, models
27.827586
74
0.607187
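The migration record above is cut off after its imports; the Migration class body for 0005_auto_20190927_1423 was not recovered. A skeleton of what an auto-generated migration of this shape looks like, with the dependency name and operations left as placeholders rather than recovered values:

class Migration(migrations.Migration):

    dependencies = [
        ('schoolio', '0004_previous_migration'),  # hypothetical predecessor name
    ]

    operations = [
        # e.g. migrations.AddField(...) / migrations.AlterField(...);
        # the real operations were not recovered.
    ]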
07b47bbb8eb7acb013788dd5e04ca55c384f4c1f
7,732
py
Python
scripts/extract_gs_citations.py
akhilpandey95/scholarlyimpact
215ae832c90f0564fa0301e4c3f1c99525617625
[ "MIT" ]
null
null
null
scripts/extract_gs_citations.py
akhilpandey95/scholarlyimpact
215ae832c90f0564fa0301e4c3f1c99525617625
[ "MIT" ]
18
2020-02-20T23:40:26.000Z
2020-10-20T04:05:43.000Z
scripts/extract_gs_citations.py
akhilpandey95/scholarlyimpact
215ae832c90f0564fa0301e4c3f1c99525617625
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # This Source Code Form is subject to the terms of the MIT # License. If a copy of the same was not distributed with this # file, You can obtain one at # https://github.com/akhilpandey95/scholarlyimpact/blob/master/LICENSE. import os import csv import glob import json import requests import subprocess import numpy as np import pandas as pd from tqdm import tqdm from ast import literal_eval from fp.fp import FreeProxy from torrequest import TorRequest from scholarly import scholarly from collections import Counter, OrderedDict from operator import attrgetter # class definition for Rate limiting # function for obtaining the citations using the dimensions web url def get_gs_citations_web(title): """ Use the google scholar web URL and requests API to obtain the citations for a given title of a scholarly article Parameters ---------- arg1 | title: str The title of a scholarly article Returns ------- Dictionary dict """ while True: try: # call the lumproxy object scholarly.use_lum_proxy() # make the query query = scholarly.search_pubs(title) # come out break except Exception as e: # come out and try again break # return the response dict return next(query) # function for assigning new IP address def assign_new_ip(text=False): """ Reset the identity using TorRequest Parameters ---------- arg1 [OPTIONAL]| text: bool A boolean flag to return the IP address tuple (old, morphed) Returns ------- boolean True/False """ try: # pass the hashed password req = TorRequest(password='scholarly_password') # return the ip address normal_identity = requests.get('http://ipecho.net/plain') # reset the identity using Tor req.reset_identity() # make a request now morphed_identity = req.get('http://ipecho.net/plain') # return the status depending on the flag if morphed_identity != normal_identity: if text == True: # return the ip address pairs as a tuple return (normal_identity.text, morphed_identity.text) else: return True else: # return just the status return False except: return False # function for assigning a new proxy def set_new_proxy(text=True): """ Reset the identity using FreeProxy Parameters ---------- arg1 [OPTIONAL]| text: bool A boolean flag to return the IP address tuple (old, morphed) Returns ------- Address fp.fp.FreeProxy """ while True: # call the freeproxy object proxy = FreeProxy(rand=True, timeout=1).get() # allocate the proxy address to scholarly proxy_works = scholarly.use_proxy(http=proxy, https=proxy) # check it the ip address works if proxy_works: # come out break # print the ip address depending on the text argument if text: # print the working ip print("Working proxy:", proxy) # return the proxy details return proxy # function for connecting tor to scholarly def scholarly_init_connection(): """ Bind TorRequest to Scholarly service Parameters ---------- No arguments Returns ------- Nothing """ while True: # assign new tor identity ips = assign_new_ip(text=True) # use the tor request for scholarly tor_req = scholarly.use_tor(tor_sock_port=9050, \ tor_control_port=9051, \ tor_pw="scholarly_password") if tor_req: # come out of the loop, when successful break # print the tor identity print("Working Tor identity:", ips[1]) # function for restarting the system tor service def restart_tor_system_service(text=False): """ Use the os module to restart the tor service Parameters ---------- arg1 [OPTIONAL]| text: bool A boolean flag to return the status of the command Returns ------- Boolean bool """ # subprocess command for stopping the tor service tor_stop = 
subprocess.Popen(['service', 'tor', 'stop']) # subprocess command for restarting the tor service tor_restart = subprocess.Popen(['service', 'tor', 'restart']) # subprocess command for restarting the tor service tor_status = subprocess.Popen(['service', 'tor', 'status'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) # if the label is set to true then print the output if text: for output in tor_status.stdout.readlines(): print(output.strip()) # pipe out the stdout, stderr for the subprocess stdout, stderr = tor_status.communicate() if len(stderr) > 0: # return False return False else: # return true if successful return True def get_articleInfo(title): """ Use the google scholar web URL and requests API to obtain the citations for a given title of a scholarly article Parameters ---------- arg1 | title: str The title of a scholarly article Returns ------- Dictionary dict """ while True: try: # init the connection with scholarly and tor scholarly_init_connection() # search for the query search_query = scholarly.search_pubs(title) # print success print("Got the results of the query") # come out of the loop break except Exception as e: # print error message print("Attempt Failed, patching new tor identity") # restart the system tor service restart_tor_system_service(text=False) # assign new connection again scholarly_init_connection() # obtain the bib entry of the scholarly article pub = next(search_query) # return the bib entry return pub if __name__ == '__main__': # iterate over the length length_of_file = len(open('paper_titles.txt').readlines()) # place the contents of the list into a file alt_list = open('paper_titles.txt').readlines() # iterate over the length of the file # write the results to a file for i in tqdm(range(length_of_file)): alt_info = open('paper_titles.txt', 'r+') cit_info = open('citations_gs.csv', 'a') cit_info.write(str(alt_list[i].strip( ).split('\t')[0]) + ',' + str(get_articleInfo(alt_list[i].strip().split('\t')[1]))) cit_info.write('\n') cit_info.close() alt_info.seek(0) alt_info.truncate() alt_info.writelines(alt_list[i+1:]) alt_info.close()
26.847222
91
0.602431
07b56e4319a20d96b921852eb21d1c92a5f276de
4,672
py
Python
dist-packages/reportlab/pdfgen/pathobject.py
Jianwei-Wang/python2.7_lib
911b8e81512e5ac5f13e669ab46f7693ed897378
[ "PSF-2.0" ]
51
2015-01-20T19:50:34.000Z
2022-03-05T21:23:32.000Z
dist-packages/reportlab/pdfgen/pathobject.py
Jianwei-Wang/python2.7_lib
911b8e81512e5ac5f13e669ab46f7693ed897378
[ "PSF-2.0" ]
16
2015-11-15T04:23:43.000Z
2021-09-27T14:14:20.000Z
src/reportlab/pdfgen/pathobject.py
guildenstern70/pyfab
34d786fec17192591ac3c5f73913a9b04311695a
[ "MIT" ]
46
2015-03-28T10:18:14.000Z
2021-12-16T15:57:47.000Z
#Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/pdfgen/pathobject.py
__version__=''' $Id$ '''
__doc__="""
PDFPathObject is an efficient way to draw paths on a Canvas. Do not
instantiate directly, obtain one from the Canvas instead.

Progress Reports:
8.83, 2000-01-13, gmcm: created from pdfgen.py
"""

from reportlab.pdfgen import pdfgeom
from reportlab.lib.rl_accel import fp_str
36.5
109
0.627354
07b58dc361c480dc7628924d4fba99b729151138
687
py
Python
client/modules/Wikipedia.py
devagul93/Jarvis-System
8d1865b19bb8530831c868147c3b27a1c3bad59b
[ "MIT" ]
null
null
null
client/modules/Wikipedia.py
devagul93/Jarvis-System
8d1865b19bb8530831c868147c3b27a1c3bad59b
[ "MIT" ]
null
null
null
client/modules/Wikipedia.py
devagul93/Jarvis-System
8d1865b19bb8530831c868147c3b27a1c3bad59b
[ "MIT" ]
null
null
null
import wikipedia
import re
import TCPclient as client

WORDS = ["WIKIPEDIA", "SEARCH", "INFORMATION"]
19.083333
61
0.679767
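The Wikipedia.py record above keeps only its imports and WORDS list; the module body was lost. Jasper-style voice modules conventionally pair WORDS with isValid()/handle() functions, so a sketch of that convention follows; everything below (including how the result is spoken) is an assumption, not recovered code.

def isValid(text):
    # Trigger when any of the WORDS appears in the transcribed phrase.
    return bool(re.search(r'\b(wikipedia|search|information)\b', text, re.IGNORECASE))

def handle(text, mic):
    # Strip the trigger words and look up the remainder.
    topic = re.sub(r'\b(wikipedia|search|information)\b', '', text,
                   flags=re.IGNORECASE).strip()
    try:
        summary = wikipedia.summary(topic, sentences=2)
    except wikipedia.exceptions.WikipediaException:
        summary = "I could not find anything on that topic."
    mic.say(summary)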
07b68a248a019f47e10cdf7898c67384df56c25f
1,447
py
Python
Madlibs/madlibs.py
nikhil-amin/python-mini-project
cd70a6a43408ce74cff501ce4d4658ab82260c2d
[ "MIT" ]
2
2021-08-20T08:59:28.000Z
2021-11-23T00:17:15.000Z
Madlibs/madlibs.py
nikhil-amin/python-mini-project
cd70a6a43408ce74cff501ce4d4658ab82260c2d
[ "MIT" ]
null
null
null
Madlibs/madlibs.py
nikhil-amin/python-mini-project
cd70a6a43408ce74cff501ce4d4658ab82260c2d
[ "MIT" ]
1
2022-01-06T16:35:43.000Z
2022-01-06T16:35:43.000Z
import random

print("Title : Eat, Drink, And Be Sick")

noun = []
for i in range(4):
    n = input("Enter noun : ")
    noun.append(n)

plural = []
for i in range(6):
    pn = input("Enter plural noun : ")
    plural.append(pn)

adjective = []
for i in range(2):
    a = input("Enter adjective : ")
    adjective.append(a)

adverb = input("Enter adverb : ")
letter = input("Enter any letter : ")
body_part = input("Enter any body part : ")

print("An inspector from the Department of Health and ", random.choice(noun), " Services paid a surprise visit to our ", random.choice(adjective), " school cafeteria.")
print("The lunch special, prepared by our ", random.choice(adjective), "dietician, was spaghetti and ", random.choice(noun), " balls with a choice of either a ", random.choice(noun), " salad or French ", random.choice(plural), ".")
print("The inspector found the meat-", random.choice(plural), " to be overcooked and discovered a live ", random.choice(noun), " in the fries,causing him to have a " + body_part + " ache.")
print("In response, he threw up all over his ", random.choice(plural), ".")
print("In his report, the inspector " + adverb + " recommended that the school cafeteria serve only nutritious ", random.choice(plural), " as well as low-calorie ", random.choice(plural), " and that all of the saturated ", random.choice(plural), " be eliminated.")
print("He rated the cafeteria a " + letter + "-minus.")
60.291667
270
0.676572
07b69c4f080b6ae079038a0250ba13b6035a0cda
191
py
Python
scripts/transpose.py
saikrishnarallabandi/python_connectionist
e3f8f92c8de865190ad727951eb2b0e514248afc
[ "Apache-2.0" ]
null
null
null
scripts/transpose.py
saikrishnarallabandi/python_connectionist
e3f8f92c8de865190ad727951eb2b0e514248afc
[ "Apache-2.0" ]
null
null
null
scripts/transpose.py
saikrishnarallabandi/python_connectionist
e3f8f92c8de865190ad727951eb2b0e514248afc
[ "Apache-2.0" ]
null
null
null
import numpy

g = open('/home/srallaba/mgc/transposed/arctic_a0404.mgc', 'w')
x = numpy.loadtxt('/home/srallaba/mgc_spaces/arctic_a0404.mgc')
numpy.savetxt(g, numpy.transpose(x))
g.close()
31.833333
64
0.73822
07b6a4b93bbebd4e00153c0e368d7d75f4691489
7,337
py
Python
tests/zone_api_test/core/zone_manager_test.py
yfaway/zone-apis
4aa4120cb4a66812dac1d32e86e825bbafe652b8
[ "MIT" ]
1
2021-11-22T11:51:17.000Z
2021-11-22T11:51:17.000Z
tests/zone_api_test/core/zone_manager_test.py
yfaway/zone-apis
4aa4120cb4a66812dac1d32e86e825bbafe652b8
[ "MIT" ]
20
2021-01-31T00:20:34.000Z
2022-01-09T18:02:31.000Z
tests/zone_api_test/core/zone_manager_test.py
yfaway/zone-apis
4aa4120cb4a66812dac1d32e86e825bbafe652b8
[ "MIT" ]
null
null
null
from zone_api.core.zone_manager import ZoneManager
from zone_api import platform_encapsulator as pe
from zone_api.core.zone import Zone
from zone_api.core.zone_event import ZoneEvent
from zone_api.core.devices.dimmer import Dimmer
from zone_api.core.devices.switch import Fan, Light, Switch
from zone_api.core.devices.illuminance_sensor import IlluminanceSensor
from zone_api.core.devices.motion_sensor import MotionSensor
from zone_api.core.actions.turn_on_switch import TurnOnSwitch
from zone_api_test.core.device_test import DeviceTest

ILLUMINANCE_THRESHOLD_IN_LUX = 8
INVALID_ITEM_NAME = 'invalid item name'
40.535912
118
0.694698
07b77a97c35aea2ef5b761b745880bae3410a131
2,796
py
Python
meiduo_mall/meiduo_mall/apps/orders/views.py
Zasling/meiduo_mall33
ec55597758d5052b311d65aee44533b001f6ddd8
[ "MIT" ]
1
2019-04-12T08:56:29.000Z
2019-04-12T08:56:29.000Z
meiduo_mall/meiduo_mall/apps/orders/views.py
Zasling/meiduo_mall33
ec55597758d5052b311d65aee44533b001f6ddd8
[ "MIT" ]
null
null
null
meiduo_mall/meiduo_mall/apps/orders/views.py
Zasling/meiduo_mall33
ec55597758d5052b311d65aee44533b001f6ddd8
[ "MIT" ]
1
2020-03-30T14:35:22.000Z
2020-03-30T14:35:22.000Z
from rest_framework.response import Response
from rest_framework.views import APIView
from django_redis import get_redis_connection
from goods.models import SKU
from decimal import Decimal
from rest_framework.generics import CreateAPIView, ListAPIView
from rest_framework.mixins import ListModelMixin
from orders.serializers import OrderShowSerializer, OrderSaveSerializer, OrderListSerializer, CommentSerializers, \
    CommentSaveSerializers, CommentShowSerializers
from users.models import User
from orders.models import OrderInfo, OrderGoods
from orders.utils import PageNum
from rest_framework.filters import OrderingFilter
31.066667
115
0.689199
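The orders/views.py record above lost its view classes in extraction (only imports and the stubs of its comments survive). As a sketch of the kind of endpoint those imports imply, not recovered code: the class name, the queryset filter, and the 'create_time' field below are all assumptions.

class OrderListView(ListAPIView):
    """Hypothetical paginated list of the current user's orders."""
    serializer_class = OrderListSerializer
    pagination_class = PageNum

    def get_queryset(self):
        # 'create_time' is an assumed field on OrderInfo.
        return OrderInfo.objects.filter(user=self.request.user).order_by('-create_time')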
07b88bc36babff3a11858ef34a69a78ca7bd4caf
37,841
py
Python
src/streamlink_cli/main.py
melmorabity/streamlink
24c59a23103922977991acc28741a323d8efa7a1
[ "BSD-2-Clause" ]
null
null
null
src/streamlink_cli/main.py
melmorabity/streamlink
24c59a23103922977991acc28741a323d8efa7a1
[ "BSD-2-Clause" ]
null
null
null
src/streamlink_cli/main.py
melmorabity/streamlink
24c59a23103922977991acc28741a323d8efa7a1
[ "BSD-2-Clause" ]
null
null
null
import argparse import errno import logging import os import platform import signal import sys from collections import OrderedDict from contextlib import closing from distutils.version import StrictVersion from functools import partial from gettext import gettext from itertools import chain from pathlib import Path from time import sleep from typing import List import requests from socks import __version__ as socks_version from websocket import __version__ as websocket_version import streamlink.logger as logger from streamlink import NoPluginError, PluginError, StreamError, Streamlink, __version__ as streamlink_version from streamlink.cache import Cache from streamlink.exceptions import FatalPluginError from streamlink.plugin import Plugin, PluginOptions from streamlink.stream import StreamIO, StreamProcess from streamlink.utils.named_pipe import NamedPipe from streamlink_cli.argparser import build_parser from streamlink_cli.compat import DeprecatedPath, is_win32, stdout from streamlink_cli.console import ConsoleOutput, ConsoleUserInputRequester from streamlink_cli.constants import CONFIG_FILES, DEFAULT_STREAM_METADATA, LOG_DIR, PLUGIN_DIRS, STREAM_SYNONYMS from streamlink_cli.output import FileOutput, Output, PlayerOutput from streamlink_cli.utils import Formatter, HTTPServer, datetime, ignored, progress, stream_to_url ACCEPTABLE_ERRNO = (errno.EPIPE, errno.EINVAL, errno.ECONNRESET) try: ACCEPTABLE_ERRNO += (errno.WSAECONNABORTED,) except AttributeError: pass # Not windows QUIET_OPTIONS = ("json", "stream_url", "subprocess_cmdline", "quiet") args = None console: ConsoleOutput = None output: Output = None plugin: Plugin = None stream_fd: StreamIO = None streamlink: Streamlink = None log = logging.getLogger("streamlink.cli") def check_file_output(filename, force): """Checks if file already exists and ask the user if it should be overwritten if it does.""" log.debug("Checking file output") if os.path.isfile(filename) and not force: if sys.stdin.isatty(): answer = console.ask(f"File {filename} already exists! Overwrite it? [y/N] ") if answer.lower() != "y": sys.exit() else: log.error(f"File {filename} already exists, use --force to overwrite it.") sys.exit() return FileOutput(filename) def create_output(formatter: Formatter): """Decides where to write the stream. Depending on arguments it can be one of these: - The stdout pipe - A subprocess' stdin pipe - A named pipe that the subprocess reads from - A regular file """ if (args.output or args.stdout) and (args.record or args.record_and_pipe): console.exit("Cannot use record options with other file output options.") if args.output: if args.output == "-": out = FileOutput(fd=stdout) else: out = check_file_output(formatter.filename(args.output, args.fs_safe_rules), args.force) elif args.stdout: out = FileOutput(fd=stdout) elif args.record_and_pipe: record = check_file_output(formatter.filename(args.record_and_pipe, args.fs_safe_rules), args.force) out = FileOutput(fd=stdout, record=record) else: http = namedpipe = record = None if not args.player: console.exit("The default player (VLC) does not seem to be " "installed. 
You must specify the path to a player " "executable with --player.") if args.player_fifo: try: namedpipe = NamedPipe() except OSError as err: console.exit(f"Failed to create pipe: {err}") elif args.player_http: http = create_http_server() if args.record: record = check_file_output(formatter.filename(args.record, args.fs_safe_rules), args.force) log.info(f"Starting player: {args.player}") out = PlayerOutput( args.player, args=args.player_args, quiet=not args.verbose_player, kill=not args.player_no_close, namedpipe=namedpipe, http=http, record=record, title=formatter.title(args.title, defaults=DEFAULT_STREAM_METADATA) if args.title else args.url ) return out def create_http_server(*_args, **_kwargs): """Creates a HTTP server listening on a given host and port. If host is empty, listen on all available interfaces, and if port is 0, listen on a random high port. """ try: http = HTTPServer() http.bind(*_args, **_kwargs) except OSError as err: console.exit(f"Failed to create HTTP server: {err}") return http def output_stream_http(plugin, initial_streams, formatter: Formatter, external=False, port=0): """Continuously output the stream over HTTP.""" global output if not external: if not args.player: console.exit("The default player (VLC) does not seem to be " "installed. You must specify the path to a player " "executable with --player.") server = create_http_server() player = output = PlayerOutput( args.player, args=args.player_args, filename=server.url, quiet=not args.verbose_player, title=formatter.title(args.title, defaults=DEFAULT_STREAM_METADATA) if args.title else args.url ) try: log.info(f"Starting player: {args.player}") if player: player.open() except OSError as err: console.exit(f"Failed to start player: {args.player} ({err})") else: server = create_http_server(host=None, port=port) player = None log.info("Starting server, access with one of:") for url in server.urls: log.info(" " + url) for req in iter_http_requests(server, player): user_agent = req.headers.get("User-Agent") or "unknown player" log.info(f"Got HTTP request from {user_agent}") stream_fd = prebuffer = None while not stream_fd and (not player or player.running): try: streams = initial_streams or fetch_streams(plugin) initial_streams = None for stream_name in (resolve_stream_name(streams, s) for s in args.stream): if stream_name in streams: stream = streams[stream_name] break else: log.info("Stream not available, will re-fetch streams in 10 sec") sleep(10) continue except PluginError as err: log.error(f"Unable to fetch new streams: {err}") continue try: log.info(f"Opening stream: {stream_name} ({type(stream).shortname()})") stream_fd, prebuffer = open_stream(stream) except StreamError as err: log.error(err) if stream_fd and prebuffer: log.debug("Writing stream to player") read_stream(stream_fd, server, prebuffer, formatter) server.close(True) player.close() server.close() def output_stream_passthrough(stream, formatter: Formatter): """Prepares a filename to be passed to the player.""" global output filename = f'"{stream_to_url(stream)}"' output = PlayerOutput( args.player, args=args.player_args, filename=filename, call=True, quiet=not args.verbose_player, title=formatter.title(args.title, defaults=DEFAULT_STREAM_METADATA) if args.title else args.url ) try: log.info(f"Starting player: {args.player}") output.open() except OSError as err: console.exit(f"Failed to start player: {args.player} ({err})") return False return True def open_stream(stream): """Opens a stream and reads 8192 bytes from it. 
This is useful to check if a stream actually has data before opening the output. """ global stream_fd # Attempts to open the stream try: stream_fd = stream.open() except StreamError as err: raise StreamError(f"Could not open stream: {err}") # Read 8192 bytes before proceeding to check for errors. # This is to avoid opening the output unnecessarily. try: log.debug("Pre-buffering 8192 bytes") prebuffer = stream_fd.read(8192) except OSError as err: stream_fd.close() raise StreamError(f"Failed to read data from stream: {err}") if not prebuffer: stream_fd.close() raise StreamError("No data returned from stream") return stream_fd, prebuffer def output_stream(stream, formatter: Formatter): """Open stream, create output and finally write the stream to output.""" global output success_open = False for i in range(args.retry_open): try: stream_fd, prebuffer = open_stream(stream) success_open = True break except StreamError as err: log.error(f"Try {i + 1}/{args.retry_open}: Could not open stream {stream} ({err})") if not success_open: console.exit(f"Could not open stream {stream}, tried {args.retry_open} times, exiting") output = create_output(formatter) try: output.open() except OSError as err: if isinstance(output, PlayerOutput): console.exit(f"Failed to start player: {args.player} ({err})") else: console.exit(f"Failed to open output: {output.filename} ({err})") with closing(output): log.debug("Writing stream to output") read_stream(stream_fd, output, prebuffer, formatter) return True def read_stream(stream, output, prebuffer, formatter: Formatter, chunk_size=8192): """Reads data from stream and then writes it to the output.""" is_player = isinstance(output, PlayerOutput) is_http = isinstance(output, HTTPServer) is_fifo = is_player and output.namedpipe show_progress = ( isinstance(output, FileOutput) and output.fd is not stdout and (sys.stdout.isatty() or args.force_progress) ) show_record_progress = ( hasattr(output, "record") and isinstance(output.record, FileOutput) and output.record.fd is not stdout and (sys.stdout.isatty() or args.force_progress) ) stream_iterator = chain( [prebuffer], iter(partial(stream.read, chunk_size), b"") ) if show_progress: stream_iterator = progress( stream_iterator, prefix=os.path.basename(output.filename) ) elif show_record_progress: stream_iterator = progress( stream_iterator, prefix=os.path.basename(output.record.filename) ) try: for data in stream_iterator: # We need to check if the player process still exists when # using named pipes on Windows since the named pipe is not # automatically closed by the player. if is_win32 and is_fifo: output.player.poll() if output.player.returncode is not None: log.info("Player closed") break try: output.write(data) except OSError as err: if is_player and err.errno in ACCEPTABLE_ERRNO: log.info("Player closed") elif is_http and err.errno in ACCEPTABLE_ERRNO: log.info("HTTP connection closed") else: console.exit(f"Error when writing to output: {err}, exiting") break except OSError as err: console.exit(f"Error when reading from stream: {err}, exiting") finally: stream.close() log.info("Stream ended") def handle_stream(plugin, streams, stream_name): """Decides what to do with the selected stream. 
Depending on arguments it can be one of these: - Output internal command-line - Output JSON represenation - Continuously output the stream over HTTP - Output stream data to selected output """ stream_name = resolve_stream_name(streams, stream_name) stream = streams[stream_name] # Print internal command-line if this stream # uses a subprocess. if args.subprocess_cmdline: if isinstance(stream, StreamProcess): try: cmdline = stream.cmdline() except StreamError as err: console.exit(err) console.msg(cmdline) else: console.exit("The stream specified cannot be translated to a command") # Print JSON representation of the stream elif args.json: console.msg_json( stream, metadata=plugin.get_metadata() ) elif args.stream_url: try: console.msg(stream.to_url()) except TypeError: console.exit("The stream specified cannot be translated to a URL") # Output the stream else: # Find any streams with a '_alt' suffix and attempt # to use these in case the main stream is not usable. alt_streams = list(filter(lambda k: stream_name + "_alt" in k, sorted(streams.keys()))) file_output = args.output or args.stdout formatter = get_formatter(plugin) for stream_name in [stream_name] + alt_streams: stream = streams[stream_name] stream_type = type(stream).shortname() if stream_type in args.player_passthrough and not file_output: log.info(f"Opening stream: {stream_name} ({stream_type})") success = output_stream_passthrough(stream, formatter) elif args.player_external_http: return output_stream_http(plugin, streams, formatter, external=True, port=args.player_external_http_port) elif args.player_continuous_http and not file_output: return output_stream_http(plugin, streams, formatter) else: log.info(f"Opening stream: {stream_name} ({stream_type})") success = output_stream(stream, formatter) if success: break def fetch_streams(plugin): """Fetches streams using correct parameters.""" return plugin.streams(stream_types=args.stream_types, sorting_excludes=args.stream_sorting_excludes) def fetch_streams_with_retry(plugin, interval, count): """Attempts to fetch streams repeatedly until some are returned or limit hit.""" try: streams = fetch_streams(plugin) except PluginError as err: log.error(err) streams = None if not streams: log.info(f"Waiting for streams, retrying every {interval} second(s)") attempts = 0 while not streams: sleep(interval) try: streams = fetch_streams(plugin) except FatalPluginError: raise except PluginError as err: log.error(err) if count > 0: attempts += 1 if attempts >= count: break return streams def resolve_stream_name(streams, stream_name): """Returns the real stream name of a synonym.""" if stream_name in STREAM_SYNONYMS and stream_name in streams: for name, stream in streams.items(): if stream is streams[stream_name] and name not in STREAM_SYNONYMS: return name return stream_name def format_valid_streams(plugin, streams): """Formats a dict of streams. Filters out synonyms and displays them next to the stream they point to. Streams are sorted according to their quality (based on plugin.stream_weight). """ delimiter = ", " validstreams = [] for name, stream in sorted(streams.items(), key=lambda stream: plugin.stream_weight(stream[0])): if name in STREAM_SYNONYMS: continue synonyms = list(filter(synonymfilter, streams.keys())) if len(synonyms) > 0: joined = delimiter.join(synonyms) name = f"{name} ({joined})" validstreams.append(name) return delimiter.join(validstreams) def handle_url(): """The URL handler. Attempts to resolve the URL to a plugin and then attempts to fetch a list of available streams. 
Proceeds to handle stream if user specified a valid one, otherwise output list of valid streams. """ try: plugin = streamlink.resolve_url(args.url) setup_plugin_options(streamlink, plugin) log.info(f"Found matching plugin {plugin.module} for URL {args.url}") if args.retry_max or args.retry_streams: retry_streams = 1 retry_max = 0 if args.retry_streams: retry_streams = args.retry_streams if args.retry_max: retry_max = args.retry_max streams = fetch_streams_with_retry(plugin, retry_streams, retry_max) else: streams = fetch_streams(plugin) except NoPluginError: console.exit(f"No plugin can handle URL: {args.url}") except PluginError as err: console.exit(err) if not streams: console.exit(f"No playable streams found on this URL: {args.url}") if args.default_stream and not args.stream and not args.json: args.stream = args.default_stream if args.stream: validstreams = format_valid_streams(plugin, streams) for stream_name in args.stream: if stream_name in streams: log.info(f"Available streams: {validstreams}") handle_stream(plugin, streams, stream_name) return err = f"The specified stream(s) '{', '.join(args.stream)}' could not be found" if args.json: console.msg_json( plugin=plugin.module, metadata=plugin.get_metadata(), streams=streams, error=err ) else: console.exit(f"{err}.\n Available streams: {validstreams}") elif args.json: console.msg_json( plugin=plugin.module, metadata=plugin.get_metadata(), streams=streams ) elif args.stream_url: try: console.msg(streams[list(streams)[-1]].to_manifest_url()) except TypeError: console.exit("The stream specified cannot be translated to a URL") else: validstreams = format_valid_streams(plugin, streams) console.msg(f"Available streams: {validstreams}") def print_plugins(): """Outputs a list of all plugins Streamlink has loaded.""" pluginlist = list(streamlink.get_plugins().keys()) pluginlist_formatted = ", ".join(sorted(pluginlist)) if args.json: console.msg_json(pluginlist) else: console.msg(f"Loaded plugins: {pluginlist_formatted}") def load_plugins(dirs: List[Path], showwarning: bool = True): """Attempts to load plugins from a list of directories.""" for directory in dirs: if directory.is_dir(): success = streamlink.load_plugins(str(directory)) if success and type(directory) is DeprecatedPath: log.info(f"Loaded plugins from deprecated path, see CLI docs for how to migrate: {directory}") elif showwarning: log.warning(f"Plugin path {directory} does not exist or is not a directory!") def setup_args(parser: argparse.ArgumentParser, config_files: List[Path] = None, ignore_unknown: bool = False): """Parses arguments.""" global args arglist = sys.argv[1:] # Load arguments from config files configs = [f"@{config_file}" for config_file in config_files or []] args, unknown = parser.parse_known_args(configs + arglist) if unknown and not ignore_unknown: msg = gettext("unrecognized arguments: %s") parser.error(msg % " ".join(unknown)) # Force lowercase to allow case-insensitive lookup if args.stream: args.stream = [stream.lower() for stream in args.stream] if not args.url and args.url_param: args.url = args.url_param def setup_http_session(): """Sets the global HTTP settings, such as proxy and headers.""" if args.http_proxy: streamlink.set_option("http-proxy", args.http_proxy) if args.https_proxy: streamlink.set_option("https-proxy", args.https_proxy) if args.http_cookie: streamlink.set_option("http-cookies", dict(args.http_cookie)) if args.http_header: streamlink.set_option("http-headers", dict(args.http_header)) if args.http_query_param: 
streamlink.set_option("http-query-params", dict(args.http_query_param)) if args.http_ignore_env: streamlink.set_option("http-trust-env", False) if args.http_no_ssl_verify: streamlink.set_option("http-ssl-verify", False) if args.http_disable_dh: streamlink.set_option("http-disable-dh", True) if args.http_ssl_cert: streamlink.set_option("http-ssl-cert", args.http_ssl_cert) if args.http_ssl_cert_crt_key: streamlink.set_option("http-ssl-cert", tuple(args.http_ssl_cert_crt_key)) if args.http_timeout: streamlink.set_option("http-timeout", args.http_timeout) def setup_plugins(extra_plugin_dir=None): """Loads any additional plugins.""" load_plugins(PLUGIN_DIRS, showwarning=False) if extra_plugin_dir: load_plugins([Path(path).expanduser() for path in extra_plugin_dir]) def setup_streamlink(): """Creates the Streamlink session.""" global streamlink streamlink = Streamlink({"user-input-requester": ConsoleUserInputRequester(console)}) def setup_options(): """Sets Streamlink options.""" if args.interface: streamlink.set_option("interface", args.interface) if args.ipv4: streamlink.set_option("ipv4", args.ipv4) if args.ipv6: streamlink.set_option("ipv6", args.ipv6) if args.ringbuffer_size: streamlink.set_option("ringbuffer-size", args.ringbuffer_size) if args.mux_subtitles: streamlink.set_option("mux-subtitles", args.mux_subtitles) if args.hds_live_edge: streamlink.set_option("hds-live-edge", args.hds_live_edge) if args.hls_live_edge: streamlink.set_option("hls-live-edge", args.hls_live_edge) if args.hls_playlist_reload_attempts: streamlink.set_option("hls-playlist-reload-attempts", args.hls_playlist_reload_attempts) if args.hls_playlist_reload_time: streamlink.set_option("hls-playlist-reload-time", args.hls_playlist_reload_time) if args.hls_segment_ignore_names: streamlink.set_option("hls-segment-ignore-names", args.hls_segment_ignore_names) if args.hls_segment_key_uri: streamlink.set_option("hls-segment-key-uri", args.hls_segment_key_uri) if args.hls_audio_select: streamlink.set_option("hls-audio-select", args.hls_audio_select) if args.hls_start_offset: streamlink.set_option("hls-start-offset", args.hls_start_offset) if args.hls_duration: streamlink.set_option("hls-duration", args.hls_duration) if args.hls_live_restart: streamlink.set_option("hls-live-restart", args.hls_live_restart) if args.rtmp_rtmpdump: streamlink.set_option("rtmp-rtmpdump", args.rtmp_rtmpdump) elif args.rtmpdump: streamlink.set_option("rtmp-rtmpdump", args.rtmpdump) if args.rtmp_proxy: streamlink.set_option("rtmp-proxy", args.rtmp_proxy) # deprecated if args.hds_segment_attempts: streamlink.set_option("hds-segment-attempts", args.hds_segment_attempts) if args.hds_segment_threads: streamlink.set_option("hds-segment-threads", args.hds_segment_threads) if args.hds_segment_timeout: streamlink.set_option("hds-segment-timeout", args.hds_segment_timeout) if args.hds_timeout: streamlink.set_option("hds-timeout", args.hds_timeout) if args.hls_segment_attempts: streamlink.set_option("hls-segment-attempts", args.hls_segment_attempts) if args.hls_segment_threads: streamlink.set_option("hls-segment-threads", args.hls_segment_threads) if args.hls_segment_timeout: streamlink.set_option("hls-segment-timeout", args.hls_segment_timeout) if args.hls_timeout: streamlink.set_option("hls-timeout", args.hls_timeout) if args.http_stream_timeout: streamlink.set_option("http-stream-timeout", args.http_stream_timeout) if args.rtmp_timeout: streamlink.set_option("rtmp-timeout", args.rtmp_timeout) # generic stream- arguments take precedence over deprecated 
    # stream-type arguments
    if args.stream_segment_attempts:
        streamlink.set_option("stream-segment-attempts", args.stream_segment_attempts)

    if args.stream_segment_threads:
        streamlink.set_option("stream-segment-threads", args.stream_segment_threads)

    if args.stream_segment_timeout:
        streamlink.set_option("stream-segment-timeout", args.stream_segment_timeout)

    if args.stream_timeout:
        streamlink.set_option("stream-timeout", args.stream_timeout)

    if args.ffmpeg_ffmpeg:
        streamlink.set_option("ffmpeg-ffmpeg", args.ffmpeg_ffmpeg)

    if args.ffmpeg_verbose:
        streamlink.set_option("ffmpeg-verbose", args.ffmpeg_verbose)

    if args.ffmpeg_verbose_path:
        streamlink.set_option("ffmpeg-verbose-path", args.ffmpeg_verbose_path)

    if args.ffmpeg_fout:
        streamlink.set_option("ffmpeg-fout", args.ffmpeg_fout)

    if args.ffmpeg_video_transcode:
        streamlink.set_option("ffmpeg-video-transcode", args.ffmpeg_video_transcode)

    if args.ffmpeg_audio_transcode:
        streamlink.set_option("ffmpeg-audio-transcode", args.ffmpeg_audio_transcode)

    if args.ffmpeg_copyts:
        streamlink.set_option("ffmpeg-copyts", args.ffmpeg_copyts)

    if args.ffmpeg_start_at_zero:
        streamlink.set_option("ffmpeg-start-at-zero", args.ffmpeg_start_at_zero)

    streamlink.set_option("subprocess-errorlog", args.subprocess_errorlog)
    streamlink.set_option("subprocess-errorlog-path", args.subprocess_errorlog_path)
    streamlink.set_option("locale", args.locale)


def setup_plugin_args(session, parser):
    """Sets Streamlink plugin options."""
    plugin_args = parser.add_argument_group("Plugin options")
    for pname, plugin in session.plugins.items():
        defaults = {}
        group = plugin_args.add_argument_group(pname.capitalize())
        for parg in plugin.arguments:
            if not parg.is_global:
                group.add_argument(parg.argument_name(pname), **parg.options)
                defaults[parg.dest] = parg.default
            else:
                pargdest = parg.dest
                for action in parser._actions:
                    # find matching global argument
                    if pargdest != action.dest:
                        continue
                    defaults[pargdest] = action.default

                    # add plugin to global argument
                    plugins = getattr(action, "plugins", [])
                    plugins.append(pname)
                    setattr(action, "plugins", plugins)

        plugin.options = PluginOptions(defaults)


def setup_plugin_options(session, plugin):
    """Sets Streamlink plugin options."""
    pname = plugin.module
    required = OrderedDict({})

    for parg in plugin.arguments:
        if parg.options.get("help") == argparse.SUPPRESS:
            continue

        value = getattr(args, parg.dest if parg.is_global else parg.namespace_dest(pname))
        session.set_plugin_option(pname, parg.dest, value)

        if not parg.is_global:
            if parg.required:
                required[parg.name] = parg
            # if the value is set, check to see if any of the required arguments are not set
            if parg.required or value:
                try:
                    for rparg in plugin.arguments.requires(parg.name):
                        required[rparg.name] = rparg
                except RuntimeError:
                    log.error(f"{pname} plugin has a configuration error and the arguments cannot be parsed")
                    break

    if required:
        for req in required.values():
            if not session.get_plugin_option(pname, req.dest):
                prompt = f"{req.prompt or f'Enter {pname} {req.name}'}: "
                session.set_plugin_option(
                    pname,
                    req.dest,
                    console.askpass(prompt) if req.sensitive else console.ask(prompt)
                )


def log_current_versions():
    """Show current installed versions"""
    if not logger.root.isEnabledFor(logging.DEBUG):
        return

    # macOS
    if sys.platform == "darwin":
        os_version = f"macOS {platform.mac_ver()[0]}"
    # Windows
    elif sys.platform == "win32":
        os_version = f"{platform.system()} {platform.release()}"
    # Linux / other
    else:
        os_version = platform.platform()

    log.debug(f"OS: {os_version}")
    log.debug(f"Python: {platform.python_version()}")
log.debug(f"Streamlink: {streamlink_version}") log.debug(f"Requests({requests.__version__}), " f"Socks({socks_version}), " f"Websocket({websocket_version})")
33.999102
122
0.640258
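The open_stream/read_stream pair in the file above uses a pattern worth isolating: read a small prebuffer before any output is opened, then chain it in front of the remaining reads so no bytes are lost. A minimal sketch of that idea, assuming only a file-like object with read(); all names below are illustrative, not Streamlink API:

from functools import partial
from itertools import chain

def prebuffered_reader(stream, probe_size=8192, chunk_size=8192):
    # Probe the stream first so a dead source fails before any sink is opened.
    prebuffer = stream.read(probe_size)
    if not prebuffer:
        raise OSError("No data returned from stream")
    # Yield the probed bytes, then keep reading until read() returns b"" (EOF).
    return chain([prebuffer], iter(partial(stream.read, chunk_size), b""))

# Usage sketch:
#   with open("capture.ts", "rb") as src, open("out.ts", "wb") as sink:
#       for block in prebuffered_reader(src):
#           sink.write(block)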
07b8d02790ae07e8540167a649afc0cab32e02e9
16,807
py
Python
dbaccesslibUserMailInfo.py
Koushik-ks/FlaskAPP
6f1bd98450bc8f33c3896aa7ec690c51dc414d19
[ "MIT" ]
null
null
null
dbaccesslibUserMailInfo.py
Koushik-ks/FlaskAPP
6f1bd98450bc8f33c3896aa7ec690c51dc414d19
[ "MIT" ]
null
null
null
dbaccesslibUserMailInfo.py
Koushik-ks/FlaskAPP
6f1bd98450bc8f33c3896aa7ec690c51dc414d19
[ "MIT" ]
1
2019-11-08T06:49:57.000Z
2019-11-08T06:49:57.000Z
from io import BytesIO from io import StringIO import json from bson.dbref import DBRef import datetime from bson import json_util import logging import base64 jsonCode ={ "building":{ "Essae Vaishnavi Solitaire": { "id": "B1", "division": { "SS": { "id": "D1", "dept":{ "Semicon":{ "id":"DEP1", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "RND":{ "id":"DEP2", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "Mobile":{ "id":"DEP3", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } } } }, "TTEC": { "id": "D2", "dept":{ "TTEC-AL":{ "id":"DEP1", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "TTEC-SL":{ "id":"DEP2", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "TTEC-DL":{ "id":"DEP3", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "TTEC-CI":{ "id":"DEP4", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } } } } } }, "Fortune Summit": { "id": "B2", "division": { "TMSC": { "id": "D1", "dept":{ "Medical":{ "id":"DEP1", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "RND":{ "id":"DEP2", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "Imaging":{ "id":"DEP3", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } } } }, "tmc": { "id": "D2", "dept":{ "tmc-1":{ "id":"DEP1", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "tmc-2":{ "id":"DEP2", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "tmc-3":{ "id":"DEP3", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } } } } } } } } #Create and configure logger logging.basicConfig(filename="server.log", format='%(asctime)s %(message)s', filemode='a') #Creating an object logger=logging.getLogger() #Setting the threshold of logger to DEBUG logger.setLevel(logging.DEBUG) import pymongo uri = "mongodb://218ffa09-0ee0-4-231-b9ee:zTV4cwDG0vM49J2GFsw72JzwOD79Bv3dPU8fbVLb5pbh3p0CmTBYcvhrFKTjtl1s7hgYSfRbMOrsVve6hfvhag==@218ffa09-0ee0-4-231-b9ee.documents.azure.com:10255/?ssl=true&replicaSet=globaldb" client = pymongo.MongoClient(uri) print("Obtained the client") mydb = client.test #Clear DB only for testing
42.335013
242
0.404712
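The jsonCode mapping above nests building -> division -> dept -> floor, with an "id" stored at each level. A small lookup sketch (the function name is hypothetical) showing how one path through that structure resolves to IDs:

def resolve_location(cfg, building, division, dept):
    # Walk the nested dict, collecting the ID stored at each level.
    b = cfg["building"][building]
    d = b["division"][division]
    dep = d["dept"][dept]
    return b["id"], d["id"], dep["id"]

# resolve_location(jsonCode, "Essae Vaishnavi Solitaire", "SS", "Semicon")
# returns ('B1', 'D1', 'DEP1')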
07b8f248ebba127a7ba553594c485e647090e69f
3,489
py
Python
src/test/dags/bq_to_cm_dag_test.py
google/cc4d
206543832368f96bac7f55c0de93c96e32127779
[ "Apache-2.0" ]
null
null
null
src/test/dags/bq_to_cm_dag_test.py
google/cc4d
206543832368f96bac7f55c0de93c96e32127779
[ "Apache-2.0" ]
null
null
null
src/test/dags/bq_to_cm_dag_test.py
google/cc4d
206543832368f96bac7f55c0de93c96e32127779
[ "Apache-2.0" ]
null
null
null
# python3 # coding=utf-8 # Copyright 2020 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for dags.bq_to_cm_dag.""" import unittest from airflow.contrib.hooks import bigquery_hook from airflow.models import baseoperator from airflow.models import dag from airflow.models import variable import mock from gps_building_blocks.cloud.utils import cloud_auth from dags import bq_to_cm_dag from plugins.pipeline_plugins.hooks import monitoring_hook _DAG_NAME = bq_to_cm_dag._DAG_NAME AIRFLOW_VARIABLES = { 'dag_name': _DAG_NAME, f'{_DAG_NAME}_schedule': '@once', f'{_DAG_NAME}_retries': 0, f'{_DAG_NAME}_retry_delay': 3, f'{_DAG_NAME}_is_retry': True, f'{_DAG_NAME}_is_run': True, f'{_DAG_NAME}_enable_run_report': False, f'{_DAG_NAME}_enable_monitoring': True, f'{_DAG_NAME}_enable_monitoring_cleanup': False, 'monitoring_data_days_to_live': 50, 'monitoring_dataset': 'test_monitoring_dataset', 'monitoring_table': 'test_monitoring_table', 'monitoring_bq_conn_id': 'test_monitoring_conn', 'bq_dataset_id': 'test_dataset', 'bq_table_id': 'test_table', 'cm_profile_id': 'cm_profile_id', 'cm_service_account': 'cm_service_account' } if __name__ == '__main__': unittest.main()
35.602041
74
0.755804
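AIRFLOW_VARIABLES above namespaces most keys by DAG name (f'{_DAG_NAME}_schedule', f'{_DAG_NAME}_retries', ...) while the monitoring keys stay global. A hedged sketch of the per-DAG lookup such a test would typically mock; the helper name is hypothetical, and the real DAG reads these through Airflow's Variable model:

def get_dag_variable(variables, dag_name, key, default=None):
    # Prefer the DAG-scoped key, then fall back to the global key.
    return variables.get(f"{dag_name}_{key}", variables.get(key, default))

# get_dag_variable(AIRFLOW_VARIABLES, _DAG_NAME, "retries")             # -> 0
# get_dag_variable(AIRFLOW_VARIABLES, _DAG_NAME, "monitoring_dataset")  # -> 'test_monitoring_dataset'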
07b971c0c6fde3e7ed6901642724f170bca1dffb
10,781
py
Python
talentmap_api/common/management/commands/load_xml.py
burgwyn/State-TalentMAP-API
1f4f3659c5743ebfd558cd87af381f5460f284b3
[ "CC0-1.0" ]
5
2018-08-09T18:51:12.000Z
2021-11-08T10:28:17.000Z
talentmap_api/common/management/commands/load_xml.py
burgwyn/State-TalentMAP-API
1f4f3659c5743ebfd558cd87af381f5460f284b3
[ "CC0-1.0" ]
232
2017-06-16T02:09:54.000Z
2018-05-10T16:15:48.000Z
talentmap_api/common/management/commands/load_xml.py
burgwyn/State-TalentMAP-API
1f4f3659c5743ebfd558cd87af381f5460f284b3
[ "CC0-1.0" ]
4
2018-06-13T14:49:27.000Z
2021-06-30T22:29:15.000Z
from django.core.management.base import BaseCommand import logging import re from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag from talentmap_api.language.models import Language, Proficiency from talentmap_api.position.models import Grade, Skill, Position, CapsuleDescription, SkillCone from talentmap_api.organization.models import Organization, Post, TourOfDuty, Location, Country
36.422297
142
0.66914
07b9b8864e1cb2b18a6326e38faad88d4012991a
2,581
py
Python
gluon/tests/test_recfile.py
oscarfonts/web2py
a18e0e489fe7a770c62fca510a4299886b0a9bb7
[ "BSD-3-Clause" ]
null
null
null
gluon/tests/test_recfile.py
oscarfonts/web2py
a18e0e489fe7a770c62fca510a4299886b0a9bb7
[ "BSD-3-Clause" ]
null
null
null
gluon/tests/test_recfile.py
oscarfonts/web2py
a18e0e489fe7a770c62fca510a4299886b0a9bb7
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Unit tests for gluon.recfile """ import unittest import os import shutil import uuid from .fix_path import fix_sys_path fix_sys_path(__file__) from gluon import recfile if __name__ == '__main__': unittest.main()
34.413333
92
0.593181
07ba0eee7b3f5dcbd7cf03e65b5f80e0ede665d0
1,022
py
Python
configLambdas.py
cfrome77/liquid-stats
7a4d751dea215c94b650beb154a90abce7e1592d
[ "MIT" ]
4
2020-06-21T14:43:40.000Z
2021-12-29T17:02:18.000Z
configLambdas.py
cfrome77/liquid-stats
7a4d751dea215c94b650beb154a90abce7e1592d
[ "MIT" ]
21
2018-09-18T18:42:53.000Z
2022-01-04T05:55:12.000Z
configLambdas.py
cfrome77/liquid-stats
7a4d751dea215c94b650beb154a90abce7e1592d
[ "MIT" ]
null
null
null
import json
import os
import subprocess

from dotenv import load_dotenv
from subprocess import check_output, Popen, PIPE

load_dotenv()

# Accessing variables.
CLIENT_ID = os.environ.get('CLIENT_ID')
CLIENT_SECRET = os.environ.get('CLIENT_SECRET')
USERNAME = os.environ.get('USERNAME')
BUCKET_NAME = os.environ.get('BUCKET_NAME')


def get_lambda_functions():
    # Minimal stand-in for the elided original helper: list every Lambda
    # function via the AWS CLI and return the function metadata dicts.
    output = check_output(["aws", "lambda", "list-functions"])
    return json.loads(output)['Functions']


lambda_functions = get_lambda_functions()
for lambda_function in lambda_functions:
    function_name = lambda_function['FunctionName']
    subprocess.run([
        "aws", "lambda", "update-function-configuration",
        "--function-name", f"{function_name}",
        "--environment",
        f"Variables={{CLIENT_ID={CLIENT_ID},CLIENT_SECRET={CLIENT_SECRET},USERNAME={USERNAME},BUCKET_NAME={BUCKET_NAME}}}"
    ])
26.894737
122
0.714286
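For reference, the doubled braces in the f-string above escape literal { and }, so the argument handed to the AWS CLI has this shape (values illustrative):

env_arg = (
    "Variables={{CLIENT_ID={0},CLIENT_SECRET={1},USERNAME={2},BUCKET_NAME={3}}}"
    .format("abc", "s3cret", "joe", "stats")
)
# -> Variables={CLIENT_ID=abc,CLIENT_SECRET=s3cret,USERNAME=joe,BUCKET_NAME=stats}
assert env_arg == "Variables={CLIENT_ID=abc,CLIENT_SECRET=s3cret,USERNAME=joe,BUCKET_NAME=stats}"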
07baaefdacf7ace2738a920e7e9c1d5671078a05
13,520
py
Python
microbitAnim.py
SaitoYutaka/microbitAnim
6630d5cdb3ae867d3467a035a1c14358944c0367
[ "MIT" ]
null
null
null
microbitAnim.py
SaitoYutaka/microbitAnim
6630d5cdb3ae867d3467a035a1c14358944c0367
[ "MIT" ]
null
null
null
microbitAnim.py
SaitoYutaka/microbitAnim
6630d5cdb3ae867d3467a035a1c14358944c0367
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- ########################################################################### ## Python code generated with wxFormBuilder (version Aug 8 2018) ## http://www.wxformbuilder.org/ ## ## PLEASE DO *NOT* EDIT THIS FILE! ########################################################################### import wx import wx.xrc ########################################################################### ## Class MyFrame1 ###########################################################################
44.473684
181
0.598669
07babdb06c676058838f65ac7fea336d215d962b
339
py
Python
src/dependencies/contrib/celery.py
nicoddemus/dependencies
74180e2c6098d8ad03bc53c5703bdf8dc61c3ed9
[ "BSD-2-Clause" ]
null
null
null
src/dependencies/contrib/celery.py
nicoddemus/dependencies
74180e2c6098d8ad03bc53c5703bdf8dc61c3ed9
[ "BSD-2-Clause" ]
null
null
null
src/dependencies/contrib/celery.py
nicoddemus/dependencies
74180e2c6098d8ad03bc53c5703bdf8dc61c3ed9
[ "BSD-2-Clause" ]
null
null
null
""" dependencies.contrib.celery --------------------------- This module implements injectable Celery task. :copyright: (c) 2016-2020 by dry-python team. :license: BSD, see LICENSE for more details. """ from _dependencies.contrib.celery import shared_task from _dependencies.contrib.celery import task __all__ = ["shared_task", "task"]
22.6
52
0.710914
07bb317b5c0941f28a4da7f31413b20f4a8bda28
7,838
py
Python
yolo3/utils.py
gaxu/keras-yolo3
7f6be0fb9a8583401246bfe65d2df2ee40777d72
[ "MIT" ]
null
null
null
yolo3/utils.py
gaxu/keras-yolo3
7f6be0fb9a8583401246bfe65d2df2ee40777d72
[ "MIT" ]
null
null
null
yolo3/utils.py
gaxu/keras-yolo3
7f6be0fb9a8583401246bfe65d2df2ee40777d72
[ "MIT" ]
4
2021-02-25T08:21:15.000Z
2021-02-25T08:56:39.000Z
"""Miscellaneous utility functions.""" from functools import reduce from PIL import Image import numpy as np from matplotlib.colors import rgb_to_hsv, hsv_to_rgb def compose(*funcs): """Compose arbitrarily many functions, evaluated left to right. Reference: https://mathieularose.com/function-composition-in-python/ """ # return lambda x: reduce(lambda v, f: f(v), funcs, x) if funcs: return reduce(lambda f, g: lambda *a, **kw: g(f(*a, **kw)), funcs) else: raise ValueError('Composition of empty sequence not supported.') def letterbox_image(image, size): '''resize image with unchanged aspect ratio using padding''' iw, ih = image.size w, h = size scale = min(w/iw, h/ih) nw = int(iw*scale) nh = int(ih*scale) image = image.resize((nw,nh), Image.BICUBIC) new_image = Image.new('RGB', size, (128,128,128)) new_image.paste(image, ((w-nw)//2, (h-nh)//2)) return new_image def get_random_data(annotation_line, input_shape, random=True, max_boxes=20, jitter=.3, hue=.1, sat=1.5, val=1.5, proc_img=True): '''random preprocessing for real-time data augmentation''' line = annotation_line.split() image = Image.open(line[0]) iw, ih = image.size h, w = input_shape box = np.array([np.array(list(map(int,box.split(',')))) for box in line[1:]]) if not random: # resize image scale = min(w/iw, h/ih) nw = int(iw*scale) nh = int(ih*scale) dx = (w-nw)//2 dy = (h-nh)//2 image_data=0 if proc_img: image = image.resize((nw,nh), Image.BICUBIC) new_image = Image.new('RGB', (w,h), (128,128,128)) new_image.paste(image, (dx, dy)) image_data = np.array(new_image)/255. # correct boxes box_data = np.zeros((max_boxes,5)) if len(box)>0: np.random.shuffle(box) if len(box)>max_boxes: box = box[:max_boxes] box[:, [0,2]] = box[:, [0,2]]*scale + dx box[:, [1,3]] = box[:, [1,3]]*scale + dy box_data[:len(box)] = box return image_data, box_data # resize image new_ar = w/h * rand(1-jitter,1+jitter)/rand(1-jitter,1+jitter) scale = rand(.25, 2) if new_ar < 1: nh = int(scale*h) nw = int(nh*new_ar) else: nw = int(scale*w) nh = int(nw/new_ar) image = image.resize((nw,nh), Image.BICUBIC) # place image dx = int(rand(0, w-nw)) dy = int(rand(0, h-nh)) new_image = Image.new('RGB', (w,h), (128,128,128)) new_image.paste(image, (dx, dy)) image = new_image # flip image or not flip = rand()<.5 if flip: image = image.transpose(Image.FLIP_LEFT_RIGHT) # distort image hue = rand(-hue, hue) sat = rand(1, sat) if rand()<.5 else 1/rand(1, sat) val = rand(1, val) if rand()<.5 else 1/rand(1, val) x = rgb_to_hsv(np.array(image)/255.) 
    x[..., 0] += hue
    x[..., 0][x[..., 0]>1] -= 1
    x[..., 0][x[..., 0]<0] += 1
    x[..., 1] *= sat
    x[..., 2] *= val
    x[x>1] = 1
    x[x<0] = 0
    image_data = hsv_to_rgb(x) # numpy array, 0 to 1

    # correct boxes
    box_data = np.zeros((max_boxes,5))
    if len(box)>0:
        np.random.shuffle(box)
        box[:, [0,2]] = box[:, [0,2]]*nw/iw + dx
        box[:, [1,3]] = box[:, [1,3]]*nh/ih + dy
        if flip: box[:, [0,2]] = w - box[:, [2,0]]
        box[:, 0:2][box[:, 0:2]<0] = 0
        box[:, 2][box[:, 2]>w] = w
        box[:, 3][box[:, 3]>h] = h
        box_w = box[:, 2] - box[:, 0]
        box_h = box[:, 3] - box[:, 1]
        box = box[np.logical_and(box_w>1, box_h>1)] # discard invalid box
        if len(box)>max_boxes: box = box[:max_boxes]
        box_data[:len(box)] = box

    return image_data, box_data

def get_random_data2(annotation_line, input_shape, random=True, max_boxes=20, jitter=.3, hue=.1, sat=1.5, val=1.5, proc_img=True):
    '''random preprocessing for real-time data augmentation'''
    line = annotation_line.split()
    image = Image.open(line[0])
    w, h = image.size #13 14
    dx, dy = input_shape
    box = np.array([np.array(list(map(int,box.split(',')))) for box in line[1:]])

    x_min = w
    x_max = 0
    y_min = h
    y_max = 0
    for bbox in box:
        x_min = min(x_min, bbox[0])
        y_min = min(y_min, bbox[1])
        x_max = max(x_max, bbox[2])
        y_max = max(y_max, bbox[3])
        name = bbox[4]

    #
    d_to_left = x_min
    d_to_right = w - x_max
    d_to_top = y_min
    d_to_bottom = h - y_max

    #
    crop_x_min = int(x_min - rand(0, d_to_left))
    crop_y_min = int(y_min - rand(0, d_to_top))
    crop_x_max = int(x_max + rand(0, d_to_right))
    crop_y_max = int(y_max + rand(0, d_to_bottom))

    #
    crop_x_min = max(0, crop_x_min)
    crop_y_min = max(0, crop_y_min)
    crop_x_max = min(w, crop_x_max)
    crop_y_max = min(h, crop_y_max)

    cropped = image.crop((crop_x_min, crop_y_min, crop_x_max, crop_y_max))  # (left, upper, right, lower)
    new_image = Image.new('RGB', (w,h), (128,128,128))
    new_image.paste(cropped, (dx, dy))
    image_data = np.array(new_image)/255.

    box_data = np.zeros((max_boxes,5))
    if len(box)>0:
        np.random.shuffle(box)
        if len(box)>max_boxes: box = box[:max_boxes]
        box[:,0] = box[:,0]-crop_x_min  # shift x-coordinates by the x crop offset
        box[:,1] = box[:,1]-crop_y_min
        box[:,2] = box[:,2]-crop_x_min
        box[:,3] = box[:,3]-crop_y_min
        box_data[:len(box)] = box

    return image_data, box_data
32.522822
130
0.578719
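A quick usage sketch for letterbox_image above: pad an arbitrary frame to a square model input while keeping its aspect ratio (the blank image is a stand-in for Image.open on a real frame):

from PIL import Image

img = Image.new("RGB", (1280, 720))        # stand-in for Image.open("frame.jpg")
boxed = letterbox_image(img, (416, 416))   # scaled to 416x234, centered on a grey canvas
assert boxed.size == (416, 416)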
07bd6b46f25dcc6cd80de3063def3b5b81539dae
161
py
Python
text_classification/config.py
MardanovTimur/kaggle
62392863a07fcc5de9821c28cf9c6dbbf39ced59
[ "MIT" ]
null
null
null
text_classification/config.py
MardanovTimur/kaggle
62392863a07fcc5de9821c28cf9c6dbbf39ced59
[ "MIT" ]
null
null
null
text_classification/config.py
MardanovTimur/kaggle
62392863a07fcc5de9821c28cf9c6dbbf39ced59
[ "MIT" ]
null
null
null
import logging import pathlib logging.basicConfig(level=logging.INFO) # Dirs ROOT_DIR = pathlib.Path(__file__).parent.absolute() DUMP_DIR = ROOT_DIR / 'dumps'
17.888889
51
0.782609
07bd8ff47d8f5245f9a21b92c33b949fce1127a8
8,482
py
Python
sportsreference/ncaaf/rankings.py
JosephDErwin/sportsreference
f026366bec91fdf4bebef48e3a4bfd7c5bfab4bd
[ "MIT" ]
null
null
null
sportsreference/ncaaf/rankings.py
JosephDErwin/sportsreference
f026366bec91fdf4bebef48e3a4bfd7c5bfab4bd
[ "MIT" ]
null
null
null
sportsreference/ncaaf/rankings.py
JosephDErwin/sportsreference
f026366bec91fdf4bebef48e3a4bfd7c5bfab4bd
[ "MIT" ]
1
2020-07-08T16:05:25.000Z
2020-07-08T16:05:25.000Z
import re from pyquery import PyQuery as pq from .. import utils from .constants import RANKINGS_SCHEME, RANKINGS_URL from six.moves.urllib.error import HTTPError
39.821596
79
0.549399
07bd958d061b56e11538171ee991b3269000d33d
5,937
py
Python
ding/hpc_rl/wrapper.py
davide97l/DI-engine
d48c93bcd5c07c29f2ce4ac1b7756b8bc255c423
[ "Apache-2.0" ]
1
2022-03-21T16:15:39.000Z
2022-03-21T16:15:39.000Z
ding/hpc_rl/wrapper.py
jiaruonan/DI-engine
268d77db3cb54401b2cfc83e2bc3ec87c31e7b83
[ "Apache-2.0" ]
null
null
null
ding/hpc_rl/wrapper.py
jiaruonan/DI-engine
268d77db3cb54401b2cfc83e2bc3ec87c31e7b83
[ "Apache-2.0" ]
null
null
null
import importlib
from ditk import logging
from collections import OrderedDict
from functools import wraps
import ding
'''
Overview:
    `hpc_wrapper` is the wrapper for functions which are supported by hpc. If a function is wrapped by it, we will
    search for its hpc type and return the function implemented by hpc.
    We will use the following code as a sample to introduce `hpc_wrapper`:
    ```
    @hpc_wrapper(shape_fn=shape_fn_dntd, namedtuple_data=True, include_args=[0,1,2,3],
                 include_kwargs=['data', 'gamma', 'v_min', 'v_max'], is_cls_method=False)
    def dist_nstep_td_error(
            data: namedtuple,
            gamma: float,
            v_min: float,
            v_max: float,
            n_atom: int,
            nstep: int = 1,
    ) -> torch.Tensor:
        ...
    ```
Parameters:
    - shape_fn (:obj:`function`): a function which returns the shape needed by the hpc function. In fact, it returns
        all args that the hpc function needs.
    - namedtuple_data (:obj:`bool`): If True, when the hpc function is called, it will be called as
        hpc_function(*namedtuple). If False, namedtuple data will remain its `namedtuple` type.
    - include_args (:obj:`list`): a list of indexes of the args that need to be set in the hpc function. As shown in
        the sample, include_args=[0,1,2,3], which means `data`, `gamma`, `v_min` and `v_max` will be set in the
        hpc function.
    - include_kwargs (:obj:`list`): a list of keys of the kwargs that need to be set in the hpc function. As shown in
        the sample, include_kwargs=['data', 'gamma', 'v_min', 'v_max'], which means `data`, `gamma`, `v_min` and
        `v_max` will be set in the hpc function.
    - is_cls_method (:obj:`bool`): If True, it means the function we wrap is a method of a class. `self` will be put
        into args. We will get rid of `self` in args. Besides, we will use its classname as its fn_name.
        If False, it means the function is a simple method.
Q&A:
    - Q: Do `include_args` and `include_kwargs` need to be set at the same time?
    - A: Yes. `include_args` and `include_kwargs` can deal with all types of input, such as
        (data, gamma, v_min=v_min, v_max=v_max) and (data, gamma, v_min, v_max).
    - Q: What is `hpc_fns`?
    - A: Here we show a normal `hpc_fns`:
        ```
        hpc_fns = {
            'fn_name1': {
                'runtime_name1': hpc_fn1,
                'runtime_name2': hpc_fn2,
                ...
            },
            ...
        }
        ```
        Besides, `per_fn_limit` means the max length of `hpc_fns[fn_name]`. When a new function comes, the oldest
        function will be popped from `hpc_fns[fn_name]`.
'''
hpc_fns = {}
per_fn_limit = 3
44.30597
120
0.578912
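The Overview docstring above describes hpc_fns as a two-level registry whose inner dicts are capped at per_fn_limit entries, with the oldest runtime variant evicted first. A minimal sketch of that bookkeeping, assuming FIFO eviction as the docstring states (the helper name is hypothetical):

from collections import OrderedDict

def register_hpc_fn(registry, fn_name, runtime_name, fn, limit=3):
    # Keep at most `limit` runtime variants per function name,
    # dropping the oldest insertion once the cap is exceeded.
    variants = registry.setdefault(fn_name, OrderedDict())
    variants[runtime_name] = fn
    if len(variants) > limit:
        variants.popitem(last=False)

# register_hpc_fn(hpc_fns, 'dist_nstep_td_error', 'runtime_1', lambda: None)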
07be5334fc353ee2aa3b8e027797471255271069
102
py
Python
CodeWars/2016/NumberOfOccurrences-7k.py
JLJTECH/TutorialTesting
f2dbbd49a86b3b086d0fc156ac3369fb74727f86
[ "MIT" ]
null
null
null
CodeWars/2016/NumberOfOccurrences-7k.py
JLJTECH/TutorialTesting
f2dbbd49a86b3b086d0fc156ac3369fb74727f86
[ "MIT" ]
null
null
null
CodeWars/2016/NumberOfOccurrences-7k.py
JLJTECH/TutorialTesting
f2dbbd49a86b3b086d0fc156ac3369fb74727f86
[ "MIT" ]
null
null
null
# Return the count of occurrences of the given int in the passed array.
34
45
0.72549
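For context, the kata behind this stub asks exactly that count, and list.count covers it in one line; the signature below is assumed from the kata description, not taken from the stripped file:

def number_of_occurrences(element, sample):
    # Count how many times `element` appears in the list `sample`.
    return sample.count(element)

assert number_of_occurrences(0, [1, 1, 2, 3, 4, 5]) == 0
assert number_of_occurrences(1, [1, 1, 2, 3, 4, 5]) == 2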
07bf6145f03382a8bd2f1aee40dd398c31e7d6c2
10,343
py
Python
telethon_generator/parsers/tlobject.py
islam-200555/Telethon
85103bcf6de8024c902ede98f0b9bf0f7f47a0aa
[ "MIT" ]
2
2021-01-06T12:49:49.000Z
2021-04-23T16:32:13.000Z
telethon_generator/parsers/tlobject.py
islam-200555/Telethon
85103bcf6de8024c902ede98f0b9bf0f7f47a0aa
[ "MIT" ]
null
null
null
telethon_generator/parsers/tlobject.py
islam-200555/Telethon
85103bcf6de8024c902ede98f0b9bf0f7f47a0aa
[ "MIT" ]
null
null
null
import re from zlib import crc32 from ..utils import snake_to_camel_case CORE_TYPES = ( 0xbc799737, # boolFalse#bc799737 = Bool; 0x997275b5, # boolTrue#997275b5 = Bool; 0x3fedd339, # true#3fedd339 = True; 0x1cb5c415, # vector#1cb5c415 {t:Type} # [ t ] = Vector t; ) # https://github.com/telegramdesktop/tdesktop/blob/4bf66cb6e93f3965b40084771b595e93d0b11bcd/Telegram/SourceFiles/codegen/scheme/codegen_scheme.py#L57-L62 WHITELISTED_MISMATCHING_IDS = { # 0 represents any layer 0: {'ipPortSecret', 'accessPointRule', 'help.configSimple'}, 77: {'channel'}, 78: {'channel'} } def _from_line(line, is_function, layer): match = re.match( r'^([\w.]+)' # 'name' r'(?:#([0-9a-fA-F]+))?' # '#optionalcode' r'(?:\s{?\w+:[\w\d<>#.?!]+}?)*' # '{args:.0?type}' r'\s=\s' # ' = ' r'([\w\d<>#.?]+);$', # '<result.type>;' line ) if match is None: # Probably "vector#1cb5c415 {t:Type} # [ t ] = Vector t;" raise ValueError('Cannot parse TLObject {}'.format(line)) args_match = re.findall( r'({)?' r'(\w+)' r':' r'([\w\d<>#.?!]+)' r'}?', line ) return TLObject( fullname=match.group(1), object_id=match.group(2), result=match.group(3), is_function=is_function, layer=layer, args=[TLArg(name, arg_type, brace != '') for brace, name, arg_type in args_match] ) def parse_tl(file_path, layer, ignore_core=False): """This method yields TLObjects from a given .tl file.""" with open(file_path, encoding='utf-8') as file: is_function = False for line in file: comment_index = line.find('//') if comment_index != -1: line = line[:comment_index] line = line.strip() if not line: continue match = re.match('---(\w+)---', line) if match: following_types = match.group(1) is_function = following_types == 'functions' continue try: result = _from_line(line, is_function, layer=layer) if not ignore_core or result.id not in CORE_TYPES: yield result except ValueError as e: if 'vector#1cb5c415' not in str(e): raise def find_layer(file_path): """Finds the layer used on the specified scheme.tl file.""" layer_regex = re.compile(r'^//\s*LAYER\s*(\d+)$') with open(file_path, encoding='utf-8') as file: for line in file: match = layer_regex.match(line) if match: return int(match.group(1))
35.788927
153
0.541332
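The first regex in _from_line above captures the constructor name, an optional CRC32 id, and the result type of a TL definition. A quick demonstration against a line in the same shape as the Telegram schema:

import re

line = "userEmpty#200250ba id:int = User;"
match = re.match(
    r'^([\w.]+)'                     # 'name'
    r'(?:#([0-9a-fA-F]+))?'          # '#optionalcode'
    r'(?:\s{?\w+:[\w\d<>#.?!]+}?)*'  # '{args:.0?type}'
    r'\s=\s'                         # ' = '
    r'([\w\d<>#.?]+);$',             # '<result.type>;'
    line
)
assert match.groups() == ("userEmpty", "200250ba", "User")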
07c02ac3abe7a10db3f40c2457c57bca98b41742
2,469
bzl
Python
dotnet/private/actions/resx_core.bzl
purkhusid/rules_dotnet
934e62d65ed3657be20b2ae3a63e032a2de9ff84
[ "Apache-2.0" ]
143
2016-03-15T20:37:54.000Z
2022-02-25T12:30:08.000Z
dotnet/private/actions/resx_core.bzl
purkhusid/rules_dotnet
934e62d65ed3657be20b2ae3a63e032a2de9ff84
[ "Apache-2.0" ]
176
2016-03-17T13:28:50.000Z
2022-03-30T21:19:24.000Z
dotnet/private/actions/resx_core.bzl
purkhusid/rules_dotnet
934e62d65ed3657be20b2ae3a63e032a2de9ff84
[ "Apache-2.0" ]
79
2016-03-16T11:34:56.000Z
2022-02-04T10:54:00.000Z
"Actions for compiling resx files" load( "@io_bazel_rules_dotnet//dotnet/private:providers.bzl", "DotnetResourceInfo", ) def emit_resx_core( dotnet, name = "", src = None, identifier = None, out = None, customresgen = None): """The function adds an action that compiles a single .resx file into .resources file. Returns [DotnetResourceInfo](api.md#dotnetresourceinfo). Args: dotnet: [DotnetContextInfo](api.md#dotnetcontextinfo). name: name of the file to generate. src: The .resx source file that is transformed into .resources file. Only `.resx` files are permitted. identifier: The logical name for the resource; the name that is used to load the resource. The default is the basename of the file name (no subfolder). out: An alternative name of the output file (if name should not be used). customresgen: custom resgen program to use. Returns: DotnetResourceInfo: [DotnetResourceInfo](api.md#dotnetresourceinfo). """ if name == "" and out == None: fail("either name or out must be set") if not out: result = dotnet.actions.declare_file(name + ".resources") else: result = dotnet.actions.declare_file(out) args = _make_runner_arglist(dotnet, src, result, customresgen.files_to_run.executable.path) # We use the command to extrace shell path and force runfiles creation resolve = dotnet._ctx.resolve_tools(tools = [customresgen]) inputs = src.files.to_list() if type(src) == "Target" else [src] dotnet.actions.run( inputs = inputs + resolve[0].to_list(), tools = customresgen.default_runfiles.files, outputs = [result], executable = customresgen.files_to_run, arguments = [args], env = {"RUNFILES_MANIFEST_FILE": customresgen.files_to_run.runfiles_manifest.path}, mnemonic = "CoreResxCompile", input_manifests = resolve[1], progress_message = ( "Compiling resoources" + dotnet.label.package + ":" + dotnet.label.name ), ) return DotnetResourceInfo( name = name, result = result, identifier = identifier, )
32.92
159
0.651681
07c0bb5052572bdefa7514e11ef8a10e0bb0427e
1,035
py
Python
test/jit/test_modules.py
xiaohanhuang/pytorch
a31aea8eaa99a5ff72b5d002c206cd68d5467a5e
[ "Intel" ]
183
2018-04-06T21:10:36.000Z
2022-03-30T15:05:24.000Z
test/jit/test_modules.py
xiaohanhuang/pytorch
a31aea8eaa99a5ff72b5d002c206cd68d5467a5e
[ "Intel" ]
818
2020-02-07T02:36:44.000Z
2022-03-31T23:49:44.000Z
test/jit/test_modules.py
xiaohanhuang/pytorch
a31aea8eaa99a5ff72b5d002c206cd68d5467a5e
[ "Intel" ]
58
2018-06-05T16:40:18.000Z
2022-03-16T15:37:29.000Z
# Owner(s): ["oncall: jit"] import torch import os import sys from torch.testing._internal.jit_utils import JitTestCase # Make the helper files in test/ importable pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) sys.path.append(pytorch_test_dir) if __name__ == '__main__': raise RuntimeError("This test file is not meant to be run directly, use:\n\n" "\tpython test/test_jit.py TESTNAME\n\n" "instead.")
30.441176
81
0.630918
07c0c2bb274ab76681ad18763446d5b0c976c985
242
py
Python
pixelate_task_1.py
Swayamshu/Pixelate_Sample_Arena
d8e8b4614987f9302a19ec1e20a922618e67b943
[ "MIT" ]
null
null
null
pixelate_task_1.py
Swayamshu/Pixelate_Sample_Arena
d8e8b4614987f9302a19ec1e20a922618e67b943
[ "MIT" ]
null
null
null
pixelate_task_1.py
Swayamshu/Pixelate_Sample_Arena
d8e8b4614987f9302a19ec1e20a922618e67b943
[ "MIT" ]
null
null
null
import gym import pix_sample_arena import time import pybullet as p import pybullet_data import cv2 if __name__ == "__main__": env = gym.make("pix_sample_arena-v0") x = 0 while True: p.stepSimulation() time.sleep(100)
18.615385
41
0.702479
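The loop above drives pybullet directly through p.stepSimulation. In a gym-style workflow one would step the environment instead; whether pix_sample_arena implements the full classic gym reset/step API is an assumption here, so treat this as a sketch:

obs = env.reset()
done = False
while not done:
    action = env.action_space.sample()          # placeholder random policy
    obs, reward, done, info = env.step(action)  # classic 4-tuple gym API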
07c22498468a49059683c71a90ec92358cf9e563
916
py
Python
tests/test_timeparser.py
vgoehler/python-i3-battery-block
e47ce80b315d812d731df84f2a1c8e1155b2469a
[ "BSD-2-Clause" ]
null
null
null
tests/test_timeparser.py
vgoehler/python-i3-battery-block
e47ce80b315d812d731df84f2a1c8e1155b2469a
[ "BSD-2-Clause" ]
9
2020-01-24T17:15:03.000Z
2020-06-02T14:16:40.000Z
tests/test_timeparser.py
vgoehler/python-i3-battery-block
e47ce80b315d812d731df84f2a1c8e1155b2469a
[ "BSD-2-Clause" ]
null
null
null
from datetime import time import pytest from i3_battery_block_vgg.timeparser import __parse_time_manually from i3_battery_block_vgg.timeparser import parse_time
29.548387
94
0.677948
07c22c6dbb3092192d2a4fddcabee0db528c9e22
4,720
py
Python
frontends/PyCDE/test/polynomial.py
fyquah/circt
cee685bf12dbf27a3f2274e08cd1af6874f70baa
[ "Apache-2.0" ]
null
null
null
frontends/PyCDE/test/polynomial.py
fyquah/circt
cee685bf12dbf27a3f2274e08cd1af6874f70baa
[ "Apache-2.0" ]
null
null
null
frontends/PyCDE/test/polynomial.py
fyquah/circt
cee685bf12dbf27a3f2274e08cd1af6874f70baa
[ "Apache-2.0" ]
null
null
null
# RUN: %PYTHON% %s 2>&1 | FileCheck %s from __future__ import annotations import mlir import pycde from pycde import (Input, Output, Parameter, module, externmodule, generator, types, dim) from circt.dialects import comb, hw class Coefficients: def __init__(self, coeff): self.coeff = coeff poly = Polynomial() poly.graph() # CHECK-LABEL: digraph "top" # CHECK: label="top"; # CHECK: [shape=record,label="{hw.constant\ni32\n\nvalue: 23 : i32}"]; poly.print() # CHECK-LABEL: hw.module @top() -> (%y: i32) # CHECK: [[REG0:%.+]] = "pycde.PolynomialCompute"(%c23_i32) {instanceName = "example", opNames = ["x"], parameters = {coefficients = {coeff = [62, 42, 6]}, module_name = "PolyComputeForCoeff_62_42_6", unused_parameter = true}, resultNames = ["y"]} : (i32) -> i32 # CHECK: [[REG1:%.+]] = "pycde.PolynomialCompute"([[REG0]]) {instanceName = "example2", opNames = ["x"], parameters = {coefficients = {coeff = [62, 42, 6]}, module_name = "PolyComputeForCoeff_62_42_6", unused_parameter = true}, resultNames = ["y"]} : (i32) -> i32 # CHECK: [[REG2:%.+]] = "pycde.PolynomialCompute"([[REG0]]) {instanceName = "example2", opNames = ["x"], parameters = {coefficients = {coeff = [1, 2, 3, 4, 5]}, module_name = "PolyComputeForCoeff_1_2_3_4_5", unused_parameter = true}, resultNames = ["y"]} : (i32) -> i32 # CHECK: [[REG3:%.+]] = "pycde.CoolPolynomialCompute"(%c23_i32) {coefficients = [4, 42], opNames = ["x"], parameters = {}, resultNames = ["y"]} : (i32) -> i32 # CHECK: hw.output [[REG0]] : i32 poly.generate() poly.print() # CHECK-LABEL: hw.module @top # CHECK: %example.y = hw.instance "example" @PolyComputeForCoeff_62_42_6(%c23_i32) {parameters = {}} : (i32) -> i32 # CHECK: %example2.y = hw.instance "example2" @PolyComputeForCoeff_62_42_6(%example.y) {parameters = {}} : (i32) -> i32 # CHECK: %example2.y_0 = hw.instance "example2" @PolyComputeForCoeff_1_2_3_4_5(%example.y) {parameters = {}} : (i32) -> i32 # CHECK: %pycde.CoolPolynomialCompute.y = hw.instance "pycde.CoolPolynomialCompute" @supercooldevice(%c23_i32) {coefficients = [4, 42], parameters = {}} : (i32) -> i32 # CHECK-LABEL: hw.module @PolyComputeForCoeff_62_42_6(%x: i32) -> (%y: i32) # CHECK: hw.constant 62 # CHECK: hw.constant 42 # CHECK: hw.constant 6 # CHECK-LABEL: hw.module @PolyComputeForCoeff_1_2_3_4_5(%x: i32) -> (%y: i32) # CHECK: hw.constant 1 # CHECK: hw.constant 2 # CHECK: hw.constant 3 # CHECK: hw.constant 4 # CHECK: hw.constant 5 # CHECK-NOT: hw.module @pycde.PolynomialCompute print("\n\n=== Verilog ===") # CHECK-LABEL: === Verilog === poly.print_verilog() # CHECK-LABEL: module PolyComputeForCoeff_62_42_6( # CHECK: input [31:0] x, # CHECK: output [31:0] y);
35.223881
272
0.63411
07c24587d23b16e92b579d57a83d8cb4e84073ef
3,430
py
Python
python/examples/service_discovery.py
davidgcameron/arc
9813ef5f45e5089507953239de8fa2248f5ad32c
[ "Apache-2.0" ]
null
null
null
python/examples/service_discovery.py
davidgcameron/arc
9813ef5f45e5089507953239de8fa2248f5ad32c
[ "Apache-2.0" ]
null
null
null
python/examples/service_discovery.py
davidgcameron/arc
9813ef5f45e5089507953239de8fa2248f5ad32c
[ "Apache-2.0" ]
null
null
null
#! /usr/bin/env python import arc import sys import os # wait for all the background threads to finish before we destroy the objects they may use import atexit # arc.Logger.getRootLogger().addDestination(arc.LogStream(sys.stderr)) # arc.Logger.getRootLogger().setThreshold(arc.DEBUG) # run the example example()
40.352941
129
0.717201
07c2fc0684398b705e83193b8a4ddd1551624694
46,071
py
Python
core/domain/rights_manager.py
netajik/oppia
d3780352d615db7438e010c5aa5eb60588bb7de6
[ "Apache-2.0" ]
null
null
null
core/domain/rights_manager.py
netajik/oppia
d3780352d615db7438e010c5aa5eb60588bb7de6
[ "Apache-2.0" ]
null
null
null
core/domain/rights_manager.py
netajik/oppia
d3780352d615db7438e010c5aa5eb60588bb7de6
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 # # Copyright 2014 The Oppia Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Domain objects and functions that manage rights for various user actions.""" import logging from constants import constants from core.domain import activity_services from core.domain import role_services from core.domain import subscription_services from core.domain import user_services from core.platform import models import feconf import utils current_user_services = models.Registry.import_current_user_services() (collection_models, exp_models,) = models.Registry.import_models([ models.NAMES.collection, models.NAMES.exploration ]) # IMPORTANT: Ensure that all changes to how these cmds are interpreted preserve # backward-compatibility with previous exploration snapshots in the datastore. # Do not modify the definitions of CMD keys that already exist. CMD_CREATE_NEW = 'create_new' CMD_CHANGE_ROLE = 'change_role' CMD_CHANGE_EXPLORATION_STATUS = 'change_exploration_status' CMD_CHANGE_COLLECTION_STATUS = 'change_collection_status' CMD_CHANGE_PRIVATE_VIEWABILITY = 'change_private_viewability' CMD_RELEASE_OWNERSHIP = 'release_ownership' CMD_UPDATE_FIRST_PUBLISHED_MSEC = 'update_first_published_msec' ACTIVITY_STATUS_PRIVATE = constants.ACTIVITY_STATUS_PRIVATE ACTIVITY_STATUS_PUBLIC = constants.ACTIVITY_STATUS_PUBLIC ROLE_OWNER = 'owner' ROLE_EDITOR = 'editor' ROLE_TRANSLATOR = 'translator' ROLE_VIEWER = 'viewer' ROLE_NONE = 'none' ROLE_ADMIN = 'admin' ROLE_MODERATOR = 'moderator' def get_activity_rights_from_model(activity_rights_model, activity_type): """Constructs an ActivityRights object from the given activity rights model. Args: activity_rights_model: ActivityRightsModel. Activity rights from the datastore. activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION Returns: ActivityRights. The rights object created from the model. """ return ActivityRights( activity_rights_model.id, activity_rights_model.owner_ids, activity_rights_model.editor_ids, activity_rights_model.translator_ids, activity_rights_model.viewer_ids, community_owned=activity_rights_model.community_owned, cloned_from=( activity_rights_model.cloned_from if activity_type == constants.ACTIVITY_TYPE_EXPLORATION else None), status=activity_rights_model.status, viewable_if_private=activity_rights_model.viewable_if_private, first_published_msec=activity_rights_model.first_published_msec ) def _save_activity_rights( committer_id, activity_rights, activity_type, commit_message, commit_cmds): """Saves an ExplorationRights or CollectionRights domain object to the datastore. Args: committer_id: str. ID of the committer. activity_rights: ActivityRights. The rights object for the given activity. activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION commit_message: str. Descriptive message for the commit. commit_cmds: list(dict). 
A list of commands describing what kind of commit was done. """ activity_rights.validate() if activity_type == constants.ACTIVITY_TYPE_EXPLORATION: model_cls = exp_models.ExplorationRightsModel elif activity_type == constants.ACTIVITY_TYPE_COLLECTION: model_cls = collection_models.CollectionRightsModel model = model_cls.get(activity_rights.id, strict=False) model.owner_ids = activity_rights.owner_ids model.editor_ids = activity_rights.editor_ids model.viewer_ids = activity_rights.viewer_ids model.translator_ids = activity_rights.translator_ids model.community_owned = activity_rights.community_owned model.status = activity_rights.status model.viewable_if_private = activity_rights.viewable_if_private model.first_published_msec = activity_rights.first_published_msec model.commit(committer_id, commit_message, commit_cmds) def _update_exploration_summary(activity_rights): """Updates the exploration summary for the activity associated with the given rights object. The ID of rights object is the same as the ID of associated activity. Args: activity_rights: ActivityRights. The rights object for the given activity. """ # TODO(msl): get rid of inline imports by refactoring code. from core.domain import exp_services exp_services.update_exploration_summary( activity_rights.id, None) def _update_collection_summary(activity_rights): """Updates the collection summary for the given activity associated with the given rights object. The ID of rights object is the same as the ID of associated activity. Args: activity_rights: ActivityRights. The rights object for the given activity. """ from core.domain import collection_services collection_services.update_collection_summary( activity_rights.id, None) def _update_activity_summary(activity_type, activity_rights): """Updates the activity summary for the given activity associated with the given rights object. The ID of rights object is the same as the ID of associated activity. Args: activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION activity_rights: ActivityRights. The rights object for the given activity. """ if activity_type == constants.ACTIVITY_TYPE_EXPLORATION: _update_exploration_summary(activity_rights) elif activity_type == constants.ACTIVITY_TYPE_COLLECTION: _update_collection_summary(activity_rights) def update_activity_first_published_msec( activity_type, activity_id, first_published_msec): """Updates the first_published_msec field for the given activity. The caller is responsible for ensuring that this value is not already set before updating it. Args: activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION activity_id: str. ID of the activity. first_published_msec: float. First publication time in milliseconds since the Epoch. """ activity_rights = _get_activity_rights(activity_type, activity_id) commit_cmds = [{ 'cmd': CMD_UPDATE_FIRST_PUBLISHED_MSEC, 'old_first_published_msec': activity_rights.first_published_msec, 'new_first_published_msec': first_published_msec }] activity_rights.first_published_msec = first_published_msec _save_activity_rights( feconf.SYSTEM_COMMITTER_ID, activity_rights, activity_type, 'set first published time in msec', commit_cmds) def create_new_exploration_rights(exploration_id, committer_id): """Creates a new exploration rights object and saves it to the datastore. Subscribes the committer to the new exploration. Args: exploration_id: str. ID of the exploration. 
committer_id: str. ID of the committer. """ exploration_rights = ActivityRights( exploration_id, [committer_id], [], [], []) commit_cmds = [{'cmd': CMD_CREATE_NEW}] exp_models.ExplorationRightsModel( id=exploration_rights.id, owner_ids=exploration_rights.owner_ids, editor_ids=exploration_rights.editor_ids, translator_ids=exploration_rights.translator_ids, viewer_ids=exploration_rights.viewer_ids, community_owned=exploration_rights.community_owned, status=exploration_rights.status, viewable_if_private=exploration_rights.viewable_if_private, first_published_msec=exploration_rights.first_published_msec, ).commit(committer_id, 'Created new exploration', commit_cmds) subscription_services.subscribe_to_exploration( committer_id, exploration_id) def get_exploration_rights(exploration_id, strict=True): """Retrieves the rights for this exploration from the datastore. Args: exploration_id: str. ID of the exploration. strict: bool. Whether to raise an error if there is no exploration matching the given ID. Returns: ActivityRights. The rights object for the given exploration. Raises: EntityNotFoundError. The exploration with ID exploration_id was not found in the datastore. """ model = exp_models.ExplorationRightsModel.get( exploration_id, strict=strict) if model is None: return None return get_activity_rights_from_model( model, constants.ACTIVITY_TYPE_EXPLORATION) def get_multiple_exploration_rights_by_ids(exp_ids): """Returns a list of ActivityRights objects for given exploration ids. Args: exp_ids: list(str). List of exploration ids. Returns: list(ActivityRights or None). List of rights object containing ActivityRights object for existing exploration or None. """ exp_rights_models = exp_models.ExplorationRightsModel.get_multi( exp_ids) exp_models_list = [] for model in exp_rights_models: if model is None: exp_models_list.append(None) else: exp_models_list.append( get_activity_rights_from_model( model, constants.ACTIVITY_TYPE_EXPLORATION)) return exp_models_list def is_exploration_private(exploration_id): """Returns whether exploration is private. Args: exploration_id: str. ID of the exploration. Returns: bool. Whether the exploration is private or not. """ exploration_rights = get_exploration_rights(exploration_id) return exploration_rights.status == ACTIVITY_STATUS_PRIVATE def is_exploration_public(exploration_id): """Returns whether exploration is public. Args: exploration_id: str. ID of the exploration. Returns: bool. Whether the exploration is public. """ exploration_rights = get_exploration_rights(exploration_id) return exploration_rights.status == ACTIVITY_STATUS_PUBLIC def is_exploration_cloned(exploration_id): """Returns whether the exploration is a clone of another exploration. Args: exploration_id: str. ID of the exploration. Returns: bool. Whether the exploration is a clone of another exploration. """ exploration_rights = get_exploration_rights(exploration_id) return bool(exploration_rights.cloned_from) def create_new_collection_rights(collection_id, committer_id): """Creates a new collection rights object and saves it to the datastore. Subscribes the committer to the new collection. Args: collection_id: str. ID of the collection. committer_id: str. ID of the committer. 
""" collection_rights = ActivityRights( collection_id, [committer_id], [], [], []) commit_cmds = [{'cmd': CMD_CREATE_NEW}] collection_models.CollectionRightsModel( id=collection_rights.id, owner_ids=collection_rights.owner_ids, editor_ids=collection_rights.editor_ids, translator_ids=collection_rights.translator_ids, viewer_ids=collection_rights.viewer_ids, community_owned=collection_rights.community_owned, status=collection_rights.status, viewable_if_private=collection_rights.viewable_if_private, first_published_msec=collection_rights.first_published_msec ).commit(committer_id, 'Created new collection', commit_cmds) subscription_services.subscribe_to_collection(committer_id, collection_id) def get_collection_rights(collection_id, strict=True): """Retrieves the rights for this collection from the datastore. Args: collection_id: str. ID of the collection. strict: bool. Whether to raise an error if ID is not found. Returns: ActivityRights. The rights object for the collection. Raises: EntityNotFoundError. The collection with ID collection_id is not found in the datastore. """ model = collection_models.CollectionRightsModel.get( collection_id, strict=strict) if model is None: return None return get_activity_rights_from_model( model, constants.ACTIVITY_TYPE_COLLECTION) def get_collection_owner_names(collection_id): """Retrieves the owners for this collection from the datastore. Args: collection_id: str. ID of the collection. Returns: list(str). Human-readable usernames (or truncated email addresses) of owners for this collection. """ collection_rights = get_collection_rights(collection_id) return user_services.get_human_readable_user_ids( collection_rights.owner_ids) def is_collection_private(collection_id): """Returns whether the collection is private. Args: collection_id: str. ID of the collection. Returns: bool. Whether the collection is private. """ collection_rights = get_collection_rights(collection_id) return collection_rights.status == ACTIVITY_STATUS_PRIVATE def is_collection_public(collection_id): """Returns whether the collection is public. Args: collection_id: str. ID of the collection. Returns: bool. Whether the collection is public. """ collection_rights = get_collection_rights(collection_id) return collection_rights.status == ACTIVITY_STATUS_PUBLIC def _get_activity_rights(activity_type, activity_id): """Retrieves the rights object for the given activity based on its type. Args: activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION activity_id: str. ID of the activity. Returns: ActivityRights. The rights object associated with the given activity. Raises: Exception. activity_type provided is unknown. """ if activity_type == constants.ACTIVITY_TYPE_EXPLORATION: return get_exploration_rights(activity_id, strict=False) elif activity_type == constants.ACTIVITY_TYPE_COLLECTION: return get_collection_rights(activity_id, strict=False) else: raise Exception( 'Cannot get activity rights for unknown activity type: %s' % ( activity_type)) def check_can_access_activity(user, activity_rights): """Checks whether the user can access given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: AcitivityRights or None. Rights object for the given activity. Returns: bool. Whether the given activity can be accessed by the given user. 
""" if activity_rights is None: return False elif activity_rights.is_published(): return bool( role_services.ACTION_PLAY_ANY_PUBLIC_ACTIVITY in user.actions) elif activity_rights.is_private(): return bool( (role_services.ACTION_PLAY_ANY_PRIVATE_ACTIVITY in user.actions) or activity_rights.is_viewer(user.user_id) or activity_rights.is_owner(user.user_id) or activity_rights.is_editor(user.user_id) or activity_rights.is_translator(user.user_id) or activity_rights.viewable_if_private) def check_can_edit_activity(user, activity_rights): """Checks whether the user can edit given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: ActivityRights or None. Rights object for the given activity. Returns: bool. Whether the given user can edit this activity. """ if activity_rights is None: return False if role_services.ACTION_EDIT_OWNED_ACTIVITY not in user.actions: return False if (activity_rights.is_owner(user.user_id) or activity_rights.is_editor(user.user_id)): return True if (activity_rights.community_owned or (role_services.ACTION_EDIT_ANY_ACTIVITY in user.actions)): return True if (activity_rights.is_published() and (role_services.ACTION_EDIT_ANY_PUBLIC_ACTIVITY in user.actions)): return True return False def check_can_translate_activity(user, activity_rights): """Checks whether the user can translate given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: ActivityRights or None. Rights object for the given activity. Returns: bool. Whether the given user can translate this activity. """ if activity_rights is None: return False if role_services.ACTION_EDIT_OWNED_ACTIVITY not in user.actions: return False if (activity_rights.is_owner(user.user_id) or activity_rights.is_editor(user.user_id) or activity_rights.is_translator(user.user_id)): return True if (activity_rights.community_owned or (role_services.ACTION_EDIT_ANY_ACTIVITY in user.actions)): return True if (activity_rights.is_published() and (role_services.ACTION_EDIT_ANY_PUBLIC_ACTIVITY in user.actions)): return True return False def check_can_delete_activity(user, activity_rights): """Checks whether the user can delete given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: ActivityRights or None. Rights object for the given activity. Returns: bool. Whether the user can delete given activity. """ if activity_rights is None: return False if role_services.ACTION_DELETE_ANY_ACTIVITY in user.actions: return True elif (activity_rights.is_private() and (role_services.ACTION_DELETE_OWNED_PRIVATE_ACTIVITY in user.actions) and activity_rights.is_owner(user.user_id)): return True elif (activity_rights.is_published() and (role_services.ACTION_DELETE_ANY_PUBLIC_ACTIVITY in user.actions)): return True return False def check_can_modify_activity_roles(user, activity_rights): """Checks whether the user can modify roles for given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: ActivityRights or None. Rights object for the given activity. Returns: bool. Whether the user can modify roles for given activity. 
""" if activity_rights is None: return False if (activity_rights.community_owned or activity_rights.cloned_from): return False if (role_services.ACTION_MODIFY_ROLES_FOR_ANY_ACTIVITY in user.actions): return True if (role_services.ACTION_MODIFY_ROLES_FOR_OWNED_ACTIVITY in user.actions): if activity_rights.is_owner(user.user_id): return True return False def check_can_release_ownership(user, activity_rights): """Checks whether the user can release ownership for given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: ActivityRights or None. Rights object for the given activity. Returns: bool. Whether the user can release ownership for given activity. """ if activity_rights is None: return False if activity_rights.is_private(): return False return check_can_modify_activity_roles( user, activity_rights) def check_can_publish_activity(user, activity_rights): """Checks whether the user can publish given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: ActivityRights or None. Rights object for the given activity. Returns: bool. Whether the user can publish given activity. """ if activity_rights is None: return False if activity_rights.cloned_from: return False if activity_rights.is_published(): return False if role_services.ACTION_PUBLISH_ANY_ACTIVITY in user.actions: return True if role_services.ACTION_PUBLISH_OWNED_ACTIVITY in user.actions: if activity_rights.is_owner(user.user_id): return True return False def check_can_unpublish_activity(user, activity_rights): """Checks whether the user can unpublish given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: ActivityRights or None. Rights object for the given activity. Returns: bool. Whether the user can unpublish given activity. """ if activity_rights is None: return False if activity_rights.community_owned: return False if activity_rights.is_published(): if role_services.ACTION_UNPUBLISH_ANY_PUBLIC_ACTIVITY in user.actions: return True return False def _assign_role( committer, assignee_id, new_role, activity_id, activity_type): """Assigns a new role to the user. Args: committer: UserActionsInfo. UserActionInfo object for the user who is performing the action. assignee_id: str. ID of the user whose role is being changed. new_role: str. The name of the new role: One of ROLE_OWNER ROLE_EDITOR ROLE_TRANSLATOR ROLE_VIEWER activity_id: str. ID of the activity. activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION Raises: Exception. The committer does not have rights to modify a role. Exception. The user already owns the activity. Exception. The user can already edit the activity. Exception. The user can already translate the activity. Exception. The activity is already publicly editable. Exception. The activity is already publicly translatable. Exception. The user can already view the activity. Exception. The activity is already publicly viewable. Exception. The role is invalid. """ committer_id = committer.user_id activity_rights = _get_activity_rights(activity_type, activity_id) if not check_can_modify_activity_roles(committer, activity_rights): logging.error( 'User %s tried to allow user %s to be a(n) %s of activity %s ' 'but was refused permission.' 
% (
                committer_id, assignee_id, new_role, activity_id))
        raise Exception(
            'UnauthorizedUserException: Could not assign new role.')

    assignee_username = user_services.get_username(assignee_id)
    old_role = ROLE_NONE

    if new_role == ROLE_OWNER:
        if activity_rights.is_owner(assignee_id):
            raise Exception('This user already owns this %s.' % activity_type)

        activity_rights.owner_ids.append(assignee_id)

        if assignee_id in activity_rights.viewer_ids:
            activity_rights.viewer_ids.remove(assignee_id)
            old_role = ROLE_VIEWER
        if assignee_id in activity_rights.editor_ids:
            activity_rights.editor_ids.remove(assignee_id)
            old_role = ROLE_EDITOR
        if assignee_id in activity_rights.translator_ids:
            activity_rights.translator_ids.remove(assignee_id)
            old_role = ROLE_TRANSLATOR

    elif new_role == ROLE_EDITOR:
        if (activity_rights.is_editor(assignee_id) or
                activity_rights.is_owner(assignee_id)):
            raise Exception(
                'This user already can edit this %s.' % activity_type)

        if activity_rights.community_owned:
            raise Exception(
                'Community-owned %ss can be edited by anyone.' % activity_type)

        activity_rights.editor_ids.append(assignee_id)

        if assignee_id in activity_rights.translator_ids:
            activity_rights.translator_ids.remove(assignee_id)
            old_role = ROLE_TRANSLATOR
        if assignee_id in activity_rights.viewer_ids:
            activity_rights.viewer_ids.remove(assignee_id)
            old_role = ROLE_VIEWER

    elif new_role == ROLE_TRANSLATOR:
        if (activity_rights.is_editor(assignee_id) or
                activity_rights.is_translator(assignee_id) or
                activity_rights.is_owner(assignee_id)):
            raise Exception(
                'This user already can translate this %s.' % activity_type)

        if activity_rights.community_owned:
            raise Exception(
                'Community-owned %ss can be translated by anyone.'
                % activity_type)

        activity_rights.translator_ids.append(assignee_id)

        if assignee_id in activity_rights.viewer_ids:
            activity_rights.viewer_ids.remove(assignee_id)
            old_role = ROLE_VIEWER

    elif new_role == ROLE_VIEWER:
        if (activity_rights.is_owner(assignee_id) or
                activity_rights.is_editor(assignee_id) or
                activity_rights.is_viewer(assignee_id)):
            raise Exception(
                'This user already can view this %s.' % activity_type)

        if activity_rights.status != ACTIVITY_STATUS_PRIVATE:
            raise Exception(
                'Public %ss can be viewed by anyone.' % activity_type)

        activity_rights.viewer_ids.append(assignee_id)

    else:
        raise Exception('Invalid role: %s' % new_role)

    commit_message = 'Changed role of %s from %s to %s' % (
        assignee_username, old_role, new_role)
    commit_cmds = [{
        'cmd': CMD_CHANGE_ROLE,
        'assignee_id': assignee_id,
        'old_role': old_role,
        'new_role': new_role
    }]

    _save_activity_rights(
        committer_id, activity_rights, activity_type, commit_message,
        commit_cmds)
    _update_activity_summary(activity_type, activity_rights)


def _release_ownership_of_activity(committer, activity_id, activity_type):
    """Releases ownership of the given activity to the community.

    Args:
        committer: UserActionsInfo. UserActionsInfo object for the user who
            is performing the action.
        activity_id: str. ID of the activity.
        activity_type: str. The type of activity. Possible values:
            constants.ACTIVITY_TYPE_EXPLORATION
            constants.ACTIVITY_TYPE_COLLECTION

    Raises:
        Exception. The committer does not have release rights.
    """
    committer_id = committer.user_id
    activity_rights = _get_activity_rights(activity_type, activity_id)

    if not check_can_release_ownership(committer, activity_rights):
        logging.error(
            'User %s tried to release ownership of %s %s but was '
            'refused permission.' % (committer_id, activity_type, activity_id))
        raise Exception(
            'The ownership of this %s cannot be released.'
% activity_type) activity_rights.community_owned = True activity_rights.owner_ids = [] activity_rights.editor_ids = [] activity_rights.viewer_ids = [] commit_cmds = [{ 'cmd': CMD_RELEASE_OWNERSHIP, }] _save_activity_rights( committer_id, activity_rights, activity_type, '%s ownership released to the community.' % activity_type, commit_cmds) _update_activity_summary(activity_type, activity_rights) def _change_activity_status( committer_id, activity_id, activity_type, new_status, commit_message): """Changes the status of the given activity. Args: committer_id: str. ID of the user who is performing the update action. activity_id: str. ID of the activity. activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION new_status: str. The new status of the activity. commit_message: str. The human-written commit message for this change. """ activity_rights = _get_activity_rights(activity_type, activity_id) old_status = activity_rights.status activity_rights.status = new_status if activity_type == constants.ACTIVITY_TYPE_EXPLORATION: cmd_type = CMD_CHANGE_EXPLORATION_STATUS elif activity_type == constants.ACTIVITY_TYPE_COLLECTION: cmd_type = CMD_CHANGE_COLLECTION_STATUS commit_cmds = [{ 'cmd': cmd_type, 'old_status': old_status, 'new_status': new_status }] if new_status != ACTIVITY_STATUS_PRIVATE: activity_rights.viewer_ids = [] if activity_rights.first_published_msec is None: activity_rights.first_published_msec = ( utils.get_current_time_in_millisecs()) _save_activity_rights( committer_id, activity_rights, activity_type, commit_message, commit_cmds) _update_activity_summary(activity_type, activity_rights) def _publish_activity(committer, activity_id, activity_type): """Publishes the given activity. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. activity_id: str. ID of the activity. activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION Raises: Exception. The committer does not have rights to publish the activity. """ committer_id = committer.user_id activity_rights = _get_activity_rights(activity_type, activity_id) if not check_can_publish_activity(committer, activity_rights): logging.error( 'User %s tried to publish %s %s but was refused ' 'permission.' % (committer_id, activity_type, activity_id)) raise Exception('This %s cannot be published.' % activity_type) _change_activity_status( committer_id, activity_id, activity_type, ACTIVITY_STATUS_PUBLIC, '%s published.' % activity_type) def _unpublish_activity(committer, activity_id, activity_type): """Unpublishes the given activity. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. activity_id: str. ID of the activity. activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION Raises: Exception. The committer does not have rights to unpublish the activity. """ committer_id = committer.user_id activity_rights = _get_activity_rights(activity_type, activity_id) if not check_can_unpublish_activity(committer, activity_rights): logging.error( 'User %s tried to unpublish %s %s but was refused ' 'permission.' % (committer_id, activity_type, activity_id)) raise Exception('This %s cannot be unpublished.' % activity_type) _change_activity_status( committer_id, activity_id, activity_type, ACTIVITY_STATUS_PRIVATE, '%s unpublished.' 
% activity_type) activity_services.remove_featured_activity(activity_type, activity_id) # Rights functions for activities. def assign_role_for_exploration( committer, exploration_id, assignee_id, new_role): """Assigns a user to the given role and subscribes the assignee to future exploration updates. The caller should ensure that assignee_id corresponds to a valid user in the system. Args: committer: UserActionsInfo. The UserActionsInfo object for the committer. exploration_id: str. ID of the exploration. assignee_id: str. ID of the user whose role is being changed. new_role: str. The name of the new role: One of ROLE_OWNER ROLE_EDITOR ROLE_TRANSLATOR Raises: Exception. This could potentially throw an exception from _assign_role. """ _assign_role( committer, assignee_id, new_role, exploration_id, constants.ACTIVITY_TYPE_EXPLORATION) if new_role in [ROLE_OWNER, ROLE_EDITOR, ROLE_TRANSLATOR]: subscription_services.subscribe_to_exploration( assignee_id, exploration_id) def release_ownership_of_exploration(committer, exploration_id): """Releases ownership of the given exploration to the community. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. exploration_id: str. ID of the exploration. Raises: Exception. This could potentially throw an exception from _release_ownership_of_activity. """ _release_ownership_of_activity( committer, exploration_id, constants.ACTIVITY_TYPE_EXPLORATION) def set_private_viewability_of_exploration( committer, exploration_id, viewable_if_private): """Sets the viewable_if_private attribute for the given exploration's rights object. If viewable_if_private is True, this allows a private exploration to be viewed by anyone with the link. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. exploration_id: str. ID of the exploration. viewable_if_private: bool. Whether the exploration should be made viewable (by anyone with the link). Raises: Exception. The committer does not have the permission to perform change action. Exception. If the viewable_if_private property is already as desired. """ committer_id = committer.user_id exploration_rights = get_exploration_rights(exploration_id) # The user who can publish activity can change its private viewability. if not check_can_publish_activity(committer, exploration_rights): logging.error( 'User %s tried to change private viewability of exploration %s ' 'but was refused permission.' % (committer_id, exploration_id)) raise Exception( 'The viewability status of this exploration cannot be changed.') old_viewable_if_private = exploration_rights.viewable_if_private if old_viewable_if_private == viewable_if_private: raise Exception( 'Trying to change viewability status of this exploration to %s, ' 'but that is already the current value.' % viewable_if_private) exploration_rights.viewable_if_private = viewable_if_private commit_cmds = [{ 'cmd': CMD_CHANGE_PRIVATE_VIEWABILITY, 'old_viewable_if_private': old_viewable_if_private, 'new_viewable_if_private': viewable_if_private, }] commit_message = ( 'Made exploration viewable to anyone with the link.' if viewable_if_private else 'Made exploration viewable only to invited playtesters.') _save_activity_rights( committer_id, exploration_rights, constants.ACTIVITY_TYPE_EXPLORATION, commit_message, commit_cmds) _update_exploration_summary(exploration_rights) def publish_exploration(committer, exploration_id): """Publishes the given exploration. It is the responsibility of the caller to check that the exploration is valid prior to publication. 
Args: committer: UserActionsInfo. UserActionsInfo object for the committer. exploration_id: str. ID of the exploration. Raises: Exception. This could potentially throw an exception from _publish_activity. """ _publish_activity( committer, exploration_id, constants.ACTIVITY_TYPE_EXPLORATION) def unpublish_exploration(committer, exploration_id): """Unpublishes the given exploration. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. exploration_id: str. ID of the exploration. Raises: Exception. This could potentially throw an exception from _unpublish_activity. """ _unpublish_activity( committer, exploration_id, constants.ACTIVITY_TYPE_EXPLORATION) # Rights functions for collections. def assign_role_for_collection( committer, collection_id, assignee_id, new_role): """Assign the given user to the given role and subscribes the assignee to future collection updates. The caller should ensure that assignee_id corresponds to a valid user in the system. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. collection_id: str. ID of the collection. assignee_id: str. ID of the user whose role is being changed. new_role: str. The name of the new role: One of ROLE_OWNER ROLE_EDITOR Raises: Exception. This could potentially throw an exception from _assign_role. """ _assign_role( committer, assignee_id, new_role, collection_id, constants.ACTIVITY_TYPE_COLLECTION) if new_role in [ROLE_OWNER, ROLE_EDITOR]: subscription_services.subscribe_to_collection( assignee_id, collection_id) def release_ownership_of_collection(committer, collection_id): """Releases ownership of the given collection to the community. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. collection_id: str. ID of the collection. Raises: Exception. This could potentially throw an exception from _release_ownership_of_activity. """ _release_ownership_of_activity( committer, collection_id, constants.ACTIVITY_TYPE_COLLECTION) def publish_collection(committer, collection_id): """Publishes the given collection. It is the responsibility of the caller to check that the collection is valid prior to publication. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. collection_id: str. ID of the collection. Raises: Exception. This could potentially throw an exception from _publish_activity. """ _publish_activity( committer, collection_id, constants.ACTIVITY_TYPE_COLLECTION) def unpublish_collection(committer, collection_id): """Unpublishes the given collection. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. collection_id: str. ID of the collection. Raises: Exception. This could potentially throw an exception from _unpublish_activity. """ _unpublish_activity( committer, collection_id, constants.ACTIVITY_TYPE_COLLECTION)
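A minimal, self-contained sketch of the role-precedence bookkeeping that _assign_role performs (a user holds at most one of owner/editor/translator/viewer, and assigning a new role removes the old one); the ToyRights class is illustrative and not part of this module:

class ToyRights:
    # One id list per role, mirroring ActivityRights' owner/editor/translator/viewer ids.
    def __init__(self):
        self.roles = {'owner': [], 'editor': [], 'translator': [], 'viewer': []}

    def assign(self, user_id, new_role):
        # Drop the user from every other role list before adding the new role,
        # the same bookkeeping _assign_role does when it records old_role.
        for role, ids in self.roles.items():
            if user_id in ids and role != new_role:
                ids.remove(user_id)
        if user_id not in self.roles[new_role]:
            self.roles[new_role].append(user_id)

r = ToyRights()
r.assign('u1', 'viewer')
r.assign('u1', 'editor')   # the viewer entry is removed
print(r.roles)             # {'owner': [], 'editor': ['u1'], 'translator': [], 'viewer': []}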
35.303448
80
0.692323
07c453afd778414ad9fee5c8bcdaf13018eeb9f5
1,755
py
Python
pkg_resources/_vendor/packaging/_typing.py
GDGSNF/setuptools
3a209029fd2217c039593cd1f6cb378a28527a59
[ "MIT" ]
null
null
null
pkg_resources/_vendor/packaging/_typing.py
GDGSNF/setuptools
3a209029fd2217c039593cd1f6cb378a28527a59
[ "MIT" ]
null
null
null
pkg_resources/_vendor/packaging/_typing.py
GDGSNF/setuptools
3a209029fd2217c039593cd1f6cb378a28527a59
[ "MIT" ]
null
null
null
"""For neatly implementing static typing in packaging. `mypy` - the static type analysis tool we use - uses the `typing` module, which provides core functionality fundamental to mypy's functioning. Generally, `typing` would be imported at runtime and used in that fashion - it acts as a no-op at runtime and does not have any run-time overhead by design. As it turns out, `typing` is not vendorable - it uses separate sources for Python 2/Python 3. Thus, this codebase can not expect it to be present. To work around this, mypy allows the typing import to be behind a False-y optional to prevent it from running at runtime and type-comments can be used to remove the need for the types to be accessible directly during runtime. This module provides the False-y guard in a nicely named fashion so that a curious maintainer can reach here to read this. In packaging, all static-typing related imports should be guarded as follows: from packaging._typing import TYPE_CHECKING if TYPE_CHECKING: from typing import ... Ref: https://github.com/python/mypy/issues/3216 """ __all__ = ["TYPE_CHECKING", "cast"] # The TYPE_CHECKING constant defined by the typing module is False at runtime # but True while type checking. TYPE_CHECKING = False # pragma: no cover # typing's cast syntax requires calling typing.cast at runtime, but we don't # want to import typing at runtime. Here, we inform the type checkers that # we're importing `typing.cast` as `cast` and re-implement typing.cast's # runtime behavior in a block that is ignored by type checkers. if TYPE_CHECKING: # pragma: no cover # not executed at runtime from typing import cast else: # executed at runtime
37.340426
79
0.756125
07c4707b6186eda818ca02fcf8c20fba9ffa25c7
7,024
py
Python
selfdrive/car/toyota/carcontroller.py
aolin480/openpilot
9ac00c3e5e111a05a0bb10018ccd190571dfff4d
[ "MIT" ]
70
2021-06-18T13:04:10.000Z
2022-03-31T13:58:09.000Z
selfdrive/car/toyota/carcontroller.py
aolin480/openpilot
9ac00c3e5e111a05a0bb10018ccd190571dfff4d
[ "MIT" ]
50
2021-06-16T23:52:55.000Z
2022-03-30T14:05:33.000Z
selfdrive/car/toyota/carcontroller.py
aolin480/openpilot
9ac00c3e5e111a05a0bb10018ccd190571dfff4d
[ "MIT" ]
88
2021-06-23T21:33:19.000Z
2022-03-31T07:27:08.000Z
from cereal import car from common.numpy_fast import clip, interp from selfdrive.car import apply_toyota_steer_torque_limits, create_gas_interceptor_command, make_can_msg from selfdrive.car.toyota.toyotacan import create_steer_command, create_ui_command, \ create_accel_command, create_acc_cancel_command, \ create_fcw_command, create_lta_steer_command from selfdrive.car.toyota.values import CAR, STATIC_DSU_MSGS, NO_STOP_TIMER_CAR, TSS2_CAR, \ MIN_ACC_SPEED, PEDAL_TRANSITION, CarControllerParams from opendbc.can.packer import CANPacker from common.op_params import opParams VisualAlert = car.CarControl.HUDControl.VisualAlert
48.777778
152
0.716259
07c5ea304ad9e83dcc3f691f719157c2c8a1317f
85
py
Python
app/auth/__init__.py
louisenje/Pitches
11248906ffa7d1b6ed6c47db0c5e7bd3b4768825
[ "Unlicense" ]
null
null
null
app/auth/__init__.py
louisenje/Pitches
11248906ffa7d1b6ed6c47db0c5e7bd3b4768825
[ "Unlicense" ]
null
null
null
app/auth/__init__.py
louisenje/Pitches
11248906ffa7d1b6ed6c47db0c5e7bd3b4768825
[ "Unlicense" ]
null
null
null
from flask import Blueprint

auth = Blueprint('auth', __name__)

from . import views, forms
21.25
31
0.811765
07c638a7630e99e901331aada0e29b538ff7310d
1,482
py
Python
forms/QRGenerator.py
Rono-Barto-Co/Project-QR
e80fc5a41f25542038c090311844912790cb1478
[ "MIT" ]
3
2019-07-04T03:27:06.000Z
2019-09-06T08:52:35.000Z
forms/QRGenerator.py
Rono-Barto-Co/Project-QR
e80fc5a41f25542038c090311844912790cb1478
[ "MIT" ]
null
null
null
forms/QRGenerator.py
Rono-Barto-Co/Project-QR
e80fc5a41f25542038c090311844912790cb1478
[ "MIT" ]
null
null
null
from flask_wtf import FlaskForm from wtforms import StringField, SubmitField, SelectField from wtforms.validators import DataRequired
54.888889
89
0.323887
07c6a477f6bfebee04a539e1d02b2df95226ab91
1,259
py
Python
Quiz/m2_advanced_quants/l5_volatility/volatility_estimation.py
jcrangel/AI-for-Trading
c3b865e992f8eb8deda91e7641428eef1d343636
[ "Apache-2.0" ]
null
null
null
Quiz/m2_advanced_quants/l5_volatility/volatility_estimation.py
jcrangel/AI-for-Trading
c3b865e992f8eb8deda91e7641428eef1d343636
[ "Apache-2.0" ]
null
null
null
Quiz/m2_advanced_quants/l5_volatility/volatility_estimation.py
jcrangel/AI-for-Trading
c3b865e992f8eb8deda91e7641428eef1d343636
[ "Apache-2.0" ]
null
null
null
import pandas as pd
import numpy as np


def estimate_volatility(prices, l):
    """Create an exponential moving average model of the volatility of a stock
    price, and return the most recent (last) volatility estimate.

    Parameters
    ----------
    prices : pandas.Series
        A series of adjusted closing prices for a stock.

    l : float
        The 'lambda' parameter of the exponential moving average model. Making
        this value smaller will cause the model to weight older terms less
        relative to more recent terms.

    Returns
    -------
    last_vol : float
        The last element of your exponential moving average volatility model
        series.

    """
    # Volatility is modeled on returns, not on the price level: square the
    # log returns, smooth them with an exponential moving average (weight l
    # on older terms), and take the square root of the last smoothed value.
    log_returns = np.log(prices / prices.shift(1))
    return np.sqrt(log_returns.pow(2).ewm(alpha=1 - l).mean().iloc[-1])


def test_run(filename='data.csv'):
    """Test run estimate_volatility() with stock prices from a file."""
    prices = pd.read_csv(filename, parse_dates=[
                         'date'], index_col='date', squeeze=True)
    print("Most recent volatility estimate: {:.6f}".format(estimate_volatility(prices, 0.7)))
    # print(estimate_volatility(prices, 0.7))


if __name__ == '__main__':
    test_run()
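A quick check on synthetic data (the random-walk prices and seed below are illustrative); the estimate should land near the true 0.01 daily volatility used to generate the series:

import numpy as np
import pandas as pd

np.random.seed(0)
dates = pd.date_range('2018-01-01', periods=500, freq='B')
prices = pd.Series(100 * np.exp(np.cumsum(np.random.normal(0, 0.01, 500))),
                   index=dates)
print(estimate_volatility(prices, 0.7))   # roughly 0.01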
31.475
96
0.659253
07c6e4237d951b4abd908ba105b7d327ebecef98
484
py
Python
jp.atcoder/abc122/abc122_c/9516079.py
kagemeka/atcoder-submissions
91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e
[ "MIT" ]
1
2022-02-09T03:06:25.000Z
2022-02-09T03:06:25.000Z
jp.atcoder/abc122/abc122_c/9516079.py
kagemeka/atcoder-submissions
91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e
[ "MIT" ]
1
2022-02-05T22:53:18.000Z
2022-02-09T01:29:30.000Z
jp.atcoder/abc122/abc122_c/9516079.py
kagemeka/atcoder-submissions
91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e
[ "MIT" ]
null
null
null
import sys

n, q = map(int, sys.stdin.readline().split())
s = '$' + sys.stdin.readline().rstrip()
lr = zip(*[map(int, sys.stdin.read().split())] * 2)


def main():
    # cnt[i]: number of "AC" pairs whose 'A' lies at a position <= i
    # (standard prefix-sum solution for this query problem).
    cnt = [0] * (n + 1)
    for i in range(1, n):
        cnt[i] = cnt[i - 1] + (s[i] == 'A' and s[i + 1] == 'C')
    # Pairs fully inside [l, r] have their 'A' at positions l .. r-1.
    return (cnt[r - 1] - cnt[l - 1] for l, r in lr)


if __name__ == '__main__':
    ans = main()
    print(*ans, sep='\n')
23.047619
52
0.452479
07c71ca4f5171ffec7ba2da60f8676b83050c69c
2,262
py
Python
Decoder.py
gokulsg/Attention-is-all-you-need-implementation-from-scratch
f5eb591d169cbef3ef8b066d8d462fee11badc3b
[ "MIT" ]
1
2022-02-19T03:51:38.000Z
2022-02-19T03:51:38.000Z
Decoder.py
gokulsg/Attention-is-all-you-need-implementation-from-scratch
f5eb591d169cbef3ef8b066d8d462fee11badc3b
[ "MIT" ]
null
null
null
Decoder.py
gokulsg/Attention-is-all-you-need-implementation-from-scratch
f5eb591d169cbef3ef8b066d8d462fee11badc3b
[ "MIT" ]
null
null
null
import torch import torch.nn as nn from DecoderLayer import DecoderLayer import math
42.679245
122
0.597259
07c821ec1109c1e7667cac57580d2ee557fe694c
901
py
Python
salt/grains/nxos.py
babs/salt
c536ea716d5308880b244e7980f4b659d86fc104
[ "Apache-2.0" ]
9,425
2015-01-01T05:59:24.000Z
2022-03-31T20:44:05.000Z
salt/grains/nxos.py
babs/salt
c536ea716d5308880b244e7980f4b659d86fc104
[ "Apache-2.0" ]
33,507
2015-01-01T00:19:56.000Z
2022-03-31T23:48:20.000Z
salt/grains/nxos.py
babs/salt
c536ea716d5308880b244e7980f4b659d86fc104
[ "Apache-2.0" ]
5,810
2015-01-01T19:11:45.000Z
2022-03-31T02:37:20.000Z
""" Grains for Cisco NX-OS minions .. versionadded:: 2016.11.0 For documentation on setting up the nxos proxy minion look in the documentation for :mod:`salt.proxy.nxos<salt.proxy.nxos>`. """ import logging import salt.utils.nxos import salt.utils.platform from salt.exceptions import NxosClientError log = logging.getLogger(__name__) __proxyenabled__ = ["nxos"] __virtualname__ = "nxos"
21.97561
79
0.675916
07c8e1478041c57a7ff67d2397fd305a48517875
3,356
py
Python
tmsproviderapisdk/tms_device.py
tvip/tmsproviderapisdk
f385ddb0d7e87e7a62d1caef3e2c9769e844a4a1
[ "MIT" ]
null
null
null
tmsproviderapisdk/tms_device.py
tvip/tmsproviderapisdk
f385ddb0d7e87e7a62d1caef3e2c9769e844a4a1
[ "MIT" ]
null
null
null
tmsproviderapisdk/tms_device.py
tvip/tmsproviderapisdk
f385ddb0d7e87e7a62d1caef3e2c9769e844a4a1
[ "MIT" ]
null
null
null
from typing import List, Optional, Tuple from tmsproviderapisdk.tms_extended_model import TmsExtendedModel
40.926829
120
0.598033
07ca1045f3d1ae1412959e210adfbad3cf54eff0
7,676
py
Python
fealty/fields.py
eddiejessup/fealty
03745eb98d85bc2a5d08920773ab9c4515462d30
[ "BSD-3-Clause" ]
null
null
null
fealty/fields.py
eddiejessup/fealty
03745eb98d85bc2a5d08920773ab9c4515462d30
[ "BSD-3-Clause" ]
null
null
null
fealty/fields.py
eddiejessup/fealty
03745eb98d85bc2a5d08920773ab9c4515462d30
[ "BSD-3-Clause" ]
null
null
null
""" A class hierarchy relating to fields of all kinds. """ from __future__ import print_function, division import numpy as np from ciabatta.meta import make_repr_str from fealty import lattice, field_numerics, walled_field_numerics class Field(Space): def density_field(self, r): return density(r, self.L, self.dx) def r_to_i(self, r): return lattice.r_to_i(r, self.L, self.dx) def i_to_r(self, i): return lattice.i_to_r(i, self.L, self.dx) def __repr__(self): fs = [('L', self.L), ('dim', self.dim), ('dx', self.dx)] return make_repr_str(self, fs) class Scalar(Field): class Diffusing(Scalar): class WalledScalar(Scalar): # Note, inheritance order matters to get walled grad & laplacian call # (see diamond problem on wikipedia and how python handles it)
29.186312
74
0.60839
07cb2143ea47874c58c2e1ce2165dfb24f78e2e2
583
py
Python
examples/example_django/example_django/asgi.py
cpascariello/aleph-vm
1b4920bec211ef3bd379e9359f57f06b9308c1a1
[ "MIT" ]
19
2021-04-13T14:01:06.000Z
2022-02-19T00:45:05.000Z
examples/example_django/example_django/asgi.py
cpascariello/aleph-vm
1b4920bec211ef3bd379e9359f57f06b9308c1a1
[ "MIT" ]
65
2021-04-14T09:02:24.000Z
2022-03-29T07:56:03.000Z
examples/example_django/example_django/asgi.py
cpascariello/aleph-vm
1b4920bec211ef3bd379e9359f57f06b9308c1a1
[ "MIT" ]
9
2021-06-01T23:03:28.000Z
2022-02-17T20:24:57.000Z
""" ASGI config for example_django project. It exposes the ASGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/ """ import os from django.core.asgi import get_asgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_django.settings") application = get_asgi_application() os.system("/usr/bin/python3 /opt/code/manage.py migrate") os.system("/usr/bin/python3 /opt/code/manage.py " "loaddata /opt/code/blog/fixtures/default_articles.json")
26.5
78
0.766724
07cc54388c8061e52f8dc1aa33c14d904afe5143
3,964
py
Python
lectures/extensions/hyperbolic_discounting/replication_code/src/analysis/get_bivariate_distr_data.py
loikein/ekw-lectures
a2f5436f10515ab26eab323fca8c37c91bdc5dcd
[ "MIT" ]
4
2019-11-15T15:21:27.000Z
2020-07-08T15:04:30.000Z
lectures/extensions/hyperbolic_discounting/replication_code/src/analysis/get_bivariate_distr_data.py
loikein/ekw-lectures
a2f5436f10515ab26eab323fca8c37c91bdc5dcd
[ "MIT" ]
9
2019-11-18T15:54:36.000Z
2020-07-14T13:56:53.000Z
lectures/extensions/hyperbolic_discounting/replication_code/src/analysis/get_bivariate_distr_data.py
loikein/ekw-lectures
a2f5436f10515ab26eab323fca8c37c91bdc5dcd
[ "MIT" ]
3
2021-01-25T15:41:30.000Z
2021-09-21T08:51:36.000Z
"""Generate values of Method of Simulated Moments criterion function. Given observed moments and weighting matrix in `OUT_ANALYSIS`, "msm_estimation", generate values of Method of Simulated Moments criterion function for combinations of discount factor and present bias values. The goal is to study the bivariate distribution of the time preference parameters around the combination of true parameter values. """ import itertools import numpy as np import pandas as pd import respy as rp import yaml from bld.project_paths import project_paths_join as ppj from src.library.compute_moments import _replace_nans from src.library.compute_moments import calc_restricted_choice_probabilities from src.library.compute_moments import calc_restricted_wage_distribution from src.library.compute_moments import calc_unrestricted_choice_probabilities from src.library.compute_moments import calc_unrestricted_wage_distribution from src.library.compute_moments import calc_very_restricted_choice_probabilities from src.library.compute_moments import calc_very_restricted_wage_distribution from src.library.housekeeping import _load_pickle from src.library.housekeeping import _temporary_working_directory from tqdm import tqdm def get_bivariate_distribution(params, crit_func, grid_delta, grid_beta): """Compute value of criterion function. Args: params (pd.DataFrame): DataFrame containing model parameters. crit_func (dict): Dictionary containing model options. grid_delta (np.array): Values of discount factor. grid_beta (np.array): Values of present-bias parameter. Returns: pd.DataFrame """ results = [] for beta, delta in tqdm(itertools.product(grid_beta, grid_delta)): params_ = params.copy() params_.loc[("beta", "beta"), "value"] = beta params_.loc[("delta", "delta"), "value"] = delta val = crit_func(params_) result = {"beta": beta, "delta": delta, "val": val} results.append(result) return pd.DataFrame.from_dict(results) if __name__ == "__main__": # load params params = pd.read_csv( ppj("IN_MODEL_SPECS", "params_hyp.csv"), sep=";", index_col=["category", "name"], ) params["value"] = params["value"].astype(float) # load options with open(ppj("IN_MODEL_SPECS", "options_hyp.yaml")) as options: options = yaml.safe_load(options) # get empirical moments empirical_moments = _load_pickle(ppj("OUT_ANALYSIS", "msm_estimation", "moments_hyp.pickle")) # get weighting matrix weighting_matrix = _load_pickle( ppj("OUT_ANALYSIS", "msm_estimation", "weighting_matrix_hyp.pickle") ) calc_moments = { "Choice Probabilities Very Restricted": calc_very_restricted_choice_probabilities, "Choice Probabilities Restricted": calc_restricted_choice_probabilities, "Choice Probabilities Unrestricted": calc_unrestricted_choice_probabilities, "Wage Distribution Very Restricted": calc_very_restricted_wage_distribution, "Wage Distribution Restricted": calc_restricted_wage_distribution, "Wage Distribution Unrestricted": calc_unrestricted_wage_distribution, } with _temporary_working_directory(snippet="heatmap"): # get criterion function weighted_sum_squared_errors = rp.get_moment_errors_func( params=params, options=options, calc_moments=calc_moments, replace_nans=_replace_nans, empirical_moments=empirical_moments, weighting_matrix=weighting_matrix, ) # get bivariate distribution results results = get_bivariate_distribution( crit_func=weighted_sum_squared_errors, params=params, grid_delta=np.arange(0.945, 0.9625, 0.0025), grid_beta=np.arange(0.75, 1.05, 0.01), ) results.to_csv(ppj("OUT_ANALYSIS", "heatmap.csv"))
36.703704
97
0.726791
07cdddcbeb3cb4de7a49a05fd2f00ba890200e08
180
py
Python
space_game/events/KeyPressedEvent.py
Iwomichu/probable-giggle
2af5ed83a60d65ec9d509c217cb5fcb880d5dbcc
[ "MIT" ]
1
2020-11-30T11:21:21.000Z
2020-11-30T11:21:21.000Z
space_game/events/KeyPressedEvent.py
Iwomichu/probable-giggle
2af5ed83a60d65ec9d509c217cb5fcb880d5dbcc
[ "MIT" ]
5
2020-11-03T16:46:49.000Z
2021-01-24T14:29:24.000Z
space_game/events/KeyPressedEvent.py
Iwomichu/probable-giggle
2af5ed83a60d65ec9d509c217cb5fcb880d5dbcc
[ "MIT" ]
null
null
null
from dataclasses import dataclass

from space_game.domain_names import KeyId
from space_game.events.Event import Event


@dataclass
class KeyPressedEvent(Event):
    key_id: KeyId
18
41
0.822222
07cebbfa2f7886d8fbfa5e82f77ae6da2ef52744
703
py
Python
src/oncall/messengers/teams_messenger.py
navoday-91/oncall
0a977f06bbf308978d0d2c2b46e0aca23937ca9a
[ "BSD-2-Clause" ]
857
2017-05-03T00:59:10.000Z
2022-03-29T06:45:23.000Z
src/oncall/messengers/teams_messenger.py
navoday-91/oncall
0a977f06bbf308978d0d2c2b46e0aca23937ca9a
[ "BSD-2-Clause" ]
142
2017-05-03T02:00:58.000Z
2022-03-25T20:58:11.000Z
src/oncall/messengers/teams_messenger.py
navoday-91/oncall
0a977f06bbf308978d0d2c2b46e0aca23937ca9a
[ "BSD-2-Clause" ]
218
2017-05-03T02:04:56.000Z
2022-03-25T18:28:04.000Z
import pymsteams import logging from oncall.constants import TEAMS_SUPPORT
30.565217
91
0.651494
07cf43823d8f9a2b976bcf3456b4ba03ebef67d9
11,632
py
Python
python/chronos/src/bigdl/chronos/autots/model/auto_prophet.py
Laniakea94/BigDL
4d01734086dda893a7f08ba53251dc3c5c8ecfd1
[ "Apache-2.0" ]
3
2021-07-14T01:28:47.000Z
2022-03-02T01:16:32.000Z
python/chronos/src/bigdl/chronos/autots/model/auto_prophet.py
liangs6212/BigDL
3c89ff7e8bbdc713110536c18099506811cd2b3a
[ "Apache-2.0" ]
null
null
null
python/chronos/src/bigdl/chronos/autots/model/auto_prophet.py
liangs6212/BigDL
3c89ff7e8bbdc713110536c18099506811cd2b3a
[ "Apache-2.0" ]
null
null
null
# +
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import pandas as pd
import warnings

from bigdl.chronos.model.prophet import ProphetBuilder, ProphetModel
from bigdl.chronos.autots.utils import recalculate_n_sampling
# -
48.066116
121
0.616145
07cfa7c073692f7ff81f78defb2e3541ec3803be
13,313
py
Python
nf/flows.py
arita37/normalizing-flows
c9896656bfd2007b0c17b801c0fe068560127301
[ "MIT" ]
1
2019-11-11T05:40:30.000Z
2019-11-11T05:40:30.000Z
nf/flows.py
arita37/normalizing-flows
c9896656bfd2007b0c17b801c0fe068560127301
[ "MIT" ]
null
null
null
nf/flows.py
arita37/normalizing-flows
c9896656bfd2007b0c17b801c0fe068560127301
[ "MIT" ]
null
null
null
import math
import numpy as np
import scipy as sp
import scipy.linalg
import torch
import torch.nn as nn
import torch.nn.init as init
import torch.nn.functional as F

from nf.utils import unconstrained_RQS

# supported non-linearities: note that the function must be invertible
functional_derivatives = {
    torch.tanh: lambda x: 1 - torch.pow(torch.tanh(x), 2),
    # for x < 0, leaky_relu has slope +0.01 (the default negative_slope),
    # so its derivative there is +0.01, not -0.01
    F.leaky_relu: lambda x: (x > 0).type(torch.FloatTensor) + \
                            (x < 0).type(torch.FloatTensor) * 0.01,
    F.elu: lambda x: (x > 0).type(torch.FloatTensor) + \
                     (x < 0).type(torch.FloatTensor) * torch.exp(x)
}
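The table can be sanity-checked against autograd; for example the tanh entry (torch only, no nf imports needed):

import torch

x = torch.linspace(-2.0, 2.0, steps=9, requires_grad=True)
torch.tanh(x).backward(torch.ones_like(x))
analytic = 1 - torch.pow(torch.tanh(x.detach()), 2)
print(torch.allclose(x.grad, analytic))   # True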
35.312997
80
0.533539
07d1d3b232129762c8438710d6931bc551a36122
5,286
py
Python
ioos_qc/config_creator/fx_parser.py
HakaiInstitute/ioos_qc
dfb28ee404a17c8355747b792fba0471093953c4
[ "Apache-2.0" ]
null
null
null
ioos_qc/config_creator/fx_parser.py
HakaiInstitute/ioos_qc
dfb28ee404a17c8355747b792fba0471093953c4
[ "Apache-2.0" ]
null
null
null
ioos_qc/config_creator/fx_parser.py
HakaiInstitute/ioos_qc
dfb28ee404a17c8355747b792fba0471093953c4
[ "Apache-2.0" ]
1
2021-01-20T23:20:06.000Z
2021-01-20T23:20:06.000Z
# module pyparsing.py # # Copyright (c) 2003-2019 Paul T. McGuire # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # from pyparsing import ( Literal, Word, Group, Forward, alphas, alphanums, Regex, CaselessKeyword, Suppress, delimitedList, ) import math import operator # map operator symbols to corresponding arithmetic operations epsilon = 1e-12 opn = { "+": operator.add, "-": operator.sub, "*": operator.mul, "/": operator.truediv, "^": operator.pow, } fn = { "sin": math.sin, "cos": math.cos, "tan": math.tan, "exp": math.exp, "abs": abs, "trunc": lambda a: int(a), "round": round, "sgn": lambda a: -1 if a < -epsilon else 1 if a > epsilon else 0, } exprStack = [] def BNF(): """ expop :: '^' multop :: '*' | '/' addop :: '+' | '-' integer :: ['+' | '-'] '0'..'9'+ atom :: PI | E | real | fn '(' expr ')' | '(' expr ')' factor :: atom [ expop factor ]* term :: factor [ multop factor ]* expr :: term [ addop term ]* """ # use CaselessKeyword for e and pi, to avoid accidentally matching # functions that start with 'e' or 'pi' (such as 'exp'); Keyword # and CaselessKeyword only match whole words e = CaselessKeyword("E") pi = CaselessKeyword("PI") # fnumber = Combine(Word("+-"+nums, nums) + # Optional("." + Optional(Word(nums))) + # Optional(e + Word("+-"+nums, nums))) # or use provided pyparsing_common.number, but convert back to str: # fnumber = ppc.number().addParseAction(lambda t: str(t[0])) fnumber = Regex(r"[+-]?\d+(?:\.\d*)?(?:[eE][+-]?\d+)?") ident = Word(alphas, alphanums + "_$") plus, minus, mult, div = map(Literal, "+-*/") lpar, rpar = map(Suppress, "()") addop = plus | minus multop = mult | div expop = Literal("^") expr = Forward() expr_list = delimitedList(Group(expr)) # add parse action that replaces the function identifier with a (name, number of args) tuple fn_call = (ident + lpar - Group(expr_list) + rpar).setParseAction( lambda t: t.insert(0, (t.pop(0), len(t[0]))) ) atom = ( addop[...] + ( (fn_call | pi | e | fnumber | ident).setParseAction(push_first) | Group(lpar + expr + rpar) ) ).setParseAction(push_unary_minus) # by defining exponentiation as "atom [ ^ factor ]..." instead of "atom [ ^ atom ]...", we get right-to-left # exponents, instead of left-to-right that is, 2^3^2 = 2^(3^2), not (2^3)^2. factor = Forward() factor <<= atom + (expop + factor).setParseAction(push_first)[...] term = factor + (multop + factor).setParseAction(push_first)[...] expr <<= term + (addop + term).setParseAction(push_first)[...] 
    bnf = expr
    return bnf


def push_first(toks):
    exprStack.append(toks[0])


def push_unary_minus(toks):
    for t in toks:
        if t == "-":
            exprStack.append("unary -")
        else:
            break


# Stack evaluator following pyparsing's fourFn example, extended so that bare
# identifiers are looked up in the provided stats mapping.
def evaluate_stack(s, stats):
    op, num_args = s.pop(), 0
    if isinstance(op, tuple):
        op, num_args = op
    if op == "unary -":
        return -evaluate_stack(s, stats)
    if op in "+-*/^":
        # operands were pushed onto the stack in reverse order
        op2 = evaluate_stack(s, stats)
        op1 = evaluate_stack(s, stats)
        return opn[op](op1, op2)
    elif op == "PI":
        return math.pi
    elif op == "E":
        return math.e
    elif op in fn:
        # function arguments were pushed onto the stack in reverse order
        args = reversed([evaluate_stack(s, stats) for _ in range(num_args)])
        return fn[op](*args)
    elif op[0].isalpha():
        if op in stats:
            return stats[op]
        raise Exception("invalid identifier '%s'" % op)
    else:
        # try to evaluate as int first, then as float if int fails
        try:
            return int(op)
        except ValueError:
            return float(op)


def eval_fx(fx, stats):
    """Given fx and stats ('min', 'max', 'mean', 'std') return the result"""
    del exprStack[:]  # reset so repeated calls do not accumulate operands
    _ = BNF().parseString(fx, parseAll=True)
    val = evaluate_stack(exprStack[:], stats)
    return val
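Usage, given a stats mapping of the kind the docstring describes (the values here are made up):

stats = {"min": 2.0, "max": 10.0, "mean": 6.0, "std": 1.5}
print(eval_fx("mean + 2 * std", stats))        # 9.0
print(eval_fx("round(max - min) ^ 2", stats))  # 64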
31.278107
112
0.600643
07d274563189ebc57a38c1571e12c09ed638080d
18,828
py
Python
scanlogger.py
pythonhacker/pyscanlogd
64d6ad38127243e5c422be7f899ecfa802e1ad21
[ "BSD-3-Clause" ]
1
2021-04-03T22:15:06.000Z
2021-04-03T22:15:06.000Z
scanlogger.py
pythonhacker/pyscanlogd
64d6ad38127243e5c422be7f899ecfa802e1ad21
[ "BSD-3-Clause" ]
null
null
null
scanlogger.py
pythonhacker/pyscanlogd
64d6ad38127243e5c422be7f899ecfa802e1ad21
[ "BSD-3-Clause" ]
2
2020-12-18T20:06:21.000Z
2021-04-08T02:47:40.000Z
# -- coding: utf-8 #!/usr/bin/env python """ pyscanlogger: Port scan detector/logger tool, inspired by scanlogd {http://www.openwall.com/scanlogd} but with added ability to log slow port-scans. Features 1. Detects all stealth (half-open) and full-connect scans. 2. Detects Idle scan and logs it correctly using correlation! 3. Detects SCTP scan. 4. Detects slow port-scans also. Modification History Mar 17 2010 - Cleaned up code to publish to google. Apr 8 2010 - Better detection of TCP full-connect scan without spurious and incorrect logging. Better logging functions. Licensed under GNU GPL v3.0. """ import sys, os import dpkt, pcap import struct import socket import time import threading import optparse import entry import timerlist __author__ = "pythonhacker" __maintainer__ = "pythonhacker" __version__ = '0.5.1' __modified__ = 'Thu Apr 8 19:21:11 IST 2010' # UDP - in progress... SCAN_TIMEOUT = 5 WEIGHT_THRESHOLD = 25 PIDFILE="/var/run/pyscanlogger.pid" # TCP flag constants TH_URG=dpkt.tcp.TH_URG TH_ACK=dpkt.tcp.TH_ACK TH_PSH=dpkt.tcp.TH_PUSH TH_RST=dpkt.tcp.TH_RST TH_SYN=dpkt.tcp.TH_SYN TH_FIN=dpkt.tcp.TH_FIN # Protocols TCP=dpkt.tcp.TCP UDP=dpkt.udp.UDP SCTP=dpkt.sctp.SCTP get_timestamp = lambda : time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()) ip2quad = lambda x: socket.inet_ntoa(struct.pack('I', x)) scan_ip2quad = lambda scan: map(ip2quad, [scan.src, scan.dst]) if __name__ == '__main__': main()
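A sketch of the kind of per-packet test such a detector applies, using the flag constants above to classify a probe (simplified; this is not scanlogd's exact state machine):

def classify_probe(flags):
    # Rough classification of a single TCP probe by its flag bits.
    if flags == 0:
        return 'NULL scan'
    if (flags & (TH_FIN | TH_PSH | TH_URG)) == (TH_FIN | TH_PSH | TH_URG):
        return 'XMAS scan'
    if (flags & TH_SYN) and not (flags & (TH_ACK | TH_RST)):
        return 'half-open (SYN) probe'
    return 'normal traffic'

print(classify_probe(TH_SYN))                    # half-open (SYN) probe
print(classify_probe(TH_FIN | TH_PSH | TH_URG))  # XMAS scan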
37.8833
132
0.468929
07d2a6693b06381833978cbf344d430f2c2b518b
4,539
py
Python
src/util/util.py
ashleylst/DSDmodel
4276c832e0335539aef2ae2b33e23719957a3f08
[ "MIT" ]
1
2021-07-26T17:30:16.000Z
2021-07-26T17:30:16.000Z
src/util/util.py
ashleylst/DSDmodel
4276c832e0335539aef2ae2b33e23719957a3f08
[ "MIT" ]
null
null
null
src/util/util.py
ashleylst/DSDmodel
4276c832e0335539aef2ae2b33e23719957a3f08
[ "MIT" ]
2
2020-06-29T15:43:22.000Z
2021-05-12T08:25:03.000Z
from itertools import combinations
import copy


def check_following_migration(edges, p=0):
    """Group edges that form consecutive (following) branch-migration steps.

    :param edges: sequence of edges; edges[i][p] is a pair of
        (strand, position) tuples
    :param p: which element of each edge holds the (strand, position) pair
    :return: list of groups (lists) of edge indices belonging to one migration
    """
    e = copy.copy(edges)
    visited = [False for _ in e]
    miggroup = []
    cnt = -1

    for i in range(0, len(e)):
        if visited[i]:
            continue

        e[i] = list(e[i])
        e[i][p] = list(e[i][p])
        t1 = sorted(e[i][p], key=lambda tup: tup[0])

        if not visited[i]:
            visited[i] = True
            miggroup.append([i])
            cnt += 1

        for j in range(0, len(e)):
            if j != i and not visited[j]:
                e[j] = list(e[j])
                e[j][p] = list(e[j][p])
                t2 = sorted(e[j][p], key=lambda tup: tup[0])

                if (t2[0][0] != t1[0][0]) or (t2[1][0] != t1[1][0]):
                    continue

                for num in range(0, len(miggroup[cnt])):
                    t1 = sorted(e[miggroup[cnt][num]][p],
                                key=lambda tup: tup[0])
                    if (t1[0][1] + 1 == t2[0][1] and t1[1][1] - 1 == t2[1][1]) \
                            or (t1[0][1] - 1 == t2[0][1]
                                and t1[1][1] + 1 == t2[1][1]):
                        visited[j] = True
                        miggroup[cnt].append(j)
                        break

    return miggroup


def get_absdist(domain1, domain2):
    """Absolute distance between the positions of two domains.

    :param domain1: (strand, position) tuple
    :param domain2: (strand, position) tuple
    :return: absolute difference of the two positions
    """
    return abs(domain1[1] - domain2[1])


def get_closet_domain_to_target(target, domains):
    """Find the domain closest to the target domain.

    :param target: (strand, position) tuple
    :param domains: iterable of (strand, position) tuples
    :return: the domain with the smallest absolute distance to target
    """
    closet = 10000
    closetd = ()
    for i in domains:
        dist = get_absdist(i, target)
        if dist < closet:
            closet = dist
            closetd = i
    return closetd


def get_domains_on_2sides(target1, target2, domains1, domains2):
    """Pick the domain on each side that is closest to its matching target.

    :param target1: (strand, position) tuple
    :param target2: (strand, position) tuple
    :param domains1: domains on the first side
    :param domains2: domains on the second side
    :return: the closest domain on each side
    """
    if target1[0] == domains1[0][0]:
        closetd1 = get_closet_domain_to_target(target1, domains1)
    elif target2[0] == domains1[0][0]:
        closetd1 = get_closet_domain_to_target(target2, domains1)

    if target1[0] == domains2[0][0]:
        closetd2 = get_closet_domain_to_target(target1, domains2)
    elif target2[0] == domains2[0][0]:
        closetd2 = get_closet_domain_to_target(target2, domains2)

    return closetd1, closetd2


def get_closest_target(domains, targets):
    """Find the target closest to either end of the sorted domains.

    :param domains: iterable of (strand, position) tuples
    :param targets: iterable of candidate (strand, position) tuples
    :return: the closest target
    """
    domains = sorted(domains, key=lambda tup: tup[1])
    mindist = 10000
    mint = None
    for t in targets:
        dist = min(get_absdist(t, domains[0]),
                   get_absdist(t, domains[len(domains) - 1]))
        if dist < mindist:
            # keep track of the smallest distance seen so far
            mindist = dist
            mint = t
    return mint
22.695
89
0.516193
07d5b427e69bdc09287f1c66c02797e0db0b274b
1,218
py
Python
examples/question_answering/qa_sparse_train.py
ebell495/nn_pruning
41263ab898117a639f3f219c23a4cecc8bc0e3f3
[ "Apache-2.0" ]
250
2021-02-22T15:50:04.000Z
2022-03-31T08:12:02.000Z
examples/question_answering/qa_sparse_train.py
vuiseng9/nn_pruning
8f4a14dd63d621483cbc1bc4eb34600d66e9e71b
[ "Apache-2.0" ]
28
2021-02-22T15:54:34.000Z
2022-03-17T08:57:38.000Z
examples/question_answering/qa_sparse_train.py
vuiseng9/nn_pruning
8f4a14dd63d621483cbc1bc4eb34600d66e9e71b
[ "Apache-2.0" ]
31
2021-02-22T16:07:17.000Z
2022-03-28T09:17:24.000Z
# coding=utf-8 # Copyright 2020 The HuggingFace Team All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Sparse Fine-tuning the library models for question answering. """ # You can also adapt this script on your own question answering task. Pointers for this are left as comments. from nn_pruning.sparse_trainer import SparseTrainer from .qa_train import QATrainer # SparseTrainer should appear first in the base classes, as its functions must override QATrainer and its base classes (Trainer)
43.5
128
0.769294
07d6951331a7c029a2698e9098ae813a023bef49
10,729
py
Python
ironic/drivers/modules/ilo/raid.py
armohamm/ironic
21093ca886ed736a7a25bf5e71e05d41e132fd2f
[ "Apache-2.0" ]
2
2019-06-17T21:37:53.000Z
2020-07-11T03:58:39.000Z
ironic/drivers/modules/ilo/raid.py
armohamm/ironic
21093ca886ed736a7a25bf5e71e05d41e132fd2f
[ "Apache-2.0" ]
5
2019-08-14T06:46:03.000Z
2021-12-13T20:01:25.000Z
ironic/drivers/modules/ilo/raid.py
armohamm/ironic
21093ca886ed736a7a25bf5e71e05d41e132fd2f
[ "Apache-2.0" ]
6
2019-06-13T12:49:33.000Z
2021-04-17T16:33:19.000Z
# Copyright 2018 Hewlett Packard Enterprise Development LP # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ iLO5 RAID specific methods """ from ironic_lib import metrics_utils from oslo_log import log as logging from oslo_utils import importutils from ironic.common import exception from ironic.common.i18n import _ from ironic.common import raid from ironic.common import states from ironic.conductor import utils as manager_utils from ironic import conf from ironic.drivers import base from ironic.drivers.modules import deploy_utils from ironic.drivers.modules.ilo import common as ilo_common LOG = logging.getLogger(__name__) CONF = conf.CONF METRICS = metrics_utils.get_metrics_logger(__name__) ilo_error = importutils.try_import('proliantutils.exception')
45.461864
79
0.609004
07d7992d7ae8299b452c378aa6d4664a38bab354
1,252
py
Python
src/petronia/aid/bootstrap/__init__.py
groboclown/petronia
486338023d19cee989e92f0c5692680f1a37811f
[ "MIT" ]
19
2017-06-21T10:28:24.000Z
2021-12-31T11:49:28.000Z
src/petronia/aid/bootstrap/__init__.py
groboclown/petronia
486338023d19cee989e92f0c5692680f1a37811f
[ "MIT" ]
10
2016-11-11T18:57:57.000Z
2021-02-01T15:33:43.000Z
src/petronia/aid/bootstrap/__init__.py
groboclown/petronia
486338023d19cee989e92f0c5692680f1a37811f
[ "MIT" ]
3
2017-09-17T03:29:35.000Z
2019-06-03T10:43:08.000Z
""" Common Petronia imports for bootstrap parts of an extension. This should be imported along with the `simp` module. """ from ...base.bus import ( EventBus, ListenerRegistrar, ListenerSetup, QueuePriority, ExtensionMetadataStruct, register_event, EVENT_WILDCARD, TARGET_WILDCARD, QUEUE_EVENT_NORMAL, QUEUE_EVENT_HIGH, QUEUE_EVENT_IO, QUEUE_EVENT_TYPES ) from ...base.participant import ( create_singleton_identity, NOT_PARTICIPANT, ) from ...base.events import ( # These are generally just bootstrap events. DisposeCompleteEvent, as_dispose_complete_listener, RequestDisposeEvent, as_request_dispose_listener, SystemStartedEvent, as_system_started_listener, ) from ...base.events.bus import ( EventProtectionModel, GLOBAL_EVENT_PROTECTION, INTERNAL_EVENT_PROTECTION, PRODUCE_EVENT_PROTECTION, CONSUME_EVENT_PROTECTION, REQUEST_EVENT_PROTECTION, RESPONSE_EVENT_PROTECTION, ) from ...core.extensions.api import ANY_VERSION from ...core.shutdown.api import ( SystemShutdownEvent, as_system_shutdown_listener, SystemShutdownFinalizeEvent, as_system_shutdown_finalize_listener, TARGET_ID_SYSTEM_SHUTDOWN, )
19.261538
60
0.747604
07d9159827c315605286d46a4f7de494b7d7489e
3,251
py
Python
examples/dump-properties.py
zachjweiner/pyopencl
4e2e4f3150c331680e6d9e36c59290411e4a0c40
[ "Apache-2.0" ]
7
2020-11-23T09:26:17.000Z
2021-01-26T08:23:30.000Z
examples/dump-properties.py
zachjweiner/pyopencl
4e2e4f3150c331680e6d9e36c59290411e4a0c40
[ "Apache-2.0" ]
null
null
null
examples/dump-properties.py
zachjweiner/pyopencl
4e2e4f3150c331680e6d9e36c59290411e4a0c40
[ "Apache-2.0" ]
1
2020-12-14T05:12:10.000Z
2020-12-14T05:12:10.000Z
import pyopencl as cl
from optparse import OptionParser

parser = OptionParser()
parser.add_option("-s", "--short", action="store_true",
                  help="don't print all device properties")

(options, args) = parser.parse_args()


def print_info(obj, info_cls):
    # Minimal property dump: query every attribute enumerated by the info
    # class (cl.platform_info or cl.device_info).
    for info_name in sorted(dir(info_cls)):
        if info_name.startswith("_") or info_name == "to_string":
            continue
        info = getattr(info_cls, info_name)
        try:
            info_value = obj.get_info(info)
        except cl.Error:
            info_value = "<error>"
        print("{}: {}".format(info_name, info_value))


def str_chd_type(chdtype):
    # Human-readable name for an image channel data type.
    return cl.channel_type.to_string(
        chdtype, "<unknown channel data type 0x%x>")


for platform in cl.get_platforms():
    print(75*"=")
    print(platform)
    print(75*"=")
    if not options.short:
        print_info(platform, cl.platform_info)

    for device in platform.get_devices():
        if not options.short:
            print(75*"-")
        print(device)
        if not options.short:
            print(75*"-")
            print_info(device, cl.device_info)
            ctx = cl.Context([device])
            for mf in [
                    cl.mem_flags.READ_ONLY,
                    #cl.mem_flags.READ_WRITE,
                    #cl.mem_flags.WRITE_ONLY
                    ]:
                for itype in [
                        cl.mem_object_type.IMAGE2D,
                        cl.mem_object_type.IMAGE3D
                        ]:
                    try:
                        formats = cl.get_supported_image_formats(ctx, mf, itype)
                    except cl.Error:
                        formats = "<error>"
                    else:
                        formats = ", ".join(
                                "{}-{}".format(
                                    cl.channel_order.to_string(iform.channel_order,
                                        "<unknown channel order 0x%x>"),
                                    str_chd_type(iform.channel_data_type))
                                for iform in formats)

                    print("{} {} FORMATS: {}\n".format(
                            cl.mem_object_type.to_string(itype),
                            cl.mem_flags.to_string(mf),
                            formats))
            del ctx
38.702381
83
0.450015
07da573d0a32afbfa2ea160efcad1a4e21eef4c3
42,106
py
Python
interfaces/acados_template/acados_template/acados_ocp_solver.py
jgillis/acados
3119e2dda636a8358fbd52247eb0163a167cbc97
[ "BSD-2-Clause" ]
1
2020-07-30T10:54:35.000Z
2020-07-30T10:54:35.000Z
interfaces/acados_template/acados_template/acados_ocp_solver.py
jgillis/acados
3119e2dda636a8358fbd52247eb0163a167cbc97
[ "BSD-2-Clause" ]
null
null
null
interfaces/acados_template/acados_template/acados_ocp_solver.py
jgillis/acados
3119e2dda636a8358fbd52247eb0163a167cbc97
[ "BSD-2-Clause" ]
null
null
null
# -*- coding: future_fstrings -*-
#
# Copyright 2019 Gianluca Frison, Dimitris Kouzoupis, Robin Verschueren,
# Andrea Zanelli, Niels van Duijkeren, Jonathan Frey, Tommaso Sartor,
# Branimir Novoselnik, Rien Quirynen, Rezart Qelibari, Dang Doan,
# Jonas Koenemann, Yutao Chen, Tobias Schöls, Jonas Schlagenhauf, Moritz Diehl
#
# This file is part of acados.
#
# The 2-Clause BSD License
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.;
#

import sys, os, json

import numpy as np

from ctypes import *
from casadi import CasadiMeta, Function, SX

from copy import deepcopy

from .generate_c_code_explicit_ode import generate_c_code_explicit_ode
from .generate_c_code_implicit_ode import generate_c_code_implicit_ode
from .generate_c_code_gnsf import generate_c_code_gnsf
from .generate_c_code_constraint import generate_c_code_constraint
from .generate_c_code_nls_cost import generate_c_code_nls_cost
from .generate_c_code_external_cost import generate_c_code_external_cost
from .acados_ocp import AcadosOcp
from .acados_model import acados_model_strip_casadi_symbolics
from .utils import is_column, is_empty, casadi_length, render_template, acados_class2dict,\
    format_class_dict, ocp_check_against_layout, np_array_to_list, make_model_consistent,\
    set_up_imported_gnsf_model
40.525505
175
0.644682
07da9e40651ed6fdc8a2ae109d81653125f5c378
5,128
py
Python
noise/estimation/PCA/analyticNoiseEstimation_PCA.py
MaikWischow/Camera-Condition-Monitoring
910f9192d6309a6803ab76c346269fa5029c38e6
[ "MIT" ]
3
2021-12-10T14:52:23.000Z
2022-03-22T08:48:08.000Z
noise/estimation/PCA/analyticNoiseEstimation_PCA.py
MaikWischow/Camera-Condition-Monitoring
910f9192d6309a6803ab76c346269fa5029c38e6
[ "MIT" ]
null
null
null
noise/estimation/PCA/analyticNoiseEstimation_PCA.py
MaikWischow/Camera-Condition-Monitoring
910f9192d6309a6803ab76c346269fa5029c38e6
[ "MIT" ]
2
2021-12-13T08:12:52.000Z
2022-01-17T14:00:48.000Z
import numpy as np
import cv2
import sys
import os
import glob


def im2patch(im, pch_size, stride=1):
    '''
    Transform image to patches.
    Input:
        im: 3 x H x W or 1 X H x W image, numpy format
        pch_size: (int, int) tuple or integer
        stride: (int, int) tuple or integer
    '''
    if isinstance(pch_size, tuple):
        pch_H, pch_W = pch_size
    elif isinstance(pch_size, int):
        pch_H = pch_W = pch_size
    else:
        sys.exit('The input of pch_size must be an integer or an int tuple!')

    if isinstance(stride, tuple):
        stride_H, stride_W = stride
    elif isinstance(stride, int):
        stride_H = stride_W = stride
    else:
        sys.exit('The input of stride must be an integer or an int tuple!')

    C, H, W = im.shape
    num_H = len(range(0, H-pch_H+1, stride_H))
    num_W = len(range(0, W-pch_W+1, stride_W))
    num_pch = num_H * num_W
    pch = np.zeros((C, pch_H*pch_W, num_pch), dtype=im.dtype)
    kk = 0
    for ii in range(pch_H):
        for jj in range(pch_W):
            temp = im[:, ii:H-pch_H+ii+1:stride_H, jj:W-pch_W+jj+1:stride_W]
            pch[:, kk, :] = temp.reshape((C, num_pch))
            kk += 1

    return pch.reshape((C, pch_H, pch_W, num_pch))


def noise_estimate(im, pch_size=8):
    '''
    Implementation of the noise level estimation from the following paper:
    Chen G , Zhu F , Heng P A . An Efficient Statistical Method for Image
    Noise Level Estimation[C]// 2015 IEEE International Conference on
    Computer Vision (ICCV). IEEE Computer Society, 2015.
    Input:
        im: the noise image, H x W x 3 or H x W numpy tensor, range [0,1]
        pch_size: patch_size
    Output:
        noise_level: the estimated noise level
    '''

    if im.ndim == 3:
        im = im.transpose((2, 0, 1))
    else:
        im = np.expand_dims(im, axis=0)

    # image to patch
    pch = im2patch(im, pch_size, 3)  # C x pch_size x pch_size x num_pch tensor
    num_pch = pch.shape[3]
    pch = pch.reshape((-1, num_pch))  # d x num_pch matrix
    d = pch.shape[0]

    mu = pch.mean(axis=1, keepdims=True)  # d x 1
    X = pch - mu
    sigma_X = np.matmul(X, X.transpose()) / num_pch
    sig_value, _ = np.linalg.eigh(sigma_X)
    sig_value.sort()

    for ii in range(-1, -d-1, -1):
        tau = np.mean(sig_value[:ii])
        if np.sum(sig_value[:ii] > tau) == np.sum(sig_value[:ii] < tau):
            return np.sqrt(tau)

    # Fallback: no split point found; use the last threshold computed so the
    # function never returns None to callers that multiply the result.
    return np.sqrt(max(tau, 0.0))


def run(imgPath, patchSize, internalNumPatches, dirOut, saveResults=True):
    """
    Estimates the standard deviation of (additive white gaussian) noise of image patches.
    The noise is estimated patch by patch.
    Based on: "An Efficient Statistical Method for Image Noise Level Estimation" (2015)
    :param imgPath: Path to the input image.
    :param patchSize: Image patch size.
    :param internalNumPatches: Internal number of sub-image-patches.
    :param dirOut: Directory where to save the noise estimation results.
    :param saveResults: Whether to save the estimation results or not.
:return: None """ # Load image img = np.array(cv2.imread(imgPath)) try: img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY) img = img / 255.0 h, w = img.shape psize = min(min(patchSize, h), w) psize -= psize % 2 patch_step = psize shift_factor = 2 # Result array estimatedNoiseMap = np.zeros([h, w], dtype=np.int8) rangex = range(0, w, patch_step) rangey = range(0, h, patch_step) for start_x in rangex: for start_y in rangey: end_x = start_x + psize end_y = start_y + psize if end_x > w: end_x = w end_x = shift_factor * ((end_x) // shift_factor) start_x = end_x - psize if end_y > h: end_y = h end_y = shift_factor * ((end_y) // shift_factor) start_y = end_y - psize tileM = img[start_y:end_y, start_x:end_x] h_, w_ = tileM.shape sigma = noise_estimate(tileM, internalNumPatches) * 255.0 estimatedNoiseMap[start_y :start_y + h_, start_x : start_x + w_] = sigma if saveResults: if dirOut is not None: imgName = imgPath.split(os.sep)[-1].split(".")[0] dirOut = os.path.join(dirOut) if not os.path.exists(dirOut): os.makedirs(dirOut) noiseMapPath = os.path.join(dirOut, imgName + ".npz") if not os.path.exists(noiseMapPath): np.savez_compressed(noiseMapPath, estimatedNoiseMap) return estimatedNoiseMap except: return None # Example # if __name__ == '__main__': # dirIn = r"../../../data/udacity/img/GT" # dirOut = r"../../../data/udacity/labels_noise_patchwise/PCA" # imgFileEnding = ".jpg" # for imgPath in glob.glob(os.path.join(dirIn, "*" + imgFileEnding)): # run(imgPath, 128, 8, dirOut)
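# --- Added usage sketch (not part of the original file) ---
# Quick self-check of noise_estimate() on a synthetic image: additive white
# Gaussian noise with a known sigma should be roughly recovered. The sigma
# value and image size below are arbitrary illustration choices.
# if __name__ == '__main__':
#     rng = np.random.default_rng(0)
#     clean = np.full((256, 256), 0.5)
#     sigma_true = 10.0 / 255.0
#     noisy = np.clip(clean + rng.normal(0.0, sigma_true, clean.shape), 0.0, 1.0)
#     sigma_est = noise_estimate(noisy, pch_size=8) * 255.0
#     print('true sigma: 10.0, estimated sigma: %.2f' % sigma_est)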
34.884354
136
0.576248
07dab8d1754575bc1f3f83e4e0cadea3c8dcd3af
8,104
py
Python
src/biotite/application/application.py
claudejrogers/biotite
3635bc9071506ecb85ddd9b1dbe6a430295e060e
[ "BSD-3-Clause" ]
null
null
null
src/biotite/application/application.py
claudejrogers/biotite
3635bc9071506ecb85ddd9b1dbe6a430295e060e
[ "BSD-3-Clause" ]
null
null
null
src/biotite/application/application.py
claudejrogers/biotite
3635bc9071506ecb85ddd9b1dbe6a430295e060e
[ "BSD-3-Clause" ]
null
null
null
# This source code is part of the Biotite package and is distributed
# under the 3-Clause BSD License. Please see 'LICENSE.rst' for further
# information.

__name__ = "biotite.application"
__author__ = "Patrick Kunzmann"
__all__ = ["Application", "AppStateError", "TimeoutError", "VersionError",
           "AppState", "requires_state"]

import abc
import time
from functools import wraps
from enum import Flag, auto


class AppState(Flag):
    """
    This enum type represents the app states of an application.
    """
    # NOTE: the enum body is not included in this record; RUNNING and FINISHED
    # are confirmed by the requires_state() docstring below, the remaining
    # members follow the upstream package.
    CREATED = auto()
    RUNNING = auto()
    CANCELLED = auto()
    FINISHED = auto()
    JOINED = auto()


def requires_state(app_state):
    """
    A decorator for methods of :class:`Application` subclasses that
    raises an :class:`AppStateError` if the method is called while the
    :class:`Application` is not in the specified :class:`AppState`
    `app_state`.

    Parameters
    ----------
    app_state : AppState
        The required app state.

    Examples
    --------
    Raises :class:`AppStateError` when `function` is called,
    if :class:`Application` is not in one of the specified states:

    >>> @requires_state(AppState.RUNNING | AppState.FINISHED)
    ... def function(self):
    ...     pass
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # NOTE: the decorator body is not included in this record; the
            # check below reconstructs the behavior described in the
            # docstring (the `_state` attribute name follows the upstream
            # package).
            instance = args[0]
            if not instance._state & app_state:
                raise AppStateError(
                    f"The application is in {instance._state} state, "
                    f"but {app_state} state is required"
                )
            return func(*args, **kwargs)
        return wrapper

    return decorator


# NOTE: the abstract `Application` base class is not included in this record;
# `clean_up` below is one of its methods.
def clean_up(self):
    """
    Do clean up work after the application terminates.

    PROTECTED: Optionally override when inheriting.
    """
    pass


class AppStateError(Exception):
    """
    Indicate that the application lifecycle was violated.
    """
    pass


class TimeoutError(Exception):
    """
    Indicate that the application's timeout expired.
    """
    pass


class VersionError(Exception):
    """
    Indicate that the application's version is invalid.
    """
    pass
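# --- Added usage sketch (not part of the original file) ---
# Minimal illustration of requires_state() on a hypothetical class; the real
# `Application` base class (elided from this record) manages `_state` itself.
#
#   class _DemoApp:
#       def __init__(self):
#           self._state = AppState.CREATED
#
#       @requires_state(AppState.RUNNING | AppState.FINISHED)
#       def results(self):
#           return 42
#
#   _DemoApp().results()   # raises AppStateError: the app is still CREATED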
31.169231
79
0.604516
07db7794b6d98e1a7fc4b7e5727cce2c98e1474f
8,162
py
Python
python/paddle/tensor/attribute.py
douch/Paddle
81c40722869935d6e897f4b1aeb6e6f67606188a
[ "Apache-2.0" ]
1
2021-12-27T02:40:41.000Z
2021-12-27T02:40:41.000Z
python/paddle/tensor/attribute.py
LiYuRio/Paddle
dbd6e2df9d074973b7ee177e2d6b96ed2318008e
[ "Apache-2.0" ]
null
null
null
python/paddle/tensor/attribute.py
LiYuRio/Paddle
dbd6e2df9d074973b7ee177e2d6b96ed2318008e
[ "Apache-2.0" ]
null
null
null
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

from ..framework import core
from ..fluid.layer_helper import LayerHelper
from ..fluid.data_feeder import check_variable_and_dtype

# TODO: define functions to get tensor attributes
from ..fluid.layers import rank  # noqa: F401
from ..fluid.layers import shape  # noqa: F401
import paddle
from paddle import _C_ops
from paddle.static import Variable
from ..fluid.framework import _in_legacy_dygraph, in_dygraph_mode
# `_complex_to_real_dtype` is used by real() and imag() below, but its import
# was not included in this record; the path here is an assumption.
from ..fluid.framework import _complex_to_real_dtype

__all__ = []


def is_complex(x):
    """Return whether x is a tensor of complex data type (complex64 or complex128).

    Args:
        x (Tensor): The input tensor.

    Returns:
        bool: True if the data type of the input is complex data type, otherwise false.

    Examples:
        .. code-block:: python

            import paddle

            x = paddle.to_tensor([1 + 2j, 3 + 4j])
            print(paddle.is_complex(x))
            # True

            x = paddle.to_tensor([1.1, 1.2])
            print(paddle.is_complex(x))
            # False

            x = paddle.to_tensor([1, 2, 3])
            print(paddle.is_complex(x))
            # False
    """
    if not isinstance(x, (paddle.Tensor, paddle.static.Variable)):
        raise TypeError("Expected Tensor, but received type of x: {}".format(
            type(x)))
    dtype = x.dtype
    is_complex_dtype = (dtype == core.VarDesc.VarType.COMPLEX64 or
                        dtype == core.VarDesc.VarType.COMPLEX128)
    return is_complex_dtype


def is_floating_point(x):
    """
    Returns whether the dtype of `x` is one of paddle.float64, paddle.float32,
    paddle.float16, or paddle.bfloat16.

    Args:
        x (Tensor): The input tensor.

    Returns:
        bool: True if the dtype of `x` is floating type, otherwise false.

    Examples:
        .. code-block:: python

            import paddle

            x = paddle.arange(1., 5., dtype='float32')
            y = paddle.arange(1, 5, dtype='int32')
            print(paddle.is_floating_point(x))
            # True
            print(paddle.is_floating_point(y))
            # False
    """
    if not isinstance(x, (paddle.Tensor, paddle.static.Variable)):
        raise TypeError("Expected Tensor, but received type of x: {}".format(
            type(x)))
    dtype = x.dtype
    is_fp_dtype = (dtype == core.VarDesc.VarType.FP32 or
                   dtype == core.VarDesc.VarType.FP64 or
                   dtype == core.VarDesc.VarType.FP16 or
                   dtype == core.VarDesc.VarType.BF16)
    return is_fp_dtype


def is_integer(x):
    """Return whether x is a tensor of integral data type.

    Args:
        x (Tensor): The input tensor.

    Returns:
        bool: True if the data type of the input is integer data type, otherwise false.

    Examples:
        .. code-block:: python

            import paddle

            x = paddle.to_tensor([1 + 2j, 3 + 4j])
            print(paddle.is_integer(x))
            # False

            x = paddle.to_tensor([1.1, 1.2])
            print(paddle.is_integer(x))
            # False

            x = paddle.to_tensor([1, 2, 3])
            print(paddle.is_integer(x))
            # True
    """
    if not isinstance(x, (paddle.Tensor, paddle.static.Variable)):
        raise TypeError("Expected Tensor, but received type of x: {}".format(
            type(x)))
    dtype = x.dtype
    is_int_dtype = (dtype == core.VarDesc.VarType.UINT8 or
                    dtype == core.VarDesc.VarType.INT8 or
                    dtype == core.VarDesc.VarType.INT16 or
                    dtype == core.VarDesc.VarType.INT32 or
                    dtype == core.VarDesc.VarType.INT64)
    return is_int_dtype


def real(x, name=None):
    """
    Returns a new tensor containing the real values of the input tensor.

    Args:
        x (Tensor): the input tensor, its data type could be complex64 or complex128.
        name (str, optional): The default value is None. Normally there is no need for
            user to set this property. For more information, please refer to :ref:`api_guide_Name` .

    Returns:
        Tensor: a tensor containing real values of the input tensor.

    Examples:
        .. code-block:: python

            import paddle

            x = paddle.to_tensor(
                [[1 + 6j, 2 + 5j, 3 + 4j], [4 + 3j, 5 + 2j, 6 + 1j]])
            # Tensor(shape=[2, 3], dtype=complex64, place=CUDAPlace(0), stop_gradient=True,
            #        [[(1+6j), (2+5j), (3+4j)],
            #         [(4+3j), (5+2j), (6+1j)]])

            real_res = paddle.real(x)
            # Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
            #        [[1., 2., 3.],
            #         [4., 5., 6.]])

            real_t = x.real()
            # Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
            #        [[1., 2., 3.],
            #         [4., 5., 6.]])
    """
    if in_dygraph_mode():
        return _C_ops.final_state_real(x)
    if _in_legacy_dygraph():
        return _C_ops.real(x)

    check_variable_and_dtype(x, 'x', ['complex64', 'complex128'], 'real')
    helper = LayerHelper('real', **locals())
    out = helper.create_variable_for_type_inference(
        dtype=_complex_to_real_dtype(helper.input_dtype()))
    helper.append_op(type='real', inputs={'X': x}, outputs={'Out': out})
    return out


def imag(x, name=None):
    """
    Returns a new tensor containing the imaginary values of the input tensor.

    Args:
        x (Tensor): the input tensor, its data type could be complex64 or complex128.
        name (str, optional): The default value is None. Normally there is no need for
            user to set this property. For more information, please refer to :ref:`api_guide_Name` .

    Returns:
        Tensor: a tensor containing imaginary values of the input tensor.

    Examples:
        .. code-block:: python

            import paddle

            x = paddle.to_tensor(
                [[1 + 6j, 2 + 5j, 3 + 4j], [4 + 3j, 5 + 2j, 6 + 1j]])
            # Tensor(shape=[2, 3], dtype=complex64, place=CUDAPlace(0), stop_gradient=True,
            #        [[(1+6j), (2+5j), (3+4j)],
            #         [(4+3j), (5+2j), (6+1j)]])

            imag_res = paddle.imag(x)
            # Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
            #        [[6., 5., 4.],
            #         [3., 2., 1.]])

            imag_t = x.imag()
            # Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
            #        [[6., 5., 4.],
            #         [3., 2., 1.]])
    """
    if in_dygraph_mode():
        return _C_ops.final_state_imag(x)
    if _in_legacy_dygraph():
        return _C_ops.imag(x)

    check_variable_and_dtype(x, 'x', ['complex64', 'complex128'], 'imag')
    helper = LayerHelper('imag', **locals())
    out = helper.create_variable_for_type_inference(
        dtype=_complex_to_real_dtype(helper.input_dtype()))
    helper.append_op(type='imag', inputs={'X': x}, outputs={'Out': out})
    return out
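# --- Added usage sketch (not part of the original file) ---
# The attribute helpers above compose naturally; all values follow from the
# docstring examples:
#
#   import paddle
#   x = paddle.to_tensor([1 + 2j, 3 + 4j])
#   paddle.is_complex(x)                       # True
#   paddle.real(x)                             # Tensor([1., 3.])
#   paddle.imag(x)                             # Tensor([2., 4.])
#   paddle.is_floating_point(paddle.real(x))   # True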
33.178862
115
0.590787
07dedb4b0df0aef6400f3a4ca8f79f22e2a6cabd
2,990
py
Python
appcodec.py
guardhunt/TelemterRC
679f99b317ecc6cbef6e022ae861cde18594f6a0
[ "MIT" ]
null
null
null
appcodec.py
guardhunt/TelemterRC
679f99b317ecc6cbef6e022ae861cde18594f6a0
[ "MIT" ]
null
null
null
appcodec.py
guardhunt/TelemterRC
679f99b317ecc6cbef6e022ae861cde18594f6a0
[ "MIT" ]
null
null
null
import evdev
import time
import struct
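# --- Added usage sketch (not part of the original record, which is truncated
# after the imports) ---
# The imports suggest reading Linux input events with python-evdev and packing
# them into binary frames with struct; the device path, event filter, and
# frame layout below are illustrative assumptions only.
#
#   dev = evdev.InputDevice('/dev/input/event0')   # assumed controller node
#   for event in dev.read_loop():
#       if event.type == evdev.ecodes.EV_ABS:      # analog stick axes
#           frame = struct.pack('<Bi', event.code, event.value)
#           # ... transmit `frame` over the telemetry link (not shown)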
37.375
262
0.585619
07dee507ce31e115b2b94a29d53cdc5c3d4bd0df
2,316
py
Python
scripts/examples/OpenMV/16-Codes/find_barcodes.py
jiskra/openmv
a0f321836f77f94d8118910598dcdb79eb784d58
[ "MIT" ]
1,761
2015-07-10T23:14:17.000Z
2022-03-30T07:49:49.000Z
scripts/examples/OpenMV/16-Codes/find_barcodes.py
jiskra/openmv
a0f321836f77f94d8118910598dcdb79eb784d58
[ "MIT" ]
487
2015-07-07T23:21:20.000Z
2022-03-30T17:13:22.000Z
scripts/examples/OpenMV/16-Codes/find_barcodes.py
jiskra/openmv
a0f321836f77f94d8118910598dcdb79eb784d58
[ "MIT" ]
882
2015-08-01T08:34:19.000Z
2022-03-30T07:36:23.000Z
# Barcode Example
#
# This example shows off how easy it is to detect bar codes using the
# OpenMV Cam M7. Barcode detection does not work on the M4 Camera.

import sensor, image, time, math

sensor.reset()
sensor.set_pixformat(sensor.GRAYSCALE)
sensor.set_framesize(sensor.VGA) # High Res!
sensor.set_windowing((640, 80)) # V Res of 80 == less work (40 for 2X the speed).
sensor.skip_frames(time = 2000)
sensor.set_auto_gain(False)  # must turn this off to prevent image washout...
sensor.set_auto_whitebal(False)  # must turn this off to prevent image washout...
clock = time.clock()

def barcode_name(code):
    # NOTE: the original example resolves code.type() to a human-readable name
    # via a long if/elif chain over the image.EAN*/UPC*/CODE* constants; that
    # chain is not included in this record, so this minimal stand-in simply
    # reports the numeric type.
    return "type %d" % code.type()

# Barcode detection can run at the full 640x480 resolution of your OpenMV Cam's
# OV7725 camera module. Barcode detection will also work in RGB565 mode but at
# a lower resolution. That said, barcode detection requires a higher resolution
# to work well so it should always be run at 640x480 in grayscale...

while(True):
    clock.tick()
    img = sensor.snapshot()
    codes = img.find_barcodes()
    for code in codes:
        img.draw_rectangle(code.rect())
        print_args = (barcode_name(code), code.payload(), (180 * code.rotation()) / math.pi, code.quality(), clock.fps())
        print("Barcode %s, Payload \"%s\", rotation %f (degrees), quality %d, FPS %f" % print_args)
    if not codes:
        print("FPS %f" % clock.fps())
35.090909
121
0.638169
07df8ef3eee337410a8a9059f852b86e3f074f99
102
py
Python
Python/factorial.py
devaslooper/Code-Overflow
d7d55ea0f5015bccb5c4100c4240464fcda8504a
[ "MIT" ]
null
null
null
Python/factorial.py
devaslooper/Code-Overflow
d7d55ea0f5015bccb5c4100c4240464fcda8504a
[ "MIT" ]
null
null
null
Python/factorial.py
devaslooper/Code-Overflow
d7d55ea0f5015bccb5c4100c4240464fcda8504a
[ "MIT" ]
null
null
null
n = int(input("Enter number "))
fact = 1
for i in range(1, n + 1):
    fact = fact * i
print("Factorial is ", fact)
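# Added sanity check (sketch, not in the original): the loop above should
# agree with the standard library.
# import math
# assert fact == math.factorial(n)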
17
29
0.666667
07e01a30113159cdf23415b99ef5107294e27bab
606
py
Python
mundo 3/099.py
thiagofreitascarneiro/Curso-de-Python---Curso-em-Video
0342e482780b5a1c6f78cddd51d9bfad785c79fa
[ "MIT" ]
1
2021-08-04T13:21:22.000Z
2021-08-04T13:21:22.000Z
mundo 3/099.py
thiagofreitascarneiro/Curso-de-Python---Curso-em-Video
0342e482780b5a1c6f78cddd51d9bfad785c79fa
[ "MIT" ]
null
null
null
mundo 3/099.py
thiagofreitascarneiro/Curso-de-Python---Curso-em-Video
0342e482780b5a1c6f78cddd51d9bfad785c79fa
[ "MIT" ]
null
null
null
import time

# The * unpacks the parameter, allowing any number of arguments to be passed in.
# NOTE: the definition of maior() is not included in this record; the version
# below is a minimal reconstruction of the behavior implied by the exercise
# (report the largest of the values received).
def maior(*num):
    print(f'{len(num)} value(s) were passed; the largest was {max(num)}.')

maior(2, 1, 7)
maior(5, 4, 7, 9, 2)
maior(1, 4, 7, 20, 2)
maior(0)
23.307692
76
0.544554
07e0af37e19269400d7c0cf5ac0dc3b1672e18e4
10,752
py
Python
tests/test_packed_to_padded.py
theycallmepeter/pytorch3d_PBR
bc83c23969ff7843fc05d2da001952b368926174
[ "BSD-3-Clause" ]
null
null
null
tests/test_packed_to_padded.py
theycallmepeter/pytorch3d_PBR
bc83c23969ff7843fc05d2da001952b368926174
[ "BSD-3-Clause" ]
null
null
null
tests/test_packed_to_padded.py
theycallmepeter/pytorch3d_PBR
bc83c23969ff7843fc05d2da001952b368926174
[ "BSD-3-Clause" ]
null
null
null
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.


import unittest

import torch
from common_testing import TestCaseMixin, get_random_cuda_device
from pytorch3d.ops import packed_to_padded, padded_to_packed
from pytorch3d.structures.meshes import Meshes


# NOTE: in the original file this helper is a method of a
# TestPackedToPadded(TestCaseMixin, unittest.TestCase) class, whose other
# methods (init_meshes and the pure-Python reference implementations
# padded_to_packed_python / packed_to_padded_python) are not included in
# this record.
def _test_padded_to_packed_helper(self, D, device):
    """
    Check that the results from padded_to_packed and the pure PyTorch
    reference implementation are the same.
    """
    meshes = self.init_meshes(16, 100, 300, device=device)
    mesh_to_faces_packed_first_idx = meshes.mesh_to_faces_packed_first_idx()
    num_faces_per_mesh = meshes.num_faces_per_mesh()
    max_faces = num_faces_per_mesh.max().item()
    if D == 0:
        values = torch.rand((len(meshes), max_faces), device=device)
    else:
        values = torch.rand((len(meshes), max_faces, D), device=device)
    for i, num in enumerate(num_faces_per_mesh):
        values[i, num:] = 0
    values.requires_grad = True
    values_torch = values.detach().clone()
    values_torch.requires_grad = True
    values_packed = padded_to_packed(
        values, mesh_to_faces_packed_first_idx, num_faces_per_mesh.sum().item()
    )
    values_packed_torch = TestPackedToPadded.padded_to_packed_python(
        values_torch,
        mesh_to_faces_packed_first_idx,
        num_faces_per_mesh.sum().item(),
        device,
    )
    # check forward
    self.assertClose(values_packed, values_packed_torch)

    # check backward
    if D == 0:
        grad_inputs = torch.rand((num_faces_per_mesh.sum().item()), device=device)
    else:
        grad_inputs = torch.rand(
            (num_faces_per_mesh.sum().item(), D), device=device
        )
    values_packed.backward(grad_inputs)
    grad_outputs = values.grad
    values_packed_torch.backward(grad_inputs)
    grad_outputs_torch1 = values_torch.grad
    grad_outputs_torch2 = TestPackedToPadded.packed_to_padded_python(
        grad_inputs, mesh_to_faces_packed_first_idx, values.size(1), device=device
    )
    self.assertClose(grad_outputs, grad_outputs_torch1)
    self.assertClose(grad_outputs, grad_outputs_torch2)
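# --- Added usage sketch (not part of the original file) ---
# Round-trip illustration of the two ops exercised above, using the same call
# signatures as the helper (padded_to_packed(values, first_idxs, num) and
# packed_to_padded(packed, first_idxs, max_size)); the shapes are illustrative.
#
#   import torch
#   from pytorch3d.ops import packed_to_padded, padded_to_packed
#   padded = torch.rand(2, 4, 3)               # batch of 2, max 4 faces, D=3
#   first_idxs = torch.tensor([0, 3])          # element 0 owns packed rows 0..2
#   packed = padded_to_packed(padded, first_idxs, 7)   # (7, 3): 3 + 4 faces
#   back = packed_to_padded(packed, first_idxs, 4)     # (2, 4, 3) again
#   # `back` matches `padded` except that the unused padding rows are zeroed.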
39.384615
89
0.628255
07e0ba1b41406f219565825c5f80d1ee0cfaded5
8,638
py
Python
easyric/tests/test_io_geotiff.py
HowcanoeWang/EasyRIC
a3420bc7b1e0f1013411565cf0e66dd2d2ba5371
[ "MIT" ]
12
2021-01-25T07:11:52.000Z
2022-02-14T11:57:03.000Z
easyric/tests/test_io_geotiff.py
HowcanoeWang/EasyRIC
a3420bc7b1e0f1013411565cf0e66dd2d2ba5371
[ "MIT" ]
null
null
null
easyric/tests/test_io_geotiff.py
HowcanoeWang/EasyRIC
a3420bc7b1e0f1013411565cf0e66dd2d2ba5371
[ "MIT" ]
null
null
null
import pyproj
import pytest
import numpy as np

from easyric.io import geotiff, shp
from skimage.io import imread
from skimage.color import rgb2gray
import matplotlib.pyplot as plt
37.232759
117
0.683376
07e0cae8b52445fc39f360d69691eeb8e3e8f35e
9,833
py
Python
src/ebay_rest/api/buy_marketplace_insights/models/item_location.py
matecsaj/ebay_rest
dd23236f39e05636eff222f99df1e3699ce47d4a
[ "MIT" ]
3
2021-12-12T04:28:03.000Z
2022-03-10T03:29:18.000Z
src/ebay_rest/api/buy_marketplace_insights/models/item_location.py
jdavv/ebay_rest
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
[ "MIT" ]
33
2021-06-16T20:44:36.000Z
2022-03-30T14:55:06.000Z
src/ebay_rest/api/buy_marketplace_insights/models/item_location.py
jdavv/ebay_rest
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
[ "MIT" ]
7
2021-06-03T09:30:23.000Z
2022-03-08T19:51:33.000Z
# coding: utf-8

"""
    Marketplace Insights API

    <a href=\"https://developer.ebay.com/api-docs/static/versioning.html#limited\" target=\"_blank\"> <img src=\"/cms/img/docs/partners-api.svg\" class=\"legend-icon partners-icon\" title=\"Limited Release\" alt=\"Limited Release\" />(Limited Release)</a> The Marketplace Insights API provides the ability to search for sold items on eBay by keyword, GTIN, category, and product and returns the sales history of those items.  # noqa: E501

    OpenAPI spec version: v1_beta.2.2

    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

import pprint
import re  # noqa: F401

import six


class ItemLocation(object):
    # NOTE: the generated swagger_types/attribute_map tables, __init__,
    # property accessors, and to_dict() are not included in this record;
    # only the generic helper methods below survive, re-indented under the
    # class they belong to.

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ItemLocation):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
34.992883
442
0.616699
07e1c7308c17fb9509fe646f19b12d537b946ccf
502
py
Python
fractionalKnapsack.py
aadishgoel2013/Algos-with-Python
19541607c8ede9a76a8cbbe047e01343080cfd5b
[ "Apache-2.0" ]
6
2017-11-12T05:13:27.000Z
2022-03-13T07:58:31.000Z
fractionalKnapsack.py
aadishgoel2013/Algos-with-Python
19541607c8ede9a76a8cbbe047e01343080cfd5b
[ "Apache-2.0" ]
null
null
null
fractionalKnapsack.py
aadishgoel2013/Algos-with-Python
19541607c8ede9a76a8cbbe047e01343080cfd5b
[ "Apache-2.0" ]
null
null
null
# Fractional Knapsack (greedy: sort items by profit/weight ratio, take whole
# items while they fit, then a fraction of the first item that does not fit)

wt = [40, 50, 30, 10, 10, 40, 30]
pro = [30, 20, 20, 25, 5, 35, 15]
n = len(wt)
data = [(i, pro[i], wt[i]) for i in range(n)]
bag = 100

# Sort by profit-per-unit-weight, highest first
data.sort(key=lambda x: x[1] / x[2], reverse=True)

profit = 0
ans = []
i = 0
while i < n:
    if data[i][2] <= bag:   # item fits entirely
        bag -= data[i][2]
        ans.append(data[i][0])
        profit += data[i][1]
        i += 1
    else:
        break
if i < n:                   # take a fraction of the next item
    ans.append(data[i][0])
    profit += (bag * data[i][1]) / data[i][2]

print(profit, ans)
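# Worked check (added; values computed by hand from the data above):
# the ratios are item 3: 2.5, item 5: 0.875, item 0: 0.75, item 2: 0.667,
# items 4 and 6: 0.5, item 1: 0.4, so the greedy takes items 3, 5 and 0 whole
# (weight 90, profit 90) and then 10/30 of item 2, adding 20/3. The script
# should therefore print approximately 96.6667 and the order [3, 5, 0, 2].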
17.310345
49
0.498008