hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 11
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
251
| max_stars_repo_name
stringlengths 4
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
251
| max_issues_repo_name
stringlengths 4
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
251
| max_forks_repo_name
stringlengths 4
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.05M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.04M
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e41bb3e24e831bc6c9db543d89a47e06639cb0a0
| 355 |
py
|
Python
|
src/comments/migrations/0004_auto_20200209_1812.py
|
samrika25/TRAVIS_HEROKU_GIT
|
bcae6d0422d9a0369810944a91dd03db7df0d058
|
[
"MIT"
] | null | null | null |
src/comments/migrations/0004_auto_20200209_1812.py
|
samrika25/TRAVIS_HEROKU_GIT
|
bcae6d0422d9a0369810944a91dd03db7df0d058
|
[
"MIT"
] | 4 |
2021-03-30T12:35:36.000Z
|
2021-06-10T18:11:24.000Z
|
src/comments/migrations/0004_auto_20200209_1812.py
|
samrika25/TRAVIS_HEROKU_GIT
|
bcae6d0422d9a0369810944a91dd03db7df0d058
|
[
"MIT"
] | 2 |
2021-02-07T16:16:36.000Z
|
2021-07-13T05:26:51.000Z
|
# Generated by Django 3.0.2 on 2020-02-09 18:12
from django.db import migrations
| 19.722222 | 51 | 0.6 |
e41c425d0ed1f3d737beeff6b6c0f31113fafb62
| 768 |
py
|
Python
|
multicasting_test_scripts/sender.py
|
sandwichdoge/libmulticastudp
|
735a3a6242d5444f9a5a070322a7033296707cdf
|
[
"MIT"
] | null | null | null |
multicasting_test_scripts/sender.py
|
sandwichdoge/libmulticastudp
|
735a3a6242d5444f9a5a070322a7033296707cdf
|
[
"MIT"
] | null | null | null |
multicasting_test_scripts/sender.py
|
sandwichdoge/libmulticastudp
|
735a3a6242d5444f9a5a070322a7033296707cdf
|
[
"MIT"
] | null | null | null |
#
# mostly copied from
# http://bioportal.weizmann.ac.il/course/python/PyMOTW/PyMOTW/docs/socket/multicast.html
#
# Fixed: the original used Python 2 `print >>sys.stderr` statements (a syntax
# error on Python 3) and passed a `str` to sendto(), which Python 3 rejects.
# The send loop is also wrapped in a main() guard so importing this module
# has no side effects.
import socket
import struct
import sys
import time

# Payload and (address, port) destination for the multicast datagrams.
message = 'data worth repeating'
multicast_group = ('226.1.1.1', 4321)


def main():
    """Send `message` to `multicast_group` every 5 seconds until interrupted."""
    # Create the datagram socket
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # Set a timeout so the socket does not block indefinitely when trying
    # to receive data.
    sock.settimeout(0.2)
    counter = 0
    try:
        while True:
            counter += 1
            # Send data to the multicast group
            print('%d: sending "%s"' % (counter, message), file=sys.stderr)
            sock.sendto(message.encode(), multicast_group)
            time.sleep(5)
    finally:
        # Always release the socket, even on Ctrl-C.
        print('closing socket', file=sys.stderr)
        sock.close()


if __name__ == '__main__':
    main()
| 20.756757 | 90 | 0.670573 |
e42510b046e5ad727d96dec824908363abd5654f
| 852 |
py
|
Python
|
python/chol_factor_test.py
|
davxy/numeric
|
1e8b44a72e1d570433a5ba81ae0795a750ce5921
|
[
"Unlicense"
] | 2 |
2020-05-03T17:02:44.000Z
|
2022-02-21T04:09:34.000Z
|
python/chol_factor_test.py
|
davxy/numeric
|
1e8b44a72e1d570433a5ba81ae0795a750ce5921
|
[
"Unlicense"
] | null | null | null |
python/chol_factor_test.py
|
davxy/numeric
|
1e8b44a72e1d570433a5ba81ae0795a750ce5921
|
[
"Unlicense"
] | null | null | null |
import numpy as np
from chol_factor import chol_factor
from triangular import triangular
# TEST: Cholesky factorization (LL')
# Symmetric positive definite matrix
A = np.matrix('5 1.2 0.3 -0.6;'
              '1.2 6 -0.4 0.9;'
              '0.3 -0.4 8 1.7;'
              '-0.6 0.9 1.7 10')
print('A = \n', A)
# Computation of the L factor
L = chol_factor(A)
print('L = \n', L)
# Check that A == L*L' within floating-point tolerance.
# Fixed: the failure message previously said "QR factorizzation" — wrong
# algorithm name for a Cholesky test, and misspelled.  Also replaced the
# un-Pythonic `== False` comparisons with `not`.
if not np.allclose(A, np.dot(L, L.transpose())):
    raise Exception('Cholesky factorization test failure')
# TEST: System Resolution
# Ax = LL'x = b
b = np.matrix("68; 9; 45; 35")
print('b = \n', b)
# Lk = b  (third argument presumably selects lower/upper triangular solve —
# confirm against triangular()'s definition, which is not visible here)
k = triangular(L, b, 1)
print('k = \n', k)
# L'x = k
x = triangular(L.transpose(), k, 0)
print('x = \n', x)
# Check: recompute the right-hand side from the computed solution
b1 = np.dot(A, x)
print('b1 = \n', b1)
if not np.allclose(b, b1):
    raise Exception('System resolution failure')
| 23.027027 | 53 | 0.580986 |
e4257523a5f56faf33e09f713fd3a02e93109a4b
| 11,245 |
py
|
Python
|
PSO_system/GUI/gui_root.py
|
daniel4lee/PSO-car-simulator
|
b4aebca0fed614e33acc3e7d665085d55a67b82a
|
[
"MIT"
] | 1 |
2022-03-23T21:51:59.000Z
|
2022-03-23T21:51:59.000Z
|
PSO_system/GUI/gui_root.py
|
daniel4lee/PSO-car-simulator
|
b4aebca0fed614e33acc3e7d665085d55a67b82a
|
[
"MIT"
] | 1 |
2018-10-08T12:53:42.000Z
|
2018-10-08T13:46:13.000Z
|
PSO_system/GUI/gui_root.py
|
daniel4lee/PSO-car-simulator
|
b4aebca0fed614e33acc3e7d665085d55a67b82a
|
[
"MIT"
] | 2 |
2020-04-26T08:22:53.000Z
|
2021-05-18T09:51:24.000Z
|
"""Build the tkinter gui root"""
import math
from PyQt5.QtWidgets import *#(QWidget, QToolTip, QDesktopWidget, QPushButton, QApplication)
from PyQt5.QtGui import QFont
from PyQt5.QtCore import QCoreApplication, QObject, QRunnable, QThread, QThreadPool, pyqtSignal, pyqtSlot
from PyQt5.QtGui import QIntValidator, QDoubleValidator
import sys
from PSO_system.Counting.plot import PlotCanvas
from PSO_system.Counting.run import CarRunning
from PSO_system.Counting.test_result import TestRunning
# Worker-thread registry kept at module scope so spawned threads are not
# garbage-collected — presumably appended to elsewhere in this file; verify.
THREADS = []
# Import-only guard: this GUI module must not be executed directly.
if __name__ == '__main__':
    print("Error: This file can only be imported. Execute 'main.py'")
| 41.494465 | 139 | 0.649355 |
e425b8c86c1c0699016fdb4cfc8b01eea833c4f2
| 2,346 |
py
|
Python
|
qsrlib/src/qsrlib_qsrs/qsr_cardinal_direction.py
|
alexiatoumpa/QSR_Detector
|
ff92a128dddb613690a49a7b4130afeac0dd4381
|
[
"MIT"
] | 15 |
2015-06-15T16:50:37.000Z
|
2022-03-27T09:25:56.000Z
|
qsrlib/src/qsrlib_qsrs/qsr_cardinal_direction.py
|
alexiatoumpa/QSR_Detector
|
ff92a128dddb613690a49a7b4130afeac0dd4381
|
[
"MIT"
] | 205 |
2015-01-22T12:02:59.000Z
|
2022-03-29T11:59:55.000Z
|
qsrlib/src/qsrlib_qsrs/qsr_cardinal_direction.py
|
alexiatoumpa/QSR_Detector
|
ff92a128dddb613690a49a7b4130afeac0dd4381
|
[
"MIT"
] | 16 |
2015-02-04T23:13:18.000Z
|
2022-03-08T13:45:53.000Z
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division
from qsrlib_qsrs.qsr_dyadic_abstractclass import QSR_Dyadic_1t_Abstractclass
import math
| 31.28 | 118 | 0.561381 |
e4287373cf648c93ed322e508af33deff1f8e862
| 4,291 |
py
|
Python
|
clustering/GMM.py
|
peasant98/NBA-Stats-Clustering
|
57ff7e70a8cbb0c609d6a6720134a37695e2a860
|
[
"MIT"
] | null | null | null |
clustering/GMM.py
|
peasant98/NBA-Stats-Clustering
|
57ff7e70a8cbb0c609d6a6720134a37695e2a860
|
[
"MIT"
] | null | null | null |
clustering/GMM.py
|
peasant98/NBA-Stats-Clustering
|
57ff7e70a8cbb0c609d6a6720134a37695e2a860
|
[
"MIT"
] | null | null | null |
# NBA Stats Clustering
# Copyright Matthew Strong, 2019
# gaussian mixture models with em algorithm
import numpy as np
from scipy import stats
from clustering.Cluster import NBACluster
# nba gmm class
# gmm from scratch as well, more explained below
| 40.102804 | 120 | 0.554649 |
e428f454d7dceb480c84f33f264e2ac819a010fd
| 1,484 |
py
|
Python
|
ML/eval.py
|
Data-Science-Community-SRM/Fashion-Generation
|
fa062e2b31b4fba8945820d911dfa41de45b1333
|
[
"MIT"
] | 1 |
2021-04-27T09:13:09.000Z
|
2021-04-27T09:13:09.000Z
|
ML/eval.py
|
Aradhya-Tripathi/Fashion-Generation
|
fa062e2b31b4fba8945820d911dfa41de45b1333
|
[
"MIT"
] | null | null | null |
ML/eval.py
|
Aradhya-Tripathi/Fashion-Generation
|
fa062e2b31b4fba8945820d911dfa41de45b1333
|
[
"MIT"
] | 1 |
2021-03-12T13:15:08.000Z
|
2021-03-12T13:15:08.000Z
|
import torch
from torch.utils.data import DataLoader
import matplotlib.pyplot as plt
import sys
sys.path.append("./ML")
import Definitions.models as models
from Definitions.dataset import Data
# Script entry point.  NOTE(review): `main` is not defined in this chunk —
# presumably defined elsewhere in the original file or lost in extraction;
# verify before running.
if __name__ == "__main__":
    main()
| 26.981818 | 94 | 0.617925 |
e42935051444daddcd5cee33f9a2daa9cde6e823
| 4,965 |
py
|
Python
|
app/screens/authorize.py
|
jimkutter/rpi_lcars
|
f5ae0891f26d3494ad77f894c4f7733deaf063ee
|
[
"MIT"
] | null | null | null |
app/screens/authorize.py
|
jimkutter/rpi_lcars
|
f5ae0891f26d3494ad77f894c4f7733deaf063ee
|
[
"MIT"
] | null | null | null |
app/screens/authorize.py
|
jimkutter/rpi_lcars
|
f5ae0891f26d3494ad77f894c4f7733deaf063ee
|
[
"MIT"
] | null | null | null |
from datetime import datetime, timedelta
import pygame
from pygame.mixer import Sound
from screens.base_screen import BaseScreen
from ui import colours
from ui.widgets.background import LcarsBackgroundImage
from ui.widgets.gifimage import LcarsGifImage
from ui.widgets.lcars_widgets import LcarsButton
from ui.widgets.lcars_widgets import LcarsText
| 31.03125 | 118 | 0.557301 |
e42efd7b2e91e2b6ad55453d791a04774b95fe07
| 31 |
py
|
Python
|
swarm_tasks/utils/__init__.py
|
rmvanarse/swarm_tasks
|
3335297ba8fcdbff756ae519002bcce919d54a84
|
[
"MIT"
] | 6 |
2021-03-13T12:54:18.000Z
|
2022-01-29T12:12:28.000Z
|
swarm_tasks/utils/__init__.py
|
rmvanarse/swarm_tasks
|
3335297ba8fcdbff756ae519002bcce919d54a84
|
[
"MIT"
] | null | null | null |
swarm_tasks/utils/__init__.py
|
rmvanarse/swarm_tasks
|
3335297ba8fcdbff756ae519002bcce919d54a84
|
[
"MIT"
] | 2 |
2021-08-06T15:02:15.000Z
|
2022-02-08T12:11:30.000Z
|
import swarm_tasks.utils.robot
| 15.5 | 30 | 0.870968 |
e4324e2ffd9d0f0cc445c08f1b32895fbc79b0d2
| 2,178 |
py
|
Python
|
Problems/P0010 - Soma de primos.py
|
clasenback/EulerProject
|
775d9774fcdfbbcc579e3c4ec0bb2d4a941764ad
|
[
"CC0-1.0"
] | null | null | null |
Problems/P0010 - Soma de primos.py
|
clasenback/EulerProject
|
775d9774fcdfbbcc579e3c4ec0bb2d4a941764ad
|
[
"CC0-1.0"
] | null | null | null |
Problems/P0010 - Soma de primos.py
|
clasenback/EulerProject
|
775d9774fcdfbbcc579e3c4ec0bb2d4a941764ad
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 7 17:11:12 2021
@author: User
SUMMATION OF PRIMES
The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17.
Find the sum of all the primes below two million.
21min19s to find.
"""
from datetime import datetime as date
# INPUTS
target = 2000000
primes = [2, 3, 5, 7, 11, 13, 17, 19]  # seed list of known primes
control = target / 10  # progress-report threshold
path = "C:/Users/User/Documents/AA - Pessoal/DataScience/Project Euler/"
file = "primos_ate_" + str(target) + ".csv"
# Fixed: mojibake repaired in the messages below — the accented 'Ú' had been
# lost ("NMEROS" -> "NÚMEROS", "LTIMO" -> "ÚLTIMO").
print("INICIANDO BUSCA DOS NÚMEROS PRIMOS MENORES QUE", target)
start = date.now()
# PROCESSING: grow the prime list until the last prime passes the target.
# NOTE(review): nextPrime() is not defined in this chunk — presumably defined
# earlier in the original file; verify before running.
while primes[-1] < target:
    candidate = nextPrime(primes[-1], primes)
    if candidate > target:
        break
    primes.append(candidate)
    # CONTROLLING: progress message roughly every target/10 reached
    if candidate >= control:
        print("O", len(primes), " primo ", candidate, "em", date.now() - start)
        control += target / 10
# OUTPUT
print("\n")
print("RESULTADOS:")
print("ENCONTRAR OS NÚMEROS PRIMOS MENORES QUE", target)
print("FORAM ENCONTRADOS", len(primes), "NÚMEROS PRIMOS")
print("ÚLTIMO PRIMO DA LISTA:", primes[-1])
print("SOMA DOS PRIMOS ENCONTRADOS:", sum(primes))
print("TEMPO TOTAL DA BUSCA:", date.now() - start)
# TO FILE: tab-separated "index<TAB>prime" rows.
# Fixed: use a `with` block so the file is closed even if a write fails
# (the original used bare open()/close()).
with open(path + file, "w+") as f:
    for i in range(len(primes)):
        f.write(str(i + 1))
        f.write("\t")  # tab
        f.write(str(primes[i]))
        f.write("\r")  # carriage return
| 26.888889 | 82 | 0.539027 |
e4348a8c3eadb9042a4b4b0ebb7cd499d99a7b46
| 1,124 |
py
|
Python
|
l5kit/l5kit/tests/rasterization/render_context_test.py
|
cdicle-motional/l5kit
|
4dc4ee5391479bb71f0b373f39c316f9eef5a961
|
[
"Apache-2.0"
] | 1 |
2021-12-04T17:48:53.000Z
|
2021-12-04T17:48:53.000Z
|
l5kit/l5kit/tests/rasterization/render_context_test.py
|
cdicle-motional/l5kit
|
4dc4ee5391479bb71f0b373f39c316f9eef5a961
|
[
"Apache-2.0"
] | null | null | null |
l5kit/l5kit/tests/rasterization/render_context_test.py
|
cdicle-motional/l5kit
|
4dc4ee5391479bb71f0b373f39c316f9eef5a961
|
[
"Apache-2.0"
] | 1 |
2021-11-19T08:13:46.000Z
|
2021-11-19T08:13:46.000Z
|
import numpy as np
import pytest
from l5kit.geometry import transform_points
from l5kit.rasterization.render_context import RenderContext
| 35.125 | 77 | 0.715302 |
e434cb20e1bb4b89d1f4687abbe31af32ff3e3b8
| 1,528 |
py
|
Python
|
plugin/fcitx.py
|
bigshans/fcitx.vim
|
228a51c6c95997439feddff6c38d62ce014e6d59
|
[
"MIT"
] | null | null | null |
plugin/fcitx.py
|
bigshans/fcitx.vim
|
228a51c6c95997439feddff6c38d62ce014e6d59
|
[
"MIT"
] | null | null | null |
plugin/fcitx.py
|
bigshans/fcitx.vim
|
228a51c6c95997439feddff6c38d62ce014e6d59
|
[
"MIT"
] | null | null | null |
import vim
import functools
import dbus
# Try to establish the D-Bus connection to the fcitx input-method daemon.
# NOTE(review): FcitxComm is not defined in this chunk — presumably a class
# defined earlier in the original file; verify.
try:
    Fcitx = FcitxComm()
    fcitx_loaded = True
except dbus.exceptions.DBusException as e:
    # Warn inside Vim (unless the user set g:silent_unsupported), then
    # degrade gracefully by marking the plugin as not loaded.
    if not vim.vars.get('silent_unsupported'):
        vim.command('echohl WarningMsg | echom "fcitx.vim not loaded: %s" | echohl NONE' % e)
    fcitx_loaded = False
| 25.466667 | 106 | 0.656414 |
e43577db4ce37b9708732914de0c5a01c24639dc
| 311 |
py
|
Python
|
ctf/post.py
|
ntdgy/python_study
|
c3511846a89ea72418937de4cc3edf1595a46ec5
|
[
"MIT"
] | null | null | null |
ctf/post.py
|
ntdgy/python_study
|
c3511846a89ea72418937de4cc3edf1595a46ec5
|
[
"MIT"
] | null | null | null |
ctf/post.py
|
ntdgy/python_study
|
c3511846a89ea72418937de4cc3edf1595a46ec5
|
[
"MIT"
] | null | null | null |
import requests
# NOTE(review): `post()` is not defined in this chunk — presumably a helper
# (likely wrapping requests.post) defined elsewhere in the original file or
# lost in extraction; verify before running.
post()
| 17.277778 | 61 | 0.559486 |
e435bc6759728f66c9ba58ab0f9f30b4d9e6d31b
| 828 |
py
|
Python
|
avioclient/controller.py
|
HermenegildoK/AvioClient
|
9cad3a89bbf10d7212561cf15b3ad453060c9434
|
[
"MIT"
] | null | null | null |
avioclient/controller.py
|
HermenegildoK/AvioClient
|
9cad3a89bbf10d7212561cf15b3ad453060c9434
|
[
"MIT"
] | null | null | null |
avioclient/controller.py
|
HermenegildoK/AvioClient
|
9cad3a89bbf10d7212561cf15b3ad453060c9434
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from avioclient.send_data import SendControls
from avioclient import config
# Script entry point.  NOTE(review): `send_data` is not defined in this
# chunk (only SendControls is imported) — presumably defined elsewhere in
# the original file or lost in extraction; verify before running.
if __name__ == "__main__":
    send_data()
| 23.657143 | 50 | 0.48913 |
e43608fd33081461199e20cc093779ca67fd8543
| 132 |
py
|
Python
|
pythonExercicios/ex014.py
|
Yhago-Carvalho/CursoPython
|
343ccabb1a61e16c6078de9672c78c56deed2589
|
[
"MIT"
] | null | null | null |
pythonExercicios/ex014.py
|
Yhago-Carvalho/CursoPython
|
343ccabb1a61e16c6078de9672c78c56deed2589
|
[
"MIT"
] | null | null | null |
pythonExercicios/ex014.py
|
Yhago-Carvalho/CursoPython
|
343ccabb1a61e16c6078de9672c78c56deed2589
|
[
"MIT"
] | null | null | null |
def celsius_to_fahrenheit(celsius):
    """Convert a temperature in degrees Celsius to degrees Fahrenheit.

    Uses F = (9*C + 160) / 5, which is algebraically equal to 9/5*C + 32.
    """
    return (9 * celsius + 160) / 5


if __name__ == "__main__":
    # Fixed: the prompt misspelled "Celsius" as "Ceusius".
    c = float(input('Digite a temperatura em Celsius: '))
    f = celsius_to_fahrenheit(c)
    # NOTE(review): the degree signs may have been lost in extraction
    # ("{c:.1f}C" was possibly "°C" originally); the text is kept as found.
    print(f'A temperatura de {c:.1f}C corresponde a {f:.1f}F')
| 44 | 60 | 0.659091 |
e436ff03150d44e0196337e442c791322d057adb
| 95 |
py
|
Python
|
python/p287ex5.py
|
ThePeeps191/dmoj-solutions
|
7137e945f3f595c481ad4d29e1dc3a77d8b26e55
|
[
"MIT"
] | 1 |
2022-01-23T16:02:14.000Z
|
2022-01-23T16:02:14.000Z
|
python/p287ex5.py
|
ThePeeps191/dmoj-solutions
|
7137e945f3f595c481ad4d29e1dc3a77d8b26e55
|
[
"MIT"
] | 5 |
2022-01-23T00:16:49.000Z
|
2022-01-30T04:37:45.000Z
|
python/p287ex5.py
|
ThePeeps191/dmoj-solutions
|
7137e945f3f595c481ad4d29e1dc3a77d8b26e55
|
[
"MIT"
] | 1 |
2022-01-23T00:03:47.000Z
|
2022-01-23T00:03:47.000Z
|
# not yet finished
# For each test case: strip hyphens from the line, then report how many
# distinct characters remain (len of a set equals the original
# len(list(set(...)))).
for _ in range(int(input())):
    cleaned = input().replace("-", "")
    print(len(set(cleaned)))
| 47.5 | 76 | 0.631579 |
e43c4d5552c855523479c4f6f4237cbc56d53955
| 906 |
py
|
Python
|
tests/test_fitsutils.py
|
lsst-dm/despyfitsutils
|
7fb96869077712eb20a1cb0f5c132e1cc85424ec
|
[
"NCSA"
] | null | null | null |
tests/test_fitsutils.py
|
lsst-dm/despyfitsutils
|
7fb96869077712eb20a1cb0f5c132e1cc85424ec
|
[
"NCSA"
] | null | null | null |
tests/test_fitsutils.py
|
lsst-dm/despyfitsutils
|
7fb96869077712eb20a1cb0f5c132e1cc85424ec
|
[
"NCSA"
] | null | null | null |
import os
import unittest
import despyfitsutils.fitsutils as utils
# Directory containing this test module; presumably used to locate test data
# files relative to the test itself — the test cases are not visible here.
TESTDIR = os.path.dirname(__file__)
| 25.885714 | 72 | 0.611479 |
e43dacaa5bafcd52f175484e3b1f257816fb14b1
| 4,047 |
py
|
Python
|
applications/MensajeriaMasiva/models/db.py
|
chitohugo/MassiveSMS
|
05b528de146498531c967aff1ee4fe72720febb3
|
[
"BSD-3-Clause"
] | null | null | null |
applications/MensajeriaMasiva/models/db.py
|
chitohugo/MassiveSMS
|
05b528de146498531c967aff1ee4fe72720febb3
|
[
"BSD-3-Clause"
] | null | null | null |
applications/MensajeriaMasiva/models/db.py
|
chitohugo/MassiveSMS
|
05b528de146498531c967aff1ee4fe72720febb3
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from time import gmtime, strftime
from gluon.custom_import import track_changes
# Reload changed application modules automatically (web2py dev convenience).
track_changes(True)
from gluon import current
from pydal import *
import sys
# HACK: Python 2-only trick forcing UTF-8 as the process-wide default
# encoding; reload(sys) re-exposes setdefaultencoding(), which is hidden at
# interpreter startup.  This is discouraged and does not exist on Python 3.
reload(sys)
sys.setdefaultencoding('utf-8')
# Version gate: this application relies on web2py 2.14.1+ APIs.
# Fixed: the abort message previously claimed "2.13.3 or newer", which
# contradicted the version actually compared on the line above.
if request.global_settings.web2py_version < "2.14.1":
    raise HTTP(500, "Requires web2py 2.14.1 or newer")
from gluon.contrib.appconfig import AppConfig
# Cached application configuration; reload=True re-reads it when it changes.
myconf = AppConfig(reload=True)
# NOTE(review): database credentials are hard-coded in source — move them
# into the app configuration / environment and rotate the exposed password.
uri = "postgres://chito:yndrid@localhost/massivesms"
# Shared DAL handle stored on gluon.current.  migrate=False means the tables
# declared below must already exist in the database.
current.db = DAL(uri,pool_size=1, check_reserved=['all'], lazy_tables=False, migrate=False)
# Catalog of municipalities.
current.db.define_table('municipio',
Field('descripcion', type='string', length=20, required=True, notnull=True,
requires=[IS_NOT_EMPTY(error_message=('Este campo no puede ser vacio'))]),
)
# Catalog of roles/positions.
current.db.define_table('cargo',
Field('descripcion', type='string', length=20, required=True, notnull=True,
requires=[IS_NOT_EMPTY(error_message=('Este campo no puede ser vacio'))]),
)
# Link table between municipio and cargo with a composite primary key.
current.db.define_table('mun_cargo',
Field('fk_municipio', 'reference municipio'),
Field('fk_cargo', 'reference cargo'),
primarykey=['fk_municipio','fk_cargo'],
)
# Contact phone numbers (unique), each tied to a municipio and a cargo.
current.db.define_table('contacto',
Field('numero', type='string', length=11, required=True, notnull=True,unique=True,
requires=[IS_NOT_EMPTY(error_message=('Este campo no puede ser vacio'))]),
Field('fk_municipio_id', 'reference municipio',required=True),
Field('fk_cargo_id', 'reference cargo',required=True),
)
# Outgoing SMS messages with state flags (estado/estado_envio default to 1).
current.db.define_table('estado_mensaje',
Field('estado', length=1, required=True, notnull=True,default=1),
Field('estado_envio',length=1,required=True, notnull=True,default=1),
Field('fk_municipio_id', 'reference municipio',required=True),
Field('fk_cargo_id', 'reference cargo',required=True),
Field('destino',length=11,required=True, notnull=True),
Field('mensaje',length=160,required=True, notnull=True),
)
# -------------------------------------------------------------------------
# Generic (*.json/*.xml/...) views are allowed only for local requests.
response.generic_patterns = ['*'] if request.is_local else []
response.formstyle = myconf.get('forms.formstyle') # or 'bootstrap3_stacked' or 'bootstrap2' or other
response.form_label_separator = myconf.get('forms.separator') or ''
from gluon.tools import Auth, Service, PluginManager
# host names must be a list of allowed host names (glob syntax allowed)
auth = Auth(current.db, host_names=myconf.get('host.names'))
service = Service()
plugins = PluginManager()
# -------------------------------------------------------------------------
# create all tables needed by auth if not custom tables
# -------------------------------------------------------------------------
auth.define_tables(username=True, signature=False)
# -------------------------------------------------------------------------
# configure email
# -------------------------------------------------------------------------
mail = auth.settings.mailer
# In local development mail is only logged, never actually sent.
mail.settings.server = 'logging' if request.is_local else myconf.get('smtp.server')
mail.settings.sender = myconf.get('smtp.sender')
mail.settings.login = myconf.get('smtp.login')
mail.settings.tls = myconf.get('smtp.tls') or False
mail.settings.ssl = myconf.get('smtp.ssl') or False
# -------------------------------------------------------------------------
# configure auth policy: open registration, no email verification; password
# reset still requires verification.
# -------------------------------------------------------------------------
auth.settings.registration_requires_verification = False
auth.settings.registration_requires_approval = False
auth.settings.reset_password_requires_verification = True
| 45.988636 | 165 | 0.560909 |
e43dfd916520e80acf562c6592c0e2124190ae44
| 2,066 |
py
|
Python
|
dibs/src/dibs_link.py
|
emin63/dibs
|
419b2fad041aee40647429d3c1faac52c92c25a3
|
[
"MIT"
] | null | null | null |
dibs/src/dibs_link.py
|
emin63/dibs
|
419b2fad041aee40647429d3c1faac52c92c25a3
|
[
"MIT"
] | null | null | null |
dibs/src/dibs_link.py
|
emin63/dibs
|
419b2fad041aee40647429d3c1faac52c92c25a3
|
[
"MIT"
] | null | null | null |
import os
if (os.name == 'nt' or os.name == 'dos'):
try:
from win32com.shell import shell
import pythoncom
except Exception, e:
print 'WARNING: Received exception ' + `e` + ' in doing import.'
print 'WARNING: Unable to import win32com.shell.shell, pythoncom.'
print 'WARNING: Symbolic links and Shortcuts will not work.'
from win32com.shell import shell
import pythoncom, os
else:
| 34.433333 | 80 | 0.57696 |
e43f5553851f44ad5911378e9d31bfdce168b90d
| 1,207 |
py
|
Python
|
rfid/eggplant/pigeon/migrations/0003_auto_20160328_0809.py
|
psiyan/rfid
|
401a093958ffafdcd10259cc9e19b7bd9f0c0e8c
|
[
"Apache-2.0"
] | null | null | null |
rfid/eggplant/pigeon/migrations/0003_auto_20160328_0809.py
|
psiyan/rfid
|
401a093958ffafdcd10259cc9e19b7bd9f0c0e8c
|
[
"Apache-2.0"
] | null | null | null |
rfid/eggplant/pigeon/migrations/0003_auto_20160328_0809.py
|
psiyan/rfid
|
401a093958ffafdcd10259cc9e19b7bd9f0c0e8c
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-03-28 08:09
from __future__ import unicode_literals
from django.db import migrations, models
| 29.439024 | 93 | 0.584093 |
e43fd711dcd86e63949520216ee91e975352e431
| 10,839 |
py
|
Python
|
esp8266/main.py
|
0xSebin/SwimTime.github.io
|
e2d997464d1f4a36783638c81307a775cdfa7fcd
|
[
"MIT"
] | 1 |
2021-03-28T16:24:23.000Z
|
2021-03-28T16:24:23.000Z
|
esp8266/main.py
|
5ebin-thomas/SwimTime.github.io
|
e2d997464d1f4a36783638c81307a775cdfa7fcd
|
[
"MIT"
] | null | null | null |
esp8266/main.py
|
5ebin-thomas/SwimTime.github.io
|
e2d997464d1f4a36783638c81307a775cdfa7fcd
|
[
"MIT"
] | 2 |
2018-02-15T17:27:34.000Z
|
2019-11-20T10:00:43.000Z
|
"""
Group -
SwimTime - Swim your way to success
"""
import ads1x15
import network
import time
import math
import machine
from umqtt.simple import MQTTClient
import micropython
from micropython import const
from machine import Pin
"""
Define constant values
"""
run = False  # run/stop flag — presumably toggled by MQTT control messages; the handlers are not visible in this chunk
lapnr = 3 #default lap number
temp = 0.0  # last temperature reading — units presumably Celsius; verify
# NOTE(review): Wi-Fi and Adafruit IO credentials/API key are hard-coded in
# source — move them to a config file and rotate the exposed key.
wifi_ssid = "Alfabeta"
wifi_pswd = "12345678"
server = "io.adafruit.com"
user = "kk2314"
passwd = "674d8794c84d49008c5e0092dc6be24b"
# MQTT feed topics on Adafruit IO.
mqtt_temp = "kk2314/feeds/temp"
mqtt_time = "kk2314/feeds/time"
mqtt_rawdata = "kk2314/feeds/rawdata"
mqtt_control = "kk2314/feeds/control"
mqtt_stat = "kk2314/feeds/stat"
mqtt_debug = "kk2314/feeds/debug"
mqtt_tempalert = "kk2314/feeds/tempalert"
"""
Define pins for LED and buzzer
"""
red = Pin(0, Pin.OUT)
blue = Pin(2, Pin.OUT)
p12 = machine.Pin(12)
buzz = machine.PWM(p12)  # buzzer driven via PWM on GPIO12
#function to blink LED
# NOTE(review): the bare comments in this file ("function to blink LED",
# "callback", "main() function ...") refer to helper definitions (blink_LED,
# main, etc.) that are not visible in this chunk — presumably stripped
# during extraction; verify before running.
#setting up I2C for range finder/ set up ADC
i2c = machine.I2C(scl=machine.Pin(5), sda=machine.Pin(4), freq=100000)
adc = ads1x15.ADS1115(i2c)
adc.gain = 1 #ADS1015_REG_CONFIG_PGA_4_096V
#setting up I2C for temp sens
i2c_temp = machine.I2C(scl=machine.Pin(14), sda=machine.Pin(13), freq=100000)
#Received messages from subscriptions will be delivered to this callback
"""
Connect to the wifi
"""
sta_if = network.WLAN(network.STA_IF)
sta_if.active(True)
sta_if.scan()
sta_if.connect(wifi_ssid, wifi_pswd)
print('Connecting to Wi-Fi')
#while connecting blink LED and wait
while not sta_if.isconnected():
    blink_LED(red)
    pass
print('Wifi connected')
#Turn red LED on (active-low)
red.off()
# Turn off ESP8266's AP
ap_if = network.WLAN(network.AP_IF)
ap_if.active(False)
#Converts the data received from ultrasonic sensor into meters
#Send a read request and read information of temp sensor as well as convert temp into degree celcius
#sets up the buzzer to run a countdown composed of 3 short beeps and a long one
#converts secs into min and seconds
#main() function which executes sensing and mqtt push
if __name__ == "__main__":
    main(server)
| 28.448819 | 110 | 0.658456 |
e44176bdde09e0e534875279d12d7f2e7e878bfb
| 40,102 |
py
|
Python
|
pyboto3/workdocs.py
|
thecraftman/pyboto3
|
653a0db2b00b06708334431da8f169d1f7c7734f
|
[
"MIT"
] | null | null | null |
pyboto3/workdocs.py
|
thecraftman/pyboto3
|
653a0db2b00b06708334431da8f169d1f7c7734f
|
[
"MIT"
] | null | null | null |
pyboto3/workdocs.py
|
thecraftman/pyboto3
|
653a0db2b00b06708334431da8f169d1f7c7734f
|
[
"MIT"
] | null | null | null |
'''
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
def abort_document_version_upload(DocumentId=None, VersionId=None):
    """Abort a document version upload previously started with
    InitiateDocumentVersionUpload; call this only when the client no longer
    intends, or failed, to upload that version.

    See also: AWS API Documentation.

    :type DocumentId: string
    :param DocumentId: [REQUIRED] The ID of the document.
    :type VersionId: string
    :param VersionId: [REQUIRED] The ID of the version.
    """
    pass
def activate_user(UserId=None):
    """Activate the specified user; only active users can access Amazon
    WorkDocs.

    See also: AWS API Documentation.

    :type UserId: string
    :param UserId: [REQUIRED] The ID of the user.
    :rtype: dict
    :return: {'User': {...}} describing the activated user: Id, Username,
        EmailAddress, GivenName, Surname, OrganizationId, RootFolderId,
        RecycleBinFolderId, Status ('ACTIVE'|'INACTIVE'|'PENDING'),
        Type ('USER'|'ADMIN'), Created/Modified timestamps, TimeZoneId,
        Locale, and Storage ({'StorageUtilizedInBytes', 'StorageRule':
        {'StorageAllocatedInBytes', 'StorageType' ('UNLIMITED'|'QUOTA')}}).
    """
    pass
def add_resource_permissions(ResourceId=None, Principals=None):
    """Create a set of permissions for the specified folder or document;
    existing different permissions for the same principals are overwritten.

    See also: AWS API Documentation.

    :type ResourceId: string
    :param ResourceId: [REQUIRED] The ID of the resource.
    :type Principals: list
    :param Principals: [REQUIRED] The users, groups, or organization being
        granted permission.  Each element is a dict with required keys
        'Id' (recipient ID), 'Type'
        ('USER'|'GROUP'|'INVITE'|'ANONYMOUS'|'ORGANIZATION') and 'Role'
        ('VIEWER'|'CONTRIBUTOR'|'OWNER'|'COOWNER').
    :rtype: dict
    :return: {'ShareResults': [{'PrincipalId', 'Role',
        'Status' ('SUCCESS'|'FAILURE'), 'ShareId', 'StatusMessage'}, ...]}
    """
    pass
def can_paginate(operation_name=None):
    """Check whether the named client operation can be paginated.

    :type operation_name: string
    :param operation_name: The operation name, identical to the method name
        on the client.  For example, if the method name is create_foo
        (normally invoked as client.create_foo(**kwargs)) and the create_foo
        operation is paginatable, you can use
        client.get_paginator('create_foo').
    """
    pass
def create_folder(Name=None, ParentFolderId=None):
    """Create a folder with the specified name under the given parent folder.

    See also: AWS API Documentation.

    :type Name: string
    :param Name: The name of the new folder.
    :type ParentFolderId: string
    :param ParentFolderId: [REQUIRED] The ID of the parent folder.
    :rtype: dict
    :return: {'Metadata': {...}} describing the new folder: Id, Name,
        CreatorId, ParentFolderId, Created/Modified timestamps,
        ResourceState ('ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED'),
        Signature.
    """
    pass
def create_notification_subscription(OrganizationId=None, Endpoint=None, Protocol=None, SubscriptionType=None):
    """Configure WorkDocs to deliver Amazon SNS notifications.

    The endpoint receives a confirmation message and must confirm the
    subscription; see "Confirm the Subscription" in the Amazon Simple
    Notification Service Developer Guide.

    See also: AWS API Documentation.

    :type OrganizationId: string
    :param OrganizationId: [REQUIRED] The ID of the organization.
    :type Endpoint: string
    :param Endpoint: [REQUIRED] The endpoint that receives the
        notifications; for HTTPS this is a URL beginning with 'https://'.
    :type Protocol: string
    :param Protocol: [REQUIRED] The protocol to use; the supported value is
        https, which delivers JSON-encoded messages via HTTPS POST.
    :type SubscriptionType: string
    :param SubscriptionType: [REQUIRED] The notification type.
    :rtype: dict
    :return: {'Subscription': {'SubscriptionId', 'EndPoint', 'Protocol'}}
    """
    pass
def create_user(OrganizationId=None, Username=None, GivenName=None, Surname=None, Password=None, TimeZoneId=None, StorageRule=None):
    """Create a user in a Simple AD or Microsoft AD directory.

    Newly created users have status "ACTIVE" and can access Amazon WorkDocs.

    See also: AWS API Documentation.

    :type OrganizationId: string
    :param OrganizationId: The ID of the organization.
    :type Username: string
    :param Username: [REQUIRED] The login name of the user.
    :type GivenName: string
    :param GivenName: [REQUIRED] The given name of the user.
    :type Surname: string
    :param Surname: [REQUIRED] The surname of the user.
    :type Password: string
    :param Password: [REQUIRED] The password of the user.
    :type TimeZoneId: string
    :param TimeZoneId: The time zone ID of the user.
    :type StorageRule: dict
    :param StorageRule: The amount of storage for the user:
        'StorageAllocatedInBytes' (integer) and
        'StorageType' ('UNLIMITED'|'QUOTA').
    :rtype: dict
    :return: {'User': {...}} describing the new user: Id, Username,
        EmailAddress, GivenName, Surname, OrganizationId, RootFolderId,
        RecycleBinFolderId, Status, Type, Created/Modified timestamps,
        TimeZoneId, Locale, and Storage details.
    """
    pass
def deactivate_user(UserId=None):
    """Deactivate the specified user, revoking their Amazon WorkDocs access.

    See also: AWS API Documentation.

    :param UserId: (string, REQUIRED) The ID of the user.
    """
    pass
def delete_document(DocumentId=None):
    """Permanently delete the specified document and its associated metadata.

    See also: AWS API Documentation.

    :param DocumentId: (string, REQUIRED) The ID of the document.
    """
    pass
def delete_folder(FolderId=None):
    """Permanently delete the specified folder and its contents.

    See also: AWS API Documentation.

    :param FolderId: (string, REQUIRED) The ID of the folder.
    """
    pass
def delete_folder_contents(FolderId=None):
    """Delete the contents of the specified folder.

    See also: AWS API Documentation.

    :param FolderId: (string, REQUIRED) The ID of the folder.
    """
    pass
def delete_notification_subscription(SubscriptionId=None, OrganizationId=None):
    """Delete the specified subscription from the specified organization.

    See also: AWS API Documentation.

    :param SubscriptionId: (string, REQUIRED) The ID of the subscription.
    :param OrganizationId: (string, REQUIRED) The ID of the organization.
    """
    pass
def delete_user(UserId=None):
    """Delete the specified user from a Simple AD or Microsoft AD directory.

    See also: AWS API Documentation.

    :param UserId: (string, REQUIRED) The ID of the user.
    """
    pass
def describe_document_versions(DocumentId=None, Marker=None, Limit=None, Include=None, Fields=None):
    """Retrieve the document versions for the specified document.

    Only active versions are returned by default.
    See also: AWS API Documentation.

    :param DocumentId: (string, REQUIRED) The ID of the document.
    :param Marker: (string) Pagination marker received from a previous call.
    :param Limit: (integer) Maximum number of versions to return.
    :param Include: (string) Comma-separated values; 'INITIALIZED' also
        includes incomplete versions.
    :param Fields: (string) 'SOURCE' includes initialized versions and a URL
        for the source document.
    :return: (dict) {'DocumentVersions': [...], 'Marker': 'string'} where each
        version carries Id, Name, ContentType, Size, Signature, Status
        ('INITIALIZED'|'ACTIVE'), timestamps, CreatorId, and Thumbnail/Source
        string maps.
    """
    pass
def describe_folder_contents(FolderId=None, Sort=None, Order=None, Limit=None, Marker=None, Type=None, Include=None):
    """Describe the contents of a folder: its documents and sub-folders.

    By default the first 100 active document and folder metadata items are
    returned; a Marker in the response lets you request the next page.
    Initialized documents can also be requested.
    See also: AWS API Documentation.

    :param FolderId: (string, REQUIRED) The ID of the folder.
    :param Sort: (string) Sorting criteria: 'DATE'|'NAME'.
    :param Order: (string) 'ASCENDING'|'DESCENDING'.
    :param Limit: (integer) Maximum number of items to return.
    :param Marker: (string) Pagination marker received from a previous call.
    :param Type: (string) Item type filter: 'ALL'|'DOCUMENT'|'FOLDER'.
    :param Include: (string) 'INITIALIZED' also includes initialized documents.
    :return: (dict) {'Folders': [...], 'Documents': [...], 'Marker': 'string'}
        with folder metadata (Id, Name, CreatorId, ParentFolderId, timestamps,
        ResourceState, Signature) and document metadata including
        LatestVersionMetadata.
    """
    pass
def describe_notification_subscriptions(OrganizationId=None, Marker=None, Limit=None):
    """List the notification subscriptions for an organization.

    See also: AWS API Documentation.

    :param OrganizationId: (string, REQUIRED) The ID of the organization.
    :param Marker: (string) Pagination marker received from a previous call.
    :param Limit: (integer) Maximum number of items to return.
    :return: (dict) {'Subscriptions': [{'SubscriptionId', 'EndPoint',
        'Protocol': 'HTTPS'}, ...], 'Marker': 'string'}.
    """
    pass
def describe_resource_permissions(ResourceId=None, Limit=None, Marker=None):
    """Describe the permissions of a specified resource.

    See also: AWS API Documentation.

    :param ResourceId: (string, REQUIRED) The ID of the resource.
    :param Limit: (integer) Maximum number of items to return.
    :param Marker: (string) Pagination marker received from a previous call.
    :return: (dict) {'Principals': [...], 'Marker': 'string'} where each
        principal has an Id, a Type
        ('USER'|'GROUP'|'INVITE'|'ANONYMOUS'|'ORGANIZATION') and a list of
        Roles with Role ('VIEWER'|'CONTRIBUTOR'|'OWNER'|'COOWNER') and
        Type ('DIRECT'|'INHERITED').
    """
    pass
def describe_users(OrganizationId=None, UserIds=None, Query=None, Include=None, Order=None, Sort=None, Marker=None, Limit=None, Fields=None):
    """Describe users, optionally filtered by status or organization.

    By default the first 24 active or pending users are returned; a Marker in
    the response lets you request the next page.
    See also: AWS API Documentation.

    :param OrganizationId: (string) The ID of the organization.
    :param UserIds: (string) The IDs of the users.
    :param Query: (string) Filter users by user name.
    :param Include: (string) 'ALL'|'ACTIVE_PENDING'; 'ALL' includes inactive
        users.
    :param Order: (string) 'ASCENDING'|'DESCENDING'.
    :param Sort: (string) 'USER_NAME'|'FULL_NAME'|'STORAGE_LIMIT'|
        'USER_STATUS'|'STORAGE_USED'.
    :param Marker: (string) Pagination marker received from a previous call.
    :param Limit: (integer) Maximum number of items to return.
    :param Fields: (string) 'STORAGE_METADATA' includes the user storage
        quota and utilization information.
    :return: (dict) {'Users': [...], 'TotalNumberOfUsers': int,
        'Marker': 'string'}.
    """
    pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
    """Generate a presigned URL for a client method and its arguments.

    :param ClientMethod: (string) The client method to presign for.
    :param Params: (dict) The parameters normally passed to ClientMethod.
    :param ExpiresIn: (int) Number of seconds the presigned URL is valid;
        defaults to one hour (3600 seconds).
    :param HttpMethod: (string) HTTP method to use on the generated URL;
        defaults to whatever the method's model uses.
    """
    pass
def get_document(DocumentId=None):
    """Retrieve the specified document object.

    See also: AWS API Documentation.

    :param DocumentId: (string, REQUIRED) The ID of the document object.
    :return: (dict) {'Metadata': {...}} with the document's Id, CreatorId,
        ParentFolderId, timestamps, LatestVersionMetadata and ResourceState
        ('ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED').
    """
    pass
def get_document_path(DocumentId=None, Limit=None, Fields=None, Marker=None):
    """Retrieve the path (hierarchy from the root folder) of a document.

    By default up to 100 levels upwards are returned, carrying only the IDs
    of the parent folders; names can be requested via Fields and the level
    count limited via Limit.
    See also: AWS API Documentation.

    :param DocumentId: (string, REQUIRED) The ID of the document.
    :param Limit: (integer) Maximum number of hierarchy levels to return.
    :param Fields: (string) 'NAME' includes the names of the parent folders.
    :param Marker: (string) This value is not supported.
    :return: (dict) {'Path': {'Components': [{'Id', 'Name'}, ...]}}.
    """
    pass
def get_document_version(DocumentId=None, VersionId=None, Fields=None):
    """Retrieve version metadata for the specified document.

    See also: AWS API Documentation.

    :param DocumentId: (string, REQUIRED) The ID of the document.
    :param VersionId: (string, REQUIRED) The version ID of the document.
    :param Fields: (string) 'SOURCE' includes a URL for the source document.
    :return: (dict) {'Metadata': {...}} with the version's Id, Name,
        ContentType, Size, Signature, Status ('INITIALIZED'|'ACTIVE'),
        timestamps, CreatorId, and Thumbnail/Source string maps.
    """
    pass
def get_folder(FolderId=None):
    """Retrieve the metadata of the specified folder.

    See also: AWS API Documentation.

    :param FolderId: (string, REQUIRED) The ID of the folder.
    :return: (dict) {'Metadata': {...}} with the folder's Id, Name,
        CreatorId, ParentFolderId, timestamps, ResourceState
        ('ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED') and Signature.
    """
    pass
def get_folder_path(FolderId=None, Limit=None, Fields=None, Marker=None):
    """Retrieve the path (hierarchy from the root folder) of a folder.

    By default up to 100 levels upwards are returned, carrying only the IDs
    of the parent folders; names can be requested via Fields and the level
    count limited via Limit.
    See also: AWS API Documentation.

    :param FolderId: (string, REQUIRED) The ID of the folder.
    :param Limit: (integer) Maximum number of hierarchy levels to return.
    :param Fields: (string) 'NAME' includes the names of the parent folders.
    :param Marker: (string) This value is not supported.
    :return: (dict) {'Path': {'Components': [{'Id', 'Name'}, ...]}}.
    """
    pass
def get_paginator(operation_name=None):
    """Create a paginator for an operation.

    :param operation_name: (string) Same name as the client method; e.g. if
        the method is create_foo and the operation can be paginated, use
        client.get_paginator('create_foo').
    :return: L{botocore.paginate.Paginator}
    """
    pass
def get_waiter():
    """Return a waiter object (undocumented in this stub)."""
    pass
def initiate_document_version_upload(Id=None, Name=None, ContentCreatedTimestamp=None, ContentModifiedTimestamp=None, ContentType=None, DocumentSizeInBytes=None, ParentFolderId=None):
    """Create a new document object and version object.

    First step of a document upload: the client names the document and its
    parent folder (Id is only supplied when creating a new version of an
    existing document). Next, upload the content to the returned URL, then
    call UpdateDocumentVersion. Cancel with AbortDocumentVersionUpload.
    See also: AWS API Documentation.

    :param Id: (string) The ID of the document.
    :param Name: (string) The name of the document.
    :param ContentCreatedTimestamp: (datetime) When the document content was
        originally created.
    :param ContentModifiedTimestamp: (datetime) When the document content
        was modified.
    :param ContentType: (string) The content type of the document.
    :param DocumentSizeInBytes: (integer) The size of the document in bytes.
    :param ParentFolderId: (string, REQUIRED) The ID of the parent folder.
    :return: (dict) {'Metadata': {...}, 'UploadMetadata': {'UploadUrl':
        'string', 'SignedHeaders': {...}}}.
    """
    pass
def remove_all_resource_permissions(ResourceId=None):
    """Remove all permissions from the specified resource.

    See also: AWS API Documentation.

    :param ResourceId: (string, REQUIRED) The ID of the resource.
    """
    pass
def remove_resource_permission(ResourceId=None, PrincipalId=None, PrincipalType=None):
    """Remove one principal's permission from the specified resource.

    See also: AWS API Documentation.

    :param ResourceId: (string, REQUIRED) The ID of the resource.
    :param PrincipalId: (string, REQUIRED) The principal ID of the resource.
    :param PrincipalType: (string) One of
        'USER'|'GROUP'|'INVITE'|'ANONYMOUS'|'ORGANIZATION'.
    """
    pass
def update_document(DocumentId=None, Name=None, ParentFolderId=None, ResourceState=None):
    """Update attributes of a document.

    The user must have access to both the document and its parent folder,
    if applicable.
    See also: AWS API Documentation.

    :param DocumentId: (string, REQUIRED) The ID of the document.
    :param Name: (string) The name of the document.
    :param ParentFolderId: (string) The ID of the parent folder.
    :param ResourceState: (string) The resource state; only ACTIVE and
        RECYCLED are supported.
    """
    pass
def update_document_version(DocumentId=None, VersionId=None, VersionStatus=None):
    """Change the status of a document version to ACTIVE.

    Amazon WorkDocs also sets the document container to ACTIVE. This is the
    last step of a document upload, after the client uploads the content to
    the S3-presigned URL returned by InitiateDocumentVersionUpload.
    See also: AWS API Documentation.

    :param DocumentId: (string, REQUIRED) The ID of the document.
    :param VersionId: (string, REQUIRED) The version ID of the document.
    :param VersionStatus: (string) The status of the version ('ACTIVE').
    """
    pass
def update_folder(FolderId=None, Name=None, ParentFolderId=None, ResourceState=None):
    """Update attributes of a folder.

    The user must have access to both the folder and its parent folder,
    if applicable.
    See also: AWS API Documentation.

    :param FolderId: (string, REQUIRED) The ID of the folder.
    :param Name: (string) The name of the folder.
    :param ParentFolderId: (string) The ID of the parent folder.
    :param ResourceState: (string) The resource state; only ACTIVE and
        RECYCLED are accepted values from the API.
    """
    pass
def update_user(UserId=None, GivenName=None, Surname=None, Type=None, StorageRule=None, TimeZoneId=None, Locale=None):
    """Update user attributes; grant or revoke WorkDocs admin privileges.

    See also: AWS API Documentation.

    :param UserId: (string, REQUIRED) The ID of the user.
    :param GivenName: (string) The given name of the user.
    :param Surname: (string) The surname of the user.
    :param Type: (string) The type of the user: 'USER'|'ADMIN'.
    :param StorageRule: (dict) Storage for the user: StorageAllocatedInBytes
        (integer) and StorageType ('UNLIMITED'|'QUOTA').
    :param TimeZoneId: (string) The time zone ID of the user.
    :param Locale: (string) The locale of the user.
    :return: (dict) {'User': {...}} with the updated user metadata: Id,
        Username, EmailAddress, names, OrganizationId, folder IDs, Status,
        Type, timestamps, TimeZoneId, Locale and Storage.
    """
    pass
| 29.143895 | 310 | 0.573039 |
e4423151d9e155eac596c2c27348cae0215b843a
| 983 |
py
|
Python
|
binding/python/ddls/feeder/feeder.py
|
huzelin/ddls
|
3333a669c59ce2e525945f814a54784dafc6191b
|
[
"MIT"
] | 3 |
2019-01-03T07:34:01.000Z
|
2020-02-13T19:53:35.000Z
|
binding/python/ddls/feeder/feeder.py
|
huzelin/ddls
|
3333a669c59ce2e525945f814a54784dafc6191b
|
[
"MIT"
] | null | null | null |
binding/python/ddls/feeder/feeder.py
|
huzelin/ddls
|
3333a669c59ce2e525945f814a54784dafc6191b
|
[
"MIT"
] | 1 |
2020-05-06T11:08:07.000Z
|
2020-05-06T11:08:07.000Z
|
""" Feeder for batch production"""
from __future__ import absolute_import
import ctypes
from ddls.base import check_call, LIB, c_str, c_array
from ddls.feeder.batch_iterator import BatchIterator
| 27.305556 | 84 | 0.580875 |
e44339ec7d8d98173878c5ddc15f39e511c628ec
| 258 |
py
|
Python
|
tests/test_example.py
|
akoul1/mvlearn
|
177d391bb12c6e94335720d9af3608bd719d8be1
|
[
"Apache-2.0"
] | null | null | null |
tests/test_example.py
|
akoul1/mvlearn
|
177d391bb12c6e94335720d9af3608bd719d8be1
|
[
"Apache-2.0"
] | null | null | null |
tests/test_example.py
|
akoul1/mvlearn
|
177d391bb12c6e94335720d9af3608bd719d8be1
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from mvlearn.example.example import example_function
def test_example_function():
    """
    Test that example function returns correct value.

    Covers the default return value ("param") and echoing of an explicit
    argument.
    """
    assert example_function() == "param"
    assert example_function("hello") == "hello"
| 21.5 | 53 | 0.713178 |
e443a35a02a890811a35899fe38cc7d3bb4c7d5c
| 2,155 |
py
|
Python
|
api/resources/resources.py
|
arkhn/fhirball-server
|
b4d1a1c29dfff5ba60bfbb6b291f6bdb6e6ccd6e
|
[
"Apache-2.0"
] | 5 |
2018-12-21T13:20:12.000Z
|
2019-11-20T23:58:06.000Z
|
api/resources/resources.py
|
arkhn/fhir-ball-server
|
b4d1a1c29dfff5ba60bfbb6b291f6bdb6e6ccd6e
|
[
"Apache-2.0"
] | null | null | null |
api/resources/resources.py
|
arkhn/fhir-ball-server
|
b4d1a1c29dfff5ba60bfbb6b291f6bdb6e6ccd6e
|
[
"Apache-2.0"
] | null | null | null |
from flask_restful import Resource
import requests
from api.common.utils import file_response
ENCODING = 'utf-8'
SCHEMA_URL = 'http://127.0.0.1:8422'
STORE_URL = 'http://127.0.0.1:8423'
| 24.770115 | 87 | 0.624594 |
e445d667e0d2518eeb5e300fca8baeaa532b0501
| 427 |
py
|
Python
|
t_mongo.py
|
iloghyr/easy_python
|
b750f6817d54562b23630e2419bace19da0abf8b
|
[
"Apache-2.0"
] | 1 |
2018-03-01T02:42:52.000Z
|
2018-03-01T02:42:52.000Z
|
t_mongo.py
|
iloghyr/easy_python
|
b750f6817d54562b23630e2419bace19da0abf8b
|
[
"Apache-2.0"
] | null | null | null |
t_mongo.py
|
iloghyr/easy_python
|
b750f6817d54562b23630e2419bace19da0abf8b
|
[
"Apache-2.0"
] | null | null | null |
#!/bin/env python
#coding: utf-8
import pymongo
print [x for x in range(2)]
con = pymongo.MongoClient("localhost", 27017)
db = con.mars
collection = db.users
data = collection.find_one({"username":"hyr"})
print data
data['age'] = 225
print collection.update({"_idd":data['_id']}, data)
print collection.find_one({"username":"hyr"})
# for i in collection.find().sort('_id', pymongo.DESCENDING).limit(1):
# print i
| 17.08 | 70 | 0.683841 |
e4466c3b9ecc29dbb105b55c4d10907897f3d25c
| 742 |
py
|
Python
|
ArtificialData/RhoAndBeta.py
|
AlfLobos/DSP
|
1e1073c6b0da562b0aea3dec9d62bc563a3b46f5
|
[
"CNRI-Python"
] | null | null | null |
ArtificialData/RhoAndBeta.py
|
AlfLobos/DSP
|
1e1073c6b0da562b0aea3dec9d62bc563a3b46f5
|
[
"CNRI-Python"
] | null | null | null |
ArtificialData/RhoAndBeta.py
|
AlfLobos/DSP
|
1e1073c6b0da562b0aea3dec9d62bc563a3b46f5
|
[
"CNRI-Python"
] | null | null | null |
import numpy as np
| 35.333333 | 91 | 0.677898 |
e44985df33485739c9a738d44c1ed72af3c01cd0
| 3,208 |
py
|
Python
|
src/utils/greedy.py
|
vmgabriel/tabu-base
|
615c45e4d6b6fdb1c85c8fbaa316a1e6ce829fcd
|
[
"Apache-2.0"
] | null | null | null |
src/utils/greedy.py
|
vmgabriel/tabu-base
|
615c45e4d6b6fdb1c85c8fbaa316a1e6ce829fcd
|
[
"Apache-2.0"
] | null | null | null |
src/utils/greedy.py
|
vmgabriel/tabu-base
|
615c45e4d6b6fdb1c85c8fbaa316a1e6ce829fcd
|
[
"Apache-2.0"
] | null | null | null |
"""
Greedy Module Solution for Utils control
"""
# Libraries
from typing import List
from functools import reduce
# Modules
from src.utils.math import (
list_negative,
invert_positions,
evaluate_fo
)
# Constants
COMPARE_VALUE = 99999999
def worst_solution(distance_matrix: List[List[float]]) -> List[int]:
    """Generate a deliberately bad tour.

    Negates every distance and runs the nearest-neighbour heuristic on the
    negated matrix, so the "nearest" city is actually the farthest one.
    """
    # list_negative negates one row; mapping it negates the whole matrix.
    negative_matrix = list(map(
        list_negative,
        distance_matrix
    ))
    return neghbord_most_near(negative_matrix)
def neghbord_most_near(
    distance_matrix: List[List[float]],
    start_city: int = 0
) -> List[int]:
    """
    Build a tour with the nearest-neighbour heuristic.

    distance_matrix: List[List[float]] -> matrix of pairwise distances
    start_city: int -> index of the city the tour starts from
    return List[int] -> per-row choice: for each city, the closest city not
    yet used (the start city maps to itself)
    """
    # Cities already consumed; the start city is reserved up front.
    neghbord_used = [start_city]
    def city_most_near(line: int) -> int:
        """
        Return the unused city closest to `line` and mark it as used.
        Returns -1 when every other city has already been used.
        """
        compare_value = COMPARE_VALUE
        most_near = -1
        for key, value in enumerate(distance_matrix[line]):
            if (
                line != key and
                value < compare_value and
                key not in neghbord_used
            ):
                compare_value = value
                most_near = key
        neghbord_used.append(most_near)
        return most_near
    # NOTE(review): evaluation order matters — city_most_near mutates
    # neghbord_used as the map is consumed by list().
    return list(map(
        lambda x: city_most_near(x) if x != start_city else start_city,
        range(len(distance_matrix))
    ))
def best_change_not_tabu(
    matrix_distance: List[List[float]],
    solution: List[int]
) -> (float, tuple):
    """
    Find the position swap with the best objective-function value.

    Evaluates every pair (posx, posy) swap of the solution via
    invert_positions and keeps the one with the lowest evaluate_fo value.

    matrix_distance: List[List[float]] -> Matrix of distances
    solution: List[int] -> current solution (tour)
    return (float, (posx, posy)) -> best objective value and the swap
    positions producing it
    """
    # fun_before = evaluate_fo(matrix_distance, solution)
    best_fo = 1E+100
    position = (-1, -1)
    tam = len(solution)
    for posx in range(tam-1):
        # posy runs over positions after posx (posx < tam-1 here, so the
        # conditional start is effectively posx+1).
        for posy in range(posx+1 if posx+1 != tam else tam, tam):
            funobj = evaluate_fo(
                matrix_distance,
                invert_positions(solution, posx, posy)
            )
            if funobj < best_fo:
                best_fo = funobj
                position = (posx, posy)
    return (best_fo, position)
def generate_local_search(
    matrix_distance: List[List[float]],
    solution: List[int]
) -> (int, List[int]):
    """
    Run a best-improvement local search until no swap improves the tour.

    matrix_distance: List[List[float]] -> Matrix of distances
    solution: List[int] -> starting solution
    return (counter, best objective value, final solution)
    """
    counter = 0
    manage = True
    best_change = best_change_not_tabu(matrix_distance, solution)
    prev_change = (1E+100,)
    while manage:
        if prev_change[0] < best_change[0]:
            # The previous iteration was already better: local optimum.
            manage = False
        else:
            prev_change = best_change
            best_change = best_change_not_tabu(matrix_distance, solution)
            # Apply the best swap found for this iteration.
            solution = invert_positions(
                solution,
                origin=best_change[1][0],
                destiny=best_change[1][1]
            )
            counter += 1
    return (
        counter,
        # Report the better of the last two objective values (ignoring a
        # zero prev value).
        (
            prev_change[0]
            if prev_change[0] < best_change[0] and
            prev_change[0] != 0
            else best_change[0]
        ),
        solution
    )
| 25.870968 | 73 | 0.5798 |
e45010e55211f1d8b353af0fb64ccf62757ae1c3
| 5,649 |
py
|
Python
|
codes/models/modules/Inv_arch.py
|
lin-zhao-resoLve/Symmetric-Enhancement
|
11c1a662020582d1333d11cf5f9c99556ec0f427
|
[
"Apache-2.0"
] | 14 |
2021-09-30T07:05:04.000Z
|
2022-03-31T08:22:39.000Z
|
codes/models/modules/Inv_arch.py
|
lin-zhao-resoLve/Symmetric-Enhancement
|
11c1a662020582d1333d11cf5f9c99556ec0f427
|
[
"Apache-2.0"
] | 3 |
2021-11-09T06:52:13.000Z
|
2021-11-20T08:00:46.000Z
|
codes/models/modules/Inv_arch.py
|
lin-zhao-resoLve/Symmetric-Enhancement
|
11c1a662020582d1333d11cf5f9c99556ec0f427
|
[
"Apache-2.0"
] | null | null | null |
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from models.modules.model.vgg16 import Vgg16
import os
vgg = Vgg16()
vgg.load_state_dict(torch.load(os.path.join(os.path.abspath('.'), 'models/modules/model/', 'vgg16.weight')))
params = list(vgg.named_parameters())
encoding1 = params[0][1].data
encoding2 = params[2][1].data
# class encoder(nn.Module):
# def __init__(self, in_channels, out_channels, num_features):
# super(encoder, self).__init__()
# stride = 1
# padding = 1
# kernel_size = 3
# self.conv1 = nn.Conv2d(in_channels, 2*num_features, kernel_size, stride=stride, padding=padding)
# self.conv2 = nn.Conv2d(2*num_features, num_features, kernel_size, stride=stride, padding=padding)
# self.conv3 = nn.Conv2d(num_features, out_channels, kernel_size=1, stride=1)
# self.prelu = nn.PReLU(num_parameters=1, init=0.2)
#
# def forward(self, x, rev=False):
# x1 = self.prelu(self.conv1(x))
# x2 = self.prelu(self.conv2(x1))
# x3 = self.prelu(self.conv3(x2))
# return x3
| 36.211538 | 108 | 0.615861 |
e450e0a78fcbebd70da772f87d262f552594b525
| 56 |
py
|
Python
|
FrontRunner.py
|
mmaist/FrontRunner
|
05095421b69a0a5ccf4ef53ae3dc35b8e8b926b7
|
[
"MIT"
] | 1 |
2021-02-18T10:41:36.000Z
|
2021-02-18T10:41:36.000Z
|
FrontRunner.py
|
mmaist/FrontRunner
|
05095421b69a0a5ccf4ef53ae3dc35b8e8b926b7
|
[
"MIT"
] | null | null | null |
FrontRunner.py
|
mmaist/FrontRunner
|
05095421b69a0a5ccf4ef53ae3dc35b8e8b926b7
|
[
"MIT"
] | null | null | null |
import time
import random
import json
import requests
| 8 | 15 | 0.821429 |
e4520356b6e60cb7ea00f5353a2466e715bcd995
| 1,642 |
py
|
Python
|
py_algo/dynamic_programming/introduction/equal_array.py
|
Sk0uF/Algorithms
|
236cc5b056ce2637d5d947c5fc1e3367cde886bf
|
[
"MIT"
] | 1 |
2021-07-05T15:39:04.000Z
|
2021-07-05T15:39:04.000Z
|
py_algo/dynamic_programming/introduction/equal_array.py
|
Sk0uF/Algorithms
|
236cc5b056ce2637d5d947c5fc1e3367cde886bf
|
[
"MIT"
] | null | null | null |
py_algo/dynamic_programming/introduction/equal_array.py
|
Sk0uF/Algorithms
|
236cc5b056ce2637d5d947c5fc1e3367cde886bf
|
[
"MIT"
] | 1 |
2021-09-02T21:31:34.000Z
|
2021-09-02T21:31:34.000Z
|
"""
Codemonk link: https://www.hackerearth.com/practice/algorithms/dynamic-programming/introduction-to-dynamic-programming-1/practice-problems/algorithm/equal-array-84cf6c5f/
You are given an array A of size N. Find the minimum non negative number X such that there exists an index j that when
you can replace Aj by Aj+X, the sum of elements of the array from index 1 to j and j+1 to N become equal where
1 <= j <= N-1. Assume array to be 1-indexed. If there is no possible X print -1 in a separate line.
Input - Output:
The first line contains the number of test cases.
The first line of each test case contains an integer N,which denotes the size of the array.
The second line contains N space-separated integers where the ith integer denotes Ai.
Sample input:
1
5
1 2 3 2 1
Sample Output:
3
"""
"""
We can simply find the partial sums array, iterate throught the array end at each step check for the minimum X number
that is required.
Final complexity: O(N)
"""
t = int(input())
for _ in range(t):
n = int(input())
array = list(map(int, input().split()))
partial_sums = [array[0]]
for i in range(1, n):
partial_sums.append(array[i]+partial_sums[i-1])
ans = float("inf")
stop = False
for i in range(n):
if partial_sums[i] < partial_sums[-1] - partial_sums[i]:
val = partial_sums[-1] - 2*partial_sums[i]
ans = min(ans, val)
if partial_sums[i] == partial_sums[-1] - partial_sums[i]:
print(0)
stop = True
break
if not stop:
if ans != float("inf"):
print(ans)
else:
print(-1)
| 30.407407 | 170 | 0.658343 |
e4526af2d705bb3c47b1ba3a6b79144d1876aeeb
| 1,331 |
py
|
Python
|
model.py
|
mollikka/Penrose
|
6d9870f54e9810f7e2f4ea82bb619424785a65db
|
[
"MIT"
] | 1 |
2019-07-17T02:46:45.000Z
|
2019-07-17T02:46:45.000Z
|
model.py
|
mollikka/Penrose
|
6d9870f54e9810f7e2f4ea82bb619424785a65db
|
[
"MIT"
] | null | null | null |
model.py
|
mollikka/Penrose
|
6d9870f54e9810f7e2f4ea82bb619424785a65db
|
[
"MIT"
] | null | null | null |
from itertools import chain
phi = 1.61803398875
| 22.183333 | 72 | 0.480841 |
e45397111350f9273e2cc86843e6973c134d6e85
| 1,465 |
py
|
Python
|
src/tests/unittests/configuration_helper/adapters/test_keysight_e8267d_instrument_adapter.py
|
QuTech-Delft/qilib
|
a87892f8a9977ed338c36e8fb1e262b47449cf44
|
[
"MIT"
] | 1 |
2019-02-20T16:56:30.000Z
|
2019-02-20T16:56:30.000Z
|
src/tests/unittests/configuration_helper/adapters/test_keysight_e8267d_instrument_adapter.py
|
QuTech-Delft/qilib
|
a87892f8a9977ed338c36e8fb1e262b47449cf44
|
[
"MIT"
] | 22 |
2019-02-16T06:10:55.000Z
|
2022-02-15T18:52:34.000Z
|
src/tests/unittests/configuration_helper/adapters/test_keysight_e8267d_instrument_adapter.py
|
QuTech-Delft/qilib
|
a87892f8a9977ed338c36e8fb1e262b47449cf44
|
[
"MIT"
] | 2 |
2020-02-04T08:46:21.000Z
|
2020-10-18T16:31:58.000Z
|
import unittest
from unittest.mock import call, patch, Mock, MagicMock
from qilib.configuration_helper import InstrumentAdapterFactory
| 47.258065 | 112 | 0.661433 |
e455b64eee36fc129ded8331905ce5976719baa2
| 1,364 |
py
|
Python
|
scripts/mint.py
|
tomazmm/artsyapes-contract
|
95b10e1c73aa4e0712ff8d5162271e84aec91810
|
[
"Apache-2.0"
] | null | null | null |
scripts/mint.py
|
tomazmm/artsyapes-contract
|
95b10e1c73aa4e0712ff8d5162271e84aec91810
|
[
"Apache-2.0"
] | null | null | null |
scripts/mint.py
|
tomazmm/artsyapes-contract
|
95b10e1c73aa4e0712ff8d5162271e84aec91810
|
[
"Apache-2.0"
] | null | null | null |
import json
import pprint
import random
from terra_sdk.core import AccAddress, Coins
from terra_sdk.core.auth import StdFee
from terra_sdk.core.broadcast import BlockTxBroadcastResult
from scripts.deploy import owner, lt
from terra_sdk.core.wasm import MsgExecuteContract
if __name__ == '__main__':
main()
| 28.416667 | 98 | 0.662023 |
e45875441dea1d18e8ce1f3858f85bde9799b868
| 281 |
py
|
Python
|
url_shortener/exceptions.py
|
alena-kono/simple-shortener
|
d1549b342e190ff70509ce5b442cb31376f2a07a
|
[
"MIT"
] | null | null | null |
url_shortener/exceptions.py
|
alena-kono/simple-shortener
|
d1549b342e190ff70509ce5b442cb31376f2a07a
|
[
"MIT"
] | null | null | null |
url_shortener/exceptions.py
|
alena-kono/simple-shortener
|
d1549b342e190ff70509ce5b442cb31376f2a07a
|
[
"MIT"
] | null | null | null |
from core.exceptions import BaseProjectException
| 20.071429 | 55 | 0.761566 |
e4589a7ec39dfb446ef1fe4c8fd01bbb42b8704d
| 1,507 |
py
|
Python
|
enbios/processing/indicators/__init__.py
|
ENVIRO-Module/enbios
|
10e93df9a168627833eca6d04e4e2b864de8e8d9
|
[
"BSD-3-Clause"
] | 2 |
2022-01-28T09:38:28.000Z
|
2022-01-28T09:38:32.000Z
|
enbios/processing/indicators/__init__.py
|
ENVIRO-Module/enbios
|
10e93df9a168627833eca6d04e4e2b864de8e8d9
|
[
"BSD-3-Clause"
] | 1 |
2022-01-27T21:42:42.000Z
|
2022-01-27T21:42:42.000Z
|
enbios/processing/indicators/__init__.py
|
ENVIRO-Module/enbios
|
10e93df9a168627833eca6d04e4e2b864de8e8d9
|
[
"BSD-3-Clause"
] | null | null | null |
import math
from nexinfosys.model_services import State
materials = {
"Aluminium",
"Antimony",
"Arsenic",
"Baryte",
"Beryllium",
"Borates",
"Cadmium",
"Cerium",
"Chromium",
"Cobalt",
"Copper",
"Diatomite",
"Dysprosium",
"Europium",
"Fluorspar",
"Gadolinium",
"Gallium",
"Gold",
"Gypsum",
"IronOre",
"KaolinClay",
"Lanthanum",
"Lead",
"Lithium",
"Magnesite",
"Magnesium",
"Manganese",
"Molybdenum",
"NaturalGraphite",
"Neodymium",
"Nickel",
"Palladium",
"Perlite",
"Phosphorus",
"Platinum",
"Praseodymium",
"Rhenium",
"Rhodium",
"Samarium",
"Selenium",
"SiliconMetal",
"Silver",
"Strontium",
"Sulphur",
"Talc",
"Tantalum",
"Tellurium",
"Terbium",
"Tin",
"Titanium",
"Tungsten",
"Vanadium",
"Yttrium",
"Zinc",
"Zirconium"
}
| 17.125 | 43 | 0.50564 |
e45a47a7a23107da9b1e4e894dbe004e6d56eaf1
| 2,933 |
py
|
Python
|
Python Exercises/Exercise 8 - Functions/Functions- Decorators & Generators.py
|
mrankitgupta/PythonLessons
|
119efc58518c5b35c6647009c74ff96728f851fa
|
[
"MIT"
] | null | null | null |
Python Exercises/Exercise 8 - Functions/Functions- Decorators & Generators.py
|
mrankitgupta/PythonLessons
|
119efc58518c5b35c6647009c74ff96728f851fa
|
[
"MIT"
] | null | null | null |
Python Exercises/Exercise 8 - Functions/Functions- Decorators & Generators.py
|
mrankitgupta/PythonLessons
|
119efc58518c5b35c6647009c74ff96728f851fa
|
[
"MIT"
] | null | null | null |
# defining a decorator
# defining a function, to be called inside wrapper
# passing 'function_to_be_used' inside the decorator to control its behaviour
function_to_be_used = hello_decorator(function_to_be_used)
# calling the function
function_to_be_used()
# find out the execution time of a function using a decorator
# importing libraries
import time
import math
# decorator to calculate duration # taken by any function.
# this can be added to any function present, in this case to calculate a factorial
# calling the function.
factorial(10)
# Chaining Decorators
# code for testing decorator chaining
print(num())
# Decorators with parameters in Python
# Generator Function
# A generator function that yields 1 for first time, 2 second time and 3 third time
def simpleGeneratorFun():
yield 1
yield 2
yield 3
# Driver code to check above generator function
for value in simpleGeneratorFun():
print(value)
# A Python program to demonstrate use of generator object with next()
# A generator function
# x is a generator object
x = simpleGeneratorFun()
# Iterating over the generator object using next
print(x.next()) # In Python 3, __next__()
print(x.next())
print(x.next())
| 30.237113 | 144 | 0.691101 |
e45a7bbe70e7b8614eb0c9109018644cf05fb490
| 24,654 |
py
|
Python
|
src/1-topicmodeling.py
|
sofieditmer/topic_modeling
|
edfff3c4d45c932562f796cc81e9ce9fe35f8e4b
|
[
"MIT"
] | null | null | null |
src/1-topicmodeling.py
|
sofieditmer/topic_modeling
|
edfff3c4d45c932562f796cc81e9ce9fe35f8e4b
|
[
"MIT"
] | null | null | null |
src/1-topicmodeling.py
|
sofieditmer/topic_modeling
|
edfff3c4d45c932562f796cc81e9ce9fe35f8e4b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""
Info: This script performs topic modeling on the clean tweets by Donald Trump. The number of topics is estimated by computing coherence values for different number of topics, and an LDA model is constructed with the number of topics with the highest coherence value. Visualizations of the topics are created relying on pyLDAvis and wordcloud and these visualizations are saved in the output directory.
Parameters:
(optional) input_file: str <name-of-input-file>, default = clean_trump_tweets.csv
(optional) chunk_size: int <size-of-chunks>, default = 10
(optional) passes: int <number-of-passes>, default = 10
(optional) min_count: int <minimum-count-bigrams>, default = 2
(optional) threshold: int <threshold-for-keeping-phrases>, default = 100
(optional) iterations: int <number-of-iterations>, default = 100
(optional) rolling_mean: int <rolling-mean>, default = 50
(optional) step_size: int <size-of-steps>, default = 5
Usage:
$ python 1-topicmodeling.py
Output:
- topics.txt: overview of topics generated by the LDA model
- dominant_topic.csv: table showing the most dominant topics and their associated keywords as well as how much each topic contributes.
- topic_contributions.csv: a dataframe showing the most contributing keywords for each topic.
- topics_over_time.jpg: visualization of the topic contributions over time.
- topic_wordclouds.png: the topics visualized as word clouds.
"""
### DEPENDENCIES ###
# core libraries
import sys
import os
sys.path.append(os.path.join(".."))
# numpy, pandas, pyplot
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
# spaCy
import spacy
nlp = spacy.load("en_core_web_sm", disable=["ner"])
nlp.max_length = 68000000 # increasing maximum length
# pyLDAvis and seaborn for vizualisations
import pyLDAvis.gensim
import seaborn as sns
# matplotlib colors
import matplotlib.colors as mcolors
# wordcloud tools
from wordcloud import WordCloud
# LDA tools
import gensim
import gensim.corpora as corpora
from gensim.models import CoherenceModel
from utils import lda_utils
# Ignore warnings
import logging, warnings
warnings.filterwarnings('ignore')
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.ERROR)
# argparse
import argparse
### MAIN FUNCTION ###
### TOPIC MODELING ###
# Creating Topic modeling class
# Define behaviour when called from command line
if __name__=="__main__":
main()
| 46.693182 | 402 | 0.626511 |
e45a8dc57b1450e18797d47ff570959f3d7e2d31
| 15,086 |
py
|
Python
|
EEG_Lightning/dassl/data/datasets/ProcessDataBase_v1.py
|
mcd4874/NeurIPS_competition
|
4df1f222929e9824a55c9c4ae6634743391b0fe9
|
[
"MIT"
] | 23 |
2021-10-14T02:31:06.000Z
|
2022-01-25T16:26:44.000Z
|
EEG_Lightning/dassl/data/datasets/ProcessDataBase_v1.py
|
mcd4874/NeurIPS_competition
|
4df1f222929e9824a55c9c4ae6634743391b0fe9
|
[
"MIT"
] | null | null | null |
EEG_Lightning/dassl/data/datasets/ProcessDataBase_v1.py
|
mcd4874/NeurIPS_competition
|
4df1f222929e9824a55c9c4ae6634743391b0fe9
|
[
"MIT"
] | 1 |
2022-03-05T06:54:11.000Z
|
2022-03-05T06:54:11.000Z
|
"""
William DUong
"""
import os.path as osp
import os
import errno
from .build import DATASET_REGISTRY
from .base_dataset import Datum, DatasetBase,EEGDatum
from scipy.io import loadmat
import numpy as np
from collections import defaultdict
| 45.032836 | 166 | 0.65173 |
e45ad99677d6577af2671852ef4f62636067fd15
| 9,321 |
py
|
Python
|
pywolf3d/level_editor/app.py
|
jammers-ach/pywolf3d
|
3e305d7bdb9aa4f38ae5cf460ed22c54efe8980c
|
[
"MIT"
] | null | null | null |
pywolf3d/level_editor/app.py
|
jammers-ach/pywolf3d
|
3e305d7bdb9aa4f38ae5cf460ed22c54efe8980c
|
[
"MIT"
] | null | null | null |
pywolf3d/level_editor/app.py
|
jammers-ach/pywolf3d
|
3e305d7bdb9aa4f38ae5cf460ed22c54efe8980c
|
[
"MIT"
] | null | null | null |
import argparse
import json
from ursina import load_texture, Ursina, Entity, color, camera, Quad, mouse, time, window, invoke, WindowPanel, \
Text, InputField, Space, scene, Button, Draggable, Tooltip, Scrollable
from pywolf3d.games.wolf3d import WALL_DEFS, WallDef, OBJECT_DEFS
Z_GRID = 0
Z_OBJECT = 2
Z_WALL = 3
def start_editor(fname, path_to_game):
app = Ursina()
editor = LevelEditor(fname, path_to_game)
app.run()
def run():
parser = argparse.ArgumentParser(description='Mapmaker for pywolf3d')
parser.add_argument('level', help='path to level to load')
parser.add_argument('--path', help='path to wolf3d datafiles (default ./wolfdata)',
default="./wolfdata/")
args = parser.parse_args()
start_editor(args.level, args.path)
if __name__ == '__main__':
run()
| 29.590476 | 122 | 0.55037 |
e45ba78572ce87d65bc9fa965f1a8af3685baf94
| 3,404 |
py
|
Python
|
code/data_mgmt.py
|
TomDonoghue/EEGparam
|
a3e747094617479122900688643fa396ecbf8bab
|
[
"MIT"
] | 8 |
2021-08-17T05:22:40.000Z
|
2022-03-23T02:03:48.000Z
|
code/data_mgmt.py
|
TomDonoghue/EEGparam
|
a3e747094617479122900688643fa396ecbf8bab
|
[
"MIT"
] | 1 |
2020-12-09T13:22:03.000Z
|
2021-01-27T01:56:09.000Z
|
code/data_mgmt.py
|
TomDonoghue/EEGparam
|
a3e747094617479122900688643fa396ecbf8bab
|
[
"MIT"
] | 4 |
2021-06-20T14:44:38.000Z
|
2021-12-11T11:21:26.000Z
|
"""Functions for loading and data management for EEG-FOOOF."""
from os.path import join as pjoin
import numpy as np
from fooof import FOOOFGroup
from fooof.analysis import get_band_peak_fg
from settings import BANDS, YNG_INDS, OLD_INDS, N_LOADS, N_SUBJS, N_TIMES
###################################################################################################
###################################################################################################
def reshape_data(data):
"""Reshape loaded data objects into subsets for YNG and OLD groups."""
yng_data = np.vstack([data[0, YNG_INDS, :], data[1, YNG_INDS, :], data[2, YNG_INDS, :]])
old_data = np.vstack([data[0, OLD_INDS, :], data[1, OLD_INDS, :], data[2, OLD_INDS, :]])
return yng_data, old_data
def load_fooof_task_md(data_path, side='Contra', folder='FOOOF'):
"""Load task data in for all subjects, selects & return metadata."""
# Collect measures together from FOOOF results into matrices
all_r2s = np.zeros(shape=[N_LOADS, N_SUBJS, N_TIMES])
all_errs = np.zeros(shape=[N_LOADS, N_SUBJS, N_TIMES])
for li, load in enumerate(['Load1', 'Load2', 'Load3']):
pre, early, late = _load_fgs(data_path, folder, side, load)
for ind, fg in enumerate([pre, early, late]):
all_r2s[li, :, ind] = fg.get_params('r_squared')
all_errs[li, :, ind] = fg.get_params('error')
return all_r2s, all_errs
def load_fooof_task_ap(data_path, side='Contra', folder='FOOOF'):
"""Loads task data in for all subjects, selects and return aperiodic FOOOF outputs.
data_path : path to where data
side: 'Ipsi' or 'Contra'
"""
# Collect measures together from FOOOF results into matrices
all_exps = np.zeros(shape=[N_LOADS, N_SUBJS, N_TIMES])
all_offsets = np.zeros(shape=[N_LOADS, N_SUBJS, N_TIMES])
for li, load in enumerate(['Load1', 'Load2', 'Load3']):
pre, early, late = _load_fgs(data_path, folder, side, load)
for ind, fg in enumerate([pre, early, late]):
all_exps[li, :, ind] = fg.get_params('aperiodic_params', 'exponent')
all_offsets[li, :, ind] = fg.get_params('aperiodic_params', 'offset')
return all_offsets, all_exps
def load_fooof_task_pe(data_path, side='Contra', param_ind=1, folder='FOOOF'):
"""Loads task data for all subjects, selects and return periodic FOOOF outputs.
data_path : path to where data
side: 'Ipsi' or 'Contra'
"""
# Collect measures together from FOOOF results into matrices
all_alphas = np.zeros(shape=[N_LOADS, N_SUBJS, N_TIMES])
for li, load in enumerate(['Load1', 'Load2', 'Load3']):
pre, early, late = _load_fgs(data_path, folder, side, load)
for ind, fg in enumerate([pre, early, late]):
temp_alphas = get_band_peak_fg(fg, BANDS.alpha)
all_alphas[li, :, ind] = temp_alphas[:, param_ind]
return all_alphas
def _load_fgs(data_path, folder, side, load):
"""Helper to load FOOOFGroups."""
# Load the FOOOF analyses of the average
pre, early, late = FOOOFGroup(), FOOOFGroup(), FOOOFGroup()
pre.load('Group_' + load + '_' + side + '_Pre', pjoin(data_path, folder))
early.load('Group_' + load + '_' + side + '_Early', pjoin(data_path, folder))
late.load('Group_' + load + '_' + side + '_Late', pjoin(data_path, folder))
return pre, early, late
| 35.831579 | 99 | 0.623384 |
e45c0f05cdc7fe7a2e45a2f57230877bc9ba6968
| 413 |
py
|
Python
|
match_shapes.py
|
KyojiOsada/Python-Library
|
b06e50454c56c84c2abb96e6f68d35117ea5f4b5
|
[
"Apache-2.0"
] | null | null | null |
match_shapes.py
|
KyojiOsada/Python-Library
|
b06e50454c56c84c2abb96e6f68d35117ea5f4b5
|
[
"Apache-2.0"
] | null | null | null |
match_shapes.py
|
KyojiOsada/Python-Library
|
b06e50454c56c84c2abb96e6f68d35117ea5f4b5
|
[
"Apache-2.0"
] | null | null | null |
import sys
import cv2
import numpy as np
img1 = cv2.imread('source1.jpg',0)
img2 = cv2.imread('source2.jpg',0)
ret, thresh = cv2.threshold(img1, 127, 255,0)
ret, thresh2 = cv2.threshold(img2, 127, 255,0)
contours,hierarchy,a = cv2.findContours(thresh,2,1)
cnt1 = contours[0]
contours,hierarchy,a = cv2.findContours(thresh2,2,1)
cnt2 = contours[0]
ret = cv2.matchShapes(cnt1,cnt2,1,0.0)
print(ret)
sys.exit()
| 20.65 | 52 | 0.72155 |
e45c3482ede83aa24d104869dacc8d42f601273f
| 25,556 |
py
|
Python
|
SlicerModules/SegmentConnectedParzenPDF/SegmentConnectedParzenPDF.py
|
jcfr/TubeTK
|
3791790e206b5627a35c46f86eeb9671c8d4190f
|
[
"Apache-2.0"
] | 1 |
2019-07-19T09:27:37.000Z
|
2019-07-19T09:27:37.000Z
|
SlicerModules/SegmentConnectedParzenPDF/SegmentConnectedParzenPDF.py
|
jcfr/TubeTK
|
3791790e206b5627a35c46f86eeb9671c8d4190f
|
[
"Apache-2.0"
] | null | null | null |
SlicerModules/SegmentConnectedParzenPDF/SegmentConnectedParzenPDF.py
|
jcfr/TubeTK
|
3791790e206b5627a35c46f86eeb9671c8d4190f
|
[
"Apache-2.0"
] | 1 |
2019-07-19T09:28:56.000Z
|
2019-07-19T09:28:56.000Z
|
import os
from __main__ import vtk, qt, ctk, slicer
import EditorLib
from EditorLib.EditOptions import HelpButton
from EditorLib.EditOptions import EditOptions
from EditorLib import EditUtil
from EditorLib import LabelEffect
#
# EditorEffectTemplateTool
#
#
# EditorEffectTemplateLogic
#
#
# The InteractiveConnectedComponentsUsingParzenPDFs Template class definition
#
#
# EditorEffectTemplate
#
#
# EditorEffectTemplateWidget
#
| 44.138169 | 292 | 0.755204 |
e45d9ac1d7f7347063075b259a658688aa945eb7
| 415 |
py
|
Python
|
category/urls.py
|
amin-bahiraei-75/shop_back
|
afcc5907fe33de2db1615f14df71443d1a35bbd0
|
[
"MIT"
] | 1 |
2021-12-24T15:20:37.000Z
|
2021-12-24T15:20:37.000Z
|
category/urls.py
|
amin-bahiraei-75/shop_back
|
afcc5907fe33de2db1615f14df71443d1a35bbd0
|
[
"MIT"
] | null | null | null |
category/urls.py
|
amin-bahiraei-75/shop_back
|
afcc5907fe33de2db1615f14df71443d1a35bbd0
|
[
"MIT"
] | null | null | null |
from django.urls import path
from category.views import List,Detail,Create,Delete,Update,Search,All
urlpatterns = [
path('all',All.as_view()),
path('list/<int:pk>',List.as_view()),
path('search/<str:pk>',Search.as_view()),
path('detail/<int:pk>',Detail.as_view()),
path('create', Create.as_view()),
path('delete/<int:pk>', Delete.as_view()),
path('update/<int:pk>', Update.as_view()),
]
| 34.583333 | 70 | 0.653012 |
e460d64b915b9a1607000858e70b226926b3124a
| 3,488 |
py
|
Python
|
led_motor_switch.py
|
scarmel/iot-demo
|
02c6d810098720803196bf32ee1780925011f57c
|
[
"Apache-2.0"
] | null | null | null |
led_motor_switch.py
|
scarmel/iot-demo
|
02c6d810098720803196bf32ee1780925011f57c
|
[
"Apache-2.0"
] | null | null | null |
led_motor_switch.py
|
scarmel/iot-demo
|
02c6d810098720803196bf32ee1780925011f57c
|
[
"Apache-2.0"
] | null | null | null |
# ------------------------------------------
# Description: This python script will update AWS Thing Shadow for a Device/Thing
# ------------------------------------------
# Import package
import paho.mqtt.client as mqtt
import ssl, time, sys
# =======================================================
# Set Following Variables
# AWS IoT Endpoint
MQTT_HOST = "your aws iot endpoint"
# CA Root Certificate File Path
CA_ROOT_CERT_FILE = "path for the aws root certificate file"
# AWS IoT Thing Name
THING_NAME = "your thing name"
# AWS IoT Thing Certificate File Path
THING_CERT_FILE = "path for your device certificate file"
# AWS IoT Thing Private Key File Path
THING_PRIVATE_KEY_FILE = "path for your device private key"
# =======================================================
# =======================================================
# No need to change following variables
MQTT_PORT = 8883
MQTT_KEEPALIVE_INTERVAL = 45
SHADOW_UPDATE_TOPIC = "$aws/things/" + THING_NAME + "/shadow/update"
SHADOW_UPDATE_ACCEPTED_TOPIC = "$aws/things/" + THING_NAME + "/shadow/update/accepted"
SHADOW_UPDATE_REJECTED_TOPIC = "$aws/things/" + THING_NAME + "/shadow/update/rejected"
SHADOW_STATE_DOC_LED_ON = """{"state" : {"desired" : {"LED" : "ON"}}}"""
SHADOW_STATE_DOC_LED_OFF = """{"state" : {"desired" : {"LED" : "OFF"}}}"""
RESPONSE_RECEIVED = False
# =======================================================
# Initiate MQTT Client
mqttc = mqtt.Client("led_switch_client")
# Define on_message event function.
# This function will be invoked every time,
# a new message arrives for the subscribed topic
# Register callback functions
mqttc.on_message = on_message
mqttc.on_connect = on_connect
# Configure TLS Set
mqttc.tls_set(CA_ROOT_CERT_FILE, certfile=THING_CERT_FILE, keyfile=THING_PRIVATE_KEY_FILE, cert_reqs=ssl.CERT_REQUIRED,
tls_version=ssl.PROTOCOL_TLSv1_2, ciphers=None)
# Connect with MQTT Broker
mqttc.connect(MQTT_HOST, MQTT_PORT, MQTT_KEEPALIVE_INTERVAL)
mqttc.loop_start()
print "Enter 1 to Turn On the LED"
print "Enter 2 to Turn OFF the LED"
print "Enter 3 to exit"
data = raw_input("Select an option:")
if data == "1":
mqttc.publish(SHADOW_UPDATE_TOPIC, SHADOW_STATE_DOC_LED_ON, qos=1)
elif data == "2":
mqttc.publish(SHADOW_UPDATE_TOPIC, SHADOW_STATE_DOC_LED_OFF, qos=1)
elif data == "3":
sys.exit()
else:
print("Invalid input try again...")
sys.exit()
# Wait for Response
Counter = 1
while True:
if RESPONSE_RECEIVED == True:
break
print "I have finished my work!!!"
# time.sleep(1)
# if Counter == 10:
# print "No response from AWS IoT. Check your Settings."
# break
# elif RESPONSE_RECEIVED == True:
# break
| 32.90566 | 119 | 0.65539 |
e462bb80e8e5cfe48f10d58ffcdefb6c7a4fc2ec
| 680 |
py
|
Python
|
test.py
|
jsayles/LPD8806
|
6f13b65ae92f3bd903df684459964b8f5f621942
|
[
"MIT"
] | null | null | null |
test.py
|
jsayles/LPD8806
|
6f13b65ae92f3bd903df684459964b8f5f621942
|
[
"MIT"
] | null | null | null |
test.py
|
jsayles/LPD8806
|
6f13b65ae92f3bd903df684459964b8f5f621942
|
[
"MIT"
] | null | null | null |
import time
from lightpi.hardware import strip, string1, string2
DELAY_SEC = 0.3
# Test the RGB Strip
strip.red()
time.sleep(DELAY_SEC)
strip.green()
time.sleep(DELAY_SEC)
strip.blue()
time.sleep(DELAY_SEC)
strip.off()
# Test the LED Strings
string1.on()
time.sleep(DELAY_SEC)
string1.off()
time.sleep(DELAY_SEC)
string2.on()
time.sleep(DELAY_SEC)
string2.off()
################################################################################
# Helper Methods
################################################################################
| 17.435897 | 80 | 0.535294 |
e4634c0a0adb3cc0d16bbbb61f40f718de94ef2b
| 3,141 |
py
|
Python
|
wind_direction.py
|
simseve/weatherstation
|
68196a032a2cd39062f3924ce6d386f5f54af393
|
[
"MIT"
] | null | null | null |
wind_direction.py
|
simseve/weatherstation
|
68196a032a2cd39062f3924ce6d386f5f54af393
|
[
"MIT"
] | null | null | null |
wind_direction.py
|
simseve/weatherstation
|
68196a032a2cd39062f3924ce6d386f5f54af393
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# wind_direction.py
#
# Copyright 2020 <Simone Severini>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import time
import board
import busio
import adafruit_ads1x15.ads1115 as ADS
from adafruit_ads1x15.analog_in import AnalogIn
| 27.313043 | 71 | 0.561605 |
e4639c8948f8a93b0256a4c34b5d407b8adc42bc
| 3,875 |
py
|
Python
|
oswin_tempest_plugin/tests/_mixins/migrate.py
|
openstack/oswin-tempest-plugin
|
59e6a14d01dda304c7d11fda1d35198f25799d6c
|
[
"Apache-2.0"
] | 6 |
2017-10-31T10:40:24.000Z
|
2019-01-28T22:08:15.000Z
|
oswin_tempest_plugin/tests/_mixins/migrate.py
|
openstack/oswin-tempest-plugin
|
59e6a14d01dda304c7d11fda1d35198f25799d6c
|
[
"Apache-2.0"
] | null | null | null |
oswin_tempest_plugin/tests/_mixins/migrate.py
|
openstack/oswin-tempest-plugin
|
59e6a14d01dda304c7d11fda1d35198f25799d6c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Cloudbase Solutions SRL
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.common import waiters
import testtools
from oswin_tempest_plugin import config
CONF = config.CONF
| 35.87963 | 79 | 0.659613 |
e466a39aa7123e6924bb036424ddce439a785489
| 2,572 |
py
|
Python
|
articulation_structure/nodes/process_bags.py
|
tum-vision/articulation
|
3bb714fcde14b8d47977bd3b3da2c2cd13ebe685
|
[
"BSD-2-Clause"
] | 3 |
2017-03-15T16:50:05.000Z
|
2021-02-28T05:27:24.000Z
|
articulation_structure/nodes/process_bags.py
|
AbdelrahmanElsaid/articulation
|
3bb714fcde14b8d47977bd3b3da2c2cd13ebe685
|
[
"BSD-2-Clause"
] | null | null | null |
articulation_structure/nodes/process_bags.py
|
AbdelrahmanElsaid/articulation
|
3bb714fcde14b8d47977bd3b3da2c2cd13ebe685
|
[
"BSD-2-Clause"
] | 7 |
2015-07-14T14:47:51.000Z
|
2018-04-02T16:22:23.000Z
|
#!/usr/bin/python
import rospy
import rosbag
import time
| 25.72 | 84 | 0.66563 |
e468b2b5e8f04b80c414c4137b991f429ffae653
| 2,508 |
py
|
Python
|
kedro/extras/logging/color_logger.py
|
daniel-falk/kedro
|
19187199339ddc4a757aaaa328f319ec4c1e452a
|
[
"Apache-2.0"
] | 2,047 |
2022-01-10T15:22:12.000Z
|
2022-03-31T13:38:56.000Z
|
kedro/extras/logging/color_logger.py
|
daniel-falk/kedro
|
19187199339ddc4a757aaaa328f319ec4c1e452a
|
[
"Apache-2.0"
] | 170 |
2022-01-10T12:44:31.000Z
|
2022-03-31T17:01:24.000Z
|
kedro/extras/logging/color_logger.py
|
daniel-falk/kedro
|
19187199339ddc4a757aaaa328f319ec4c1e452a
|
[
"Apache-2.0"
] | 112 |
2022-01-10T19:15:24.000Z
|
2022-03-30T11:20:52.000Z
|
"""A logging handler class which produces coloured logs."""
import logging
import click
| 26.125 | 74 | 0.548644 |
e46b6c69ae3a4c3f1fee528d4d729291bff4cf8d
| 1,468 |
py
|
Python
|
qt_figure.py
|
liwenlongonly/LogAnalyzer
|
4981c0673cf0d1a52ad76e473ffc1c30bb6bf22b
|
[
"Apache-2.0"
] | null | null | null |
qt_figure.py
|
liwenlongonly/LogAnalyzer
|
4981c0673cf0d1a52ad76e473ffc1c30bb6bf22b
|
[
"Apache-2.0"
] | null | null | null |
qt_figure.py
|
liwenlongonly/LogAnalyzer
|
4981c0673cf0d1a52ad76e473ffc1c30bb6bf22b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from PyQt5 import QtCore
import numpy as np
from matplotlib.figure import Figure
import time
import matplotlib
matplotlib.use("Qt5Agg") # QT5
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
| 32.622222 | 83 | 0.636921 |
e46c1072625294f177cc250fe85584da3ad9a985
| 124,267 |
py
|
Python
|
python/target_selection/cartons/bhm_spiders_agn.py
|
sdss/target_selection
|
7196bf1491c4e9c18140301c7001e503f391a8e1
|
[
"BSD-3-Clause"
] | 3 |
2020-07-07T01:38:59.000Z
|
2020-11-24T21:46:58.000Z
|
python/target_selection/cartons/bhm_spiders_agn.py
|
sdss/target_selection
|
7196bf1491c4e9c18140301c7001e503f391a8e1
|
[
"BSD-3-Clause"
] | 26 |
2020-05-28T07:18:54.000Z
|
2021-11-30T18:36:10.000Z
|
python/target_selection/cartons/bhm_spiders_agn.py
|
sdss/target_selection
|
7196bf1491c4e9c18140301c7001e503f391a8e1
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Author: Tom Dwelly
# @Date: 2020-03-03
# @Filename: bhm_spiders_agn.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
# derived from guide.py
# ### flake8: noqa
# isort: skip_file
import peewee
from peewee import JOIN
from peewee import fn
from target_selection.cartons.base import BaseCarton
# general catalogdb imports
from sdssdb.peewee.sdss5db.catalogdb import (
Catalog,
EROSITASupersetAGN,
)
# imports of existing spectro catalogues
from sdssdb.peewee.sdss5db.catalogdb import (
CatalogToSDSS_DR16_SpecObj,
SDSS_DR16_SpecObj,
CatalogToBHM_eFEDS_Veto,
BHM_eFEDS_Veto,
SDSSV_BOSS_SPALL,
SDSSV_BOSS_Conflist,
SDSSV_Plateholes,
SDSSV_Plateholes_Meta,
)
# additional imports required by bhm_spiders_agn_lsdr8
from sdssdb.peewee.sdss5db.catalogdb import (
CatalogToLegacy_Survey_DR8,
Legacy_Survey_DR8,
)
# additional imports required by bhm_spiders_agn_gaiadr2
from sdssdb.peewee.sdss5db.catalogdb import (
CatalogToTIC_v8,
TIC_v8,
)
# additional imports required by bhm_spiders_agn_ps1dr2
from sdssdb.peewee.sdss5db.catalogdb import (
Panstarrs1,
CatalogToPanstarrs1,
)
# additional imports required by bhm_spiders_agn_skymapperdr2
from sdssdb.peewee.sdss5db.catalogdb import (
SkyMapper_DR2,
CatalogToSkyMapper_DR2,
)
# additional imports required by bhm_spiders_agn_supercosmos
from sdssdb.peewee.sdss5db.catalogdb import (
SuperCosmos,
CatalogToSuperCosmos,
CatalogToCatWISE2020,
)
from target_selection.mag_flux import AB2nMgy, AB2Jy
# used by cartons that need to compute Galactic latitude:
north_gal_pole_ra = 192.85948 # deg, J2000
north_gal_pole_dec = +27.12825 # deg, J2000
# ############################################
# ############################################
# ############################################
# ############################################
# This provides the following BHM SPIDERS AGN cartons in v0.5:
# * bhm_spiders_agn_lsdr8
# * bhm_spiders_agn_efeds_stragglers
# * bhm_spiders_agn_gaiadr2
# * bhm_spiders_agn_sep
# * bhm_spiders_agn_ps1dr2
# * bhm_spiders_agn_skymapperdr2
# bhm_spiders_agn_supercosmos
# ############################################
# ############################################
# ############################################
# ############################################
# some reference points for AB->nMgy conversions
# 30.0 AB = 1e-3 nMgy
# 22.5 AB = 1.0 nMgy
# 22.0 AB = 1.58489 nMgy
# 21.5 AB = 2.51189 nMgy
# 21.0 AB = 3.98107 nMgy
# 20.0 AB = 10.0 nMgy
# 18.5 AB = 39.8107 nMgy
# 16.5 AB = 251.189 nMgy
# 14.0 AB = 2511.89 nMgy
# 13.5 AB = 3981.07 nMgy
# some reference points for AB->Jy conversions (for ps1_dr2 fluxes)
# 30.0 AB = 3.631e-9 Jy
# 22.5 AB = 3.631e-6 Jy
# 22.0 AB = 5.754e-6 Jy
# 21.5 AB = 9.120e-6 Jy
# 21.0 AB = 1.445e-5 Jy
# 20.5 AB = 2.291e-5 Jy
# 18.5 AB = 1.445e-4 Jy
# 16.5 AB = 9.120e-4 Jy
# 14.0 AB = 9.120e-3 Jy
# 13.5 AB = 1.445e-2 Jy
# Notes on catalogdb.panstarrs1.flags aka objInfoFlag from ObjectThin
# https://outerspace.stsci.edu/display/PANSTARRS/PS1+ObjectThin+table+fields
# https://outerspace.stsci.edu/display/PANSTARRS/PS1+Object+Flags
# select objects that have the GOOD_STACK flag set:
# Flag name value decimal Notes
# GOOD_STACK 0x08000000 134217728 good-quality object in the stack (> 1 good stack measurement)
# Use these two flags to decide whether to use aper mags or not
# Flag name value decimal Notes
# EXT 0x00800000 8388608 extended in our data (eg, PS)
# EXT_ALT 0x01000000 16777216 extended in external data (eg, 2MASS)
# Notes on how many targets to expect:
# sdss5db=> SELECT ero_version,xmatch_method,xmatch_version,opt_cat,count(*)
# FROM erosita_superset_agn GROUP BY ero_version,xmatch_method,xmatch_version,opt_cat;
# ero_version | xmatch_method | xmatch_version | opt_cat | count
# --------------------------+----------------+--------------------------+--------------+--------
# eFEDS_c001_V18C_V3_ext | XPS/MLR | Merged_03DEC2020 | lsdr8 | 14
# eFEDS_c001_V18C_V3_ext | XPS/NWAY | Merged_03DEC2020 | lsdr8 | 248
# eFEDS_c001_V18C_V3_main | XPS/MLR | Merged_03DEC2020 | lsdr8 | 794
# eFEDS_c001_V18C_V3_main | XPS/NWAY | Merged_03DEC2020 | lsdr8 | 26575
# em01_c946_201008_poscorr | XPS/NWAY | JWMS_CW2_v_03_TDopt | gaiadr2 | 441175
# em01_c946_201008_poscorr | XPS/NWAY | JWMS_CW2_v_03_TDopt | lsdr8 | 305076
# em01_c946_201008_poscorr | XPS/NWAY | JWMS_CW2_v_03_TDopt | ps1dr2 | 241150
# em01_c946_201008_poscorr | XPS/NWAY | JWMS_CW2_v_03_TDopt | skymapperdr2 | 312372
# em01_c946_201008_poscorr | XPS/NWAY | JWMS_v_03 | catwise2020 | 740691
# em01_c946_201008_poscorr | XPS/NWAY | JWMS_v_40 | lsdr8 | 345189
# em01_SEP_c946 | XPS/NWAY | SEP_CW2_07DEC2020 | catwise2020 | 32268
# em01_SEP_c946 | XPS/NWAY | SEP_CW2_07DEC2020_TDopt | gaiadr2 | 309
# em01_SEP_c946 | XPS/NWAY | SEP_CW2_NOV2020_MSopt | gaiadr2 | 740
# (13 rows)
# Notes on avoiding saturated legacysurvey sources
# https://www.legacysurvey.org/dr8/bitmasks/
# Bit Name Description
# 0 NPRIMARY touches a pixel that is outside the BRICK_PRIMARY region of a brick
# 1 BRIGHT touches a pixel within the locus of a radius-magnitude relation for
# Tycho-2 stars or one for Gaia DR2 stars to G < 13
# 2 SATUR_G touches a pixel that was saturated in at least one g-band image
# 3 SATUR_R touches a pixel that was saturated in at least one r-band image
# 4 SATUR_Z touches a pixel that was saturated in at least one z-band image
# 5 ALLMASK_G touches a pixel that has any of the ALLMASK_G bits set
# 6 ALLMASK_R touches a pixel that has any of the ALLMASK_R bits set
# 7 ALLMASK_Z touches a pixel that has any of the ALLMASK_Z bits set
# 8 WISEM1 touches a pixel in a WISEMASK_W1 bright star mask
# 9 WISEM2 touches a pixel in a WISEMASK_W2 bright star mask
# 10 BAILOUT touches a pixel in a blob where we "bailed out" of source fitting
# 11 MEDIUM touches a pixel within the locus of a radius-magnitude relation
# for Gaia DR2 stars to G < 16
# 12 GALAXY touches a pixel in an SGA large galaxy
# 13 CLUSTER touches a pixel in a globular cluster
#
# so, mask to avoid saturated targets is 2**2 + 2**3 + 2**4 = 4+8+16 = 28
#
# END PREAMBLE
# ##################################################################################
#
# END BhmSpidersAgnLsdr8Carton
# ##################################################################################
#
# END BhmSpidersAgnEfedsStragglersCarton
# ##################################################################################
#
# END BhmSpidersAgnGaiadr2Carton
# ##################################################################################
#
# END BhmSpidersAgnSepCarton
# ##################################################################################
#
# END BhmSpidersAgnPs1dr2Carton
# ##################################################################################
#
# END BhmSpidersAgnSkyMapperDr2Carton
# ##################################################################################
#
# END BhmSpidersAgnSuperCosmosCarton
# ##################################################################################
| 41.325906 | 388 | 0.500036 |
e47016cbd72e7098c0941f4e47d79ce1b7c698d1
| 776 |
py
|
Python
|
back-end/erasmail/emails/migrations/0038_auto_20210422_0059.py
|
SamirM-BE/ErasMail
|
88602a039ae731ca8566c96c7c4d2635f82a07a5
|
[
"Apache-2.0"
] | 7 |
2021-02-06T21:06:23.000Z
|
2022-01-31T09:33:26.000Z
|
back-end/erasmail/emails/migrations/0038_auto_20210422_0059.py
|
SamirM-BE/ErasMail
|
88602a039ae731ca8566c96c7c4d2635f82a07a5
|
[
"Apache-2.0"
] | null | null | null |
back-end/erasmail/emails/migrations/0038_auto_20210422_0059.py
|
SamirM-BE/ErasMail
|
88602a039ae731ca8566c96c7c4d2635f82a07a5
|
[
"Apache-2.0"
] | 5 |
2021-05-07T15:35:25.000Z
|
2022-03-21T09:11:24.000Z
|
# Generated by Django 3.1.6 on 2021-04-22 00:59
from django.db import migrations, models
| 28.740741 | 117 | 0.619845 |
e4705f3acb58336e0e7ad1a046d3910433815d04
| 1,488 |
py
|
Python
|
worldmap/src/worldmap/model/dtm.py
|
expertanalytics/fagkveld
|
96e16b9610e8b60d36425e7bc5435d266de1f8bf
|
[
"BSD-2-Clause"
] | null | null | null |
worldmap/src/worldmap/model/dtm.py
|
expertanalytics/fagkveld
|
96e16b9610e8b60d36425e7bc5435d266de1f8bf
|
[
"BSD-2-Clause"
] | null | null | null |
worldmap/src/worldmap/model/dtm.py
|
expertanalytics/fagkveld
|
96e16b9610e8b60d36425e7bc5435d266de1f8bf
|
[
"BSD-2-Clause"
] | null | null | null |
"""
Data terrain model (DTM).
Examples::
>>> from worldmap import DTM
>>> dtm = DTM()
>>> print(dtm["NOR"])
Location('Norway')
"""
from typing import Dict, List, Tuple, Set, Optional
from bokeh.models import Model
from bokeh.models import ColumnDataSource, Patches, LabelSet
import logging
import numpy as np
from .location import Location
from .coloring import set_location_colors
from ..utils.data_fetcher import get_world_topology, get_country_polygon
| 25.220339 | 74 | 0.638441 |
e4710ca29b9a5a6a2747143e02042c64942aa376
| 1,865 |
py
|
Python
|
src/blockchain/crypto_tools/__init__.py
|
ParisNeo/blockchain
|
8bc2768a3e89088e202ea89e5f301576f6f9d95c
|
[
"MIT"
] | null | null | null |
src/blockchain/crypto_tools/__init__.py
|
ParisNeo/blockchain
|
8bc2768a3e89088e202ea89e5f301576f6f9d95c
|
[
"MIT"
] | null | null | null |
src/blockchain/crypto_tools/__init__.py
|
ParisNeo/blockchain
|
8bc2768a3e89088e202ea89e5f301576f6f9d95c
|
[
"MIT"
] | null | null | null |
"""
"""
from Crypto.PublicKey import RSA
from Crypto.Hash import SHA256
from Crypto.Signature import PKCS1_v1_5
from base58 import b58encode, b58decode
# ================= cryptography helpers ======================================
def hash(data):
"""Hash some data
"""
# Hash the stuff we need to hash
digest = SHA256.new()
digest.update(data)
hash= digest.hexdigest()
return hash
def sign(private_key:RSA.RsaKey, message):
"""Sign a message
Parameters
----------
private_key (RSAPublicKey) : The private key to sign the message with
message (str) : The message to be signed
"""
hasher = SHA256.new(message)
signer = PKCS1_v1_5.new(private_key)
signature = signer.sign(hasher)
return signature
def verify(public_key, message, signature):
"""Verify the message signature
Parameters
----------
public_key (RSAPublicKey) : The public key to verify that the sender is the right one
message (str) : The signed message (used for verification)
signature (str) : The signature
"""
hasher = SHA256.new(message)
verifier = PKCS1_v1_5.new(public_key)
return verifier.verify(hasher, signature)
def privateKey2Text(key:RSA.RsaKey):
"""Converts a private key to text
"""
return b58encode(key.exportKey('DER'))
def publicKey2Text(key:RSA.RsaKey):
"""Converts a public key to text
"""
return b58encode(key.exportKey('DER'))
def text2PrivateKey(text:str):
"""Convert a text to a private key
"""
return RSA.importKey(b58decode(text))
def text2PublicKey(text:str):
"""Convert a text to a key
"""
return RSA.importKey(b58decode(text))
| 26.267606 | 92 | 0.642895 |
e472ad25bd9133e0e1fe623219e0826f24f2f7ef
| 365 |
py
|
Python
|
Mandelbrot fractal/visualize.py
|
TTimerkhanov/parallel_computing
|
75c79a4e50ac2f5f9fab90cd79560cd8e848228e
|
[
"MIT"
] | 8 |
2018-03-21T12:26:44.000Z
|
2019-10-05T08:46:20.000Z
|
Mandelbrot fractal/visualize.py
|
TTimerkhanov/parallel_computing
|
75c79a4e50ac2f5f9fab90cd79560cd8e848228e
|
[
"MIT"
] | null | null | null |
Mandelbrot fractal/visualize.py
|
TTimerkhanov/parallel_computing
|
75c79a4e50ac2f5f9fab90cd79560cd8e848228e
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
mandelbrot(120, 6000)
| 22.8125 | 48 | 0.60274 |
e472f3485c0e5680d2198a1ae932c5d7712b5057
| 19,303 |
py
|
Python
|
tools/unit_tests/test_config_transforms.py
|
dice-project/DICE-deployment-service
|
e209c6a061a78f170e81cfc03d2959af0283ed15
|
[
"Apache-2.0"
] | 2 |
2018-04-03T20:45:26.000Z
|
2022-02-07T19:53:42.000Z
|
tools/unit_tests/test_config_transforms.py
|
dice-project/DICE-deployment-service
|
e209c6a061a78f170e81cfc03d2959af0283ed15
|
[
"Apache-2.0"
] | 3 |
2016-11-15T10:41:43.000Z
|
2020-03-16T07:49:03.000Z
|
tools/unit_tests/test_config_transforms.py
|
dice-project/DICE-deployment-service
|
e209c6a061a78f170e81cfc03d2959af0283ed15
|
[
"Apache-2.0"
] | 2 |
2018-07-04T11:37:12.000Z
|
2022-02-07T19:53:43.000Z
|
import unittest
import copy
import os
import tempfile
from config_tool.utils import *
if __name__ == '__main__':
unittest.main()
| 38.528942 | 80 | 0.600425 |
e47311462a03c6a7eb9b40addcc16befdf99f631
| 2,133 |
py
|
Python
|
code/venv/lib/python3.8/site-packages/datadog_api_client/v2/model/permission_attributes.py
|
Valisback/hiring-engineers
|
7196915dd5a429ae27c21fa43d527f0332e662ed
|
[
"Apache-2.0"
] | null | null | null |
code/venv/lib/python3.8/site-packages/datadog_api_client/v2/model/permission_attributes.py
|
Valisback/hiring-engineers
|
7196915dd5a429ae27c21fa43d527f0332e662ed
|
[
"Apache-2.0"
] | null | null | null |
code/venv/lib/python3.8/site-packages/datadog_api_client/v2/model/permission_attributes.py
|
Valisback/hiring-engineers
|
7196915dd5a429ae27c21fa43d527f0332e662ed
|
[
"Apache-2.0"
] | null | null | null |
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from datadog_api_client.v2.model_utils import (
ModelNormal,
cached_property,
datetime,
)
| 30.042254 | 108 | 0.61369 |
e4751dd89498b1da7109ee5f07994f5fbd04447a
| 95 |
py
|
Python
|
vulture/whitelists/logging_whitelist.py
|
kianmeng/vulture
|
b8cbc44dac89b2a96f6da7033424f52525d6f574
|
[
"MIT"
] | 2,081 |
2017-03-06T14:45:21.000Z
|
2022-03-31T13:29:34.000Z
|
vulture/whitelists/logging_whitelist.py
|
kianmeng/vulture
|
b8cbc44dac89b2a96f6da7033424f52525d6f574
|
[
"MIT"
] | 248 |
2017-03-06T12:13:37.000Z
|
2022-03-15T11:21:27.000Z
|
vulture/whitelists/logging_whitelist.py
|
kianmeng/vulture
|
b8cbc44dac89b2a96f6da7033424f52525d6f574
|
[
"MIT"
] | 111 |
2017-03-06T20:48:04.000Z
|
2022-03-17T09:49:32.000Z
|
import logging
logging.Filter.filter
logging.getLogger().propagate
logging.StreamHandler.emit
| 15.833333 | 29 | 0.852632 |
e479315b2fec6b1b30374526a8f3ec4a57556364
| 536 |
py
|
Python
|
tests/test_set_key.py
|
GustavoKatel/pushbullet-cli
|
e5102772752a97db539594b0d50b5effb36a22e2
|
[
"MIT"
] | 176 |
2017-01-30T16:21:48.000Z
|
2022-02-10T05:32:57.000Z
|
tests/test_set_key.py
|
GustavoKatel/pushbullet-cli
|
e5102772752a97db539594b0d50b5effb36a22e2
|
[
"MIT"
] | 49 |
2017-01-21T20:27:03.000Z
|
2022-01-16T02:57:51.000Z
|
tests/test_set_key.py
|
GustavoKatel/pushbullet-cli
|
e5102772752a97db539594b0d50b5effb36a22e2
|
[
"MIT"
] | 21 |
2017-01-26T06:08:54.000Z
|
2022-01-04T19:53:25.000Z
|
import platform
import pytest
| 25.52381 | 73 | 0.673507 |
e4797fdc0550e8c83ce7e94b28483dfdbf77d5d3
| 344 |
py
|
Python
|
02. Programacion estructurada/05. datos tupla/e1.py
|
Cidryl/python-desde-cero
|
fade09d13ab0ed0cbb4f45a49a4ad9e3980f3276
|
[
"MIT"
] | null | null | null |
02. Programacion estructurada/05. datos tupla/e1.py
|
Cidryl/python-desde-cero
|
fade09d13ab0ed0cbb4f45a49a4ad9e3980f3276
|
[
"MIT"
] | null | null | null |
02. Programacion estructurada/05. datos tupla/e1.py
|
Cidryl/python-desde-cero
|
fade09d13ab0ed0cbb4f45a49a4ad9e3980f3276
|
[
"MIT"
] | null | null | null |
# bloque principal
fecha=cargar_fecha()
imprimir_fecha(fecha)
| 22.933333 | 46 | 0.639535 |
e47c17dddcd00889d223b3d8fce4a9d9c3d285a3
| 356 |
py
|
Python
|
doingmathwithpython/ch03.py
|
andyliumathematics/mlnotes
|
7b7a1c37d7660bdf9144c59693719033080d654b
|
[
"Apache-2.0"
] | null | null | null |
doingmathwithpython/ch03.py
|
andyliumathematics/mlnotes
|
7b7a1c37d7660bdf9144c59693719033080d654b
|
[
"Apache-2.0"
] | null | null | null |
doingmathwithpython/ch03.py
|
andyliumathematics/mlnotes
|
7b7a1c37d7660bdf9144c59693719033080d654b
|
[
"Apache-2.0"
] | null | null | null |
# %%
l = [38,32,49,15,806,806]
sum(l)
# %%
len(l)
# %%
sum(l)//len(l)
# %%
l.sort()
# %%
l
# %%
# %%
l.most_common(1)
# %%
'''
'''
from collections import Counter
l = ['38','32','49','15','806','806']
c = Counter(l)
print(c.most_common()[0][0])
print(c.most_common(1))
print(c.most_common(2))
# %%
c.most_common()[0]
# %%
print(33)
# %%
| 11.483871 | 37 | 0.530899 |
e4813380bf2daa72d111c3321e1a0890661d1b92
| 5,475 |
py
|
Python
|
CodedCaching/Network.py
|
qizhu8/CodedCachingSim
|
84e8f1e58e1c431ee4916525487d4b28f92e629b
|
[
"MIT"
] | null | null | null |
CodedCaching/Network.py
|
qizhu8/CodedCachingSim
|
84e8f1e58e1c431ee4916525487d4b28f92e629b
|
[
"MIT"
] | null | null | null |
CodedCaching/Network.py
|
qizhu8/CodedCachingSim
|
84e8f1e58e1c431ee4916525487d4b28f92e629b
|
[
"MIT"
] | null | null | null |
"""
Network class is in charge of:
1. Storing M - User Cache Size, N - Number of Files, K - Number of Users
2. Storing User instances, Server instance, and attacker instance
"""
import numpy as np
from scipy import special
import itertools
from Server import Server
from User import User
from tabulate import tabulate
T_BE_INTEGER = True
if __name__ == "__main__":
# if t is specified, M is not needed. Currently, I only consider t to be a positive integer.
# M: unified cache size per user (if t is specified, M is not useful anymore)
# N: number of files in the network
# K: number of users in the network
# t: M*K/N,
# M, N, K, t = -1, 3, 3, 1
M, N, K, t = -1, 3, 5, 3
# M, N, K, t = -1, 4, 5, 2
codedCachingNetwork = Network(M=M, N=N, K=K, t=t, fileId2Alphabet=True)
print(codedCachingNetwork)
# codedCachingNetwork.placement(verboseForCache=True, verboseForUser=True, isRandom=True)
codedCachingNetwork.placement(verboseForCache=True, verboseForUser=True, isRandom=False)
X_D_table = []
# for D in itertools.combinations_with_replacement(range(N), K):
for D in codedCachingNetwork.allD():
D, X, groupList = codedCachingNetwork.delivery(verbose=False, D=D) # generate X based on D
groupList
D_str = ",".join(list(map(lambda d: chr(65+ d), D)))
X_D_table.append(["["+D_str+"]"] + codedCachingNetwork.printableServerTransmission(X, inList=True, fileId2Alphabet=True))
# header = ["D", "X"]
header = ["D"] + groupList
print(tabulate(X_D_table, headers=header))
| 36.993243 | 140 | 0.592146 |
e4832a86e7db8f21257aa59834d215a8144ccb1f
| 23 |
py
|
Python
|
protonets/data/__init__.py
|
sripathisridhar/prototypical-networks
|
02a1379dceea896e23ecf21384d4a6ee2393f38c
|
[
"MIT"
] | 889 |
2017-11-12T22:04:25.000Z
|
2022-03-31T09:42:13.000Z
|
protonets/data/__init__.py
|
Harzva/prototypical-networks
|
c9bb4d258267c11cb6e23f0a19242d24ca98ad8a
|
[
"MIT"
] | 24 |
2017-12-06T19:28:23.000Z
|
2021-11-27T11:35:53.000Z
|
protonets/data/__init__.py
|
Harzva/prototypical-networks
|
c9bb4d258267c11cb6e23f0a19242d24ca98ad8a
|
[
"MIT"
] | 240 |
2017-11-12T22:04:28.000Z
|
2022-03-26T09:25:42.000Z
|
from . import omniglot
| 11.5 | 22 | 0.782609 |
e483e0a6252d9a8ff4f77f42bb3708e55acf3498
| 330 |
py
|
Python
|
flask/part3_templates/ex3-app/students2.py
|
macloo/python-beginners
|
1124922cd57d3f647eacafa0b82948587514d4bd
|
[
"MIT"
] | 42 |
2018-03-25T22:41:57.000Z
|
2022-01-08T21:23:02.000Z
|
flask/part3_templates/ex3-app/students2.py
|
pavanpatil45/python-beginners
|
1124922cd57d3f647eacafa0b82948587514d4bd
|
[
"MIT"
] | null | null | null |
flask/part3_templates/ex3-app/students2.py
|
pavanpatil45/python-beginners
|
1124922cd57d3f647eacafa0b82948587514d4bd
|
[
"MIT"
] | 17 |
2018-03-20T00:56:57.000Z
|
2022-01-12T06:36:18.000Z
|
# two templates are used in this app
from flask import Flask, render_template
app = Flask(__name__)
if __name__ == '__main__':
app.run(debug=True)
| 20.625 | 52 | 0.70303 |
e4884423f1f3c28f1f01d03c9e676127547b57c0
| 250 |
py
|
Python
|
docs/autodoc_example.py
|
aio-libs/sphinxcontrib-asyncio
|
dbfa79e29980e73ad2dd9dec59f1238b1a8a7cd7
|
[
"Apache-2.0"
] | 19 |
2016-02-21T13:27:43.000Z
|
2020-02-19T17:22:38.000Z
|
docs/autodoc_example.py
|
aio-libs/sphinxcontrib-asyncio
|
dbfa79e29980e73ad2dd9dec59f1238b1a8a7cd7
|
[
"Apache-2.0"
] | 9 |
2016-04-15T08:43:39.000Z
|
2022-01-06T10:43:08.000Z
|
docs/autodoc_example.py
|
aio-libs/sphinxcontrib-asyncio
|
dbfa79e29980e73ad2dd9dec59f1238b1a8a7cd7
|
[
"Apache-2.0"
] | 6 |
2016-04-11T07:32:41.000Z
|
2019-09-28T10:59:51.000Z
|
import asyncio
| 14.705882 | 39 | 0.608 |
e488e12d7b940dac3d9db3e9a5b2cb31258d0310
| 25,266 |
py
|
Python
|
test/recipe/test_edit_recipe.py
|
fredsonchaves07/foodfy
|
5bff04434749f369f982090b00590cca31fce186
|
[
"MIT"
] | null | null | null |
test/recipe/test_edit_recipe.py
|
fredsonchaves07/foodfy
|
5bff04434749f369f982090b00590cca31fce186
|
[
"MIT"
] | 141 |
2021-03-03T01:38:10.000Z
|
2022-01-16T15:42:02.000Z
|
test/recipe/test_edit_recipe.py
|
fredsonchaves07/foodfy
|
5bff04434749f369f982090b00590cca31fce186
|
[
"MIT"
] | null | null | null |
import json
from io import BytesIO
from app.ext.api.exceptions import (
ChefNotFound,
InvalidToken,
MaximumImageCapacityError,
OperationNotAllowed,
RecipeWithoutIngredient,
RecipeWithoutPreparationMode,
)
from app.ext.api.services import token_services
| 28.48478 | 87 | 0.566097 |
e4892ccb409c7b541cbd948a9e9898c388a282c5
| 521 |
py
|
Python
|
survey/migrations/0018_auto_20161128_0936.py
|
watchdogpolska/ankieta-rodzic-po-ludzku-nfz
|
68b1d1ccac969ca51416761d1168678effb1e6c6
|
[
"MIT"
] | null | null | null |
survey/migrations/0018_auto_20161128_0936.py
|
watchdogpolska/ankieta-rodzic-po-ludzku-nfz
|
68b1d1ccac969ca51416761d1168678effb1e6c6
|
[
"MIT"
] | null | null | null |
survey/migrations/0018_auto_20161128_0936.py
|
watchdogpolska/ankieta-rodzic-po-ludzku-nfz
|
68b1d1ccac969ca51416761d1168678effb1e6c6
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-28 09:36
from __future__ import unicode_literals
from django.db import migrations, models
| 24.809524 | 122 | 0.610365 |
e48a824fae829e3008efd9a9fbcb0f03d3adc45f
| 90 |
py
|
Python
|
tests/__init__.py
|
slinksoft/PathExactDelayPrototype
|
633576cfe031e8ee884daaa453a3e5d206eaeaa9
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
slinksoft/PathExactDelayPrototype
|
633576cfe031e8ee884daaa453a3e5d206eaeaa9
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
slinksoft/PathExactDelayPrototype
|
633576cfe031e8ee884daaa453a3e5d206eaeaa9
|
[
"MIT"
] | null | null | null |
import os
import sys
from pathlib import Path
sys.path.insert(0, 'exactdelaypathfinder')
| 15 | 42 | 0.8 |
e48ba9fc67c09776260edc71cd67600e98eb63a9
| 1,885 |
py
|
Python
|
2017/day07/code.py
|
Fadi88/AoC
|
8b24f2f2cc7b4e1c63758e81e63d8670a261cc7c
|
[
"Unlicense"
] | 12 |
2019-12-15T21:53:19.000Z
|
2021-12-24T17:03:41.000Z
|
2017/day07/code.py
|
Fadi88/adventofcode19
|
dd2456bdd163beb02dbfe9dcea2b021061c7671e
|
[
"Unlicense"
] | 1 |
2021-12-15T20:40:51.000Z
|
2021-12-15T22:19:48.000Z
|
2017/day07/code.py
|
Fadi88/adventofcode19
|
dd2456bdd163beb02dbfe9dcea2b021061c7671e
|
[
"Unlicense"
] | 5 |
2020-12-11T06:00:24.000Z
|
2021-12-20T21:37:46.000Z
|
import time
from collections import defaultdict
if __name__ == "__main__":
part1()
part2()
| 22.176471 | 83 | 0.490716 |
e48e53ba04ff99bdd6227e182235f811ae1dc4ee
| 403 |
py
|
Python
|
src/microbit/spi-tof-master.py
|
romilly/multi-VL53L0X
|
80cf0d82d93ceae9c54acb967c24a1bf8deb5e3a
|
[
"MIT"
] | null | null | null |
src/microbit/spi-tof-master.py
|
romilly/multi-VL53L0X
|
80cf0d82d93ceae9c54acb967c24a1bf8deb5e3a
|
[
"MIT"
] | null | null | null |
src/microbit/spi-tof-master.py
|
romilly/multi-VL53L0X
|
80cf0d82d93ceae9c54acb967c24a1bf8deb5e3a
|
[
"MIT"
] | null | null | null |
from microbit import *
import struct
from time import sleep
SENSORS = 2
spi.init(baudrate=100000)
while True:
for i in [0, 1]:
print(i, ord(spi_read(i)))
sleep(0.1)
| 21.210526 | 45 | 0.652605 |
e48f98c85bda6baa0cc86d71b689b55e8122a390
| 16,653 |
py
|
Python
|
hasher-matcher-actioner/hmalib/models.py
|
isabella232/ThreatExchange
|
0d07a800bbf25d8541f40b828e2dfd377395af9b
|
[
"BSD-3-Clause"
] | null | null | null |
hasher-matcher-actioner/hmalib/models.py
|
isabella232/ThreatExchange
|
0d07a800bbf25d8541f40b828e2dfd377395af9b
|
[
"BSD-3-Clause"
] | 1 |
2021-04-19T10:20:43.000Z
|
2021-04-19T10:20:43.000Z
|
hasher-matcher-actioner/hmalib/models.py
|
isabella232/ThreatExchange
|
0d07a800bbf25d8541f40b828e2dfd377395af9b
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import datetime
import typing as t
import json
from dataclasses import dataclass, field
from mypy_boto3_dynamodb.service_resource import Table
from boto3.dynamodb.conditions import Attr, Key
"""
Data transfer object classes to be used with dynamodbstore
Classes in this module should implement methods `to_dynamodb_item(self)` and
`to_sqs_message(self)`
"""
class HashRecordQuery:
DEFAULT_PROJ_EXP = "PK, ContentHash, UpdatedAt, Quality"
| 32.273256 | 110 | 0.623311 |
e48fad25c05c483e3b144a00ff76a128d96f4a18
| 89 |
py
|
Python
|
colossalai/utils/commons/__init__.py
|
mrriteshranjan/ColossalAI
|
0d057a1bae67b915a385be7edab7da83413cb645
|
[
"Apache-2.0"
] | null | null | null |
colossalai/utils/commons/__init__.py
|
mrriteshranjan/ColossalAI
|
0d057a1bae67b915a385be7edab7da83413cb645
|
[
"Apache-2.0"
] | null | null | null |
colossalai/utils/commons/__init__.py
|
mrriteshranjan/ColossalAI
|
0d057a1bae67b915a385be7edab7da83413cb645
|
[
"Apache-2.0"
] | null | null | null |
from .bucket_tensor_copy import BucketizedTensorCopy
__all__ = ['BucketizedTensorCopy']
| 22.25 | 52 | 0.842697 |
e49334330d41dc2dca73dcd98740a04934ce3d79
| 83 |
py
|
Python
|
DRF-React/appy/apps.py
|
Preet538-neitzen/LOC-Hackathon
|
e7bad458ef0069becdba42576f5fe1bfd736678b
|
[
"MIT"
] | null | null | null |
DRF-React/appy/apps.py
|
Preet538-neitzen/LOC-Hackathon
|
e7bad458ef0069becdba42576f5fe1bfd736678b
|
[
"MIT"
] | 8 |
2021-03-19T13:44:46.000Z
|
2022-03-12T00:55:03.000Z
|
DRF-React/appy/apps.py
|
Preet538-neitzen/LOC-Hackathon
|
e7bad458ef0069becdba42576f5fe1bfd736678b
|
[
"MIT"
] | 1 |
2021-02-13T00:16:36.000Z
|
2021-02-13T00:16:36.000Z
|
from django.apps import AppConfig
| 13.833333 | 33 | 0.73494 |
e494747ad6589e1234241f26ac62dacfe6cecd8c
| 998 |
py
|
Python
|
test/test_truss.py
|
deeepeshthakur/ddtruss
|
86aa945d577c6efe752099eee579386762942289
|
[
"MIT"
] | 1 |
2020-01-27T12:03:47.000Z
|
2020-01-27T12:03:47.000Z
|
test/test_truss.py
|
deeepeshthakur/ddtruss
|
86aa945d577c6efe752099eee579386762942289
|
[
"MIT"
] | null | null | null |
test/test_truss.py
|
deeepeshthakur/ddtruss
|
86aa945d577c6efe752099eee579386762942289
|
[
"MIT"
] | null | null | null |
import numpy as np
import pytest
from ddtruss import Truss, DataDrivenSolver
points = np.array([[0, 0], [1, 0], [0.5, 0.5], [2, 1]])
lines = np.array([[0, 2], [1, 2], [1, 3], [2, 3]], dtype=int)
truss = Truss(points, lines)
E = 1.962e11
A = [2e-4, 2e-4, 1e-4, 1e-4]
U_dict = {0: [0, 0], 1: [0, 0]}
F_dict = {3: [0, -9.81e3]}
u_ref = np.array(
[0, 0, 0, 0, 2.65165043e-4, 8.83883476e-5, 3.47902545e-3, -5.60034579e-3]
)
| 24.95 | 77 | 0.621242 |
e494dbf6ede35cd65a3c40c381a319f33cf3e78d
| 2,563 |
py
|
Python
|
app/models.py
|
MilanMathew/machine_test_focaloid
|
fa179e655c531825167e97aed4e2d6affea9c736
|
[
"MIT"
] | null | null | null |
app/models.py
|
MilanMathew/machine_test_focaloid
|
fa179e655c531825167e97aed4e2d6affea9c736
|
[
"MIT"
] | null | null | null |
app/models.py
|
MilanMathew/machine_test_focaloid
|
fa179e655c531825167e97aed4e2d6affea9c736
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from app import db
| 37.144928 | 71 | 0.684354 |
e49516ca8ad700f85017d9325736d77d5ccd8a3d
| 2,326 |
py
|
Python
|
PTO-yelp/Modules/attention_classifier.py
|
LegendTianjin/Point-Then-Operate
|
a6b0818343bc34c468738ab91ecea89dd03a9535
|
[
"Apache-2.0"
] | 50 |
2019-06-06T05:30:32.000Z
|
2021-11-18T07:24:36.000Z
|
PTO-yelp/Modules/attention_classifier.py
|
lancopku/Point-Then-Operate
|
1c04ec326b52fc65f97f5610a6f16f6e938d583e
|
[
"Apache-2.0"
] | 2 |
2019-08-30T09:49:26.000Z
|
2020-01-17T04:20:53.000Z
|
PTO-yelp/Modules/attention_classifier.py
|
ChenWu98/Point-Then-Operate
|
a6b0818343bc34c468738ab91ecea89dd03a9535
|
[
"Apache-2.0"
] | 7 |
2019-06-17T06:20:47.000Z
|
2020-10-26T03:19:44.000Z
|
import os
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from utils.utils import gpu_wrapper
from Modules.subModules.attention import AttentionUnit
from torch.nn.utils.rnn import pack_padded_sequence as pack
from torch.nn.utils.rnn import pad_packed_sequence as unpack
| 41.535714 | 136 | 0.564488 |
e4954d56f09841ccf54e7784967df8b418345b0e
| 569 |
py
|
Python
|
minion/parser.py
|
timofurrer/minion-ci
|
411d0ea6638fb37d7e170cc8c8c5815304cc9f5c
|
[
"MIT"
] | 49 |
2016-03-07T06:42:40.000Z
|
2021-03-06T02:43:02.000Z
|
minion/parser.py
|
timofurrer/minion-ci
|
411d0ea6638fb37d7e170cc8c8c5815304cc9f5c
|
[
"MIT"
] | 16 |
2016-03-08T07:20:52.000Z
|
2017-04-21T18:15:12.000Z
|
minion/parser.py
|
timofurrer/minion-ci
|
411d0ea6638fb37d7e170cc8c8c5815304cc9f5c
|
[
"MIT"
] | 9 |
2016-03-29T22:08:52.000Z
|
2021-06-16T16:29:30.000Z
|
"""
`minion-ci` is a minimalist, decentralized, flexible Continuous Integration Server for hackers.
This module contains the parser to parse the `minion.yml` file.
:copyright: (c) by Timo Furrer
:license: MIT, see LICENSE for details
"""
import yaml
from .errors import MinionError
def parse(path):
"""Parse the given minion.yml file"""
try:
with open(path) as minion_file:
config = yaml.load(minion_file)
except OSError:
raise MinionError("No minion.yml config file found in repository")
return config
| 24.73913 | 99 | 0.681898 |
e49a3917364c39b81a8dd470087dc69990edf5b7
| 1,431 |
py
|
Python
|
finace/utils/rong_city.py
|
pythonyhd/finace
|
614d98ad92e1bbaa6cf7dc1d6dfaba4f24431688
|
[
"Apache-2.0"
] | 1 |
2020-08-18T01:55:14.000Z
|
2020-08-18T01:55:14.000Z
|
finace/utils/rong_city.py
|
pythonyhd/finace
|
614d98ad92e1bbaa6cf7dc1d6dfaba4f24431688
|
[
"Apache-2.0"
] | null | null | null |
finace/utils/rong_city.py
|
pythonyhd/finace
|
614d98ad92e1bbaa6cf7dc1d6dfaba4f24431688
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from pymongo import MongoClient
from finace import settings
if __name__ == '__main__':
spider = SpiderCity()
url_list = spider.get()
print(url_list)
| 26.018182 | 110 | 0.583508 |
e49b044e4f3bdfef09e6426d0ff3c5f755aa63ae
| 1,464 |
py
|
Python
|
bufflog/bufflog.py
|
bufferapp/python-bufflog
|
12d218dfb917419789c720fb1851a35708909810
|
[
"MIT"
] | null | null | null |
bufflog/bufflog.py
|
bufferapp/python-bufflog
|
12d218dfb917419789c720fb1851a35708909810
|
[
"MIT"
] | null | null | null |
bufflog/bufflog.py
|
bufferapp/python-bufflog
|
12d218dfb917419789c720fb1851a35708909810
|
[
"MIT"
] | 1 |
2021-02-08T12:53:43.000Z
|
2021-02-08T12:53:43.000Z
|
import structlog
import logging
import sys
import os
from structlog.processors import JSONRenderer
from structlog.stdlib import filter_by_level
from structlog.stdlib import add_log_level_number
from .datadog import tracer_injection
LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")
| 24.4 | 57 | 0.705601 |
e49cb572bd1c712b03397fca3826c3ed98801ce6
| 990 |
py
|
Python
|
templator.py
|
daren-thomas/template-system-example
|
248d2f78392be826f3223ee27e90c82feb70a17a
|
[
"MIT"
] | null | null | null |
templator.py
|
daren-thomas/template-system-example
|
248d2f78392be826f3223ee27e90c82feb70a17a
|
[
"MIT"
] | null | null | null |
templator.py
|
daren-thomas/template-system-example
|
248d2f78392be826f3223ee27e90c82feb70a17a
|
[
"MIT"
] | null | null | null |
"""
templator.py reads in an excel file and a template and outputs a file for each row
in the excel file, by substituting the template variables with the values in the columns.
This technique uses pandas to read the excel file into a DataFrame and the python format operator ``%```
to apply the values.
"""
import sys
import os
import pandas as pd
if __name__ == '__main__':
template_file = os.path.join(os.path.dirname(__file__), 'template.txt')
excel_file = os.path.join(os.path.dirname(__file__), 'variables.xls')
main(template_file, excel_file)
| 34.137931 | 104 | 0.706061 |
e4a25083351a643d4c8f2b90bb9ef5552f4ba55d
| 483 |
py
|
Python
|
src/stackoverflow/56339991/tests.py
|
mrdulin/python-codelab
|
3d960a14a96b3a673b7dc2277d202069b1f8e778
|
[
"MIT"
] | null | null | null |
src/stackoverflow/56339991/tests.py
|
mrdulin/python-codelab
|
3d960a14a96b3a673b7dc2277d202069b1f8e778
|
[
"MIT"
] | null | null | null |
src/stackoverflow/56339991/tests.py
|
mrdulin/python-codelab
|
3d960a14a96b3a673b7dc2277d202069b1f8e778
|
[
"MIT"
] | 3 |
2020-02-19T08:02:04.000Z
|
2021-06-08T13:27:51.000Z
|
import unittest
from test_base import TestBaseImporter
from test_child import TestChildImporter
if __name__ == '__main__':
test_loader = unittest.TestLoader()
test_classes_to_run = [TestBaseImporter, TestChildImporter]
suites_list = []
for test_class in test_classes_to_run:
suite = test_loader.loadTestsFromTestCase(test_class)
suites_list.append(suite)
big_suite = unittest.TestSuite(suites_list)
unittest.TextTestRunner().run(big_suite)
| 30.1875 | 63 | 0.766046 |
e4a318b6cae44face6ae5927fa38829adbecfa61
| 1,015 |
py
|
Python
|
src/mains/test_tf.py
|
JungleEngine/Intelligent_Frame_Skipping_Network
|
8178acfe06e112f5d33acbd17ad33239a6c4afc2
|
[
"MIT"
] | null | null | null |
src/mains/test_tf.py
|
JungleEngine/Intelligent_Frame_Skipping_Network
|
8178acfe06e112f5d33acbd17ad33239a6c4afc2
|
[
"MIT"
] | null | null | null |
src/mains/test_tf.py
|
JungleEngine/Intelligent_Frame_Skipping_Network
|
8178acfe06e112f5d33acbd17ad33239a6c4afc2
|
[
"MIT"
] | null | null | null |
# import tensorflow as tf
# print(tf.__version__)
#
#
# with tf.name_scope('scalar_set_one') as scope:
# tf_constant_one = tf.constant(10, name="ten")
# tf_constant_two = tf.constant(20, name="twenty")
# scalar_sum_one = tf.add(tf_constant_one, tf_constant_two, name="scalar_ten_plus_twenty")
#
#
#
# with tf.name_scope('scalar_set_two') as scope:
# tf_constant_three = tf.constant(30, name="thirty")
# tf_constant_four = tf.constant(40, name="fourty")
# scalar_sum_two = tf.add(tf_constant_three, tf_constant_four, name="scalar_thirty_plus_fourty")
#
#
# scalar_sum_sum = tf.add(scalar_sum_one, scalar_sum_two)
#
#
# sess = tf.Session()
# sess.run(tf.global_variables_initializer())
#
# tf_tensorboard_writer = tf.summary.FileWriter('./graphs', sess.graph)
# tf_tensorboard_writer.close()
# sess.close()
import scipy
import _pickle as cPickle
# NOTE(review): `unpickle` is not defined or imported anywhere in this chunk,
# so this line raises NameError as written -- presumably a helper wrapping
# cPickle.load was intended. Confirm against the full file.
data = unpickle("model_svm_relations.pkl")
| 27.432432 | 100 | 0.719212 |
e4a3bd3abdfaed582c987ca4af954c061d659067
| 24,952 |
py
|
Python
|
src/menus/user/Menu.py
|
stregea/TransactionTrackr
|
c38b99d56816becaa47a21400fb20c615d3483ef
|
[
"MIT"
] | 2 |
2021-07-02T19:49:24.000Z
|
2021-07-08T02:59:25.000Z
|
src/menus/user/Menu.py
|
stregea/TransactionTrackr
|
c38b99d56816becaa47a21400fb20c615d3483ef
|
[
"MIT"
] | null | null | null |
src/menus/user/Menu.py
|
stregea/TransactionTrackr
|
c38b99d56816becaa47a21400fb20c615d3483ef
|
[
"MIT"
] | null | null | null |
from objects.user.User import User
from objects.interface.dbconn import DB
from objects.user.Currency import get_currency_symbol
from objects.threads.UploadThread import UploadThread
import utils.globals as _globals
from utils.print import print_message, print_error
from utils.enums import Months, SettingsSelection, is_valid_month, month_string_to_enum
from utils.visualizer import visualizer, visualizer_helper
from utils.builders.folderbuilder import create_user_folder
from utils.exceptions import NoDataFound, NoTotalFound, InvalidMonth, InvalidYear, UserNotFound
from utils.dates.dates import get_dates, subtract_days
from utils.averager.averager import calculate_average
from utils.formatting.formatter import format_date_pretty, format_month_enum_to_string
from utils.generators.csv_generator import generate_transaction_files
from menus.user.Settings import Settings
def user_has_data(user: User) -> bool:
    """
    Check whether the given user has any transaction data at all.

    :param user: The user to check.
    :return: True if the user has data. False otherwise.
    """
    try:
        # Probing for the earliest transaction is enough: it raises when
        # the user has no transactions whatsoever.
        user.get_earliest_transaction_date()
        return True
    except Exception:  # NOTE: catching NoDataFound directly reportedly fails here
        print_error("No data is currently available.")
        return False
def is_valid_year(year_to_check: str) -> bool:
    """
    Determine if the passed in year currently exists within the database.

    :param year_to_check: The year to check (four-digit string, e.g. "2018").
    :return: True if at least one transaction exists for that year, False otherwise.
    """
    db = DB(_globals.DATABASE)
    try:
        # Each row is a 1-tuple holding one distinct 'YYYY' string.
        # (The former f-string prefix was pointless: the query has no placeholders.)
        years = db.fetchall("SELECT DISTINCT strftime('%Y', Date) from Transactions;")
    finally:
        # Always release the connection, even if the query raises.
        db.close()
    return any(year_to_check == year[0] for year in years)
def get_month_and_year() -> (Months, str):
    """
    Prompt a user to enter a month and a year.

    :raises InvalidMonth: If the entered month cannot be parsed to a valid month.
    :raises InvalidYear: If the entered year has no transactions in the database.
    :return: A Month enum and the year the user selected.
    """
    raw_month = input("Enter a month:\t")
    month_enum = month_string_to_enum(raw_month)
    # Guard clauses: reject bad input as soon as it is seen.
    if not is_valid_month(month_enum):
        raise InvalidMonth(raw_month)
    year = input("Enter a year:\t")
    if not is_valid_year(year):
        raise InvalidYear(year)
    return month_enum, year
def get_year():
    """
    Prompt a user to enter a year.

    :raises InvalidYear: If the entered year has no transactions in the database.
    :return: The year the user enters.
    """
    entered = input("Enter a year:\t")
    if not is_valid_year(entered):
        raise InvalidYear(entered)
    return entered
def display_monthly_information(user: User, month: Months, year: str, show_console: bool = False,
                                show_visual: bool = False) -> None:
    """
    Report how much money the user spent within a single month.

    :param user: The current user.
    :param month: The month to report on.
    :param year: The year the month belongs to.
    :param show_console: If True, print each day's total and the monthly total.
    :param show_visual: If True, render a bar chart of the daily totals.
    """
    try:
        # Maps 'YYYY-MM-DD' day keys to the amount spent on that day.
        daily_totals = visualizer_helper.get_transactions_by_month(month, year, user.id)
        # Total spent across the whole month.
        month_total = visualizer_helper.get_monthly_total(month, year, user.id)
    except (NoDataFound, NoTotalFound) as err:
        print_error(err.message)
        return

    currency_symbol = get_currency_symbol(user.currency_id)
    # Shared heading for both the console output and the chart.
    title = f"Total spent in {format_month_enum_to_string(month)} {year}: {currency_symbol}{month_total:,}"

    # Chronological day labels (ISO dates sort correctly as plain strings).
    day_labels = sorted(daily_totals)
    # Daily amounts and their formatted currency labels, aligned with day_labels.
    dollars = [round(float(daily_totals[day]), 2) for day in day_labels]
    dollars_labels = [f"{currency_symbol}{value:,}" for value in dollars]

    if show_console:  # TODO: change to function to prevent duplicated code.
        for day, label in zip(day_labels, dollars_labels):
            print_message(f"{day}:\t{label}")
        print_message(f"{title}")

    if show_visual:
        visualizer.display_bar_chart(title=title,
                                     list_of_values=dollars,
                                     list_of_labels=day_labels,
                                     currency_labels=dollars_labels)
def display_yearly_information(user: User, year: str, show_console: bool = False, show_visual: bool = False) -> None:
    """
    Report how much money the user spent in each month of a given year.

    :param user: The current user.
    :param year: The year to report on.
    :param show_console: If True, print each month's total and the yearly total.
    :param show_visual: If True, render a bar chart of the monthly totals.
    """
    try:
        # Maps month-name keys to the amount spent in that month.
        monthly_totals = visualizer_helper.get_transactions_by_year(year, user.id)
        # Total spent across the whole year.
        year_total = visualizer_helper.get_yearly_total(year, user.id)
    except (NoDataFound, NoTotalFound) as err:
        print_error(err.message)
        return

    currency_symbol = get_currency_symbol(user.currency_id)
    # Shared heading for both the console output and the chart.
    title = f"Total Spent in {year}: {currency_symbol}{year_total:,}"

    dollars = []
    dollars_labels = []
    month_labels = []
    for month_name, amount in monthly_totals.items():
        value = round(float(amount), 2)
        dollars.append(value)
        dollars_labels.append(f"{currency_symbol}{value:,}")
        # The dictionary keys are already in the lookup format for _globals.months.
        month_labels.append(_globals.months[month_name])

    if show_console:
        for month, label in zip(month_labels, dollars_labels):
            print_message(f"{month}: {label}")
        print_message(f"{title}")

    if show_visual:
        visualizer.display_bar_chart(title=title,
                                     list_of_values=dollars,
                                     list_of_labels=month_labels,
                                     currency_labels=dollars_labels)
def display_information_all_time(user: User, show_console: bool = False, show_visual: bool = False) -> None:
    """
    Report how much money the user has spent in every year on record.

    :param user: The current user.
    :param show_console: If True, print each year's total and the all-time total.
    :param show_visual: If True, render a bar chart of the yearly totals.
    """
    try:
        # Maps year keys to the amount spent in that year.
        yearly_totals = visualizer_helper.get_transactions_all_time(user.id)
        # Total spent across all recorded years.
        grand_total = visualizer_helper.get_total_all_time(user.id)
    except NoDataFound as err:
        print_error(err.message)
        return

    currency_symbol = get_currency_symbol(user.currency_id)
    # Shared heading for both the console output and the chart.
    title = f"Total Spent All Time: {currency_symbol}{grand_total:,}"

    year_labels = []
    dollars = []
    dollars_labels = []
    for year_key, amount in yearly_totals.items():
        value = round(float(amount), 2)
        dollars.append(value)
        dollars_labels.append(f"{currency_symbol}{value:,}")
        year_labels.append(year_key)

    if show_console:
        for year, label in zip(year_labels, dollars_labels):
            print_message(f"{year}: {label}")
        print_message(f"{title}")

    if show_visual:
        visualizer.display_bar_chart(title=title,
                                     list_of_values=dollars,
                                     list_of_labels=year_labels,
                                     currency_labels=dollars_labels)
| 42.726027 | 142 | 0.631412 |
e4a69e3428e588c7d00739ddb17751edb51f6451
| 1,717 |
py
|
Python
|
website/CookieHelper.py
|
sousic/flask.huny.kr
|
53a8f5af1fa63b290a4e97278a86328758e97d43
|
[
"MIT"
] | null | null | null |
website/CookieHelper.py
|
sousic/flask.huny.kr
|
53a8f5af1fa63b290a4e97278a86328758e97d43
|
[
"MIT"
] | null | null | null |
website/CookieHelper.py
|
sousic/flask.huny.kr
|
53a8f5af1fa63b290a4e97278a86328758e97d43
|
[
"MIT"
] | null | null | null |
# -*- coding: UTF-8 -*-
import base64
from functools import wraps
import pyaes
from flask import request
from werkzeug.utils import redirect
from website.domain.UserVO import UserVO
| 28.616667 | 90 | 0.594059 |
e4a6e1bb797c7875ed388c77bf15d0c26b3189cb
| 3,652 |
py
|
Python
|
export_resized_ios_assets.py
|
Tubbebubbe/gimp-plugins
|
11221ded072d8d3001202f30fda266e0cccd3a36
|
[
"MIT"
] | 4 |
2016-08-03T18:20:59.000Z
|
2020-05-24T04:38:47.000Z
|
export_resized_ios_assets.py
|
Tubbebubbe/gimp-plugins
|
11221ded072d8d3001202f30fda266e0cccd3a36
|
[
"MIT"
] | null | null | null |
export_resized_ios_assets.py
|
Tubbebubbe/gimp-plugins
|
11221ded072d8d3001202f30fda266e0cccd3a36
|
[
"MIT"
] | 2 |
2017-10-23T08:23:36.000Z
|
2020-05-24T04:38:57.000Z
|
#!/usr/bin/env python
"""
export_resized_ios_images
Gimp plugin to export image to icon files usable on iOS.
Author:
-------
Tobias Blom, Techne Development AB <[email protected]>
Installation:
-------------
Under Mac OS X, copy this file to ~/Library/Application Support/GIMP/x.x/plug-ins and
make it executable (chmod 755)
Usage:
------
1. Create your image at a resolution four times what you want on a
standard iOS device, twice the size on a retina device.
GIMP image Plug-in output
-------------------------------------------------
80 x 80 @ 144 dpi | Icon 20 x 20 @ 72 dpi
| Icon 40 x 40 @ 144 dpi
| Icon 60 x 60 @ 144 dpi
-------------------------------------------------
120 x 120 @ 144 dpi | Icon 30 x 30 @ 72 dpi
| Icon 60 x 60 @ 144 dpi
| Icon 90 x 90 @ 144 dpi
-------------------------------------------------
2. Run the plug-in (from the File menu) and select the output
directory.
License:
--------
Released under the MIT License
Copyright (c) 2013-2017 Techne Development AB
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from gimpfu import *
import os
# Register the export plug-in with GIMP (menu entry: File > Export as iOS assets...).
# Accepts RGB/GRAY images and takes a single output-directory parameter.
register(
    "export_resized_ios_assets",
    "Exports iOS assets at 50% and 75% (144 dpi) and 25% (72 dpi) size",
    "Exports iOS assets at 50% and 75% (144 dpi) and 25% (72 dpi) size",
    "Techne Development AB",
    "Copyright (c) 2013-2017 Techne Development AB. Released under MIT License.",
    "2017",
    "<Image>/File/Export as iOS assets...",
    "RGB*, GRAY*",
    [
        (PF_DIRNAME, "directory", "Output directory", os.path.expanduser("~")),
    ],
    [],
    # NOTE(review): plugin_main is not defined in this excerpt -- presumably the
    # function body was stripped; confirm it exists in the full file.
    plugin_main)
main()
| 33.504587 | 85 | 0.659639 |
e4a86bcd74faf3a16d79362c4832a1d23917c50f
| 3,696 |
py
|
Python
|
SortingComparison.py
|
kogol99/MSiD
|
bbe0ee535f785476a3fe75f0654f496c185565e4
|
[
"MIT"
] | null | null | null |
SortingComparison.py
|
kogol99/MSiD
|
bbe0ee535f785476a3fe75f0654f496c185565e4
|
[
"MIT"
] | 10 |
2020-03-15T20:17:04.000Z
|
2020-06-05T01:58:35.000Z
|
SortingComparison.py
|
kogol99/MSiD
|
bbe0ee535f785476a3fe75f0654f496c185565e4
|
[
"MIT"
] | 37 |
2020-03-15T17:30:40.000Z
|
2020-04-11T20:16:28.000Z
|
from timeit import default_timer as timer
import random
# shell sort using Knuth's sequence
if __name__ == "__main__":
main()
| 33.297297 | 103 | 0.619048 |
e4a93421928eb84ea60e2492daf9f320c6c9d564
| 8,417 |
py
|
Python
|
site/office/compline.py
|
scottBowles/dailyoffice2019
|
ca750ac77316d247ca7a7a820e085f9968fbc8ff
|
[
"MIT"
] | 19 |
2020-01-12T23:57:22.000Z
|
2022-03-30T16:35:17.000Z
|
site/office/compline.py
|
scottBowles/dailyoffice2019
|
ca750ac77316d247ca7a7a820e085f9968fbc8ff
|
[
"MIT"
] | 59 |
2020-01-13T00:45:27.000Z
|
2022-02-20T04:10:05.000Z
|
site/office/compline.py
|
scottBowles/dailyoffice2019
|
ca750ac77316d247ca7a7a820e085f9968fbc8ff
|
[
"MIT"
] | 7 |
2020-01-21T21:12:03.000Z
|
2021-10-24T01:15:50.000Z
|
import datetime
from django.utils.functional import cached_property
from django.utils.safestring import mark_safe
from office.offices import Office, OfficeSection
from psalter.utils import get_psalms
| 43.386598 | 345 | 0.640489 |
e4a98810c99783995caf35d9ff70ccf375552008
| 1,735 |
py
|
Python
|
src/tide_constituents/water_level_prediction.py
|
slawler/SI_2019_Coastal
|
4064d323bc62ce2f47a7af41b9a11ea5538ad181
|
[
"MIT"
] | 1 |
2020-03-13T07:51:44.000Z
|
2020-03-13T07:51:44.000Z
|
src/tide_constituents/water_level_prediction.py
|
cheginit/SI_2019_Coastal
|
4064d323bc62ce2f47a7af41b9a11ea5538ad181
|
[
"MIT"
] | null | null | null |
src/tide_constituents/water_level_prediction.py
|
cheginit/SI_2019_Coastal
|
4064d323bc62ce2f47a7af41b9a11ea5538ad181
|
[
"MIT"
] | 1 |
2020-03-13T14:44:57.000Z
|
2020-03-13T14:44:57.000Z
|
import tide_constituents as tc
from py_noaa import coops
import pandas as pd
import numpy as np
import tappy
# Date window for the observation/prediction comparison (YYYYMMDD strings).
start = '20180201'
end = '20180228'
interval = 1
start = pd.to_datetime(start)
end = pd.to_datetime(end)
d = start
# Accumulators for water levels, tides, predictions, residuals per window.
# NOTE(review): t and r are never appended to -- apparently dead; confirm.
w, t, p, r = [], [], [], []
# Walk the date range in `interval`-day windows, fetching and predicting each.
while d < end:
    start_ = d
    end_ = start_ + pd.DateOffset(interval)
    end_ = end_ if end_ < end else end
    water_level, tide = tc.get_water_levels(start_.strftime('%Y%m%d'),
                                            end_.strftime('%Y%m%d'),
                                            -88.2, 30.4)
    water_level = water_level.water_level.astype('float')
    # Z0 is the mean-sea-level constituent; use it as the baseline when present.
    prediction = 0.0 if 'Z0' not in list(tide.speed_dict.keys()) else tide.speed_dict['Z0']
    # NOTE(review): sum_signals is not defined or imported in this chunk
    # (possibly from tappy) -- confirm against the full file.
    prediction += sum_signals(tide.key_list, tide.dates, tide.speed_dict, tide.r, tide.phase)
    residual = water_level - prediction
    w.append(water_level)
    p.append(prediction)
    d = end_
# Stitch the per-window results into one observation/prediction frame.
water_level = pd.concat(w).to_frame()
water_level.columns = ['observation']
water_level['prediction'] = np.hstack(p)
data = tc.get_tides('20180101', '20181231', -88.2, 30.4)
wl = data.predicted_wl.copy()
grouped = wl.groupby(pd.Grouper(freq='M'))
# NOTE(review): `f` (the demeaning function applied per month) is not defined
# in this chunk -- confirm against the full file.
wl_demeaned = grouped.apply(f)
# 30-sample rolling extremes, reduced to one value per calendar month.
min_month = wl_demeaned.rolling(30).min().groupby(pd.Grouper(freq='M')).last()
max_month = wl_demeaned.rolling(30).max().groupby(pd.Grouper(freq='M')).last()
monthly_minmax = min_month.copy()
monthly_minmax['high'] = max_month['demeaned']
monthly_minmax = monthly_minmax[['demeaned', 'high']]
monthly_minmax.columns = ['low', 'high']
monthly_minmax['range'] = monthly_minmax.high - monthly_minmax.low
# NOTE(review): sort_values returns a new frame; the result is discarded here.
monthly_minmax.sort_values('range')
| 30.438596 | 93 | 0.663977 |
e4ab8e400d3a8428f396d10f517cf745bdb624df
| 24,862 |
py
|
Python
|
sdk/python/pulumi_azure/cosmosdb/cassandra_table.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109 |
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/cosmosdb/cassandra_table.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663 |
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/cosmosdb/cassandra_table.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41 |
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['CassandraTableArgs', 'CassandraTable']
class CassandraTable(pulumi.CustomResource):
def __init__(__self__, resource_name: str, *args, **kwargs):
        # Overloaded constructor (generated code): callers may pass either a
        # CassandraTableArgs bundle or the individual keyword arguments; both
        # paths are normalized and forwarded to _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(CassandraTableArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   analytical_storage_ttl: Optional[pulumi.Input[int]] = None,
                   autoscale_settings: Optional[pulumi.Input[pulumi.InputType['CassandraTableAutoscaleSettingsArgs']]] = None,
                   cassandra_keyspace_id: Optional[pulumi.Input[str]] = None,
                   default_ttl: Optional[pulumi.Input[int]] = None,
                   name: Optional[pulumi.Input[str]] = None,
                   schema: Optional[pulumi.Input[pulumi.InputType['CassandraTableSchemaArgs']]] = None,
                   throughput: Optional[pulumi.Input[int]] = None,
                   __props__=None):
        # Generated Pulumi resource initializer: normalize options, validate
        # required inputs, and register the resource with the engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: explicit __props__ is only legal when
            # looking up an existing resource by id.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = CassandraTableArgs.__new__(CassandraTableArgs)

            __props__.__dict__["analytical_storage_ttl"] = analytical_storage_ttl
            __props__.__dict__["autoscale_settings"] = autoscale_settings
            # cassandra_keyspace_id and schema are required unless resolving an URN.
            if cassandra_keyspace_id is None and not opts.urn:
                raise TypeError("Missing required property 'cassandra_keyspace_id'")
            __props__.__dict__["cassandra_keyspace_id"] = cassandra_keyspace_id
            __props__.__dict__["default_ttl"] = default_ttl
            __props__.__dict__["name"] = name
            if schema is None and not opts.urn:
                raise TypeError("Missing required property 'schema'")
            __props__.__dict__["schema"] = schema
            __props__.__dict__["throughput"] = throughput
        super(CassandraTable, __self__).__init__(
            'azure:cosmosdb/cassandraTable:CassandraTable',
            resource_name,
            __props__,
            opts)
| 48.558594 | 238 | 0.662376 |
e4ad4d1b1a19faa8dce0b003b788008a58802470
| 10,457 |
py
|
Python
|
HW10/b06502027_hw10.py
|
Pyrojewel-zard/ML
|
d8a11d893eed3e889b9af0d6aeb3ab08cd60d997
|
[
"MIT"
] | 5 |
2021-11-26T10:05:03.000Z
|
2022-03-17T11:45:46.000Z
|
HW10/b06502027_hw10.py
|
Pyrojewel-zard/ML
|
d8a11d893eed3e889b9af0d6aeb3ab08cd60d997
|
[
"MIT"
] | null | null | null |
HW10/b06502027_hw10.py
|
Pyrojewel-zard/ML
|
d8a11d893eed3e889b9af0d6aeb3ab08cd60d997
|
[
"MIT"
] | 1 |
2022-01-09T02:17:19.000Z
|
2022-01-09T02:17:19.000Z
|
# -*- coding: utf-8 -*-
"""hw10_adversarial_attack.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1yPa2ushzqw8FNobfonL79PHzudn0vjrN
# **Homework 10 - Adversarial Attack**
Slides: https://reurl.cc/v5kXkk
Videos:
TA: [email protected]
## Enviroment & Download
We make use of [pytorchcv](https://pypi.org/project/pytorchcv/) to obtain CIFAR-10 pretrained model, so we need to set up the enviroment first. We also need to download the data (200 images) which we want to attack.
"""
!nvidia-smi
# set up environment
!pip install pytorchcv
# download
!gdown --id 1fHi1ko7wr80wXkXpqpqpOxuYH1mClXoX -O data.zip
# unzip
!unzip ./data.zip
!rm ./data.zip
"""## Global Settings
* $\epsilon$ is fixed to be 8. But on **Data section**, we will first apply transforms on raw pixel value (0-255 scale) **by ToTensor (to 0-1 scale)** and then **Normalize (subtract mean divide std)**. $\epsilon$ should be set to $\frac{8}{255 * std}$ during attack.
* Explaination (optional)
* Denote the first pixel of original image as $p$, and the first pixel of adversarial image as $a$.
* The $\epsilon$ constraints tell us $\left| p-a \right| <= 8$.
* ToTensor() can be seen as a function where $T(x) = x/255$.
* Normalize() can be seen as a function where $N(x) = (x-mean)/std$ where $mean$ and $std$ are constants.
* After applying ToTensor() and Normalize() on $p$ and $a$, the constraint becomes $\left| N(T(p))-N(T(a)) \right| = \left| \frac{\frac{p}{255}-mean}{std}-\frac{\frac{a}{255}-mean}{std} \right| = \frac{1}{255 * std} \left| p-a \right| <= \frac{8}{255 * std}.$
* So, we should set $\epsilon$ to be $\frac{8}{255 * std}$ after ToTensor() and Normalize().
"""
import torch
import torch.nn as nn

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

batch_size = 8

# the mean and std are the calculated statistics from cifar_10 dataset
cifar_10_mean = (0.491, 0.482, 0.447) # mean for the three channels of cifar_10 images
cifar_10_std = (0.202, 0.199, 0.201) # std for the three channels of cifar_10 images

# convert mean and std to 3-dimensional tensors for future operations
mean = torch.tensor(cifar_10_mean).to(device).view(3, 1, 1)
std = torch.tensor(cifar_10_std).to(device).view(3, 1, 1)

# L-infinity budget of 8 raw-pixel units, expressed in normalized units
# (inputs are ToTensor'd to 0-1 then Normalized, so divide by 255*std).
epsilon = 8/255/std

# TODO: iterative fgsm attack
# alpha (step size) can be decided by yourself
# Per-iteration step size, in the same normalized units as epsilon.
alpha = 0.01/255/std

root = './data' # directory for storing benign images
# benign images: images which do not contain adversarial perturbations
# adversarial images: images which include adversarial perturbations
"""## Data
Construct dataset and dataloader from root directory. Note that we store the filename of each image for future usage.
"""
import os
import glob
import shutil
import numpy as np
from PIL import Image
from torchvision.transforms import transforms
from torch.utils.data import Dataset, DataLoader
transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(cifar_10_mean, cifar_10_std)
])
adv_set = AdvDataset(root, transform=transform)
adv_names = adv_set.__getname__()
adv_loader = DataLoader(adv_set, batch_size=batch_size, shuffle=False)
print(f'number of images = {adv_set.__len__()}')
"""## Utils -- Benign Images Evaluation"""
# to evaluate the performance of model on benign images
"""## Utils -- Attack Algorithm"""
# perform fgsm attack
# TODO: perform iterative fgsm attack
# set alpha as the step size in Global Settings section
# alpha and num_iter can be decided by yourself
"""## Utils -- Attack
* Recall
* ToTensor() can be seen as a function where $T(x) = x/255$.
* Normalize() can be seen as a function where $N(x) = (x-mean)/std$ where $mean$ and $std$ are constants.
* Inverse function
* Inverse Normalize() can be seen as a function where $N^{-1}(x) = x*std+mean$ where $mean$ and $std$ are constants.
* Inverse ToTensor() can be seen as a function where $T^{-1}(x) = x*255$.
* Special Noted
* ToTensor() will also convert the image from shape (height, width, channel) to shape (channel, height, width), so we also need to transpose the shape back to original shape.
* Since our dataloader samples a batch of data, what we need here is to transpose **(batch_size, channel, height, width)** back to **(batch_size, height, width, channel)** using np.transpose.
"""
# perform adversarial attack and generate adversarial examples
# create directory which stores adversarial examples
"""## Model / Loss Function
Model list is available [here](https://github.com/osmr/imgclsmob/blob/master/pytorch/pytorchcv/model_provider.py). Please select models which has _cifar10 suffix. Some of the models cannot be accessed/loaded. You can safely skip them since TA's model will not use those kinds of models.
"""
from pytorchcv.model_provider import get_model as ptcv_get_model
model = ptcv_get_model('preresnet110_cifar10', pretrained=True).to(device)
loss_fn = nn.CrossEntropyLoss()
benign_acc, benign_loss = epoch_benign(model, adv_loader, loss_fn)
print(f'benign_acc = {benign_acc:.5f}, benign_loss = {benign_loss:.5f}')
"""## FGSM"""
adv_examples, fgsm_acc, fgsm_loss = gen_adv_examples(model, adv_loader, fgsm, loss_fn)
print(f'fgsm_acc = {fgsm_acc:.5f}, fgsm_loss = {fgsm_loss:.5f}')
create_dir(root, 'fgsm', adv_examples, adv_names)
"""## I-FGSM"""
# TODO: iterative fgsm attack
adv_examples, ifgsm_acc, ifgsm_loss = gen_adv_examples(model, adv_loader, ifgsm, loss_fn)
print(f'ifgsm_acc = {ifgsm_acc:.5f}, ifgsm_loss = {ifgsm_loss:.5f}')
create_dir(root, 'ifgsm', adv_examples, adv_names)
"""## Compress the images"""
# Commented out IPython magic to ensure Python compatibility.
# %cd fgsm
# !tar zcvf ../fgsm.tgz *
# %cd ..
# %cd ifgsm
!tar zcvf ../ifgsm_preresnet110_1600.tgz *
# %cd ..
"""## Visualization"""
import matplotlib.pyplot as plt

classes = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']

# One row per class: benign image vs its FGSM adversarial counterpart,
# each titled with the model's predicted class and confidence.
plt.figure(figsize=(10, 20))
cnt = 0
for i, cls_name in enumerate(classes):
    path = f'{cls_name}/{cls_name}1.png'
    # benign image
    cnt += 1
    plt.subplot(len(classes), 4, cnt)
    im = Image.open(f'./data/{path}')
    # Run the (already loaded) classifier on the normalized single image.
    logit = model(transform(im).unsqueeze(0).to(device))[0]
    predict = logit.argmax(-1).item()
    prob = logit.softmax(-1)[predict].item()
    plt.title(f'benign: {cls_name}1.png\n{classes[predict]}: {prob:.2%}')
    plt.axis('off')
    plt.imshow(np.array(im))
    # adversarial image
    cnt += 1
    plt.subplot(len(classes), 4, cnt)
    im = Image.open(f'./fgsm/{path}')
    logit = model(transform(im).unsqueeze(0).to(device))[0]
    predict = logit.argmax(-1).item()
    prob = logit.softmax(-1)[predict].item()
    plt.title(f'adversarial: {cls_name}1.png\n{classes[predict]}: {prob:.2%}')
    plt.axis('off')
    plt.imshow(np.array(im))
plt.tight_layout()
plt.show()
| 38.025455 | 286 | 0.67505 |
e4ae21080507e35b553b7b372118c5c586495e00
| 7,867 |
py
|
Python
|
main/make_gradsamplingbasedexact_mesh.py
|
tttor/nbwpg
|
271718362cf0cd810c7ea0cd9726e77276947e58
|
[
"MIT"
] | null | null | null |
main/make_gradsamplingbasedexact_mesh.py
|
tttor/nbwpg
|
271718362cf0cd810c7ea0cd9726e77276947e58
|
[
"MIT"
] | null | null | null |
main/make_gradsamplingbasedexact_mesh.py
|
tttor/nbwpg
|
271718362cf0cd810c7ea0cd9726e77276947e58
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import argparse, os, sys, pickle
import numpy as np, pathos.multiprocessing as mp, torch
import gym_util.common_util as cou, polnet as pn, util_bwopt as u
from collections import defaultdict
from poleval_pytorch import get_rpi_s, get_Ppi_ss, get_ppisteady_s, get_Qsa
if __name__ == '__main__':
main()
| 45.473988 | 109 | 0.645862 |
e4b424d5ad2b323394201895d8483eb6857e159f
| 3,158 |
py
|
Python
|
Python/tdw/FBOutput/StaticSpring.py
|
ricklentz/tdw
|
da40eec151acae20b28d6486defb4358d96adb0e
|
[
"BSD-2-Clause"
] | null | null | null |
Python/tdw/FBOutput/StaticSpring.py
|
ricklentz/tdw
|
da40eec151acae20b28d6486defb4358d96adb0e
|
[
"BSD-2-Clause"
] | null | null | null |
Python/tdw/FBOutput/StaticSpring.py
|
ricklentz/tdw
|
da40eec151acae20b28d6486defb4358d96adb0e
|
[
"BSD-2-Clause"
] | null | null | null |
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: FBOutput
import tdw.flatbuffers
# StaticSpring
def Id(self):
        """Return the spring's id (int32 stored at vtable slot 4), or 0 when the field is absent."""
        o = tdw.flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(tdw.flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0
# StaticSpring
# StaticSpring
# StaticSpring
# StaticSpring
# StaticSpring
# StaticSpring
def StaticSpringStart(builder):
    """Begin writing a StaticSpring table on *builder*, reserving its 7 fields."""
    builder.StartObject(7)
| 38.048193 | 129 | 0.679227 |
e4b481ea04167900e771c376b8996b0f7e02b22f
| 221 |
py
|
Python
|
models/locobuyticketresponse.py
|
jujinesy/Empier_PythonKakaoBot
|
80d2951955002b1a0b5d77b5c2830bc8def63ea3
|
[
"MIT"
] | 3 |
2017-03-30T15:20:18.000Z
|
2018-01-04T12:46:05.000Z
|
models/locobuyticketresponse.py
|
skdltmxn/kakaobot
|
e738b4a8d994fc4125bbd471bd48378a11a8d371
|
[
"MIT"
] | 1 |
2020-08-06T08:13:22.000Z
|
2020-08-06T08:13:22.000Z
|
models/locobuyticketresponse.py
|
skdltmxn/kakaobot
|
e738b4a8d994fc4125bbd471bd48378a11a8d371
|
[
"MIT"
] | 5 |
2020-08-06T08:18:02.000Z
|
2021-02-28T03:59:45.000Z
|
# -*- coding: utf-8 -*-
from locoresponse import LocoResponse
| 18.416667 | 42 | 0.642534 |
e4b4e1f9c8eb01d9ce9d5ca44f6c9f1bce4a4c9a
| 91 |
py
|
Python
|
travian4api/resources.py
|
ihoromi4/travian4api
|
1fa9023d62d8dfca00f5276eff13868ddc057811
|
[
"BSD-3-Clause"
] | 2 |
2022-03-08T20:50:08.000Z
|
2022-03-08T20:50:13.000Z
|
travian4api/resources.py
|
ihoromi4/travian4api
|
1fa9023d62d8dfca00f5276eff13868ddc057811
|
[
"BSD-3-Clause"
] | null | null | null |
travian4api/resources.py
|
ihoromi4/travian4api
|
1fa9023d62d8dfca00f5276eff13868ddc057811
|
[
"BSD-3-Clause"
] | 2 |
2021-03-10T18:43:53.000Z
|
2021-12-18T13:31:22.000Z
|
# Canonical ordering of the four Travian resource kinds.
RESOURCE_TYPES = ['lumber', 'clay', 'iron', 'crop']

# Index of each resource within RESOURCE_TYPES (and any parallel arrays).
LUMBER = 0
CLAY = 1
IRON = 2
CROP = 3
| 13 | 51 | 0.593407 |
e4b5033ef04e9a7be53412dd0c2573434a49130e
| 4,716 |
py
|
Python
|
pikapy/interpreter.py
|
DanyGLewin/pykachu
|
d9faeb3e938e8f8da3250e432a9dd70487291627
|
[
"MIT"
] | null | null | null |
pikapy/interpreter.py
|
DanyGLewin/pykachu
|
d9faeb3e938e8f8da3250e432a9dd70487291627
|
[
"MIT"
] | null | null | null |
pikapy/interpreter.py
|
DanyGLewin/pykachu
|
d9faeb3e938e8f8da3250e432a9dd70487291627
|
[
"MIT"
] | null | null | null |
"""Check the syntax and execute Pikachu commands.
Methods:
run -- The main context for the pikachu vm.
"""
from pikapy.utils import pika_error, pika_print
from pikapy.reader import PikaReader
from pikapy.stack import PikaStack
def run(file_name, args, debug):
"""
Run a specified Pikachu file in a virtual environment.
Arguments:
file_name -- the name and path of a file containing a pikachu program.
args -- the command line arguments specified when the pikachu interpreter was
run.
"""
pi_stack = PikaStack()
pika_stack = PikaStack()
stacks_dict = {
"pi pikachu": pi_stack,
"pika pikachu": pika_stack
}
for a in args:
pi_stack.PUSH(a)
reader = PikaReader(file_name)
while True:
try:
if debug:
try:
print "\nline {}: {}\npi {}\npika {}".format(reader.line_num, reader.lines[reader.line_num],
pi_stack.elements, pika_stack.elements)
except KeyError:
pass
command = next(reader)
except StopIteration:
print ''
break
command = command.split(' chu')[0]
terms = command.split()
if len(terms) == 0:
continue
if len(terms) == 1:
pika_error(reader.line_num, 'unknown command "{}"'.format(terms[0]))
elif len(terms) < 3:
command = " ".join(terms)
if command == "pi pikachu":
pi_stack.POP()
elif command == "pika pikachu":
pika_stack.POP()
elif command == "pi pika":
if not pi_stack.EMPTY():
pika_stack.PUSH(pi_stack.PEEK())
elif command == "pika pi":
if not pika_stack.EMPTY():
pi_stack.PUSH(pika_stack.PEEK())
elif command == "pi pi":
if not pika_stack.EMPTY():
pika_stack.RAND()
elif command == "pikachu pikachu":
try:
line_num = len(next(reader).split())
except StopIteration:
pika_error(reader.line_num - 1, "unexpected EoF, expected new line")
if pi_stack.PEEK() != pika_stack.PEEK():
continue
reader.goto(line_num)
elif command == "pika pika":
try:
line_num = len(next(reader).split())
except StopIteration:
pika_error(reader.line_num - 1, "unexpected EoF, expected new line")
if pi_stack.PEEK() == pika_stack.PEEK():
continue
reader.goto(line_num)
else:
pika_error(reader.line_num, 'unknown command "{}"'.format(reader.lines[reader.line_num]))
elif len(terms) < 4:
try:
current_stack = stacks_dict[" ".join(terms[-2:])]
except KeyError:
pika_error(reader.line_num, 'unknown pikachu "{}"'.format(" ".join(terms[-2:])))
command = terms[0]
if command == "pikachu":
current_stack.DIV()
if current_stack.PEEK() == float('NaN'):
pika_error(reader.line_num, 'cannot divide by 0')
else:
current_stack.PUSH(1)
elif len(terms) < 5:
try:
current_stack = stacks_dict[" ".join(terms[-2:])]
except KeyError:
pika_error(reader.line_num, 'unknown pikachu "{}"'.format(" ".join(terms[-2:])))
command = " ".join(terms[:-2])
if command == "pi pika":
current_stack.ADD()
elif command == "pika pi":
current_stack.SUB()
elif command == "pi pikachu":
current_stack.MULT()
elif command == "pika pikachu":
if not current_stack.EMPTY():
pika_print(current_stack.POP())
else:
pika_print("undefined")
elif command == "pikachu pikachu":
n = current_stack.POP()
if n and type(n) == int:
pika_print(chr(n))
else:
pika_print("undefined")
else:
current_stack.PUSH(2)
else:
try:
current_stack = stacks_dict[" ".join(terms[-2:])]
except KeyError:
pika_error(reader.line_num, 'unknown pikachu "{}"'.format(" ".join(terms[-2:])))
current_stack.PUSH(len(terms) - 2)
| 37.133858 | 116 | 0.496395 |
e4b53b56c59f025bc7d30fa6a90cb388b81c2484
| 1,865 |
py
|
Python
|
app/accounts/views/vendor_profile.py
|
phessabi/eshop
|
6a5352753a0c27f9c3f0eda6eec696f49ef4a8eb
|
[
"Apache-2.0"
] | 1 |
2020-02-04T21:18:31.000Z
|
2020-02-04T21:18:31.000Z
|
app/accounts/views/vendor_profile.py
|
phessabi/eshop
|
6a5352753a0c27f9c3f0eda6eec696f49ef4a8eb
|
[
"Apache-2.0"
] | 12 |
2020-01-01T11:46:33.000Z
|
2022-03-12T00:10:01.000Z
|
app/accounts/views/vendor_profile.py
|
phessabi/eshop
|
6a5352753a0c27f9c3f0eda6eec696f49ef4a8eb
|
[
"Apache-2.0"
] | 1 |
2020-02-18T11:12:48.000Z
|
2020-02-18T11:12:48.000Z
|
from rest_framework import status
from rest_framework.generics import ListAPIView, RetrieveAPIView, CreateAPIView, UpdateAPIView
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from _helpers.permissions import IsVendor
from _helpers.throttles import SustainedAnonRateThrottle, BurstAnonRateThrottle
from accounts.models import Vendor
from accounts.serializers import UserSerializer, VendorProfileSerializer
from accounts.serializers import VendorSerializer
| 40.543478 | 95 | 0.790885 |
e4b72c3c2f5a5bbfee4b0bb9f47cf02969cbd82b
| 31,394 |
py
|
Python
|
plotoptix/tkoptix.py
|
robertsulej/plotoptix
|
628694351791c7fb8cd631a6efe6cc0fd7d9f4f8
|
[
"libtiff",
"MIT"
] | 307 |
2019-04-03T10:51:41.000Z
|
2022-03-28T05:35:09.000Z
|
plotoptix/tkoptix.py
|
robertsulej/plotoptix
|
628694351791c7fb8cd631a6efe6cc0fd7d9f4f8
|
[
"libtiff",
"MIT"
] | 27 |
2019-05-11T08:53:32.000Z
|
2022-02-07T22:43:21.000Z
|
plotoptix/tkoptix.py
|
robertsulej/plotoptix
|
628694351791c7fb8cd631a6efe6cc0fd7d9f4f8
|
[
"libtiff",
"MIT"
] | 21 |
2019-08-29T21:50:23.000Z
|
2022-03-03T05:21:15.000Z
|
"""
Tkinter UI for PlotOptiX raytracer.
https://github.com/rnd-team-dev/plotoptix/blob/master/LICENSE.txt
Have a look at examples on GitHub: https://github.com/rnd-team-dev/plotoptix.
"""
import logging
import numpy as np
import tkinter as tk
from PIL import Image, ImageTk
from ctypes import byref, c_float, c_uint
from typing import List, Tuple, Optional, Union
from plotoptix.enums import *
from plotoptix._load_lib import PLATFORM
from plotoptix.npoptix import NpOptiX
| 46.099853 | 183 | 0.580334 |
e4b892cce045d4a84ff88607bc919a58e081ea7c
| 453 |
py
|
Python
|
tools/ckssh.py
|
luisxue/TreesShell
|
cd35826ca495264afa1e30f9b4f06eadd13ecb48
|
[
"MIT"
] | null | null | null |
tools/ckssh.py
|
luisxue/TreesShell
|
cd35826ca495264afa1e30f9b4f06eadd13ecb48
|
[
"MIT"
] | null | null | null |
tools/ckssh.py
|
luisxue/TreesShell
|
cd35826ca495264afa1e30f9b4f06eadd13ecb48
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# Author: Luisxue <[email protected]>
# BLOG: https://luisxue.xcodn.com
#
# Notes: TreesShell for CentOS/RadHat 6+ Debian 7+ and Ubuntu 12+
#
# Project home page:
# http://trees.org.cn
# https://github.com/luisxue/TreesShell
import socket,sys
sk = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sk.settimeout(1)
try:
sk.connect((sys.argv[1],int(sys.argv[2])))
print 'ok'
except Exception:
print 'no'
sk.close()
| 22.65 | 65 | 0.688742 |
e4ba683b1acdb8fa2966f9142fd6e41d884299cc
| 4,144 |
py
|
Python
|
app.py
|
apizzo1/Hindsight_2020
|
51a124c7363a80ebd00999a3812a91c0b27f62cd
|
[
"MIT"
] | null | null | null |
app.py
|
apizzo1/Hindsight_2020
|
51a124c7363a80ebd00999a3812a91c0b27f62cd
|
[
"MIT"
] | null | null | null |
app.py
|
apizzo1/Hindsight_2020
|
51a124c7363a80ebd00999a3812a91c0b27f62cd
|
[
"MIT"
] | 1 |
2020-09-30T02:56:29.000Z
|
2020-09-30T02:56:29.000Z
|
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func
import os
import requests
import urllib.parse
# API key introduction
# API_KEY = os.environ.get('API_KEY', '')
finnhub_API_Key = os.environ.get('finnhub_API_Key', '')
from flask import Flask, jsonify, render_template, request
db_url = os.environ.get('DATABASE_URL', '')
# create engine
engine = create_engine(db_url)
# reflect DB
Base=automap_base()
Base.prepare(engine, reflect = True)
# Flask init
app = Flask(__name__)
# dict_builder to take in sql response
# home route
if __name__ == '__main__':
app.run(debug=True)
| 36.034783 | 341 | 0.707288 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.