blob_id | directory_id | path | content_id | detected_licenses | license_type | repo_name | snapshot_id | revision_id | branch_name | visit_date | revision_date | committer_date | github_id | star_events_count | fork_events_count | gha_license_id | gha_event_created_at | gha_created_at | gha_language | src_encoding | language | is_vendor | is_generated | length_bytes | extension | content | authors | author |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3d36dbc5583d5cb355da674315a9ffcab53ce226 | 69c0b9b0f8ddae8758f6f10b7df7548dfd2c2f25 | /hero_wiki/urls.py | 0714ad5b154813f606e08b6441faf586353ea848 | []
| no_license | manuzinha0901/Universo_Heroi | 624a300f97f4c6e6c63b0c6eea446b8bcbdcc11e | 501544cfdce09356e06b43590d6f9e65c14f7031 | refs/heads/master | 2020-08-04T01:24:04.120040 | 2019-10-01T17:30:43 | 2019-10-01T17:30:43 | 211,951,945 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 751 | py | """hero_wiki URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
urlpatterns = [
path('admin/', admin.site.urls),
]
| [
"[email protected]"
]
| |
e2af38a57db25a75aa48387c6f0b689a2d5076bc | 0074cb7e3c51bb0dbe72e62898351b656cdac866 | /playfair_encryption&decryption.py | 05ed3790502985e6063fa10dbf30451956a631f1 | []
| no_license | mohitparekh7/rsa-and-playfair_cipher-PYTHON | 089723fed4c532f668da8dcefa889f76552b826d | 09230da145d9ce882fb5670161ea4690c38da764 | refs/heads/main | 2023-01-28T06:06:15.574385 | 2020-12-03T06:09:20 | 2020-12-03T06:09:20 | 303,096,977 | 0 | 1 | null | 2020-10-20T14:49:01 | 2020-10-11T10:40:13 | Python | UTF-8 | Python | false | false | 3,245 | py | key=input("Enter key ") # initializing the key
key=key.upper()
key=key.replace(" ", "")
def matrix(x,y,initial):
return [[initial for i in range(x)] for j in range(y)]
result=[]
for ch in key: # storing the key into the matrix
if ch not in result:
if ch=='J':
result.append('I')
else:
result.append(ch)
flag=0
for i in range(65,91): #storing all the other characters into the matrix
if chr(i) not in result:
if i==73 and chr(74) not in result:
result.append("I")
flag=1
elif flag==0 and i==73 or i==74:
pass
else:
result.append(chr(i))
k=0
encryptedMatrix=matrix(5,5,0) #initialize the matrix
for i in range(0,5): # creating the 5x5 matrix with the input
for j in range(0,5):
encryptedMatrix[i][j]=result[k]
k+=1
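# For example, with the key 'MONARCHY' the rows of encryptedMatrix come out as: M O N A R / C H Y B D / E F G I K / L P Q S T / U V W X Z (I and J share one cell)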
def locationIndex(c): # get location of each character
loc=[]
if c=='J':
c='I'
for i ,j in enumerate(encryptedMatrix):
for k,l in enumerate(j):
if c==l:
loc.append(i)
loc.append(k)
return loc
def encrypt(): # Encryption
msg=str(input("ENTER MSG:"))
msg=msg.upper()
msg=msg.replace(" ", "")
i=0
for s in range(0,len(msg)+1,2):
if s<len(msg)-1:
if msg[s]==msg[s+1]:
msg=msg[:s+1]+'Z'+msg[s+1:]
if len(msg)%2!=0:
msg=msg[:]+'Z'
print("CIPHER TEXT:",end=' ')
while i<len(msg):
loc=[]
loc=locationIndex(msg[i])
loc1=[]
loc1=locationIndex(msg[i+1])
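# Playfair digraph rules applied below: same column -> take the letter below each (row+1),
# same row -> take the letter to the right (column+1), otherwise swap the column indices of the pair (rectangle rule)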
if loc[1]==loc1[1]:
print("{}{}".format(encryptedMatrix[(loc[0]+1)%5][loc[1]],encryptedMatrix[(loc1[0]+1)%5][loc1[1]]).lower(),end='')
elif loc[0]==loc1[0]:
print("{}{}".format(encryptedMatrix[loc[0]][(loc[1]+1)%5],encryptedMatrix[loc1[0]][(loc1[1]+1)%5]).lower(),end='')
else:
print("{}{}".format(encryptedMatrix[loc[0]][loc1[1]],encryptedMatrix[loc1[0]][loc[1]]).lower(),end='')
i=i+2
def decrypt(): # Decryption
msg=str(input("ENTER CIPHER TEXT: "))
msg=msg.upper()
msg=msg.replace(" ", "")
print("PLAIN TEXT: ",end=' ')
i=0
while i<len(msg):
loc=[]
loc=locationIndex(msg[i])
loc1=[]
loc1=locationIndex(msg[i+1])
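# Decryption reverses the shifts: same column -> move up (row-1), same row -> move left (column-1), otherwise swap columns as in encryption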
if loc[1]==loc1[1]:
print("{}{}".format(encryptedMatrix[(loc[0]-1)%5][loc[1]],encryptedMatrix[(loc1[0]-1)%5][loc1[1]]).lower(),end='')
elif loc[0]==loc1[0]:
print("{}{}".format(encryptedMatrix[loc[0]][(loc[1]-1)%5],encryptedMatrix[loc1[0]][(loc1[1]-1)%5]).lower(),end='')
else:
print("{}{}".format(encryptedMatrix[loc[0]][loc1[1]],encryptedMatrix[loc1[0]][loc[1]]).lower(),end='')
i=i+2
while(1):
print("\n1.Encryption \n2.Decryption \n3.Exit \n")
choice = int(input("Enter your choice: "))
if choice==1:
encrypt()
elif choice==2:
decrypt()
elif choice==3:
exit()
else:
print("Choose correct choice") | [
"[email protected]"
]
| |
fec1c1260cfa72255d7da4f655922ef3b8c9177a | e075ec44df27d776d2ed2581f1c9448eb34b7f0d | /src/dashboards/graphs/migrations/0003_auto__del_field_kpi_values__add_field_kpivalue_kpi.py | ff688fcbbeca20250cb5027bbf1cbf273fe92d87 | []
| no_license | stereoit/dashboards | f5eeca34561ebc3847c58311b22ec17170cc9193 | 14d9f889cd240a9f5770b130a2628cb7d068a327 | refs/heads/master | 2021-01-25T10:20:51.520809 | 2012-09-18T14:56:37 | 2012-09-18T14:56:37 | 5,284,894 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,227 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'KPI.values'
db.delete_column('graphs_kpi', 'values_id')
# Adding field 'KPIValue.kpi'
db.add_column('graphs_kpivalue', 'kpi',
self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['graphs.KPIValue']),
keep_default=False)
def backwards(self, orm):
# User chose to not deal with backwards NULL issues for 'KPI.values'
raise RuntimeError("Cannot reverse this migration. 'KPI.values' and its values cannot be restored.")
# Deleting field 'KPIValue.kpi'
db.delete_column('graphs_kpivalue', 'kpi_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'graphs.color': {
'Meta': {'object_name': 'Color'},
'color': ('colorful.fields.RGBColorField', [], {'max_length': '7'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'graphs.colorpalette': {
'Meta': {'object_name': 'ColorPalette'},
'colors': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['graphs.Color']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
'graphs.graph': {
'Meta': {'object_name': 'Graph'},
'graph_type': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kpi': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['graphs.KPI']"}),
'palette': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['graphs.ColorPalette']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'graphs.kpi': {
'Meta': {'object_name': 'KPI'},
'granularity': ('django.db.models.fields.IntegerField', [], {'default': '60'}),
'history': ('django.db.models.fields.IntegerField', [], {'default': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
},
'graphs.kpivalue': {
'Meta': {'object_name': 'KPIValue'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kpi': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['graphs.KPIValue']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {}),
'value': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2'})
}
}
complete_apps = ['graphs'] | [
"[email protected]"
]
| |
35ce891e186eea76c0b18b167246f429469b0b6d | d4bb4ad14eca10d14d3d2e1744be8cd48ab76ea2 | /core/migrations/0007_auto_20200117_1244.py | cea65dc25e7775f5383e5b5c915c3b6de4aac300 | []
| no_license | lucasssilveira/app-django-rest-pontos-turisticos | ed2cf4f196c5eb2eb1d617b68acd96aa94fe50b7 | 9151067aea56e0d3aac6f05f2335deaa1f823c9e | refs/heads/master | 2020-12-13T09:48:55.601454 | 2020-01-27T17:06:40 | 2020-01-27T17:06:40 | 234,379,855 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 510 | py | # Generated by Django 2.0.4 on 2020-01-17 15:44
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('core', '0006_pontoturistico_endereco'),
]
operations = [
migrations.AlterField(
model_name='pontoturistico',
name='endereco',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='enderecos.Endereco'),
),
]
| [
"[email protected]"
]
| |
738ccfdc065fe7dca2e9f447d08b1b9d1849b6ee | dae2d6550f93993d6a1902f7d0041b8b01d691a2 | /minesweeper/agents/__init__.py | 50512dbe4c53402d2adeefe63cc029540803985a | [
"MIT"
]
| permissive | MathisFederico/Minesweeper | e43335bca5c4b71b1d4e5f061ed969db8ab02fe4 | b66b41066e325813b24497d2caca0a11c048e18b | refs/heads/master | 2023-02-06T11:06:39.947347 | 2020-12-23T03:47:39 | 2020-12-23T03:47:39 | 323,767,338 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 98 | py | from minesweeper.agents.random import RandomAgent
from minesweeper.agents.human import HumanAgent
| [
"[email protected]"
]
| |
c05d912a82fe7868f933980db31c66de96480241 | fbec4e681d202795dcafa898343e1a1136e1e641 | /ex/ex14.py | 4bb57eb3b971221203360d567c3d353f95a72234 | []
| no_license | hu22333/git | f00395d900fcb483f07d4b2c082723f75debb17a | 2158a100cca8a462c1ea695a7250fac317ce6560 | refs/heads/master | 2020-05-17T05:51:39.191988 | 2019-06-25T14:18:20 | 2019-06-25T14:18:20 | 183,546,043 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 491 | py | #ex14.py
from sys import argv
script,user_name=argv
prompt='>'
print(f"Hi {user_name},I'm the {script}.")
print(f"I'd like to ask you a few questions.")
print(f"Do you like me {user_name}?")
likes=input(prompt)
print(f"Where do you live {user_name}?")
lives=input(prompt)
print("what kind of computer do you have?")
computer=input(prompt)
print(f"""
Aliright,so you said {likes} about liking me.
You live in {lives}.Not sure where that is.
And you have a {computer} computer. Nice.
""") | [
"[email protected]"
]
| |
8e759a2bc6dd93cdbe910a915123e8a386e26006 | 8aff2ffdb6d17c046579b8e58d7084390cd3d616 | /Python/Nucleo/BuildGraf.py | 1c2a3bf120739866209aec206e9fc18bc21d1615 | [
"MIT"
]
| permissive | galexbh/Network-of-interconnected-servers | 45309a7de5b6b1ce750ed4c069c6a71b847112de | e97618ccb44e608c1cfe022f25763424b93de449 | refs/heads/master | 2022-11-22T20:57:30.437818 | 2020-07-31T15:46:44 | 2020-07-31T15:46:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,713 | py | # -*- coding: utf-8 -*-
#Class for the vertices.
class Vertex:
def __init__(self,name):
self.name = name
self.edges ={}
self.weight = None
#Used to add an edge to a vertex, given its characteristics.
def addEdge(self,node,distance,bandwidth,usersOnline,traffic,meanType):
self.edges[node.name] = self.getWeight(distance,bandwidth,usersOnline,traffic,meanType)
#Return the computed weight.
def getWeight(self,distance,bandwidth,usersOnline,traffic,meanType):
subTotalForDistance = self.decreaseInReliability(distance,meanType)
subtTotalForBandwidth = self.calculateForBandwidth(bandwidth,usersOnline,traffic)
totalReliabilty = subTotalForDistance + subtTotalForBandwidth
if(totalReliabilty < 0):
totalReliabilty = 0
if(totalReliabilty > 1):
totalReliabilty = 1
totalReliabilty = "{0:.2f}".format(totalReliabilty)
self.weight = totalReliabilty
return float(self.weight)
def decreaseInReliability(self,distance,meanType):
reliability = None
partition = None
if(meanType == 'CAT5'):
reliability = 0.98
decrease = 0.02 #reliability decrease (%)
partition = 50 #every 'partition' meters
if(meanType == 'CAT6'):
reliability = 0.98
decrease = 0.01
partition = 50
if(meanType == 'Fibra-Optica' or meanType == 'Fibra-Óptica'):
reliability = 0.90
decrease = 0.05
partition = 100
if(meanType == 'Wifi' or meanType == "WIFI"):
reliability = 0.7
decrease = 0.06
partition = 6
if(meanType == 'Coaxial'):
reliability = 1
decrease = 0.04
partition = 100
if(meanType == 'Par-Trenzado'):
reliability = 1
decrease = 0.01
partition = 100
subTotalForDistance = (int(distance)/partition)*decrease
totalDistanceDecrease = reliability - subTotalForDistance
return totalDistanceDecrease
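# e.g. 200 m of CAT5: 0.98 - (200/50)*0.02 = 0.90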
def calculateForBandwidth(self,bandwidth,usersOnline,traffic):
bandwidth = int(bandwidth)
usersOnline = int(usersOnline)
traffic = int(traffic)
subtTotalForBandwidth = (traffic - bandwidth)/usersOnline #mbps
percentage = (subtTotalForBandwidth/bandwidth)*100
reliability = 0
if(percentage >=1 and percentage < 25):
reliability = 0.05
if(percentage >=25 and percentage < 50):
reliability = 0.10
if(percentage >=50 and percentage < 75):
reliability = 0.15
if(percentage >= 75 and percentage <=100):
reliability = 0.20
if(percentage < 1):
reliability = 0
return reliability
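# e.g. bandwidth=100, usersOnline=10, traffic=300: (300-100)/10 = 20 Mbps per user, i.e. 20% of the bandwidth, so the penalty is 0.05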
#Class for managing the vertices in the graph.
class Graph:
def __init__(self):
self.vertices = {} #{A:{B:10}}
#Add a vertex.
def addVertex(self,vertex):
self.vertices["%s" % (vertex.name)] = vertex.edges
#Print the graph.
def printGraph(self):
graf = self.vertices
for k,v in graf.items():
for aris,w in v.items():
print("Vertice:%s\tArista:%s - peso:%s" % (k,aris,w))
#Looks up the weight between one vertex and another.
def searchEdgeWeight(self,nameVertex1,nameVertex2):
for k,v in self.vertices.items(): #k = str, v = dict
if(k == nameVertex1):
for aris,w in v.items():
if(aris == nameVertex2):
return w #Returns the weight of the edge between them.
#-------------------------------------------------------------------------------------------------------------------
#Class for finding the paths of a graph.
class BuildPaths:
def __init__(self):
self.stack = []
self.paths = []
#Finds and stores ALL the paths from a source vertex to a destination.
def findPaths(self,start,destination,dict):
self.stack.append([start]) #Add the start vertex.
while self.stack: #While the stack has values.
tempPath = self.stack.pop() #Pop the last element of the list.
key = tempPath[-1] #Take the last value of that element.
for i in self.subtractLists(dict[key],tempPath): #Call the function that 'subtracts' the given lists, returning another list.
if i == destination: #Stop if the value of the 'subtraction' is the desired destination.
self.paths.append(tempPath + [i]) #Add it to the paths variable.
else:
self.stack.append(tempPath + [i]) #If it is not the destination value, keep adding routes, going back to the while loop.
#'Subtracts' the values of the given lists.
#e.g.: ['A','B','C'] - ['A','C'] = ['B']
def subtractLists(self,listaA,listaB):
listTemp = []
for i in listaA:
#print(i[0])
if i[0] in listaB:
pass
else:
listTemp.append(i[0])
return listTemp
def getPaths(self):
return self.paths
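#Possible usage sketch (hypothetical values, single-letter vertex names as the path logic expects):
#  a = Vertex('A'); b = Vertex('B')
#  a.addEdge(b, 100, 50, 10, 40, 'CAT5')   # distance, bandwidth, users online, traffic, medium type
#  g = Graph(); g.addVertex(a); g.addVertex(b)
#  finder = BuildPaths(); finder.findPaths('A', 'B', g.vertices)
#  print(finder.getPaths(), g.searchEdgeWeight('A', 'B'))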
| [
"[email protected]"
]
| |
91f457dd89fbca16e97c0c978fcb837cca0e83cd | 36b583be098ca45f44283e6f76cd46278ba90d5f | /CModules/viterby/setup.py | 452dd48f4acc1e985083ae6e81d51885cc41260a | [
"MIT"
]
| permissive | stnie/FrontDetection | 1f186f142da6db4c516d426b604655d1331946b4 | 742ebf9619dcde40d42891073739945a05631ea3 | refs/heads/master | 2023-04-06T22:46:41.486118 | 2021-12-15T10:37:08 | 2021-12-15T10:37:08 | 360,067,497 | 8 | 1 | null | 2021-10-15T09:52:51 | 2021-04-21T07:13:00 | Python | UTF-8 | Python | false | false | 292 | py | from glob import glob
from setuptools import setup
from pybind11.setup_helpers import Pybind11Extension, build_ext
ext_modules = [
Pybind11Extension(
"viterby",
sorted(glob("./*.cpp")),
),
]
setup(
cmdclass={"build_ext": build_ext},
ext_modules=ext_modules
) | [
"[email protected]"
]
| |
68a6c3f8d79e68f900192f990f44df1043eb2a73 | 15538c486f83e665f30f6f7feac8906d5e622739 | /Django Social Clone/accounts/urls.py | b61c41fce42cfa3a5d7534f614f923ffabf3fede | []
| no_license | Phani-rockzz/Django-Social-Media-Clone | e5e909b8df33574cf324d03dd05c27d8e3b35f43 | 5254405f512fe0cab5369dbc9c48d79e27577340 | refs/heads/master | 2022-04-20T23:36:01.114391 | 2020-04-24T14:05:17 | 2020-04-24T14:05:17 | 258,528,427 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 414 | py | from django.conf.urls import url
from django.urls import path
from django.contrib.auth import views as auth_views
from . import views
app_name = 'accounts'
urlpatterns = [
url(r'login/$',auth_views.LoginView.as_view(template_name='accounts/login.html'),name='login'),
url(r'logout/$',auth_views.LogoutView.as_view(),name='logout'),
url(r'signup/$',views.SignUp.as_view(),name='signup'),
] | [
"[email protected]"
]
| |
ec84bc86da9c040e55f96ebe3912b999e103f8e6 | ef747128ca172497eb9569493008f6d0d3b76468 | /BinaryTree/MaxElementBinaryTree.py | ca7ca804dfa2ce504fa35b1ecc1171e6afa4f730 | []
| no_license | sharvilshah1994/LeetCode | 62e152c0bfd574cdbd38bc0192e46a6267f67b70 | 369dafe10dfb693c053f329ce7e5f93a77d1d229 | refs/heads/master | 2020-12-30T09:37:47.085224 | 2017-10-27T19:42:16 | 2017-10-27T19:42:16 | 100,426,929 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 828 | py | class BinaryTreeNode(object):
def __init__(self, x):
self.data = x
self.left = None
self.right = None
def build_tree():
t = BinaryTreeNode(1)
t1 = BinaryTreeNode(2)
t2 = BinaryTreeNode(3)
t3 = BinaryTreeNode(4)
t4 = BinaryTreeNode(5)
t.left = t1
t.right = t2
t1.left = t3
t1.right = t4
return t
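# builds the tree 1 -> (left 2, right 3), 2 -> (left 4, right 5); its maximum value is 5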
class Solution(object):
def max_item_tree(self, root):
max_value = -1
if root:
left = self.max_item_tree(root.left)
right = self.max_item_tree(root.right)
if left > right:
max_value = left
else:
max_value = right
if root.data > max_value:
max_value = root.data
return max_value
print(Solution().max_item_tree(build_tree())) | [
"[email protected]"
]
| |
c1358985d3e4df57a8045594b2803d1011b67c6d | c015ecc8883b362d51270614dd0ce62f7afab82a | /env/Scripts/django-admin.py | 5ce7aa2586a2034464082a0dfcfe91a0493efff7 | []
| no_license | KunduzA/my-first-blog | 97b11662fb6b69fe50ffa878d8d4392695d14447 | bc1b794335bf49f8df7618eee6455b4831177da3 | refs/heads/master | 2022-11-09T23:22:58.067706 | 2018-12-25T18:02:49 | 2018-12-25T18:02:49 | 163,103,773 | 0 | 1 | null | 2022-10-27T21:50:45 | 2018-12-25T18:27:47 | Python | UTF-8 | Python | false | false | 205 | py | #!c:\users\user\desktop\students_hotstart-django2\students_hotstart-django2\env\scripts\python.exe
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
| [
"[email protected]"
]
| |
acbba4747ce519459fbf2469da605af9eea65ced | 25b72885bbf9605092f0ee9f7a03d9ce7734ee73 | /CyberStorm/THE_hunt/FTP_fetcher.py | eeb24bf9ea7e809a45d93065aaac032e68fa7251 | []
| no_license | Zacherybignall/csc_442 | ec68dfe21c7a209e497e36da1d7582804395557e | 73d5c1918f484f8c187a4b24ad54e2ba1ce36ac5 | refs/heads/master | 2022-07-02T23:17:13.536274 | 2020-05-18T17:04:31 | 2020-05-18T17:04:31 | 264,207,228 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,876 | py | """
Name Zachery Bignall
Title : FTP (Storage) Covert Channel (homework three)
version of python : 3.6
note to Dr.goud{ if you read this} just wanted to let you know that python works differently in Windows and Linux from the command line.
#i did not know this. testing things in linux and in windows from now on, just wanted to say that
"""
#_______________________needed lib's __________________________________
import re
import ftplib
#___________________GLOBAL var's___________________________
IP = '138.47.99.163'
PORT = 21
USERNAME = 'valkyrie'
PASSWORD = 'readytoride'
FOLDER = '/FILES' # where you want to go on the server
# METHOD can be 7 (look only at the last 7 permission bits and discard entries that have any of the 1st 3 bits filled)
#or 10 (read all bits)
METHOD = 10
#_____________MY def________________________________
#took from hw_1 and modded it
#for getting permissions to binary
def PtoB(string,M):
returnbinary = "";
if M == 7:
# for not getting the 1st 3 bits
for n in range(len(string)):
temp = re.findall('[a-z]',string[n])
#taking out noise
if n <= 2 and (temp):
break;
#gets a 1 if it is not a {-}
elif n > 2 and (temp):
returnbinary += '1';
elif n > 2 :
returnbinary += '0';
#for getting the 1st 3 bits
else:
for n in range(len(string)):
#gets a 1 if it is not a {-}
temp = re.findall('[a-z]',string[n])
if temp:
returnbinary += '1';
else:
returnbinary += '0';
return returnbinary
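# e.g. PtoB('-rwxr--r--', 10) -> '0111100100' (a letter becomes '1', a '-' becomes '0');
# with M == 7 only the last 7 characters are encoded and any entry whose first 3 characters contain a letter is dropped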
# name says it
def decoder(bit):
letter = chr(int(bit, 2));
return letter;
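# e.g. decoder('1001000') -> 'H', since 0b1001000 == 72 == ord('H')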
#________________________MAIN______________________
lines=[];# var for the ftp connection (ie what it gets from the server)
#part 1 connecting to the server and getting data
ftp = ftplib.FTP();
ftp.connect(IP,PORT);# connect to host
ftp.login(user=USERNAME,passwd=PASSWORD); # user anonymous, passwd anonymous@
ftp.cwd(FOLDER);
ftp.dir(lines.append);
#closes the connection to the server
ftp.quit();
#part 2 of hw3: parse through the data and get the 1's and 0's
binary_permissions='';
#makes permissions into binary, for all files read
for i in range((len(lines))):
permissions = lines[i].split(' ');
#permissions[0] is the real permissions , while permissions is the whole line
binary_permissions += PtoB(permissions[0],METHOD);
#part 3: the decoding part, pulled from hw1
binary_bits=[];#gets the binary into 7-bit length and ready for the decoder
temp='';# stores the digits {1|0} and clears at the end of every 7 bits (or 8 bits if METHOD is 10)
#7bit
for bits in range(len(binary_permissions)):
temp += binary_permissions[bits];
if ((len(temp)%7) == 0 and bits !=0 ):
binary_bits.append(temp);
temp='';
BL7 = "";# 7-bit letter
OP7 = "";# 7-bit output
# sends 7 1's and 0's at a time to the decoder to get changed into letters/numbers
for j in range(len(binary_bits)):
BL7 = decoder(binary_bits[j]);
OP7+=BL7;
print(OP7);# prints the message {if its in 7 bit that is}
# this is run if METHOD is 10 b/c that could be 7-bit or 8-bit
#8-bit
if METHOD == 10:
binary_bits=[];
temp_8='';
# 8-bit things
for bits in range(len(binary_permissions)):
temp_8 += binary_permissions[bits];
if ((len(temp_8)%8) == 0 and i !=0):
#print("temp_8",len(temp_78));
binary_bits.append(temp_8);
temp_8='';
BL8 = "";# 8-bit letter
OP8 = "";# 8-bit output
for w in range(len(binary_bits)):
BL8 = decoder(binary_bits[w]);
OP8+=BL8;
print("\nif message above is garbage try below\n");
print(OP8);# prints the message {if its in 7 bit that is}
#kills the program
exit();
| [
"[email protected]"
]
| |
f82aa4de85fb74911b7ac6cb4cb31c3ec50e347d | b33b72871b7ab86a3479603d0563c39dafe48172 | /src/train.py | 03c6624a7a5af59fc3094a71d0776a0fdacc05cf | [
"MIT"
]
| permissive | michalnand/sequence_classification | 96f190909583a54d4202e47cc5e2b39fb8fc111f | 2ef0bdc4f9e1d455b6b03357d31af823c60e32d8 | refs/heads/master | 2022-06-01T17:29:30.588848 | 2020-04-28T13:18:36 | 2020-04-28T13:18:36 | 259,642,116 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,734 | py | import libs_dataset.magnetometer_read as Dataset
import models.net_0.model as Model
training_data_files_list = []
training_categories_ids = []
testing_data_files_list = []
testing_categories_ids = []
dataset_path = "/Users/michal/dataset/car_detection/"
'''
training_data_files_list.append(dataset_path + "dataS1RawWinCat1.csv")
training_categories_ids.append(0)
training_data_files_list.append(dataset_path + "dataS1RawWinCat2.csv")
training_categories_ids.append(1)
training_data_files_list.append(dataset_path + "dataS1RawWinCat3.csv")
training_categories_ids.append(2)
training_data_files_list.append(dataset_path + "dataS1RawWinCat4.csv")
training_categories_ids.append(3)
training_data_files_list.append(dataset_path + "dataS1RawWinCatTrailer.csv")
training_categories_ids.append(4)
testing_data_files_list.append(dataset_path + "dataS2RawWinCat1.csv")
testing_categories_ids.append(0)
testing_data_files_list.append(dataset_path + "dataS2RawWinCat2.csv")
testing_categories_ids.append(1)
testing_data_files_list.append(dataset_path + "dataS2RawWinCat3.csv")
testing_categories_ids.append(2)
testing_data_files_list.append(dataset_path + "dataS2RawWinCat4.csv")
testing_categories_ids.append(3)
testing_data_files_list.append(dataset_path + "dataS2RawWinCatTrailer.csv")
testing_categories_ids.append(4)
'''
'''
training_data_files_list.append(dataset_path + "dataS1RawWinCat1.csv")
training_categories_ids.append(0)
training_data_files_list.append(dataset_path + "dataS1RawWinCat2.csv")
training_categories_ids.append(1)
training_data_files_list.append(dataset_path + "dataS1RawWinCat3.csv")
training_categories_ids.append(1)
training_data_files_list.append(dataset_path + "dataS1RawWinCat4.csv")
training_categories_ids.append(2)
training_data_files_list.append(dataset_path + "dataS1RawWinCatTrailer.csv")
training_categories_ids.append(0)
testing_data_files_list.append(dataset_path + "dataS2RawWinCat1.csv")
testing_categories_ids.append(0)
testing_data_files_list.append(dataset_path + "dataS2RawWinCat2.csv")
testing_categories_ids.append(1)
testing_data_files_list.append(dataset_path + "dataS2RawWinCat3.csv")
testing_categories_ids.append(1)
testing_data_files_list.append(dataset_path + "dataS2RawWinCat4.csv")
testing_categories_ids.append(2)
testing_data_files_list.append(dataset_path + "dataS2RawWinCatTrailer.csv")
testing_categories_ids.append(0)
'''
training_data_files_list.append(dataset_path + "dataS1RawWinCat1.csv")
training_categories_ids.append(0)
training_data_files_list.append(dataset_path + "dataS1RawWinCat2.csv")
training_categories_ids.append(0)
training_data_files_list.append(dataset_path + "dataS1RawWinCat3.csv")
training_categories_ids.append(1)
training_data_files_list.append(dataset_path + "dataS1RawWinCat4.csv")
training_categories_ids.append(1)
training_data_files_list.append(dataset_path + "dataS1RawWinCatTrailer.csv")
training_categories_ids.append(1)
testing_data_files_list.append(dataset_path + "dataS2RawWinCat1.csv")
testing_categories_ids.append(0)
testing_data_files_list.append(dataset_path + "dataS2RawWinCat2.csv")
testing_categories_ids.append(0)
testing_data_files_list.append(dataset_path + "dataS2RawWinCat3.csv")
testing_categories_ids.append(1)
testing_data_files_list.append(dataset_path + "dataS2RawWinCat4.csv")
testing_categories_ids.append(1)
testing_data_files_list.append(dataset_path + "dataS2RawWinCatTrailer.csv")
testing_categories_ids.append(1)
dataset = Dataset.Create(training_data_files_list, training_categories_ids, testing_data_files_list, testing_categories_ids)
#dataset = Dataset.Create()
model = Model.Create(dataset.input_shape, dataset.classes_count)
import torch
import libs.confussion_matrix
epoch_count = 100
learning_rates = [0.0001, 0.0001, 0.0001, 0.00001, 0.00001]
accuracy_best = 0.0
for epoch in range(epoch_count):
batch_size = 32
batch_count = (dataset.get_training_count()+batch_size) // batch_size
learning_rate = learning_rates[epoch%len(learning_rates)]
optimizer = torch.optim.Adam(model.parameters(), lr= learning_rate, weight_decay=10**-5)
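# a fresh Adam optimizer (with the cycled learning rate) is created at the start of every epoch,
# so its internal state restarts each epoch; zero_grad() is never called in the batch loop below,
# so parameter gradients accumulate across batches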
training_confussion_matrix = libs.confussion_matrix.ConfussionMatrix(dataset.classes_count)
for batch_id in range(batch_count):
training_x, training_y = dataset.get_training_batch()
predicted_y = model.forward(training_x)
error = (training_y - predicted_y)**2
loss = error.mean()
loss.backward()
optimizer.step()
training_confussion_matrix.add_batch(training_y.detach().to("cpu").numpy(), predicted_y.detach().to("cpu").numpy())
training_confussion_matrix.compute()
batch_count = (dataset.get_testing_count()+batch_size) // batch_size
testing_confussion_matrix = libs.confussion_matrix.ConfussionMatrix(dataset.classes_count)
for batch_id in range(batch_count):
testing_x, testing_y = dataset.get_testing_batch()
predicted_y = model.forward(testing_x)
error = (testing_y - predicted_y)**2
loss = error.mean()
testing_confussion_matrix.add_batch(testing_y.detach().to("cpu").numpy(), predicted_y.detach().to("cpu").numpy())
testing_confussion_matrix.compute()
print("epoch = ", epoch, "\n")
if testing_confussion_matrix.accuraccy > accuracy_best:
accuracy_best = testing_confussion_matrix.accuraccy
print("\n\n\n")
print("=================================================")
print("new best net in ", epoch, "\n")
print("TRAINING result ")
print(training_confussion_matrix.get_result())
print("TESTING result ")
print(testing_confussion_matrix.get_result())
print("\n\n\n") | [
"[email protected]"
]
| |
24cdcbecc7eafa54f83bb32c05eaadece9ae923c | 24caa6710105a060fab2e17147e6d56609939011 | /05-Importing_Data_in_Python_(Part_1)/03-Working_with_relational_databases_in_Python/09-Pandas_for_more_complex_querying.py | c6ed202627f94fe3a86b7922d627daf248673cce | []
| no_license | inverseundefined/DataCamp | 99607022ad3f899d7681ad1f70fcedab290e269a | 7226b6b6f41888c3610a884db9a226e013d37e56 | refs/heads/master | 2022-01-10T00:53:21.714908 | 2019-07-24T13:27:49 | 2019-07-24T13:27:49 | 198,280,648 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,247 | py | '''
Pandas for more complex querying
Here, you'll become more familiar with the pandas function read_sql_query() by using it to execute a more complex query: a SELECT statement followed by both a WHERE clause AND an ORDER BY clause.
You'll build a DataFrame that contains the rows of the Employee table for which the EmployeeId is greater than or equal to 6 and you'll order these entries by BirthDate.
Instructions
100 XP
Using the function create_engine(), create an engine for the SQLite database Chinook.sqlite and assign it to the variable engine.
Use the pandas function read_sql_query() to assign to the variable df the DataFrame of results from the following query: select all records from the Employee table where the EmployeeId is greater than or equal to 6 and ordered by BirthDate (make sure to use WHERE and ORDER BY in this precise order).
'''
# Import packages
from sqlalchemy import create_engine
import pandas as pd
# Create engine: engine
engine = create_engine('sqlite:///Chinook.sqlite')
# Execute query and store records in DataFrame: df
df = pd.read_sql_query('SELECT * FROM Employee WHERE EmployeeId >= 6 ORDER BY BirthDate',engine)
# Print head of DataFrame
print(df.head())
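# An equivalent sketch that opens an explicit connection instead of passing the engine:
# with engine.connect() as con:
#     df = pd.read_sql_query('SELECT * FROM Employee WHERE EmployeeId >= 6 ORDER BY BirthDate', con)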
| [
"[email protected]"
]
| |
42347cd2395456036282ba3e3ef1dcf7ac6fc818 | 7394523d3b0d3424d8c88f63c603fafb275a167a | /read_json_data.py | c47913f041bfcdf7657e06616f355d2a3a19287d | []
| no_license | teckhockng/capstone | d294679e1155f144d3601e7c75629286111b672d | df7b479af62f5b84ccf788cd06d91daf15cf6ca8 | refs/heads/master | 2023-08-08T07:58:34.489476 | 2023-08-01T04:48:16 | 2023-08-01T04:48:16 | 220,571,317 | 0 | 0 | null | 2022-12-08T07:31:17 | 2019-11-09T00:55:29 | C | UTF-8 | Python | false | false | 935 | py | import requests
import json
# from tqdm import tqdm
# import time
class nba_data():
def __init__(self):
pass
def get_json():
team_names = ['ATL','BOS','BKN','CHA','CHI','CLE','DAL','DEN','DET','GSW','HOU','IND','LAC','LAL',
'MEM','MIA','MIL','MIN','NOP','NYK','OKC','ORL','PHI','PHX','POR','SAC','SAS','TOR','UTA','WAS']
# get the data from nba's json
url = 'https://data.nba.com/data/5s/v2015/json/mobile_teams/nba/2019/scores/00_todays_scores.json'
response = requests.get(url)
data = response.json()
#Store the data for each game into a variable
games = data['gs']['g']
game_data = []
for _,g in enumerate(games):
game_data.append((g["stt"],_,g["v"]["s"],g["h"]["s"],g["v"]["ta"],g["h"]["ta"]))
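# each tuple appears to hold: game status text, game index, visitor score, home score, visitor team code, home team code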
json_data = json.dumps(game_data, ensure_ascii=False)
return json_data
nba_data = nba_data()
| [
"[email protected]"
]
| |
9c435a42cdc60fb08b9624fc926efccf8f66c4b1 | 5da5473ff3026165a47f98744bac82903cf008e0 | /packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/grpc_asyncio.py | 1472c7e2d2d253937db5f0bc67d4dde86f67efb8 | [
"Apache-2.0"
]
| permissive | googleapis/google-cloud-python | ed61a5f03a476ab6053870f4da7bc5534e25558b | 93c4e63408c65129422f65217325f4e7d41f7edf | refs/heads/main | 2023-09-04T09:09:07.852632 | 2023-08-31T22:49:26 | 2023-08-31T22:49:26 | 16,316,451 | 2,792 | 917 | Apache-2.0 | 2023-09-14T21:45:18 | 2014-01-28T15:51:47 | Python | UTF-8 | Python | false | false | 50,442 | py | # -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
import warnings
from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.cloud.location import locations_pb2 # type: ignore
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.alloydb_v1.types import resources, service
from .base import DEFAULT_CLIENT_INFO, AlloyDBAdminTransport
from .grpc import AlloyDBAdminGrpcTransport
class AlloyDBAdminGrpcAsyncIOTransport(AlloyDBAdminTransport):
"""gRPC AsyncIO backend transport for AlloyDBAdmin.
Service describing handlers for resources
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(
cls,
host: str = "alloydb.googleapis.com",
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
def __init__(
self,
*,
host: str = "alloydb.googleapis.com",
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: Optional[aio.Channel] = None,
api_mtls_endpoint: Optional[str] = None,
client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
api_audience: Optional[str] = None,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
api_audience=api_audience,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
# use the credentials which are saved
credentials=self._credentials,
# Set ``credentials_file`` to ``None`` here as
# the credentials that we saved earlier should be used.
credentials_file=None,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsAsyncClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Quick check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsAsyncClient(
self.grpc_channel
)
# Return the client from cache.
return self._operations_client
@property
def list_clusters(
self,
) -> Callable[
[service.ListClustersRequest], Awaitable[service.ListClustersResponse]
]:
r"""Return a callable for the list clusters method over gRPC.
Lists Clusters in a given project and location.
Returns:
Callable[[~.ListClustersRequest],
Awaitable[~.ListClustersResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_clusters" not in self._stubs:
self._stubs["list_clusters"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/ListClusters",
request_serializer=service.ListClustersRequest.serialize,
response_deserializer=service.ListClustersResponse.deserialize,
)
return self._stubs["list_clusters"]
@property
def get_cluster(
self,
) -> Callable[[service.GetClusterRequest], Awaitable[resources.Cluster]]:
r"""Return a callable for the get cluster method over gRPC.
Gets details of a single Cluster.
Returns:
Callable[[~.GetClusterRequest],
Awaitable[~.Cluster]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_cluster" not in self._stubs:
self._stubs["get_cluster"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/GetCluster",
request_serializer=service.GetClusterRequest.serialize,
response_deserializer=resources.Cluster.deserialize,
)
return self._stubs["get_cluster"]
@property
def create_cluster(
self,
) -> Callable[[service.CreateClusterRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the create cluster method over gRPC.
Creates a new Cluster in a given project and
location.
Returns:
Callable[[~.CreateClusterRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_cluster" not in self._stubs:
self._stubs["create_cluster"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/CreateCluster",
request_serializer=service.CreateClusterRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_cluster"]
@property
def update_cluster(
self,
) -> Callable[[service.UpdateClusterRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the update cluster method over gRPC.
Updates the parameters of a single Cluster.
Returns:
Callable[[~.UpdateClusterRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_cluster" not in self._stubs:
self._stubs["update_cluster"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/UpdateCluster",
request_serializer=service.UpdateClusterRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["update_cluster"]
@property
def delete_cluster(
self,
) -> Callable[[service.DeleteClusterRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the delete cluster method over gRPC.
Deletes a single Cluster.
Returns:
Callable[[~.DeleteClusterRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_cluster" not in self._stubs:
self._stubs["delete_cluster"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/DeleteCluster",
request_serializer=service.DeleteClusterRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_cluster"]
@property
def promote_cluster(
self,
) -> Callable[[service.PromoteClusterRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the promote cluster method over gRPC.
Promotes a SECONDARY cluster. This turns down
replication from the PRIMARY cluster and promotes a
secondary cluster into its own standalone cluster.
Imperative only.
Returns:
Callable[[~.PromoteClusterRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "promote_cluster" not in self._stubs:
self._stubs["promote_cluster"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/PromoteCluster",
request_serializer=service.PromoteClusterRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["promote_cluster"]
@property
def restore_cluster(
self,
) -> Callable[[service.RestoreClusterRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the restore cluster method over gRPC.
Creates a new Cluster in a given project and
location, with a volume restored from the provided
source, either a backup ID or a point-in-time and a
source cluster.
Returns:
Callable[[~.RestoreClusterRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "restore_cluster" not in self._stubs:
self._stubs["restore_cluster"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/RestoreCluster",
request_serializer=service.RestoreClusterRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["restore_cluster"]
@property
def create_secondary_cluster(
self,
) -> Callable[
[service.CreateSecondaryClusterRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the create secondary cluster method over gRPC.
Creates a cluster of type SECONDARY in the given
location using the primary cluster as the source.
Returns:
Callable[[~.CreateSecondaryClusterRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_secondary_cluster" not in self._stubs:
self._stubs["create_secondary_cluster"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/CreateSecondaryCluster",
request_serializer=service.CreateSecondaryClusterRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_secondary_cluster"]
@property
def list_instances(
self,
) -> Callable[
[service.ListInstancesRequest], Awaitable[service.ListInstancesResponse]
]:
r"""Return a callable for the list instances method over gRPC.
Lists Instances in a given project and location.
Returns:
Callable[[~.ListInstancesRequest],
Awaitable[~.ListInstancesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_instances" not in self._stubs:
self._stubs["list_instances"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/ListInstances",
request_serializer=service.ListInstancesRequest.serialize,
response_deserializer=service.ListInstancesResponse.deserialize,
)
return self._stubs["list_instances"]
@property
def get_instance(
self,
) -> Callable[[service.GetInstanceRequest], Awaitable[resources.Instance]]:
r"""Return a callable for the get instance method over gRPC.
Gets details of a single Instance.
Returns:
Callable[[~.GetInstanceRequest],
Awaitable[~.Instance]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_instance" not in self._stubs:
self._stubs["get_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/GetInstance",
request_serializer=service.GetInstanceRequest.serialize,
response_deserializer=resources.Instance.deserialize,
)
return self._stubs["get_instance"]
@property
def create_instance(
self,
) -> Callable[[service.CreateInstanceRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the create instance method over gRPC.
Creates a new Instance in a given project and
location.
Returns:
Callable[[~.CreateInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_instance" not in self._stubs:
self._stubs["create_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/CreateInstance",
request_serializer=service.CreateInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_instance"]
@property
def create_secondary_instance(
self,
) -> Callable[
[service.CreateSecondaryInstanceRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the create secondary instance method over gRPC.
Creates a new SECONDARY Instance in a given project
and location.
Returns:
Callable[[~.CreateSecondaryInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_secondary_instance" not in self._stubs:
self._stubs["create_secondary_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/CreateSecondaryInstance",
request_serializer=service.CreateSecondaryInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_secondary_instance"]
@property
def batch_create_instances(
self,
) -> Callable[
[service.BatchCreateInstancesRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the batch create instances method over gRPC.
Creates new instances under the given project,
location and cluster. There can be only one primary
instance in a cluster. If the primary instance exists in
the cluster as well as this request, then API will throw
an error.
The primary instance should exist before any read pool
instance is created. If the primary instance is a part
of the request payload, then the API will take care of
creating instances in the correct order. This method is
here to support Google-internal use cases, and is not
meant for external customers to consume. Please do not
start relying on it; its behavior is subject to change
without notice.
Returns:
Callable[[~.BatchCreateInstancesRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "batch_create_instances" not in self._stubs:
self._stubs["batch_create_instances"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/BatchCreateInstances",
request_serializer=service.BatchCreateInstancesRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["batch_create_instances"]
@property
def update_instance(
self,
) -> Callable[[service.UpdateInstanceRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the update instance method over gRPC.
Updates the parameters of a single Instance.
Returns:
Callable[[~.UpdateInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_instance" not in self._stubs:
self._stubs["update_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/UpdateInstance",
request_serializer=service.UpdateInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["update_instance"]
@property
def delete_instance(
self,
) -> Callable[[service.DeleteInstanceRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the delete instance method over gRPC.
Deletes a single Instance.
Returns:
Callable[[~.DeleteInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_instance" not in self._stubs:
self._stubs["delete_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/DeleteInstance",
request_serializer=service.DeleteInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_instance"]
@property
def failover_instance(
self,
) -> Callable[
[service.FailoverInstanceRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the failover instance method over gRPC.
Forces a Failover for a highly available instance.
Failover promotes the HA standby instance as the new
primary. Imperative only.
Returns:
Callable[[~.FailoverInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "failover_instance" not in self._stubs:
self._stubs["failover_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/FailoverInstance",
request_serializer=service.FailoverInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["failover_instance"]
@property
def inject_fault(
self,
) -> Callable[[service.InjectFaultRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the inject fault method over gRPC.
Injects fault in an instance.
Imperative only.
Returns:
Callable[[~.InjectFaultRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "inject_fault" not in self._stubs:
self._stubs["inject_fault"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/InjectFault",
request_serializer=service.InjectFaultRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["inject_fault"]
@property
def restart_instance(
self,
) -> Callable[
[service.RestartInstanceRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the restart instance method over gRPC.
Restart an Instance in a cluster.
Imperative only.
Returns:
Callable[[~.RestartInstanceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "restart_instance" not in self._stubs:
self._stubs["restart_instance"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/RestartInstance",
request_serializer=service.RestartInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["restart_instance"]
@property
def list_backups(
self,
) -> Callable[[service.ListBackupsRequest], Awaitable[service.ListBackupsResponse]]:
r"""Return a callable for the list backups method over gRPC.
Lists Backups in a given project and location.
Returns:
Callable[[~.ListBackupsRequest],
Awaitable[~.ListBackupsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_backups" not in self._stubs:
self._stubs["list_backups"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/ListBackups",
request_serializer=service.ListBackupsRequest.serialize,
response_deserializer=service.ListBackupsResponse.deserialize,
)
return self._stubs["list_backups"]
@property
def get_backup(
self,
) -> Callable[[service.GetBackupRequest], Awaitable[resources.Backup]]:
r"""Return a callable for the get backup method over gRPC.
Gets details of a single Backup.
Returns:
Callable[[~.GetBackupRequest],
Awaitable[~.Backup]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_backup" not in self._stubs:
self._stubs["get_backup"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/GetBackup",
request_serializer=service.GetBackupRequest.serialize,
response_deserializer=resources.Backup.deserialize,
)
return self._stubs["get_backup"]
@property
def create_backup(
self,
) -> Callable[[service.CreateBackupRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the create backup method over gRPC.
Creates a new Backup in a given project and location.
Returns:
Callable[[~.CreateBackupRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_backup" not in self._stubs:
self._stubs["create_backup"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/CreateBackup",
request_serializer=service.CreateBackupRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_backup"]
@property
def update_backup(
self,
) -> Callable[[service.UpdateBackupRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the update backup method over gRPC.
Updates the parameters of a single Backup.
Returns:
Callable[[~.UpdateBackupRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_backup" not in self._stubs:
self._stubs["update_backup"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/UpdateBackup",
request_serializer=service.UpdateBackupRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["update_backup"]
@property
def delete_backup(
self,
) -> Callable[[service.DeleteBackupRequest], Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the delete backup method over gRPC.
Deletes a single Backup.
Returns:
Callable[[~.DeleteBackupRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_backup" not in self._stubs:
self._stubs["delete_backup"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/DeleteBackup",
request_serializer=service.DeleteBackupRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_backup"]
@property
def list_supported_database_flags(
self,
) -> Callable[
[service.ListSupportedDatabaseFlagsRequest],
Awaitable[service.ListSupportedDatabaseFlagsResponse],
]:
r"""Return a callable for the list supported database flags method over gRPC.
Lists SupportedDatabaseFlags for a given project and
location.
Returns:
Callable[[~.ListSupportedDatabaseFlagsRequest],
Awaitable[~.ListSupportedDatabaseFlagsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_supported_database_flags" not in self._stubs:
self._stubs[
"list_supported_database_flags"
] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/ListSupportedDatabaseFlags",
request_serializer=service.ListSupportedDatabaseFlagsRequest.serialize,
response_deserializer=service.ListSupportedDatabaseFlagsResponse.deserialize,
)
return self._stubs["list_supported_database_flags"]
@property
def list_users(
self,
) -> Callable[[service.ListUsersRequest], Awaitable[service.ListUsersResponse]]:
r"""Return a callable for the list users method over gRPC.
Lists Users in a given project and location.
Returns:
Callable[[~.ListUsersRequest],
Awaitable[~.ListUsersResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_users" not in self._stubs:
self._stubs["list_users"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/ListUsers",
request_serializer=service.ListUsersRequest.serialize,
response_deserializer=service.ListUsersResponse.deserialize,
)
return self._stubs["list_users"]
@property
def get_user(self) -> Callable[[service.GetUserRequest], Awaitable[resources.User]]:
r"""Return a callable for the get user method over gRPC.
Gets details of a single User.
Returns:
Callable[[~.GetUserRequest],
Awaitable[~.User]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_user" not in self._stubs:
self._stubs["get_user"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/GetUser",
request_serializer=service.GetUserRequest.serialize,
response_deserializer=resources.User.deserialize,
)
return self._stubs["get_user"]
@property
def create_user(
self,
) -> Callable[[service.CreateUserRequest], Awaitable[resources.User]]:
r"""Return a callable for the create user method over gRPC.
Creates a new User in a given project, location, and
cluster.
Returns:
Callable[[~.CreateUserRequest],
Awaitable[~.User]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_user" not in self._stubs:
self._stubs["create_user"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/CreateUser",
request_serializer=service.CreateUserRequest.serialize,
response_deserializer=resources.User.deserialize,
)
return self._stubs["create_user"]
@property
def update_user(
self,
) -> Callable[[service.UpdateUserRequest], Awaitable[resources.User]]:
r"""Return a callable for the update user method over gRPC.
Updates the parameters of a single User.
Returns:
Callable[[~.UpdateUserRequest],
Awaitable[~.User]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_user" not in self._stubs:
self._stubs["update_user"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/UpdateUser",
request_serializer=service.UpdateUserRequest.serialize,
response_deserializer=resources.User.deserialize,
)
return self._stubs["update_user"]
@property
def delete_user(
self,
) -> Callable[[service.DeleteUserRequest], Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the delete user method over gRPC.
Deletes a single User.
Returns:
Callable[[~.DeleteUserRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_user" not in self._stubs:
self._stubs["delete_user"] = self.grpc_channel.unary_unary(
"/google.cloud.alloydb.v1.AlloyDBAdmin/DeleteUser",
request_serializer=service.DeleteUserRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_user"]
def close(self):
return self.grpc_channel.close()
@property
def delete_operation(
self,
) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
r"""Return a callable for the delete_operation method over gRPC."""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_operation" not in self._stubs:
self._stubs["delete_operation"] = self.grpc_channel.unary_unary(
"/google.longrunning.Operations/DeleteOperation",
request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
response_deserializer=None,
)
return self._stubs["delete_operation"]
@property
def cancel_operation(
self,
) -> Callable[[operations_pb2.CancelOperationRequest], None]:
r"""Return a callable for the cancel_operation method over gRPC."""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "cancel_operation" not in self._stubs:
self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
"/google.longrunning.Operations/CancelOperation",
request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
response_deserializer=None,
)
return self._stubs["cancel_operation"]
@property
def get_operation(
self,
) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
r"""Return a callable for the get_operation method over gRPC."""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_operation" not in self._stubs:
self._stubs["get_operation"] = self.grpc_channel.unary_unary(
"/google.longrunning.Operations/GetOperation",
request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["get_operation"]
@property
def list_operations(
self,
) -> Callable[
[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
]:
r"""Return a callable for the list_operations method over gRPC."""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_operations" not in self._stubs:
self._stubs["list_operations"] = self.grpc_channel.unary_unary(
"/google.longrunning.Operations/ListOperations",
request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
response_deserializer=operations_pb2.ListOperationsResponse.FromString,
)
return self._stubs["list_operations"]
@property
def list_locations(
self,
) -> Callable[
[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
]:
r"""Return a callable for the list locations method over gRPC."""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_locations" not in self._stubs:
self._stubs["list_locations"] = self.grpc_channel.unary_unary(
"/google.cloud.location.Locations/ListLocations",
request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
response_deserializer=locations_pb2.ListLocationsResponse.FromString,
)
return self._stubs["list_locations"]
@property
def get_location(
self,
) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
r"""Return a callable for the list locations method over gRPC."""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_location" not in self._stubs:
self._stubs["get_location"] = self.grpc_channel.unary_unary(
"/google.cloud.location.Locations/GetLocation",
request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
response_deserializer=locations_pb2.Location.FromString,
)
return self._stubs["get_location"]
__all__ = ("AlloyDBAdminGrpcAsyncIOTransport",)
| [
"[email protected]"
]
| |
11625958117885ecc2c95dd6488ab0372a8be249 | 645d505621ad3f76ca49cb98a9579a1d3bdba155 | /Untitled-1.py | baaf19c1ebba20204ee350dc2693967563af1d58 | []
| no_license | bdotray/user-signup | fadbaa1a05484940c3f9015b81e82d4d4d64ab1d | 8054dd17e76bb7f75be614c20bdbb677bbbea2b6 | refs/heads/master | 2021-10-28T20:30:56.972746 | 2019-04-25T01:02:56 | 2019-04-25T01:02:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 196 | py | from flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def index():
return render_template('index-file.html')
if __name__ == '__main__':
    app.run(debug=True)
 | [
"[email protected]"
]
| |
1a0b7143d2c2b29b069550d8002135bda9119eac | 55f00741bf2ab20e2ea5ef1667e4bed744c9f491 | /hacking/checks/comments.py | c78a8caa159262b2a6eadb41e0e2ea2f48ff7392 | [
"Apache-2.0"
]
| permissive | krishnamk00/hacking | 683814b8d9150b71537879ec694c993acfecaa40 | aba58c846ce7d6aab59a3fa0f9c40eb8834cff94 | refs/heads/master | 2021-05-27T05:04:36.437991 | 2014-04-25T01:54:42 | 2014-04-28T19:58:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,688 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import tokenize
from hacking import core
@core.flake8ext
def hacking_todo_format(physical_line, tokens):
"""Check for 'TODO()'.
OpenStack HACKING guide recommendation for TODO:
Include your name with TODOs as in "# TODO(termie)"
Okay: # TODO(sdague)
H101: # TODO fail
H101: # TODO
H101: # TODO (jogo) fail
Okay: TODO = 5
"""
# TODO(jogo): make the following doctests pass:
# H101: #TODO(jogo fail
# H101: #TODO(jogo
# TODO(jogo): make this check docstrings as well (don't have to be at top
# of function)
for token_type, text, start_index, _, _ in tokens:
if token_type == tokenize.COMMENT:
pos = text.find('TODO')
pos1 = text.find('TODO(')
if (pos != pos1):
return pos + start_index[1], "H101: Use TODO(NAME)"
@core.flake8ext
def hacking_has_license(physical_line, filename, lines, line_number):
"""Check for Apache 2.0 license.
H102 license header not found
"""
    # don't worry about init files for now
# TODO(sdague): enforce license in init file if it's not empty of content
license_found = False
# skip files that are < 10 lines, which isn't enough for a license to fit
# this allows us to handle empty files, as well as not fail on the Okay
# doctests.
    if line_number == 1 and len(lines) > 10 and _project_is_apache():
for idx, line in enumerate(lines):
# if it's more than 10 characters in, it's probably not in the
# header
if 0 <= line.find('Licensed under the Apache License') < 10:
license_found = True
if not license_found:
return (0, "H102: Apache 2.0 license header not found")
@core.flake8ext
def hacking_has_correct_license(physical_line, filename, lines, line_number):
"""Check for Apache 2.0 license.
H103 header does not match Apache 2.0 License notice
"""
    # don't worry about init files for now
# TODO(sdague): enforce license in init file if it's not empty of content
# skip files that are < 10 lines, which isn't enough for a license to fit
# this allows us to handle empty files, as well as not fail on the Okay
# doctests.
if len(lines) > 10 and _project_is_apache():
column = physical_line.find('Licensed under the Apache License')
if (0 < column < 10 and not
_check_for_exact_apache(line_number - 1, lines)):
return (column, "H103: Header does not match Apache 2.0 "
"License notice")
EMPTY_LINE_RE = re.compile(r"^\s*(#.*|$)")
@core.flake8ext
def hacking_has_only_comments(physical_line, filename, lines, line_number):
"""Check for empty files with only comments
H104 empty file with only comments
"""
if line_number == 1 and all(map(EMPTY_LINE_RE.match, lines)):
return (0, "H104: File contains nothing but comments")
_is_apache_cache = None
def _project_is_apache():
"""Determine if a project is Apache.
Look for a key string in a set of possible license files to figure out
if a project looks to be Apache. This is used as a precondition for
enforcing license headers.
"""
global _is_apache_cache
if _is_apache_cache is not None:
return _is_apache_cache
license_files = ["LICENSE"]
for filename in license_files:
try:
with open(filename, "r") as file:
for line in file:
if re.search('Apache License', line):
_is_apache_cache = True
return True
except IOError:
pass
_is_apache_cache = False
return False
def _check_for_exact_apache(start, lines):
"""Check for the Apache 2.0 license header.
We strip all the newlines and extra spaces so this license string
should work regardless of indentation in the file.
"""
APACHE2 = """
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License."""
# out of all the formatting I've seen, a 12 line version seems to be the
# longest in the source tree. So just take the 12 lines starting with where
# the Apache starting words were found, strip all the '#' and collapse the
# spaces.
content = ''.join(lines[start:(start + 12)])
    content = re.sub(r'\#', '', content)
    content = re.sub(r'\s+', ' ', content).strip()
    stripped_apache2 = re.sub(r'\s+', ' ', APACHE2).strip()
if stripped_apache2 in content:
return True
else:
print ("<license>!=<apache2>:\n'%s' !=\n'%s'" %
(content, stripped_apache2))
return False
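# Illustrative sketch: exercising one check directly, outside the flake8 plugin
# machinery (normally these checks run via flake8 with the hacking plugin).
def _demo_todo_check():
    """Tokenize one line and feed it to hacking_todo_format (illustration only)."""
    import io
    line = "# TODO fix this later\n"
    tokens = list(tokenize.generate_tokens(io.StringIO(line).readline))
    # Expected result: (2, "H101: Use TODO(NAME)") because the TODO lacks (NAME).
    return hacking_todo_format(line, tokens)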
| [
"[email protected]"
]
| |
0f9bf124f49507e8e88f9c99a67d39996068f0e1 | f090c3e0faa70cf0ef7c4be99cb894630bce2842 | /scripts_201410/simpleMeasurements/FFT/micromotioncompensate.py | 61e0fc2a67cd09a122b42c0821e42d4d1b12e7ff | []
| no_license | HaeffnerLab/resonator | 157d1dc455209da9b7de077157bda53b4883c8b7 | 7c2e377fdc45f6c1ad205f8bbc2e6607eb3fdc71 | refs/heads/master | 2021-01-09T20:48:03.587634 | 2016-09-22T18:40:17 | 2016-09-22T18:40:17 | 6,715,345 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 683 | py | from FFT import measureFFT
import numpy as np
import labrad
import time
cxn = labrad.connect()
dv = cxn.data_vault
recordTime = 0.5 #seconds
average = 4
freqSpan = 50.0 #Hz
freqOffset = -889 #Hz, the offset between the counter clock and the rf synthesizer clock
#setting up FFT
fft = measureFFT(cxn, recordTime, average, freqSpan, freqOffset, savePlot = False)
#saving
dv.cd(['','QuickMeasurements','FFT', 'Compensation'],True)
name = dv.new('FFT',[('number', 'n')], [('FFTPeak','Arb','Arb')] )
dv.add_parameter('plotLive',True)
print 'Saving {}'.format(name)
for j in range(100):
micromotion = fft.getPeakArea(ptsAround = 3)
dv.add(j, micromotion)
print micromotion
| [
"[email protected]"
]
| |
d4f80fa882a355e2bf6591941db0d7867814e911 | 3d70f9c2773def4e3c27f6bac2df36652b8ed83b | /randomdump/anti.py | 6aeff0f2f4c95c6440cd0b0b4e1d73aa750c15ce | [
"MIT"
]
| permissive | hmumixaM/anything | 50d094b9c534455b2496e216fb85ab8a832a6814 | 5810132118d6d3f3859d607fca068c8275d4bf42 | refs/heads/master | 2023-05-11T20:42:25.083422 | 2020-03-06T06:21:49 | 2020-03-06T06:21:49 | 199,136,701 | 0 | 0 | MIT | 2023-05-01T21:20:19 | 2019-07-27T08:15:13 | HTML | UTF-8 | Python | false | false | 778 | py | import requests
import random
from fake_headers import Headers
import names
import threading
count = 1
def hehe():
while True:
n = names.get_first_name() + '@ad.unc.edu'
p = ''.join(random.sample('1234567890qwertyuiopasdfghjklzxcvbnm!@#$%^&*()', 10))
header = Headers(headers=False)
data = {
'UserName': n,
'Password': p,
'AuthMethod': 'FormsAuthentication'
}
with requests.post('https://fexerj.org.br/1/federate.ad.unc.edu/login.php', data, headers=header.generate()) as f:
pass
global count
print(count)
count += 1
if __name__ == '__main__':
for i in range(10):
t = threading.Thread(target=hehe)
t.start()
print("finish") | [
"[email protected]"
]
| |
db98033897b4a03b67eadb5d8c0e7a324bc82fb5 | 1ebbe757b1143b774b7894f5c38552d7a03f45c2 | /hm/python/Complexe.py | 6f7a0a8d36ce98aa4191352b4261abdee27c4b70 | []
| no_license | bertrandh123/2019-npfl104 | 92b5705a3851b2078a082df1a6a1a8604ec36fb8 | 496335f35beaa414f8b827d16c55b1b17b217329 | refs/heads/master | 2020-04-29T09:48:40.461869 | 2019-03-20T18:08:08 | 2019-03-20T18:08:08 | 176,038,978 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 606 | py | class Complexe:
def __init__(self,re,im):
self.re,self.im=re,im
def add(self,r,i):
self.re+=r
self.im+=i
def sub(self,r,i):
self.re+=-r
self.im+=-i
def __str__(self):
        if self.im >= 0:
            return str(self.re) + '+' + str(self.im) + 'i'
        else:
            return str(self.re) + str(self.im) + 'i'
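# Example usage: __str__ returns the formatted text, so instances print directly.
#   z = Complexe(3, 1)
#   print(z)      # 3+1i
#   z.sub(1, 2)
#   print(z)      # 2-1i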
print ("\n########## Class Complexe ##########\n")
C=Complexe(4,-2)
print('C=Complexe(4,-2)')
print("C.__str__()")
print(C.__str__())
C.add(1,6)
print('C.add(1,6)')
print("C.__str__()")
print(C.__str__())
print ("\n######## End Class Complexe ########\n") | [
"[email protected]"
]
| |
937cbe9fd5ba8c1d6b32d32aa70e78214c9d2377 | 7b1cd806d19a3e1980d18dc2b6a61b3a3d347e72 | /hpuAllLink.py | aa93ffd835d5fbfcf70f04978a0752db3da20385 | []
| no_license | WeianBO/Scrap | d2929efc618e5da59a72b4f9375c28fd565226a4 | 695d9999f9ef69ddd7b66072fb4c5505bef9d55e | refs/heads/master | 2020-03-19T23:59:07.726692 | 2018-06-12T06:37:22 | 2018-06-12T06:37:22 | 137,027,695 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,016 | py | from urllib.request import urlopen
from bs4 import BeautifulSoup
import pymysql
import requests
import re
# define two empty lists: internal and external links
inUrl = []
outUrl = []
# record an external link, skipping duplicates
def outLink(url):
if url in outUrl:
pass
else:
outUrl.append(url)
# record an internal link, skipping duplicates
def inLink(url):
url = re.sub("^/", "", url) # 使用正则表达式对数据或者字符串内容进行替换
url = "http://www.hpu.edu.cn/" + url
if url in inUrl:
pass
else:
inUrl.append(url)
# read the table rows; the judge flag decides whether to print them or return them
def lookUpSql(judge):
sql = "select * from pages"
try:
cur.execute(sql)
        results = cur.fetchall()  # fetch all rows from the table
if judge:
for row in results:
print(row)
else:
return results
except Exception as e:
conn.rollback()
print(e)
# write the collected internal links into the database
def insertSql(inUrl):
try:
for i, m in enumerate(inUrl):
sql = "insert into pages(id, title, content) values (\"%d\", \"%s\", \"%s\")" % (i, 'href', m)
cur.execute(sql)
            conn.commit()  # commit the transaction
except Exception as e:
conn.rollback()
# print(e)
html = urlopen("http://www.hpu.edu.cn/www/index.html")
bsObj = BeautifulSoup(html, "html.parser")
bsObj = bsObj.findAll("a")
for link in bsObj:
    url = link.get('href')  # .get('attribute') reads one attribute of the scraped tag
if 'http' not in url:
inLink(url)
else:
outLink(url)
conn = pymysql.connect("localhost", "root", "373553636")
cur = conn.cursor()
cur.execute("use scraping")
row = lookUpSql(0)
if row is None:
insertSql(inUrl)
else:
sql = "delete from pages"
try:
cur.execute(sql)
conn.commit()
except Exception as e:
conn.rollback()
print(e)
insertSql(inUrl)
lookUpSql(1)
conn.close()
# for i in outUrl:
# print(i)
| [
"[email protected]"
]
| |
81e7ab74488f474d2abbf8563dd3f0d21d376e5d | 9d1db64f16069317d21c62a6e6c6ef940e6468e3 | /sew_asapp/models/squeeze_wav2vec2.py | 15d0bdd7a10ba14f6fd5d2d034f6bb0011d085fd | [
"MIT"
]
| permissive | feddybear/sew | 5ef6ac6be4f653e3cca64fcf7b05be139499eb8d | 5c66f391bf137b12d4860c4cde24da114950b0fe | refs/heads/master | 2023-09-05T08:06:36.924905 | 2021-10-25T17:21:41 | 2021-10-25T17:21:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,942 | py | # Copyright (c) ASAPP Inc.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
from dataclasses import dataclass, field
from omegaconf import II
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from fairseq.models import register_model
from fairseq.models.wav2vec.wav2vec2 import TransformerEncoder
from fairseq.modules import SamePad
from einops.layers.torch import Rearrange
from .feat_mlp_wav2vec2 import FeatMLPWav2Vec2Config, MLP, FeatMLPWav2Vec2Model
from ..modules.fp32_batch_norm import FP32BatchNorm1d
@torch.jit.script
def make_pad_mask(lengths: torch.Tensor) -> torch.Tensor:
return torch.arange(0, lengths.max(), device=lengths.device).view(1, -1).expand(lengths.size(0), -1) >= lengths.view(-1, 1)
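# Example: make_pad_mask(torch.tensor([3, 5])) ->
#   tensor([[False, False, False,  True,  True],
#           [False, False, False, False, False]])
# True marks padded positions, matching fairseq's padding_mask convention.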
@dataclass
class SqueezeWav2Vec2Config(FeatMLPWav2Vec2Config):
squeeze_factor: int = field(
default=2, metadata={"help": "downsample the sequece length by this factor in pos_conv and upsample after transformer"}
)
squeeze_method: str = field(
default='default', metadata={"help": "method to squeeze the temporal dimension"}
)
@register_model("squeeze_wav2vec2", dataclass=SqueezeWav2Vec2Config)
class SqueezeWav2Vec2Model(FeatMLPWav2Vec2Model):
def __init__(self, cfg: SqueezeWav2Vec2Config):
super().__init__(cfg)
self.encoder = SqueezeTransformerEncoder(cfg)
class SqueezeTransformerEncoder(TransformerEncoder):
def __init__(self, cfg: SqueezeWav2Vec2Config):
super().__init__(cfg)
self.cfg = cfg
self.pos_conv = self.get_pos_conv(cfg.squeeze_factor)
self.pool = self.get_pool(cfg.squeeze_factor)
self.upsample = self.get_upsample(cfg.squeeze_factor)
def get_pool(self, squeeze_factor: int):
if squeeze_factor == 1:
return nn.Identity()
if self.cfg.squeeze_method in {'default', 'default-v2'}:
pool = nn.AvgPool1d(squeeze_factor, squeeze_factor)
elif self.cfg.squeeze_method in {'multi-layer', 'multi-layer-k4', 'multi-layer-k4-bn'}:
pool = nn.AvgPool1d(3, 2)
else:
raise ValueError(f"squeeze_method={self.cfg.squeeze_method}")
return pool
def get_pos_conv(self, squeeze_factor: int):
if self.cfg.squeeze_method in {'default', 'default-v2'}:
pos_conv = nn.Conv1d(
self.embedding_dim,
self.embedding_dim,
kernel_size=self.cfg.conv_pos,
padding=self.cfg.conv_pos // 2,
groups=self.cfg.conv_pos_groups,
stride=squeeze_factor,
)
dropout = 0
std = math.sqrt((4 * (1.0 - dropout)) / (self.cfg.conv_pos * self.embedding_dim))
nn.init.normal_(pos_conv.weight, mean=0, std=std)
nn.init.constant_(pos_conv.bias, 0)
pos_conv = nn.utils.weight_norm(pos_conv, name="weight", dim=2)
pos_conv = nn.Sequential(pos_conv, SamePad(self.cfg.conv_pos), nn.GELU())
elif self.cfg.squeeze_method in {'multi-layer', 'multi-layer-k4'}:
layers = []
for i in range(int(np.log2(squeeze_factor))):
conv = nn.Conv1d(
self.embedding_dim,
self.embedding_dim,
kernel_size=self.cfg.conv_pos,
padding=self.cfg.conv_pos // 2,
groups=self.cfg.conv_pos_groups,
stride=2,
)
dropout = 0
std = math.sqrt((4 * (1.0 - dropout)) / (self.cfg.conv_pos * self.embedding_dim))
nn.init.normal_(conv.weight, mean=0, std=std)
nn.init.constant_(conv.bias, 0)
conv = nn.utils.weight_norm(conv, name="weight", dim=2)
layers += [nn.Sequential(conv, nn.GELU())]
pos_conv = nn.ModuleList(layers)
elif self.cfg.squeeze_method in {'multi-layer-k4-bn'}:
layers = []
for i in range(int(np.log2(squeeze_factor))):
conv = nn.Conv1d(
self.embedding_dim,
self.embedding_dim,
kernel_size=self.cfg.conv_pos,
padding=self.cfg.conv_pos // 2,
groups=self.cfg.conv_pos_groups,
stride=2,
)
dropout = 0
std = math.sqrt((4 * (1.0 - dropout)) / (self.cfg.conv_pos * self.embedding_dim))
nn.init.normal_(conv.weight, mean=0, std=std)
nn.init.constant_(conv.bias, 0)
conv = nn.utils.weight_norm(conv, name="weight", dim=2)
layers += [nn.Sequential(conv, FP32BatchNorm1d(self.embedding_dim), nn.GELU())]
pos_conv = nn.ModuleList(layers)
else:
raise ValueError(f"squeeze_method={self.cfg.squeeze_method}")
return pos_conv
def get_upsample(self, squeeze_factor: int):
if self.cfg.squeeze_method == 'default':
layers = [
nn.Linear(self.embedding_dim, self.embedding_dim * squeeze_factor),
nn.GELU(),
Rearrange('b t (s c) -> b (t s) c', s=squeeze_factor, c=self.embedding_dim),
]
upsample = nn.Sequential(*layers)
elif self.cfg.squeeze_method == 'default-v2':
layers = []
for _ in range(int(np.log2(squeeze_factor))):
layers += [
nn.Linear(self.embedding_dim, self.embedding_dim * 2),
nn.GELU(),
Rearrange('b t (s c) -> b (t s) c', s=2, c=self.embedding_dim),
]
upsample = nn.Sequential(*layers)
elif self.cfg.squeeze_method == 'multi-layer':
upsample = [Rearrange('b t c -> b c t')]
for i in range(int(np.log2(squeeze_factor))):
upsample += [
nn.ConvTranspose1d(self.embedding_dim, self.embedding_dim, 2, 2, 0, bias=False),
nn.GELU()
]
upsample.append(Rearrange('b c t -> b t c'))
upsample = nn.Sequential(*upsample)
elif self.cfg.squeeze_method == 'multi-layer-k4':
upsample = [Rearrange('b t c -> b c t')]
for i in range(int(np.log2(squeeze_factor))):
upsample += [
nn.ConvTranspose1d(self.embedding_dim, self.embedding_dim, 4, 2, 1, bias=False),
nn.GELU(),
]
upsample.append(Rearrange('b c t -> b t c'))
upsample = nn.Sequential(*upsample)
elif self.cfg.squeeze_method == 'multi-layer-k4-bn':
upsample = [Rearrange('b t c -> b c t')]
for i in range(int(np.log2(squeeze_factor))):
upsample += [
nn.ConvTranspose1d(self.embedding_dim, self.embedding_dim, 4, 2, 1, bias=False),
FP32BatchNorm1d(self.embedding_dim),
nn.GELU(),
]
upsample.append(Rearrange('b c t -> b t c'))
upsample = nn.Sequential(*upsample)
else:
raise ValueError(f"squeeze_method={self.cfg.squeeze_method}")
for m in upsample.modules():
if isinstance(m, (nn.ConvTranspose1d, nn.Linear)):
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
if m.bias is not None:
nn.init.zeros_(m.bias)
return upsample
def forward(self, x, padding_mask=None):
x = self.extract_features(x, padding_mask)
if self.layer_norm_first and self.upsample is None:
x = self.layer_norm(x)
return x
def extract_features(self, x, padding_mask=None):
if padding_mask is not None:
x[padding_mask] = 0
T = x.shape[1]
x = x.transpose(1, 2) # B, T, C to B, C, T
if isinstance(self.pos_conv, nn.Sequential):
x_conv = self.pos_conv(x)
x_pool = self.pool(x)
min_length = min(x_conv.size(-1), x_pool.size(-1))
x = (x_pool[...,:min_length] + x_conv[...,:min_length])
elif isinstance(self.pos_conv, nn.ModuleList):
for conv in self.pos_conv:
x_conv = conv(x)
x_pool = self.pool(x)
min_length = min(x_conv.size(-1), x_pool.size(-1))
x = (x_pool[...,:min_length] + x_conv[...,:min_length])
else:
raise NotImplementedError
x = x.transpose(1, 2)
# adjust the padding_mask
if padding_mask is not None:
input_lengths = (1 - padding_mask.long()).sum(-1)
# apply conv formula to get real output_lengths
output_lengths = input_lengths // self.cfg.squeeze_factor
output_lengths += x.size(1) - output_lengths.max().item()
padding_mask = make_pad_mask(output_lengths).to(x.device) # 1 at padding
if not self.layer_norm_first:
x = self.layer_norm(x)
x = F.dropout(x, p=self.dropout, training=self.training)
# B x T x C -> T x B x C
x = x.transpose(0, 1)
layer_results = []
for i, layer in enumerate(self.layers):
dropout_probability = np.random.random()
if not self.training or (dropout_probability > self.layerdrop):
x, z = layer(x, self_attn_padding_mask=padding_mask, need_weights=False)
layer_results.append(x)
# T x B x C -> B x T x C
x = x.transpose(0, 1)
if self.upsample is not None:
if self.layer_norm_first:
x = self.layer_norm(x)
x = self.upsample(x)
if x.size(1) < T:
x = F.pad(x, (0, 0, 0, T - x.size(1)))
return x
| [
"[email protected]"
]
| |
98af5acc6c01082ff464a39f6b01cd7bb7d5c1f2 | 83621026a32bdc5800113957dc5518ad25a9068b | /setup.py | 70f5a67c97afc1e7d33f2822bab6738272652388 | []
| no_license | kerpowski/ChessAI | 78f83b49392db5900174418aad0ee95ca5dcac04 | 113ec134504b718d078b2b3795d9481373a4cfae | refs/heads/master | 2020-12-24T16:50:34.560141 | 2015-02-19T07:32:10 | 2015-02-19T07:32:10 | 30,951,038 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 589 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Feb 14 16:42:51 2015
@author: kerpowski
"""
from distutils.core import setup
from Cython.Build import cythonize
import numpy
sourcefiles = ['ChessLib.py', 'ChessAI.py']
compiler_directives={#'profile': True,
'boundscheck': False,
'cdivision': True,
'language_level':3}
ext_modules = cythonize(sourcefiles, compiler_directives=compiler_directives)
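# Typical invocation (illustrative; run from the directory containing the sources):
#   python setup.py build_ext --inplace
# which compiles ChessLib.py and ChessAI.py into importable extension modules.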
setup(
name = 'Make Chess Fast',
ext_modules = ext_modules,
include_dirs=[numpy.get_include()]
)
 | [
"[email protected]"
]
| |
8939e5cabbb2aec509f3ddb98bf673a0615b5918 | 10263cf024d58360f312189aef89c4289f732020 | /shop/views.py | 992e88f64e68f2f76cbf1990424da69e2f550407 | []
| no_license | mlobina/django-fin | e1e5085d5048f88ae335cc84c8a252e53a600ca1 | 6595dfe57c44f31e80e32bb736aafb6e3590afea | refs/heads/master | 2023-06-17T14:54:50.974551 | 2021-06-29T10:38:50 | 2021-06-29T10:38:50 | 381,179,562 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,168 | py | from rest_framework import viewsets, permissions
from django.contrib.auth.models import User
from shop.models import Product, ProductReview, Collection, Order
from shop.serializers import ProductSerializer, ReviewSerializer, CollectionSerializer, OrderSerializer, UserSerializer
from django_filters.rest_framework import DjangoFilterBackend
from shop.filters import ProductFilter, ReviewFilter, OrderFilter
from shop.permissions import IsOwnerOrAdmin
class ProductViewSet(viewsets.ModelViewSet):
"""
    Viewset for Product model objects
"""
queryset = Product.objects.all()
serializer_class = ProductSerializer
filter_backends = (DjangoFilterBackend,)
filterset_class = ProductFilter
def get_permissions(self):
"""
        Only admins may create products; all users may view them
"""
if self.action in ["create", "update", "partial_update", "destroy"]:
permission_classes = [permissions.IsAdminUser]
return [permission() for permission in permission_classes]
return []
class ReviewViewSet(viewsets.ModelViewSet):
"""
    Viewset for ProductReview model objects
"""
queryset = ProductReview.objects.select_related("user", "product")
serializer_class = ReviewSerializer
filter_backends = (DjangoFilterBackend,)
filterset_class = ReviewFilter
def get_permissions(self):
"""
        Only authenticated users may leave a product review.
        A user may update or delete only their own review.
"""
if self.action in ["create", "update", "partial_update", "destroy"]:
return [permissions.IsAuthenticated(), IsOwnerOrAdmin()]
return []
class CollectionViewSet(viewsets.ModelViewSet):
"""
    Viewset for Collection model objects
"""
queryset = Collection.objects.prefetch_related("products")
serializer_class = CollectionSerializer
def get_permissions(self):
"""
        Only admins may create collections; other users may only view them.
"""
if self.action in ["create", "update", "partial_update", "destroy"]:
permission_classes = [permissions.IsAdminUser]
return [permission() for permission in permission_classes]
return []
class OrderViewSet(viewsets.ModelViewSet):
"""
    Viewset for Order model objects
"""
serializer_class = OrderSerializer
filter_backends = [DjangoFilterBackend]
filterset_class = OrderFilter
def get_permissions(self):
"""
        Only authenticated users may create orders.
        Admins can access all orders; other users only their own.
"""
if self.action in ["list", "retrieve", "create", "update", "partial_update", "destroy"]:
return [permissions.IsAuthenticated(), IsOwnerOrAdmin()]
return []
def get_queryset(self):
"""
        Admins can access all orders; other users only their own.
"""
if self.request.user.is_staff:
return Order.objects.prefetch_related("positions").all()
return Order.objects.prefetch_related("positions").filter(user=self.request.user)
def create(self, request, *args, **kwargs):
return super().create(request, *args, **kwargs)
class UserViewSet(viewsets.ModelViewSet):
"""
    Viewset for User model objects
"""
queryset = User.objects.all()
serializer_class = UserSerializer
permission_classes = [IsOwnerOrAdmin]
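# Illustrative sketch of how these viewsets are typically wired up in urls.py
# (route prefixes below are assumptions, not taken from this project):
#
#   from rest_framework.routers import DefaultRouter
#   from shop import views
#
#   router = DefaultRouter()
#   router.register("products", views.ProductViewSet)
#   router.register("reviews", views.ReviewViewSet)
#   router.register("collections", views.CollectionViewSet)
#   router.register("orders", views.OrderViewSet, basename="order")  # no queryset attribute, so basename is required
#   router.register("users", views.UserViewSet)
#   urlpatterns = router.urls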
| [
"[email protected]"
]
| |
3d87924ec7d7fd9fcc0bcf9142588b70d3044ea6 | 04e2a63c2a393ec3782a482b1734b6462c885d5d | /univelcity/open_file.py | a5d41c60faaaf3883d1b9e76f60d5a9ad4ae687c | []
| no_license | AzeezBello/_python | c1d671efbca2ed2ca7d65513efd2c55b496ddad7 | 266bc5aed9bfb93ea93b07712b48406331a9a327 | refs/heads/master | 2020-05-17T18:09:49.133120 | 2019-05-16T07:08:50 | 2019-05-16T07:08:50 | 183,876,279 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,299 | py | # file = open("death_causes.csv", "r")
# index = 0
# for line in file:
# index += 1
# print(line.split(","))
# if index == 3:
# break
# # Year,Cause Name,Cause Name,State,Deaths,Age-adjusted Death Rate
# file = open("death_causes.csv", "r")
# deaths = 0
# count = 0
# for line in file:
# if count == 0:
# pass
# else:
# raw = line.split(",")
# print(raw)
# if raw[0] == "2014":
# deaths += int(raw[4])
# count += 1
# print(deaths/365)
# Year,Cause Name,Cause Name,State,Deaths,Age-adjusted Death Rate
# with open("twist.txt", "r") as file:
# for line in file:
# print(line)
# file.close()
import pymysql.cursors
class Mortality:
def __init__(self, year, cause_name_full, cause_name, state, deaths, age_adjusted_death_rate):
self.year = (year)
self.cause_name_full = cause_name_full
self.cause_name = cause_name
self.state = state
self.deaths = (deaths)
self.age_adjusted_death_rate = age_adjusted_death_rate[:-1]
# Connect to the database
connection = pymysql.connect(host='localhost',
user='root',
password='',
db='db',
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor)
def create_table(name):
with connection.cursor() as cursor:
# Create a new record
try:
sql = f"""CREATE TABLE {name}
(id int NOT NULL PRIMARY KEY AUTO_INCREMENT,
year INT(4),
cause_name_full TEXT,
cause_name TEXT,
state VARCHAR(50),
deaths VARCHAR(50),
age_adjusted_death_rate VARCHAR(50))"""
cursor.execute(sql)
connection.commit()
except:
# connection is not autocommit by default. So you must commit to save
# your changes.
print('Table Exists')
def open_file():
file = open("death_causes.csv", "r")
count = 0
for line in file:
if count == 0:
pass
else:
raw = line.split(",")
# print(raw)
new_mortality_object = Mortality( year = raw[0], cause_name_full = raw[1], cause_name= raw[2], state = raw[3], deaths = raw[4], age_adjusted_death_rate = raw[5])
post_to_db(new_mortality_object)
count += 1
def post_to_db(mortality_object):
with connection.cursor() as cursor:
# Create a new record
sql = f"""insert into mortality_rate (year, cause_name_full, cause_name, state, deaths, age_adjusted_death_rate)
values ("{mortality_object.year}", "{mortality_object.cause_name_full}", "{mortality_object.cause_name}", "{mortality_object.state}", "{mortality_object.deaths}", "{mortality_object.age_adjusted_death_rate}")"""
# print(sql)
cursor.execute(sql)
connection.commit()
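# Alternative sketch: let the driver escape values instead of f-string interpolation,
# which avoids SQL injection and quoting bugs (not used by the calls below).
def post_to_db_parameterized(mortality_object):
    with connection.cursor() as cursor:
        sql = ("insert into mortality_rate "
               "(year, cause_name_full, cause_name, state, deaths, age_adjusted_death_rate) "
               "values (%s, %s, %s, %s, %s, %s)")
        cursor.execute(sql, (mortality_object.year, mortality_object.cause_name_full,
                             mortality_object.cause_name, mortality_object.state,
                             mortality_object.deaths, mortality_object.age_adjusted_death_rate))
        connection.commit()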
#CREATE TABLE IN DATABASE
create_table("mortality_rate")
#THEN PUSH FILES INTO TABLE
open_file()
 | [
"[email protected]"
]
| |
df7b27de7032e41159d2757d07e22dd5bf52718c | cad91ae76d2746a6c28ddda0f33a58f9d461378f | /TensorFlow2/LanguageModeling/BERT/dllogger_class.py | be211785d770825978dc9b4cb32631e11f2435bc | []
| no_license | NVIDIA/DeepLearningExamples | fe677521e7e2a16e3cb0b77e358f9aab72f8c11a | a5388a45f71a949639b35cc5b990bd130d2d8164 | refs/heads/master | 2023-08-31T20:57:08.798455 | 2023-08-23T10:09:12 | 2023-08-23T10:09:12 | 131,881,622 | 11,838 | 3,124 | null | 2023-08-28T16:57:33 | 2018-05-02T17:04:05 | Jupyter Notebook | UTF-8 | Python | false | false | 2,852 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from dllogger import Logger, StdOutBackend, JSONStreamBackend, Verbosity
import numpy
class dllogger_class():
def format_step(self, step):
if isinstance(step, str):
return step
elif isinstance(step, int):
return "Iteration: {} ".format(step)
elif len(step) > 0:
return "Iteration: {} ".format(step[0])
else:
return ""
def __init__(self, log_path="bert_dllog.json"):
self.logger = Logger([
StdOutBackend(Verbosity.DEFAULT, step_format=self.format_step),
JSONStreamBackend(Verbosity.VERBOSE, log_path),
])
self.logger.metadata("mlm_loss", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "TRAIN"})
self.logger.metadata("nsp_loss", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "TRAIN"})
self.logger.metadata("avg_loss_step", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "TRAIN"})
self.logger.metadata("total_loss", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "TRAIN"})
self.logger.metadata("loss", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "TRAIN"})
self.logger.metadata("f1", {"unit": None, "format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "VAL"})
self.logger.metadata("precision", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "VAL"})
self.logger.metadata("recall", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "VAL"})
self.logger.metadata("mcc", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "VAL"})
self.logger.metadata("exact_match", {"format": ":.4f", "GOAL": "MINIMIZE", "STAGE": "VAL"})
self.logger.metadata(
"throughput_train",
{"unit": "sequences/s", "format": ":.3f", "GOAL": "MAXIMIZE", "STAGE": "TRAIN"},
)
self.logger.metadata(
"throughput_inf",
{"unit": "sequences/s", "format": ":.3f", "GOAL": "MAXIMIZE", "STAGE": "VAL"},
)
self.logger.metadata(
"throughput_val",
{"unit": "sequences/s", "format": ":.3f", "GOAL": "MAXIMIZE", "STAGE": "VAL"},
)
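# Illustrative usage (metric names are examples; any key given metadata above
# can be logged the same way):
if __name__ == "__main__":
    dllogging = dllogger_class(log_path="example_dllog.json")
    dllogging.logger.log(step=(1,),
                         data={"avg_loss_step": 2.345, "throughput_train": 512.0},
                         verbosity=Verbosity.DEFAULT)
    dllogging.logger.flush()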
| [
"[email protected]"
]
| |
c7d5357b3174e3eba0e4d7ebff79be4a7d3ceb48 | 7b06781892e624146ce8a6928c5d44672a805f76 | /abastos/products/views.py | 89c45e2d06c32b86987136ca6f21cca0952d06bd | []
| no_license | adelebrgx/abastos | f3f2bb242bdc135a0b41e86e2d0092c6d9f73e68 | 70f6d63462a9cc0a57e97846977e65885abea3c2 | refs/heads/master | 2022-11-11T07:56:50.329272 | 2020-06-15T13:58:00 | 2020-06-15T13:58:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,945 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect
from .models import Product
from sell.models import SellPair
from .import forms
from django.http import HttpResponse
import re
from urlparse import urlparse
def products_list_view(request):
products=Product.objects.all().order_by('name')
sellpairs=SellPair.objects.all().order_by('product')
return render(request, "products/productslist.html",{'products':products, 'sellpairs':sellpairs} )
@login_required(login_url="/accounts/login/")
def publish(request):
isName=True
isUrl=True
notAlreadyExists=True
sellpairs_list=SellPair.objects.all().order_by('product')
if request.method=="POST":
products=Product.objects.all().order_by('name')
name=request.POST.get('name')
url=request.POST.get('url')
already=Product.objects.filter(name=name).count()
if(already!=0):
notAlreadyExists=False
if(correct_name(name)==False):
isName=False
if(correct_url(url)==False):
isUrl=False
if(correct_name(name)==True and correct_url(url)==True and notAlreadyExists==True):
product=Product.objects.create(name=name, url=url)
product.save()
return render(request, 'products/productslist.html', {'products':products,'user':request.user, 'sellpairs':sellpairs_list})
else:
return render(request, 'products/publish.html', {'user':request.user,'isName': isName, 'isUrl':isUrl, 'notAlreadyExists':notAlreadyExists})
return render(request, 'products/publish.html', {'user':request.user,'isName': isName, 'isUrl':isUrl,'notAlreadyExists':notAlreadyExists})
def correct_name(s):
regex = re.compile('[@_!#$%^&*()<>?/\|}{~:1234567890]')
if(regex.search(s) == None):
return True
return False
def correct_url(url):
result=urlparse(url)
if(result.scheme!= "" and result.netloc!=""):
return True
return False
def product_details(request,slug):
isName=True
isUrl=True
notAlreadyExists=True
products=Product.objects.all().order_by('name')
sellpairs_list=SellPair.objects.all().order_by('product')
user=request.user
if request.method=='POST':
product= Product.objects.get(name=slug)
new_name=request.POST.get('name')
new_url=request.POST.get('url')
if (correct_name(new_name)==False):
isName=False
if(correct_url(new_url)==False):
isUrl=False
already=Product.objects.filter(name=new_name).count()
if(already!=0):
if(new_name!=product.name):
notAlreadyExists=False
if(isName==True and isUrl==True and notAlreadyExists==True):
product.name=new_name
product.url=new_url
product.save()
return render(request, 'products/productslist.html', {'products':products,'user':request.user, 'sellpairs':sellpairs_list})
else:
return render(request, 'products/product_details.html', {'product':product,'user':request.user,'isName': isName, 'isUrl':isUrl,'notAlreadyExists':notAlreadyExists})
product= Product.objects.get(name=slug)
return render (request, 'products/product_details.html', {'product':product, 'user':user,'isName': isName, 'isUrl':isUrl, 'notAlreadyExists':notAlreadyExists})
def product_delete(request,slug):
product= Product.objects.get(name=slug)
sellpairs=SellPair.objects.filter(product=product)
sellpairs_list=SellPair.objects.all().order_by('product')
print(product)
for s in sellpairs:
s.delete()
product.delete()
products=Product.objects.all().order_by('name')
user=request.user
return render(request, 'products/productslist.html', {'products':products,'user':request.user, 'sellpairs':sellpairs_list})
| [
"[email protected]"
]
| |
6bec030a51b5bb4b0f123d9777dc394b085cf5e0 | 9eaa2c64a777bd24a3cccd0230da5f81231ef612 | /study/1905/month01/code/Stage5/day16/demo06_canny.py | 8cecd5c5324a39778bbcead274373be63fe735f3 | [
"MIT"
]
| permissive | Dython-sky/AID1908 | 4528932f2ca66b844d8a3fcab5ed8bf84d20eb0c | 46cd54a7b36b5f009974f2bbb7005a4ad440ca1a | refs/heads/master | 2022-04-14T12:23:30.426270 | 2020-04-01T18:05:19 | 2020-04-01T18:05:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 461 | py | """
demo06_canny.py  edge detection
"""
import cv2 as cv
original = cv.imread('../ml_data/chair.jpg',cv.IMREAD_GRAYSCALE)
print(original)
cv.imshow('original',original)
# Sobel edge detection
sobel = cv.Sobel(original,cv.CV_64F,0,1,ksize=5)
cv.imshow('sobel',sobel)
# Laplacian edge detection
laplacian = cv.Laplacian(original,cv.CV_64F)
cv.imshow('laplacian',laplacian)
# Canny edge detection
canny = cv.Canny(original,50,200)
cv.imshow('canny',canny)
cv.waitKey()
 | [
"[email protected]"
]
| |
21a712689313dd214c18b4bccb4fb9d07a196c76 | a500d0a13e025a7e25376592188663f26c13385e | /machinelearning/data_science_from_scratch/bin/histogramming.py | 5d5d0265b53d25f825b2de93ae105a0b851f9542 | []
| no_license | sraywall/GitTutorial | c6096cfa9dc5c89ebaedee10ee93fed69118f296 | cd0de5db58e42fb4a5094504147ba804b0424247 | refs/heads/master | 2021-04-27T20:36:30.290444 | 2020-05-07T19:27:06 | 2020-05-07T19:27:06 | 122,381,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 810 | py | #!/usr/bin/python3
import matplotlib.pyplot as plt
from collections import Counter
grades = [83, 95, 91, 87, 70, 0, 85, 82, 100, 67, 73, 77, 0]
# Bucket grades by decile, but put 100 in with the 90s
histogram = Counter(min(grade // 10 * 10, 90) for grade in grades)
plt.bar([x + 5 for x in histogram.keys()], # Shift bars right by 5
histogram.values(), # Give each bar correct height
10, # Give width of 10
edgecolor=(0,0,0)) # Black edges for bars
plt.axis([-5, 105, 0, 5]) # x-axis from -5 to 105,
# y-axis from 0 to 5
plt.xticks([10 * i for i in range(11)]) # x-axis labels 0, 10, ..,100
plt.xlabel("Decile")
plt.ylabel("# of Students")
plt.title("Distribution of Exam 1 Grades")
plt.show()
| [
"[email protected]"
]
| |
81eb6216326223d83778b2d3bd64fbec29228251 | 73758dde83d1a1823c103e1a4ba71e7c95168f71 | /nsd2002/py02/day03/game_role.py | 65eea729683ff4a6c379867472ab679b07dec8fa | []
| no_license | tonggh220/md_5_nsd_notes | 07ffdee7c23963a7a461f2a2340143b0e97bd9e1 | a58a021ad4c7fbdf7df327424dc518f4044c5116 | refs/heads/master | 2023-07-02T01:34:38.798929 | 2021-05-12T08:48:40 | 2021-05-12T08:48:40 | 393,885,415 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 922 | py | class Role:
def __init__(self, name, weapon):
        # Constructor: runs automatically on instantiation. Note that self is not a keyword; any name could be used.
        # Attributes bound to the instance are visible and usable anywhere in the class.
self.name = name
self.weapon = weapon
def show_me(self):
        # Attributes bound to the instance are visible and usable anywhere in the class.
print('我是%s,我擅用%s' % (self.name, self.weapon))
def speak(self, words):
        # hh is not bound to the instance; it is only a local variable, usable inside this function alone.
hh = 'Hahaha'
print(hh)
print(words)
if __name__ == '__main__':
    # The instance itself is passed automatically as the first argument; here that is lb.
    lb = Role('吕布', '方天画戟')  # instantiate: create a concrete object
print(lb.name, lb.weapon)
lb.show_me()
lb.speak('马中赤兔,人中吕布')
| [
"[email protected]"
]
| |
8400f0f8f16237cd362e0cc37f3436e13b3d755f | 82f6a6c50a1fef2d7522a43cc4f60e5ff80b37a8 | /solutions/Longest Word in Dictionary through Deleting/solution.py | 267c70a98bb61b70fe13d5f17a5e27cb662c0fae | [
"MIT"
]
| permissive | nilax97/leetcode-solutions | ca0f9545ce70975617738f053e0935fac00b04d4 | d3c12f2b289662d199510e0431e177bbf3cda121 | refs/heads/master | 2023-05-14T02:21:48.893716 | 2021-06-08T13:16:53 | 2021-06-08T13:16:53 | 374,466,870 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 617 | py | class Solution:
    def findLongestWord(self, s: str, d: "List[str]") -> str:  # string annotation keeps the file import-safe without typing.List
s = '_' + s
n, nxt = len(s), [{} for _ in s]
for i, c in enumerate(s):
for j in range(i-1, -1, -1):
nxt[j][c] = i
if s[j] == c: break
def find(word):
i = 0
for c in word:
i = nxt[i].get(c)
if i is None: return False
return True
res = ""
for word in d:
if find(word) and (not res or (-len(word), word) < (-len(res), res)):
res = word
return res
| [
"[email protected]"
]
| |
d1f13e636da4561babbaa676e0c08ff8448d9dab | 84fda8562eb6193b58031ad9369c27ba455430ca | /vulcan/_homework.py | ed134f6df75a9eef5dde9b04390dbcde8b935ea0 | [
"MIT"
]
| permissive | Majroch/vulcan-api | 50bf39922cea338ac4a98a3a98b9a505469fd936 | 4448eeb64d2481b5deb643bcb32f2c9ee04463f5 | refs/heads/master | 2022-12-10T15:36:20.345105 | 2020-08-22T23:10:45 | 2020-08-22T23:11:37 | 295,528,170 | 0 | 0 | MIT | 2020-09-14T20:14:14 | 2020-09-14T20:14:13 | null | UTF-8 | Python | false | false | 1,502 | py | # -*- coding: utf-8 -*-
from datetime import datetime
from related import (
IntegerField,
StringField,
DateField,
ChildField,
immutable,
to_model,
)
from ._subject import Subject
from ._teacher import Teacher
from ._utils import sort_and_filter_date
@immutable
class Homework:
"""
Homework
Attributes:
id (:class:`int`): Homework ID
description (:class:`str`): Homework description
date (:class:`datetime.date`): Homework deadline date
teacher (:class:`vulcan._teacher.Teacher`): Teacher, who added the homework
subject (:class:`vulcan._subject.Subject`): Subject, from which is the homework
"""
id = IntegerField(key="Id")
description = StringField(key="Opis")
date = DateField(key="DataTekst")
teacher = ChildField(Teacher, required=False)
subject = ChildField(Subject, required=False)
@classmethod
def get(cls, api, date=None):
if not date:
date = datetime.now()
date_str = date.strftime("%Y-%m-%d")
data = {"DataPoczatkowa": date_str, "DataKoncowa": date_str}
j = api.post("Uczen/ZadaniaDomowe", json=data)
homework_list = sort_and_filter_date(j.get("Data", []), date_str)
for homework in homework_list:
homework["teacher"] = api.dict.get_teacher_json(homework["IdPracownik"])
homework["subject"] = api.dict.get_subject_json(homework["IdPrzedmiot"])
yield to_model(cls, homework)
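# Editor's sketch (illustrative; assumes an authenticated ``api`` client from this library):
# the generator above would typically be consumed as
#   homeworks = list(Homework.get(api, date=datetime(2020, 5, 4)))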
| [
"[email protected]"
]
| |
38e2a317c4f2e7eb33bdc730a2477b9180c102dc | 502949222ee30e947426e85aadf8cfdd4b315780 | /luck_balance.py | 92fa55989516ac27357f5885d1873768c33a9dd5 | []
| no_license | RafaelChavesPB/HackerRank | c3d70a0562f733af7e62d65f08312bae84560507 | ab7527754cbaabaa27a633120ed04f39d5ad932f | refs/heads/main | 2023-06-11T16:19:03.266782 | 2021-07-10T12:16:49 | 2021-07-10T12:16:49 | 363,399,230 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,026 | py | #!/bin/python3
import math
import os
import random
import re
import sys
# Complete the 'luckBalance' function below.
#
# The function is expected to return an INTEGER.
# The function accepts following parameters:
# 1. INTEGER k
# 2. 2D_INTEGER_ARRAY contests
#
def luckBalance(k, contests):
importants = []
luck = 0
for it in contests:
if it[1]:
importants.append(it[0])
else:
luck+=it[0]
importants.sort(reverse = True)
for it in importants:
if k>0:
luck+=it
k-=1
else:
luck-=it
return luck
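# Editor's note (worked example of the greedy rule above): with k = 1 and
# contests = [[5, 1], [2, 1], [1, 0]], the unimportant contest adds 1, the single
# allowed important loss adds 5, and the remaining important win subtracts 2,
# so luckBalance(1, [[5, 1], [2, 1], [1, 0]]) == 4.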
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
first_multiple_input = input().rstrip().split()
n = int(first_multiple_input[0])
k = int(first_multiple_input[1])
contests = []
for _ in range(n):
contests.append(list(map(int, input().rstrip().split())))
result = luckBalance(k, contests)
fptr.write(str(result) + '\n')
fptr.close()
| [
"[email protected]"
]
| |
0b8d79dc193c5e2bc349ca96dd149172f4b2ea40 | c42f45d20dee9c27869e24a5ed9a367a6be44a5f | /dnsmanager/management/commands/importbind.py | 219caeeda4193d8d2510fa9f1600b9ebca0b9519 | [
"BSD-3-Clause"
]
| permissive | rhilo/django-dnsmanager | a4b15e1a4c983597e6e5b958d4217ea614b03342 | 00f4211d6a57f038fad46409892768c1f72791f4 | refs/heads/master | 2023-06-30T01:58:38.042136 | 2017-03-20T03:37:17 | 2017-03-20T03:37:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,216 | py | import os
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.db.models.loading import get_model
from dnsmanager.models import Zone
class Command(BaseCommand):
args = '<zone_file zone_file ...>'
help = 'Import the specified Bind zone file'
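    # Editor's sketch (illustrative path): invoked as a Django management command, e.g.
    #   python manage.py importbind /path/to/example.com.zone
    # where the file stem ("example.com") must match an existing domain record.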
def handle(self, *args, **options):
if int(options.get("verbosity", 1)) > 1:
verbose = True
else:
verbose = False
for zone_file in args:
# assume filename is domain
domain = os.path.splitext(os.path.basename(zone_file))[0]
# Dynamically load our Domain name providing model
app_label, model_name = settings.DNS_MANAGER_DOMAIN_MODEL.rsplit('.', 1)
domain_model = get_model(app_label, model_name)
domain_obj = domain_model.objects.get(name=domain)
# Domain must already be created in accounts
zone, created = Zone.objects.get_or_create(domain=domain_obj)
with open(zone_file, mode='r') as f:
text = f.read()
zone.update_from_text(text)
self.stdout.write('Successfully imported file "%s"' % zone_file) | [
"[email protected]"
]
| |
5645056187f31ed4f86c80622fdeaf41b71f0d0b | b361bdc6abbda69dfae1bb9b41282413436eddd1 | /polls/models.py | b6259e2a1032f861775ef81bb9320e7b103412e2 | []
| no_license | wwz58/mysite | d84b9356f85c31a9bb8b11d03216588c6d4a4842 | 236472decbb9dd813898684dbb4143f10fefebb5 | refs/heads/main | 2023-01-10T03:23:18.984338 | 2020-11-14T13:14:25 | 2020-11-14T13:14:25 | 312,543,745 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 703 | py | import datetime
from django.db import models
from django.utils import timezone
# Create your models here.
class Question(models.Model):
question_text = models.CharField(max_length=200)
pub_date = models.DateTimeField('date published')
def __str__(self):
return self.question_text
def was_published_recently(self):
return self.pub_date >= timezone.now() - datetime.timedelta(days=1) and self.pub_date < timezone.now()
class Choice(models.Model):
question = models.ForeignKey(Question, on_delete=models.CASCADE)
choice_text = models.CharField(max_length=200)
votes = models.IntegerField(default=0)
def __str__(self):
return self.choice_text
| [
"[email protected]"
]
| |
1317aafa3a4fd100947fdd513f504d272f19b67c | 867c876541c29775bd5c1548a2ba59f0dc84737d | /MxShop/extra_apps/xadmin/sites.py | 4d151c4b285f85168ebbcd2566c77c2ade989a9c | []
| no_license | flowpig/daily_demos | be9c8aec7c8070e96ee7012b249c2f60e777e248 | b4bc7779c55ca0a02098c6dafe23a8f5af461182 | refs/heads/master | 2023-01-10T21:46:24.059317 | 2019-11-29T06:33:42 | 2019-11-29T06:33:42 | 117,111,372 | 0 | 0 | null | 2022-12-26T20:42:28 | 2018-01-11T14:29:56 | JavaScript | UTF-8 | Python | false | false | 15,072 | py | import sys
from functools import update_wrapper
from future.utils import iteritems
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.models.base import ModelBase
from django.utils import six
from django.views.decorators.cache import never_cache
from django.template.engine import Engine
import inspect
if six.PY2 and sys.getdefaultencoding() == 'ascii':
import imp
imp.reload(sys)
sys.setdefaultencoding("utf-8")
class AlreadyRegistered(Exception):
pass
class NotRegistered(Exception):
pass
class MergeAdminMetaclass(type):
def __new__(cls, name, bases, attrs):
return type.__new__(cls, str(name), bases, attrs)
class AdminSite(object):
def __init__(self, name='xadmin'):
self.name = name
self.app_name = 'xadmin'
self._registry = {} # model_class class -> admin_class class
self._registry_avs = {} # admin_view_class class -> admin_class class
self._registry_settings = {} # settings name -> admin_class class
self._registry_views = []
# url instance contains (path, admin_view class, name)
self._registry_modelviews = []
# url instance contains (path, admin_view class, name)
self._registry_plugins = {} # view_class class -> plugin_class class
self._admin_view_cache = {}
# self.check_dependencies()
self.model_admins_order = 0
def copy_registry(self):
import copy
return {
'models': copy.copy(self._registry),
'avs': copy.copy(self._registry_avs),
'views': copy.copy(self._registry_views),
'settings': copy.copy(self._registry_settings),
'modelviews': copy.copy(self._registry_modelviews),
'plugins': copy.copy(self._registry_plugins),
}
def restore_registry(self, data):
self._registry = data['models']
self._registry_avs = data['avs']
self._registry_views = data['views']
self._registry_settings = data['settings']
self._registry_modelviews = data['modelviews']
self._registry_plugins = data['plugins']
def register_modelview(self, path, admin_view_class, name):
from xadmin.views.base import BaseAdminView
if issubclass(admin_view_class, BaseAdminView):
self._registry_modelviews.append((path, admin_view_class, name))
else:
raise ImproperlyConfigured(u'The registered view class %s isn\'t subclass of %s' %
(admin_view_class.__name__, BaseAdminView.__name__))
def register_view(self, path, admin_view_class, name):
self._registry_views.append((path, admin_view_class, name))
def register_plugin(self, plugin_class, admin_view_class):
from xadmin.views.base import BaseAdminPlugin
if issubclass(plugin_class, BaseAdminPlugin):
self._registry_plugins.setdefault(
admin_view_class, []).append(plugin_class)
else:
raise ImproperlyConfigured(u'The registered plugin class %s isn\'t subclass of %s' %
(plugin_class.__name__, BaseAdminPlugin.__name__))
def register_settings(self, name, admin_class):
self._registry_settings[name.lower()] = admin_class
def register(self, model_or_iterable, admin_class=object, **options):
from xadmin.views.base import BaseAdminView
if isinstance(model_or_iterable, ModelBase) or issubclass(model_or_iterable, BaseAdminView):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if isinstance(model, ModelBase):
if model._meta.abstract:
raise ImproperlyConfigured('The model %s is abstract, so it '
'cannot be registered with admin.' % model.__name__)
if model in self._registry:
raise AlreadyRegistered(
'The model %s is already registered' % model.__name__)
# If we got **options then dynamically construct a subclass of
# admin_class with those **options.
if options:
# For reasons I don't quite understand, without a __module__
# the created class appears to "live" in the wrong place,
# which causes issues later on.
options['__module__'] = __name__
admin_class = type(str("%s%sAdmin" % (model._meta.app_label, model._meta.model_name)), (admin_class,), options or {})
admin_class.model = model
admin_class.order = self.model_admins_order
self.model_admins_order += 1
self._registry[model] = admin_class
else:
if model in self._registry_avs:
raise AlreadyRegistered('The admin_view_class %s is already registered' % model.__name__)
if options:
options['__module__'] = __name__
admin_class = type(str(
"%sAdmin" % model.__name__), (admin_class,), options)
# Instantiate the admin class to save in the registry
self._registry_avs[model] = admin_class
def unregister(self, model_or_iterable):
"""
Unregisters the given model(s).
If a model isn't already registered, this will raise NotRegistered.
"""
from xadmin.views.base import BaseAdminView
if isinstance(model_or_iterable, (ModelBase, BaseAdminView)):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if isinstance(model, ModelBase):
if model not in self._registry:
raise NotRegistered(
'The model %s is not registered' % model.__name__)
del self._registry[model]
else:
if model not in self._registry_avs:
raise NotRegistered('The admin_view_class %s is not registered' % model.__name__)
del self._registry_avs[model]
def set_loginview(self, login_view):
self.login_view = login_view
def has_permission(self, request):
"""
Returns True if the given HttpRequest has permission to view
*at least one* page in the admin site.
"""
return request.user.is_active and request.user.is_staff
def check_dependencies(self):
"""
Check that all things needed to run the admin have been correctly installed.
The default implementation checks that LogEntry, ContentType and the
auth context processor are installed.
"""
from django.contrib.contenttypes.models import ContentType
if not ContentType._meta.installed:
raise ImproperlyConfigured("Put 'django.contrib.contenttypes' in "
"your INSTALLED_APPS setting in order to use the admin application.")
default_template_engine = Engine.get_default()
if not ('django.contrib.auth.context_processors.auth' in default_template_engine.context_processors or
'django.core.context_processors.auth' in default_template_engine.context_processors):
raise ImproperlyConfigured("Put 'django.contrib.auth.context_processors.auth' "
"in your TEMPLATE_CONTEXT_PROCESSORS setting in order to use the admin application.")
def admin_view(self, view, cacheable=False):
"""
Decorator to create an admin view attached to this ``AdminSite``. This
wraps the view and provides permission checking by calling
``self.has_permission``.
You'll want to use this from within ``AdminSite.get_urls()``:
class MyAdminSite(AdminSite):
def get_urls(self):
from django.conf.urls import url
urls = super(MyAdminSite, self).get_urls()
urls += [
url(r'^my_view/$', self.admin_view(some_view))
]
return urls
By default, admin_views are marked non-cacheable using the
``never_cache`` decorator. If the view can be safely cached, set
cacheable=True.
"""
def inner(request, *args, **kwargs):
if not self.has_permission(request) and getattr(view, 'need_site_permission', True):
return self.create_admin_view(self.login_view)(request, *args, **kwargs)
return view(request, *args, **kwargs)
if not cacheable:
inner = never_cache(inner)
return update_wrapper(inner, view)
def _get_merge_attrs(self, option_class, plugin_class):
return dict([(name, getattr(option_class, name)) for name in dir(option_class)
if name[0] != '_' and not callable(getattr(option_class, name)) and hasattr(plugin_class, name)])
def _get_settings_class(self, admin_view_class):
name = admin_view_class.__name__.lower()
if name in self._registry_settings:
return self._registry_settings[name]
elif name.endswith('admin') and name[0:-5] in self._registry_settings:
return self._registry_settings[name[0:-5]]
elif name.endswith('adminview') and name[0:-9] in self._registry_settings:
return self._registry_settings[name[0:-9]]
return None
def _create_plugin(self, option_classes):
def merge_class(plugin_class):
if option_classes:
attrs = {}
bases = [plugin_class]
for oc in option_classes:
attrs.update(self._get_merge_attrs(oc, plugin_class))
meta_class = getattr(oc, plugin_class.__name__, getattr(oc, plugin_class.__name__.replace('Plugin', ''), None))
if meta_class:
bases.insert(0, meta_class)
if attrs:
plugin_class = MergeAdminMetaclass(
'%s%s' % (''.join([oc.__name__ for oc in option_classes]), plugin_class.__name__),
tuple(bases), attrs)
return plugin_class
return merge_class
def get_plugins(self, admin_view_class, *option_classes):
from xadmin.views import BaseAdminView
plugins = []
opts = [oc for oc in option_classes if oc]
for klass in admin_view_class.mro():
if klass == BaseAdminView or issubclass(klass, BaseAdminView):
merge_opts = []
reg_class = self._registry_avs.get(klass)
if reg_class:
merge_opts.append(reg_class)
settings_class = self._get_settings_class(klass)
if settings_class:
merge_opts.append(settings_class)
merge_opts.extend(opts)
ps = self._registry_plugins.get(klass, [])
plugins.extend(map(self._create_plugin(
merge_opts), ps) if merge_opts else ps)
return plugins
def get_view_class(self, view_class, option_class=None, **opts):
merges = [option_class] if option_class else []
for klass in view_class.mro():
reg_class = self._registry_avs.get(klass)
if reg_class:
merges.append(reg_class)
settings_class = self._get_settings_class(klass)
if settings_class:
merges.append(settings_class)
merges.append(klass)
new_class_name = ''.join([c.__name__ for c in merges])
if new_class_name not in self._admin_view_cache:
plugins = self.get_plugins(view_class, option_class)
self._admin_view_cache[new_class_name] = MergeAdminMetaclass(
new_class_name, tuple(merges),
dict({'plugin_classes': plugins, 'admin_site': self}, **opts))
return self._admin_view_cache[new_class_name]
def create_admin_view(self, admin_view_class):
return self.get_view_class(admin_view_class).as_view()
def create_model_admin_view(self, admin_view_class, model, option_class):
return self.get_view_class(admin_view_class, option_class).as_view()
def get_urls(self):
from django.urls import include, path, re_path
from xadmin.views.base import BaseAdminView
if settings.DEBUG:
self.check_dependencies()
def wrap(view, cacheable=False):
def wrapper(*args, **kwargs):
return self.admin_view(view, cacheable)(*args, **kwargs)
wrapper.admin_site = self
return update_wrapper(wrapper, view)
# Admin-site-wide views.
urlpatterns = [
path('jsi18n/', wrap(self.i18n_javascript, cacheable=True), name='jsi18n')
]
        # Registered admin views
# inspect[isclass]: Only checks if the object is a class. With it lets you create an custom view that
# inherits from multiple views and have more of a metaclass.
urlpatterns += [
re_path(
_path,
wrap(self.create_admin_view(clz_or_func))
if inspect.isclass(clz_or_func) and issubclass(clz_or_func, BaseAdminView)
else include(clz_or_func(self)),
name=name
)
for _path, clz_or_func, name in self._registry_views
]
# Add in each model's views.
for model, admin_class in iteritems(self._registry):
view_urls = [
re_path(
_path,
wrap(self.create_model_admin_view(clz, model, admin_class)),
name=name % (model._meta.app_label, model._meta.model_name)
)
for _path, clz, name in self._registry_modelviews
]
urlpatterns += [
re_path(r'^%s/%s/' % (model._meta.app_label, model._meta.model_name), include(view_urls))
]
return urlpatterns
@property
def urls(self):
return self.get_urls(), self.name, self.app_name
def i18n_javascript(self, request):
from django.views.i18n import JavaScriptCatalog
"""
Displays the i18n JavaScript that the Django admin requires.
This takes into account the USE_I18N setting. If it's set to False, the
generated JavaScript will be leaner and faster.
"""
return JavaScriptCatalog.as_view(packages=['django.contrib.admin'])(request)
# This global object represents the default admin site, for the common case.
# You can instantiate AdminSite in your own code to create a custom admin site.
site = AdminSite()
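# Editor's sketch (illustrative; ``Article`` and the option names are hypothetical):
# a model admin is attached either directly, e.g. site.register(Article, ArticleAdmin),
# or via the ``register`` decorator defined below:
#   @register(Article)
#   class ArticleAdmin(object):
#       list_display = ('title', 'created')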
def register(models, **kwargs):
def _model_admin_wrapper(admin_class):
site.register(models, admin_class)
return _model_admin_wrapper | [
"[email protected]"
]
| |
2b8f98fed9b2cf78f74e3d060be36ebc87cc6f48 | 3d2823b5b9a33efa51e8daea9873494b0c3d8116 | /Sparse autoencoder/sparseae.py | 65786bd3c7ff49c2e91758cc98c003faddd20650 | []
| no_license | AdamGu0/MNIST-Neuron-Networks | 1430de9756769d112d54ed612fd5be90b087710b | e022d0a5b42c31cc2350cd8bfc5762ecab609832 | refs/heads/master | 2020-03-16T03:56:54.983557 | 2018-05-07T18:24:29 | 2018-05-07T18:24:29 | 132,499,122 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 13,592 | py | '''
This file implements a sparse autoencoder (a multi-layer neural network trained to
reconstruct its input) using Python 3.6
'''
import numpy as np
from load_dataset import mnist
import matplotlib.pyplot as plt
def sigmoid(Z):
'''
computes sigmoid activation of Z
Inputs:
Z is a numpy.ndarray (n, m)
Returns:
A is activation. numpy.ndarray (n, m)
cache is a dictionary with {"Z", Z}
'''
A = 1 / (1 + np.exp(-Z))
cache = {}
cache["Z"] = Z
return A, cache
def sigmoid_der(dA, cache):
'''
computes derivative of sigmoid activation
Inputs:
dA is the derivative from subsequent layer. numpy.ndarray (n, m)
cache is a dictionary with {"Z", Z}, where Z was the input
to the activation layer during forward propagation
Returns:
dZ is the derivative. numpy.ndarray (n,m)
'''
### CODE HERE
Z = cache["Z"]
A, c = sigmoid(Z)
dZ = dA * A * (1 - A)
return dZ
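# Editor's note (illustrative check, not in the original assignment): at Z = 0 the
# sigmoid is 0.5, so A * (1 - A) = 0.25 and sigmoid_der(1.0, {"Z": 0.0}) returns 0.25.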
def softmax_cross_entropy_loss(Z, Y=np.array([])):
'''
Computes the softmax activation of the inputs Z
Estimates the cross entropy loss
Inputs:
Z - numpy.ndarray (n, m)
Y - numpy.ndarray (1, m) of labels
when y=[] loss is set to []
Returns:
A - numpy.ndarray (n, m) of softmax activations
cache - a dictionary to store the activations later used to estimate derivatives
loss - cost of prediction
'''
### CODE HERE
mZ = np.max(Z, axis=0, keepdims=True)
expZ = np.exp(Z - mZ)
A = expZ / np.sum(expZ, axis=0, keepdims=True)
cache = {}
cache["A"] = A
IA = [A[Y[0][i]][i] for i in range(Y.size)]
IA = (np.array(IA) - 0.5) * 0.9999999999 + 0.5
loss = -np.mean(np.log(IA))
return A, cache, loss
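# Editor's note (illustrative, tiny worked example): for two classes and one sample,
# Z = np.array([[0.], [0.]]) gives A = [[0.5], [0.5]], and with Y = np.array([[0]])
# the cross-entropy loss is -log(0.5), about 0.693.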
def softmax_cross_entropy_loss_der(Y, cache):
'''
Computes the derivative of softmax activation and cross entropy loss
Inputs:
Y - numpy.ndarray (1, m) of labels
cache - a dictionary with cached activations A of size (n,m)
Returns:
dZ - numpy.ndarray (n, m) derivative for the previous layer
'''
### CODE HERE
dZ = cache["A"].copy()
for i in range(Y.size):
dZ[Y[0][i]][i] -= 1
return dZ
def initialize_multilayer_weights(net_dims):
'''
Initializes the weights of the multilayer network
Inputs:
net_dims - tuple of network dimensions
Returns:
dictionary of parameters
'''
np.random.seed(0)
numLayers = len(net_dims)
parameters = {}
for l in range(numLayers - 1):
parameters["W" + str(l + 1)] = np.random.randn(net_dims[l + 1], net_dims[l]) * 0.01
parameters["b" + str(l + 1)] = np.random.randn(net_dims[l + 1], 1) * 0.01
return parameters
def linear_forward(A, W, b):
'''
Input A propagates through the layer
Z = WA + b is the output of this layer.
Inputs:
A - numpy.ndarray (n,m) the input to the layer
W - numpy.ndarray (n_out, n) the weights of the layer
b - numpy.ndarray (n_out, 1) the bias of the layer
Returns:
Z = WA + b, where Z is the numpy.ndarray (n_out, m) dimensions
cache - a dictionary containing the inputs A
'''
### CODE HERE
Z = np.dot(W, A) + b
cache = {}
cache["A"] = A
return Z, cache
def layer_forward(A_prev, W, b):
'''
Input A_prev propagates through the layer and the activation
Inputs:
A_prev - numpy.ndarray (n,m) the input to the layer
W - numpy.ndarray (n_out, n) the weights of the layer
b - numpy.ndarray (n_out, 1) the bias of the layer
activation - is the string that specifies the activation function
Returns:
A = g(Z), where Z = WA + b, where Z is the numpy.ndarray (n_out, m) dimensions
g is the activation function
cache - a dictionary containing the cache from the linear and the nonlinear propagation
to be used for derivative
'''
Z, lin_cache = linear_forward(A_prev, W, b)
A, act_cache = sigmoid(Z)
cache = {}
cache["lin_cache"] = lin_cache
cache["act_cache"] = act_cache
return A, cache
def multi_layer_forward(X, parameters):
'''
Forward propgation through the layers of the network
Inputs:
X - numpy.ndarray (n,m) with n features and m samples
parameters - dictionary of network parameters {"W1":[..],"b1":[..],"W2":[..],"b2":[..]...}
Returns:
AL - numpy.ndarray (c,m) - outputs of the last fully connected layer before softmax
where c is number of categories and m is number of samples in the batch
caches - a dictionary of associated caches of parameters and network inputs
'''
L = len(parameters) // 2
A = X
caches = []
for l in range(1, L): # since there is no W0 and b0
A, cache = layer_forward(A, parameters["W" + str(l)], parameters["b" + str(l)])
caches.append(cache)
AL, cache = layer_forward(A, parameters["W" + str(L)], parameters["b" + str(L)])
caches.append(cache)
return AL, caches
def linear_backward(dZ, cache, W, b):
'''
Backward prpagation through the linear layer
Inputs:
dZ - numpy.ndarray (n,m) derivative dL/dz
cache - a dictionary containing the inputs A, for the linear layer
where Z = WA + b,
Z is (n,m); W is (n,p); A is (p,m); b is (n,1)
W - numpy.ndarray (n,p)
b - numpy.ndarray (n, 1)
Returns:
dA_prev - numpy.ndarray (p,m) the derivative to the previous layer
dW - numpy.ndarray (n,p) the gradient of W
db - numpy.ndarray (n, 1) the gradient of b
'''
A_prev = cache["A"]
## CODE HERE
m = dZ.shape[1]
dA_prev = np.dot(np.transpose(W), dZ)
dW = np.dot(dZ, np.transpose(A_prev)) / m
db = np.sum(dZ, axis=1, keepdims=True) / m
return dA_prev, dW, db
def layer_backward(dA, cache, W, b, KL=False):
'''
Backward propagation through the activation and linear layer
Inputs:
dA - numpy.ndarray (n,m) the derivative to the previous layer
cache - dictionary containing the linear_cache and the activation_cache
activation - activation of the layer
W - numpy.ndarray (n,p)
b - numpy.ndarray (n, 1)
Returns:
dA_prev - numpy.ndarray (p,m) the derivative to the previous layer
dW - numpy.ndarray (n,p) the gradient of W
db - numpy.ndarray (n, 1) the gradient of b
'''
lin_cache = cache["lin_cache"]
act_cache = cache["act_cache"]
'''
if activation == "sigmoid":
dZ = sigmoid_der(dA, act_cache)
elif activation == "tanh":
dZ = tanh_der(dA, act_cache)
'''
if KL:
        beta = 3  # weight of the sparsity (KL-divergence) penalty
KL = cache["KL"]
p = cache["p"]
dp = -np.divide(p, KL) + np.divide(1 - p, 1 - KL)
        dA += beta * dp
dZ = sigmoid_der(dA, act_cache)
dA_prev, dW, db = linear_backward(dZ, lin_cache, W, b)
return dA_prev, dW, db
def multi_layer_backward(dAL, caches, parameters):
'''
Back propgation through the layers of the network (except softmax cross entropy)
softmax_cross_entropy can be handled separately
Inputs:
dAL - numpy.ndarray (n,m) derivatives from the softmax_cross_entropy layer
caches - a dictionary of associated caches of parameters and network inputs
parameters - dictionary of network parameters {"W1":[..],"b1":[..],"W2":[..],"b2":[..]...}
Returns:
gradients - dictionary of gradient of network parameters
{"dW1":[..],"db1":[..],"dW2":[..],"db2":[..],...}
'''
L = len(caches) # with one hidden layer, L = 2
gradients = {}
dA = dAL
for l in reversed(range(1, L + 1)):
dA, gradients["dW" + str(l)], gradients["db" + str(l)] = layer_backward(dA, caches[l - 1],
parameters["W" + str(l)], parameters["b" + str(l)])
return gradients
def classify(X, parameters):
'''
Network prediction for inputs X
Inputs:
X - numpy.ndarray (n,m) with n features and m samples
parameters - dictionary of network parameters
{"W1":[..],"b1":[..],"W2":[..],"b2":[..],...}
Returns:
YPred - numpy.ndarray (1,m) of predictions
'''
### CODE HERE
# Forward propagate X using multi_layer_forward
YPred, caches = multi_layer_forward(X, parameters)
return YPred
def update_parameters(parameters, gradients, epoch, learning_rate, decay_rate=0.0, weight_decay=0):
'''
Updates the network parameters with gradient descent
Inputs:
parameters - dictionary of network parameters
{"W1":[..],"b1":[..],"W2":[..],"b2":[..],...}
gradients - dictionary of gradient of network parameters
{"dW1":[..],"db1":[..],"dW2":[..],"db2":[..],...}
epoch - epoch number
learning_rate - step size for learning
decay_rate - rate of decay of step size - not necessary - in case you want to use
'''
alpha = learning_rate * (1 / (1 + decay_rate * epoch))
L = len(parameters) // 2
### CODE HERE
for l in reversed(range(1, L + 1)):
w = parameters["W" + str(l)]
parameters["W" + str(l)] -= alpha * (gradients["dW" + str(l)] + weight_decay * w)
parameters["b" + str(l)] -= alpha * gradients["db" + str(l)]
return parameters, alpha
def train_sparse_autoencoder(X, Y, net_dims, num_iterations=500, learning_rate=0.2, decay_rate=0.01, weight_decay=0, p=0.5):
'''
    Creates the neural network and trains it
Inputs:
X - numpy.ndarray (n,m) of training data
Y - numpy.ndarray (1,m) of training data labels
net_dims - tuple of layer dimensions
num_iterations - num of epochs to train
learning_rate - step size for gradient descent
Returns:
costs - list of costs over training
parameters - dictionary of trained network parameters
'''
parameters = initialize_multilayer_weights(net_dims)
A0 = X
costs = []
for ii in range(num_iterations):
### CODE HERE
# Forward Prop
# call to layer_forward to get activations
W1 = parameters["W1"]
b1 = parameters["b1"]
W2 = parameters["W2"]
b2 = parameters["b2"]
A1, cache1 = layer_forward(A0, W1, b1)
KL = np.mean(A1, axis=1, keepdims=True)
KL = (KL - 0.5) * 0.9999999999 + 0.5
cache1["KL"] = KL
cache1["p"] = p
A2, cache2 = layer_forward(A1, W2, b2)
## call loss function
        # reconstruction error + weight decay + KL sparsity penalty
        cost = np.mean(np.sum((Y - A2)**2, axis=0)) / 2 \
            + weight_decay / 2 * (np.sum(W1) + np.sum(W2)) \
            + np.sum(p * np.log(np.divide(p, KL)) + (1 - p) * np.log(np.divide(1 - p, 1 - KL)))
# Backward Prop
# loss der
m = A2.shape[1]
dA2 = A2 - Y
## call to layer_backward to get gradients
gradients = {}
dA1, gradients["dW2"], gradients["db2"] = layer_backward(dA2, cache2, W2, b2)
dA0, gradients["dW1"], gradients["db1"] = layer_backward(dA1, cache1, W1, b1, KL=True)
## call to update the parameters
parameters, alpha = update_parameters(parameters, gradients, ii, learning_rate, decay_rate, weight_decay)
if ii % 10 == 0:
costs.append(cost)
if ii % 10 == 0:
print("Cost at iteration %i is: %.05f, learning rate: %.05f" % (ii, cost, alpha))
return costs, parameters
def main():
'''
Trains a sparse autoencoder for MNIST digit data
'''
net_dims = [784, 200, 784]
print("Network dimensions are:" + str(net_dims))
# getting the subset dataset from MNIST
train_data, train_label, test_data, test_label = \
mnist(ntrain=1000, ntest=200, digit_range=[0, 10])
train_label = train_data
test_label = test_data
# initialize learning rate and num_iterations
num_iterations = 400
decay_rate = 0
weight_decay = 0.001
ps = [0.01, 0.1, 0.5, 0.8]
p = ps[1]
learning_rate = 0.3
costs, parameters = train_sparse_autoencoder(train_data, train_label, net_dims, num_iterations=num_iterations,
learning_rate=learning_rate, decay_rate=decay_rate, weight_decay=weight_decay, p=p)
train_Pred = classify(train_data, parameters)
test_Pred = classify(test_data, parameters)
# plt.imshow(np.reshape(train_label[:, 10], [28, 28]), cmap='gray')
# plt.show()
# plt.imshow(np.reshape(train_Pred[:, 10], [28, 28]), cmap='gray')
# plt.show()
# compute the accuracy for training set and testing set
trAcc = 100 * (1 - np.mean(np.abs(train_Pred - train_label)))
teAcc = 100 * (1 - np.mean(np.abs(test_Pred - test_label)))
print("Accuracy for training set is {0:0.3f} %".format(trAcc))
print("Accuracy for testing set is {0:0.3f} %".format(teAcc))
### CODE HERE to plot costs
iterations = range(0, num_iterations, 10)
plt.plot(iterations, costs)
plt.title("Sparse Autoencoder: " + str(net_dims) + " (p = " + str(p) +
")\nTraining accuracy:{0:0.3f}% Testing accuracy:{1:0.3f}%".format(trAcc, teAcc))
plt.xlabel("Iteration")
plt.ylabel("Cost")
plt.show()
W1 = parameters["W1"]
tmp = np.reshape(W1[0:100, :], [100, 28, 28])
for i in range(100):
plt.subplot(10, 10, i + 1)
plt.axis('off')
plt.imshow(tmp[i], cmap='gray')
plt.subplots_adjust(left=0.16, bottom=0.05, right=0.84, top=0.95, wspace=0.05, hspace=0.05)
plt.suptitle("100 rows of W1 in 28*28 images" + " (p = " + str(p) + ")")
plt.show()
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
e9a1fed6a23067a05df9d37a4204e81098c48194 | b9bf3b34b59ec8e566b7ad6e58b7d0429370d6bd | /gunicorn_conf.py | 3b6bec2f43185136d7017ecf5ea3fe59f9f34931 | []
| no_license | dutradda/chunli | 7eea614b6c6c3c0738bec2f15d8224430e450a82 | 54e4385a34f805a2c13acdf85aec98d63c4eaff7 | refs/heads/master | 2021-08-16T09:22:45.388575 | 2020-09-03T12:55:33 | 2020-09-03T12:55:33 | 217,397,141 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 327 | py | import os
import redis
def worker_exit(server, worker):
r = redis.Redis.from_url(os.environ.get('REDIS_TARGET', 'redis://'))
r.publish('chunli:distributed', 'stop')
def child_exit(server, worker):
r = redis.Redis.from_url(os.environ.get('REDIS_TARGET', 'redis://'))
r.publish('chunli:distributed', 'stop')
| [
"[email protected]"
]
| |
0332c4d5e620cd87f9b70d77e4f57a67c07e72a3 | 3b89c0a97ac6b58b6923a213bc8471e11ad4fe69 | /python/CodingExercises/MoveSpacesFrontString.py | af9641cf57932b4daa0e84d62d196bc3aa65de22 | []
| no_license | ksayee/programming_assignments | b187adca502ecf7ff7b51dc849d5d79ceb90d4a6 | 13bc1c44e1eef17fc36724f20b060c3339c280ea | refs/heads/master | 2021-06-30T07:19:34.192277 | 2021-06-23T05:11:32 | 2021-06-23T05:11:32 | 50,700,556 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,116 | py | '''
Move spaces to front of string in single traversal
Given a string that has set of words and spaces, write a program to move all spaces to front of string, by traversing the string only once.
Examples:
Input : str = "geeks for geeks"
Output : ste = " geeksforgeeks"
Input : str = "move these spaces to beginning"
Output : str = " movethesespacestobeginning"
There were four space characters in input,
all of them should be shifted in front.
'''
def MoveSpacesFrontString(str1):
output_list=[]
lst=str1.split(' ')
prev_word=''
for word in lst:
if len(word)==0:
output_list.append(' ')
else:
if len(prev_word)>0:
output_list.append(' ')
prev_word=word
output_list.append(''.join(lst))
return ''.join(output_list)
def main():
str1="geeks for geeks"
print(MoveSpacesFrontString(str1))
str1 = "move these spaces to beginning"
print(MoveSpacesFrontString(str1))
str1 = "move these spaces to beginning"
print(MoveSpacesFrontString(str1))
if __name__=='__main__':
main() | [
"[email protected]"
]
| |
37c263710e2e5d8dea3aea2656664222dd57cb01 | 55616b9bc2a5726f1bfb005596987db8c43872c7 | /App/forms.py | e04a83ed5c6a5eb065cb39e0dc6e80bbeeff3dfe | []
| no_license | helloluoc/flaskblog | 96d1dee4b31fce879fd17b4220d697ae00f0e74d | 500710b7229c704fcee2bfe27b2d2dbd2c3a2784 | refs/heads/master | 2020-03-23T20:17:53.834356 | 2018-09-30T14:11:26 | 2018-09-30T14:11:26 | 142,033,367 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,682 | py | from flask.ext.wtf import FlaskForm
from wtforms import StringField,TextAreaField,SubmitField,PasswordField,BooleanField,FileField
from wtforms.validators import DataRequired,Length,EqualTo,Email
class PostForm(FlaskForm):
content = TextAreaField(
render_kw={'placeholder':'骚年,你想表达什么...'},
validators=[
DataRequired('stupid,data required!'),
Length(min=5, max=200, message='stupid,length 2-200!')
])
submit = SubmitField(label='发表')
class RegisterForm(FlaskForm):
username = StringField(
label='用户名',
validators=[
DataRequired('stupid,data required!'),
Length(min=5, max=20, message='stupid,length 5-20!')
],
render_kw={'placeholder':'please enter username'}
)
password = PasswordField(
label='密码',
validators=[
DataRequired('stupid,data required!'),
Length(min=5, max=20, message='stupid,length 5-20!')
],
render_kw={'placeholder':'please enter password'}
)
repeat = PasswordField(
label='确认密码',
validators=[
DataRequired('stupid,data required!'),
EqualTo('password','stupid,not equal to the first')
],
render_kw={'placeholder':'please enter password again'}
)
email = StringField(
label='邮箱',
validators=[
DataRequired('stupid,data required!'),
Length(min=5, max=100, message='stupid,length 5-20!'),
Email('stupid,invalid email')
],
render_kw={'placeholder':'please enter email'}
)
submit = SubmitField(label='注册')
class ProfileForm(FlaskForm):
username = StringField(render_kw={'disabled':'disabled'})
email = StringField(render_kw={'disabled':'disabled'})
class LoginForm(FlaskForm):
username = StringField(
label='用户名',
validators=[
DataRequired('stupid,data required!'),
Length(min=5, max=20, message='stupid,length 5-20!')
],
render_kw={'placeholder':'please enter username'}
)
password = PasswordField(
label='密码',
validators=[
DataRequired('stupid,data required!'),
Length(min=5, max=20, message='stupid,length 5-20!')
],
render_kw={'placeholder':'please enter password'}
)
remember = BooleanField(label='记住我')
submit = SubmitField(label='立即登录')
class UploadForm(FlaskForm):
file = FileField(label='上传头像', validators=[
DataRequired('stupid,data required!')
])
submit = SubmitField(label='上传头像')
| [
"[email protected]"
]
| |
c2dee47972cf180e467ae4cfe43135ba4248be4c | 97f9c0602f5d202adb3a159c211ca8075a5c4b07 | /tictactoe/gameplay/models.py | ffef41c78f6d65dfea9bf2b0ddb0f6dfc81c4614 | []
| no_license | julia-thea/django-tictactoe | c836360b27acfe8d7f5cd8a851ef01eab81a4985 | 1276ac180becb4280272e2b57dc1c54444ec57c2 | refs/heads/master | 2022-06-28T04:36:05.420980 | 2020-05-10T21:17:59 | 2020-05-10T21:17:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,414 | py | from django.db import models
from django.db.models import Q
from django.contrib.auth.models import User
GAME_STATUS_CHOICES = (
('F', 'First Player To Move'),
('S', 'Second Player To Move'),
('W', 'First Player Wins'),
('L', 'Second Player Wins'),
('D', 'Draw')
)
class GamesQuerySet(models.QuerySet):
def games_for_user(self, user):
return self.filter(
Q(first_player=user) | Q(second_player=user)
)
def active(self):
return self.filter(
Q(status='F') | Q(status='S')
)
class Game(models.Model):
first_player = models.ForeignKey(User,
related_name="games_first_player", on_delete=models.CASCADE)
second_player = models.ForeignKey(User,
related_name="games_second_player", on_delete=models.CASCADE)
start_time = models.DateTimeField(auto_now_add=True)
last_active = models.DateTimeField(auto_now=True)
status = models.CharField(max_length=1, default='F',
choices=GAME_STATUS_CHOICES)
objects = GamesQuerySet.as_manager()
def __str__(self):
return "{0} vs {1}".format(
self.first_player, self.second_player)
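# Editor's sketch (illustrative): because GamesQuerySet is attached via as_manager(),
# views can chain the custom filters, e.g. Game.objects.games_for_user(user).active()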
class Move(models.Model):
x = models.IntegerField()
y = models.IntegerField()
comment = models.CharField(max_length=300)
game = models.ForeignKey(Game, on_delete=models.CASCADE) | [
"[email protected]"
]
| |
dc105c937af95e74bf4880b57361a7470c141909 | fb8cbebdf034b2f478943752d5443afc82c6eef5 | /tuirer/venv/lib/python3.6/site-packages/jedi/evaluate/dynamic.py | fe9d28e5d70906257d64b55fcc219bbc2f5d3c6a | []
| no_license | fariasjr/CitiTuirer | f64e0ec93ef088f8140bb0961d2ad4ed3b59448a | deb3f7a9c2d45b8a7f54639037f097b99abdac11 | refs/heads/master | 2020-03-24T05:10:36.261050 | 2018-08-01T20:24:30 | 2018-08-01T20:24:30 | 142,477,521 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,165 | py | """
One of the really important features of |jedi| is to have an option to
understand code like this::
def foo(bar):
bar. # completion here
foo(1)
There's no doubt whether bar is an ``int`` or not, but if there's also a call
like ``foo('str')``, what would happen? Well, we'll just show both. Because
that's what a human would expect.
It works as follows:
- |Jedi| sees a param
- search for function calls named ``foo``
- execute these calls and check the input.
"""
from jedi import debug, settings
from jedi.evaluate import imports
from jedi.evaluate.arguments import TreeArguments
from jedi.evaluate.base_context import ContextSet
from jedi.evaluate.cache import evaluator_function_cache
from jedi.evaluate.context import ModuleContext, instance
from jedi.evaluate.helpers import is_stdlib_path
from jedi.evaluate.param import create_default_params
from jedi.evaluate.utils import to_list
from jedi.parser_utils import get_parent_scope
from parso.python import tree
MAX_PARAM_SEARCHES = 20
class MergedExecutedParams(object):
"""
Simulates being a parameter while actually just being multiple params.
"""
def __init__(self, executed_params):
self._executed_params = executed_params
def infer(self):
return ContextSet.from_sets(p.infer() for p in self._executed_params)
@debug.increase_indent
def search_params(evaluator, execution_context, funcdef):
"""
A dynamic search for param values. If you try to complete a type:
>>> def func(foo):
... foo
>>> func(1)
>>> func("")
    It is not known what type ``foo`` is without analysing the whole code. You
have to look for all calls to ``func`` to find out what ``foo`` possibly
is.
"""
if not settings.dynamic_params:
return create_default_params(execution_context, funcdef)
evaluator.dynamic_params_depth += 1
try:
path = execution_context.get_root_context().py__file__()
if path is not None and is_stdlib_path(path):
# We don't want to search for usages in the stdlib. Usually people
# don't work with it (except if you are a core maintainer, sorry).
# This makes everything slower. Just disable it and run the tests,
# you will see the slowdown, especially in 3.6.
return create_default_params(execution_context, funcdef)
if funcdef.type == 'lambdef':
string_name = _get_lambda_name(funcdef)
if string_name is None:
return create_default_params(execution_context, funcdef)
else:
string_name = funcdef.name.value
debug.dbg('Dynamic param search in %s.', string_name, color='MAGENTA')
try:
module_context = execution_context.get_root_context()
function_executions = _search_function_executions(
evaluator,
module_context,
funcdef,
string_name=string_name,
)
if function_executions:
zipped_params = zip(*list(
function_execution.get_params()
for function_execution in function_executions
))
params = [MergedExecutedParams(executed_params) for executed_params in zipped_params]
# Evaluate the ExecutedParams to types.
else:
return create_default_params(execution_context, funcdef)
finally:
debug.dbg('Dynamic param result finished', color='MAGENTA')
return params
finally:
evaluator.dynamic_params_depth -= 1
@evaluator_function_cache(default=None)
@to_list
def _search_function_executions(evaluator, module_context, funcdef, string_name):
"""
Returns a list of param names.
"""
compare_node = funcdef
if string_name == '__init__':
cls = get_parent_scope(funcdef)
if isinstance(cls, tree.Class):
string_name = cls.name.value
compare_node = cls
found_executions = False
i = 0
for for_mod_context in imports.get_modules_containing_name(
evaluator, [module_context], string_name):
if not isinstance(module_context, ModuleContext):
return
for name, trailer in _get_possible_nodes(for_mod_context, string_name):
i += 1
# This is a simple way to stop Jedi's dynamic param recursion
# from going wild: The deeper Jedi's in the recursion, the less
# code should be evaluated.
if i * evaluator.dynamic_params_depth > MAX_PARAM_SEARCHES:
return
random_context = evaluator.create_context(for_mod_context, name)
for function_execution in _check_name_for_execution(
evaluator, random_context, compare_node, name, trailer):
found_executions = True
yield function_execution
# If there are results after processing a module, we're probably
# good to process. This is a speed optimization.
if found_executions:
return
def _get_lambda_name(node):
stmt = node.parent
if stmt.type == 'expr_stmt':
first_operator = next(stmt.yield_operators(), None)
if first_operator == '=':
first = stmt.children[0]
if first.type == 'name':
return first.value
return None
def _get_possible_nodes(module_context, func_string_name):
try:
names = module_context.tree_node.get_used_names()[func_string_name]
except KeyError:
return
for name in names:
bracket = name.get_next_leaf()
trailer = bracket.parent
if trailer.type == 'trailer' and bracket == '(':
yield name, trailer
def _check_name_for_execution(evaluator, context, compare_node, name, trailer):
from jedi.evaluate.context.function import FunctionExecutionContext
def create_func_excs():
arglist = trailer.children[1]
if arglist == ')':
arglist = None
args = TreeArguments(evaluator, context, arglist, trailer)
if value_node.type == 'classdef':
created_instance = instance.TreeInstance(
evaluator,
value.parent_context,
value,
args
)
for execution in created_instance.create_init_executions():
yield execution
else:
yield value.get_function_execution(args)
for value in evaluator.goto_definitions(context, name):
value_node = value.tree_node
if compare_node == value_node:
for func_execution in create_func_excs():
yield func_execution
elif isinstance(value.parent_context, FunctionExecutionContext) and \
compare_node.type == 'funcdef':
# Here we're trying to find decorators by checking the first
# parameter. It's not very generic though. Should find a better
# solution that also applies to nested decorators.
params = value.parent_context.get_params()
if len(params) != 1:
continue
values = params[0].infer()
nodes = [v.tree_node for v in values]
if nodes == [compare_node]:
# Found a decorator.
module_context = context.get_root_context()
execution_context = next(create_func_excs())
for name, trailer in _get_possible_nodes(module_context, params[0].string_name):
if value_node.start_pos < name.start_pos < value_node.end_pos:
random_context = evaluator.create_context(execution_context, name)
iterator = _check_name_for_execution(
evaluator,
random_context,
compare_node,
name,
trailer
)
for function_execution in iterator:
yield function_execution
| [
"[email protected]"
]
| |
8fe248d9822eea62924d8b53b9b960bb32bfe359 | 6541487fb7df24610e5c61aa30d4a39b9117b427 | /tests/test_math_helpers.py | 6cf87e9dc244968d69684b98f2d4a3ab0f4b7c6f | [
"MIT"
]
| permissive | theY4Kman/birdfeeder | 0e1f90a96b1607c0675ea3ab70a00fc99b97e7ac | 25503a138fe01589fb28317ae0f3e281d6ce1961 | refs/heads/master | 2023-04-21T11:23:07.699322 | 2021-03-24T08:36:13 | 2021-03-24T08:37:40 | 368,974,412 | 0 | 0 | MIT | 2021-05-19T19:03:43 | 2021-05-19T19:03:43 | null | UTF-8 | Python | false | false | 510 | py | from decimal import Decimal
from birdfeeder.math_helpers import safe_div, safe_mean
def test_safe_div_basic():
assert safe_div(10, 2) == 5.0
def test_safe_div_basic_decimal():
assert safe_div(Decimal(10), Decimal(2)) == Decimal(5)
def test_safe_div_zero_div():
assert safe_div(10, 0) == 0.0
def test_safe_mean_basic():
assert safe_mean([2, 4]) == 3.0
def test_safe_mean_empty():
assert safe_mean([]) == 0.0
def test_safe_mean_zero_values():
assert safe_mean([0, 0]) == 0.0
| [
"[email protected]"
]
| |
874c30928f3f3b99467b5c30f812bfce8a1f46e3 | 51424671f6207121ff734a3821448367c5fa7043 | /ecommerce/store/views.py | 457176ecfa0b02686fce0e12c14b70d9a3d89820 | []
| no_license | prateekthakkar/case-study | 1d7e8aa2f175bec24a7cda845517d9928734d775 | a20085150c130fab05767011faedf4c806006542 | refs/heads/master | 2022-11-06T06:24:00.604616 | 2020-06-13T05:43:49 | 2020-06-13T05:43:49 | 270,648,544 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,736 | py | from django.shortcuts import render,redirect,HttpResponse
from django.contrib import messages
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from .tasks import send_email
from .models import *
from django.http import JsonResponse
import json
import datetime
# Create your views here.
def index(request):
if request.user.is_authenticated:
# create customer
customer, created = Customer.objects.get_or_create(
email = request.user.email,)
customer.name = request.user.username
customer.user = request.user
customer.save()
products = Product.objects.all()
customer = request.user.customer
        # create the order, or get it if it already exists
order,created = Order.objects.get_or_create(customer=customer,complete=False)
items = order.orderitem_set.all()
cartItems = order.get_cart_items
else:
return render(request,'store/index.html')
context = {'product':products,'cartItems':cartItems}
return render(request,'store/index.html',context)
def register(request):
if request.method == 'POST' :
uname = request.POST['username']
fname = request.POST['fname']
lname = request.POST['lname']
email = request.POST['email']
password = request.POST['password']
u = User.objects.create_user(username=uname , first_name=fname , last_name=lname , email=email , password=password)
        messages.success(request, f'Account Created For {uname}!')
        return redirect('Register')
return render(request,'store/register.html')
def Login(request):
if request.method == 'POST' :
uname = request.POST['uname']
password = request.POST['pass']
user = authenticate(request , username=uname , password=password)
if user is not None :
login(request , user)
return redirect('Home')
else:
messages.error(request,f'invalid user or password!')
return render(request,'store/login.html')
@login_required
def Logout(request):
logout(request)
return redirect('Home')
@login_required
def cart(request):
if request.user.is_authenticated:
customer = request.user.customer
        # create the order, or get it if it already exists
order,created = Order.objects.get_or_create(customer=customer,complete=False)
items = order.orderitem_set.all()
cartItems = order.get_cart_items
else :
return redirect('Home')
context ={'items':items,'order':order,'cartItems':cartItems}
return render(request,'store/cart.html',context)
@login_required
def checkout(request):
if request.user.is_authenticated:
customer = request.user.customer
order,created = Order.objects.get_or_create(customer=customer,complete=False)
items = order.orderitem_set.all()
total = order.get_cart_total
tax = 18
tax_total = (total * tax) / 100
grand_total = tax_total + total
cartItems = order.get_cart_items
else:
return redirect('Home')
context ={'items':items,'order':order,'tax':tax_total,'grand_total':grand_total,'cartItems':cartItems}
return render(request,'store/checkout.html',context)
def updateItem(request):
# parse data
data = json.loads(request.body)
# query data from body and grab the value
productId = data['productId']
action = data['action']
# loggedin customer
customer = request.user.customer
# get product
product = Product.objects.get(id=productId)
# get or create product
order, created = Order.objects.get_or_create(customer=customer, complete=False)
# create order item[order is get or create]
# if oder item already exists according to the product and order so no need to create new just
# change quantity add or subtract
orderItem, created = OrderItem.objects.get_or_create(order=order, product=product)
if action == "add":
orderItem.quantity = (orderItem.quantity + 1)
elif action == "remove":
orderItem.quantity = (orderItem.quantity - 1)
orderItem.save()
if orderItem.quantity <= 0:
orderItem.delete()
return JsonResponse("Item was Added",safe=False)
def processOrder(request):
transaction_id = datetime.datetime.now().timestamp()
data = json.loads(request.body)
if request.user.is_authenticated:
customer = request.user.customer
order, created = Order.objects.get_or_create(customer=customer, complete=False)
total = float(data['form']['total'])
order.transaction_id = transaction_id
if total == float(order.get_cart_total):
order.complete = True
order.save()
if order.shipping == True:
ShippingAddress.objects.create(
customer = customer,
order = order,
address = data['shipping']['address'],
city = data['shipping']['city'],
state = data['shipping']['state'],
country = data['shipping']['country'],
zipcode = data['shipping']['zipcode']
)
send_email.delay()
return JsonResponse('payment completed!',safe=False)
def search(request):
query = request.GET['query']
if len(query) > 50:
products = []
else:
name = Product.objects.filter(name__icontains=query)
price = Product.objects.filter(price__icontains=query)
products = name.union(price)
context = {'product':products,'query':query}
return render(request,"store/search.html",context)
| [
"[email protected]"
]
| |
b9b8b6190fea295a20706bf72e02f8bd6b16d816 | 0a15660807aee7d2fccbef1a3e633cabd1deb972 | /subway/models.py | 6dd5ae55f71cff97c7052df438f87e6a8c662e4e | []
| no_license | chirs/hs | 7860e77230cd2577cac79539039f0e2a7590ef35 | f1985e11a73b29fa8bf4fd1725c529ec8e61cb5b | refs/heads/master | 2021-01-21T10:42:15.789926 | 2017-02-28T20:12:31 | 2017-02-28T20:12:31 | 83,474,848 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,437 | py | from sqlalchemy import Table, Column, Integer, String, Boolean, DateTime, MetaData, ForeignKey, Text, Float
from sqlalchemy.orm import mapper
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
engine = create_engine('sqlite:///:memory:', echo=True)
Base = declarative_base()
class Station(Base):
"""
A subway station, like "Atlantic - Pacific"
"""
__tablename__ = 'stations'
id = Column(Integer, primary_key=True)
sid = Column(String)
name = Column(String)
lat = Column(Float)
lng = Column(Float)
def __init__(self, sid, name, lat, lng):
self.sid = sid
self.name = name
self.lat = lat
self.lng = lng
class SubStation(Base):
"""
A subway substation, like 116N [116th Street North]
"""
__tablename__ = 'substations'
id = Column(Integer, primary_key=True)
pid = Column(Integer, ForeignKey('stations.id'))
name = Column(String)
class Route(Base):
"""
A subway route like 1 or D.
"""
__tablename__ = 'routes'
id = Column(Integer, primary_key=True)
rid = Column(String)
name = Column(String)
description = Column(String)
color = Column(String)
def __init__(self, rid, name, description, color):
self.rid = rid
self.name = name
self.description = description
self.color = color
Base.metadata.create_all(engine)
| [
"[email protected]"
]
| |
dd15eca3d521afbdc79bca58fa83066ccbc92404 | 3337e9150a743e0df2898528dd1e4dfac9730b25 | /artemis/general/mymath.py | e60d306dc6a969ee2e04e2cb9db36f9d9ba7edad | []
| no_license | ml-lab/artemis | f3353cb462b06d64e1007010db94667b4703c90e | b4f5f627f1798aff90b845d70fd582142a9f76c8 | refs/heads/master | 2021-01-22T06:49:41.346341 | 2017-09-01T15:31:13 | 2017-09-01T15:31:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,159 | py | import logging
from artemis.general.should_be_builtins import memoize, bad_value
import numpy as np
from scipy.stats import norm, mode as sp_mode
try:
from scipy import weave
except ImportError:
logging.warn("Could not import scipy.weave. That's ok, ignore this unless you need it.")
__author__ = 'peter'
# Note - this module used to be called math, but it somehow results in a numpy import error
# due to some kind of name conflict with another module called math.
sigm = lambda x: 1/(1+np.exp(-x))
def cummean(x, axis = None):
"""
Cumulative mean along axis
:param x: An array
:param axis: The axis
:return: An array of the same shape
"""
if axis is None:
assert isinstance(x, list) or x.ndim == 1, 'You must specify axis for a multi-dimensional array'
axis = 0
elif axis < 0:
axis = x.ndim+axis
x = np.array(x)
normalizer = np.arange(1, x.shape[axis]+1).astype(float)[(slice(None), )+(None, )*(x.ndim-axis-1)]
return np.cumsum(x, axis)/normalizer
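# Editor's note (illustrative): cummean([1., 2., 3.]) gives [1.0, 1.5, 2.0], i.e. the
# running mean of the first k elements at position k-1.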
def cumvar(x, axis = None, sample = True):
"""
:return: Cumulative variance along axis
"""
if axis is None:
assert isinstance(x, list) or x.ndim == 1, 'You must specify axis for a multi-dimensional array'
axis = 0
if not isinstance(x, np.ndarray):
x = np.array(x)
ex_2 = cummean(x, axis=axis)**2
e_x2 = cummean(x**2, axis=axis)
var = e_x2-ex_2
if sample and x.shape[axis] > 1:
var *= x.shape[axis]/float(x.shape[axis]-1)
return var
@memoize
def binary_permutations(n_bits):
"""
    Given some number of bits, return a shape (2**n_bits, n_bits) boolean array containing every permutation
of those bits as a row.
:param n_bits: An integer number of bits
    :return: A shape (2**n_bits, n_bits) boolean array containing every permutation
of those bits as a row.
"""
return np.right_shift(np.arange(2**n_bits)[:, None], np.arange(n_bits-1, -1, -1)[None, :]) & 1
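# Editor's note (illustrative): binary_permutations(2) gives the rows
# [0, 0], [0, 1], [1, 0], [1, 1] (as 0/1 integers), one permutation per row.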
def softmax(x, axis = None):
"""
The softmax function takes an ndarray, and returns an ndarray of the same size,
with the softmax function applied along the given axis. It should always be the
case that np.allclose(np.sum(softmax(x, axis), axis)==1)
"""
if axis is None:
        assert x.ndim==1, "You need to specify the axis for softmax if your data is more than 1-D"
axis = 0
x = x - np.max(x, axis=axis, keepdims=True) # For numerical stability - has no effect mathematically
expx = np.exp(x)
return expx/np.sum(expx, axis=axis, keepdims=True)
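# Editor's note (illustrative): softmax(np.array([0., 0.])) gives [0.5, 0.5], and for a
# 2-D input the slices along the chosen axis each sum to 1.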
def expected_sigm_of_norm(mean, std, method = 'probit'):
"""
Approximate the expected value of the sigmoid of a normal distribution.
Thanks go to this guy:
http://math.stackexchange.com/questions/207861/expected-value-of-applying-the-sigmoid-function-to-a-normal-distribution
:param mean: Mean of the normal distribution
:param std: Standard Deviation of the normal distribution
:return: An approximation to Expectation(sigm(N(mu, sigma**2)))
"""
if method == 'maclauren-2':
eu = np.exp(-mean)
approx_exp = 1/(eu+1) + 0.5*(eu-1)*eu/((eu+1)**3) * std**2
return np.minimum(np.maximum(approx_exp, 0), 1)
elif method == 'maclauren-3':
eu = np.exp(-mean)
approx_exp = 1/(eu+1) + \
0.5*(eu-1)*eu/((eu+1)**3) * std**2 + \
(eu**3-11*eu**2+57*eu-1)/((8*(eu+1))**5) * std**4
return np.minimum(np.maximum(approx_exp, 0), 1)
elif method == 'probit':
return norm.cdf(mean/np.sqrt(2.892 + std**2))
else:
raise Exception('Method "%s" not known' % method)
l1_error = lambda x1, x2: np.mean(np.abs(x1-x2), axis = -1)
def normalize(x, axis=None, degree = 2, avoid_nans = False):
"""
Normalize array x.
:param x: An array
:param axis: Which axis to normalize along
:param degree: Degree of normalization (1 for L1-norm, 2 for L2-norm, etc)
:param avoid_nans: If, along an axis, there is a norm of zero, then normalize this to a uniform vector (instead of nans).
:return: An array the same shape as x, normalized along the given axis
"""
assert degree in (1, 2), "Give me a reason and I'll give you more degrees"
if degree == 1:
z = np.sum(np.abs(x), axis = axis, keepdims=True)
else:
z = np.sum(x**degree, axis = axis, keepdims=True)**(1./degree)
normed = x/z
if avoid_nans:
uniform_vector_value = (1./x.shape[axis])**(1./degree)
normed[np.isnan(normed)] = uniform_vector_value
return normed
def mode(x, axis = None, keepdims = False):
mode_x, _ = sp_mode(x, axis = axis)
if not keepdims:
mode_x = np.take(mode_x, 0, axis = axis)
return mode_x
def cummode(x, weights = None, axis = 1):
"""
Cumulative mode along an axis. Ties give priority to the first value to achieve the
given count.
"""
assert x.ndim == 2 and axis == 1, 'Only implemented for a special case!'
all_values, element_ids = np.unique(x, return_inverse=True)
n_unique = len(all_values)
element_ids = element_ids.reshape(x.shape)
result = np.zeros(x.shape, dtype = int)
weighted = weights is not None
if weighted:
assert x.shape == weights.shape
counts = np.zeros(n_unique, dtype = float if weighted else int)
code = """
bool weighted = %s;
int n_samples = Nelement_ids[0];
int n_events = Nelement_ids[1];
for (int i=0; i<n_samples; i++){
float maxcount = 0;
int maxel = -1;
for (int k=0; k<n_unique; k++)
counts[k] = 0;
for (int j=0; j<n_events; j++){
int ix = i*n_events+j;
int k = element_ids[ix];
counts[k] += weighted ? weights[ix] : 1;
if (counts[k] > maxcount){
maxcount = counts[k];
maxel = k;
}
result[ix]=maxel;
}
}
""" % ('true' if weighted else 'false')
weave.inline(code, ['element_ids', 'result', 'n_unique', 'counts', 'weights'], compiler = 'gcc')
mode_values = all_values[result]
return mode_values
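# Sketch of the expected behaviour (needs the inline-weave dependency used above, i.e. a Python 2 setup);
# ties go to the value that reached the winning count first:
# >>> cummode(np.array([[1, 2, 2, 3]]))
# array([[1, 1, 2, 2]])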
def angle_between(a, b, axis=None, in_degrees = False):
"""
Return the angle between two vectors a and b, in radians. Raise an exception if one is a zero vector
:param a: A vector
:param b: A vector the same size as a
:return: The angle between these vectors, in radians.
Credit to Pace: http://stackoverflow.com/questions/2827393/angles-between-two-n-dimensional-vectors-in-python
"""
cos_dist = cosine_distance(a, b, axis=axis)
angle = np.arccos(cos_dist)
if in_degrees:
angle = angle * 180/np.pi
return angle
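# Doctest-style sketch: orthogonal vectors are pi/2 radians (90 degrees) apart.
# >>> angle_between(np.array([1., 0.]), np.array([0., 1.]))
# 1.5707963...
# >>> angle_between(np.array([1., 0.]), np.array([0., 1.]), in_degrees=True)
# 90.0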
def cosine_distance(a, b, axis=None):
"""
Return the cosine distance between two vectors a and b. Raise an exception if one is a zero vector
:param a: An array
:param b: Another array of the same shape
:return: The cosine distance between a and b, reduced along the given axis.
Credit to Pace: http://stackoverflow.com/questions/2827393/angles-between-two-n-dimensional-vectors-in-python
"""
a = np.array(a) if not isinstance(a, np.ndarray) else a
b = np.array(b) if not isinstance(b, np.ndarray) else b
if not a.dtype==float:
a=a.astype(float)
if not b.dtype==float:
b=b.astype(float)
if axis is None:
a = a.ravel()
b = b.ravel()
axis = 0
assert a.shape[-1]==b.shape[-1]
cosine_distance = (a*b).sum(axis=axis)/np.sqrt((a**2).sum(axis=axis) * (b**2).sum(axis=axis))
# For numerical resons, we might get values outside [-1, 1] here, so we truncate:
cosine_distance = np.minimum(cosine_distance, 1)
cosine_distance = np.maximum(cosine_distance, -1)
return cosine_distance
def degrees_between(a, b):
return angle_between(a, b, in_degrees=True)
def magnitude_ratio(a, b):
"""
Return the ratio of the L2-magnitudes of each vector
:param a: A vector
:param b: Another vector of the same size
    :return: The ratio |a|/|b| of the L2-magnitudes of the two vectors
"""
assert a.ndim == 1 and a.shape==b.shape
a_mag = np.sqrt(np.sum(a**2))
b_mag = np.sqrt(np.sum(b**2))
d_magnitude = a_mag/b_mag
return d_magnitude
def is_parallel(a, b, angular_tolerance = 1e-7):
"""
Test whether two vectors are parallel to within a given tolerance.
Throws an exception for zero-vectors.
:param a: A vector
:param b: A vector the same size as a
:param angular_tolerance: The tolerance, in radians.
:return: A boolean, indicating that the vectors are parallel to within the specified tolerance.
"""
    assert 0 <= angular_tolerance <= 2*np.pi, "It doesn't make sense to specify an angular tolerance outside of [0, 2*pi]. Why are you doing this?"
angle = angle_between(a, b)
return angle < angular_tolerance
def align_curves(xs, ys, n_bins='median', xrange = ('min', 'max'), spacing = 'lin'):
"""
Given multiple curves with different x-coordinates, interpolate so that each has the same x points.
:param xs: A length-N list of sorted vectors containing the x-coordinates of each curve
:param ys: A length-N list of vectors containing the corresponding y-coordinates
:param n_bins: Number of points to make along new x-axis. 'median' to use the median number of points in the curves.
:param xrange: 2-tuple indicating range of x-axis to span. 'min' indicates "minimum across curves", As with 'max'.
    :param spacing: Either 'lin' or 'log', depending on whether you want your interpolation points spaced linearly or
logarithmically.
:return: (new_xs, new_ys).
new_xs is a (n_bins, ) curve indicating the new x-locations.
new_ys is a (N, n_bins)
"""
assert spacing in ('lin', 'log')
assert len(xs)==len(ys)
assert all(len(x)==len(y) for x, y in zip(xs, ys))
start, stop = xrange
if start == 'min':
start = np.min([x[0] for x in xs if len(x)>0])
if stop == 'max':
stop = np.max([x[-1] for x in xs if len(x)>0])
if n_bins == 'median':
n_bins = int(np.round(np.median([len(x) for x in xs])))
new_x = np.linspace(start, stop, n_bins) if spacing=='lin' else np.logspace(np.log10(start), np.log10(stop), n_bins)
new_ys = np.zeros((len(xs), n_bins)) + np.nan
for x, y, ny in zip(xs, ys, new_ys):
if len(x)>=2:
ny[:] = np.interp(x=new_x, xp=x, fp=y, left=np.nan, right=np.nan)
return new_x, new_ys
def sqrtspace(a, b, n_points):
"""
:return: Distribute n_points quadratically from point a to point b, inclusive
"""
return np.linspace(0, 1, n_points)**2*(b-a)+a
def fixed_diff(x, axis=-1, initial_value = 0.):
"""
Modification of numpy.diff where the first element is compared to the initial value.
The resulting array has the same shape as x.
Note that this inverts np.cumsum so that np.cumsum(fixed_diff(x)) == x (except for numerical errors)
:param x: An array
:param axis: Axis along which to diff
    :param initial_value: The initial value against which to diff the first element along the axis.
:return: An array of the same shape, representing the difference in x along the axis.
"""
x = np.array(x, copy=False)
if axis<0:
axis = x.ndim+axis
result = np.empty_like(x)
initial_indices = (slice(None), )*axis
result[initial_indices+(slice(1, None), )] = np.diff(x, axis=axis)
if initial_value == 'first':
result[initial_indices+(0, )] = 0
else:
result[initial_indices+(0, )] = x[initial_indices+(0, )]-initial_value
return result
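# Doctest-style sketch showing that fixed_diff inverts np.cumsum:
# >>> fixed_diff(np.array([1., 3., 6.]))
# array([1., 2., 3.])
# >>> np.cumsum(fixed_diff(np.array([1., 3., 6.])))
# array([1., 3., 6.])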
def decaying_cumsum(x, memory, axis=-1):
if axis<0:
axis = x.ndim+axis
assert 0 <= memory < 1
result = np.empty_like(x)
leading_indices = (slice(None), )*axis
one_minus_mem = 1-memory
result[leading_indices+(0, )] = one_minus_mem*x[leading_indices+(0, )]
for i in xrange(1, x.shape[axis]):
result[leading_indices+(i, )] = memory*result[leading_indices+(i-1, )] + one_minus_mem*x[leading_indices+(i, )]
    if np.max(np.abs(result)) > 1e9:
        print 'decaying_cumsum: unusually large values, max |x| = {}'.format(np.max(np.abs(x)))
return result
def point_space(start, stop, n_points, spacing):
if spacing=='lin':
values = np.linspace(start, stop, n_points)
elif spacing=='sqrt':
values = sqrtspace(start, stop, n_points)
elif spacing=='log':
values = np.logspace(np.log10(start), np.log10(stop), n_points)
else:
raise NotImplementedError(spacing)
return values
def geosum(rate, t_end, t_start=0):
"""
Geometric sum of a series from t_start to t_end
    e.g. geosum(0.5, t_end=4, t_start=2) = 0.5**2 + 0.5**3 + 0.5**4 = 0.4375
"""
return np.where(rate==1, np.array(t_end-t_start+1, copy=False).astype(float), np.array(rate**(t_end+1)-rate**t_start)/(rate-1))
def selective_sum(x, ixs):
"""
:param x: An nd array
:param ixs: A tuple of length x.ndim indexing each of the dimensions.
:return: A scalar sum of all array elements selected by any of the dimensions.
This is best explained by example:
a = np.array([[ 0, 1, 2, 3],
... [ 4, 5, 6, 7],
... [ 8, 9, 10, 11],
... [12, 13, 14, 15]])
    If we want to add all elements in rows 1 and 3, as well as column 2, then we go:
s = selective_sum(a, [(1,3), 2])
And we can verify that:
s == 4+5+6+7 + 12+13+14+15 + 2+10 == 88
    If you don't want to select anything along a given dimension, pass an empty tuple for that dimension.
"""
assert x.ndim==len(ixs), 'The dimension of x must match the length of ixs'
al = (slice(None), )
selection_mask = np.zeros(x.shape, dtype='bool')
for i, ix in enumerate(ixs):
selection_mask[al*i+(ix, )+al*(x.ndim-i-1)] = True
return (x*selection_mask).sum()
# Note, we'd like to do this more efficiently, but it gets a little complicated.
# (we have to add the individual indexes, but subtract the double-counted regions, and then subtract the triple-counted
# regions, and so on....)
# return sum(x[al*i+(ix, )+al*(x.ndim-i-1)].sum() for i, ix in enumerate(ixs)) - x[ixs].sum()
def conv_fanout(input_len, kernel_len, conv_mode):
"""
Note: this is horrific and must be simplified.
:param input_len:
:param kernel_len:
:param conv_mode:
:return:
"""
left_pad = kernel_len / 2 if conv_mode == 'same' else 0 if conv_mode == 'valid' else conv_mode if isinstance(conv_mode, int) else bad_value(conv_mode)
right_pad = (kernel_len-1) / 2 if conv_mode == 'same' else 0 if conv_mode == 'valid' else conv_mode if isinstance(conv_mode, int) else bad_value(conv_mode)
full_range = np.arange(left_pad + input_len + right_pad)
max_fanout = np.minimum(kernel_len, np.maximum(input_len-kernel_len+1+2*left_pad, 1))
fanout_over_full_range = np.minimum(max_fanout, np.minimum(full_range+1, full_range[::-1]+1))
fanout = fanout_over_full_range[left_pad:len(full_range)-right_pad]
return fanout
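# Sketch of the expected output for a length-5 input and a length-3 kernel in 'valid' mode:
# each entry is the number of output positions that the corresponding input element feeds into.
# >>> conv_fanout(5, 3, 'valid')
# array([1, 2, 3, 2, 1])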
def conv2_fanout_map(input_shape, kernel_shape, conv_mode):
size_y, size_x = input_shape
k_size_y, k_size_x = kernel_shape
y_fanout = conv_fanout(input_len = size_y, kernel_len=k_size_y, conv_mode=conv_mode)
x_fanout = conv_fanout(input_len = size_x, kernel_len=k_size_x, conv_mode=conv_mode)
fanout_map = y_fanout[:, None] * x_fanout
return fanout_map
def levenshtein_distance(s1, s2):
"""
The Levenshtein Distance (a type of edit distance) between strings
Thank you to Salvador Dali here: https://stackoverflow.com/a/32558749/851699
:param s1: A string
:param s2: Another String
:return: An integer distance.
"""
if len(s1) > len(s2):
s1, s2 = s2, s1
distances = range(len(s1) + 1)
for i2, c2 in enumerate(s2):
distances_ = [i2+1]
for i1, c1 in enumerate(s1):
if c1 == c2:
distances_.append(distances[i1])
else:
distances_.append(1 + min((distances[i1], distances[i1 + 1], distances_[-1])))
distances = distances_
return distances[-1] | [
"[email protected]"
]
| |
227db5c8db59ef92e3388e793f7c0346ae313a35 | e1e15a264cb6b8de018b335bf07bb540ae5edf76 | /core/update.py | 6d9496ca5f48d4df412834c75b06499bea2a08b0 | [
"MIT"
]
| permissive | JustF0rWork/malware | 00891ebf250b2f9577081d9909362fd0cac9bdd9 | 906f9a2e7d3c985bd0d31a350d2f08c44b4820ff | refs/heads/master | 2021-01-10T15:56:19.866067 | 2016-01-12T02:06:55 | 2016-01-12T02:06:55 | 49,463,263 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 9,118 | py | #!/usr/bin/env python
"""
Copyright (c) 2014-2016 Miroslav Stampar (@stamparm)
See the file 'LICENSE' for copying permission
"""
import csv
import glob
import inspect
import os
import sqlite3
import subprocess
import sys
import time
import urllib
sys.dont_write_bytecode = True
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) # to enable calling from current directory too
from core.addr import addr_to_int
from core.common import load_trails
from core.common import retrieve_content
from core.settings import config
from core.settings import read_whitelist
from core.settings import BAD_TRAIL_PREFIXES
from core.settings import FRESH_IPCAT_DELTA_DAYS
from core.settings import LOW_PRIORITY_INFO_KEYWORDS
from core.settings import HIGH_PRIORITY_INFO_KEYWORDS
from core.settings import HIGH_PRIORITY_REFERENCES
from core.settings import IPCAT_CSV_FILE
from core.settings import IPCAT_SQLITE_FILE
from core.settings import IPCAT_URL
from core.settings import ROOT_DIR
from core.settings import TRAILS_FILE
from core.settings import USERS_DIR
from core.settings import WHITELIST
def _chown(filepath):
if not subprocess.mswindows and os.path.exists(filepath):
try:
os.chown(filepath, int(os.environ.get("SUDO_UID", -1)), int(os.environ.get("SUDO_GID", -1)))
except Exception, ex:
print "[x] '%s'" % ex
def _fopen_trails(mode):
retval = open(TRAILS_FILE, mode)
if "w+" in mode:
_chown(TRAILS_FILE)
return retval
def update_trails(server=None, force=False):
"""
Update trails from feeds
"""
trails = {}
duplicates = {}
if server:
print "[i] retrieving trails from provided 'UPDATE_SERVER' server..."
_ = retrieve_content(server)
if not _:
exit("[!] unable to retrieve data from '%s'" % server)
else:
with _fopen_trails("w+b") as f:
f.write(_)
trails = load_trails()
trail_files = []
for dirpath, dirnames, filenames in os.walk(os.path.abspath(os.path.join(ROOT_DIR, "trails"))) :
for filename in filenames:
trail_files.append(os.path.abspath(os.path.join(dirpath, filename)))
if config.CUSTOM_TRAILS_DIR:
for dirpath, dirnames, filenames in os.walk(os.path.abspath(os.path.join(ROOT_DIR, os.path.expanduser(config.CUSTOM_TRAILS_DIR)))) :
for filename in filenames:
trail_files.append(os.path.abspath(os.path.join(dirpath, filename)))
try:
if not os.path.isdir(USERS_DIR):
os.makedirs(USERS_DIR, 0755)
except Exception, ex:
exit("[!] something went wrong during creation of directory '%s' ('%s')" % (USERS_DIR, ex))
_chown(USERS_DIR)
if not trails and (force or not os.path.isfile(TRAILS_FILE) or (time.time() - os.stat(TRAILS_FILE).st_mtime) >= config.UPDATE_PERIOD or os.stat(TRAILS_FILE).st_size == 0 or any(os.stat(_).st_mtime > os.stat(TRAILS_FILE).st_mtime for _ in trail_files)):
print "[i] updating trails (this might take a while)..."
if force or config.USE_FEED_UPDATES:
sys.path.append(os.path.abspath(os.path.join(ROOT_DIR, "trails", "feeds")))
filenames = sorted(glob.glob(os.path.join(sys.path[-1], "*.py")))
else:
filenames = []
sys.path.append(os.path.abspath(os.path.join(ROOT_DIR, "trails")))
filenames += [os.path.join(sys.path[-1], "static")]
filenames += [os.path.join(sys.path[-1], "custom")]
filenames = [_ for _ in filenames if "__init__.py" not in _]
for i in xrange(len(filenames)):
filename = filenames[i]
try:
module = __import__(os.path.basename(filename).split(".py")[0])
except (ImportError, SyntaxError), ex:
print "[x] something went wrong during import of feed file '%s' ('%s')" % (filename, ex)
continue
for name, function in inspect.getmembers(module, inspect.isfunction):
if name == "fetch":
print(" [o] '%s'%s" % (module.__url__, " " * 20 if len(module.__url__) < 20 else ""))
sys.stdout.write("[?] progress: %d/%d (%d%%)\r" % (i, len(filenames), i * 100 / len(filenames)))
sys.stdout.flush()
try:
results = function()
for item in results.items():
if item[0] in trails:
if item[0] not in duplicates:
duplicates[item[0]] = set((trails[item[0]][1],))
duplicates[item[0]].add(item[1][1])
if not (item[0] in trails and (any(_ in item[1][0] for _ in LOW_PRIORITY_INFO_KEYWORDS) or trails[item[0]][1] in HIGH_PRIORITY_REFERENCES)) or item[1][1] in HIGH_PRIORITY_REFERENCES or any(_ in item[1][0] for _ in HIGH_PRIORITY_INFO_KEYWORDS):
trails[item[0]] = item[1]
if not results and "abuse.ch" not in module.__url__:
print "[x] something went wrong during remote data retrieval ('%s')" % module.__url__
except Exception, ex:
print "[x] something went wrong during processing of feed file '%s' ('%s')" % (filename, ex)
# basic cleanup
for key in trails.keys():
if key not in trails:
continue
if '?' in key:
_ = trails[key]
del trails[key]
key = key.split('?')[0]
trails[key] = _
if '//' in key:
_ = trails[key]
del trails[key]
key = key.replace('//', '/')
trails[key] = _
if key != key.lower():
_ = trails[key]
del trails[key]
key = key.lower()
trails[key] = _
if key in duplicates:
_ = trails[key]
trails[key] = (_[0], "%s (+%s)" % (_[1], ','.join(sorted(duplicates[key] - set((_[1],))))))
read_whitelist()
for key in trails.keys():
if key in WHITELIST or any(key.startswith(_) for _ in BAD_TRAIL_PREFIXES):
del trails[key]
else:
try:
key.decode("utf8")
trails[key][0].decode("utf8")
trails[key][1].decode("utf8")
except UnicodeDecodeError:
del trails[key]
try:
if trails:
with _fopen_trails("w+b") as f:
writer = csv.writer(f, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL)
for trail in trails:
writer.writerow((trail, trails[trail][0], trails[trail][1]))
except Exception, ex:
print "[x] something went wrong during trails file write '%s' ('%s')" % (TRAILS_FILE, ex)
return trails
def update_ipcat(force=False):
try:
if not os.path.isdir(USERS_DIR):
os.makedirs(USERS_DIR, 0755)
except Exception, ex:
exit("[!] something went wrong during creation of directory '%s' ('%s')" % (USERS_DIR, ex))
_chown(USERS_DIR)
if force or not os.path.isfile(IPCAT_CSV_FILE) or not os.path.isfile(IPCAT_SQLITE_FILE) or (time.time() - os.stat(IPCAT_CSV_FILE).st_mtime) >= FRESH_IPCAT_DELTA_DAYS * 24 * 3600 or os.stat(IPCAT_SQLITE_FILE).st_size == 0:
print "[i] updating ipcat database..."
try:
urllib.urlretrieve(IPCAT_URL, IPCAT_CSV_FILE)
except Exception, ex:
print "[x] something went wrong during retrieval of '%s' ('%s')" % (IPCAT_URL, ex)
else:
try:
if os.path.exists(IPCAT_SQLITE_FILE):
os.remove(IPCAT_SQLITE_FILE)
with sqlite3.connect(IPCAT_SQLITE_FILE, isolation_level=None, check_same_thread=False) as con:
cur = con.cursor()
cur.execute("BEGIN TRANSACTION")
cur.execute("CREATE TABLE ranges (start_int INT, end_int INT, name TEXT)")
with open(IPCAT_CSV_FILE) as f:
for row in f:
if not row.startswith('#') and not row.startswith('start'):
row = row.strip().split(",")
cur.execute("INSERT INTO ranges VALUES (?, ?, ?)", (addr_to_int(row[0]), addr_to_int(row[1]), row[2]))
cur.execute("COMMIT")
cur.close()
con.commit()
except Exception, ex:
print "[x] something went wrong during ipcat database update ('%s')" % ex
_chown(IPCAT_CSV_FILE)
_chown(IPCAT_SQLITE_FILE)
def main():
try:
update_trails(force=True)
update_ipcat()
except KeyboardInterrupt:
print "\r[x] Ctrl-C pressed"
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
b7aade2484b165d22de966e987fd39bcf4cf37f0 | 286df6528096b6393b61d3ecb3b7002cb9a7b983 | /python/ql/test/library-tests/frameworks/aiohttp/response_test.py | 1988f4435604cade3227c27d40ba902f6661df59 | [
"LicenseRef-scancode-python-cwi",
"LicenseRef-scancode-other-copyleft",
"GPL-1.0-or-later",
"LicenseRef-scancode-free-unknown",
"Python-2.0",
"MIT"
]
| permissive | Inncee81/codeql | ed620df0ae7b706943eccd92af37e037f540f6a4 | 38a38fd2c145628472d14c9e9d6ca812fd525793 | refs/heads/main | 2023-06-13T01:23:30.086459 | 2021-06-22T10:59:44 | 2021-06-22T10:59:44 | 379,254,229 | 1 | 0 | MIT | 2021-06-22T12:02:02 | 2021-06-22T12:02:01 | null | UTF-8 | Python | false | false | 3,173 | py | from aiohttp import web
routes = web.RouteTableDef()
@routes.get("/raw_text") # $ routeSetup="/raw_text"
async def raw_text(request): # $ requestHandler
return web.Response(text="foo") # $ HttpResponse mimetype=text/plain responseBody="foo"
@routes.get("/raw_body") # $ routeSetup="/raw_body"
async def raw_body(request): # $ requestHandler
return web.Response(body=b"foo") # $ HttpResponse mimetype=application/octet-stream responseBody=b"foo"
@routes.get("/html_text") # $ routeSetup="/html_text"
async def html_text(request): # $ requestHandler
return web.Response(text="foo", content_type="text/html") # $ HttpResponse mimetype=text/html responseBody="foo"
@routes.get("/html_body") # $ routeSetup="/html_body"
async def html_body(request): # $ requestHandler
return web.Response(body=b"foo", content_type="text/html") # $ HttpResponse mimetype=text/html responseBody=b"foo"
@routes.get("/html_body_set_later") # $ routeSetup="/html_body_set_later"
async def html_body_set_later(request): # $ requestHandler
resp = web.Response(body=b"foo") # $ HttpResponse mimetype=application/octet-stream responseBody=b"foo"
resp.content_type = "text/html" # $ MISSING: mimetype=text/html
return resp
# Each HTTP status code has an exception
# see https://docs.aiohttp.org/en/stable/web_quickstart.html#exceptions
@routes.get("/through_200_exception") # $ routeSetup="/through_200_exception"
async def through_200_exception(request): # $ requestHandler
raise web.HTTPOk(text="foo") # $ HttpResponse mimetype=text/plain responseBody="foo"
@routes.get("/through_200_exception_html") # $ routeSetup="/through_200_exception_html"
async def through_200_exception_html(request): # $ requestHandler
exception = web.HTTPOk(text="foo") # $ HttpResponse mimetype=text/plain responseBody="foo"
exception.content_type = "text/html" # $ MISSING: mimetype=text/html
raise exception
@routes.get("/through_404_exception") # $ routeSetup="/through_404_exception"
async def through_404_exception(request): # $ requestHandler
raise web.HTTPNotFound(text="foo") # $ HttpResponse mimetype=text/plain responseBody="foo"
@routes.get("/redirect_301") # $ routeSetup="/redirect_301"
async def redirect_301(request): # $ requestHandler
if not "kwarg" in request.url.query:
raise web.HTTPMovedPermanently("/login") # $ HttpResponse HttpRedirectResponse mimetype=application/octet-stream redirectLocation="/login"
else:
raise web.HTTPMovedPermanently(location="/logout") # $ HttpResponse HttpRedirectResponse mimetype=application/octet-stream redirectLocation="/logout"
@routes.get("/redirect_302") # $ routeSetup="/redirect_302"
async def redirect_302(request): # $ requestHandler
if not "kwarg" in request.url.query:
raise web.HTTPFound("/login") # $ HttpResponse HttpRedirectResponse mimetype=application/octet-stream redirectLocation="/login"
else:
raise web.HTTPFound(location="/logout") # $ HttpResponse HttpRedirectResponse mimetype=application/octet-stream redirectLocation="/logout"
if __name__ == "__main__":
app = web.Application()
app.add_routes(routes)
web.run_app(app)
| [
"[email protected]"
]
| |
8518901c7fdfc473318b9f996ce5871326d2e0c8 | e96f82c65d7b0b5acf1f98e573b695afda122698 | /Amusing Joke.py | 315cbfec9e418b84ae68d1a3a06a79c9b8f506d7 | []
| no_license | devinemonk/CodeForces-Python | 66525fe4b91a2fe780603931f81ec4aa30082315 | def55f4c8651ebe70920c8d8e01ed09e4aeafd4e | refs/heads/master | 2023-08-17T23:34:48.621928 | 2023-08-07T22:02:02 | 2023-08-07T22:02:02 | 288,174,825 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 227 | py | s=input()+input()
s=s.upper()
d={}
for i in range(ord('A'),ord('Z')+1):
d[chr(i)]=0
for i in s:
d[i]+=1
c=input()
for i in c:
d[i]-=1
for i in d:
if d[i]!=0:
print("NO")
exit()
print("YES")
| [
"[email protected]"
]
| |
7bcb8b925acf750edd56ecb347e2fd0945a767c0 | 18831623126751bfe3dc7f7d8291de2066a01c06 | /sha2.py | f8d889959b2819e7e473b5237b3b2431d76715d4 | []
| no_license | mnoalett/ringzer0-ctf | e8fbe3ee3722c56c5b03831c21522ca4ef14dab1 | fd76d83fe8a9d42ca7798c9dff43fc8e3d6645b1 | refs/heads/master | 2020-04-14T12:58:34.373073 | 2019-01-02T15:17:09 | 2019-01-02T15:17:09 | 163,855,742 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 982 | py | import hashlib
import requests
import re
url = "https://ringzer0ctf.com/login"
url_res = "https://ringzer0ctf.com/challenges/57/"
payload = {'username': 'xxx', 'password': 'xxx'}
hash_regex = "-----BEGINHASH-----<br\/>(\w+)"
salt_regex = "-----BEGINSALT-----<br\/>(\w+)"
flag = "FLAG-\w+"
session = requests.Session()
session.post(url, data=payload)
contents = session.get(url_res)
message = re.sub(r'\s+', '', contents.text)
sha1 = re.search(hash_regex,message)
sha1 = sha1.group(1)
salt = re.search(salt_regex,message)
salt = salt.group(1)
print("sha1: "+sha1)
print("salt: "+salt)
salt_bytes = salt.encode('utf-8')
for i in range(0, 9999):
    num = str(i).encode('utf-8')
    encoded1 = hashlib.sha1(salt_bytes + num).hexdigest()
    encoded2 = hashlib.sha1(num + salt_bytes).hexdigest()
    if encoded1 == sha1 or encoded2 == sha1:
        trovato = i
        print("Cracked: " + str(i))
        break
url_res = url_res + str(trovato)
response = session.post(url_res)
flg = re.search(flag,response.text)
print(flg.group())
| [
"[email protected]"
]
| |
1bdb310ceed481f9c559a64c36a67b91af52f028 | a0e79cef4779a9eedd512158e73c166bf6846110 | /NADE/deepnade/buml/Instrumentation/Parameters.py | 84e6cc558d631bd233bf50e9c35ba2a61e79fac4 | [
"BSD-3-Clause"
]
| permissive | rogen-george/Deep-Autoregressive-Model | 9ca5135ed1d8c0c5c442cd95d75467208d6b0acd | 8d567ecb6d59b3003fba9aab2a0693dab67514ef | refs/heads/master | 2021-10-25T22:50:03.508798 | 2019-04-08T00:56:33 | 2019-04-08T00:56:33 | 172,263,626 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 343 | py | from Instrumentation.Measurement import Measurement
class Parameters (Measurement):
"""The instrumentable object must implement the get_parameters method"""
def __init__(self, *args, **kwargs):
self.attribute = "parameters"
def take_measurement(self, instrumentable):
return instrumentable.model.get_parameters()
| [
"[email protected]"
]
| |
c6dcdbe0262dc26556c2962a2bb5611d42ce181f | 14bb6210a317ba7cd248571103a325713922b685 | /conf.py | 1f86d53d6e2c9cae4e30f84103b9e179ab1063eb | [
"LicenseRef-scancode-unknown-license-reference",
"CC-BY-4.0",
"LicenseRef-scancode-public-domain"
]
| permissive | ocean-transport/group-website | 469e2f951bbdf03c8125a6a9131230aea40cc066 | 89cd7877ec5e57bd15b1d6a6b64541034d9e861c | refs/heads/master | 2022-07-08T06:34:37.449564 | 2022-06-29T15:28:45 | 2022-06-29T15:28:45 | 212,735,255 | 5 | 11 | NOASSERTION | 2022-06-29T15:28:46 | 2019-10-04T04:24:56 | TeX | UTF-8 | Python | false | false | 7,574 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Ocean Transport Group documentation build configuration file, created by
# sphinx-quickstart on Wed Jul 31 18:58:11 2019.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import recommonmark
from recommonmark.transform import AutoStructify
from pybtex.style.formatting.unsrt import Style as UnsrtStyle
from pybtex.style.template import words, sentence, optional
from pybtex.style.sorting import BaseSortingStyle
from pybtex.plugin import register_plugin
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.mathjax',
'sphinx.ext.githubpages',
'recommonmark',
'sphinxcontrib.bibtex',
'sphinx_jinja'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = ['.rst', '.md']
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'LDEO Ocean Transport Group'
html_title = 'LDEO Ocean Transport Group'
copyright = '2019-2022, Ryan Abernathey'
author = 'Ryan Abernathey'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'README*']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
class DateSortingStyle(BaseSortingStyle):
def sorting_key(self, entry):
if entry.type in ('book', 'inbook'):
author_key = self.author_editor_key(entry)
elif 'author' in entry.persons:
author_key = self.persons_key(entry.persons['author'])
else:
author_key = ''
return (entry.fields.get('year', ''), author_key, entry.fields.get('title', ''))
def sort(self, entries):
return sorted(entries, key=self.sorting_key, reverse=False)
def persons_key(self, persons):
return ' '.join(self.person_key(person) for person in persons)
def person_key(self, person):
return ' '.join((
' '.join(person.prelast_names + person.last_names),
' '.join(person.first_names + person.middle_names),
' '.join(person.lineage_names),
)).lower()
def author_editor_key(self, entry):
if entry.persons.get('author'):
return self.persons_key(entry.persons['author'])
elif entry.persons.get('editor'):
return self.persons_key(entry.persons['editor'])
else:
return ''
register_plugin('pybtex.style.sorting', 'date', DateSortingStyle)
class NoWebRefStyle(UnsrtStyle):
default_sorting_style = 'date'
def format_web_refs(self, e):
return sentence [
optional [ self.format_doi(e) ],
]
register_plugin('pybtex.style.formatting', 'nowebref', NoWebRefStyle)
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_book_theme'
html_theme_options = {
"repository_url": "https://github.com/ocean-transport/group-website",
"use_issues_button": False,
"use_repository_button": True,
"use_edit_page_button": True
}
html_logo = '_static/otg_logo_blue.svg'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_css_files = ["custom.css"]
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
# html_sidebars = {
# '**': [
# 'about.html',
# 'navigation.html',
# ]
# }
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'OceanTransportGroupdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'OceanTransportGroup.tex', 'LDEO Ocean Transport Group',
'Ryan Abernathey', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'oceantransportgroup', 'LDEO Ocean Transport Group',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'OceanTransportGroup', 'LDEO Ocean Transport Group',
author, 'OceanTransportGroup', 'One line description of project.',
'Miscellaneous'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
def setup(app):
app.add_config_value('recommonmark_config', {
'url_resolver': lambda url: github_doc_root + url,
'auto_toc_tree_section': 'Contents',
}, True)
app.add_transform(AutoStructify)
# load data
jinja_contexts = {}
import yaml
with open('_data/people.yml') as people_data_file:
people = yaml.safe_load(people_data_file)
jinja_contexts['people'] = {'people': people['current']}
jinja_contexts['alumni'] = {'alumni': people['alumni']}
| [
"[email protected]"
]
| |
2baf4341109adc25e717ef6ca616d348ac57cd17 | 125ed954649430eac78b938ca13b74ae86e6d511 | /helloworld.py | 70cb0d9437c3bca59838a46bbdb73265b315b6c7 | []
| no_license | darren277-assorted/whatever | ccbe13a822c05ee3d74557ae01887c3387185c2d | 1e52543711470eadd8bd319c8ded8d28bc8c5075 | refs/heads/master | 2022-10-27T17:56:02.802392 | 2018-09-14T16:58:45 | 2018-09-14T16:58:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 136 | py | try:
print('hello world')
except ZeroDivisionError:
    print('Could not print because you tried dividing by zero somewhere.')
| [
"[email protected]"
]
| |
734bba3ac3df513251e2431b420b08c3a0bb20f7 | c2643fdff3185b659c2c7fa807d8b8d345a90343 | /tests/test_basic.py | 4bea68de088fd5206824e30ac834120108554bc5 | [
"BSD-2-Clause"
]
| permissive | auxten/fhost | b39ae209a056b301e737d176f8f12dcafd82cfa2 | 6536c4955e13fd67c939a6fc6cc687d29e976d15 | refs/heads/master | 2021-01-16T00:35:43.304418 | 2012-06-25T10:17:52 | 2012-06-25T10:17:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,837 | py | #
## BEGIN LICENSE BLOCK
#
# Copyright (c) <2012>, Raul Perez <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
## END LICENSE BLOCK
#
import context
import unittest
class BasicTestSuite(unittest.TestCase):
"""Basic test cases."""
def test_absolute_truth_and_meaning(self):
assert True
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
a3c4a503c58924da41d28428547c030ff9a7aafc | 1f0a385f70b574667b9fc2a16268ae1898b55ed2 | /Profiling.py | 929aa18e3432f24f1df648fb2d5971fb005f4fba | []
| no_license | ditzh/Age_of_Information | 4a9a4065e81cbedb4254c8a8fa815528945dee7b | eada3f778e6ce48e8fc56a8bf6cceaf138b58837 | refs/heads/master | 2022-04-04T13:16:37.511584 | 2020-02-25T22:15:49 | 2020-02-25T22:15:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,776 | py | import math
from random import randint
import cv2
import collections
#Read video from the given path and get its width and height
cap = cv2.VideoCapture('sample.mp4')
frame_width = int( cap.get(cv2.CAP_PROP_FRAME_WIDTH))
frame_height =int( cap.get( cv2.CAP_PROP_FRAME_HEIGHT))
def dist(x1,x2):
sol = math.sqrt((x2[0]-x1[0])**2+(x2[1]-x1[1])**2)
return sol
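# Sketch of the expected behaviour: Euclidean distance between two (x, y) points.
# >>> dist((0, 0), (3, 4))
# 5.0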
#Output video format and location
fourcc = cv2.VideoWriter_fourcc('M','P','E','G')
#tracker =cv2.TrackerBoosting_create()
out = cv2.VideoWriter("Output.avi", fourcc, 5.0, (frame_width,frame_height))
#get first two frames of input video
ret, frame1 = cap.read()
ret, frame2 = cap.read()
l=0
print(ret)
#array to hold co-ordinates of vehicles and bullets
bullet_xy = {}
vehicle1_xy = []
vehicle2_xy=[]
vehicle3_xy = []
bullet1_xy =[]
#Code to identify the vehicle contours
diff = cv2.absdiff(frame1, frame2)
gray = cv2.cvtColor(diff, cv2.COLOR_BGR2GRAY)
blur = cv2.GaussianBlur(gray, (5,5), 0)
_, thresh = cv2.threshold(blur, 20 , 255, cv2.THRESH_BINARY)
dilated = cv2.dilate(thresh, None, iterations=3)
_, contours, _ = cv2.findContours(dilated, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
bboxes = [] #dictonary to hold all the bounding box identified
colors = [] #array to hold assign different colours to each vehicle
for contour in contours:
bbox = cv2.boundingRect(contour) #draw bounding rectangles around identified contours
#cv2.imshow("cc",contour)
(x, y, w, h) = cv2.boundingRect(contour) #get the coordinates of the rectangle
if w>40: #if w>40 identify a rectangle as a vehicle(found by trial and error)
bboxes.append(bbox)
colors.append((randint(64, 255), randint(64, 255), randint(64, 255)))
#Creating a MultiTracker object and giving it the vehicle rectangles obtained above as input
multiTracker = cv2.MultiTracker_create()
for bbox in bboxes:
tracker = cv2.TrackerKCF_create()
multiTracker.add(tracker, frame1, bbox)
while cap.isOpened(): # As long as the video is opened
# Code to update the frame to track identified vehicles
_, retval = multiTracker.update(frame1) # returns the values of the updated frame
r0 =retval[0]
r1 =retval[1]
r2 =retval[2]
i = 0
for box in retval:
(x, y, w, h) = [int(v) for v in box]
# to create a rectangle, top-left corner and bottom right corner points are needed. p1 and p2 here.
cv2.rectangle(frame1, (x, y), (x + w, y + h), colors[i], 2, 1) # Draw rectangle around the vehicles
cv2.putText(frame1, 'v :' + str(i) + ' : ' + str(int((x))) + "," + str(int((y))), (x - 50, y - 7),cv2.FONT_HERSHEY_SIMPLEX, 0.7, (36, 255, 12), 2)
#vehicle_xy.append((((x + w) / 2), ((y + h) / 2)))
i += 1
#print(retval[0][1])
#cv2.putText(frame1, 'center: ' + str(int((x + w) / 2)) + "," + str(int((y + h) / 2)), (x + w, y + h + 7),cv2.FONT_HERSHEY_SIMPLEX, 0.9, (36, 255, 12), 2)
# print("Vehilce :" + str(vehicle_xy))
# Code to identify and Track the bullets. Very similar to Vehicle Code
gray1 = cv2.cvtColor(frame1, cv2.COLOR_BGR2GRAY)
blur1 = cv2.GaussianBlur(gray1, (5, 5), 0)
_, thresh1 = cv2.threshold(blur1, 30, 127, cv2.THRESH_BINARY)
dilated1 = cv2.dilate(thresh1, None, iterations=3)
_, contours1, _ = cv2.findContours(thresh1, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
z = 0
for p in contours1:
(x1, y1, w1, h1) = cv2.boundingRect(p)
M = cv2.moments(p)
if M["m00"] != 0:
cX = int(M["m10"] / M["m00"] -50)
cY = int(M["m01"] / M["m00"] -50)
if h1 >5 and h1<10: #If h<20, classify it as a bullet and draw the rectangle
cv2.rectangle(frame1, (x1, y1), (x1 + w1, y1 + h1), (255, 255, 0), 2,1)
z+=1
cv2.putText(frame1,'Bullet : ' + str(z) + ', l : '+ str(cX) +','+str(cY),(x1,y1-2),cv2.FONT_HERSHEY_SIMPLEX, 0.6, (36, 255, 12),2)
dict1 = {x1:y1}
key,value = x1,y1
#print("b : ",dict1)
if key not in bullet_xy:
bullet_xy.update(dict1)
dv0 = dist(r0,(cX,cY))
dv1 = dist(r1,(cX,cY))
dv2 = dist(r2,(cX,cY))
if dv0<=dv1 and dv0<=dv2:
vehicle1_xy.append((r0[0], r0[1]))
print("bullet " + str(len(bullet_xy)) + " fired from vehicle 0")
elif dv1<=dv0 and dv1<= dv2:
vehicle2_xy.append((r1[0], r1[1]))
print("bullet " + str(len(bullet_xy)) + " fired from vehicle 1")
else:
vehicle3_xy.append((r2[0], r2[1]))
print("bullet " + str(len(bullet_xy)) + " fired from vehicle 2")
#vehicle1_xy.append()
#print("v1: "+ str(vehicle1_xy), "v2: "+ str(vehicle2_xy) + "v3: "+str(vehicle3_xy))
#print(len(bullet_xy))
continue
else:
if value == bullet_xy[key]:
bullet1_xy.append((x1,y1))
dv0 = dist(r0, (cX, cY))
dv1 = dist(r1, (cX, cY))
dv2 = dist(r2, (cX, cY))
if dv0 <= dv1 and dv0 <= dv2:
if (r0[0],r0[1]) != vehicle1_xy[-1]:
vehicle1_xy.append((r0[0], r0[1]))
print("additional "+ str(len(bullet1_xy))+" fired from vehicle 0")
elif dv1 <= dv0 and dv1 <= dv2:
vehicle2_xy.append((r1[0], r1[1]))
print("additional " + str(len(bullet1_xy)) + " fired from vehicle 1")
else:
vehicle3_xy.append((r2[0], r2[1]))
print("additional " + str(len(bullet1_xy)) + " fired from vehicle 2")
continue
else:
dv0 = dist(r0, (cX, cY))
dv1 = dist((cX, cY),r1)
#print(dv1)
dv2 = dist(r2, (cX, cY))
if dv0 <= dv1 and dv0 <= dv2:
min = dv0
elif dv1 <= dv0 and dv1 <= dv2:
min = dv1
else:
min = dv2
if min <= 10 or min==dv1:
if min==dv1 and (cX==567 or r1[0]==(562 or 764 or 763 or 762 or 563 or 564)) and dv1<=31:
vehicle2_xy.append((r1[0], r1[1]))
bullet1_xy.append((x1, y1))
print("additional y vehicle 1 bullets", len(bullet1_xy), str(r1), str((cX, cY)), dv1)
elif min==dv0 and min<=15:
vehicle1_xy.append((r0[0], r0[1]))
bullet1_xy.append((x1, y1))
print("additional y vehicle 0 bullets", len(bullet1_xy), str(r0), dv0)
elif min == dv2:
vehicle3_xy.append((r2[0], r2[1]))
bullet1_xy.append((x1, y1))
print("additional y vehicle 2 bullets", len(bullet1_xy), str(r2), dv2)
continue
'''if dv0<=15 and r0[0]+r0[2]-x1<=20:
vehicle1_xy.append((r0[0], r0[1]))
bullet1_xy.append((x1, y1))
print("additional y vehicle 0 bullets", len(bullet1_xy), str(r0), dv0)
if dv1<=15 and r1[0]+r1[2]-x1<=20:
vehicle2_xy.append((r1[0], r1[1]))
bullet1_xy.append((x1, y1))
print("additional y vehicle 1 bullets", len(bullet1_xy), str(r1),str((cX,cY)), dv1)
if dv2<=10 and r2[0]+r2[2]-x1<=20:
vehicle3_xy.append((r2[0], r2[1]))
bullet1_xy.append((x1, y1))
print("additional y vehicle 2 bullets", len(bullet1_xy),str(r2), dv2)
#else:
#print("already tracked")
continue'''
image = cv2.resize(frame1, (frame_width,frame_height)) #Resizing the frame to match the output video specification
out.write(image) #Writing the frame to output
cv2.imshow("AoI_Project", frame1) #To show the output
frame1 = frame2 #Updating of frame1 to next frame
ret, frame2 = cap.read() #Frame 2 will get the next frame
if not ret: #Exit reading, if end of Video
break
if frame2.shape != (834,952,3):
break
if cv2.waitKey(40) == 27:
break
#Close windows as video ended
print(len(vehicle1_xy),len(vehicle2_xy),len(vehicle3_xy))
cv2.destroyAllWindows()
cap.release()
out.release()
| [
"[email protected]"
]
| |
1e1c3159a79488453e4810b9362f7850f72e9c90 | f68eda51246c95597def569224f3b56d4c3700e7 | /top/api/rest/SellercenterUserPermissionsGetRequest.py | a3f561db414e9ebc103b8c2d04ac8c7b445babb9 | [
"MIT",
"BSD-3-Clause"
]
| permissive | stoensin/taobao-openapi | 47de8fb29ae2d8ce47d4fce07c0ccaeaee1ef91f | 202a9df2085229838541713bd24433a90d07c7fc | refs/heads/main | 2023-07-17T02:17:51.527455 | 2021-08-25T15:08:49 | 2021-08-25T15:08:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 329 | py | '''
Created by auto_sdk on 2018.07.25
'''
from top.api.base import RestApi
class SellercenterUserPermissionsGetRequest(RestApi):
def __init__(self,domain='gw.api.taobao.com',port=80):
RestApi.__init__(self,domain, port)
self.nick = None
def getapiname(self):
return 'taobao.sellercenter.user.permissions.get'
| [
"[email protected]"
]
| |
b0da7bdba534730f35505b2301bd30a30bf8b8a2 | 26192962dc2627e7ca5f0e3b249c3fabcf52442c | /Python/AD-HOC/1196 - WERTYU.py | f1d867b9f14a29527c0d7a750ed75bcb36716f79 | []
| no_license | PierreVieira/URI | 77278ccb1724ca206ab2c12afbea1e51fa08ff73 | c1eb211c788d26b5cb9bedf5dda4147a2961fa19 | refs/heads/master | 2023-04-10T07:03:13.954639 | 2023-03-22T00:18:28 | 2023-03-22T00:18:28 | 189,321,748 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 380 | py | """
Autor: Pierre Vieira
Data da submissão: 02/02/2020 16:48:12
"""
linha = "`1234567890-=QWERTYUIOP[]\\ASDFGHJKL;'ZXCVBNM,.'"
while True:
s = ''
try:
frase = input()
except EOFError:
break
else:
for c in frase:
if c == ' ':
s += c
else:
s += linha[linha.find(c)-1]
print(s)
| [
"[email protected]"
]
| |
85faccd6ab92a6953348fec45d163ceebd22bde7 | 0ac9495255f4d6be0ad1dd5bc298bfe2e4025e5b | /app/main/views.py | 8db7addd9f3cc234c1e84626f4bf0c75c1ef2c06 | []
| no_license | Alekzandre/labs | 12eaa3d516cfb4be024fe6cf2fe89637a2a1ba16 | 293735884f3d6b2ba72b8053143d23dbf6a2e5ee | refs/heads/master | 2021-01-01T05:27:03.278545 | 2016-06-06T12:17:33 | 2016-06-06T12:17:33 | 59,002,716 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 816 | py | from flask import render_template, session, redirect, url_for, flash, request
from flask.ext.login import login_required
from . import main
from .. import db
from .forms import PicUploadForm
from ..firm.models import Firm
from ..auth.models import User, Role
import os
from werkzeug import secure_filename
@main.route('/upload', methods=['GET', 'POST'])
@login_required
def upload():
user = User.query.filter_by(id=session['user_id']).first()
completed = user.username + '.png'
form = PicUploadForm()
if form.validate_on_submit():
file = form.image.data
if file:
filename = secure_filename(file.filename)
file.save(os.path.join('./cdn', completed))
return redirect(url_for('main.index'))
return render_template('main/upload.html', form=form)
| [
"[email protected]"
]
| |
fd462ea2c658063f9780b2900c7dd1676594f3a8 | 930240227e320dff7448e05803e5f0814e2a8c1b | /Talleres/Django/Taller 3/ExEcommerce/ExEcommerce/settings.py | e77b8d86154b64630ca5c2507cb503541ea8ddac | [
"MIT"
]
| permissive | AnamaSerrato/DesarrolloWeb | e24a82d5e1c523e82dfb19c7c8a488a4e4389ca8 | 4fd61cd4684f3a30bc7a7041b1916ff5587cee5a | refs/heads/main | 2023-08-24T17:51:00.528405 | 2021-10-26T16:18:23 | 2021-10-26T16:18:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,397 | py | """
Django settings for ExEcommerce project.
Generated by 'django-admin startproject' using Django 3.0.8.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'x6bus_sx3zy9+zz^po10)ggvlzhcw+j2qvp=&wyex8nt8)4$+-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'Productos',
'rest_framework',
'rest_framework.authtoken',
'Checkout',
'Usuarios'
]
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': [
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.SessionAuthentication',
]
}
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'ExEcommerce.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'ExEcommerce.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
]
| |
a2091ffbd7f8354188a85045f97ae4c90527922a | 590b86be3ed4e9148426cb3d736045ba19f10441 | /cc_scratch/settings.py | ec6f775800c347b56d91a9b3d88b4f1da6104634 | []
| no_license | hasanbaig/cc-scratch | f8a986911332deacab4ee63b9d82e3a993b4041b | 21c24b1d28c103479372d715dd30904abbd15cec | refs/heads/master | 2021-01-15T02:24:39.940223 | 2020-03-11T17:23:59 | 2020-03-11T17:23:59 | 242,846,658 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,413 | py | """
Django settings for cc_scratch project by Hasan Baig.
Generated by 'django-admin startproject' using Django 1.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
#The directory where the cloud-copasi source code has been installed
CLOUD_COPASI_DIR = '/Users/cloudcopasi/cc-scratch'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'cloud_copasi_db', # Or path to database file if using sqlite3.
'USER': 'cloud_copasi_user', # Not used with sqlite3.
'PASSWORD': 'password', # Not used with sqlite3.
'HOST': '127.0.0.1', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
# TODO: Understand this file.
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'u2)mb#y)_@*+=@1am%#z^k3y6k2hvi5uy%jsz0b)+u8wsv62tc'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOW_NEW_REGISTRATIONS = True
ALLOWED_HOSTS = ['*']
DEFAULT_FROM_EMAIL = 'Cloud-COPASI <[email protected]>'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
#Send email notifications, e.g. job completion, job errors?
SEND_EMAILS = True
#The SMTP Host server.
EMAIL_HOST = 'localhost'
##Other mail authentication settings
#EMAIL_HOST_USER = ''
#EMAIL_HOST_PASSWORD = ''
#EMAIL_PORT =
#EMAIL_USE_TLS = True
#The site that cloud-copasi will be hosted on
HOST='subdomain.domain.com'
#added by HB
#HOST='localhost'
#Register this subdomain at https://www.google.com/recaptcha/admin/create
#To enable the registration captcha
SILENCED_SYSTEM_CHECKS = ['captcha.recaptcha_test_key_error']
RECAPTCHA_PUBLIC_KEY = 'key'
RECAPTCHA_PRIVATE_KEY = 'secret'
#Added by HB
NOCAPTCHA = True
RECAPTCHA_PROXY = {'http': 'http://127.0.0.1:8000/register', 'https': 'https://127.0.0.1:8000/register'}
RECAPTCHA_DOMAIN = 'www.recaptcha.net'
#The location of the cloud-copasi user home directory. Include the trailing slash
#HOME_DIR='/home/cloudcopasi/'
HOME_DIR='/Users/cloudcopasi/cc-scratch'
#The directory where bosco is installed. Include the trailing slash
BOSCO_DIR = '/Users/cloudcopasi/cc-scratch/bosco/'
#Dictionary containing custom environment variables for bosco, e.g. PATH,
#LD_LIBRARY_PATH etc. Generally not needed, so leave blank
BOSCO_CUSTOM_ENV = {}
#The instance type for the Master node.
#Will probably be t1.micro or m1.small
MASTER_NODE_TYPE = 'm1.small'
#Storage locations. Be sure to include the trailing slash
STORAGE_DIR = '/Users/cloudcopasi/user-files/'
KEYPAIR_FILEPATH = '/Users/cloudcopasi/instance_keypairs/'
#LOG_DIR = '/home/cloudcopasi/log/'
LOG_DIR = '/Users/cloudcopasi/log/'
#The location of the CopasiSE binary
COPASI_LOCAL_BINARY = '/Users/cloudcopasi/cc-scratch/copasi/bin/CopasiSE'
#Load balancing parameter
IDEAL_JOB_TIME = 15 #Minutes
#How often should the local bosco pool be polled
DAEMON_POLL_TYME = 30 #Seconds
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
SITE_ID = 1 #Don't change
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/New_York'
USE_TZ=True
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.path.join(CLOUD_COPASI_DIR, 'cc_scratch/web_interface/templates/static-all/')
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATICFILES_DIRS = [
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(CLOUD_COPASI_DIR, 'html5up/static'),
os.path.join(CLOUD_COPASI_DIR, 'cc_scratch/web_interface/templates/static'),
]
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
]
MIDDLEWARE = [
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'cc_scratch.urls'
WSGI_APPLICATION = 'cc_scratch.wsgi.application'
#TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
# os.path.join(CLOUD_COPASI_DIR, 'html5up'),
# os.path.join(CLOUD_COPASI_DIR, 'cloud_copasi/web_interface/templates'),
# os.path.join(CLOUD_COPASI_DIR, 'cloud_copasi/web_interface/task_plugins/plugins'),
#)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(CLOUD_COPASI_DIR, 'html5up'),
os.path.join(CLOUD_COPASI_DIR, 'cc_scratch/web_interface/templates'),
os.path.join(CLOUD_COPASI_DIR, 'cc_scratch/web_interface/task_plugins/plugins'),],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
'string_if_invalid': 'Invalid: "%s"',
'debug': DEBUG,
},
},
]
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# Application definition
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
#'django.contrib.dynamic_scraper',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
'django.contrib.admindocs',
'cc_scratch.web_interface',
'cc_scratch.django_recaptcha',
#'captcha',
'django_extensions',
]
LOGIN_REDIRECT_URL = '/'
LOGIN_URL = '/sign_in'
LOGOUT_URL = '/sign_out'
#Log levels: critical, error, warning, info, debug
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'null': {
'level': 'DEBUG',
'class': 'logging.NullHandler',
},
'console':{
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
'file' : {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': os.path.join(LOG_DIR, 'cloud-copasi.log'),
'maxBytes': 1024*1024*5, # 5 MB
'backupCount': 10,
'formatter':'verbose',
},
},
'loggers': {
'django': {
'handlers': ['file'],
'propagate': True,
'level': 'INFO',
},
'cloud_copasi': {
'handlers': ['console', 'file'],
'level': 'DEBUG',
}
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
#AUTH_PASSWORD_VALIDATORS = [
# {
# 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
# },
# {
# 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
# },
# {
# 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
# },
# {
# 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
# },
#]
| [
"[email protected]"
]
| |
9adaaa8a0991643c934d8ca118ce9b52964bcf0c | 6de78731b9626459b95718dc15398850143b024c | /CNN/Keras/predict_bigtiff.py | e5f0623974d96eaa2f45b6a13c55cf2d772a35d7 | []
| no_license | ghx1234/RSDataProcess | 08999e83b6693df161af088112d66fbad1189c4a | 098b51d4f25923968b00ac5bd40e3f2440d5b33a | refs/heads/master | 2022-12-01T05:59:29.056334 | 2020-08-14T14:55:24 | 2020-08-14T14:55:24 | 265,778,277 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,390 | py | import os
import sys
import numpy as np
from PIL import Image
import gdal
import rasterio
import tensorflow as tf
from model import *
def z_score(x):
return (x - np.mean(x)) / np.std(x, ddof=1)
def read_img(path):
with rasterio.open(path) as ds:
data = ds.read(
out_shape=(ds.count, 512, 512),
# resampling=Resampling.bilinear
)
data = z_score(data)
img = np.transpose(data, (1, 2, 0))
# (batch, H, W, B)
return img
def predict(image):
# resize to max dimension of images from training dataset
w, h, _ = image.shape
res = model.predict(np.expand_dims(image, 0))
    sess = tf.Session()
activate = tf.nn.log_softmax(res)
res = sess.run(activate)
labels = np.argmax(np.array(res).squeeze(), -1)
# remove padding and resize back to original image
labels = np.array(Image.fromarray(labels.astype('uint8')))
# labels = np.array(Image.fromarray(labels.astype('uint8')).resize((h, w)))
return labels
if __name__=='__main__':
in_folder = '/home/zhoudengji/ghx/code/predict/img'
out_folder = '/home/zhoudengji/ghx/code/predict/predict/WO'
model = mwen(pretrained_weights=None, input_shape=(None, 512, 512, 4), class_num=2)
model.load_weights('/home/zhoudengji/ghx/code/unet-master/womtfe/weights-04-0.98.hdf5')
for name in os.listdir(in_folder):
img_path = os.path.join(in_folder, name)
ds = gdal.Open(img_path)
result = gdal.GetDriverByName("GTiff").Create(os.path.join(out_folder, name),
int(np.floor(ds.RasterXSize / 512)) * 512, int(np.floor(ds.RasterYSize / 512)) * 512, 1, gdal.GDT_Byte)
        ### It would be better not to read the raster tile-by-tile like this; pixels affected by tile-edge effects should be trimmed off
for x in range(int(np.floor(ds.RasterXSize / 512))):
for y in range(int(np.floor(ds.RasterYSize / 512))):
img = ds.ReadAsArray(x * 512, y * 512, 512, 512)
img = z_score(img)
img = np.transpose(img, (1, 2, 0))
img = predict(img)
result.GetRasterBand(1).WriteArray(img, xoff=x * 512, yoff=y * 512)
        result.SetGeoTransform(ds.GetGeoTransform()) # write the affine geotransform parameters
        result.SetProjection(ds.GetProjection()) # write the projection
"[email protected]"
]
| |
9be19d47b7bad8f555e7926513eb67a94d9ebf3c | 3b6179c93316d8ad00f24095414b09e53310e15e | /_build/jupyter_execute/03_logistic_regression.py | 26e057f7757376d54ff61a8183a2f0ed92e65961 | [
"MIT"
]
| permissive | newfacade/machine-learning-notes | 37fc050544ea3e98b2fa38779e6023214d8cea62 | 1e59fe7f9b21e16151654dee888ceccc726274d3 | refs/heads/main | 2023-03-30T07:08:17.968737 | 2021-03-22T12:40:39 | 2021-03-22T12:40:39 | 350,337,169 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,781 | py | # Logistic Regression
## Regression and Classification
regression answers "how much?" or "how many?" questions, for example:
1. predict the number of dollars at which a house will be sold.
2. predict the revenue of a restaurant.
in practice, we are more often interested in classification: asking not "how much", but "which one":
1. does this email belong in the spam folder or the inbox?
2. does this image depict a donkey, a dog, a cat, or a rooster?
3. which movie is Jean most likely to watch next?
linear regression is not fit for classification, for two reasons:
1. linear regression ranges over $\mathbb{R}$, while classification labels are discrete.
2. linear regression relies on euclidean distance between labels, which would make d(class 3, class 1) > d(class 2, class 1); such an ordering often has no meaning in classification.
## Logistic Regression Model
we first focus on binary classification.
suppose dataset $D=\left \{ (x^{(1)},y^{(1)}),...,(x^{(n)},y^{(n)}) \right \} $, where $x^{(i)} \in \mathbb{R}^{d},\ y^{(i)} \in \left \{ 0, 1\right \}$.
to tackle this problem, after computing $\theta^{T}x$
we apply a function that transforms a value in $\mathbb{R}$ into a value in $[0, 1]$, and view the result as the probability of the positive class
we choose:
$$\sigma(x) = \frac{1}{1 + e^{-x}}$$
as that function; it is the so-called sigmoid function. we choose the sigmoid because:
1. it is a monotonically increasing, differentiable function that maps $\mathbb{R}$ into $[0,1]$ and is symmetric about its midpoint.
2. it has a simple form
3. its derivative is easy to compute: ${\sigma}'(x) = \sigma(x)(1 - \sigma(x))$
now the model is:
$$h_{\theta}(x) = \frac{1}{1 + exp(-\theta^{T}x)}$$
this is the logistic regression model.
additionally, in our view, we have:
$$h_{\theta}(x) = p(y=1|x)$$
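as a quick illustration, here is a minimal numpy sketch of this hypothesis (an illustrative sketch only; the helper names sigmoid and h are ours, not from any library):
import numpy as np
def sigmoid(z):
    # logistic function: maps any real number into (0, 1)
    return 1.0 / (1.0 + np.exp(-z))
def h(theta, x):
    # model output, read as p(y = 1 | x)
    return sigmoid(np.dot(theta, x))
theta = np.array([0.5, -0.25])
x = np.array([2.0, 4.0])
h(theta, x)  # a probability strictly between 0 and 1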
## Entropy
self-information $I(x)$ measures the amount of information carried by the occurrence of an event $x$.
we want $I(x)$ to satisfy:
1. $I(x) \ge 0$
2. $I(x) = 0 \text{ if }p(x)=1$ (a certain event carries no information)
3. $\text{if }p(x_{1}) > p(x_{2}) \text{, then } I(x_{1}) < I(x_{2})$
4. $I(x_{1}, x_{2}) = I(x_{1}) + I(x_{2}) \text{ for independent }x_{1},x_{2}$
this leads to $I(x) = -log\ p(x)$
while self-information measures the information of a single discrete event, entropy measures the information of a random variable:
$$
\begin{equation}
\begin{split}
H(X)=&E(I(x))\\
=&E(-log\ p(x))\\
=&-\sum_{x \in \mathcal{X}}p(x)log\ p(x)
\end{split}
\end{equation}
$$
it is exactly the expected length of an optimal encoding of $X$.
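for instance, a fair coin has entropy $log\ 2$ (1 bit with base-2 logs); a small numpy check of the formula (illustrative sketch only):
import numpy as np
def entropy(p):
    # H(X) = -sum_x p(x) log p(x), with 0 * log 0 treated as 0
    p = np.asarray(p, dtype=float)
    nz = p > 0
    return -np.sum(p[nz] * np.log2(p[nz]))
entropy([0.5, 0.5])  # 1.0 bit: the most uncertain two-outcome variable
entropy([0.9, 0.1])  # about 0.47 bits: less uncertain, so a shorter optimal code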
cross entropy $H(p, q)$ is the encoding length of $p$ by optimal encoding of $q$:
$$H(p,q)=E_{p}\left[-log\ q(x)\right] = -\sum_{x}p(x)log\ q(x)$$
for a fixed $p$, the closer $q$ is to $p$, the smaller $H(p,q)$ becomes.
so we can use $H(p,q)$ to measure how far $q$ is from $p$.
turn to our binary classification problem, for $y^{(i)} \in \left\{0,1\right\}, \hat{y}^{(i)} \in (0, 1)$, we have:
$$
H(y^{(i)}, \hat{y}^{(i)}) =
\begin{cases}
-log(\hat{y}^{(i)}),\text{ if } y^{(i)} = 1\\
-log(1 - \hat{y}^{(i)}),\text{ if } y^{(i)} = 0
\end{cases}
$$
combine the two cases:
$$H(y^{(i)}, \hat{y}^{(i)}) = -y^{(i)}log(\hat{y}^{(i)}) - (1-y^{(i)})log(1 - \hat{y}^{(i)})$$
we use cross entropy to define the loss of logistic regression model:
$$
\begin{equation}
\begin{split}
J(\theta) =& \sum_{i=1}^{n}H(y^{(i)}, \hat{y}^{(i)}) \\
=& \sum_{i=1}^{n}-y^{(i)}log(\hat{y}^{(i)}) - (1-y^{(i)})log(1 - \hat{y}^{(i)}) \\
=& \sum_{i=1}^{n}-y^{(i)}log(h_{\theta}(x^{(i)})) - (1 - y^{(i)})log(1 - h_{\theta}(x^{(i)}))
\end{split}
\end{equation}
$$
this is the so called logistic regression.
in addition, we can write cross entropy loss in matrix form:
$$J(\theta) = -y^{T}log(\sigma(X\theta)) - (1 - y)^{T}log(1 - \sigma(X\theta))$$
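before turning to the sklearn example, here is a from-scratch numpy sketch of this loss (illustrative only; cross_entropy_loss is our own helper name):
import numpy as np
def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))
def cross_entropy_loss(theta, X, y, eps=1e-12):
    # X: (n, d) design matrix, y: (n,) labels in {0, 1}, theta: (d,)
    p = sigmoid(X @ theta)        # predicted p(y = 1 | x) for every sample
    p = np.clip(p, eps, 1 - eps)  # avoid log(0)
    return -np.sum(y * np.log(p) + (1 - y) * np.log(1 - p))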
import numpy as np
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
iris = load_iris()
X = iris["data"][:, 2:]
y = (iris["target"] == 2).astype(np.int)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=1)
from sklearn.linear_model import LogisticRegression
log_reg = LogisticRegression(solver="lbfgs", random_state=42)
log_reg.fit(X_train, y_train)
from sklearn.metrics import accuracy_score
y_pred = log_reg.predict(X_test)
accuracy_score(y_test, y_pred)
## Probability Interpretation of Cross Entropy Loss
as we suppose
$$p(y=1|x) = h_{\theta}(x)$$
and
$$p(y=0|x) = 1 - h_{\theta}(x)$$
combining these two, the predicted probability of an example can be written as:
$$p(y^{(i)}|x^{(i)}) = \left [ h_{\theta}(x^{(i)}) \right ]^{y^{(i)}}\left [ 1 - h_{\theta}(x^{(i)}) \right ]^{1 - y^{(i)}} $$
log likelihood function:
$$
\begin{equation}
\begin{split}
L(\theta) &= log\prod_{i=1}^{n} \left [ h_{\theta}(x^{(i)}) \right ]^{y^{(i)}}\left [ 1 - h_{\theta}(x^{(i)}) \right ]^{1 - y^{(i)}} \\
&= \sum_{i=1}^{n}\left [y^{(i)}log\ h_{\theta}(x^{(i)}) + (1 - y^{(i)})log(1 - h_{\theta}(x^{(i)})) \right ]
\end{split}
\end{equation}
$$
maximizing the above likelihood is equivalent to minimizing the cross entropy loss:
$$J(\theta) = \sum_{i=1}^{n}-y^{(i)}log(h_{\theta}(x^{(i)})) - (1 - y^{(i)})log(1 - h_{\theta}(x^{(i)}))$$
## Update-rule
we have:
$$
\begin{equation}
\begin{split}
\frac{\partial }{\partial \theta_{j}}J(\theta ) &= \frac{\partial }{\partial \theta_{j}}\sum_{i=1}^{n}-log(\sigma(\theta^{T}x^{(i)}))y^{(i)} - log(1 - \sigma(\theta^{T}x^{(i)}))(1 - y^{(i)}) \\
&= \sum_{i=1}^{n} \left (-y^{(i)}\frac{1}{\sigma(\theta^{T}x^{(i)})} + (1 - y^{(i)})\frac{1}{1 - \sigma(\theta^{T}x^{(i)})} \right )\frac{\partial }{\partial \theta_{j}}\sigma(\theta^{T}x^{(i)})\\
&=\sum_{i=1}^{n} \left (-y^{(i)}\frac{1}{\sigma(\theta^{T}x^{(i)})} + (1 - y^{(i)})\frac{1}{1 - \sigma(\theta^{T}x^{(i)})} \right )\sigma(\theta^{T}x^{(i)})(1-\sigma(\theta^{T}x^{(i)}))\frac{\partial }{\partial \theta_{j}}\theta^{T}x^{(i)} \\
&=\sum_{i=1}^{n}(\sigma(\theta^{T}x^{(i)}) - y^{(i)})x_{j}^{(i)} \\
&=\sum_{i=1}^{n}(h_{\theta}(x^{(i)}) - y^{(i)})x_{j}^{(i)}
\end{split}
\end{equation}
$$
the same form as linear regression!
as linear regression, we have the update rule for logistic regression:
$$\theta_{j}: =\theta_{j} - \alpha\sum_{i=1}^{n} (h_{\theta }(x^{(i)}) - y^{(i)})x_{j}^{(i)} $$
combining all dimensions, we have:
$$\theta: =\theta - \alpha\sum_{i=1}^{n} (h_{\theta }(x^{(i)}) - y^{(i)})\cdot x^{(i)} $$
write in matrix form:
$$
\frac{\partial }{\partial \theta}J(\theta ) = X^{T}(\sigma(X\theta) -y)
$$
matrix form of update formula:
$$
\theta: =\theta - \alpha X^{T}(\sigma(X\theta)-\mathbf{y} )
$$
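a minimal batch gradient-descent loop implementing this update (an illustrative sketch; the learning rate and iteration count are arbitrary, and the gradient is averaged over n only to keep the step size stable):
import numpy as np
def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))
def fit_logistic(X, y, alpha=0.1, n_iters=1000):
    # X: (n, d), y: (n,) in {0, 1}; returns the fitted theta
    theta = np.zeros(X.shape[1])
    for _ in range(n_iters):
        grad = X.T @ (sigmoid(X @ theta) - y)  # X^T (sigma(X theta) - y)
        theta -= alpha * grad / len(y)         # averaged gradient step
    return theta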
## Regularization
like linear regression, we add a penalty term to $J(\theta)$ for regularization
$l2$ penalty:
$$J(\theta) := J(\theta) + \lambda \left \| \theta \right \|_{2}^{2} $$
$l1$ penalty:
$$J(\theta) := J(\theta) + \lambda \left \| \theta \right \|_{1} $$
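in scikit-learn these correspond to the penalty and C arguments of LogisticRegression, where C is the inverse of the regularization strength, e.g.:
from sklearn.linear_model import LogisticRegression
l2_reg = LogisticRegression(penalty="l2", C=1.0, solver="lbfgs")      # l2 penalty
l1_reg = LogisticRegression(penalty="l1", C=1.0, solver="liblinear")  # l1 penalty needs a solver that supports it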
## Softmax Regression
now we turn to multi-class classification.
we start off with a simple image classification problem: each input consists of a $2\times{2}$ grayscale image, each pixel represented by a scalar, giving us features $\left\{x_{1},x_{2},x_{3}, x_{4}\right\}$. assume each image belongs to exactly one of the categories "cat", "chicken" and "dog".
we have a nice way to represent categorical data: the one-hot encoding, i.e. a vector with as many components as we have categories, where the component corresponding to a particular instance's category is 1 and all others are 0.
for our problem, "cat" is represented by $(1,0,0)$, "chicken" by $(0, 1, 0)$, "dog" by $(0, 0, 1)$.
to estimate the conditional probabilities of all classes, we need a model with multiple outputs, one per class.
to address classification with linear models, we need as many affine functions as we have outputs:
$$o_{1} = x_{1}w_{11} + x_{2}w_{12} + x_{3}w_{13} + x_{4}w_{14}$$
$$o_{2} = x_{1}w_{21} + x_{2}w_{22} + x_{3}w_{23} + x_{4}w_{24}$$
$$o_{3} = x_{1}w_{31} + x_{2}w_{32} + x_{3}w_{33} + x_{4}w_{34}$$
depicted as:

we would like the output $\hat{y_{j}}$ to be interpreted as the probability that a given item belongs to class $j$.
to transform our current outputs $\left\{o_{1},o_{2},o_{3}\right\}$ into a probability distribution $\left\{\hat{y}_{1},\hat{y}_{2},\hat{y}_{3}\right\}$, we use the softmax operation:
$$\hat{y}_{j} = \frac{exp(o_{j})}{\sum_{k}exp(o_{k})}$$
when predicting:
$$\text{predicted class} = \underset{j}{argmax}\ \hat{y}_{j} = \underset{j}{argmax}\ o_{j}$$
$\hat{y}_{j}$ is still needed when computing the loss.
as logistic regression, we use the cross entropy loss:
$$H(y,\hat{y}) = -\sum_{j}y_{j}log\ \hat{y}_{j} = -log\ \hat{y}_{\text{category of }y}$$
this completes the construction of softmax regression.
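a minimal numpy sketch of the softmax forward pass and its loss (illustrative only, independent of the sklearn example below):
import numpy as np
def softmax(o):
    e = np.exp(o - np.max(o))  # subtract the max for numerical stability
    return e / e.sum()
def softmax_cross_entropy(o, y_onehot, eps=1e-12):
    # o: raw scores, one per class; y_onehot: one-hot label vector
    y_hat = np.clip(softmax(o), eps, 1.0)
    return -np.sum(y_onehot * np.log(y_hat))
o = np.array([2.0, 0.5, -1.0])                 # scores for cat / chicken / dog
softmax(o)                                     # predicted distribution
softmax_cross_entropy(o, np.array([1, 0, 0]))  # loss when the true class is "cat"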
y = iris["target"]
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=1)
softmax_reg = LogisticRegression(multi_class="multinomial", solver="lbfgs", C=10)
softmax_reg.fit(X_train, y_train)
y_pred = softmax_reg.predict(X_test)
accuracy_score(y_test, y_pred)
| [
"[email protected]"
]
| |
c349129df7bbb871844f26ccee9298caec554b7d | dbe81a543b845ebda34e7bb41b4021cb40aa9288 | /usually_coding/use_saver.py | 880003702fd9c6be1ec545a43d7ac6f5bbd2b3e8 | []
| no_license | whitenightwu/tensorflow_tools | 264ede03acc792a64d1fc86d99e60aa92e5dfa02 | ba25e763f7cce1f11ed629bab5915e585a3ceae3 | refs/heads/master | 2020-03-10T21:09:40.086801 | 2020-01-20T06:11:58 | 2020-01-20T06:11:58 | 129,586,948 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,770 | py | #!/usr/local/bin/python3
## (C) COPYRIGHT Ingenic Limited.
## ALL RIGHTS RESERVED
##
## File : use_saver.py
## Authors : ydwu@ydwu-white
## Create Time: 2019-04-28:11:42:22
## Description:
##
##
Transfer learning requires a network that has been pre-trained on some other dataset; the pre-trained parameters are then fine-tuned on the new task. Sometimes, however, only part of the pre-trained weights is needed. These notes show how to load exactly the weights you want in TensorFlow.
1) way one
When loading network weights in TensorFlow, calling tf.train.Saver().restore(sess, 'ckpt') directly loads all of the weights. If you only need the weights of the first few layers, or want to keep or drop a few specific layers, you can use the following approach:
var = tf.global_variables()
var_to_restore = [val for val in var if 'conv1' in val.name or 'conv2' in val.name]
saver = tf.train.Saver(var_to_restore )
saver.restore(sess, os.path.join(model_dir, model_name))
var_to_init = [val for val in var if 'conv1' not in val.name and 'conv2' not in val.name]  # 'and', so the restored conv1/conv2 variables are excluded
sess.run(tf.variables_initializer(var_to_init))  # initialize only the variables that were not restored
This reads only the parameters of the two convolution layers from the ckpt file, provided that your first two layers have the same structure and names as those defined in the ckpt. Swapping var_to_restore and var_to_init loads the weights whose names do not contain conv1/conv2 instead.
2) way two
Using tensorflow slim to selectively restore weights is even more convenient:
exclude = ['layer1', 'layer2']
variables_to_restore = slim.get_variables_to_restore(exclude=exclude)
saver = tf.train.Saver(variables_to_restore)
saver.restore(sess, os.path.join(model_dir, model_name))
This restores everything from the ckpt file except the 'layer1' and 'layer2' weights.
| [
"[email protected]"
]
| |
6f1f0a38de9166e24a27fd3b68075083c9bd8cc4 | 8cde629c9955c97ef083c026556bc5463720fd76 | /likelion/comment/models.py | 55ba940495b372937fcd0118068127281d38cbf2 | []
| no_license | hyk1993/yongkis | 975f55377c646741e9bc21c6f5d0aab3cecc81e3 | 7740013cca0258126e1dd7d6cf97c92317df3717 | refs/heads/master | 2020-05-20T21:05:10.133258 | 2019-05-09T07:58:55 | 2019-05-09T07:58:55 | 185,752,302 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 664 | py | from django.db import models
from django.utils import timezone
# Create your models here.
class Blog(models.Model):
title = models.CharField(max_length=200)
pub_date = models.DateTimeField('date published')
body = models.TextField()
def __str__(self):
return self.title
def summary(self):
return self.body[:100]
class Comment(models.Model):
post = models.ForeignKey('Blog', on_delete=models.CASCADE, related_name='comments')
author = models.CharField(max_length=200)
text = models.TextField()
created_date = models.DateTimeField(default=timezone.now)
def __str__(self):
return self.text | [
"[email protected]"
]
| |
d15b397388bcb5efae9ec644b1c9a1d3c8f1413c | dbe9f1169cb86835e41923e85a0498af0a312288 | /code/main.py | b9e9d4506b5acd1edc3c66bd4cbe2f393af91168 | []
| no_license | JzjSunshine/BlockChain | 049f178288cd2dc4d070e3152d88f87fe560e561 | de8d4f49b418af2c9df8a6809e7a769a46b24d8a | refs/heads/master | 2023-02-21T08:33:33.813692 | 2021-01-25T07:29:20 | 2021-01-25T07:29:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,241 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from login import Ui_login
from signup import Ui_signup
from company import Ui_company
from company_sign import Ui_company_sign
from company_transfer import Ui_company_transfer
from company_finance import Ui_company_finance
from bank import Ui_bank
import os
import sys
import csv
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QMessageBox,QMainWindow, QTableWidgetItem
from client.contractnote import ContractNote
from client.bcosclient import BcosClient
from client.datatype_parser import DatatypeParser
from client.common.compiler import Compiler
from client.bcoserror import BcosException, BcosError
from client_config import client_config
from eth_utils import to_checksum_address
from eth_utils.hexadecimal import encode_hex
from eth_account.account import Account
client = BcosClient()
# load the abi definition from file
abi_file = "contracts/Account.abi"
data_parser = DatatypeParser()
data_parser.load_abi_file(abi_file)
contract_abi = data_parser.contract_abi
address = '7faff65df217dee1b056d50b27c741a2bbfa2e53'
cur_user = ''
def hex_to_signed(source):
if not isinstance(source, str):
raise ValueError("string type required")
if 0 == len(source):
raise ValueError("string is empty")
source = source[2:]
sign_bit_mask = 1 << (len(source)*4-1)
other_bits_mask = sign_bit_mask - 1
value = int(source, 16)
return -(value & sign_bit_mask) | (value & other_bits_mask)
class login(QMainWindow, Ui_login):
def __init__(self):
super(login, self).__init__()
self.setupUi(self)
self.setWindowTitle("login")
self.pushButton_signin.clicked.connect(self.signin_clicked)
self.pushButton_signup.clicked.connect(self.signup_clicked)
def signin_clicked(self):
global cur_user
cinfo_list = []
with open('comp_info.csv', 'r', encoding = 'utf-8') as f:
csv_file = csv.reader(f)
for item in csv_file:
cinfo_list.append(item)
name = self.lineEdit_name.text()
password = self.lineEdit_pwd.text()
args = [name]
ret_tuple = client.call(address, contract_abi, "select_company", args)
if [name, password] in cinfo_list and ret_tuple[0] == 1:
cur_user = name
if name == 'bank':
bank_window.show()
bank_window.refresh()
else:
company_window.show()
company_window.refresh()
else:
QMessageBox.information(self,'Hint','Wrong user name or password!', QMessageBox.Ok)
def signup_clicked(self):
signup_window.show()
class signup(QMainWindow, Ui_signup):
def __init__(self):
super(signup, self).__init__()
self.setupUi(self)
self.setWindowTitle("sign up")
self.pushButton_signup.clicked.connect(self.signup_clicked)
self.pushButton_cancel.clicked.connect(self.close)
def signup_clicked(self):
name = self.lineEdit_name.text()
pwd = self.lineEdit_pwd.text()
args = [name]
receipt = client.sendRawTransactionGetReceipt(address, contract_abi, "insert_company", args)
if hex_to_signed(receipt['output']) == 0:
QMessageBox.information(self,'Error','This company is existed', QMessageBox.Ok)
elif hex_to_signed(receipt['output']) == 1:
with open('comp_info.csv', 'a', encoding = 'utf-8') as f:
csv_writer = csv.writer(f)
csv_writer.writerow([name, pwd])
QMessageBox.information(self,'Hint','Successfully sign up!', QMessageBox.Ok)
self.close()
class bank(QMainWindow, Ui_bank):
def __init__(self):
super(bank, self).__init__()
self.setupUi(self)
self.setWindowTitle("bank")
self.pushButton_confirm.clicked.connect(self.confirm_clicked)
self.pushButton_refuse.clicked.connect(self.refuse_clicked)
self.pushButton.clicked.connect(self.refresh)
def refresh(self):
global cur_user
for i in range(self.tableWidget.rowCount()):
self.tableWidget.removeRow(i)
self.tableWidget.setRowCount(0)
args = [cur_user, 1]
ret_tuple = client.call(address, contract_abi, "select", args)
for i in range(len(ret_tuple[0])):
row = self.tableWidget.rowCount()
self.tableWidget.setRowCount(row + 1)
self.tableWidget.setItem(row, 0, QTableWidgetItem(ret_tuple[0][i]))
self.tableWidget.setItem(row, 1, QTableWidgetItem(ret_tuple[1][i]))
self.tableWidget.setItem(row, 2, QTableWidgetItem(str(ret_tuple[2][i])))
self.tableWidget.setItem(row, 3, QTableWidgetItem(str(ret_tuple[3][i])))
self.tableWidget.setItem(row, 4, QTableWidgetItem(ret_tuple[4][i]))
args = [cur_user, 2]
ret_tuple = client.call(address, contract_abi, "select", args)
for i in range(len(ret_tuple[0])):
row = self.tableWidget.rowCount()
self.tableWidget.setRowCount(row + 1)
self.tableWidget.setItem(row, 0, QTableWidgetItem(ret_tuple[0][i]))
self.tableWidget.setItem(row, 1, QTableWidgetItem(ret_tuple[1][i]))
self.tableWidget.setItem(row, 2, QTableWidgetItem(str(ret_tuple[2][i])))
self.tableWidget.setItem(row, 3, QTableWidgetItem(str(ret_tuple[3][i])))
self.tableWidget.setItem(row, 4, QTableWidgetItem(ret_tuple[4][i]))
def confirm_clicked(self):
if self.tableWidget.selectionModel().hasSelection():
QMessageBox.information(self,'Hint','Confirm successfully!', QMessageBox.Ok)
else:
QMessageBox.information(self,'Hint','Please select a receipt.', QMessageBox.Ok)
self.refresh()
def refuse_clicked(self):
if self.tableWidget.selectionModel().hasSelection():
row = self.tableWidget.currentRow()
from_ = self.tableWidget.item(row, 0).text()
to = self.tableWidget.item(row, 1).text()
tot_amount = self.tableWidget.item(row, 2).text()
cur_amount = self.tableWidget.item(row, 3).text()
deadline = self.tableWidget.item(row, 4).text()
args = [from_, to, int(tot_amount), int(cur_amount), deadline]
receipt = client.sendRawTransactionGetReceipt(address, contract_abi, "remove", args)
QMessageBox.information(self,'Hint','Refuse successfully!', QMessageBox.Ok)
else:
QMessageBox.information(self,'Hint','Please select a receipt.', QMessageBox.Ok)
self.refresh()
class company(QMainWindow, Ui_company):
def __init__(self):
super(company, self).__init__()
self.setupUi(self)
self.setWindowTitle("company")
self.pushButton_sign.clicked.connect(self.sign_clicked)
self.pushButton_transfer.clicked.connect(self.transfer_clicked)
self.pushButton_finance.clicked.connect(self.finance_clicked)
self.pushButton_pay.clicked.connect(self.pay_clicked)
self.pushButton.clicked.connect(self.refresh)
self.refresh()
def refresh(self):
global cur_user
for i in range(self.tableWidget.rowCount()):
self.tableWidget.removeRow(i)
self.tableWidget.setRowCount(0)
args = [cur_user, 1]
ret_tuple = client.call(address, contract_abi, "select", args)
for i in range(len(ret_tuple[0])):
row = self.tableWidget.rowCount()
self.tableWidget.setRowCount(row + 1)
self.tableWidget.setItem(row, 0, QTableWidgetItem(ret_tuple[0][i]))
self.tableWidget.setItem(row, 1, QTableWidgetItem(ret_tuple[1][i]))
self.tableWidget.setItem(row, 2, QTableWidgetItem(str(ret_tuple[2][i])))
self.tableWidget.setItem(row, 3, QTableWidgetItem(str(ret_tuple[3][i])))
self.tableWidget.setItem(row, 4, QTableWidgetItem(ret_tuple[4][i]))
args = [cur_user, 2]
ret_tuple = client.call(address, contract_abi, "select", args)
for i in range(len(ret_tuple[0])):
row = self.tableWidget.rowCount()
self.tableWidget.setRowCount(row + 1)
self.tableWidget.setItem(row, 0, QTableWidgetItem(ret_tuple[0][i]))
self.tableWidget.setItem(row, 1, QTableWidgetItem(ret_tuple[1][i]))
self.tableWidget.setItem(row, 2, QTableWidgetItem(str(ret_tuple[2][i])))
self.tableWidget.setItem(row, 3, QTableWidgetItem(str(ret_tuple[3][i])))
self.tableWidget.setItem(row, 4, QTableWidgetItem(ret_tuple[4][i]))
def sign_clicked(self):
company_sign_window.show()
company_sign_window.refresh()
self.refresh()
def transfer_clicked(self):
company_transfer_window.show()
company_transfer_window.refresh()
self.refresh()
def finance_clicked(self):
company_finance_window.show()
company_finance_window.refresh()
def pay_clicked(self):
if self.tableWidget.selectionModel().hasSelection():
row = self.tableWidget.currentRow()
from_ = self.tableWidget.item(row, 0).text()
to = self.tableWidget.item(row, 1).text()
tot_amount = self.tableWidget.item(row, 2).text()
cur_amount = self.tableWidget.item(row, 3).text()
deadline = self.tableWidget.item(row, 4).text()
args = [from_, to, int(tot_amount), int(cur_amount), deadline]
receipt = client.sendRawTransactionGetReceipt(address, contract_abi, "pay", args)
QMessageBox.information(self,'Hint','Pay successfully!', QMessageBox.Ok)
else:
QMessageBox.information(self,'Hint','Please select a receipt.', QMessageBox.Ok)
self.refresh()
class company_sign(QMainWindow, Ui_company_sign):
def __init__(self):
super(company_sign, self).__init__()
self.setupUi(self)
self.setWindowTitle("company_sign")
self.pushButton_sign.clicked.connect(self.sign_clicked)
self.pushButton_cancel.clicked.connect(self.close)
self.pushButton.clicked.connect(self.refresh)
def refresh(self):
global cur_user
for i in range(self.tableWidget.rowCount()):
self.tableWidget.removeRow(i)
self.tableWidget.setRowCount(0)
args = [cur_user, 1]
ret_tuple = client.call(address, contract_abi, "select", args)
for i in range(len(ret_tuple[0])):
row = self.tableWidget.rowCount()
self.tableWidget.setRowCount(row + 1)
self.tableWidget.setItem(row, 0, QTableWidgetItem(ret_tuple[0][i]))
self.tableWidget.setItem(row, 1, QTableWidgetItem(ret_tuple[1][i]))
self.tableWidget.setItem(row, 2, QTableWidgetItem(str(ret_tuple[2][i])))
self.tableWidget.setItem(row, 3, QTableWidgetItem(str(ret_tuple[3][i])))
self.tableWidget.setItem(row, 4, QTableWidgetItem(ret_tuple[4][i]))
args = [cur_user, 2]
ret_tuple = client.call(address, contract_abi, "select", args)
for i in range(len(ret_tuple[0])):
row = self.tableWidget.rowCount()
self.tableWidget.setRowCount(row + 1)
self.tableWidget.setItem(row, 0, QTableWidgetItem(ret_tuple[0][i]))
self.tableWidget.setItem(row, 1, QTableWidgetItem(ret_tuple[1][i]))
self.tableWidget.setItem(row, 2, QTableWidgetItem(str(ret_tuple[2][i])))
self.tableWidget.setItem(row, 3, QTableWidgetItem(str(ret_tuple[3][i])))
self.tableWidget.setItem(row, 4, QTableWidgetItem(ret_tuple[4][i]))
def sign_clicked(self):
global cur_user
to = self.lineEdit_to.text()
amount = self.lineEdit_amount.text()
deadline = self.dateEdit.date().toString('yyyy-MM-dd')
args = [cur_user, to, int(amount), deadline]
receipt = client.sendRawTransactionGetReceipt(address, contract_abi, 'sign', args)
if hex_to_signed(receipt['output']) == 0:
QMessageBox.information(self,'Error','Fail!', QMessageBox.Ok)
else:
QMessageBox.information(self,'Hint','Sign successfully!', QMessageBox.Ok)
self.close()
class company_transfer(QMainWindow, Ui_company_transfer):
def __init__(self):
super(company_transfer, self).__init__()
self.setupUi(self)
self.setWindowTitle("company_transfer")
self.pushButton_transfer.clicked.connect(self.transfer_clicked)
self.pushButton_cancel.clicked.connect(self.close)
self.pushButton.clicked.connect(self.refresh)
def refresh(self):
global cur_user
for i in range(self.tableWidget.rowCount()):
self.tableWidget.removeRow(i)
self.tableWidget.setRowCount(0)
args = [cur_user, 1]
ret_tuple = client.call(address, contract_abi, "select", args)
for i in range(len(ret_tuple[0])):
row = self.tableWidget.rowCount()
self.tableWidget.setRowCount(row + 1)
self.tableWidget.setItem(row, 0, QTableWidgetItem(ret_tuple[0][i]))
self.tableWidget.setItem(row, 1, QTableWidgetItem(ret_tuple[1][i]))
self.tableWidget.setItem(row, 2, QTableWidgetItem(str(ret_tuple[2][i])))
self.tableWidget.setItem(row, 3, QTableWidgetItem(str(ret_tuple[3][i])))
self.tableWidget.setItem(row, 4, QTableWidgetItem(ret_tuple[4][i]))
args = [cur_user, 2]
ret_tuple = client.call(address, contract_abi, "select", args)
for i in range(len(ret_tuple[0])):
row = self.tableWidget.rowCount()
self.tableWidget.setRowCount(row + 1)
self.tableWidget.setItem(row, 0, QTableWidgetItem(ret_tuple[0][i]))
self.tableWidget.setItem(row, 1, QTableWidgetItem(ret_tuple[1][i]))
self.tableWidget.setItem(row, 2, QTableWidgetItem(str(ret_tuple[2][i])))
self.tableWidget.setItem(row, 3, QTableWidgetItem(str(ret_tuple[3][i])))
self.tableWidget.setItem(row, 4, QTableWidgetItem(ret_tuple[4][i]))
def transfer_clicked(self):
from_ = self.lineEdit_from.text()
to = self.lineEdit_to.text()
tot_a = self.lineEdit_total_a.text()
cur_a = self.lineEdit_cur_a.text()
trans_a = self.lineEdit_trans_a.text()
deadline = self.dateEdit.date().toString('yyyy-MM-dd')
args = [from_, cur_user, to, int(tot_a), int(cur_a), int(trans_a), deadline]
receipt = client.sendRawTransactionGetReceipt(address, contract_abi, 'transfer', args)
if hex_to_signed(receipt['output']) == -1:
QMessageBox.information(self,'Error','Fail!Transfer_amount is more than cur_amount.', QMessageBox.Ok)
elif hex_to_signed(receipt['output']) == 0:
QMessageBox.information(self,'Error','Fail!', QMessageBox.Ok)
else:
QMessageBox.information(self,'Hint','Transfer successfully!', QMessageBox.Ok)
self.close()
class company_finance(QMainWindow, Ui_company_finance):
def __init__(self):
super(company_finance, self).__init__()
self.setupUi(self)
self.setWindowTitle("company_finance")
self.pushButton_apply.clicked.connect(self.apply_clicked)
self.pushButton_cancel.clicked.connect(self.close)
self.pushButton.clicked.connect(self.refresh)
def refresh(self):
global cur_user
for i in range(self.tableWidget.rowCount()):
self.tableWidget.removeRow(i)
self.tableWidget.setRowCount(0)
args = [cur_user, 1]
ret_tuple = client.call(address, contract_abi, "select", args)
for i in range(len(ret_tuple[0])):
row = self.tableWidget.rowCount()
self.tableWidget.setRowCount(row + 1)
self.tableWidget.setItem(row, 0, QTableWidgetItem(ret_tuple[0][i]))
self.tableWidget.setItem(row, 1, QTableWidgetItem(ret_tuple[1][i]))
self.tableWidget.setItem(row, 2, QTableWidgetItem(str(ret_tuple[2][i])))
self.tableWidget.setItem(row, 3, QTableWidgetItem(str(ret_tuple[3][i])))
self.tableWidget.setItem(row, 4, QTableWidgetItem(ret_tuple[4][i]))
args = [cur_user, 2]
ret_tuple = client.call(address, contract_abi, "select", args)
for i in range(len(ret_tuple[0])):
row = self.tableWidget.rowCount()
self.tableWidget.setRowCount(row + 1)
self.tableWidget.setItem(row, 0, QTableWidgetItem(ret_tuple[0][i]))
self.tableWidget.setItem(row, 1, QTableWidgetItem(ret_tuple[1][i]))
self.tableWidget.setItem(row, 2, QTableWidgetItem(str(ret_tuple[2][i])))
self.tableWidget.setItem(row, 3, QTableWidgetItem(str(ret_tuple[3][i])))
self.tableWidget.setItem(row, 4, QTableWidgetItem(ret_tuple[4][i]))
def apply_clicked(self):
finance_amount = self.lineEdit.text()
deadline = self.dateEdit.date().toString('yyyy-MM-dd')
args = [cur_user, int(finance_amount), deadline]
receipt = client.sendRawTransactionGetReceipt(address, contract_abi, 'finance', args)
if hex_to_signed(receipt['output']) == 0:
QMessageBox.information(self,'Error','Fail!', QMessageBox.Ok)
else:
QMessageBox.information(self,'Hint','Apply successfully!', QMessageBox.Ok)
self.close()
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
login_window = login()
signup_window = signup()
bank_window = bank()
company_window = company()
company_sign_window = company_sign()
company_transfer_window = company_transfer()
company_finance_window = company_finance()
login_window.show()
app.exec_()
client.finish() | [
"[email protected]"
]
| |
dd639edaad50c660c763ab08ed15d4fd9b9e9c6a | dec2da908901b176e2e10ca3677f36346ddceee7 | /cvpl-users/users/api.py | 8a29e2a71ffdd192cf3077d18cd45038d64b64e6 | [
"Apache-2.0"
]
| permissive | robinsax/canvas-plugin-multirepo | e382d107a3f32428722e528410db9da3117e2518 | 20fd6a3cc42af5f2cde73e3b100d3edeb4e50c01 | refs/heads/master | 2020-03-31T11:14:29.159786 | 2018-10-09T01:36:35 | 2018-10-09T01:36:35 | 152,168,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,660 | py | # coding: utf-8
'''
Users API.
'''
import canvas as cv
import canvas.ext as cve
from . import User, _edit_approver
from .auth import authorize, flush_auth
from .authz import require_admin, require_anon, require_user
@cv.endpoint('/api/auth')
class AuthEndpoint:
@require_anon
def on_post(self, context):
request, session = context[:2]
user = session.query(User, User.email == request[('email', str)], one=True)
if not user or not user.check_password(request[('password', str)]):
raise cv.ValidationErrors(summary='Incorrect username or password')
authorize(user, context)
return cv.create_json('success')
@require_user
def on_delete(self, context):
flush_auth(context)
return cv.create_json('success')
@cv.endpoint('/api/users')
class UsersCollection:
@require_admin
def on_get(self, context):
query = True
for key, value in context.query.items():
if not hasattr(User, key):
raise cv.ValidationErrors({key: 'Invalid key'})
column = getattr(User, key)
value = column.cast(value)
query = (column == value) & query
users = context.session.query(User, query)
return cv.create_json('success', cv.dictize(users))
# TODO: Auth check.
def on_put(self, context):
request, session = context[:2]
user = User()
for key, value in context.request.items():
if key not in User.__table__.columns:
raise cv.ValidationErrors({key: 'Invalid key.'})
if key == 'password':
user.set_password(value)
else:
setattr(user, key, value)
session.save(user).commit()
if not context.user:
authorize(user, context)
return cv.create_json('success', {
'created_id': user.id
})
# TODO: Use unpacking.
@cv.endpoint('/api/users/<id>')
class UserInstance:
@require_user
def on_get(self, context):
if not context.user.is_admin and not context.user.id == context.route.id:
raise cv.Unauthorized("You don't have access to that user's information")
user = User.get(context.route.id, context.session)
if not user:
raise cv.NotFound(context.route)
return cv.create_json('success', user)
@require_user
def on_put(self, context):
if not context.user.is_admin and not context.user.id == context.route.id:
raise cv.Unauthorized("You don't have permission to edit that user's information")
user = User.get(context.route.id, context.session)
_edit_approver(user, context)
for key, value in context.request.items():
if not hasattr(User, key):
raise cv.ValidationErrors({key: 'Invalid key'})
column = getattr(User, key)
value = column.cast(value)
setattr(user, key, value)
context.session.commit()
return cv.create_json('success')
| [
"[email protected]"
]
| |
238a9c00d0ee9241b740aa701da20a49250c30a6 | e2d1036d29aa0cb2b46a2be6bba91a1c5d5556aa | /Self_Driving/train_and_val_predictor_single.py | e78f4a5f3db9afeca4b4ca2227c502d16a57b727 | []
| no_license | Liuzy0908/SB3-Carla | 79eaad128ba649c202bee27dded6e240e76863ac | f9cfb9108463ed15aee81c36abdb4eb87c15be6c | refs/heads/main | 2023-08-17T02:04:46.085064 | 2021-02-14T04:58:32 | 2021-02-14T04:58:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,540 | py | import numpy as np
import torch
import argparse
import os.path as osp
import torch.nn as nn
from models.model_predictor_single_resnet18 import Predictor_single
from dataset.sem_predictor_single_roadline import Sem_predictor_single
import wandb
argparser = argparse.ArgumentParser(description=__doc__)
argparser.add_argument(
'--lr',
type=float,
default=1e-4,
help='learning rate (default: 1e-4)')
argparser.add_argument(
'--wandb-username',
type=str,
help='account username of wandb')
argparser.add_argument(
'--wandb-project',
type=str,
help='project name of wandb')
argparser.add_argument(
'--dataset-dir',
type=str,
help='relative directory of training dataset')
argparser.add_argument(
'--save-dir',
type=str,
help='relative directory to save the weight files')
argparser.add_argument(
'--train-batch',
type=int,
default=64,
help='batch size for training')
argparser.add_argument(
'--test-batch',
type=int,
default=64,
help='batch size for validation')
argparser.add_argument(
'--num-epochs',
type=int,
default=20,
help='number of epochs to train')
args = argparser.parse_args()
wandb.init(entity=args.wandb_username, project=args.wandb_project)
config = wandb.config
config.batch_size = args.train_batch
config.test_batch_size = args.test_batch
config.epochs = args.num_epochs
config.lr = args.lr
config.log_interval = 10
all_sem_cls = [7]
train_dataset = Sem_predictor_single(args.dataset_dir, all_sem_cls, 'train')
train_loader = torch.utils.data.DataLoader(
train_dataset, batch_size=config.batch_size, shuffle=True,
num_workers=10, pin_memory=True, sampler=None)
test_dataset = Sem_predictor_single(args.dataset_dir, all_sem_cls, 'test')
test_loader = torch.utils.data.DataLoader(
test_dataset, batch_size=config.test_batch_size, shuffle=False,
num_workers=6, pin_memory=True, sampler=None)
lmbda = lambda epoch: 0.95 if epoch < 10 else 0.9
models = []
optimizers = []
for sem_id in range(13):
if sem_id not in all_sem_cls:
models.append(None)
optimizers.append(None)
else:
model = Predictor_single().cuda()
wandb.watch(model, log='all')
models.append(model)
optimizer = torch.optim.Adam(model.parameters(), args.lr)
optimizers.append(optimizer)
scheduler = torch.optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lmbda)
def train(epoch, train_loader, models):
for model in models:
if model:
model.train()
keep_ratio = 0.99
ave_losses = {}
for sem_id in range(13):
ave_losses[sem_id] = 0
for i, (image, label) in enumerate(train_loader, 1):
for sem_id in all_sem_cls:
image_iter = image[sem_id].cuda() # (B, 1, 48, 48)
target = label[sem_id].cuda() # (B, 1)
pred_dis = models[sem_id](image_iter).cuda() # (B, 1)
loss_fn_dis = nn.MSELoss().cuda()
loss = loss_fn_dis(pred_dis, target)
optimizers[sem_id].zero_grad()
loss.backward()
optimizers[sem_id].step()
if i == 1:
ave_losses[sem_id] = loss
else:
ave_losses[sem_id] = ave_losses[sem_id] * keep_ratio + loss * (1 - keep_ratio)
if i % 50 == 1:
print('epoch {}, {}/{}, total_loss={:.4f}'
.format(epoch, i, len(train_loader), sum(ave_losses.values())))
for sem_id in all_sem_cls:
wandb.log({'train loss %02d' % sem_id: ave_losses[sem_id]})
def test(epoch, test_loader, models):
print('start validation')
for model in models:
if model:
model.eval()
with torch.no_grad():
ave_losses = {}
for sem_id in range(13):
ave_losses[sem_id] = 0
example_images = []
for i, (image, label) in enumerate(test_loader, 1):
for sem_id in all_sem_cls:
image_iter = image[sem_id].cuda() # (B, 1, 48, 48)
target = label[sem_id].cuda() # (B, 1)
pred_dis = models[sem_id](image_iter).cuda() # (B, 1)
loss_fn_dis = nn.MSELoss().cuda()
loss = loss_fn_dis(pred_dis, target)
if i == 1:
ave_losses[sem_id] = loss
else:
ave_losses[sem_id] = ave_losses[sem_id] * (i - 1) / i + loss * 1 / i
if i == 1:
if sem_id == 12:
for j in range(len(image[12])):
example_images.append(wandb.Image(image[12][j], caption="Pred: {}, Truth: {}".format(pred_dis[j], target[j])))
print('batch', i, '/', len(test_loader))
wandb.log({'Examples': example_images})
for sem_id in all_sem_cls:
wandb.log({'test loss %02d' % sem_id: ave_losses[sem_id]})
if __name__ == "__main__":
test(0, test_loader, models)
for epoch in range(1, config.epochs+1):
train(epoch, train_loader, models)
test(epoch, test_loader, models)
scheduler.step()
save_path = args.save_dir
for sem_id in all_sem_cls:
torch.save(models[sem_id].state_dict(), osp.join(save_path, str(sem_id), 'epoch-%02d.pth' % (epoch)))
wandb.save(osp.join(save_path, str(sem_id), 'epoch-%02d.pth' % (epoch)))
| [
"[email protected]"
]
| |
f95801b8c151fe6313c43fd96a03a730109b0539 | 2341c90db6d26d5bf0039a87d06e41bf3bebac5f | /src/action_classification/ucf_labels.py | 59a44668fd8ded64c3356024bd1b11a3915c878f | [
"Apache-2.0"
]
| permissive | ziliaogithub/action_recognition_ros | 96ca418b838e0671e16eba4e681feb934e54e4ce | f203d81ed31c2ad6630bce06d4b130e24bff73b9 | refs/heads/master | 2020-03-22T21:17:21.882072 | 2017-10-11T07:19:42 | 2017-10-11T07:19:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,639 | py | #!/usr/bin/env python
labels = ['Apply Eye Makeup', 'Apply Lipstick', 'Archery', 'Baby Crawling', 'Balance Beam', 'Band Marching', 'Baseball Pitch', 'Basketball Shooting', 'Basketball Dunk', 'Bench Press', 'Biking', 'Billiards Shot', 'Blow Dry Hair', 'Blowing Candles', 'Body Weight Squats', 'Bowling', 'Boxing Punching Bag', 'Boxing Speed Bag', 'Breaststroke', 'Brushing Teeth', 'Clean and Jerk', 'Cliff Diving', 'Cricket Bowling', 'Cricket Shot', 'Cutting In Kitchen', 'Diving', 'Drumming', 'Fencing', 'Field Hockey Penalty', 'Floor Gymnastics', 'Frisbee Catch', 'Front Crawl', 'Golf Swing', 'Haircut', 'Hammer Throw', 'Hammering', 'Handstand Pushups', 'Handstand Walking', 'Head Massage', 'High Jump', 'Horse Race', 'Horse Riding', 'Hula Hoop', 'Ice Dancing', 'Javelin Throw', 'Juggling Balls', 'Jump Rope', 'Jumping Jack', 'Kayaking', 'Knitting', 'Long Jump', 'Lunges', 'Military Parade', 'Mixing Batter', 'Mopping Floor', 'Nun chucks', 'Parallel Bars', 'Pizza Tossing', 'Playing Guitar', 'Playing Piano', 'Playing Tabla', 'Playing Violin', 'Playing Cello', 'Playing Daf', 'Playing Dhol', 'Playing Flute', 'Playing Sitar', 'Pole Vault', 'Pommel Horse', 'Pull Ups', 'Punch', 'Push Ups', 'Rafting', 'Rock Climbing Indoor', 'Rope Climbing', 'Rowing', 'Salsa Spins', 'Shaving Beard', 'Shotput', 'Skate Boarding', 'Skiing', 'Skijet', 'Sky Diving', 'Soccer Juggling', 'Soccer Penalty', 'Still Rings', 'Sumo Wrestling', 'Surfing', 'Swing', 'Table Tennis Shot', 'Tai Chi', 'Tennis Swing', 'Throw Discus', 'Trampoline Jumping', 'Typing', 'Uneven Bars', 'Volleyball Spiking', 'Walking with a dog', 'Wall Pushups', 'Writing On Board', 'Yo Yo']
| [
"[email protected]"
]
| |
49a906e6c7883da06b65ccbde0a76bf79142b0dc | 47798d5661fa0c9423b7afc8cf7f8545816d709b | /tests/re_v4.py | 540db38d96045ed766338edc74532db09e9331fd | []
| no_license | yaleman/SubnetMixer | d530a5ce3c39aced202b94ac661e11d7a100894e | 122d919f3f00b3bd2fe329d017003ab0f06b0210 | refs/heads/master | 2022-11-10T01:29:35.916066 | 2022-10-09T21:31:54 | 2022-10-09T21:31:54 | 17,395,183 | 0 | 0 | null | 2022-10-09T21:31:55 | 2014-03-04T08:23:10 | Python | UTF-8 | Python | false | false | 160 | py | import re
re_v4 = re.compile( r"[\d]{1,3}\.[\d]{1,3}\.[\d]{1,3}\.[\d]{1,3}" )  # escape the dots so they only match a literal '.'
tests = [ "10.0.0.0", "10.0.0", "1111" ]
for x in tests:
    print(re_v4.match(x))
| [
"[email protected]"
]
| |
6967044ffd32e1f589f9a6643e7ba3dba2822a94 | 7c19d1198bff566c7cde15ddf5876470b05815cf | /sentiments_neural_network_cpu.py | d8001d8b47cb8a46f4f9b9b620a7b04ac45ae867 | []
| no_license | nikil21/Sentiment-Analysis | 48bf2123f1f10ef98c9bb838da6a79b556231426 | 94f91dcf1dd6016dff734ec66cda6e492e22510a | refs/heads/main | 2023-06-28T11:00:09.163564 | 2021-08-04T14:14:02 | 2021-08-04T14:14:02 | 392,713,780 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,046 | py | import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
from create_sentiment_featuresets import create_feature_sets_and_labels
#from tensorflow.examples.tutorials.mnist import input_data
import pickle
import numpy as np
train_x,train_y,test_x,test_y = create_feature_sets_and_labels('pos.txt','neg.txt')
n_classes = 2
batch_size = 100
hm_epochs = 10
n_nodes_hl1 = 1500
n_nodes_hl2 = 1500
n_nodes_hl3 = 1500
W1 = tf.Variable(tf.random.normal([len(train_x[0]), n_nodes_hl1]))
B1 = tf.Variable(tf.random.normal([n_nodes_hl1]))
W2 = tf.Variable(tf.random.normal([n_nodes_hl1, n_nodes_hl2]))
B2 = tf.Variable(tf.random.normal([n_nodes_hl2]))
W3 = tf.Variable(tf.random.normal([n_nodes_hl2, n_nodes_hl3]))
B3 = tf.Variable(tf.random.normal([n_nodes_hl3]))
W4 = tf.Variable(tf.random.normal([n_nodes_hl3, n_classes]))
B4 = tf.Variable(tf.random.normal([n_classes]))
x = tf.placeholder('float')
y = tf.placeholder('float')
hidden_1_layer = {'f_fum':n_nodes_hl1,
'weight': W1,
'bias':B1}
hidden_2_layer = {'f_fum':n_nodes_hl2,
'weight':W2,
'bias':B2}
hidden_3_layer = {'f_fum':n_nodes_hl3,
'weight':W3,
'bias':B3}
output_layer = {'f_fum':None,
'weight': W4,
'bias': B4,}
# Nothing changes
def neural_network_model(data):
l1 = tf.add(tf.matmul(data,hidden_1_layer['weight']), hidden_1_layer['bias'])
l1 = tf.nn.relu(l1)
l2 = tf.add(tf.matmul(l1,hidden_2_layer['weight']), hidden_2_layer['bias'])
l2 = tf.nn.relu(l2)
l3 = tf.add(tf.matmul(l2,hidden_3_layer['weight']), hidden_3_layer['bias'])
l3 = tf.nn.relu(l3)
output = tf.matmul(l3,output_layer['weight']) + output_layer['bias']
return output
def train_neural_network(x):
prediction = neural_network_model(x)
# OLD VERSION:
#cost = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits(prediction,y) )
# NEW:
cost = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits(logits=prediction, labels=y))
optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)
with tf.Session() as sess:
# OLD:
#sess.run(tf.initialize_all_variables())
# NEW:
sess.run(tf.global_variables_initializer())
for epoch in range(hm_epochs):
epoch_loss = 0
i=0
while i < len(train_x):
start = i
end = i+batch_size
batch_x = np.array(train_x[start:end])
batch_y = np.array(train_y[start:end])
_, c = sess.run([optimizer, cost], feed_dict={x: batch_x,
y: batch_y})
epoch_loss += c
i+=batch_size
print('Epoch', epoch+1, 'completed out of',hm_epochs,'loss:',epoch_loss)
correct = tf.equal(tf.argmax(prediction, 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct, 'float'))
print('Accuracy:',accuracy.eval({x:test_x, y:test_y}))
train_neural_network(x) | [
"[email protected]"
]
| |
11e5fb5400c27bad705c752ddfacb83e04e03f9d | bfc42c114f652012b6cfd14e7cccf52cb6b9ac7e | /src/spdx_tools/spdx3/model/agent.py | 9aa326e2261b39085ccbdcfea5376185401cc086 | [
"Apache-2.0",
"GPL-2.0-only"
]
| permissive | spdx/tools-python | 05a952501af2ac608678cb1737f7c661f6091fa2 | 777bd274dd06cb24342738df7da5ab285d652350 | refs/heads/main | 2023-08-31T09:39:52.930063 | 2023-08-24T06:39:48 | 2023-08-24T10:22:33 | 32,761,058 | 147 | 136 | Apache-2.0 | 2023-09-14T15:50:59 | 2015-03-23T21:54:39 | Python | UTF-8 | Python | false | false | 1,251 | py | # SPDX-FileCopyrightText: 2023 spdx contributors
#
# SPDX-License-Identifier: Apache-2.0
from beartype.typing import List, Optional
from spdx_tools.common.typing.dataclass_with_properties import dataclass_with_properties
from spdx_tools.common.typing.type_checks import check_types_and_set_values
from spdx_tools.spdx3.model import CreationInfo, Element, ExternalIdentifier, ExternalReference, IntegrityMethod
@dataclass_with_properties
class Agent(Element):
def __init__(
self,
spdx_id: str,
creation_info: Optional[CreationInfo] = None,
name: Optional[str] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
comment: Optional[str] = None,
verified_using: List[IntegrityMethod] = None,
external_reference: List[ExternalReference] = None,
external_identifier: List[ExternalIdentifier] = None,
extension: Optional[str] = None,
):
verified_using = [] if verified_using is None else verified_using
external_reference = [] if external_reference is None else external_reference
external_identifier = [] if external_identifier is None else external_identifier
check_types_and_set_values(self, locals())
| [
"[email protected]"
]
| |
1daefdaaf3cdc9dbbd4d888acd5c05d94d6285dd | 85c337f0364f1452c068b7e93421b3e24af85358 | /MzManage/manage.py | 362fb7bb3d7af3d8d0dfab2d09b3c4fb6b0b78a7 | []
| no_license | hornLK/AuthSystemWeb | 9518f23453f910e17c516db26ea3a00fe0d0c806 | c2c03ff2133151889a2ecc205a753a0eb2bbfd91 | refs/heads/master | 2022-12-14T19:18:00.560077 | 2018-04-19T12:39:14 | 2018-04-19T12:39:14 | 130,317,561 | 0 | 0 | null | 2022-12-08T00:59:04 | 2018-04-20T06:17:08 | JavaScript | UTF-8 | Python | false | false | 540 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "MzManage.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
| [
"[email protected]"
]
| |
be6034d2e2763ff648e95e4600d85340a32d843a | 275f85955acabac247fe306b0161a6d758f4d057 | /KaterinaAnzoleaga/Classes/PersonEmployee/setGetEmployee.py | ee7eecadd6544e7feec98952f831ec910e37a9cc | []
| no_license | mauricioZelaya/QETraining_BDT_python | 295bb58a99a36b0b973afd153109c510191b4ec7 | d7cc798e7063ab32e5002e4deda3ddec8a8a0c59 | refs/heads/master | 2021-05-08T05:01:13.181273 | 2017-11-24T21:53:46 | 2017-11-24T21:53:46 | 108,473,352 | 0 | 0 | null | 2017-11-24T21:53:47 | 2017-10-26T22:43:32 | Python | UTF-8 | Python | false | false | 146 | py | from Employee import *
from LoginModule import *
my_emp = Employee ('Carlos', 'Lopez', 35, '24165465', 45, 'Support')
print (my_emp.getObject())
| [
"[email protected]"
]
| |
347502a5063ca3f7fdbb96e81aadf62f71a48dae | 97e534b26a76bf0d954e166841179979748bcfa2 | /objects/migrations/0046_auto_20180625_0823.py | d6855e81eb891d0362368b4d406690be5fbde2c7 | []
| no_license | mehdi1361/http_server | 3a8bd73ce44307ee2b7761d1211671ca8cb0f3ba | d8a962c55165ef0237bfb26d27d9cfa11a415a5d | refs/heads/develop | 2022-12-11T00:44:11.089407 | 2019-01-20T12:02:48 | 2019-01-20T12:02:48 | 166,656,299 | 0 | 0 | null | 2022-12-07T23:53:22 | 2019-01-20T12:02:05 | HTML | UTF-8 | Python | false | false | 958 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-06-25 08:23
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('objects', '0045_auto_20180625_0724'),
]
operations = [
migrations.AddField(
model_name='league',
name='play_off_start_gem_1',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='play off start gem 1'),
),
migrations.AddField(
model_name='league',
name='play_off_start_gem_2',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='play off start gem 2'),
),
migrations.AlterField(
model_name='league',
name='play_off_start_gem',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='play off start gem '),
),
]
| [
"[email protected]"
]
| |
3ce934caaa6e0a49902a84d3e6ce84ac3d1aac37 | 5cb8df4d10cd1a1d77f227ea8e1b311744750d5b | /generate.py | b4ba55cf4e1d1accfe70b88346848e422bbf65cf | [
"CC0-1.0"
]
| permissive | YoonGenwu/hearthstonejson | 388d46c5c082cde8389bef1011dded7d46fea7dc | 3d6709f99dc7d0c0b75ccf441cfebec00f48a184 | refs/heads/master | 2021-01-15T11:42:57.006639 | 2016-02-17T01:12:14 | 2016-02-17T01:12:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,707 | py | #!/usr/bin/env python
import os
import json
import sys
from argparse import ArgumentParser
from enum import IntEnum
from hearthstone.dbf import Dbf
from hearthstone.cardxml import load
from hearthstone.enums import CardType, Faction, GameTag, Locale, LOCALIZED_TAGS
MECHANICS_TAGS = [
GameTag.ADJACENT_BUFF,
GameTag.AURA,
GameTag.BATTLECRY,
GameTag.CHARGE,
GameTag.COMBO,
GameTag.DEATHRATTLE,
GameTag.DIVINE_SHIELD,
GameTag.ENRAGED,
GameTag.FORGETFUL,
GameTag.FREEZE,
GameTag.INSPIRE,
GameTag.MORPH,
GameTag.OVERLOAD,
GameTag.POISONOUS,
GameTag.SECRET,
GameTag.SILENCE,
GameTag.STEALTH,
GameTag.SPELLPOWER,
GameTag.TAG_ONE_TURN_EFFECT,
GameTag.TAUNT,
GameTag.TREASURE,
GameTag.WINDFURY,
GameTag.ImmuneToSpellpower,
GameTag.InvisibleDeathrattle,
]
def json_dump(obj, filename, pretty=False):
print("Writing to %r" % (filename))
if pretty:
kwargs = {"sort_keys": True, "indent": "\t", "separators": (",", ": ")}
else:
kwargs = {"separators": (",", ":")}
with open(filename, "w", encoding="utf8") as f:
json.dump(obj, f, ensure_ascii=False, **kwargs)
def show_field(card, k, v):
if k == "cost" and card.type not in (CardType.ENCHANTMENT, CardType.HERO):
return True
if k == "faction" and v == Faction.NEUTRAL:
return False
if k == "attack" and card.type in (CardType.MINION, CardType.WEAPON):
return True
if k == "health" and card.type in (CardType.MINION, CardType.HERO):
return True
if k == "durability" and card.type == CardType.WEAPON:
return True
return bool(v)
def get_mechanics(card):
ret = []
for tag in MECHANICS_TAGS:
value = card.tags.get(tag, 0)
if value:
ret.append(tag.name)
return ret
TAG_NAMES = {
GameTag.CARDNAME: "name",
GameTag.FLAVORTEXT: "flavortext",
GameTag.CARDTEXT_INHAND: "text",
GameTag.CardTextInPlay: "textInPlay",
GameTag.HOW_TO_EARN: "howToEarn",
GameTag.HOW_TO_EARN_GOLDEN: "howToEarnGolden",
GameTag.TARGETING_ARROW_TEXT: "targetingArrowText",
}
def serialize_card(card):
ret = {
"id": card.id,
"name": card.name,
"flavor": card.flavortext,
"text": card.description,
"textInPlay": card.playtext,
"howToEarn": card.how_to_earn,
"howToEarnGolden": card.how_to_earn_golden,
"targetingArrowText": card.targeting_arrow_text,
"artist": card.artist,
"faction": card.faction,
"playerClass": card.card_class,
"race": card.race,
"rarity": card.rarity,
"set": card.card_set,
"type": card.type,
"collectible": card.collectible,
"attack": card.atk,
"cost": card.cost,
"durability": card.durability,
"health": card.health,
}
ret = {k: v for k, v in ret.items() if show_field(card, k, v)}
for k, v in ret.items():
if isinstance(v, IntEnum):
ret[k] = v.name
mechanics = get_mechanics(card)
if mechanics:
ret["mechanics"] = mechanics
if card.entourage:
ret["entourage"] = card.entourage
if card.requirements:
ret["playRequirements"] = {k.name: v for k, v in card.requirements.items()}
if card.craftable:
ret["dust"] = card.crafting_costs + card.disenchant_costs
# if card.choose_cards:
# ret["chooseCards"] = card.choose_cards
return ret
def export_cards_to_file(cards, filename, locale):
ret = []
for card in cards:
card.locale = locale
ret.append(serialize_card(card))
json_dump(ret, filename)
def export_all_locales_cards_to_file(cards, filename):
ret = []
for card in cards:
obj = serialize_card(card)
for tag in LOCALIZED_TAGS:
if tag in TAG_NAMES:
value = card._localized_tags[tag]
if value:
obj[TAG_NAMES[tag]] = value
ret.append(obj)
json_dump(ret, filename)
def write_cardbacks(dbf, filename, locale):
ret = []
for record in dbf.records:
ret.append({
"id": record["ID"],
"note_desc": record["NOTE_DESC"],
"source": record["SOURCE"],
"enabled": record["ENABLED"],
"name": record.get("NAME", {}).get(locale.name, ""),
"prefab_name": record.get("PREFAB_NAME", ""),
"description": record.get("DESCRIPTION", {}).get(locale.name, ""),
"source_description": record.get("SOURCE_DESCRIPTION", {}).get(locale.name, ""),
})
json_dump(ret, filename)
def main():
parser = ArgumentParser()
parser.add_argument(
"-o", "--output-dir",
type=str,
dest="output_dir",
default="out",
help="Output directory"
)
parser.add_argument(
"-i", "--input-dir",
type=str,
dest="input_dir",
default="hs-data",
help="Input hs-data directory"
)
args = parser.parse_args(sys.argv[1:])
db, xml = load(os.path.join(args.input_dir, "CardDefs.xml"))
dbf_path = os.path.join(args.input_dir, "DBF", "CARD_BACK.xml")
if not os.path.exists(dbf_path):
print("Skipping card back generation (%s does not exist)" % (dbf_path))
dbf = None
else:
dbf = Dbf.load(dbf_path)
cards = db.values()
collectible_cards = [card for card in cards if card.collectible]
for locale in Locale:
if locale.unused:
continue
basedir = os.path.join(args.output_dir, locale.name)
if not os.path.exists(basedir):
os.makedirs(basedir)
filename = os.path.join(basedir, "cards.json")
export_cards_to_file(cards, filename, locale.name)
filename = os.path.join(basedir, "cards.collectible.json")
export_cards_to_file(collectible_cards, filename, locale.name)
if dbf is not None:
filename = os.path.join(basedir, "cardbacks.json")
write_cardbacks(dbf, filename, locale)
# Generate merged locales
basedir = os.path.join(args.output_dir, "all")
if not os.path.exists(basedir):
os.makedirs(basedir)
filename = os.path.join(basedir, "cards.json")
export_all_locales_cards_to_file(cards, filename)
filename = os.path.join(basedir, "cards.collectible.json")
export_all_locales_cards_to_file(collectible_cards, filename)
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
3fb2a9f1ae58ad0743c9d750da7afc275cf304bf | 0cc4eb3cb54f8394c127ace62d3108fdb5230c85 | /.spack-env/view/lib/python3.7/test/test_multibytecodec.py | 7384d370794ac8065a971022cc5463d45a9edc2d | []
| no_license | jacobmerson/spack-develop-env | 5b2d76f58c0b64ae97c64f77a3c4d33a770c71c8 | 5fca20ca343b1a76f05fc635c87f94ed25417d94 | refs/heads/master | 2022-07-04T02:22:50.264727 | 2020-05-06T05:13:50 | 2020-05-06T05:13:50 | 261,657,112 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 151 | py | /lore/mersoj/spack/spack/opt/spack/linux-rhel7-x86_64/gcc-7.3.0/python-3.7.7-oihhthdoxtgh4krvzpputn5ozwcnq2by/lib/python3.7/test/test_multibytecodec.py | [
"[email protected]"
]
| |
5c2b7a287908c20a810e10ea85d5d574ce3aa09f | 68bc1452c1adee952d17d2ee4ba2d31865f4c36b | /petro/models/__init__.py | f30cbc7d16584e7660708e4c634336355a14d9b0 | []
| no_license | KacheKH/odoo | def5942a71bf583b1345d8189d15e0f12fdebed7 | 82f674b8b2fd444b8cbf0e427d6c9174eec4d868 | refs/heads/master | 2020-03-14T10:46:38.081634 | 2018-05-10T18:26:41 | 2018-05-10T18:26:41 | 131,575,430 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 45 | py | # -*- coding: utf-8 -*-
from . import petros | [
"[email protected]"
]
| |
0635c22654b0499d2560adbd8e4deb4ae0b1d895 | 3ec0da1faa3da5e8e94c73296ba6a8bed6228e2f | /foodtrucks/migrations/0002_auto_20160725_0842.py | 5194919403ecf2f7b213df7cb21bb5770cd3cc23 | []
| no_license | calmhandtitan/foodtrucksnearby | 15ef095ff2251e40ef03814eee3554fa0a570583 | 61dabd817e150eaec672c4f243be9a5ac71a4c3e | refs/heads/master | 2021-01-20T20:41:44.993850 | 2016-08-01T06:31:48 | 2016-08-01T06:31:48 | 64,118,558 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,849 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('foodtrucks', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='foodtruck',
name='approved',
),
migrations.RemoveField(
model_name='foodtruck',
name='block',
),
migrations.RemoveField(
model_name='foodtruck',
name='blocklot',
),
migrations.RemoveField(
model_name='foodtruck',
name='cnn',
),
migrations.RemoveField(
model_name='foodtruck',
name='expirationdate',
),
migrations.RemoveField(
model_name='foodtruck',
name='locationdescription',
),
migrations.RemoveField(
model_name='foodtruck',
name='lot',
),
migrations.RemoveField(
model_name='foodtruck',
name='noisent',
),
migrations.RemoveField(
model_name='foodtruck',
name='permit',
),
migrations.RemoveField(
model_name='foodtruck',
name='priorpermit',
),
migrations.RemoveField(
model_name='foodtruck',
name='received',
),
migrations.RemoveField(
model_name='foodtruck',
name='schedule',
),
migrations.RemoveField(
model_name='foodtruck',
name='status',
),
migrations.RemoveField(
model_name='foodtruck',
name='x',
),
migrations.RemoveField(
model_name='foodtruck',
name='y',
),
]
| [
"[email protected]"
]
| |
b6fb7824bcb1ed5a6f2dcd398fe4e45eb19507f4 | 74a8defffc8d01804e5f65b0c5857accd8ff144b | /predict.py | 3e6f0ba84f1bc671c92dbbd02cefa346d4cf94fc | []
| no_license | DanferWang/NUS-SoC-Summer-Workshop | cb75bbd27ac52829384c984991255ac68407b8bc | 68f1e3f27634cb131b2c9032be16ee20b2ee7d5f | refs/heads/master | 2021-10-07T13:53:45.010267 | 2021-10-05T15:13:42 | 2021-10-05T15:13:42 | 202,662,046 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 940 | py | from __future__ import print_function
from tensorflow.python.keras.models import load_model
import tensorflow as tf
import numpy as numpy
from PIL import Image
MODEL_NAME='flowers.hd5'
dict={0:'daisy',1:'dandelion',2:'roses',3:'sunflowers',4:'tulips'}
graph = tf.compat.v1.get_default_graph()
def classify(model,image):
global graph
with graph.as_default():
result=model.predict(image)
themax=np.argmax(result)
return(dict[themax],result[0][themax],themax)
def load_image(image_fname):
img=Image.open(image_fname)
img=img.resize((249,249))
imgarray=np.array(img)/255.0
final=np.expand_dims(imgarray,axis=0)
return final
def main():
model=load_model(MODEL_NAME)
img=load_image("what.jpg")
label,prob,_=classify(model,img)
    print("We think with certainty %3.2f that it is %s." % (prob, label))
if __name__ == "__main__":
main() | [
"[email protected]"
]
| |
128e0b1ef36f2b620f29bae70b2db83bd31f692c | 69eb69547ea85d930e8915f9c17b3e4d254e4058 | /hand length.py | 03cf4050c260b7ad045190e33205dbea3b526e61 | []
| no_license | Theofilos-Chamalis/edX-6.00.1x-Introduction-to-Computer-Science-and-Programming | 38cbc9288ee639ab961a579869a045f3f839e82d | b87a7e69423213a711680d2744315c61ad81877b | refs/heads/master | 2022-03-04T20:50:32.441600 | 2022-02-13T10:17:25 | 2022-02-13T10:17:25 | 14,083,089 | 102 | 62 | null | 2022-02-13T10:17:26 | 2013-11-03T09:45:19 | Python | UTF-8 | Python | false | false | 192 | py | def calculateHandlen(hand):
"""
Returns the length (number of letters) in the current hand.
    hand: dictionary (string -> int)
returns: integer
"""
return sum(hand.values()) | [
"[email protected]"
]
| |
185bc9e1d1b2fcba663c80dac0791258ec2a9118 | 370977b270b42fdd2435df7cf69134d2c68caa37 | /03-fp-decorator/hw/solution.py | 29bed2f8907ba3bd9ad6d849c343e84a1f38bbb6 | []
| no_license | nagellack5C/EpamPython2019 | 5cd6af1b8393413a84af38560bf7d1606b815d9e | 3c7e7120cbd5f48bbb09dd5c5e9a27bec49332aa | refs/heads/master | 2020-05-24T11:48:33.792535 | 2019-07-04T14:29:46 | 2019-07-04T14:29:46 | 187,254,927 | 1 | 0 | null | 2019-05-17T17:15:59 | 2019-05-17T17:15:59 | null | UTF-8 | Python | false | false | 5,001 | py | import time
from functools import reduce
from timeit import timeit
# hw1
# problem 6:
# Find the difference between the sum of the squares of the
# first one hundred natural numbers and the square of the sum.
# solution:
x = abs(sum([i*i for i in range(1, 101)]) - sum(range(1, 101)) ** 2)
print(x)
# problem 9:
# There exists exactly one Pythagorean triplet for which a + b + c = 1000.
# Find the product abc.
# solution:
# simple = [a * b * (1000 - a - b) for a in range(1, 1001) for b in range(1, 1001) if a * a + b * b == (1000 - a - b) ** 2][0]
euclid = [a * b * c for a, b, c in [(m * m - n * n, 2 * m * n, m * m + n * n) for m in range(1, 32) for n in range(1, 32)] if a * a + b * b == c * c and a + b + c == 1000][0]
print(euclid)
# print(simple)
# euclid is 500-800 times faster
# x = timeit("[a * b * (1000 - a - b) for a in range(1, 1001) for b in range(1, 1001) if a * a + b * b == (1000 - a - b) ** 2][0]", number=5)
# y = timeit("[a * b * c for a, b, c in [(m * m - n * n, 2 * m * n, m * m + n * n) for m in range(1, 32) for n in range(1, 32)] if a * a + b * b == c * c and a + b + c == 1000][0]", number=5)
# print(x / y)
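# (Added note, not part of the original solution) The speed-up comes from Euclid's
# parametrization of Pythagorean triples: a = m^2 - n^2, b = 2*m*n, c = m^2 + n^2
# satisfies a^2 + b^2 == c^2 by construction, so only ~31*31 (m, n) pairs are
# scanned instead of ~10^6 (a, b) pairs. A small, hypothetical helper showing the
# same idea more explicitly (name and ranges are illustrative only):
def euclid_triplet_product(target_sum=1000, limit=32):
    for m in range(1, limit):
        for n in range(1, m):  # n < m keeps a = m^2 - n^2 positive
            a, b, c = m * m - n * n, 2 * m * n, m * m + n * n
            if a + b + c == target_sum:
                return a * b * c  # 31875000 for target_sum == 1000
    return None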
# problem 40:
# An irrational decimal fraction is created by concatenating the positive integers:
# 0.123456789101112131415161718192021...
# It can be seen that the 12th digit of the fractional part is 1.
# If dn represents the nth digit of the fractional part, find the value
# of the following expression.
# d1 × d10 × d100 × d1000 × d10000 × d100000 × d1000000
# solution:
# val = reduce(lambda x, y: int(x) * int(y), [i[1] for i in enumerate(list("".join([str(i) for i in range(1, 1000001)])), 1) if i[0] in [10**j for j in range(6)]])
# print(val)
# problem 48:
# The series, 1^1 + 2^2 + 3^3 + ... + 10^10 = 10405071317.
# Find the last ten digits of the series, 1^1 + 2^2 + 3^3 + ... + 1000^1000.
# solution:
last_ten = reduce(lambda x, y: x + y ** y, range(1, 1001)) % (10 ** 10)
print(last_ten)
# ------------------------------------------------------
# hw2
def is_armstrong(number):
return number == sum(list(map(lambda x: int(x)**len(str(number)), list(str(number)))))
assert is_armstrong(153) == True, 'Armstrong number'
assert is_armstrong(10) == False, 'Not an Armstrong number'
# hw3
def collatz_steps(n):
return "bad value!" if not isinstance(n, int) or n < 1 else n-1 if n == 1 else 1 + collatz_steps(n // 2) if n % 2 == 0 else 1 + collatz_steps(n * 3 + 1)
print(collatz_steps(33))
# assert collatz_steps(16) == 4
# assert collatz_steps(12) == 9
# assert collatz_steps(1000000) == 152
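# (Added for readability, not part of the original homework) The one-liner above
# is equivalent to this iterative sketch of the same Collatz step count:
def collatz_steps_iterative(n):
    if not isinstance(n, int) or n < 1:
        return "bad value!"
    steps = 0
    while n != 1:
        n = n // 2 if n % 2 == 0 else 3 * n + 1
        steps += 1
    return steps  # e.g. 16 -> 8 -> 4 -> 2 -> 1 gives 4 steps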
# hw4
# Rewrite the make_cache function, which stores the results
# of previous calls to the wrapped function, so that it keeps
# the results in its storage only for a fixed amount of time,
# which is passed as a parameter (argument) to the decorator.
# Plus, come up with a useful use case and implement the slow_function
def make_cache(ttl):
cache = {}
def outer_wrapper(func):
def wrapper(*args, **kwargs):
# comparing current timestamp with cached results' TTL
# removing key-value pair if difference exceeds the
# ttl parameter
cur_time = time.time()
# using list(cache.keys()) to allow for removal while iterating
for i in list(cache.keys()):
if cur_time - cache[i]["ttl"] > ttl:
cache.pop(i)
hashed_key = str(args) + str(kwargs)
if hashed_key not in cache:
cache[hashed_key] = {}
cache[hashed_key]["res"] = func(*args, **kwargs)
cache[hashed_key]["ttl"] = time.time()
return cache[hashed_key]["res"]
return wrapper
return outer_wrapper
@make_cache(6)
def slow_function(curr_name):
global curr_rates
'''
This function pulls currency rates from a dummy API
(in this case a global dict). time.sleep() simulates
connection time.
In this simulated case connection to the API requires 2
seconds and the results are stored for 5 seconds.
:param num:
:return:
'''
time.sleep(2)
return f'Exchange rate for {curr_name} is {curr_rates[curr_name]}'
curr_rates = {
"USD": 30,
"RUR": 1,
"EUR": 40,
"GBP": 50,
"JPY": 0.5
}
# simulating user requests
print(slow_function("USD"))
curr_rates["USD"] = 60 # this value was increased in the source but the cached one is kept
print(slow_function("USD"))
print(slow_function("EUR"))
print(slow_function("GBP"))
print(slow_function("RUR"))
print(slow_function("JPY"))
print(slow_function("USD")) # by this time the cached value should expire and the function will pull the new one
| [
"[email protected]"
]
| |
132aed4fc73fce77dc1e783217955c38d61089e2 | cf58efaf03ad1e06365c2756c46f2a88a143e301 | /oops7.py | 19aae47c344b09f7ee3f7561e1a1ee05d79c8d5a | []
| no_license | tmtrinesh/Seleniumproject1 | b5b417417ece047f55fa05d53321af4143506996 | 739d813eebb5151719ebb3b7e298ed7fa5e4b01e | refs/heads/master | 2023-04-19T21:57:46.126216 | 2021-04-29T07:06:19 | 2021-04-29T07:06:19 | 362,723,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | py | class Myclass:
pass
c=Myclass()
print(c)
class Myclass1:
def __str__(self):
return "welcome"
d=Myclass1()
print(d)
def sum(start,end):
result =0
for i in range(start,end+1):
result = result+i
print(result)
sum(10,20) | [
"[email protected]"
]
| |
59f59ab348d69ad767439cbd8df75eaca647570f | f3810660c1ed03b8624f2f8493bd98defc01317e | /chap03/07_three_neuralnetwork2.py | 6e46ac069af82fbf0f06cd72361e26e234bcc20f | []
| no_license | YoohyeonSeung/python_deep_learning | d951412885c6170f1d4ff24df9255792b4a3c9d6 | d8f719e3ee6112cc5830b1be453d592e46c81eb3 | refs/heads/master | 2020-04-19T22:49:44.351624 | 2019-02-18T09:22:02 | 2019-02-18T09:22:02 | 168,480,359 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 943 | py | import numpy as np
def init_network():
network = {}
network['W1'] = np.array([[0.1, 0.3, 0.5],[0.2, 0.4, 0.6]])
network['b1'] = np.array([0.1, 0.2, 0.3])
network['W2'] = np.array([[0.1,0.4],[0.2,0.5],[0.3,0.6]])
network['b2'] = np.array([0.1,0.2])
network['W3'] = np.array([[0.1,0.3],[0.2,0.4]])
network['b3'] = np.array([0.1,0.2])
return network
def identity_function(x):
return x
def sigmoid(x):
return 1 / (1 + np.exp(-x))
def forward(network, x):
W1, W2, W3 = network['W1'], network['W2'],network['W3']
b1, b2, b3 = network['b1'], network['b2'], network['b3']
a1 = np.dot(x, W1) + b1
z1 = sigmoid(a1)
a2 = np.dot(z1, W2) + b2
z2 = sigmoid(a2)
a3 = np.dot(z2, W3) + b3
y = identity_function(a3)
return y
if __name__ == "__main__":
network = init_network()
x = np.array([1.0, 0.5])
y = forward(network, x)
print(y) # [0.31682708 0.69627909] | [
"[email protected]"
]
| |
e27df221a34585c501687f1217812d0264f5466a | 6e3e8fd06c1b9e03414b7bd5b6fb794dee326f87 | /lstm_lm_g_on_sentences_euc.py | c68905a7d897f4053cf68419c2748019410c477d | []
| no_license | qyouurcs/seq_style | f5fe75569b21af549905d6a4e2ff7696286314b7 | 5c971d5bdf7f4b0f0ffccfa913d10ccda3486384 | refs/heads/master | 2021-01-10T04:51:15.444826 | 2016-04-15T18:23:01 | 2016-04-15T18:23:01 | 51,030,147 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,342 | py | from __future__ import print_function
import sys
import os
import numpy as np
import pickle
import theano
import theano.tensor as T
import lasagne
import urllib2 #For downloading the sample text file. You won't need this if you are providing your own file.
import ConfigParser
import math
import pdb
import random
import climate
logging = climate.get_logger(__name__)
climate.enable_default_logging()
def perplexity(p, y, mask):
# calculate the perplexity of each sentence and then average.
# p : batch * seq - 1 * vocab_size
# y: batch * seq - 1
# mask: batch * seq - 1
batch_size = p.shape[0]
seq_len = p.shape[1]
vocab_size = p.shape[2]
PPL = np.zeros((batch_size,))
for i in range(batch_size):
ppl_i = 0
len_i = 0
for j in range(seq_len):
if mask[i][j] > 0:
len_i += 1
ppl_i += math.log(p[i][j][y[i][j]],2)
ppl_i /= len_i
PPL[i] = 2**(-ppl_i)
return np.mean(PPL)
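# (Added note) The function above computes the standard per-sentence perplexity
#   PPL_i = 2 ** ( -(1/L_i) * sum_j log2 p(y_ij) )
# over the L_i unmasked tokens of sentence i, then averages over the batch.
# A tiny, hypothetical sanity check of that formula (not used by the training code):
def _perplexity_example():
    p = np.array([[[0.5, 0.5], [0.25, 0.75]]])  # batch=1, seq-1=2, vocab=2
    y = np.array([[0, 1]])
    mask = np.array([[1, 1]])
    # expected: 2 ** (-(log2(0.5) + log2(0.75)) / 2) ~= 1.633
    return perplexity(p, y, mask)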
def load_vocab(vocab_fn):
idx2word = {}
word2idx = {}
with open(vocab_fn,'r') as fid:
for aline in fid:
parts = aline.strip().split()
idx2word[int(parts[0])] = parts[1]
word2idx[parts[1]] = int(parts[0])
return idx2word, word2idx
def load_vocab_fea(word_vec_fn, word2idx):
word2vec_fea = {}
with open(word_vec_fn,'r') as fid:
for aline in fid:
aline = aline.strip()
parts = aline.split()
if parts[0] in word2idx:
vec_fea = np.array([ float(fea) for fea in parts[1:] ], dtype='float32')
word2vec_fea[parts[0]] = vec_fea
start_fea = np.zeros((word2vec_fea.values()[0].shape),dtype='float32')
t_num_fea = start_fea.size
# using a 1/n as features.
# I think start token is special token. No idea, how to initialize it.
start_fea[:] = 1.0 / start_fea.size
word2vec_fea['#START#'] = start_fea
word2vec_fea['<UNK>'] = np.random.uniform(-0.05, 0.05, start_fea.size)
return word2vec_fea, t_num_fea
def main():
cf = ConfigParser.ConfigParser()
if len(sys.argv) < 2:
logging.info('Usage: {0} <conf_fn>'.format(sys.argv[0]))
sys.exit()
cf.read(sys.argv[1])
dataset = cf.get('INPUT', 'dataset')
h_size = cf.get('INPUT', 'h_size').split(',')
word_vec_fn = cf.get('INPUT', 'word_vec_fn')
vocab_fn = cf.get('INPUT', 'vocab_fn')
optim = cf.get('INPUT', 'optim')
LEARNING_RATE = float(cf.get('INPUT','lr'))
NUM_EPOCHS = int(cf.get("INPUT","epochs"))
BATCH_SIZE = int(cf.get("INPUT", "batch_size"))
train_fn=cf.get('INPUT', 'train_fn')
val_fn=cf.get('INPUT', 'val_fn')
save_dir=cf.get('OUTPUT', 'save_dir')
if not os.path.isdir(save_dir):
os.makedirs(save_dir)
h_size_str = [str(h) for h in h_size]
h_size_str = '_'.join(h_size_str)
save_fn = os.path.join(save_dir, os.path.basename(sys.argv[1]) + '_' + optim + '_' + h_size_str + '_euc.pkl')
idx2word, word2idx = load_vocab(vocab_fn)
# Now, add th <UNK>
word2idx['<UNK>'] = len(idx2word)
idx2word[len(word2idx)-1] = '<UNK>'
vocab_size = len(word2idx)
word2vec_fea, t_num_fea = load_vocab_fea(word_vec_fn, word2idx)
word2vec_fea_np = np.zeros((len(idx2word), t_num_fea), dtype = 'float32')
for i in range(len(idx2word)):
word2vec_fea_np[i,:] = word2vec_fea[idx2word[i]] / np.linalg.norm(word2vec_fea[idx2word[i]])
#word2vec_fea_np[i,:] = word2vec_fea[idx2word[i]]
logging.info('Total vocab has a total of %d words and feas %d', len(word2idx), len(word2vec_fea))
lasagne.random.set_rng(np.random.RandomState(1))
GRAD_CLIP = 100
MAX_SEQ_LENGTH = 32
EVAL_FREQ = 100
PRINT_FREQ = 100
dict_train = {}
with open(train_fn) as fid:
for i,aline in enumerate(fid):
dict_train[i] = aline.strip()
dict_val = {}
with open(val_fn) as fid:
for i,aline in enumerate(fid):
dict_val[i] = aline.strip()
train_range = range(len(dict_train))
def batch_train(batch_size = BATCH_SIZE):
random.shuffle(train_range)
batch = train_range[0:batch_size]
x = np.zeros((batch_size,MAX_SEQ_LENGTH, t_num_fea))
y = np.zeros((batch_size,MAX_SEQ_LENGTH-1), dtype='int32')
masks = np.zeros((batch_size, MAX_SEQ_LENGTH-1), dtype='int32')
for i, pos in enumerate(batch):
tokens = ['#START#']
sent = dict_train[batch[i]].lower().split()
tokens.extend(sent)
if tokens[-1] != ".":
tokens.append('.')
pos = 0
for j,word in enumerate(tokens):
if word in word2vec_fea:
x[i,pos,:] = word2vec_fea[word]
else:
x[i,pos,:] = word2vec_fea['<UNK>']
if pos > 0:
if word in word2vec_fea:
y[i,pos-1] = word2idx[word]
else:
y[i,pos-1] = word2idx['<UNK>']
masks[i,pos-1] = 1
pos += 1
if pos >= MAX_SEQ_LENGTH:
break
return x,y, masks
val_range = range(len(dict_val))
def batch_val(batch_size = BATCH_SIZE):
random.shuffle(val_range)
batch = val_range[0:batch_size]
x = np.zeros((batch_size,MAX_SEQ_LENGTH, t_num_fea))
y = np.zeros((batch_size,MAX_SEQ_LENGTH-1), dtype='int32')
masks = np.zeros((batch_size, MAX_SEQ_LENGTH-1), dtype='int32')
for i, sent in enumerate(batch):
tokens = ['#START#']
sent = dict_val[batch[i]].lower().split()
tokens.extend(sent)
if tokens[-1] != ".":
tokens.append('.')
pos = 0
for j,word in enumerate(tokens):
if word in word2vec_fea:
x[i,pos,:] = word2vec_fea[word]
else:
x[i,pos,:] = word2vec_fea['<UNK>']
if pos > 0:
if word in word2vec_fea:
y[i,pos-1] = word2idx[word]
else:
y[i, pos - 1] = word2idx['<UNK>']
masks[i,pos - 1] = 1
pos += 1
if pos >= MAX_SEQ_LENGTH:
break
return x,y, masks
logging.info("Building network ...")
l_in = lasagne.layers.InputLayer(shape=(BATCH_SIZE, MAX_SEQ_LENGTH, t_num_fea))
l_in_dropout = lasagne.layers.DropoutLayer(l_in, p = 0.5)
h_prev = lasagne.layers.LSTMLayer(
l_in_dropout, int(h_size[0]), grad_clipping = GRAD_CLIP,
unroll_scan = True,
nonlinearity=lasagne.nonlinearities.tanh)
h_cur = lasagne.layers.DropoutLayer(h_prev, p = 0.5)
h_prev = h_cur
for i in xrange(1,len(h_size)):
h_cur = lasagne.layers.LSTMLayer(
h_prev, int(h_size[i]), grad_clipping=GRAD_CLIP,
unroll_scan = True,
nonlinearity=lasagne.nonlinearities.tanh)
h_prev = lasagne.layers.DropoutLayer(h_cur, p = 0.5)
h_cur = h_prev
# The output of the sliced layer will then be of size (batch_size, SEQ_LENGH-1, N_HIDDEN)
l_forward_slice = lasagne.layers.SliceLayer(h_cur, indices = slice(0, -1), axis = 1)
logging.info('l_forward_slide shape {0}, {1},{2}'.format(l_forward_slice.output_shape[0],l_forward_slice.output_shape[1], l_forward_slice.output_shape[2]))
l_forward_slice_rhp = lasagne.layers.ReshapeLayer(l_forward_slice, (-1, l_forward_slice.output_shape[2]))
# The sliced output is then passed through the softmax nonlinearity to create probability distribution of the prediction
# The output of this stage is (batch_size, vocab_size)
l_out = lasagne.layers.DenseLayer(l_forward_slice_rhp, num_units = t_num_fea)
logging.info('l_out shape {0}, {1}'.format(l_out.output_shape[0],l_out.output_shape[1]))
# Theano tensor for the targets
target_values = T.imatrix('target_output')
mask_sym = T.imatrix('mask')
vocab_sym = T.matrix() # vocab of the glove features for the dictionary.
def calc_euc_softmax(net_output, word2vec_fea):
        # Calc the squared Euclidean distance via ||a - b||^2 = ||a||^2 + ||b||^2 - 2*a.b  -> shape n * vocab
dist = ( net_output** 2).sum(1).reshape((net_output.shape[0], 1)) \
+ (word2vec_fea ** 2).sum(1).reshape((1, word2vec_fea.shape[0])) - 2 * net_output.dot(word2vec_fea.T) # n * vocab
# Now, softmax.
z = T.exp( - dist + dist.max(axis = 1, keepdims = True) )
prob = z / z.sum(axis = 1, keepdims = True) # n * vocab
prob = T.reshape(prob, (BATCH_SIZE, MAX_SEQ_LENGTH -1, len(idx2word)))
return prob
def calc_cross_ent(net_output, mask_sym, targets):
preds = T.reshape(net_output, (-1, len(word2idx)))
targets = T.flatten(targets)
cost = T.nnet.categorical_crossentropy(preds, targets)[T.flatten(mask_sym).nonzero()]
return cost
network_output = lasagne.layers.get_output(l_out, deterministic = False)
prob_output = calc_euc_softmax(network_output, vocab_sym)
network_output_tst = lasagne.layers.get_output(l_out, deterministic = True)
prob_output_tst = calc_euc_softmax(network_output_tst, vocab_sym)
cost_train = T.mean(calc_cross_ent(prob_output, mask_sym, target_values))
cost_test = T.mean(calc_cross_ent(prob_output_tst, mask_sym, target_values))
all_params = lasagne.layers.get_all_params(l_out, trainable = True)
# Compute AdaGrad updates for training
logging.info("Computing updates ...")
if optim == 'ada':
updates = lasagne.updates.adagrad(cost_train, all_params, LEARNING_RATE)
elif optim == 'adam':
updates = lasagne.updates.adam(cost_train, all_params, LEARNING_RATE)
elif optim == 'rmsprop':
updates = lasagne.updates.rmsprop(cost_train, all_params, LEARNING_RATE)
# Theano functions for training and computing cost
logging.info("Compiling functions ...")
f_train = theano.function([l_in.input_var, target_values, mask_sym, vocab_sym], cost_train, updates=updates, allow_input_downcast=True)
f_val = theano.function([l_in.input_var, target_values, mask_sym, vocab_sym], cost_test, allow_input_downcast=True)
probs_train = theano.function([l_in.input_var, vocab_sym],prob_output_tst,allow_input_downcast=True)
probs_test = theano.function([l_in.input_var, vocab_sym],prob_output_tst,allow_input_downcast=True)
logging.info("Training ...")
data_size = len(dict_train)
mini_batches_p_epo = int(math.floor(data_size / BATCH_SIZE))
#try:
if True:
for epoch in xrange(NUM_EPOCHS):
avg_cost = 0;
for j in xrange(mini_batches_p_epo):
x,y, mask = batch_train()
avg_cost += f_train(x, y, mask, word2vec_fea_np)
if not(j % PRINT_FREQ):
p = probs_train(x, word2vec_fea_np)
ppl = perplexity(p,y,mask)
logging.info("Epoch {}, mini_batch = {}/{}, avg loss = {}, PPL = {}".format(epoch, j, mini_batches_p_epo, avg_cost / PRINT_FREQ, ppl))
avg_cost = 0
if not(j % EVAL_FREQ):
x,y,mask = batch_val()
val_cost = f_val(x, y, mask, word2vec_fea_np)
p = probs_test(x,word2vec_fea_np)
ppl = perplexity(p,y,mask)
logging.info("-----------------------------------------------------")
logging.warning("\tVAL average loss = {}, PPL = {}".format(val_cost, ppl))
# We also need to eval on the val dataset.
#except Exception as e:
logging.warning("EXCEPTION")
pass
param_values = lasagne.layers.get_all_param_values(l_out)
param_syms = lasagne.layers.get_all_params(l_out)
param_strs = []
for sym in param_syms:
param_strs.append(str(sym))
d = {'param_vals': param_values,
'param_strs': param_strs,
'word2idx':word2idx,
'idx2word':idx2word}
pickle.dump(d, open(save_fn,'w'), protocol=pickle.HIGHEST_PROTOCOL)
logging.info("Done with {}".format(save_fn))
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
fda5b7c54f83bccf29c6b0d4fa41950f59ec38ce | d2c2989d2763ef17f27d621759f8ab9cfeb84974 | /typeidea/blog/migrations/0001_initial.py | 76c351edec954a573d8d2cd1c3351aefef1dc1c8 | []
| no_license | ZhangYanPing/typeidea | e219e6b0129ce7301b5b5a053fd02ed8f1b91083 | 88765c79f312d677f3c0e23f0f96ee883a6e3ce0 | refs/heads/master | 2020-05-04T16:34:37.407133 | 2019-04-17T09:18:06 | 2019-04-17T09:18:06 | 179,281,617 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,287 | py | # Generated by Django 2.2 on 2019-04-04 00:34
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50, verbose_name='名称')),
('status', models.PositiveIntegerField(choices=[(1, '正常'), (0, '删除')], default=1, verbose_name='状态')),
('is_nav', models.BooleanField(default=False, verbose_name='是否为导航')),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='作者')),
],
options={
'verbose_name': '分类',
'verbose_name_plural': '分类',
},
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=10, verbose_name='名称')),
('status', models.PositiveIntegerField(choices=[(1, '正常'), (0, '删除')], default=1, verbose_name='状态')),
('created_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='作者')),
],
options={
'verbose_name': '标签',
'verbose_name_plural': '标签',
},
),
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='标题')),
('desc', models.CharField(blank=True, max_length=1024, verbose_name='摘要')),
('content', models.TextField(help_text='正文必须为MarkDown格式', verbose_name='正文')),
('status', models.PositiveIntegerField(choices=[(1, '正常'), (0, '删除'), (2, '草稿')], default=1, verbose_name='状态')),
('created_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('category', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='blog.Category', verbose_name='分类')),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='作者')),
('tag', models.ManyToManyField(to='blog.Tag', verbose_name='标签')),
],
options={
'verbose_name': '文章',
'verbose_name_plural': '文章',
'ordering': ['-id'],
},
),
]
| [
"[email protected]"
]
| |
756b5b24cc0dca3c1d9099f9f4fb15c96283be28 | e10edb9c3db4a78fcd91556496de1a5637f03012 | /foodgram_project/settings.py | b4fe0e607409c3946c229006222e4851de6a7045 | [
"MIT"
]
| permissive | ilyukevich/foodgram-project | 9730d7daaee75aa8ffed312dbfd9ff889a552b9f | 458b01d6696687ff6476a8da343da3590699558f | refs/heads/master | 2023-06-06T05:48:46.800023 | 2021-06-25T16:00:17 | 2021-06-25T16:00:17 | 345,058,627 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,139 | py | import os
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = os.environ.get('SECRET_KEY')
DEBUG = False
ALLOWED_HOSTS = ['*']
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'django.contrib.flatpages',
'sorl.thumbnail',
'api',
'users',
'recipes',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'foodgram_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'recipes.context_processors.get_shopping_list_elements_length',
],
},
},
]
WSGI_APPLICATION = 'foodgram_project.wsgi.application'
DATABASES = {
'default': {
'ENGINE': os.environ.get('DB_ENGINE'),
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('POSTGRES_USER'),
'PASSWORD': os.environ.get('POSTGRES_PASSWORD'),
'HOST': os.environ.get('DB_HOST'),
'PORT': os.environ.get('DB_PORT'),
}
}
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
LANGUAGE_CODE = 'ru-RU'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
AUTH_USER_MODEL = 'users.User'
LOGIN_REDIRECT_URL = 'index'
SITE_ID = 1
COUNT_RECIPE = 6
EMAIL_BACKEND = os.environ.get('DJANGO_EMAIL_BACKEND')
EMAIL_HOST = os.environ.get('DJANGO_EMAIL_HOST')
EMAIL_HOST_USER = os.environ.get('DJANGO_EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = os.environ.get('DJANGO_EMAIL_HOST_PASSWORD')
EMAIL_PORT = os.environ.get('DJANGO_EMAIL_PORT')
EMAIL_USE_TLS = os.environ.get('DJANGO_EMAIL_USE_TLS')
DEFAULT_FROM_EMAIL = os.environ.get('DJANGO_DEFAULT_FROM_EMAIL')
PAG_COUNT = '6'
GLOBAL_SETTINGS = {
'PAG_COUNT': 6
}
| [
"[email protected]"
]
| |
980187db1840de06b15442e3e1a14db18561b1c4 | 2b9af5e429e647bd0439b874edc78695b1f94977 | /weather_checker.py | 3611b3afd1d47563bcf8f568b3de8c92f917641c | []
| no_license | ninjaboy/weather_spam | 23e26932fc0232ce07e892c0fa56a86313edd1a7 | d9f6126085222dc6a339c8c7b058bf45ba5295a7 | refs/heads/master | 2021-01-01T18:37:46.338635 | 2014-07-23T12:09:12 | 2014-07-23T12:09:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,169 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import MySQLdb
import csv
import smtplib, os
import time
import re
import urllib2
import json
from datetime import *
from email.MIMEMultipart import MIMEMultipart
from email.MIMEBase import MIMEBase
from email.MIMEText import MIMEText
from email.Utils import COMMASPACE, formatdate
from email import Encoders
def send_mail(send_from, send_to, subject, text, files=[], server="localhost"):
assert type(send_to)==list
assert type(files)==list
msg = MIMEMultipart('alternative')
msg['From'] = send_from
msg['To'] = COMMASPACE.join(send_to)
msg['Date'] = formatdate(localtime=True)
msg['Subject'] = subject
msg['Content-Type'] = "text/html; charset=utf-8"
    msg.attach(MIMEText(text.encode('utf-8'), 'plain', 'utf-8'))
for f in files:
part = MIMEBase('application', "octet-stream")
part.set_payload( open(f,"rb").read() )
Encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="%s"' % os.path.basename(f))
msg.attach(part)
smtp = smtplib.SMTP(server)
smtp.sendmail(send_from, send_to, msg.as_string())
smtp.close()
print "Email sent\n"
def get_json(url):
json_text = urllib2.urlopen(url).read()
return json_text
def get_weather_forecast(json_text,current_date):
# fill in default weather forecast
day_forecast = []
for h in range(0,24):
day_forecast.append({"temp":0,"rain":0})
# load weather from json
weather_json = json.loads(json_text)
# fill in actual weather forecast
for i in range(len(weather_json['list'])):
date_value = datetime.strptime(weather_json['list'][i]['dt_txt'], "%Y-%m-%d %H:%M:%S")
if date_value.date() == current_date:
hour = date_value.hour
try:
rain = weather_json['list'][i]['rain']
except:
rain = 0
temp = weather_json['list'][i]['main']['temp']
day_forecast[hour] = {"temp":temp,"rain":rain}
if hour < 23:
day_forecast[hour+1] = day_forecast[hour]
day_forecast[hour+2] = day_forecast[hour]
return day_forecast
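# (Added illustration) The parsing above assumes each entry in the OpenWeatherMap
# forecast "list" roughly has the shape below; the field names come from the
# accesses in get_weather_forecast / is_good_to_ride, the values are made up:
SAMPLE_FORECAST_ENTRY = {
    "dt_txt": "2014-07-24 09:00:00",
    "main": {"temp": 17.3},
    "rain": {"3h": 0.12},
}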
def is_good_to_ride(weather_forecast,ride_hours,allowed_temp,allowed_rain):
good_to_ride = 0
average_temp = 0.
average_rain = 0.
for i in ride_hours:
try:
average_temp = average_temp + weather_forecast[i]["temp"]
average_rain = average_rain + weather_forecast[i]["rain"]["3h"]
except:
average_rain = average_rain
# debug
# print weather_forecast[i]["temp"]
average_temp = average_temp / len(ride_hours)
#print average_rain
#print average_temp
if (average_temp >=allowed_temp) and (average_rain <= allowed_rain):
good_to_ride = 1
return good_to_ride
if __name__ == "__main__":
url = "http://api.openweathermap.org/data/2.5/forecast?q=Minsk&units=metric"
forecast = get_weather_forecast(get_json(url),date.today() + timedelta(days = 1))
if is_good_to_ride(forecast,[7,8,9,18,19,20],14,0.25):
is_good_to_ride_text = 'Да, езжай, пацан! БВК!!'
else:
is_good_to_ride_text = 'Не суйся, дружище. Сиди дома, попивай горячий шоколад с маффинами'
send_mail("[email protected]",["[email protected]","[email protected]"],"Weather",is_good_to_ride_text,[])
| [
"[email protected]"
]
| |
81bade869b8bdc4b03be55e6fd7c30e787a314e7 | c7df38a3ab879c0254a61f35450ea3951991658e | /sim_result/parse_opcode_stat.py | 8900fc9dfd416d22873aa3e8f74e68b31abc6cd5 | []
| no_license | minhhn2910/cs6282_project | 06eaaa8973cf8cfe2e791e8d82ea85d63ff34a42 | 77286779283452c3a226a45a8bda33864402b3cf | refs/heads/master | 2016-09-12T23:40:26.075536 | 2016-04-18T17:12:41 | 2016-04-18T17:12:41 | 56,526,019 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 467 | py | #!/usr/bin/python
# this file calculates the total number of instructions per warp
import sys
def main(argv):
#argv[0] is log file name
f = open(argv[0], "r")
sum = 0
for line in f:
args = line.split()
sum += int(args[1])
print "from log " + str(sum)
#argv[1] is debug file
f1 = open (argv[1], "r")
sum_debug = 0
for line in f1:
if "is_fp" in line:
sum_debug += 1
print "from debug " + str(sum_debug)
if __name__ == '__main__':
main(sys.argv[1:])
| [
"[email protected]"
]
| |
0c959bb906d3c3b2c695aa535eb404b7f8e52c55 | c795ec7f77219892183a1222fb51b8be2e754944 | /multiverse server/multiverse-server/multiverse/config/mv_fantasy/ability_db.py | e25cedb2bc832ac0df1d59a2590ebf554a5d19c1 | [
"MIT"
]
| permissive | radtek/MultiverseClientServer | 89d9a6656953417170e1066ff3bd06782305f071 | b64d7d754a0b2b1a3e5acabd4d6ebb80ab1d9379 | refs/heads/master | 2023-01-19T04:54:26.163862 | 2020-11-30T04:58:30 | 2020-11-30T04:58:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,874 | py | #
# The Multiverse Platform is made available under the MIT License.
#
# Copyright (c) 2012 The Multiverse Foundation
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
#
#
from java.lang import *
from java.util import *
from multiverse.mars import *
from multiverse.mars.objects import *
from multiverse.mars.core import *
from multiverse.mars.events import *
from multiverse.mars.util import *
from multiverse.mars.effects import *
from multiverse.mars.abilities import *
from multiverse.server.math import *
from multiverse.server.events import *
from multiverse.server.objects import *
from multiverse.server.engine import *
True=1
False=0
effect = HealEffect("heal effect")
effect.setMinInstantHeal(100)
effect.setMaxInstantHeal(100)
Mars.EffectManager.register(effect.getName(), effect)
effect = StunEffect("stun effect")
effect.setDuration(7000)
Mars.EffectManager.register(effect.getName(), effect)
effect = StatEffect("armor effect")
effect.setDuration(15000)
effect.setStat("armor", 20)
Mars.EffectManager.register(effect.getName(), effect)
effect = TeachAbilityEffect("teach heal effect")
effect.setAbilityName("heal")
Mars.EffectManager.register(effect.getName(), effect)
effect = TeachAbilityEffect("teach stun effect")
effect.setAbilityName("stun")
Mars.EffectManager.register(effect.getName(), effect)
effect = TeachAbilityEffect("teach armor effect")
effect.setAbilityName("armor")
Mars.EffectManager.register(effect.getName(), effect)
effect = TeachAbilityEffect("teach fireball effect")
effect.setAbilityName("fireball")
Mars.EffectManager.register(effect.getName(), effect)
effect = DamageEffect("poison effect")
effect.isPeriodic(1)
effect.isPersistent(1)
effect.setMinPulseDamage(12)
effect.setMaxPulseDamage(12)
effect.setNumPulses(10)
effect.setDuration(50000)
effect.setDamageType("Poison")
Mars.EffectManager.register(effect.getName(), effect)
effect = HealEffect("health regen effect")
effect.setHealProperty("health")
effect.setMinPulseHeal(2)
effect.setMaxPulseHeal(2)
effect.isPersistent(True)
effect.isPeriodic(True)
effect.setDuration(1000000)
effect.setNumPulses(500)
Mars.EffectManager.register(effect.getName(), effect)
effect = HealEffect("mana regen effect")
effect.setHealProperty("mana")
effect.setMinPulseHeal(2)
effect.setMaxPulseHeal(2)
effect.isPersistent(True)
effect.isPeriodic(True)
effect.setDuration(1000000)
effect.setNumPulses(500)
Mars.EffectManager.register(effect.getName(), effect)
effect = DamageEffect("fireball effect")
effect.setDamageProperty("health")
effect.setMinInstantDamage(40)
effect.setMaxInstantDamage(60)
effect.setDamageType("Fire")
Mars.EffectManager.register(effect.getName(), effect)
ability = EffectAbility("stun")
ability.setActivationCost(10)
ability.setCostProperty("mana")
ability.setMaxRange(10000)
ability.setTargetType(MarsAbility.TargetType.ENEMY)
ability.setActivationEffect(Mars.EffectManager.get("stun effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("armor")
ability.setActivationCost(30)
ability.setCostProperty("mana")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("armor effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
Mars.AbilityManager.register(ability.getName(), ability)
healCastingEffect = CoordinatedEffect("SpellCastingEffect")
healCastingEffect.sendSourceOid(True)
healCastingEffect.putArgument("castingTime", Integer(3000))
healCastingEffect.putArgument("decalTexture", "eight-hearts.png")
healTargetEffect = CoordinatedEffect("SpellTargetEffect")
healTargetEffect.sendTargetOid(True)
fireballCastingEffect = CoordinatedEffect("SpellCastingEffect")
fireballCastingEffect.sendSourceOid(True)
fireballCastingEffect.putArgument("castingTime", Integer(1500))
fireballCastingEffect.putArgument("decalTexture", "fire_ring_decal.dds")
fireballTargetEffect = CoordinatedEffect("MvFantasyFireball")
fireballTargetEffect.sendSourceOid(True)
fireballTargetEffect.sendTargetOid(True)
attackEffect = CoordinatedEffect("AttackEffect")
attackEffect.sendSourceOid(True)
attackEffect.sendTargetOid(True)
ability = EffectAbility("heal")
ability.setActivationTime(5000)
ability.setActivationCost(10)
ability.setCostProperty("mana")
ability.setMaxRange(20000)
ability.setIcon("Interface\FantasyWorldIcons\SPELL_heal_A")
ability.setTargetType(MarsAbility.TargetType.FRIEND)
ability.setActivationEffect(Mars.EffectManager.get("heal effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addCoordEffect(MarsAbility.ActivationState.ACTIVATING, healCastingEffect)
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, healTargetEffect)
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("heal potion")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("heal effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addCooldown(Cooldown("POTION", 15000))
ability.addReagent("Healing Potion")
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, healTargetEffect)
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("poison potion")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("poison effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addCooldown(Cooldown("POTION", 15000))
ability.addReagent("Poison Potion")
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, healTargetEffect)
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("heal scroll")
ability.setTargetType(MarsAbility.TargetType.FRIEND)
ability.setActivationEffect(Mars.EffectManager.get("heal effect"))
ability.setMaxRange(20000)
ability.setActivationTime(3000)
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addReagent("Healing Scroll")
ability.addCoordEffect(MarsAbility.ActivationState.ACTIVATING, healCastingEffect)
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, healTargetEffect)
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("teach self heal ability")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("teach heal effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addReagent("Tome of Heal")
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("teach self stun ability")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("teach stun effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("teach self armor ability")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("teach armor effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("teach self fireball ability")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("teach fireball effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addReagent("Tome of Fireball")
Mars.AbilityManager.register(ability.getName(), ability)
ability = CreateItemAbility("leather tanning")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationTime(3000)
ability.setItem("Finished Leather")
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, attackEffect)
ability.setCompleteSound("swordhit.wav")
ability.addReagent("Wolf Skin")
ability.addReagent("Wolf Skin")
Mars.AbilityManager.register(ability.getName(), ability)
ability = CreateItemAbility("make healing potion")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationTime(0)
ability.setItem("Healing Potion")
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, attackEffect)
ability.setCompleteSound("swordhit.wav")
Mars.AbilityManager.register(ability.getName(), ability)
ability = CreateItemAbility("make healing scroll")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationTime(0)
ability.setItem("Healing Scroll")
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, attackEffect)
ability.setCompleteSound("swordhit.wav")
Mars.AbilityManager.register(ability.getName(), ability)
ability = EffectAbility("fireball")
ability.setActivationTime(1500)
ability.setActivationCost(10)
ability.setCostProperty("mana")
ability.setMaxRange(40000)
ability.setIcon("Interface\FantasyWorldIcons\SPELL_fireball_A")
ability.setTargetType(MarsAbility.TargetType.ENEMY)
ability.setActivationEffect(Mars.EffectManager.get("fireball effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addCoordEffect(MarsAbility.ActivationState.ACTIVATING, fireballCastingEffect)
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, fireballTargetEffect)
Mars.AbilityManager.register(ability.getName(), ability)
effect = HealEffect("restore mana effect")
effect.setHealProperty("mana")
effect.setMinInstantHeal(100)
effect.setMaxInstantHeal(100)
Mars.EffectManager.register(effect.getName(), effect)
ability = EffectAbility("restore mana potion")
ability.setTargetType(MarsAbility.TargetType.SELF)
ability.setActivationEffect(Mars.EffectManager.get("restore mana effect"))
ability.addCooldown(Cooldown("GLOBAL", 1500))
ability.addCooldown(Cooldown("POTION", 15000))
ability.addReagent("Mana Potion")
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, healTargetEffect)
Mars.AbilityManager.register(ability.getName(), ability)
rand = Random()
class FantasyCombatEffect (MarsEffect):
def apply(self, state):
MarsEffect.apply(self, state)
params = state.getParams()
result = params.get("result")
baseDmg = params.get("damage")
target = state.getObject()
if (result == "miss"):
dmgType = "miss"
elif (result == "hit"):
dmgType = "Physical"
elif (result == "crit"):
dmgType = "critical Physical"
else:
Log.error("FantasyCombatEffect.apply: unknown result")
penetration = params.get("penetration")
armor = target.statGetCurrentValue("armor")
dmgEff = (1.0 * penetration) / (armor + penetration)
if (dmgEff < 0.25):
dmgEff = 0.25
dmg = dmgEff * baseDmg
dmg = int(dmg)
if (dmg == 0 and dmgType != "miss"):
dmg = 1
target.statModifyBaseValue("health", -dmg)
target.sendStatusUpdate()
Log.debug("FantasyCombatEffect.apply: sending damage message target=" + target.toString()
+ " caster=" + state.getCaster().toString() + " dmg=" + str(dmg) + " dmgType=" + dmgType)
Engine.getAgent().sendBroadcast(CombatClient.DamageMessage(target.getOwnerOid(),
state.getCaster().getOwnerOid(),
dmg, dmgType))
effect = FantasyCombatEffect("attack effect")
Mars.EffectManager.register(effect.getName(), effect)
class FantasyCombatAbility (CombatAbility):
def resolveHit(self, state):
params = HashMap()
caster = state.getObject()
target = state.getTarget()
defense = target.statGetCurrentValue("defense")
accuracy = caster.statGetCurrentValue("accuracy")
agility = caster.statGetCurrentValue("agility")
atkPower = caster.statGetCurrentValue("attack power")
atkDelay = caster.getAttackDelay()
missChance = 0.2 + (defense-accuracy)/1000.0
if (missChance < 0.05):
missChance = 0.05
critChance = missChance + 0.05 + agility/1000.0
roll = rand.nextFloat()
bonusDmg = (atkPower * atkDelay) / 10000.0
baseWeapDmg = caster.getProperty("weaponBaseDmg")
varWeapDmg = caster.getProperty("weaponVarDmg")
dmg = bonusDmg + baseWeapDmg + rand.nextFloat() * varWeapDmg
if (roll < missChance):
dmg = 0
params.put("result", "miss")
elif (roll < critChance):
dmg *= 1.5
params.put("result", "crit")
else:
params.put("result", "hit")
params.put("damage", int(dmg))
penetration = caster.statGetCurrentValue("offense skill")
params.put("penetration", penetration)
return params
ability = FantasyCombatAbility("attack ability")
ability.setMaxRange(5000)
ability.setTargetType(MarsAbility.TargetType.ENEMY)
ability.setActivationEffect(Mars.EffectManager.get("attack effect"))
ability.addCoordEffect(MarsAbility.ActivationState.COMPLETED, attackEffect)
Mars.AbilityManager.register(ability.getName(), ability)
| [
"[email protected]"
]
| |
f0b0fba29da062733df81c727240a5d0ffa7afab | 8da1d25e1ecaeade49723207c9185c920da21040 | /HpotData.py | a7dd81a495fd65319f7a91ae422ca4be4b8275df | []
| no_license | batekmc/honeypot | fbaa7f78263e750a72f9d81057f1c5bfafbfcdd4 | ae07c622215dd5a9ee1eec345f45679db15cce67 | refs/heads/master | 2020-04-05T23:20:04.028352 | 2015-07-10T10:11:25 | 2015-07-10T10:11:25 | 32,019,518 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 404 | py |
class HpotData:
def __init__(self, ip = None, mac=None, icmp=None, tcp=None,
tcpServices = None, udpServices = None):
self.ip = ip
self.mac = mac
self.icmp = icmp#dafault behavior
self.tcp = tcp#dafault behavior
self.tcpServices = tcpServices#open ports for services
self.udpServices = udpServices#open ports for services
| [
"[email protected]"
]
| |
cd421d090098e1023976fe2c80a704255f704709 | f22d191a5deb208f1b730667cd5b449df2b8e134 | /source/beira_app.py | f8ccbde2593ad37b1715e8dfcb6281313307477d | []
| no_license | WilsonDS/Beira | 10f104ac592f3c9f455158df4cba0fd4d72e27b3 | 0c5a1240427632ac511e2f40bf86e13d11e2c689 | refs/heads/master | 2020-12-18T20:09:45.726852 | 2020-01-22T05:55:46 | 2020-01-22T05:55:46 | 235,508,149 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,883 | py | import os
import sys
from tkinter import Tk
from tkinter import filedialog
from beira.main import beira_class
from aerodata.aerodata import aerodata_class
from TAC2Input.tac2_main import main_class
from IHBInput.ihb_main import ihb_class
if __name__ == "__main__":
if len(sys.argv) == 2:
inputFile = sys.argv[1]
if not os.path.exists(inputFile):
sys.exit("\n/!\\ '%s' not available. Execution stopped." %
(inputFile))
else:
inputFile = 'beira.dat'
if not os.path.exists(inputFile):
root = Tk()
root.overrideredirect(1)
root.withdraw()
inputFile = filedialog.askopenfilename(
parent=root, title='Choose the Beira input file')
    # Initialize Beira Class
try:
program = beira_class(inputFile)
program.run()
except:
print('Exit with Errors -- Beira Class')
    # Initialize Aerodata Class
try:
coordinatesFiles = os.listdir('C:\\Repo\\tools_dev\\Beira\\test\\V136_Bespoke')
for i in range(0,len(coordinatesFiles)):
coordinatesFiles[i] = os.path.join('C:\\Repo\\tools_dev\\Beira\\test\\V136_Bespoke', coordinatesFiles[i] )
aerodata = aerodata_class()
derotatedCoordinates = aerodata.derotate(coordinatesFiles)
print(derotatedCoordinates)
except:
print('Exit with Errors -- Aerodata Class')
    # Pass the TAC2 inputs to the Aerotex software and get the TAC2 outputs
try:
main_class(inputFile,derotatedCoordinates)
except:
print('Exit with Errors -- TAC2 Input Class')
    # Read the TAC2 outputs, modify the file, pass it to the IHB software and get the IHB outputs
try:
ihb_class(inputFile)
except:
        print('Exit with Errors -- IHB Class') | [
"[email protected]"
]
| |
67fc115da063c9287e6ada76e5e4d1b617f534dd | 1676168244eed1c5610b2c1c38f692f89990b112 | /part3-python/Bigdata/ComStat_v0.15.py | 88aa905773aed8169fd5bcd5f19bd774c6f5136a | []
| no_license | gtpgg1013/AI_docs | 351e83f986d66224c82fff2de944753c98336d03 | 43f8eed8b2732314bd40ed65e1d7eb44dd28fc04 | refs/heads/master | 2022-12-09T17:32:02.992554 | 2019-11-20T09:03:56 | 2019-11-20T09:03:56 | 182,927,565 | 1 | 0 | null | 2022-12-08T06:50:23 | 2019-04-23T03:54:56 | Jupyter Notebook | UTF-8 | Python | false | false | 26,630 | py | import matplotlib
matplotlib.use("TkAgg")
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
from tkinter import *
import matplotlib.animation as animation
import psutil
import threading
import time
import math
import datetime
import platform
from tkinter import font
import cv2
import numpy as np
from PIL import Image, ImageFilter, ImageEnhance, ImageOps
LARGE_FONT = ("Verdana", 12)
class SeaofBTCapp(Tk): # main application class inheriting from Tk
    def __init__(self, *args, **kwargs):
        Tk.__init__(self, *args, **kwargs) # also run the parent Tk initializer
        Tk.iconbitmap(self) # window icon
        Tk.title(self, "Comstat v0.15") # set the window title
        Tk.wm_geometry(self,"1180x590")
        Tk.wm_resizable(self, width=False, height=False)
        container = Frame(self) # create the container frame
        container.pack(side="top", fill="both", expand=True) # attach the container
        container.grid_rowconfigure(0, weight=1) # row configuration
        container.grid_columnconfigure(0, weight=1) # column configuration
        self.frames = {} # declare the frames dictionary field
        for F in (StartPage, PageOne, PageTwo, PageThree, PageFour):
            frame = F(container, self)
            # print(frame)
            self.frames[F] = frame # store the frame in the dictionary
            frame.grid(row=0, column=0, sticky="nsew")
        self.show_frame(StartPage) # show the start page
self.cpuflag = False
self.tottime = 0
self.limit = 80.
import smtplib
from email.mime.text import MIMEText
from datetime import datetime
def cpuSendEmail():
timer = threading.Timer(1, cpuSendEmail)
tmptime = psutil.cpu_percent()
if tmptime > self.limit:
# print(tmptime)
self.cpuflag = True
if tmptime > self.limit and self.cpuflag == True:
self.tottime += 1
else:
self.tottime = 0
self.cpuflag = False
if self.tottime > 4:
try:
                    print("over, send an email to the user")
                    ############ send the warning email ###############
s = smtplib.SMTP('smtp.gmail.com',587)
s.starttls()
s.login('[email protected]','')
msg = MIMEText('CPU 수치가 '+str(self.limit)+"을 초과한 지 "+str(self.tottime)+"초 되었습니다."
"컴퓨터 사용량을 확이핸주세요.")
msg['Subject'] = "현재시각: "+str(datetime.now()) + "CPU 사용량 임계점 초과 경고 메일"
s.sendmail("[email protected]","[email protected]",msg.as_string())
s.quit()
                    ############ email sent ############
                    self.cpuflag = False
except:
pass
timer.start()
cpuSendEmail()
    def show_frame(self, cont): # method that displays a page
        frame = self.frames[cont] # look up the requested frame in the dictionary
        frame.tkraise() # raise this frame above the others when several exist
class StartPage(Frame): # first page; inherits Frame, so widgets created with self keep attaching to it
    def __init__(self, parent, controller): # subclass of Frame
        Frame.__init__(self, parent)
        bigFont = font.Font(self, family='Courier',size=40,weight='bold')
        label = Label(self, text="COM_STAT v0.15", font=bigFont, height=1) # create the title label
label.pack(pady=50, padx=10)
button = Button(self, text="Static Indications",
command=lambda: controller.show_frame(PageOne))
button.pack()
button2 = Button(self, text="CPU Times",
command=lambda: controller.show_frame(PageTwo))
button2.pack()
button3 = Button(self, text="CPU Stats",
command=lambda: controller.show_frame(PageThree))
button3.pack()
button4 = Button(self, text="CPU & RAM Usage",
command=lambda: controller.show_frame(PageFour))
button4.pack()
mName = Label(self, text=platform.machine(), font=LARGE_FONT)
dName = Label(self, text=platform.node(), font=LARGE_FONT)
pName = Label(self, text=platform.platform(), font=LARGE_FONT)
procName = Label(self, text=platform.processor(), font=LARGE_FONT)
cName = Label(self, text=platform.python_compiler(), font=LARGE_FONT)
pVer = Label(self, text="Python version : "+platform.python_branch(), font=LARGE_FONT)
mName.pack(side=BOTTOM,expand=YES)
dName.pack(side=BOTTOM,expand=YES)
pName.pack(side=BOTTOM,expand=YES)
procName.pack(side=BOTTOM,expand=YES)
cName.pack(side=BOTTOM,expand=YES)
pVer.pack(side=BOTTOM,expand=YES)
class PageOne(Frame):
def __init__(self, parent, controller):
Frame.__init__(self, parent)
label = Label(self, text="Static Indications", font=LARGE_FONT)
label.pack(pady=10, padx=10)
button1 = Button(self, text="HomePage",
command=lambda: controller.show_frame(StartPage))
button1.pack()
button2 = Button(self, text="CPU Times",
command=lambda: controller.show_frame(PageTwo))
button2.pack()
button3 = Button(self, text="CPU Status",
command=lambda: controller.show_frame(PageThree))
button3.pack()
# Label
cpuFreq_c = Label(self, text="CPUFreq - current : "+str(psutil.cpu_freq().current))
cpuFreq_mx = Label(self, text="CPUFreq - max : " + str(psutil.cpu_freq().max))
cpuFreq_min = Label(self, text="CPUFreq - min : " + str(psutil.cpu_freq().min))
hard_readCount = Label(self, text="Hard - readcount : " + str(psutil.disk_io_counters().read_count>>20))
hard_writeCount = Label(self, text="Hard - writecount : " + str(psutil.disk_io_counters().write_count>>20))
hard_readBytes = Label(self, text="Hard - readbytes : " + str(psutil.disk_io_counters().read_bytes>>20))
hard_writeBytes = Label(self, text="Hard - writebytes : " + str(psutil.disk_io_counters().write_bytes>>20))
hard_readTime = Label(self, text="Hard - read_time : " + str(psutil.disk_io_counters().read_time))
hard_writeTime = Label(self, text="Hard - write_time : "+str(psutil.disk_io_counters().write_time))
netAddr_fam_MAC = Label(self, text="Network Address - family MAC : " + str(psutil.net_if_addrs()['이더넷'][0][1]))
netAddr_IP = Label(self, text="Network Address - IP : " + str(psutil.net_if_addrs()['이더넷'][1][1]))
netAddr_netmask = Label(self, text="Network Address - netmask : " + str(psutil.net_if_addrs()['이더넷'][1][2]))
memory_total = Label(self, text="Memory - total : "+str(psutil.virtual_memory().total))
memory_available = Label(self, text="Memory - available : "+str(psutil.virtual_memory().available))
dt = datetime.datetime.fromtimestamp(psutil.boot_time()).strftime("%Y-%m-%d %H:%M:%S")
bootTime = Label(self, text="Boot Time : "+str(dt))
UserName = Label(self, text="User name : "+str(psutil.users()[0].name))
# pack
cpuFreq_c.pack()
cpuFreq_mx.pack()
cpuFreq_min.pack()
hard_readCount.pack()
hard_writeCount.pack()
hard_readBytes.pack()
hard_writeBytes.pack()
hard_writeTime.pack()
hard_writeTime.pack()
netAddr_fam_MAC.pack()
netAddr_IP.pack()
netAddr_netmask.pack()
# netAddr_broadcast.pack()
# netAddr_ptp.pack()
memory_total.pack()
memory_available.pack()
bootTime.pack()
UserName.pack()
class PageTwo(Frame):
def __init__(self, parent, controller):
Frame.__init__(self, parent)
label = Label(self, text="CPU times", font=LARGE_FONT)
label.pack(pady=10, padx=10)
button1 = Button(self, text="HomePage",
command=lambda: controller.show_frame(StartPage))
button1.pack()
button2 = Button(self, text="CPU status",
command=lambda: controller.show_frame(PageThree))
button2.pack()
button3 = Button(self, text="CPU & RAM",
command=lambda: controller.show_frame(PageFour))
button3.pack()
canvasforPic = Canvas(self)
cpuTime1 = Label(canvasforPic, text="CPUTime-user: " + str(psutil.cpu_times().user))
cpuTime2 = Label(canvasforPic, text="CPUTime-system: " + str(psutil.cpu_times().system))
cpuTime3 = Label(canvasforPic, text="CPUTime-idle: " + str(psutil.cpu_times().idle))
cpuTime4 = Label(canvasforPic, text="CPUTime-interrupt: " + str(psutil.cpu_times().interrupt))
ylim = 0
tcpuTimeInd = psutil.cpu_times()
tcpuTimeList = [tcpuTimeInd.user, tcpuTimeInd.system, tcpuTimeInd.idle, tcpuTimeInd.interrupt]
for tcpu in tcpuTimeList:
if ylim < tcpu:
ylim = tcpu
ylim *= 0.1
cpuTime1.pack()
cpuTime2.pack()
cpuTime3.pack()
cpuTime4.pack()
canvasforPic.pack(side=RIGHT)
        # current time used below
        nowtime = 0
        def refreshHWIndicators(): # refresh the values that change every second
            # global cpuUser, cpuSys, cpuI, cpuC
            # global x, plotCpuUser, plotCpuSys, plotCpuI, plotCpuC, t # these are used here
try:
timer = threading.Timer(1, refreshHWIndicators)
cpuTime1.configure(text="CPUTime-user: " + str(psutil.cpu_times().user))
cpuTime2.configure(text="CPUTime-system: " + str(psutil.cpu_times().system))
cpuTime3.configure(text="CPUTime-idle: " + str(psutil.cpu_times().idle))
cpuTime4.configure(text="CPUTime-interrupt: " + str(psutil.cpu_times().interrupt))
nowtime = time.time()
timer.start()
except:
pass
refreshHWIndicators()
################################################################
################## graph section starts here ###################
################################################################
# first step: plot the four CPU time values dynamically on a single chart
f = Figure(figsize=(5, 5), dpi=100)
# x = np.arange(0, nowtime ,0.01)
# x = np.arange(0, 2 * np.pi, 0.01)
canvas = FigureCanvasTkAgg(f, self)
canvas.get_tk_widget()
ax = f.add_subplot(111)
ax.set_title("CPU time")
ax.set_ylim(0,ylim *1.2)
ax.set_xlim(0,5.0)
ax.grid(True)
ax.set_ylabel("CPU time")
ax.set_xlabel("Time")
# Data Placeholders
cpuUser = np.zeros(0)
cpuSys = np.zeros(0)
cpuI = np.zeros(0)
cpuC = np.zeros(0)
t = np.zeros(0)
# set plots
plotCpuUser, = ax.plot(t, cpuUser, 'b-', label="CPU User")
plotCpuSys, = ax.plot(t, cpuSys, 'g-', label="CPU System")
plotCpuI, = ax.plot(t, cpuI, 'r-', label="CPU Idle")
plotCpuC, = ax.plot(t, cpuC, 'd-', label="CPU Interrupt")
ax.legend([plotCpuUser, plotCpuSys, plotCpuI, plotCpuC],\
[plotCpuUser.get_label(), plotCpuSys.get_label(), plotCpuI.get_label(), plotCpuC.get_label()])
xmin = 0.0
xmax = 5.0
x = 0.0
def updateData(self):
nonlocal cpuUser, cpuSys, cpuI, cpuC, ylim
nonlocal x, plotCpuUser, plotCpuSys, plotCpuI, plotCpuC, t # these are used here
# print(x)
cpuTimeInd = psutil.cpu_times()
cpuTimeList = [[cpuTimeInd.user], [cpuTimeInd.system], [cpuTimeInd.idle], [cpuTimeInd.interrupt]]
tmpCpuU = cpuTimeList[0][0] * 0.1
tmpCpuSys = cpuTimeList[1][0] * 0.1
tmpCpuI = cpuTimeList[2][0] * 0.1
tmpCpuC = cpuTimeList[3][0] * 0.1
# print(tmpCpuC)
cpuUser = np.append(cpuUser,tmpCpuU)
cpuSys = np.append(cpuSys,tmpCpuSys)
cpuI = np.append(cpuI,tmpCpuI)
cpuC = np.append(cpuC,tmpCpuC)
t = np.append(t,x)
x += 0.05
plotCpuUser.set_data(t, cpuUser)
plotCpuSys.set_data(t, cpuSys)
plotCpuI.set_data(t, cpuI)
plotCpuC.set_data(t, cpuC)
if x >= xmax - 1.00:
plotCpuUser.axes.set_xlim(x - xmax +1.0, x+1.0)
return plotCpuUser
# line, = ax.plot(x, np.sin(x))
# ax = f.add_subplot(111)
# line, = ax.plot(x, np.sin(x))
ani = animation.FuncAnimation(f, updateData, interval=25, blit=False, frames=200, repeat=True)
canvas.draw()
canvas.get_tk_widget().pack(side=LEFT, fill=BOTH, expand=True)
# toolbar = NavigationToolbar2Tk(canvas, self)
# toolbar.update()
canvas._tkcanvas.pack(side=TOP, fill=BOTH, expand=True)
class PageThree(Frame):
def __init__(self, parent, controller):
Frame.__init__(self, parent)
label = Label(self, text="CPU Stats", font=LARGE_FONT)
label.pack(pady=10, padx=10)
button1 = Button(self, text="HomePage",
command=lambda: controller.show_frame(StartPage))
button1.pack()
button2 = Button(self, text="CPU Times",
command=lambda: controller.show_frame(PageTwo))
button2.pack()
button3 = Button(self, text="CPU & RAM",
command=lambda: controller.show_frame(PageFour))
button3.pack()
canvasforPic = Canvas(self)
cpustats1 = Label(canvasforPic, text="Ctx_switches: " + str(psutil.cpu_stats().ctx_switches>>20))
cpustats2 = Label(canvasforPic, text="interrupts: " + str(psutil.cpu_stats().interrupts>>20))
cpustats3 = Label(canvasforPic, text="syscalls: " + str(psutil.cpu_stats().syscalls>>20))
cpustats1.pack()
cpustats2.pack()
cpustats3.pack()
canvasforPic.pack(side=RIGHT)
ylim = 0
tcpuTimeInd = psutil.cpu_stats()
tcpuTimeList = [tcpuTimeInd.ctx_switches>>20, tcpuTimeInd.interrupts>>20, tcpuTimeInd.syscalls>>20]
for tcpu in tcpuTimeList:
if ylim < tcpu:
ylim = tcpu
# current time, used further below
nowtime = 0
def refreshHWIndicators(): # refresh the values that change every second
# global cpuUser, cpuSys, cpuI, cpuC
# global x, plotCpuUser, plotCpuSys, plotCpuI, plotCpuC, t # these are used here
try:
timer = threading.Timer(1, refreshHWIndicators)
cpustats1.configure(text="Ctx_switches: " + str(psutil.cpu_stats().ctx_switches>>20))
# print(str(psutil.cpu_stats().ctx_switches))
cpustats2.configure(text="interrupts: " + str(psutil.cpu_stats().interrupts>>20))
cpustats3.configure(text="syscalls: " + str(psutil.cpu_stats().syscalls>>20))
nowtime = time.time()
timer.start()
except:
pass
refreshHWIndicators()
################################################################
################## graph section starts here ###################
################################################################
# first step: plot the four CPU time values dynamically on a single chart
f = Figure(figsize=(5, 5), dpi=100)
# x = np.arange(0, nowtime ,0.01)
# x = np.arange(0, 2 * np.pi, 0.01)
canvas = FigureCanvasTkAgg(f, self)
canvas.get_tk_widget()
ax = f.add_subplot(111)
ax.set_title("CPU Stat")
ax.set_ylim(0,ylim*2)
ax.set_xlim(0,5.0)
ax.grid(True)
ax.set_ylabel("CPU Stat")
ax.set_xlabel("Time")
# Data Placeholders
cpuC = np.zeros(0)
cpuI = np.zeros(0)
cpuS = np.zeros(0)
t = np.zeros(0)
# set plots
plotCpuCtx, = ax.plot(t, cpuC, 'b-', label="Ctx switches")
plotCpuint, = ax.plot(t, cpuI, 'g-', label="interrupts")
plotCpuSys, = ax.plot(t, cpuS, 'r-', label="syscalls")
ax.legend([plotCpuCtx, plotCpuSys, plotCpuint],\
[plotCpuCtx.get_label(), plotCpuSys.get_label(), plotCpuint.get_label()])
xmin = 0.0
xmax = 5.0
x = 0.0
def updateData(self):
nonlocal cpuC, cpuS, cpuI, ylim
nonlocal x, plotCpuCtx, plotCpuSys, plotCpuint, t # these are used here
# print(x)
cpuTimeInd = psutil.cpu_stats()
cpuTimeList = [[cpuTimeInd.ctx_switches], [cpuTimeInd.interrupts], [cpuTimeInd.syscalls]]
tmpCpuC = cpuTimeList[0][0]>>20
tmpCpuI = cpuTimeList[1][0]>>20
tmpCpuS = cpuTimeList[2][0]>>20
# print(tmpCpuC)
cpuC = np.append(cpuC,tmpCpuC)
cpuI = np.append(cpuI,tmpCpuI)
cpuS = np.append(cpuS,tmpCpuS)
t = np.append(t,x)
x += 0.05
plotCpuCtx.set_data(t, cpuC)
plotCpuint.set_data(t, cpuI)
plotCpuSys.set_data(t, cpuS)
if x >= xmax - 1.00:
plotCpuCtx.axes.set_xlim(x - xmax +1.0, x+1.0)
return plotCpuCtx
# line, = ax.plot(x, np.sin(x))
# ax = f.add_subplot(111)
# line, = ax.plot(x, np.sin(x))
ani = animation.FuncAnimation(f, updateData, interval=25, blit=False, frames=200, repeat=True)
canvas.draw()
canvas.get_tk_widget().pack(side=LEFT, fill=BOTH, expand=True)
# toolbar = NavigationToolbar2Tk(canvas, self)
# toolbar.update()
canvas._tkcanvas.pack(side=TOP, fill=BOTH, expand=True)
class PageFour(Frame):
def __init__(self, parent, controller):
Frame.__init__(self, parent)
label = Label(self, text="CPU Stats", font=LARGE_FONT)
label.pack(pady=10, padx=10)
button1 = Button(self, text="HomePage",
command=lambda: controller.show_frame(StartPage))
button1.pack()
button2 = Button(self, text="CPU Times",
command=lambda: controller.show_frame(PageTwo))
button2.pack()
button3 = Button(self, text="CPU Status",
command=lambda: controller.show_frame(PageThree))
button3.pack()
# canvasforPic = Canvas(self)
#########################################################################
############# the picture will be drawn onto this canvas ################
#########################################################################
inImage, outImage = None, None
inH, inW, outH, outW = [0] * 4
photo, cvPhoto = None, None
paper = None
canvasforPic = None
# canvasforPic = Canvas(self)
def loadImageColor(self,fnameOrCvData):
nonlocal paper, inImage, outImage, inH, inW, outH, outW
nonlocal photo, cvPhoto, canvasforPic
#######################################
### copy the PIL object --> OpenCV object ###
## think carefully about why this works!!
if type(fnameOrCvData) == str: # a file name was passed in
cvData = cv2.imread(fnameOrCvData) # file --> CV data
else:
cvData = fnameOrCvData # this used to fail because the line was not indented
cvPhoto = cv2.cvtColor(cvData, cv2.COLOR_BGR2RGB) # the important CV object # this is a numpy array
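# cv2.imread loads images in BGR channel order; converting to RGB here lets Image.fromarray below interpret the array correctly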
# print(cvPhoto)
photo = Image.fromarray(cvPhoto)
# print(type(photo))
inW, inH = photo.size # (photo.width, photo.height)
outW, outH = inW, inH
# create the canvas
# this only works when self is passed in
canvasforPic = Canvas(self, height=inH, width=inW)
#######################################
inImage = np.array(photo)
outImage = inImage.copy()
# print(outImage)
def displayImageColor():
nonlocal paper, inImage, outImage, inH, inW, outH, outW
nonlocal cvPhoto, canvasforPic
VIEW_X, VIEW_Y = inW, inH
# print(VIEW_X)
## fixed view size
# compute the width/height ratio
paper = PhotoImage(height=outH, width=outW)
# paper = PhotoImage('CPU.PNG')
canvasforPic.create_image((outH // 2, outW // 2), image=paper, state='normal')
# print(outH)
import numpy
rgbStr = '' # accumulates the colour string for all pixels
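# each image row is encoded as one '{ #rrggbb ... }' group so the whole image can be pushed into the PhotoImage with a single put() call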
for i in numpy.arange(0, outH):
tmpStr = ''
for k in numpy.arange(0, outW):
i = int(i);
k = int(k)
r, g, b = outImage[i, k, R], outImage[i, k, G], outImage[i, k, B]
tmpStr += ' #%02x%02x%02x' % (r, g, b)
rgbStr += '{' + tmpStr + '} '
# print(rgbStr)
paper.put(rgbStr)
# print(paper)
inImage = outImage.copy()
cvPhoto = outImage.copy()
canvasforPic.pack(expand=1, anchor=CENTER)
# canvasforPic = Canvas(self, height=inH, width=inW)
loadImageColor(self,"CPU.PNG") # sets inImage, inH, inW, outH, outW
# print(canvasforPic)
# print(inImage)
print(type(outImage))
displayImageColor()
# canvasforPic.pack(expand=1, anchor=CENTER, side=RIGHT)
#########################################################################################
##################### end of the image-drawing section ##################################
#########################################################################################
#
cpuI = Label(canvasforPic, text="Cpu Usage percent: " + str(psutil.cpu_percent()))
ramI = Label(canvasforPic, text="Ram Usage percent: " + str(psutil.virtual_memory().percent))
cpuI.pack(side=BOTTOM)
ramI.pack(side=BOTTOM)
canvasforPic.pack(side=RIGHT)
ylim = 100
cpuRamList = [psutil.cpu_percent(), psutil.virtual_memory().percent]
# for cr in cpuRamList:
# if ylim < cr:
# ylim = cr
# current time, used further below
nowtime = 0
def refreshHWIndicators(): # refresh the values that change every second
# global cpuUser, cpuSys, cpuI, cpuC
# global x, plotCpuUser, plotCpuSys, plotCpuI, plotCpuC, t # these are used here
try:
timer = threading.Timer(1, refreshHWIndicators)
cpuI.configure(text="CPU Usage: " + str(psutil.cpu_percent()))
# print(str(psutil.cpu_stats().ctx_switches))
ramI.configure(text="RAM Usage: " + str(psutil.virtual_memory().percent))
nowtime = time.time()
timer.start()
except:
pass
refreshHWIndicators()
################################################################
################## graph section starts here ###################
################################################################
# first step: plot the four CPU time values dynamically on a single chart
f = Figure(figsize=(5, 5), dpi=100)
# x = np.arange(0, nowtime ,0.01)
# x = np.arange(0, 2 * np.pi, 0.01)
canvas = FigureCanvasTkAgg(f, self)
canvas.get_tk_widget()
ax = f.add_subplot(111)
ax.set_title("CPU & RAM Usage")
ax.set_ylim(0, ylim)
ax.set_xlim(0, 5.0)
ax.grid(True)
ax.set_ylabel("CPU & RAM Usage")
ax.set_xlabel("Time")
# Data Placeholders
cpu = np.zeros(0)
ram = np.zeros(0)
t = np.zeros(0)
# set plots
plotCpu, = ax.plot(t, cpu, 'b-', label="Cpu Usage")
plotRam, = ax.plot(t, ram, 'g-', label="Ram Usage")
ax.legend([plotCpu, plotRam], \
[plotCpu.get_label(), plotRam.get_label()])
xmin = 0.0
xmax = 5.0
x = 0.0
def updateData(self):
nonlocal cpu, ram
nonlocal x, plotCpu, plotRam, t # these are used here
# print(x)
cpuRamList = [[psutil.cpu_percent()], [psutil.virtual_memory().percent]]
tmpC = cpuRamList[0][0]
tmpR = cpuRamList[1][0]
# print(tmpCpuC)
cpu = np.append(cpu, tmpC)
ram = np.append(ram, tmpR)
t = np.append(t, x)
x += 0.05
plotCpu.set_data(t, cpu)
plotRam.set_data(t, ram)
if x >= xmax - 1.00:
plotCpu.axes.set_xlim(x - xmax + 1.0, x + 1.0)
return plotCpu
# line, = ax.plot(x, np.sin(x))
# ax = f.add_subplot(111)
# line, = ax.plot(x, np.sin(x))
ani = animation.FuncAnimation(f, updateData, interval=25, blit=False, frames=200, repeat=True)
canvas.draw()
canvas.get_tk_widget().pack(side=LEFT, fill=BOTH, expand=True)
# toolbar = NavigationToolbar2Tk(canvas, self)
# toolbar.update()
canvas._tkcanvas.pack(side=TOP, fill=BOTH, expand=True)
# global variables
R, G, B = 0, 1, 2 # global constants that make indexing the 3-D array easier
inImage, outImage = None, None # handled as numpy arrays from now on
inH, inW, outH, outW = [0] * 4
window, canvas, paper = None, None, None
filename = ""
panYN = False
sx, sy, ex, ey = [0] * 4
VIEW_X, VIEW_Y = 512, 512 # size shown on screen (for output)
# main code
app = SeaofBTCapp()
app.mainloop() | [
"[email protected]"
]
| |
914e5a276b7849b267a4458ca7c0afd16ec3f18e | 3f73ce74b6fdfb7966abb71a98f4986edd727c5f | /lib/config.py | 9d9e5784d61265a408685b6fae7a08e8e51d01e0 | [
"MIT"
]
| permissive | yuta-komura/amateras | 9c2efd310b18f159b1354864d65f9894ab93737f | cf8cc8fe0b5d8c382090fd1784a3ce96e6953157 | refs/heads/master | 2023-01-21T19:57:18.763894 | 2020-11-25T04:02:28 | 2020-11-25T04:02:28 | 297,432,974 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 695 | py | from enum import Enum
PROJECT_DIR = __file__.replace("/lib/config.py", "")
class HistoricalPrice(Enum):
TIME_FRAME = 60 # minutes
CHANNEL_WIDTH = 67
class DATABASE(Enum):
class TRADINGBOT(Enum):
HOST = "*********"
USER = "*********"
PASSWORD = "*********"
DATABASE = "*********"
class Bitflyer(Enum):
class Api(Enum):
KEY = "*********"
SECRET = "*********"
class DirPath(Enum):
PROJECT = PROJECT_DIR
class FilePath(Enum):
WARNING_MP3 = PROJECT_DIR + "/sound/WARNING.mp3"
ERROR_MP3 = PROJECT_DIR + "/sound/ERROR.mp3"
SYSTEM_LOG = PROJECT_DIR + "/log/system.log"
AA = PROJECT_DIR + "/document/AA.txt"
| [
"[email protected]"
]
| |
42bbeb6f9a188fa19f10abbf96f43c62426640bc | 8b6a7efd838b06e8aa2ee468c6fe1793386eeb81 | /NOT_IN_USE/WGAN.py | e7bb8052686631e9c6011bb532eb7cf3610c503e | []
| no_license | Yishaiaz/Deep_Learning_BGU_HW4 | 23f9f3c117c694d8c07ef5e5caebf1ec7eb42a31 | d9ea1130c6de5ae1b87441c66b9e10446e7870c9 | refs/heads/master | 2023-06-18T03:41:22.895899 | 2021-07-10T18:52:19 | 2021-07-10T18:52:19 | 370,263,595 | 0 | 0 | null | 2021-07-08T07:48:13 | 2021-05-24T07:21:44 | HTML | UTF-8 | Python | false | false | 14,228 | py | from typing import List
import numpy as np
import pandas as pd
import tensorflow as tf
from keras.callbacks import Callback
from sklearn.ensemble import RandomForestClassifier
from sklearn.preprocessing import OneHotEncoder
from tensorflow.keras import Model
from tensorflow.keras.layers import Dense, Concatenate, Dropout, BatchNormalization, LeakyReLU
from tensorflow.python.keras import Input, backend
from tensorflow.python.keras.constraints import Constraint
from tensorflow.python.keras.optimizer_v2.adam import Adam
from tensorflow.python.keras.optimizer_v2.rmsprop import RMSprop
from global_vars import LATENT_NOISE_SIZE, GENERATOR_LR, CRITIC_LR, CRITIC_DROPOUT, CRITIC_STEPS, GP_WEIGHT, \
CHECKPOINT_PREFIX, SEED
from preprocessing_utils import gather_numeric_and_categorical_columns
from utils import tsne
class ClipConstraint(Constraint):
# set clip value when initialized
def __init__(self, clip_value):
self.clip_value = clip_value
# clip model weights to hypercube
def __call__(self, weights):
return backend.clip(weights, -self.clip_value, self.clip_value)
# get the config
def get_config(self):
return {'clip_value': self.clip_value}
class WGAN(Model):
def __init__(self,
input_size: int,
columns_size: List[int],
num_classes: int,
critic_steps: int = CRITIC_STEPS,
gp_weight: float = GP_WEIGHT,
latent_noise_size: int = LATENT_NOISE_SIZE,
**kwargs):
super(WGAN, self).__init__()
self._num_classes = num_classes
self._columns_size = columns_size
self._input_size = input_size
self._critic_steps = critic_steps
self._gp_weight = gp_weight
self._latent_noise_size = latent_noise_size
self._generator_activation_function = kwargs.get('generator_activation_function', 'relu')
self._critic_activation_function = kwargs.get('critic_activation_function', LeakyReLU())
self._generator_lr = kwargs.get('generator_lr', GENERATOR_LR)
self._critic_lr = kwargs.get('critic_lr', CRITIC_LR)
self._critic_dropout = kwargs.get('critic_dropout', CRITIC_DROPOUT)
self.generator = self._build_generator()
self.critic = self._build_critic()
self.generator_optimizer = RMSprop(learning_rate=0.00005)#Adam(learning_rate=GENERATOR_LR, beta_1=0.5, beta_2=0.9)
self.critic_optimizer = RMSprop(learning_rate=0.00005)#Adam(learning_rate=CRITIC_LR, beta_1=0.5, beta_2=0.9)
self.critic_loss_fn = WGAN.critic_loss
self.generator_loss_fn = WGAN.generator_loss
self._compile()
def _compile(self):
super(WGAN, self).compile()
def _build_generator(self):
# label input
in_label = Input(shape=(1,))
# latent noise input
noise_input = Input(shape=(self._latent_noise_size,))
concat_input = Concatenate()([noise_input, in_label])
x = Dense(128, activation=self._generator_activation_function, kernel_initializer='he_uniform')(concat_input)
x = BatchNormalization()(x)
x = Dense(256, activation=self._generator_activation_function, kernel_initializer='he_uniform')(x)
x = BatchNormalization()(x)
x = Dense(512, activation=self._generator_activation_function, kernel_initializer='he_uniform')(x)
x = BatchNormalization()(x)
layers = []
for column_size in self._columns_size:
if column_size == 1:
layers.append(Dense(1, activation='tanh')(x))
else:
layers.append(Dense(column_size, activation='softmax')(x))
output = Concatenate()(layers)
generator = Model(inputs=[noise_input, in_label], outputs=output)
return generator
def _build_critic(self):
# label input
in_label = Input(shape=(1,))
# sample input
in_input = Input(shape=(self._input_size,))
concat_input = Concatenate()([in_input, in_label])
weights_constraint = ClipConstraint(0.01)
x = Dense(512, activation=self._critic_activation_function, kernel_initializer='he_uniform',
kernel_constraint=weights_constraint)(concat_input)
x = BatchNormalization()(x)
x = Dropout(self._critic_dropout)(x)
x = Dense(256, activation=self._critic_activation_function, kernel_initializer='he_uniform',
kernel_constraint=weights_constraint)(x)
x = BatchNormalization()(x)
x = Dropout(self._critic_dropout)(x)
x = Dense(128, activation=self._critic_activation_function, kernel_initializer='he_uniform',
kernel_constraint=weights_constraint)(x)
x = BatchNormalization()(x)
x = Dropout(self._critic_dropout)(x)
output = Dense(1, kernel_constraint=weights_constraint)(x)
critic = Model([in_input, in_label], output)
return critic
def gradient_penalty(self, batch_size, X_real, X_fake, labels):
"""
Calculates the gradient penalty.
This loss is calculated on interpolated data and added to the critic loss.
"""
# Get the interpolated samples
alpha = tf.random.normal([batch_size, 1], 0.0, 1.0)
diff = X_fake - X_real
interpolated = X_real + alpha * diff
#interpolated = (alpha * X_real) + ((1 - alpha) * X_fake)
with tf.GradientTape() as gp_tape:
gp_tape.watch(interpolated)
# 1. Get the critic output for this interpolated samples.
pred = self.critic([interpolated, labels], training=True)
# 2. Calculate the gradients w.r.t to this interpolated samples.
grads = gp_tape.gradient(pred, [interpolated])[0] # TODO
#grads = gp_tape.gradient(pred, interpolated) # TODO
# 3. Calculate the norm of the gradients.
norm = tf.sqrt(tf.reduce_sum(tf.square(grads), axis=[1])) # TODO
#norm = tf.norm(tf.reshape(grads, [tf.shape(grads)[0], -1]), axis=1)
gp = tf.reduce_mean((norm - 1.0) ** 2)
return gp
@staticmethod
def critic_loss(real_samples, fake_samples):
real_loss = tf.reduce_mean(real_samples)
fake_loss = tf.reduce_mean(fake_samples)
return fake_loss - real_loss
@staticmethod
def generator_loss(fake_samples):
return -tf.reduce_mean(fake_samples)
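# Together, critic_loss and generator_loss implement the WGAN objective: the critic maximises
# E[D(real)] - E[D(fake)] (by minimising its negation) and the generator minimises -E[D(fake)].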
@staticmethod
def _generate_labels(batch_size: int):
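# draws batch_size equally likely binary labels and encodes them as -1 / +1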
labels = tf.random.categorical(tf.math.log([[0.5, 0.5]]), batch_size).T
labels = tf.where(labels > 0, labels, -1)
return labels
def train_step(self, data):
if isinstance(data, tuple):
X_batch = data[0]
y_batch = data[1]
# Get the batch size
batch_size = tf.shape(X_batch)[0]
# Note: this implementation is based on https://keras.io/examples/generative/wgan_gp/ with minor changes
# For each batch, we are going to perform the
# following steps as laid out in the original paper:
# 1. Train the generator and get the generator loss
# 2. Train the critic and get the critic loss
# 3. Calculate the gradient penalty TODO
# 4. Multiply this gradient penalty with a constant weight factor
# 5. Add the gradient penalty to the critic loss
# 6. Return the generator and critic losses as a loss tuple
# Train the critic first. The original paper recommends training
# the critic for `x` more steps (typically 5) as compared to
# one step of the generator.
for i in range(self._critic_steps):
# Get the latent vector
random_latent_vectors = tf.random.normal(shape=(batch_size, self._latent_noise_size))
# generate labels
labels = WGAN._generate_labels(batch_size)
with tf.GradientTape() as tape:
# Generate fake samples from the latent vector
generated_samples = self.generator([random_latent_vectors, labels], training=True)
# Get the logits for the fake samples
fake_logits = self.critic([generated_samples, labels], training=True)
# Get the logits for the real samples
real_logits = self.critic([X_batch, y_batch], training=True)
# Calculate the critic loss using the fake and real samples logits
c_cost = self.critic_loss_fn(real_logits, fake_logits)
# Calculate the gradient penalty
#gp = self.gradient_penalty(batch_size, X_batch, generated_samples, y_batch) #TODO
# Add the gradient penalty to the original critic loss
critic_loss = c_cost#c_cost + gp * self._gp_weight
# Get the gradients w.r.t the critic loss
c_gradient = tape.gradient(critic_loss, self.critic.trainable_variables)
# Update the weights of the critic using the critic optimizer
self.critic_optimizer.apply_gradients(zip(c_gradient, self.critic.trainable_variables))
# Train the generator
# Get the latent vector
random_latent_vectors = tf.random.normal(shape=(batch_size, self._latent_noise_size))
# generate labels
labels = WGAN._generate_labels(batch_size)
with tf.GradientTape() as tape:
# Generate fake samples using the generator
generated_samples = self.generator([random_latent_vectors, labels], training=True)
# Get the critic logits for fake samples
gen_samples_logits = self.critic([generated_samples, labels], training=True)
# Calculate the generator loss
generator_loss = self.generator_loss_fn(gen_samples_logits)
# Get the gradients w.r.t the generator loss
gen_gradient = tape.gradient(generator_loss, self.generator.trainable_variables)
# Update the weights of the generator using the generator optimizer
self.generator_optimizer.apply_gradients(zip(gen_gradient, self.generator.trainable_variables))
return {"critic_loss": critic_loss, "generator_loss": generator_loss}
def generate_samples(self,
column_idx_to_scaler: dict,
column_idx_to_ohe: dict,
num_samples: int = 1,
negative_labels: int = 500,
positive_labels: int = 268):
# sample random noise latent vectors
random_latent_vectors = tf.random.normal(shape=(num_samples, self._latent_noise_size))
# sample random labels
labels = WGAN._generate_labels(num_samples)
# generate samples using generator model
generated_samples = self.generator([random_latent_vectors, labels])
generated_samples = generated_samples.numpy().tolist()
# convert raw generated samples' representation into original format
samples = []
for generated_sample in generated_samples:
sample = []
column_idx = 0
column_size_idx = len(column_idx_to_scaler)
for sample_col_value in generated_sample:
if column_idx in column_idx_to_scaler.keys(): # inverse transform min-max scaler
sample.append(column_idx_to_scaler[column_idx].inverse_transform(np.array([[sample_col_value]]))[0][0])
else: # inverse transform one-hot-encoding format
if column_idx not in column_idx_to_ohe.keys():
column_idx += 1
continue
categorical_softmax_representation = generated_sample[
column_idx:column_idx + self._columns_size[column_size_idx]]
# find index with the max value and generate one-hot-encoding representation
max_index = np.argmax(np.array(categorical_softmax_representation))
categorical_ohe_representation = [0] * self._columns_size[column_size_idx]
categorical_ohe_representation[max_index] = 1
categorical_value = column_idx_to_ohe[column_idx].inverse_transform([categorical_ohe_representation])[0][0]
sample.append(categorical_value)
column_size_idx += 1
column_idx += 1
samples.append(sample)
return samples, generated_samples, labels
class GANMonitor(Callback):
def __init__(self,
column_idx_to_scaler: dict,
column_idx_to_ohe: dict,
checkpoint,
X_test,
y_test,
columns: List[str],
num_samples: int = 1,
monitor_every_n_epoch: int = 5):
self.monitor_every_n_epoch = monitor_every_n_epoch
self.num_samples = num_samples
self.column_idx_to_scaler = column_idx_to_scaler
self.column_idx_to_ohe = column_idx_to_ohe
self.checkpoint = checkpoint
self.columns = columns
self.X_test = X_test
self.y_test = y_test
def on_epoch_end(self, epoch, logs=None):
if epoch % self.monitor_every_n_epoch == 0:
# save model
#self.checkpoint.save(file_prefix=CHECKPOINT_PREFIX)
# generate samples
samples, generated_samples, labels = self.model.generate_samples(self.column_idx_to_scaler, self.column_idx_to_ohe, self.num_samples)
labels = labels.numpy()
# evaluate using machine learning efficacy
model = RandomForestClassifier(random_state=SEED)
model.fit(generated_samples, labels[:, 0])
print(model.score(self.X_test, self.y_test))
# evaluate using tsne
df = pd.DataFrame(data=np.concatenate((np.array(samples), labels), axis=1), columns=self.columns + ['class'])
numeric_columns, categorical_columns = gather_numeric_and_categorical_columns(df)
tsne(df, categorical_columns, hue='class', filename=f'training_info/{epoch}_tsne', save_figure=True)
| [
"[email protected]"
]
| |
ebd65340d13e72780da3334b66d1dab092886636 | 922d5aed6b0cb672ef90f5c3303c805b72ee2a34 | /utils/field_format.py | 09a7bbeb3ed1435feae01d3b25a1a743018ba6ac | [
"Apache-2.0"
]
| permissive | lnybrave/zzbook | 8df7a831f13ef311b41928eb44000fca0ead8456 | 568a232b1d2b89a6e190e99ccdefd601c4ebe48d | refs/heads/master | 2021-01-01T04:38:48.567473 | 2017-10-31T00:51:56 | 2017-10-31T00:51:56 | 97,218,813 | 3 | 0 | null | 2017-08-24T01:51:08 | 2017-07-14T09:46:51 | Python | UTF-8 | Python | false | false | 1,005 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import types
# Convert a time object to a string; returns "" or a string such as 2015-12-12 10:10:10
def time2str(time):
if time == None:
return ""
sTime = str(time)
sTime = sTime.strip()
if len(sTime) >= 19:
sTime = sTime[:19]
return sTime
# Return the object's url
def url2str(obj):
try:
return obj.url
except:
return ""
# Convert a possibly-None value to a string
def null2str(data, default=""):
if isinstance(data, types.NoneType):
return default
elif isinstance(data, types.IntType) or isinstance(data, types.FloatType):
return str(data)
else:
return data
# Convert a possibly-None value to an int
def null2int(data, default=0):
if isinstance(data, types.NoneType):
return default
else:
return int(data)
# Check whether the object is empty; if it is not, return the object's field as a string
def obj2str(obj, field):
if not obj:
return ""
else:
val = getattr(obj, field)
return null2str(val)
| [
"[email protected]"
]
| |
bdc1adf3b722a4189eb19fb531d5aaf9776a50da | a528b0a05da4cab12a6c73c8d6de8aeab607d00c | /sortByHeaderStr.py | 625f46be5d2a1ef01b35df56a7f9b0889ee0a0f2 | []
| no_license | mahassani/SynCom_scripts | 77d007d64ccb66d7e01c216fdbb8b90a34095f99 | 67ff5ecff1c0cb084e83b8fbe678408db497a776 | refs/heads/master | 2021-06-21T19:28:33.697395 | 2017-08-17T15:33:05 | 2017-08-17T15:33:05 | 100,614,899 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,478 | py | #!/usr/bin/python
#Author: Benli Chai
#This Python script sorts reads by a unique substring in the header. Requires Biopython 1.51 or later
from Bio import SeqIO
import itertools, sys
#Setup variables (could parse command line args instead)
format = 'fasta'
def sortByTag(iter):
handles = {}#list of opened output file handles
handle = open('notag.fa', 'w')
handles['notag'] = handle
for read in iter:
find = 0 #switch notag and tagged reads
for tag in tagList:
if read.id.split(';')[0].split('_')[0] == tag:
find = 1
if not tag in handles.keys():#new sample new output handler
name = tag + '.fa'
handle = open(name, 'w')
handles[tag] = handle
SeqIO.write(read, handle, format)
else:
handle = handles[tag]
SeqIO.write(read, handle, format)
break#find the barcode then break out
if find == 0:#no tag was found for this read
handle = handles['notag']
SeqIO.write(read, handle, format)
for handle in handles.values():#close all the output file handles
handle.close()
if len(sys.argv) != 3:
print 'sortByTag.py <ReadFile.fasta> <tagfile> '
sys.exit()
#Main#
#sequence input handles
ReadFile = SeqIO.parse(open(sys.argv[1], "rU"), format)
#get sample and tags
tagFile = open(sys.argv[2], 'r').readlines()
hash = {}
tagList = []
for line in tagFile:
tag = line.strip()
if not tag in tagList:
tagList.append(tag)
else:
print 'Duplicated tag', line.strip()
sys.exit()
sortByTag(ReadFile)
| [
"[email protected]"
]
| |
f3d0693d7d38a4fef5b75ce72b5d6750b7ee5e65 | 0ba063f1603b9ef1e84ab8cd894cfa403dd72103 | /qipr_approver/approver/workflows/shib.py | de1086e23ee7c5e7c339a2c2fc9442acaff21867 | [
"Apache-2.0"
]
| permissive | amberlallen/qipr_approver | 323ef8463d4f9d74279c782e72f797915004fe66 | cb03aef771651cb8768f2c1fa9c82ee47c234158 | refs/heads/master | 2020-12-26T03:22:17.149667 | 2016-10-27T17:44:16 | 2016-10-27T17:44:16 | 66,847,969 | 0 | 0 | null | 2016-11-03T18:39:45 | 2016-08-29T13:52:03 | JavaScript | UTF-8 | Python | false | false | 1,711 | py | from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.utils import timezone
from approver.models import Person
from approver.constants import SESSION_VARS
from . import user_crud
def add_shib_information_to_session(request):
"""
This function takes the information we have received from shib
and places it inside the session.
"""
"""This will need to be changed when we get shib hooked up
TODO:set expiry and other sessiony things"""
request.session[SESSION_VARS['gatorlink']] = request.POST.get('gatorlink')
request.session[SESSION_VARS['email']] = request.POST.get('gatorlink') + '@ufl.edu'
request.session[SESSION_VARS['first_name']] = 'FAKE FIRST NAME'
request.session[SESSION_VARS['last_name']] = 'FAKE LAST NAME'
def after_validation(request):
"""This function is to be called with what shib sends us"""
"""
Note that this will need to be changed when the real shib gets hooked up.
Wont be adding cleartext cookie stuff, apache will hijack the
requests and add thigns to the header which is where we will pull
the gatorlink from
"""
gatorlink = request.POST.get('gatorlink')
add_shib_information_to_session(request)
if len(User.objects.filter(username=gatorlink)) == 0:
new_user = user_crud.create_new_user_from_current_session(request.session)
response = redirect(reverse("approver:aboutyou"))
return response
else:
user = User.objects.get(username=gatorlink)
user.person.last_login_time = timezone.now()
user.person.save(user)
return redirect(reverse("approver:dashboard"))
| [
"[email protected]"
]
| |
7949b7e9e5f5b448edbf73c17ab6761f3f348fb1 | e686cf242a7a8424d0d0ccb5dfcc2b67e617f388 | /hw4/lib/trainer_no_RNN.py | 5614d544d9e6e94468eb9a750fdd9c9f4ea27cea | []
| no_license | kaichan1201/DLCV_Fall_2019 | 2219ab755da1aaf881e4c4c950743981c7a470d1 | 68921098ecc05683de2630eda530c92badd66e19 | refs/heads/master | 2022-11-27T21:29:30.150798 | 2020-03-09T07:02:24 | 2020-03-09T07:02:24 | 245,815,775 | 0 | 0 | null | 2022-11-22T04:23:46 | 2020-03-08T13:05:16 | Python | UTF-8 | Python | false | false | 5,344 | py | import os
import numpy as np
import torch
import torch.nn as nn
from tensorboardX import SummaryWriter
from torchvision.utils import make_grid
def weights_init(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
nn.init.normal_(m.weight.data, 0.0, 0.02)
elif classname.find('BatchNorm') != -1:
nn.init.normal_(m.weight.data, 1.0, 0.02)
nn.init.constant_(m.bias.data, 0)
class Trainer:
def __init__(self, model, train_loader, val_loader, optim, criterion, args, writer, use_cuda):
assert(isinstance(model, dict))
self.E = model['E']
self.C = model['C']
self.train_loader = train_loader
self.val_loader = val_loader
self.optim = optim
self.criterion = criterion
self.args = args
self.writer = writer
self.use_cuda = use_cuda
self.epochs = self.args.epochs
self.val_epoch = self.args.val_epoch
self.save_epoch = self.args.save_epoch
self.save_dir = self.args.save_dir
self.base_lr = self.args.lr
self.frame_num = self.args.frame_num
self.iters = 0
self.max_iter = len(self.train_loader) * self.epochs
self.best_acc = 0
self.total_cnt = 0
self.correct_cnt = 0
if len(self.args.pretrained):
print("===>Loading pretrained model {}...".format(self.args.pretrained))
checkpoint = torch.load(self.args.pretrained)
self.E.load_state_dict(checkpoint['E'])
self.C.load_state_dict(checkpoint['C'])
def get_lr(self):
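# polynomial decay: lr = base_lr * (1 - iters / max_iter) ** 0.9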
return self.base_lr * (1 - self.iters / self.max_iter) ** 0.9
def train(self):
self.E.eval()
self.C.train()
for epoch in range(1, self.epochs + 1):
self.total_cnt, self.correct_cnt = 0, 0
current_iter = 0
'''set new lr'''
for param in self.optim.param_groups:
print('Epoch {}, New lr: {}'.format(epoch, self.get_lr()))
param['lr'] = self.get_lr()
'''train an epoch'''
for idx, (videos, lbls) in enumerate(self.train_loader):
self.iters += 1
current_iter += 1
if self.use_cuda:
videos, lbls = videos.cuda(), lbls.cuda() # video: (B, T, C, H, W)
'''model forwarding & loss calculation'''
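# run the frozen encoder on each of the frame_num frames, concatenate the per-frame features,
# and classify the concatenation; detach() keeps gradients from flowing back into the encoder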
feats = []
for i in range(self.frame_num):
feat = self.E(videos[:, i, :, :, :])
feats.append(feat)
feats = torch.cat(feats, dim=1)
out = self.C(feats.detach())
loss = self.criterion(out, lbls)
self.optim.zero_grad()
loss.backward()
self.optim.step()
with torch.no_grad():
_, preds = torch.max(out, dim=1)
self.total_cnt += preds.cpu().numpy().size
self.correct_cnt += (preds == lbls).sum().item()
self.writer.add_scalar('loss', loss.item(), self.iters)
if current_iter % 10 == 0 or current_iter == len(self.train_loader):
print('Epoch [{}][{}/{}], Loss: {:.4f}'.format(epoch, current_iter,
len(self.train_loader), loss.item()))
torch.cuda.empty_cache()
train_acc = self.correct_cnt / self.total_cnt
self.writer.add_scalar('acc/train_acc', train_acc, epoch)
print('Epoch {}, Train Acc: {:.4f}'.format(epoch, train_acc))
if epoch % self.val_epoch == 0:
self.evaluate(epoch)
if epoch % self.save_epoch == 0:
torch.save({
'E': self.E.state_dict(),
'C': self.C.state_dict(),
}, os.path.join(self.save_dir, 'checkpoint_{}.pth.tar'.format(epoch)))
def evaluate(self, epoch):
self.E.eval()
self.C.eval()
total_cnt = 0
correct_cnt = 0
with torch.no_grad(): # do not need to calculate information for gradient during eval
for idx, (videos, gt) in enumerate(self.val_loader):
if self.use_cuda:
videos, gt = videos.cuda(), gt.cuda()
feats = []
for i in range(self.frame_num):
feat = self.E(videos[:, i, :, :, :])
feats.append(feat)
feats = torch.cat(feats, dim=1)
out = self.C(feats)
_, pred = torch.max(out, dim=1)
total_cnt += pred.cpu().numpy().size
correct_cnt += (pred == gt).sum().item()
val_acc = correct_cnt / total_cnt
self.writer.add_scalar('acc/val_acc', val_acc, epoch)
print('Epoch {}, Val Acc: {:.4f}'.format(epoch, val_acc))
if val_acc > self.best_acc:
torch.save({
'E': self.E.state_dict(),
'C': self.C.state_dict(),
}, os.path.join(self.save_dir, 'checkpoint_best.pth.tar'))
self.best_acc = val_acc
| [
"[email protected]"
]
| |
3c716ed99b2846ffd08e47e360fcb2465020a88f | 9e8f90baa792211b0e3fd7093b0ad18ce66fa121 | /mysite/urls.py | 25a9167ad7d77511e8fa3e2722d7a60c9c7ae0c6 | []
| no_license | Animesh3193/First_Blog | 3274bc3b66e023332887455811a4f9c4d550b818 | 9c45b6e9a28f1d4824a8867e6c3d4ef7a586d726 | refs/heads/master | 2020-03-10T21:01:42.554365 | 2018-04-25T16:31:53 | 2018-04-25T16:31:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 820 | py | """mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/dev/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'', include('blog.urls')),
]
| [
"[email protected]"
]
|