{
    "source": "JosueJoshua/blog_tutorial",
    "score": 2
}
#### File: blog_tutorial/my_blog/models.py
```python
from django.db import models
# Create your models here.
class Blog(models.Model):
    title = models.CharField(max_length=100)  # blog title
    category = models.CharField(max_length=50, blank=True)  # blog category
    date_time = models.DateTimeField(auto_now_add=True)  # blog date
    content = models.TextField(blank=True, null=True)  # blog body text
    # Python 2 uses __unicode__; Python 3 uses __str__
def __str__(self):
return self.title
class Meta:
        # order by date, newest first
ordering = ['-date_time']
```
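With `ordering = ['-date_time']`, querysets return the newest post first. A quick check in the Django shell (a sketch; assumes the app is installed and migrated):

```python
# Django shell sketch (python manage.py shell); assumes migrations are applied.
from my_blog.models import Blog

Blog.objects.create(title="First post")
Blog.objects.create(title="Second post")

titles = [str(b) for b in Blog.objects.all()]  # __str__ returns the title
print(titles)  # ['Second post', 'First post'], newest first
```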
{
    "source": "JosueJoshua/my_blog",
    "score": 2
}
#### File: my_blog/my_blog/views.py
```python
from django.shortcuts import render
from django.http import HttpResponse
from my_blog.models import Blog
from datetime import datetime
from django.http import Http404
# Create your views here.
def home(request):
    post_list = Blog.objects.all()  # fetch all Blog objects
return render(request, 'home.html', {'post_list':post_list})
def detail(request, id):
try:
        post = Blog.objects.get(id=id)
except Blog.DoesNotExist:
raise Http404
return render(request, 'post.html', {'post':post})
```
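These views expect URL patterns that render `home.html` and pass `id` through to `detail`. A plausible wiring (hypothetical; module paths and route names are assumptions):

```python
# Hypothetical urls.py for the views above; paths and names are assumptions.
from django.urls import path
from my_blog import views

urlpatterns = [
    path('', views.home, name='home'),
    path('post/<int:id>/', views.detail, name='detail'),
]
```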
{
    "source": "JosueJuarez/M-todos-Num-ricos",
    "score": 4
}
#### File: Ec. No lineales/Ejecutables/Punto_Fijo.py
```python
import numpy as np
from sympy import *
from sympy.utilities.lambdify import lambdify
import matplotlib.pyplot as plt
init_printing(use_unicode=True)
# In[2]:
# compute the derivative of g
x = symbols('x')
funcion = 2*x**3 - 9*x**2 + 7*x + 6
gfuncion = (-2*x**3 + 9*x**2 - 6)/7  # write the iteration function g(x) here
dgfuncion = diff(gfuncion, x)
print(str(dgfuncion))
# In[3]:
f = lambdify(x, funcion)
g = lambdify(x, gfuncion)
dg = lambdify(x, dgfuncion)
# In[4]:
X = np.linspace(-1, 4, 100)
plt.plot(X, dg(X), label="g'(x)")
plt.ylim(-1,1)
plt.legend()
plt.show()
# The inequality $|g'(x)| < 1$ holds approximately on the intervals $[-0.44, 0.46]$ and $[2.5, 3.34]$, although in practice the iteration does not converge on the first interval.
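# In[ ]:
# Quick numerical check of the contraction condition |g'(x)| < 1 on the plotted
# range (an illustrative sketch; the interval endpoints above are approximate):
mask = np.abs(dg(X)) < 1
print(X[mask].min(), X[mask].max())  # rough extent of the region(s) where |g'| < 1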
# In[5]:
e = 0.0001 #error
maxit = 100 #iteraciones máximas
# In[6]:
def PuntoFijo(x0, func=g, error=e, iterations=maxit):
    # fixed-point iteration x_{k+1} = func(x_k); stop when |f(x0)| <= error
    it = 0
    while (abs(f(x0)) > error) and (it < iterations):
        it += 1
        x0 = func(x0)
    return x0
# In[7]:
sol = PuntoFijo(2.6)
print(sol)
# In[8]:
plt.plot(X, f(X), label='f(x)')
plt.plot(sol,f(sol),'ro')
plt.legend()
plt.show()
# In[ ]:
```
{
    "source": "josuel23/CD_heroku",
    "score": 3
}
#### File: josuel23/CD_heroku/web.py
```python
import os
from flask import Flask
app = Flask(__name__)
@app.route("/")
def index():
return "FACULDADE IMPACTA - ENGENHARIA DA COMPUTAÇÃO"
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
```
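A minimal smoke test of the route using Flask's built-in test client (a sketch added for illustration; it assumes this module is importable as `web`):

```python
# Sketch: exercise the index route without starting a server.
from web import app

with app.test_client() as client:
    response = client.get("/")
    assert response.status_code == 200
    print(response.get_data(as_text=True))
```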
{
    "source": "josuel23/cursoemvideo-python",
    "score": 4
}
#### File: mundo-01/AC-3/AC_03.py
```python
class Gerador:
def __init__(self, nome, potencia, capacidade, tanque):
self.__nome = nome
self.__potencia = potencia
self.__capacidade = capacidade
self.__tanque = tanque
self.__status = 0
self.__combustivel = 0
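        # Conventions (noted for clarity): __status is 0 (off) or 1 (on);
        # switching a generator on consumes 50 liters of fuel, and every
        # generator except G1 can only be switched on while G1 is running.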
def get_nome(self):
return self.__nome
def get_potencia(self):
return self.__potencia
def get_capacidade(self):
return self.__capacidade
def get_tanque(self):
return self.__tanque
def get_combustivel(self):
return self.__combustivel
def get_status(self):
if self.__status == 1:
return f"{self.__nome} Ligado\t\t\t"
return f"{self.__nome} Desligado\t\t\t"
def set_status(self):
if self.__status == 1:
self.__status = 0
else:
if (self.__nome != 'G1') and ('Desligado' in geradores[0].get_status()):
                return f'{self.__nome} não pode ser ligado porque G1 está desligado'
if self.__combustivel < 50:
return f'{self.__nome} não pode ser ligado por falta de combustível'
self.__combustivel -= 50
self.__status = 1
return self.get_status()
def set_combustivel(self, litros):
        if isinstance(litros, int) and litros > 0:
if (self.__combustivel + litros) > self.__tanque:
return f'O tanque não suporta mais {litros} litro(s)'
else:
self.__combustivel += litros
return f'Combustível atualizado: {self.__combustivel}'
else:
return 'Os valores de abastecimento devem ser inteiros positivos'
def auxilar_liga_desliga(self):
if self.__status == 1:
print(f'{self.__nome} está Ligado, deseja Desligar?')
else:
print(f'{self.__nome} está Desligado, deseja Ligar?')
print(f'1 - Sim\n2 - Não')
if input_escolhas(2) == 1:
return self.set_status()
def analisar_combustivel(self):
if self.__combustivel / self.__tanque < 0.2:
return f'{self.__combustivel}/{self.__tanque} litros\t\t(ABASTECER)'
return f'{self.__combustivel}/{self.__tanque} litro(s)\t\t'
def menu_inicial():
opcoes = []
opcoes.append(["Acionamento manual de gerador", 'acionar'])
opcoes.append(["Status dos geradores", 'status'])
opcoes.append(["Status dos tanques de combustível", 'combustivel'])
opcoes.append(["Abastecer tanque de combustível", 'abastecer'])
opcoes.append(["Detalhes do gerador", 'detalhes'])
opcoes.append(["Sair", 'sair'])
print(f' {"-"*13}Menu Principal{"-"*13}')
for index, opcao in enumerate(opcoes):
print(index + 1, " - ", opcao[0])
opcao = input_escolhas(len(opcoes))
menu_opcao(opcoes[opcao - 1][0], opcoes[opcao - 1][1])
def menu_opcao(descricao, opcao):
print(f' {"-"*3}{descricao}{"-"*3}')
if opcao == 'acionar':
gerador = procurar_gerador()
if gerador:
print(gerador.auxilar_liga_desliga(), '\t\t\t<-------')
elif opcao == 'status':
print('STATUS DOS GERADORES:', '\t\t\t<-------')
for gerador in geradores:
if 'Desligado' in gerador.get_status():
print(f'{gerador.get_nome()} - Desligado', '\t\t\t<-------')
else:
print(f'{gerador.get_nome()} - Ligado', '\t\t\t<-------')
elif opcao == 'combustivel':
print('STATUS DOS GERADORES:')
for gerador in geradores:
print(gerador.analisar_combustivel(), '\t\t\t<-------')
elif opcao == 'abastecer':
gerador = procurar_gerador()
if gerador:
litros = ""
while type(litros) != int:
try:
litros = int(
input(f'Quantidade de Litros de Combustível: '))
except Exception:
print('Digite apenas numeros inteiros')
print(gerador.set_combustivel(litros), '\t\t\t<-------')
elif opcao == 'detalhes':
gerador = procurar_gerador()
if gerador:
print('STATUS DOS GERADORES:', '\t\t\t<-------')
print(f'Nome: {gerador.get_nome()}')
print(f'Potência: {gerador.get_potencia()}')
print(
f'Capacidade de geração de energia: {gerador.get_capacidade()}')
print(f'Tamanho do Tanque: {gerador.get_tanque()}')
print(f'Status: {gerador.get_status()}')
elif opcao == 'sair':
print('O programa será desligado.', '\t\t\t<-------')
return
menu_inicial()
def procurar_gerador():
nome_gerador = input(f'Informe o Nome do Gerador: ')
for gerador in geradores:
if gerador.get_nome() == nome_gerador:
return gerador
print(f"Gerador não encontrado", '\t\t\t<-------')
def input_escolhas(alternativas, opcao=0):
while type(opcao) != int or not (0 < opcao <= alternativas):
try:
opcao = int(input(f'Escolha uma opção entre 1 e {alternativas}: '))
except Exception:
print(f"Escolha uma opção entre 1 e {alternativas}: ")
return opcao
geradores = []
geradores.append(Gerador('G1', 150, 10000, 700))
geradores.append(Gerador('G2', 85, 7000, 400))
geradores.append(Gerador('G3', 85, 7000, 400))
geradores.append(Gerador('G4', 50, 5000, 300))
geradores[0].set_combustivel(100)
geradores[0].set_status()
menu_inicial()
```
{
    "source": "JosueLabres/video-creator",
    "score": 3
}
#### File: video-creator/robots/text.py
```python
import requests
from bs4 import BeautifulSoup
import pysbd
import re
import json
from ibm_watson import NaturalLanguageUnderstandingV1
from ibm_cloud_sdk_core.authenticators import IAMAuthenticator
from ibm_watson.natural_language_understanding_v1 import Features, EntitiesOptions, KeywordsOptions
class TextRobot():
def __init__(self, searchTerm):
self.searchTerm = searchTerm
self.wikipedia = ''
def fetchContentFromWikipedia(self):
r = requests.get(f'https://pt.wikipedia.org/wiki/{self.searchTerm}')
bs = BeautifulSoup(r.text, 'html.parser')
content = bs.find(id='mw-content-text')
content = content.find_all('p')
wikipedia = ''
for p in content:
wikipedia = f'{wikipedia}{p.get_text()}'
self.wikipedia = wikipedia
return wikipedia
def sanitizeContent(self):
        sanitize = re.sub(r'\[[0-9]*\]', ' ', self.wikipedia)
return sanitize
def breakContentIntoSentences(self, text):
seg = pysbd.Segmenter(language="en", clean=False)
return seg.segment(text)
    def watson(self, sentences, maxSentences):
        sentence = []
        # Create the NLU client once, outside the loop; credentials are expected
        # to come from the IBM SDK's environment configuration.
        natural_language_understanding = NaturalLanguageUnderstandingV1(
            version='2021-08-01',
        )
        for i in range(maxSentences):
            print('get watson')
response = natural_language_understanding.analyze(
text=sentences[i],
features=Features(keywords=KeywordsOptions())).get_result()
keywords = []
for keyword in response['keywords']:
keywords.append(keyword['text'])
sentence.append({
'sentence': sentences[i],
'keywords': keywords
})
return sentence
```
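A plausible end-to-end use of `TextRobot` (a sketch; the search term is arbitrary, and `watson` assumes IBM NLU credentials are configured in the environment):

```python
# Sketch: run the text pipeline for one search term.
robot = TextRobot('Python')
raw = robot.fetchContentFromWikipedia()             # scrape pt.wikipedia.org
clean = robot.sanitizeContent()                     # strip [n] citation markers
sentences = robot.breakContentIntoSentences(clean)
analyzed = robot.watson(sentences, 5)               # needs IBM NLU credentials
print(analyzed[0]['keywords'])
```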
{
    "source": "josuelopes512/mastercode_films_api",
    "score": 2
}
#### File: mastercode_films_api/api/models.py
```python
from django.db.models import (
Model,
IntegerField,
UUIDField,
BooleanField,
BigAutoField,
TextField,
CharField,
JSONField,
FloatField,
DateTimeField,
SlugField,
)
from django.db.models.signals import pre_save
from .utils import *
from datetime import datetime as dt
from django.db import models
from random import randint
import uuid
# Create your models here.
class Movie(Model):
uuid = UUIDField(default=uuid.uuid4, unique=True, null=False)
movie_id = IntegerField(unique=True)
adult = BooleanField(default=False)
backdrop_path = CharField(max_length=255, default="")
genre_ids = JSONField(null=True)
original_language = CharField(max_length=255, default="")
original_title = CharField(max_length=255)
overview = TextField(blank=True)
poster_path = CharField(max_length=255, default="")
release_date = CharField(max_length=255, default="")
title = CharField(max_length=255)
video = BooleanField(default=False)
vote_average = FloatField()
vote_count = IntegerField()
popularity = FloatField()
media_type = CharField(max_length=255, default="movie")
updated_at = DateTimeField(auto_now_add=True)
created_at = DateTimeField(auto_now_add=True)
backdrop_b64 = TextField(null=True, blank=True)
poster_b64 = TextField(null=True, blank=True)
slug = SlugField(null=True, unique=True, blank=True)
title_norm = CharField(max_length=255, null=True, blank=True)
recommended = JSONField(null=True)
budget = IntegerField(null=True)
homepage = CharField(max_length=255, null=True, blank=True)
imdb_id = CharField(max_length=255, null=True, blank=True)
production_companies = JSONField(null=True)
production_countries = JSONField(null=True)
revenue = IntegerField(null=True)
runtime = IntegerField(null=True)
spoken_languages = JSONField(null=True)
status = CharField(max_length=255, null=True, blank=True)
tagline = CharField(max_length=255, null=True, blank=True)
# def __init__(self, *args, **kwargs) -> None:
# self.setAllWithEval(kwargs)
# super().__init__(*args,**kwargs)
# def setAllWithEval(self, kwargs):
# for key in list(kwargs.keys()):
# if key in ('id'):
# kwargs['movie_id'] = kwargs[key]
# del kwargs[key]
# if key not in ('uuid', 'created_at', 'updated_at'):
# setattr(self, key, kwargs[key])
@property
def name(self):
return self.title
def save(self, *args, **kwargs):
        if not self.backdrop_b64:
            backdrop_inst_b64(self, save=False)
        if not self.poster_b64:
            poster_inst_b64(self, save=False)
        if not self.slug:
            slugify_inst_title(self, save=False)
if not self.recommended:
recommended_item(self, save=False)
conds = [
self.budget, self.homepage,
self.imdb_id, self.production_companies,
self.production_countries, self.revenue,
self.runtime, self.spoken_languages,
self.status, self.tagline
]
if not all(conds):
add_infos(self, save=False)
super().save(*args, **kwargs)
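# The pre_save signal handler below mirrors the enrichment performed in
# Movie.save(), so derived fields (images, slug, recommendations, extra info)
# are populated right before any instance reaches the database.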
def slug_pre_save(sender, instance, *args, **kwargs):
if not instance.backdrop_b64:
backdrop_inst_b64(instance, save=False)
if not instance.poster_b64:
poster_inst_b64(instance, save=False)
if not instance.slug:
slugify_inst_title(instance, save=False)
if not instance.recommended:
recommended_item(instance, save=False)
conds = [
instance.budget, instance.homepage,
instance.imdb_id, instance.production_companies,
instance.production_countries, instance.revenue,
instance.runtime, instance.spoken_languages,
instance.status, instance.tagline
]
if not all(conds):
add_infos(instance, save=False)
pre_save.connect(slug_pre_save, sender=Movie)
```
{
    "source": "josuelopes512/teste_python",
    "score": 3
}
#### File: teste_python/temp/functions.py
```python
def separa_palavras(lista_tokens):
return [token for token in lista_tokens if token.isalpha()]
def normalizacao(lista_palavras):
return [palavra.lower() for palavra in lista_palavras]
def insere_letras(fatias):
letras = 'abcedfghijklmnopqrstuvwxyzáâàãéêèíîìóôòõúûùç'
return [esquerdo + letra + direito for esquerdo, direito in fatias for letra in letras]
def gerador_palavras(palavra):
return insere_letras([(palavra[:i], palavra[i:]) for i in range(len(palavra) + 1)])
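# Note: `probabilidade` and the correctors below rely on module-level globals
# (`frequencia`, `total_palavras`, `vocabulario`) that must be built elsewhere
# from a training corpus; see the setup sketch after this file.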
def probabilidade(palavra_gerada):
return frequencia[palavra_gerada] / total_palavras
def corretor(palavra_errada):
return max(gerador_palavras(palavra_errada), key=probabilidade)
def cria_dados_teste(nome_arquivo):
lista_palavras_teste = []
f = open(nome_arquivo, 'r')
for linha in f:
correta, errada = linha.split()
lista_palavras_teste.append((correta, errada))
f.close()
return lista_palavras_teste
def avaliador(testes):
numero_palavras = len(testes)
acertou = 0
for correta, errada in testes:
palavra_corrigida = corretor(errada)
if palavra_corrigida == correta:
acertou += 1
taxa_acerto = round(acertou * 100 / numero_palavras, 2)
print(f'{taxa_acerto}% de {numero_palavras} palavras')
def deletando_caracter(fatias):
return [esquerdo + direito[1:] for esquerdo, direito in fatias]
def gerador_palavras(palavra):
fatias = [(palavra[:i], palavra[i:]) for i in range(len(palavra) + 1)]
palavras_geradas = insere_letras(fatias)
palavras_geradas += deletando_caracter(fatias)
return palavras_geradas
def troca_caracter(fatias):
letras = 'abcedfghijklmnopqrstuvwxyzáâàãéêèíîìóôòõúûùç'
return [esquerdo + letra + direito[1:] for esquerdo, direito in fatias for letra in letras]
def gerador_palavras(palavra):
fatias = [(palavra[:i], palavra[i:]) for i in range(len(palavra) + 1)]
palavras_geradas = insere_letras(fatias)
palavras_geradas += deletando_caracter(fatias)
palavras_geradas += troca_caracter(fatias)
return palavras_geradas
def insere_letras(fatias):
letras = 'abcedfghijklmnopqrstuvwxyzáâàãéêèíîìóôòõúûùç'
return [esquerdo + letra + direito for esquerdo, direito in fatias for letra in letras]
def deletando_caracter(fatias):
return [esquerdo + direito[1:] for esquerdo, direito in fatias]
def troca_caracter(fatias):
letras = 'abcedfghijklmnopqrstuvwxyzáâàãéêèíîìóôòõúûùç'
return [esquerdo + letra + direito[1:] for esquerdo, direito in fatias for letra in letras]
def invertendo_caracter(fatias):
return [esquerdo + direito[1] + direito[0] + direito[2:] for esquerdo, direito in fatias if len(direito) > 1]
def gerador_palavras(palavra):
fatias = [(palavra[:i], palavra[i:]) for i in range(len(palavra) + 1)]
palavras_geradas = insere_letras(fatias)
palavras_geradas += deletando_caracter(fatias)
palavras_geradas += troca_caracter(fatias)
palavras_geradas += invertendo_caracter(fatias)
return palavras_geradas
def avaliador_v1(testes, vocabulario):
numero_palavras = len(testes)
acertou = desconhecidas = 0
for correta, errada in testes:
palavra_corrigida = corretor(errada)
desconhecidas += (correta not in vocabulario)
if palavra_corrigida == correta:
acertou += 1
taxa_acerto = round(acertou * 100 / numero_palavras, 2)
taxa_desconhecidas = round(desconhecidas * 100 / numero_palavras, 2)
print(f'{taxa_acerto}% de {numero_palavras} das palavras conhecidas\n'
f'e {taxa_desconhecidas}% das palavras desconhecidas')
def gerador_inception(palavras_geradas):
novas_palavras = []
for palavra in palavras_geradas:
novas_palavras += gerador_palavras(palavra)
return novas_palavras
def corretor_super_sayajin_v1(palavra_errada):
palavras_geradas = gerador_palavras(palavra_errada)
palavras_inception = gerador_inception(palavras_geradas)
todas_palavras = set(palavras_geradas + palavras_inception)
candidatos = [palavra_errada]
for palavra in todas_palavras:
if palavra in vocabulario:
candidatos.append(palavra)
print(f'Temos {len(candidatos)} candidatos a palavra correta.\n'
f'São eles {candidatos}')
palavra_correta = max(candidatos, key=probabilidade)
return palavra_correta
def corretor_super_sayajin(palavra_errada):
palavras_geradas = gerador_palavras(palavra_errada)
palavras_inception = gerador_inception(palavras_geradas)
todas_palavras = set(palavras_geradas + palavras_inception)
candidatos = [palavra_errada]
for palavra in todas_palavras:
if palavra in vocabulario:
candidatos.append(palavra)
palavra_correta = max(candidatos, key=probabilidade)
return palavra_correta
def avaliador(testes, vocabulario):
numero_palavras = len(testes)
acertou = desconhecidas = 0
for correta, errada in testes:
palavra_corrigida = corretor_super_sayajin(errada)
desconhecidas += (correta not in vocabulario)
if palavra_corrigida == correta:
acertou += 1
taxa_acerto = round(acertou * 100 / numero_palavras, 2)
taxa_desconhecidas = round(desconhecidas * 100 / numero_palavras, 2)
print(f'{taxa_acerto}% de {numero_palavras} das palavras conhecidas\n'
f'e {taxa_desconhecidas}% das palavras desconhecidas')
def avaliador(testes, vocabulario):
numero_palavras = len(testes)
acertou = desconhecidas = 0
for correta, errada in testes:
palavra_corrigida = corretor_super_sayajin(errada)
desconhecidas += (correta not in vocabulario)
if palavra_corrigida == correta:
acertou += 1
else:
print(f"{errada} - {corretor(errada)} - {palavra_corrigida}")
taxa_acerto = round(acertou * 100 / numero_palavras, 2)
taxa_desconhecidas = round(desconhecidas * 100 / numero_palavras, 2)
print(f'\n\n{taxa_acerto}% de {numero_palavras} das palavras conhecidas\n'
f'e {taxa_desconhecidas}% das palavras desconhecidas')
def insere_letras(fatias):
letras = 'abcedfghijklmnopqrstuvwxyzáâàãéêèíîìóôòõúûùç'
return [esquerdo + letra + direito for esquerdo, direito in fatias for letra in letras]
def deletando_caracter(fatias):
return [esquerdo + direito[1:] for esquerdo, direito in fatias]
def troca_caracter(fatias):
letras = 'abcedfghijklmnopqrstuvwxyzáâàãéêèíîìóôòõúûùç'
return [esquerdo + letra + direito[1:] for esquerdo, direito in fatias for letra in letras]
def invertendo_caracter(fatias):
return [esquerdo + direito[1] + direito[0] + direito[2:] for esquerdo, direito in fatias if len(direito) > 1]
def gerador_palavras(palavra):
fatias = [(palavra[:i], palavra[i:]) for i in range(len(palavra) + 1)]
palavras_geradas = insere_letras(fatias)
palavras_geradas += deletando_caracter(fatias)
palavras_geradas += troca_caracter(fatias)
palavras_geradas += invertendo_caracter(fatias)
return palavras_geradas
def avaliador(testes, vocabulario):
numero_palavras = len(testes)
acertou = desconhecidas = 0
for correta, errada in testes:
palavra_corrigida = corretor(errada)
desconhecidas += (correta not in vocabulario)
if palavra_corrigida == correta:
acertou += 1
taxa_acerto = round(acertou * 100 / numero_palavras, 2)
taxa_desconhecidas = round(desconhecidas * 100 / numero_palavras, 2)
print(f'{taxa_acerto}% de {numero_palavras} das palavras conhecidas\n'
f'e {taxa_desconhecidas}% das palavras desconhecidas')
```
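The functions above depend on corpus-derived globals (`frequencia`, `total_palavras`, `vocabulario`) that this file never defines. One plausible setup, sketched here with `nltk` (the corpus filename is an assumption):

```python
# Sketch: build the globals the corrector functions expect (assumed setup).
import nltk

nltk.download('punkt')  # one-time tokenizer download

with open('artigos.txt', 'r') as f:  # hypothetical training corpus
    corpus = f.read()

lista_tokens = nltk.tokenize.word_tokenize(corpus)
lista_normalizada = normalizacao(separa_palavras(lista_tokens))

frequencia = nltk.FreqDist(lista_normalizada)
total_palavras = len(lista_normalizada)
vocabulario = set(lista_normalizada)

print(corretor('lgica'))  # should propose a known word such as 'logica'
```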
{
    "source": "josue-lubaki/FastAPI-Python-Tutorial",
    "score": 2
}
#### File: app/routers/post.py
```python
from fastapi import Response, status, HTTPException, Depends, APIRouter
from .. import models, schemas, oauth2
from typing import List, Optional
from sqlalchemy.orm import Session
from sqlalchemy import func
from ..database import get_db
router = APIRouter(
prefix='/posts',
tags=['Posts']
)
# @router.get("/", response_model=List[schemas.Post])
@router.get("/", response_model=List[schemas.PostOut])
def get_posts(db: Session = Depends(get_db), current_user: int = Depends(oauth2.get_current_user), limit: int = 10, skip: int = 0, search: Optional[str] = ""):
    # Join votes onto posts so each row carries its vote count.
    results = db.query(models.Post, func.count(models.Vote.post_id).label("votes")).join(
        models.Vote, models.Vote.post_id == models.Post.id, isouter=True).group_by(models.Post.id).filter(
        models.Post.title.contains(search)).limit(limit).offset(skip).all()
return results
@router.post("/", status_code=status.HTTP_201_CREATED, response_model=schemas.Post)
def create_posts(post: schemas.PostCreate, db: Session = Depends(get_db), current_user: int = Depends(oauth2.get_current_user)):
    # attach the current user as the post owner
new_post = models.Post(owner_id=current_user.id, **post.dict())
db.add(new_post)
db.commit()
db.refresh(new_post)
return new_post
@router.get("/latest", response_model=schemas.PostOut)
def get_latest_post(db: Session = Depends(get_db)):
post = db.query(models.Post, func.count(models.Vote.post_id).label("votes")).join(
models.Vote, models.Vote.post_id == models.Post.id, isouter=True).group_by(models.Post.id).order_by(models.Post.id.desc()).first()
    if post is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="Post table is empty")
return post
@router.get("/{id}", response_model=schemas.PostOut)
def get_post(id: int, db: Session = Depends(get_db), current_user: int = Depends(oauth2.get_current_user)):
post = db.query(models.Post, func.count(models.Vote.post_id).label("votes")).join(
models.Vote, models.Vote.post_id == models.Post.id, isouter=True).group_by(models.Post.id).filter(models.Post.id == id).first()
if not post:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Post with id: {id} was not found"
)
return post
@router.delete("/{id}", status_code=status.HTTP_204_NO_CONTENT)
def delete_post(id: int, db: Session = Depends(get_db), current_user: int = Depends(oauth2.get_current_user)):
post = db.query(models.Post).filter(models.Post.id == id).first()
    if post is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
detail=f"Post with id: {id} was not found")
    # Check that the post belongs to the current user before deleting it
if post.owner_id != current_user.id:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail="You are not the owner of this post")
db.delete(post)
db.commit()
return Response(status_code=status.HTTP_204_NO_CONTENT)
@router.put("/{id}", response_model=schemas.Post)
def update_post(id: int, updated_post: schemas.PostCreate, db: Session = Depends(get_db), current_user: int = Depends(oauth2.get_current_user)):
post_query = db.query(models.Post).filter(models.Post.id == id)
post = post_query.first()
    if post is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
detail=f"Post with id: {id} was not found")
    # Check that the post belongs to the current user before updating it
if post.owner_id != current_user.id:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail="You are not the owner of this post")
post_query.update(updated_post.dict(), synchronize_session=False)
db.commit()
return post_query.first()
```
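`response_model=schemas.PostOut` implies a schema pairing a post with its vote count, matching the `(Post, votes)` rows the joined queries return. A plausible sketch (hypothetical; the real definitions live in `schemas.py`):

```python
# Hypothetical sketch of the response schemas; fields beyond what the queries
# imply (a Post plus a "votes" count) are assumptions.
from pydantic import BaseModel

class Post(BaseModel):
    id: int
    title: str
    content: str
    owner_id: int

    class Config:
        orm_mode = True

class PostOut(BaseModel):
    Post: Post
    votes: int

    class Config:
        orm_mode = True
```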
{
    "source": "josue-lubaki/traductor-api",
    "score": 3
}
#### File: app/routers/synthesize.py
```python
from typing import List
from fastapi import APIRouter, status, HTTPException
from ibm_cloud_sdk_core.authenticators import IAMAuthenticator
from ibm_watson import ApiException
from ibm_watson import TextToSpeechV1
from ..config import settings
from .. import schemas
router = APIRouter(
prefix='/synthesize',
tags=['Synthesize']
)
@router.post('/', status_code=status.HTTP_201_CREATED)
def create_synthesize(to_speech: schemas.TextToSpeech):
try:
        # Build the Text to Speech client
        authenticator = IAMAuthenticator(settings.api_key_text_speech)
        text_to_speech = TextToSpeechV1(authenticator=authenticator)
        text_to_speech.set_default_headers(
            {'x-watson-learning-opt-out': "true"})
        # Point the client at the Text to Speech service instance
        text_to_speech.set_service_url(settings.url_instance_text_speech)
data = to_speech.dict()
title_file = data["title_file"]
path = f"public/mp3/{title_file}"
with open(path, 'wb') as audio_file:
audio_file.write(text_to_speech.synthesize(
text=data["text"],
voice=data["voice"],
accept="audio/mp3").get_result().content)
return {"message": f"{audio_file.name} created"}
except ApiException as ex:
print("Method failed with status code " +
str(ex.code) + ": " + ex.message)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, detail="Error: " + ex.message)
@router.post('/bytes', status_code=status.HTTP_201_CREATED)
def create_synthesize_to_bytes(to_speech: schemas.TextToSpeech):
try:
        # Build the Text to Speech client
        authenticator = IAMAuthenticator(settings.api_key_text_speech)
        text_to_speech = TextToSpeechV1(authenticator=authenticator)
        text_to_speech.set_default_headers(
            {'x-watson-learning-opt-out': "true"})
        # Point the client at the Text to Speech service instance
        text_to_speech.set_service_url(settings.url_instance_text_speech)
data = to_speech.dict()
title_file = data["title_file"]
path = f"public/mp3/{title_file}"
with open(path, 'wb') as audio_file:
audio_file.write(
text_to_speech.synthesize(text=data["text"],
voice=data["voice"],
accept="audio/mp3").get_result().content)
        # read the mp3 file back as a byte array
with open(path, 'rb') as audio_file:
audio_bytes = audio_file.read()
return {"bytes": f"{audio_bytes}"}
except ApiException as ex:
print("Method failed with status code " +
str(ex.code) + ": " + ex.message)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, detail="Error: " + ex.message)
@router.get('/voices', status_code=status.HTTP_200_OK, response_model=List[schemas.Voices])
def get_voices():
try:
authenticator = IAMAuthenticator(settings.api_key_text_speech)
text_to_speech = TextToSpeechV1(authenticator=authenticator)
text_to_speech.set_default_headers(
{'x-watson-learning-opt-out': "true"})
        # Point the client at the Text to Speech service instance
        text_to_speech.set_service_url(settings.url_instance_text_speech)
        # List the available voices
voices = text_to_speech.list_voices().get_result()
voices_list = []
for voice in voices["voices"]:
            # keep only the "gender", "name" and "url" properties
response = {
"gender": voice['gender'],
"name": voice['name'],
"url": voice['url']
}
            # append the response to voices_list
voices_list.append(response)
return voices_list
except ApiException as ex:
print("Method failed with status code " +
str(ex.code) + ": " + ex.message)
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="Error: " + ex.message)
```
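Each endpoint reads `title_file`, `text` and `voice` from the request body. A plausible `schemas.TextToSpeech` (a sketch; the actual definition lives in `schemas.py`):

```python
# Hypothetical sketch inferred from the fields the endpoints access.
from pydantic import BaseModel

class TextToSpeech(BaseModel):
    title_file: str  # e.g. "speech.mp3"; written under public/mp3/
    text: str        # text to synthesize
    voice: str       # e.g. "en-US_AllisonV3Voice"
```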
#### File: app/routers/translate.py
```python
from fastapi import APIRouter, HTTPException, status
from ibm_watson import LanguageTranslatorV3
from ibm_cloud_sdk_core.authenticators import IAMAuthenticator
from ibm_watson import ApiException
from ..config import settings
from .. import schemas
router = APIRouter(
prefix='/translate',
tags=['Translate']
)
@router.post('/', status_code=status.HTTP_201_CREATED, response_model=schemas.TranslateResponse)
def translate(data: schemas.PostTranslate):
try:
# Authenticate with IAM
authenticator = IAMAuthenticator(settings.api_key)
language_translator = LanguageTranslatorV3(
version=settings.version,
authenticator=authenticator
)
        # Point the client at the Language Translator service instance
language_translator.set_service_url(settings.url_instance)
translation = language_translator.translate(**data.dict()).get_result()
return translation["translations"][0]
except ApiException as ex:
print("Method failed with status code " +
str(ex.code) + ": " + ex.message)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, detail="Error: " + ex.message)
```
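`translate(**data.dict())` forwards the request body straight to Watson's `translate` call, which accepts `text` and `model_id`. An illustrative payload (values are examples, not from the repo):

```python
# Illustrative body for POST /translate; model_id pairs source and target languages.
payload = {
    "text": ["Hello, world"],
    "model_id": "en-es",
}
```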
{
    "source": "Josue-Martinez-Moreno/blendernc",
    "score": 3
}
#### File: Josue-Martinez-Moreno/blendernc/core.py
```python
import bpy
import os
import xarray
import numpy as np
import glob
from . import files_utils
class BlenderncEngine():
""""
"""
def __init__(self):
self.current_file_path = files_utils.get_addon_path()
# TO DO : move to file_utils
def check_files_netcdf(self,file_path):
"""
"""
#file_folder = os.path.dirname(file_path)
if "*" in file_path:
self.file_path = glob.glob(file_path)
self.check_netcdf()
elif os.path.isfile(file_path):
self.file_path = [file_path]
self.check_netcdf()
else:
raise NameError("File doesn't exist:",file_path)
    def check_netcdf(self):
        """
        Check that the file(s) are netCDF files containing at least one variable.
        """
        if len(self.file_path) == 1:
            extension = self.file_path[0].split('.')[-1]
            if extension == 'nc':
                self.load_netcdf()
            else:
                try:
                    self.load_netcdf()
                except Exception:
                    raise ValueError("File isn't a netCDF:", self.file_path)
        else:
            extension = self.file_path[0].split('.')[-1]
            if extension == 'nc':
                self.load_netcdf()
            else:
                try:
                    self.load_netcdf()
                except Exception:
                    raise ValueError("Files aren't netCDFs:", self.file_path)
    def load_netcdf(self, file=None):
        """
        Load the dataset with xarray (single file or multi-file).
        """
        if len(self.file_path) == 1:
            self.dataset = xarray.open_dataset(self.file_path[0], decode_times=False)
        else:
            self.dataset = xarray.open_mfdataset(self.file_path, decode_times=False, combine='by_coords')
    def netcdf_var(self):
        """
        Build enum items (identifier, name, description, icon, index) for each data variable.
        """
        dimensions = [i for i in self.dataset.coords.dims.keys()]
        variable = list(self.dataset.variables.keys() - dimensions)
        if getattr(self.dataset[variable[0]], 'long_name', None):
            var_names = [(variable[ii], variable[ii], self.dataset[variable[ii]].long_name, "DISK_DRIVE", ii) for ii in range(len(variable))]
        else:
            var_names = [(variable[ii], variable[ii], variable[ii], "DISK_DRIVE", ii) for ii in range(len(variable))]
        return var_names
def netcdf_values(self,selected_variable,active_resolution):
"""
"""
self.selected_variable = selected_variable
variable = self.dataset[selected_variable]
dict_var_shape = {ii:slice(0,variable[ii].size,self.resolution_steps(variable[ii].size,active_resolution))
for ii in variable.coords if 'time' not in ii}
print(selected_variable,dict_var_shape)
# To Do: fix resolution implementation, perhaps a non linear coarsening
variable_res = variable.isel(dict_var_shape)
return variable_res
def resolution_steps(self,size,res):
res_interst = res/5 + 80
log_scale = np.log10(size)/np.log10((size*res_interst/100)) - 1
step = size * log_scale
        if step == 0:
step = 1
return int(step)
```
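A hedged usage sketch (for illustration; the `bpy` import means this only runs inside Blender's Python, and the path and variable name are assumptions):

```python
# Sketch: load files and inspect variables from Blender's Python console.
engine = BlenderncEngine()
engine.check_files_netcdf("/path/to/data/*.nc")  # hypothetical path; globs are supported
print(engine.netcdf_var())                       # (name, name, description, icon, index) tuples
values = engine.netcdf_values("temp", active_resolution=50)  # "temp" is a hypothetical variable
```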
{
    "source": "Josue-Martinez-Moreno/phd_source",
    "score": 2
}
#### File: trackeddy_utils/OCCIPUT/trackeddy_OCCIPUT.py
```python
import matplotlib
matplotlib.use('agg')
import sys
from netCDF4 import Dataset
import os
os.environ["PROJ_LIB"] = "/g/data4/x77/jm5970/env/trackeddy/share/proj"
import cmocean as cm
from trackeddy.tracking import *
from trackeddy.datastruct import *
from trackeddy.geometryfunc import *
from trackeddy.physics import *
from numpy import *
from scipy.interpolate import griddata
import time as ttime
ensemble =int(sys.argv[1])
year =int(sys.argv[2])
file_division =int(sys.argv[3])
division_number =int(sys.argv[4])
file_count =int(sys.argv[5])
def split_list(alist, wanted_parts=1):
length = len(alist)
return np.array([ alist[i*length // wanted_parts: (i+1)*length // wanted_parts]
for i in range(wanted_parts) ])
outfile='/g/data/v45/jm5970/trackeddy_output/OCCIPUT/npy/ORCA025.L75-OCCITENS.{0:03}/'.format(ensemble)
try:
    os.mkdir(outfile)
except OSError:
    print('output directory already exists; previous data will be overwritten')
# Output data path
outputpath='/g/data/v45/amh157/OCCIPUT/SSH_ENSEMBLE_all/ORCA025.L75-OCCITENS.%03d-S/' % ensemble
tic=ttime.time()
# Import SSH values to python environment.
ncfile=Dataset(outputpath+'1d/ORCA025.L75-OCCITENS.{0:03}_y{1}.1d_SSH.nc'.format(ensemble,year))
time=ncfile.variables['time_counter'][:]
time_division=split_list(range(0,len(time)), wanted_parts=file_division)
#print(time_division)
ssh=ncfile.variables['ssh'][time_division[division_number][0]:time_division[division_number][-1]+1,:,:]
# Import geographic coordinates (Lon, Lat)
lon=ncfile.variables['nav_lon'][:]
lat=ncfile.variables['nav_lat'][:]
x=np.linspace(-180,180,shape(lon)[1])
y=np.linspace(-90,90,shape(lon)[0])
X,Y=meshgrid(x,y)
eta=np.zeros((shape(ssh)[0],len(y),len(x)))
for t in range(shape(ssh)[0]):
eta[t,:,:]=griddata((lon.ravel(),lat.ravel()),ssh[t,:,:].ravel(),(X,Y),'linear')
eta=np.ma.masked_where(isnan(eta),eta)
toc=ttime.time()
print('elapsed time:', toc - tic)
# Import SSH 10 yrs mean values to python environment.
ncfile=Dataset('/g/data/v45/jm5970/trackeddy_output/OCCIPUT/pre-processing/ORCA025.L75-OCCITENS.{0:03}_y_mean.nc'.format(ensemble))
ssh_mean=squeeze(ncfile.variables['ssh'][:,:]).data
areamap=array([[0,len(x)],[0,len(y)]])
filters = {'time':{'type':'historical','t':None,'t0':None,'value':ssh_mean},
'spatial':{'type':'moving','window':120,'mode':'uniform'}}
preferences={'ellipse':0.7,'eccentricity':0.95,'gaussian':0.7}
#levels = {'max':nanmax(eta),'min':0.001,'step':0.002}
#eddytd=analyseddyzt(eta,x,y,0,shape(eta)[0],1,levels,areamap=areamap,mask='',maskopt='forcefit',timeanalysis='none'\
# ,preferences=preferences,filters=filters,destdir='',physics='',diagnostics=False,pprint=False)
#print("Saving Positive",file_count)
#save(outfile+'OCCIPUT_%05d_pos.npy' % file_count,eddytd)
levels = {'max':-nanmin(eta),'min':0.001,'step':0.002}
eddytdn=analyseddyzt(-eta,x,y,0,shape(eta)[0],1,levels,areamap=areamap,mask='',maskopt='forcefit',timeanalysis='none'\
,preferences=preferences,filters=filters,destdir='',physics='',diagnostics=False,pprint=False)
print("Saving Negative")
save(outfile+'OCCIPUT_%05d_neg.npy' % file_count,eddytdn)
```
#### File: satellite_model/trackeddy_analysis/trackeddy_model_neg.py
```python
import matplotlib
matplotlib.use('agg')
import sys
from netCDF4 import Dataset
import os
os.environ["PROJ_LIB"] = "/g/data/v45/jm5970/env/track_env/share/proj"
import cmocean as cm
from trackeddy.tracking import *
from trackeddy.datastruct import *
from trackeddy.geometryfunc import *
from trackeddy.physics import *
from trackeddy.plotfunc import *
from numpy import *
outputfilenumber =int(sys.argv[1])
division_number =int(sys.argv[2])
file_division =int(sys.argv[3])
file_count =int(sys.argv[4])
def split_list(alist, wanted_parts=1):
length = len(alist)
return np.array([ alist[i*length // wanted_parts: (i+1)*length // wanted_parts]
for i in range(wanted_parts) ])
outfile='/g/data/v45/jm5970/trackeddy_output/ACCESS_OM2/npy/'
# Output data path
outputpath='/g/data3/hh5/tmp/cosima/access-om2-01/01deg_jra55v13_iaf/output%03d/' % outputfilenumber
# Import SSH values to python environment.
ncfile=Dataset(outputpath+'ocean/ocean_daily.nc')
time=ncfile.variables['time'][:]
time_division=split_list(range(0,len(time)), wanted_parts=file_division)
#print(time_division)
eta=ncfile.variables['eta_t'][time_division[division_number][0]:time_division[division_number][-1]+1,:,:]
#print(np.shape(eta))
# Import geographic coordinates (Lon, Lat)
lon=ncfile.variables['xt_ocean'][:]
lat=ncfile.variables['yt_ocean'][:]
# Import SSH 10 yrs mean values to python environment.
ncfile=Dataset('/g/data/v45/jm5970/trackeddy_output/ACCESS_OM2/pre-processing/ACCESS-OM2_01d_eta_mean.nc')
ssh_mean=squeeze(ncfile.variables['eta_t'][:,:]).data
areamap=array([[0,len(lon)],[0,len(lat)]])
filters = {'time':{'type':'historical','t':None,'t0':None,'value':ssh_mean},
'spatial':{'type':'moving','window':120,'mode':'uniform'}}
preferences={'ellipse':0.7,'eccentricity':0.95,'gaussian':0.7}
#levels = {'max':eta.max(),'min':0.01,'step':0.01}
#eddytd=analyseddyzt(eta,lon,lat,0,shape(eta)[0],1,levels,areamap=areamap,mask='',maskopt='forcefit'\
# ,preferences=preferences,filters=filters,destdir='',physics='',diagnostics=False,pprint=True)
#print("Saving Positive",file_count)
#eddysplot=reconstruct_syntetic(shape(eta),lon,lat,eddytd)
#save(outfile+'ACCESS_%05d_pos.npy' % file_count,eddytd)
levels = {'max':-eta.min(),'min':0.01,'step':0.01}
eddytdn=analyseddyzt(-eta,lon,lat,0,shape(eta)[0],1,levels,areamap=areamap,mask='',maskopt='forcefit'\
,preferences=preferences,filters=filters,destdir='',physics='',diagnostics=False,pprint=False)
print("Saving Negative")
save(outfile+'ACCESS_%05d_neg.npy' % file_count,eddytdn)
```
#### File: satellite_model/trackeddy_analysis/trackeddy_satellite.py
```python
import matplotlib
matplotlib.use('agg')
from calendar import monthrange
import sys
from netCDF4 import Dataset
import os
os.environ["PROJ_LIB"] = "/g/data/v45/jm5970/env/track_env/share/proj"
import cmocean as cm
from trackeddy.tracking import *
from trackeddy.datastruct import *
from trackeddy.geometryfunc import *
from trackeddy.physics import *
from trackeddy.plotfunc import *
from numpy import *
#monthsend=int(sys.argv[3])
from os import listdir
from os.path import isfile, join
year=sys.argv[1]
index_files=int(sys.argv[2])
divisions=int(sys.argv[3])
inputfiles='/g/data/ua8/CMEMS_SeaLevel/v4-0/'+year+'/'
onlyfiles = [join(inputfiles, f) for f in listdir(inputfiles) if isfile(join(inputfiles, f))]
onlyfiles.sort()
print(onlyfiles)
def split_list(alist, wanted_parts=1):
length = len(alist)
return np.array([ alist[i*length // wanted_parts: (i+1)*length // wanted_parts]
for i in range(wanted_parts) ])
files2analyse=split_list(onlyfiles, divisions)
print('Analyzing year', year, 'from files [', files2analyse[index_files][0], '-', files2analyse[index_files][-1], ']')
#inputfiles='/g/data/ua8/CMEMS_SeaLevel/v3-0/'+year+'/'
outfile='/g/data/v45/jm5970/trackeddy_output/AVISO+/npy/'
datashapetime=len(files2analyse[index_files])
try:
    ncfile = Dataset(inputfiles+'dt_global_allsat_phy_l4_'+year+'0101_20180115.nc')
except OSError:
    ncfile = Dataset(inputfiles+'dt_global_allsat_phy_l4_'+year+'0101_20180516.nc')
#ncfile=Dataset(inputfiles+'dt_global_allsat_phy_l4_'+year+'0101_20170110.nc')
ssha=squeeze(ncfile.variables['sla'][:])
lon=ncfile.variables['longitude'][:]
lat=ncfile.variables['latitude'][:]
sshatime=zeros([datashapetime,shape(ssha)[0],shape(ssha)[1]])
ii=0
print('Start loading data')
for files in files2analyse[index_files]:
ncfile=Dataset(files)
sshatime[ii,:,:]=squeeze(ncfile.variables['sla'][:])
ii=ii+1
ncfile.close()
#for month in range(monthsin,monthsend):
# daysmonth=monthrange(int(year), month)[1]
# for days in range(1,daysmonth+1):
# print(inputfiles+'dt_global_allsat_phy_l4_'+year+'%02d'%month+'%02d'%days+'_20180115.nc')
# try:
# ncfile=Dataset(inputfiles+'dt_global_allsat_phy_l4_'+year+'%02d'%month+'%02d'%days+'_20180115.nc')
# except:
# ncfile=Dataset(inputfiles+'dt_global_allsat_phy_l4_'+year+'%02d'%month+'%02d'%days+'_20180516.nc')
#
# sshatime[ii,:,:]=squeeze(ncfile.variables['sla'][:])
# ii=ii+1
# ncfile.close()
sshatime=ma.masked_where(sshatime <= -2147483647, sshatime)
print('End loading data')
areamap=array([[0,len(lon)],[0,len(lat)]])
filters = {'time':{'type':None,'t':None,'t0':None,'value':None},
'spatial':{'type':'moving','window':51,'mode':'uniform'}}
levels = {'max':sshatime.max(),'min':0.001,'step':0.001}
eddytd=analyseddyzt(sshatime,lon,lat,0,shape(sshatime)[0],1,levels,areamap=areamap,mask='',timeanalysis='none'\
,filters=filters,destdir='',physics='',diagnostics=False,pprint=False)
print("Saving Positive")
save("{0}aviso_{1}-{2:03}_pos.npy".format(outfile,year,index_files),eddytd)
#levels = {'max':-sshatime.min(),'min':0.001,'step':0.001}
#eddytdn=analyseddyzt(-sshatime,lon,lat,0,shape(sshatime)[0],1,levels,areamap=areamap,mask='',timeanalysis='none'\
# ,filters=filters,destdir='',physics='',diagnostics=False,pprint=True)
#print("Saving Negative")
#save("{0}aviso_{1}-{2:03}_neg.npy".format(outfile,year,index_files),eddytdn)
```
{
    "source": "Josue-Martinez-Moreno/raspiled",
    "score": 3
}
#### File: raspiled/src/raspiled_listener.py
```python
from __future__ import unicode_literals
from utils import *
from ledstrip import LEDStrip
import os
from subprocess import check_output, CalledProcessError
import time
from twisted.internet import reactor, endpoints,protocol
from twisted.protocols import basic
from twisted.web.resource import Resource
from twisted.web.server import Site, Request
from twisted.web.static import File
import json
from named_colours import NAMED_COLOURS
import copy
import logging
import configparser
import datetime
import requests
import random
import string
try:
#python2
from urllib import urlencode
except ImportError:
#python3
from urllib.parse import urlencode
APP_NAME="python ./raspiled_listener.py"
logging.basicConfig(format='[%(asctime)s RASPILED] %(message)s',
datefmt='%H:%M:%S',level=logging.INFO)
RASPILED_DIR = os.path.dirname(os.path.realpath(__file__)) #The directory we're running in
DEFAULTS = {
'config_path' : RASPILED_DIR,
'pi_host' : 'localhost',
'mopidy_port' : 6868,
'pi_port' : 9090, # the port our web server listens on (192.168.0.33:<pi_port>)
'pig_port' : 8888, # the port pigpio daemon is listening on for pin control commands
'latitude' : 52.2053, # If you wish to sync your sunrise/sunset to the real sun, enter your latitude as a decimal
'longitude' : 0.1218, # If you wish to sync your sunrise/sunset to the real sun, enter your longitude as a decimal
# Initial default values for your output pins. You can override them in your raspiled.conf file
'red_pin' : '27',
'green_pin' : '17',
'blue_pin' : '22'
}
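# For reference, the generated raspiled.conf looks roughly like this (a sketch;
# configparser writes these defaults under a [DEFAULT] section):
#
#   [DEFAULT]
#   pi_host = localhost
#   pi_port = 9090
#   pig_port = 8888
#   red_pin = 27
#   green_pin = 17
#   blue_pin = 22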
config_path = os.path.expanduser(RASPILED_DIR+'/raspiled.conf')
wlist_path = os.path.expanduser(RASPILED_DIR+'/.whitelist.json')
parser = configparser.ConfigParser(defaults=DEFAULTS)
params = {}
if os.path.exists(config_path):
logging.info('Using config file: {}'.format(config_path))
parser.read(config_path)
params = Odict2int(parser.defaults())
config_file_needs_writing = False
else:
config_file_needs_writing = True
# No config file exists, give the user a chance to specify their pin configuration
logging.warn('No config file found. Creating default {} file.'.format(config_path))
logging.warn('*** Please edit this file as needed. ***')
# Allow user to customise their pin config
while True:
try: # These will assume the default settings UNLESS you enter a different value
user_input_red_pin = int(input('RED pin number [{}]:'.format(DEFAULTS["red_pin"])) or DEFAULTS["red_pin"])
user_input_green_pin = int(input('GREEN pin number [{}]:'.format(DEFAULTS["green_pin"])) or DEFAULTS["green_pin"])
user_input_blue_pin = int(input('BLUE pin number [{}]:'.format(DEFAULTS["blue_pin"])) or DEFAULTS["blue_pin"])
except (ValueError, TypeError):
logging.warn('*** The input should be an integer ***')
else:
DEFAULTS['red_pin'] = user_input_red_pin
DEFAULTS['green_pin'] = user_input_green_pin
DEFAULTS['blue_pin'] = user_input_blue_pin
if DEFAULTS['red_pin'] == DEFAULTS['blue_pin'] or DEFAULTS['red_pin'] == DEFAULTS['green_pin'] or DEFAULTS['green_pin'] == DEFAULTS['blue_pin']:
logging.warn('*** The pin number should be different for all pins. ***')
else:
config_file_needs_writing = True
break
# Check that our ports are sane:
user_pi_port = params.get("pi_port", DEFAULTS["pi_port"])
user_pig_port = params.get("pig_port", DEFAULTS["pig_port"])
while True:
config_is_ok = True
try:
if int(user_pi_port) == int(user_pig_port):
config_is_ok = False
raise RuntimeError("*** You cannot have the web server running on port {} while the pigpio daemon is also running on that port! ***".format(DEFAULTS["pi_port"]))
except RuntimeError as e:
logging.warn(e)
except (ValueError, TypeError):
logging.warn("*** You have specified an invalid port number for the Raspiled web server ({}) or the Pigpio daemon ({}) ***".format(DEFAULTS["pi_port"], DEFAULTS["pig_port"]))
else: # Config is fine... carry on
DEFAULTS["pi_port"] = user_pi_port
DEFAULTS["pig_port"] = user_pig_port
break
try:
user_pi_port = int(input('Raspiled web server port (e.g. 9090) [{}]:'.format(DEFAULTS["pi_port"])) or DEFAULTS["pi_port"])
user_pig_port = int(input('Pigpio daemon port (e.g. 8888) [{}]:'.format(DEFAULTS["pig_port"])) or DEFAULTS["pig_port"])
except (ValueError, TypeError):
logging.warn('*** The input should be an integer ***')
else:
config_file_needs_writing = True
# Now write the config file if needed
if config_file_needs_writing:
parser = configparser.ConfigParser(defaults=DEFAULTS)
with open(config_path, 'w') as f:
parser.write(f)
params = Odict2int(parser.defaults())
RESOLVED_USER_SETTINGS = params # Alias for clarity
DEBUG = False
def D(item):
if DEBUG:
logging.info(item)
class Preset(object):
"""
Represents a preset for the web UI for the user to click on
args and kwargs become the querystring
"""
args=None
kwargs=None
label=None
display_colour=None
display_gradient=None
def __init__(self, label="??", display_colour=None, display_gradient=None, is_sequence=False, is_sun=False, *args, **kwargs):
"""
Sets up this preset
"""
self.label=label
self.display_colour = display_colour
self.display_gradient= display_gradient or []
self.is_sequence = is_sequence
self.is_sun = is_sun
self.args = args
self.kwargs = kwargs
def __repr__(self):
"""
Says what this is
"""
out = "Preset '{label}': {colour} - {querystring} - {sunquery}".format(label=self.label, colour=self.colour, querystring=self.querystring, sunquery=self.sunquery)
return out
def __unicode__(self):
return self.render()
@property
def colours(self):
"""
Returns a faithful hex value for the given colour(s)
"""
if not self.display_gradient:
colours = [self.display_colour] #Listify single entity
else:
colours = self.display_gradient
colours_out_list = []
for colour_term in colours:
try:
col_value = NAMED_COLOURS[str(colour_term).lower()]
except KeyError:
col_value = colour_term
colours_out_list.append(col_value)
return colours_out_list
@property
def colour(self):
"""
Returns a string value for the colours in the form of faithful hex
"""
return ", ".join(self.colours)
def colours_for_css_background(self):
"""
Renders the colours as a CSS background!
linear-gradient(to right, col1 , col2, col3)
"""
css_colours = self.colours
        if len(css_colours) < 1:  # No colours; fall back to transparent
return "transparent"
elif len(css_colours)==1: #One colour means one single coloured bg
return self.colours[0]
return """linear-gradient(40deg, {colour_list})""".format(colour_list=", ".join(css_colours))
@property
def querystring(self):
"""
Converts args and kwargs into a querystring
"""
kwargs = copy.copy(self.kwargs)
for arg in self.args: #Add in terms for args
kwargs[arg] = ""
qs = urlencode(kwargs, doseq=True) #Flattens list
return qs
def render_css(self):
"""
Generates a CSS gradient from the self.display_gradient list
"""
if self.display_gradient:
return "background: linear-gradient(-40deg, {colour_values}); color: white; text-shadow: 2px 2px 2px #000000".format(colour_values=self.colour)
if self.display_colour:
contrast_colour = LEDStrip.contrast_from_bg(col=self.colour, dark_default="202020")
return "background: {display_colour}; color: {contrast_colour}".format(
display_colour=self.colours_for_css_background(),
contrast_colour=contrast_colour
)
return ""
def render_is_sequence(self):
"""
Returns Javascript boolean for whether this is a sequence or not
"""
if self.is_sequence:
return "true"
return ""
@property
def sunquery(self):
"""
Returns sunset or sunrise temperature values
"""
if self.is_sun:
sunarg={}
#for ii in range(0,len(self.display_gradient)):
if self.display_gradient[0]>self.display_gradient[1]:
sunarg['temp_start']=self.display_gradient[0]
sunarg['temp_end']=self.display_gradient[1]
else:
sunarg['temp_start']=self.display_gradient[1]
sunarg['temp_end']=self.display_gradient[0]
cs = urlencode(sunarg, doseq=True)
return cs
return ""
def render(self):
"""
Renders this preset as an HTML button or selection.
"""
html = """
<a href="javascript:void(0);" class="select_preset preset_button" data-qs="{querystring}" data-sequence="{is_sequence}" data-color="{sun_temp}" style="{css_style}">
{label}
</a>
""".format(
querystring=self.querystring,
css_style=self.render_css(),
label=self.label,
is_sequence=self.render_is_sequence(),
sun_temp=self.sunquery
)
return html
def render_select(self):
html = """
<option href="javascript:void(0);" value="{label}" class="select_preset preset_option" data-qs="{querystring}" data-sequence="{is_sequence}" data-color="{sun_temp}" style="{css_style}">
{label}
</option>
""".format(
querystring=self.querystring,
css_style=self.render_css(),
label=self.label,
is_sequence=self.render_is_sequence(),
sun_temp=self.sunquery
)
return html
class PresetSpace(object):
"""
Simply spaces presets apart!
"""
def render(self):
return " "
def render_select(self):
return " "
class RaspiledControlResource(Resource):
"""
Our web page for controlling the LED strips
"""
isLeaf = False # Allows us to go into dirs
led_strip = None # Populated at init
_path = None # If a user wants to hit a dynamic subpage, the path appears here
PARAM_TO_AUTHENTICATE = (
("user","newclient"),
("ukey","authenticate"),
)
    # State what params should automatically trigger actions. If none are supplied, a default page is shown. Specified in order of hierarchy.
PARAM_TO_ACTION_MAPPING = (
# Stat actions
("off", "off"),
("stop", "stop"),
("set", "set"),
("fade", "fade"),
("color", "fade"),
("colour", "fade"),
# Sequences
("sunrise", "sunrise"),
("morning", "alarm"),
("dawn", "alarm"),
("sunset", "sunset"),
("evening", "sunset"),
("dusk", "sunset"),
("night", "sunset"),
("jump", "jump"),
("rotate", "rotate"),
("rot", "rotate"),
("huerot", "rotate"),
("colors", "rotate"),
("colours", "rotate"),
# Docs:
("capabilities", "capabilities"),
("capability", "capabilities"),
("status", "status"),
)
# State what presets to render:
OFF_PRESET = Preset(label="""<img src="/static/figs/power-button-off.svg" class="icon_power_off"> Off""", display_colour="black", off="")
PRESETS = {
"Whites":( #I've had to change the displayed colours from the strip colours for a closer apparent match
Preset(label="Candle", display_colour="1500K", fade="1000K"),
Preset(label="Tungsten", display_colour="3200K", fade="2000K"),
Preset(label="Bulb match", display_colour="3900K", fade="ff821c"),
Preset(label="Warm white", display_colour="4800K", fade="2600k"), #Bulb match
Preset(label="Strip white", display_colour="6000K", fade="3200K"),
Preset(label="Daylight", display_colour="6900K", fade="5800K"),
Preset(label="Cool white", display_colour="9500K", fade="10500K"),
),
"Sunrise / Sunset":(
Preset(label="↑ 2hr", display_gradient=("2000K","5000K"), sunrise=60*60*2, is_sequence=True, is_sun=True),
Preset(label="↑ 1hr", display_gradient=("2000K","5000K"), sunrise=60*60*1, is_sequence=True, is_sun=True),
Preset(label="↑ 30m", display_gradient=("2000K","5000K"), sunrise=60*30, is_sequence=True, is_sun=True),
Preset(label="↑ 1m", display_gradient=("2000K","5000K"), sunrise=60*1, is_sequence=True, is_sun=True),
PresetSpace(),
Preset(label="↓ 1m", display_gradient=("5000K","2000K"), sunset=60*1, is_sequence=True, is_sun=True),
Preset(label="↓ 30m", display_gradient=("5000K","2000K"), sunset=60*30, is_sequence=True, is_sun=True),
Preset(label="↓ 1hr", display_gradient=("5000K","2000K"), sunset=60*60*1, is_sequence=True, is_sun=True),
Preset(label="↓ 2hr", display_gradient=("5000K","2000K"), sunset=60*60*2, is_sequence=True, is_sun=True),
),
"Colours":(
Preset(label="Red", display_colour="#FF0000", fade="#FF0000"),
Preset(label="Orange", display_colour="#FF8800", fade="#FF8800"),
Preset(label="Yellow", display_colour="#FFFF00", fade="#FFFF00"),
Preset(label="Lime", display_colour="#88FF00", fade="#88FF00"),
Preset(label="Green", display_colour="#00BB00", fade="#00FF00"),
Preset(label="Aqua", display_colour="#00FF88", fade="#00FF88"),
Preset(label="Cyan", display_colour="#00FFFF", fade="#00FFFF"),
Preset(label="Blue", display_colour="#0088FF", fade="#0088FF"),
Preset(label="Indigo", display_colour="#0000FF", fade="#0000FF"),
Preset(label="Purple", display_colour="#8800FF", fade="#7A00FF"), # There's a difference!
Preset(label="Magenta", display_colour="#FF00FF", fade="#FF00FF"),
Preset(label="Crimson", display_colour="#FF0088", fade="#FF0088"),
),
"Sequences":(
Preset(label="🔥 Campfire", display_gradient=("600K","400K","1000K","400K"), rotate="1100K,800K,1100K,1300K,1300K,900K,1500K,800K,900K,800K,1300K,600K,600K,600K,900K,600K,900K,1100K,1400K,1400K,900K,800K,600K,700K,700K,900K,1000K,1000K,800K,900K,1000K,700K,900K,1000K,600K,700K,1000K,800K,800K,1400K,900K,1100K,1000K,1500K,1000K,1000K,900K,700K", milliseconds="80", is_sequence=True),
Preset(label="🐟 Fish tank", display_gradient=("#00FF88","#0088FF","#007ACC","#00FFFF"), rotate="00FF88,0088FF,007ACC,00FFFF", milliseconds="2500", is_sequence=True),
Preset(label="🎉 Party", display_gradient=("cyan","yellow","magenta"), rotate="cyan,yellow,magenta", milliseconds="1250", is_sequence=True),
Preset(label="🌻 Flamboyant", display_gradient=("yellow","magenta"), jump="yellow,magenta", milliseconds="150", is_sequence=True),
Preset(label="🚨 NeeNaw", display_gradient=("cyan","blue"), jump="cyan,blue", milliseconds="100", is_sequence=True),
Preset(label="🚨 NeeNaw USA", display_gradient=("red","blue"), jump="red,blue", milliseconds="100", is_sequence=True),
Preset(label="🌈 Full circle", display_gradient=("#FF0000","#FF8800","#FFFF00","#88FF00","#00FF00","#00FF88","#00FFFF","#0088FF","#0000FF","#8800FF","#FF00FF","#FF0088"), milliseconds=500, rotate="#FF0000,FF8800,FFFF00,88FF00,00FF00,00FF88,00FFFF,0088FF,0000FF,8800FF,FF00FF,FF0088", is_sequence=True),
)
}
ALARM_PRESETS = {
"Morning":(
Preset(label="↑ 2hr", display_gradient=("0K","5000K"), morning=60*60*2, is_sequence=True, is_sun=True),
Preset(label="↑ 1hr", display_gradient=("0K","5000K"), morning=60*60*1, is_sequence=True, is_sun=True),
Preset(label="↑ 30m", display_gradient=("0K","5000K"), morning=60*30, is_sequence=True, is_sun=True),
Preset(label="↑ 1m", display_gradient=("0K","5000K"), morning=60*1, is_sequence=True, is_sun=True),
),
"Dawn":(
Preset(label="↓ 2hr", display_gradient=("5000K","0K"), dawn=60*60*2, is_sequence=True, is_sun=True),
Preset(label="↓ 1hr", display_gradient=("5000K","0K"), dawn=60*60*1, is_sequence=True, is_sun=True),
Preset(label="↓ 30m", display_gradient=("5000K","0K"), dawn=60*30, is_sequence=True, is_sun=True),
Preset(label="↓ 1m", display_gradient=("5000K","0K"), dawn=60*1, is_sequence=True, is_sun=True),
)
}
PRESETS_COPY = copy.deepcopy(PRESETS) # Modifiable dictionary. Used in alarms and music.
def __init__(self, *args, **kwargs):
"""
@TODO: perform LAN discovery, interrogate the resources, generate controls for all of them
"""
self.led_strip = LEDStrip(RESOLVED_USER_SETTINGS)
Resource.__init__(self, *args, **kwargs) #Super
# Add in the static folder.
static_folder = os.path.join(RASPILED_DIR,"static")
self.putChild("static", File(static_folder)) # Any requests to /static serve from the filesystem.
def getChild(self, path, request, *args, **kwargs):
"""
Entry point for dynamic pages
"""
self._path = path
return self
def getChildWithDefault(self, path, request):
"""
Retrieve a static or dynamically generated child resource from me.
First checks if a resource was added manually by putChild, and then
call getChild to check for dynamic resources. Only override if you want
to affect behaviour of all child lookups, rather than just dynamic
ones.
This will check to see if I have a pre-registered child resource of the
given name, and call getChild if I do not.
@see: L{IResource.getChildWithDefault}
"""
if path in self.children:
return self.children[path]
return self.getChild(path, request)
def client_LOGIN(self, request):
accepted_connection=False
self.ip=request.getClientIP()
self.session_data = {
'0':{
'user' : 'pi',
'psw' : '',
'ip' : ['127.0.0.1'],
'key' : '',
'last_c': '',
'u_key' : '',
}
}
if os.path.exists(wlist_path):
with open(wlist_path) as json_data:
self.session_data = json.load(json_data)
else:
self.whitelistjson2file()
        for key, value in self.session_data.items():
            if accepted_connection:
                break
            for ip in value['ip']:
                if self.ip == ip:
                    accepted_connection = True
                    break
return accepted_connection
def render_GET(self, request):
"""
MAIN WEB PAGE ENTRY POINT
Responds to GET requests
If a valid action in the GET querystring is present, that action will get performed and
the web server will return a JSON response. The assumption is that a javascript function is calling
this web server to act as an API
If a human being arrives at the web server without providing a valid action in the GET querystring,
they'll just be given the main html page which shows all the buttons.
@param request: The http request, passed in from Twisted, which will be an instance of <SmartRequest>
@return: HTML or JSON depending on if there is no action or an action.
"""
accepted_client = self.client_LOGIN(request)
if accepted_client:
_colour_result = None
            # Look through the actions; if a matching request key or path exists, perform that action
            clean_path = unicode(self._path or u"").rstrip("/")
for key_name, action_name in self.PARAM_TO_ACTION_MAPPING:
if request.has_param(key_name) or clean_path == key_name:
action_func_name = "action__%s" % action_name
if action_name in ("capabilities", "status"): # Something is asking for our capabilities or status
output = getattr(self, action_func_name)(request) # Execute that function
request.setHeader("Content-Type", "application/json; charset=utf-8")
return json.dumps(output)
else:
self.led_strip.stop_current_sequence() #Stop current sequence
_colour_result = getattr(self, action_func_name)(request) #Execute that function
break
# Now deduce our colour:
current_colour = "({})".format(self.led_strip)
current_hex = self.led_strip.hex
contrast_colour = self.led_strip.contrast_from_bg(current_hex, dark_default="202020")
# Return a JSON object if an action has been performed (i.e. _colour_result is set):
if _colour_result is not None:
json_data = {
"current" : current_hex,
"contrast" : contrast_colour,
"current_rgb": current_colour
}
try:
request.setHeader("Content-Type", "application/json; charset=utf-8")
return json.dumps(json_data)
except (TypeError, ValueError):
return b"Raspiled generated invalid JSON data!"
# Otherwise, we've not had an action, so return normal page
request.setHeader("Content-Type", "text/html; charset=utf-8")
htmlstr = ''
with open(RASPILED_DIR+'/static/index.html') as index_html_template:
htmlstr = index_html_template.read() # 2018-09-08 It's more efficient to pull the whole file in
return htmlstr.format(
current_colour=current_colour,
current_hex=current_hex,
contrast_colour=contrast_colour,
off_preset_html=self.OFF_PRESET.render(),
light_html=self.light_presets(request),
alarm_html=self.alarm_presets(request),
music_html=self.music_presets(request),
controls_html=self.udevelop_presets(request),
addition_js=self.js_interactions(request)
).encode('utf-8')
else:
# Authentication
_connection_result=None
for key_name, action_name in self.PARAM_TO_AUTHENTICATE:
if request.has_param(key_name):
action_func_name = "action__%s" % action_name
_connection_result = getattr(self, action_func_name)(request)
break
if _connection_result is not None:
try:
return json.dumps(_connection_result)
except (TypeError, ValueError):
return b"Raspiled generated invalid JSON data!"
request.setHeader("Content-Type", "text/html; charset=utf-8")
with open(RASPILED_DIR+'/static/singin.html') as signin_html_template:
htmlstr = signin_html_template.read() # More efficient to pull the whole file in
return htmlstr.encode('utf-8')
def action__newclient(self,request):
"""
Pushes a new client to authentication, or appends the new IP to the dictionary of sessions.
"""
self.user = request.get_param("user", force=unicode)
self.pswd = request.get_param("pswd", force=unicode)
if (self.user is None or self.pswd is None):
pass
elif (self.user=='' or self.pswd == '' ):
return {'error':True,'message':'Empty user or password','accepted':False,'authentication':False}
else:
csession = request.getSession()
self.key = csession.uid
for key, value in self.session_data.items():
if value['user'] == self.user and value['psw'] == self.pswd:
value['ip'].append(self.ip)
self.whitelistjson2file()
return {'error':False,'message':'Success','accepted':True,'authentication':False}
self.ukey = ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(10))
logging.info("Attempt from user: %s with IP: %s to access the server. Unique key: %s" % (self.user,self.ip,self.ukey))
return {'error':False,'message':'Authenticate','accepted':False,'authentication':True}
def action__authenticate(self,request):
"""
Client authenticate with random string generated in the server
"""
user_ukey = request.get_param("ukey", force=unicode)
if self.ukey == user_ukey:
self.session_data[str(len(self.session_data.keys()))]={
'user' : self.user,
'psw' : self.pswd,
'ip' : [self.ip],
'key' : self.key,
'last_c': str(datetime.datetime.now()),
'u_key' : self.ukey,
}
self.whitelistjson2file()
return {'error':False,'message':'Success','accepted':True,'authentication':False}
else:
self.ukey=''.join(random.SystemRandom().choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(10))
logging.info("New unique key: %s" % self.ukey)
return {'error':True,'message':'New authenticate code','accepted':False,'authentication':True}
def whitelistjson2file(self):
with open(wlist_path, 'w') as write_file:
json.dump(self.session_data, write_file)
#### Additional pages available via the menu ####
def light_presets(self, request):
"""
Renders the light presets as options
@param request: The http request object
"""
out_html_list = []
for group_name, presets in self.PRESETS.items():
preset_list = []
#Inner for
for preset in presets:
preset_html = preset.render()
preset_list.append(preset_html)
group_html = """
<div class="preset_group">
<h2>{group_name}</h2>
<div class="presets_row">
{preset_html}
</div>
</div>
""".format(
group_name = group_name,
preset_html = "\n".join(preset_list)
)
out_html_list.append(group_html)
out_html = "\n".join(out_html_list)
return out_html
def alarm_presets(self,request):
"""
Renders the alarm presets as options. Same sunrise or sunset routine except for 100k.
"""
out_html_list = []
for group_name, presets in self.ALARM_PRESETS.items():
preset_list = []
for preset in presets:
preset_html = preset.render_select()
preset_list.append(preset_html)
group_html = """
<p> {group_name} time </p>
<div class="{group_name}"></div>
<div class="preset_group">
<select class="presets_select {group_name}_select">
{preset_html}
</select>
</div>
""".format(
group_name = group_name,
preset_html = "\n".join(preset_list),
)
out_html_list.append(group_html)
out_html = "\n".join(out_html_list)
return out_html
def music_presets(self,request):
"""
Renders the Mopidy music front page (currently disabled).
"""
#out_html="""
# <iframe src="http://192.168.182.190:{mopify}/mopify/" style="width:100vw;height:100vh">
# </iframe>
#""".format(mopify=params['mopidy_port'])
#return out_html
pass
def udevelop_presets(self,request):
"""
Renders the Under Development text.
"""
out_html="""
<div class="underdevelop">
<h1> Under Development, please refer to the Github repository.</h1>
</div>
"""
return out_html
def js_interactions(self,request):
request.setHeader("Content-Type", "text/html; charset=utf-8")
if params['latitude'] == '' or params['longitude'] == '':
lat,lon=pi_gps_location()
else:
lat=params['latitude']
lon=params['longitude']
with open(RASPILED_DIR+'/static/js/raspiled_interaction.js') as js_template:
jsstr = js_template.read() # More efficient to pull the whole file in
return jsstr.format(latcoord=str(lat),loncoord=str(lon)).encode('utf-8')
#### Actions: These are the actions our web server can initiate. Triggered by hitting the url with ?action_name=value ####
def action__set(self, request):
"""
Run when user wants to set a colour to a specified value
"""
set_colour = request.get_param("set", force=unicode)
D("Set to: %s" % set_colour)
return self.led_strip.set(set_colour)
action__set.capability = {
"param": "set",
"description": "Sets the RGB strip to a single colour.",
"value": "<unicode> A named colour (e.g. 'pink') or colour hex value (e.g. '#19BECA').",
"validity": "<unicode> A known named colour, or valid colour hex in the range #000000-#FFFFFF.",
"widget": "colourpicker",
"returns": "<unicode> The hex value of the colour the RGB strip has been set to."
}
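# A minimal usage sketch (illustrative, not from the original repo): the
# capability above describes an HTTP GET API, so any HTTP client can drive
# it. The hostname and port below are assumptions.
#
#   import requests
#   resp = requests.get("http://raspberrypi.local:9090/", params={"set": "#19BECA"})
#   print(resp.json()["current"])  # -> hex value the strip was set to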
def action__fade(self, request):
"""
Run when user wants to fade the current colour to a specified value
"""
fade_colour = request.get_param("fade", force=unicode)
logging.info("Fade to: %s" % fade_colour)
return self.led_strip.fade(fade_colour)
action__fade.capability = {
"param": "fade",
"description": "Fades the RGB strip from its current colour to a specified colour.",
"value": "<unicode> A named colour (e.g. 'pink') or colour hex value (e.g. '#19BECA').",
"validity": "<unicode> A known named colour, or valid colour hex in the range #000000-#FFFFFF",
"returns": "<unicode> The hex value of the colour the RGB strip has been set to."
}
def action__sunrise(self, request):
"""
Performs a sunrise over the specified period of time
"""
seconds = request.get_param(["seconds","s","sunrise"], default=10.0, force=float)
milliseconds = request.get_param(["milliseconds","ms"], default=0.0, force=float)
temp_start = request.get_param(['temp_start','K'], default=None, force=unicode)
temp_end = request.get_param('temp_end', default=None, force=unicode)
logging.info("Sunrise: %s seconds" % (seconds + (milliseconds/1000.0)))
return self.led_strip.sunrise(seconds=seconds, milliseconds=milliseconds, temp_start=temp_start, temp_end=temp_end)
action__sunrise.capability = {
"param": "sunrise",
"description": "Gently fades-in the RGB strip from deep red to daylight.",
"value": "The number of seconds you would like the sunrise to take.",
"validity": "<float> > 0",
"optional_concurrent_parameters": [
{
"param": "milliseconds",
"value": "The number of milliseconds the sunrise should take. Will be added to seconds (if specified) to give a total time.",
"validity": "<int> > 0",
"default": "1000",
},
{
"param": "temp_start",
"value": "The colour temperature you wish to start from (e.g. 500K).",
"validity": "<unicode> Matches a named colour temperature (50K - 15000K in 100 Kelvin steps)",
"default": "6500K"
},
{
"param": "temp_end",
"value": "The colour temperature you wish to finish at (e.g. 4500K).",
"validity": "<unicode> Matches a named colour temperature (50K - 15000K in 100 Kelvin steps)",
"default": "500K"
}
],
"returns": "<unicode> The hex value of the colour the RGB strip has been set to."
}
def action__sunset(self, request):
"""
Performs a sunset over the specified period of time
"""
seconds = request.get_param(["seconds","s","sunset"], default=10.0, force=float)
milliseconds = request.get_param(["milliseconds","ms"], default=0.0, force=float)
temp_start = request.get_param(['temp_start', 'K'], default=None, force=unicode)
temp_end = request.get_param('temp_end', default=None, force=unicode)
logging.info("Sunset: %s seconds" % (seconds + (milliseconds/1000.0)))
return self.led_strip.sunset(seconds=seconds, milliseconds=milliseconds, temp_start=temp_start, temp_end=temp_end)
action__sunset.capability = {
"param": "sunset",
"description": "Gently fades-out the RGB strip from daylight to deep-red.",
"value": "The number of seconds you would like the sunrise to take.",
"validity": "<float> > 0",
"optional_concurrent_parameters": [
{
"param": "milliseconds",
"value": "The number of milliseconds the sunset should take. Will be added to seconds (if specified) to give a total time.",
"validity": "<int> > 0",
"default": "1000",
},
{
"param": "temp_start",
"value": "The colour temperature you wish to start from (e.g. 500K).",
"validity": "<unicode> Matches a named colour temperature (50K - 15000K in 100 Kelvin steps)",
"default": "500K"
},
{
"param": "temp_end",
"value": "The colour temperature you wish to finish at (e.g. 4500K).",
"validity": "<unicode> Matches a named colour temperature (50K - 15000K in 100 Kelvin steps)",
"default": "6500K"
}
],
"returns": ""
}
def action__alarm(self, request):
"""
Schedules sunrise (morning) and sunset (dawn) alarms at the specified times
"""
m_seconds = request.get_param(["seconds","s","morning"], default=10.0, force=float)
d_seconds = request.get_param(["seconds","s","dawn"], default=10.0, force=float)
hour = request.get_param(["time","hr","hour"], default='12:00', force=unicode)
freq = request.get_param(["freq"], default='daily', force=float)
milliseconds = request.get_param(["milliseconds","ms"], default=0.0, force=float)
temp_start = request.get_param(['temp_start', 'K'], default=None, force=unicode)
temp_end = request.get_param('temp_end', default=None, force=unicode)
logging.info("Morning Alarm : %s seconds at %s" % (m_seconds + (milliseconds/1000.0), hour[0]))
logging.info("Dawn Alarm : %s seconds at %s" % (d_seconds + (milliseconds/1000.0), hour[1]))
return self.led_strip.alarm(seconds=[d_seconds,m_seconds], milliseconds=milliseconds, hour=hour, freq=freq, temp_start=temp_start, temp_end=temp_end)
def action__jump(self, request):
"""
Jump from one specified colour to the next
"""
jump_colours = request.get_param_values("jump")
seconds = request.get_param(["seconds","s"], default=0.0, force=float)
milliseconds = request.get_param(["milliseconds","ms"], default=0.0, force=float)
self.led_strip.stop_current_sequence() #Terminate any sequence that's currently running
total_seconds = (seconds + (milliseconds/1000.0))
logging.info("Jump: %s, %s seconds" % (jump_colours, total_seconds))
return self.led_strip.jump(jump_colours, seconds=seconds, milliseconds=milliseconds) #Has its own colour sanitisation routine
action__jump.capability = {
"param": "jump",
"description": "Hops from one colour to the next over an even period of time.",
"value": "A comma delimited list of colours you wish to jump between.",
"validity": "<unicode> valid colour names or hex values separated by commas (e.g. red,blue,green,cyan,#FF00FF)",
"optional_concurrent_parameters": [
{
"param": "milliseconds",
"value": "The number of milliseconds the each colour should be displayed for. Will be added to seconds (if specified) to give a total time.",
"validity": "<int> > 0",
"default": "200",
},
{
"param": "seconds",
"value": "The number of seconds each colour should be displayed for. Will be added to milliseconds (if specified) to give a total time.",
"validity": "<int> > 0",
"default": "0",
},
],
"returns": "<unicode> The first hex value of sequence."
}
def action__rotate(self, request):
"""
Rotates (fades) from one specified colour to the next
"""
rotate_colours = request.get_param_values("rotate")
seconds = request.get_param(["seconds","s"], default=0.0, force=float)
milliseconds = request.get_param(["milliseconds","ms"], default=0.0, force=float)
self.led_strip.stop_current_sequence() #Terminate any sequence that's currently running
total_seconds = (seconds + (milliseconds/1000.0))
logging.info("Rotate: %s, %s seconds" % (rotate_colours, total_seconds))
return self.led_strip.rotate(rotate_colours, seconds=seconds, milliseconds=milliseconds) #Has its own colour sanitisation routine
action__rotate.capability = {
"param": "rotate",
"description": "Fades from one colour to the next over an even period of time.",
"value": "A comma delimited list of colours you wish to cross-fade between.",
"validity": "<unicode> valid colour names or hex values separated by commas (e.g. red,blue,green,cyan,#FF00FF)",
"optional_concurrent_parameters": [
{
"param": "milliseconds",
"value": "The number of milliseconds the each colour fade should take. Will be added to seconds (if specified) to give a total time.",
"validity": "<int> > 0",
"default": "200",
},
{
"param": "seconds",
"value": "The number of seconds each colour fade should take. Will be added to milliseconds (if specified) to give a total time.",
"validity": "<int> > 0",
"default": "0",
},
],
"returns": "<unicode> The first hex value of sequence."
}
def action__stop(self, request):
"""
Stops the current sequence
"""
return self.led_strip.stop()
action__stop.capability = {
"param": "stop",
"description": "Halts the current sequence or fade.",
"value": "",
"returns": "<unicode> The hex value of colour the RGB strip got halted on."
}
def action__off(self, request):
"""
Turns the strip off
"""
logging.info("Off!")
return self.led_strip.off()
action__off.capability = {
"param": "off",
"description": "Stops any fades or sequences. Quickly Fades the RGB strip to black (no light)",
"value": "",
"returns": "<unicode> The hex value of colour the RGB strip ends up at (#000000)."
}
def action__capabilities(self, request, *args, **kwargs):
"""
Reports this listener's capabilities
"""
output_capabilities = []
for function_name in dir(self):
if function_name.startswith("action__"):
try:
capability_details = getattr(self,function_name).capability
output_capabilities.append(capability_details)
except AttributeError:
pass
return output_capabilities
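# Usage sketch (illustrative): hitting the server with ?capabilities=1
# returns a JSON list built from each action__* function's .capability
# attribute, so clients can discover the API at runtime. Host and port
# below are assumptions.
#
#   import requests
#   caps = requests.get("http://raspberrypi.local:9090/", params={"capabilities": 1}).json()
#   print([c["param"] for c in caps])  # e.g. ['set', 'fade', 'sunrise', ...]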
def action__status(self, request, *args, **kwargs):
"""
Reports the status of the RGB LED strip
"""
return {
"current": self.led_strip.hex,
"contrast": self.led_strip.contrast_from_bg(self.led_strip.hex, dark_default="202020"),
"current_rgb": "({})".format(self.led_strip)
}
def teardown(self):
"""
Called automatically when exiting the parent reactor
"""
self.led_strip.teardown()
class NotSet():
pass
NOT_SET = NotSet()
def pi_gps_location(ip=''):
if ip=='':
locip = 'https://api.ipify.org?format=json'
r = requests.get(locip)
j = json.loads(r.text)
ipinfo = 'https://ipinfo.io/'+j['ip']
else:
ipinfo = 'https://ipinfo.io/'+ip
r = requests.get(ipinfo)
j = json.loads(r.text)
lat,lon=j['loc'].split(',')
return lat,lon
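# Usage sketch (illustrative): when no latitude/longitude is configured,
# the server geolocates itself via api.ipify.org plus ipinfo.io.
#
#   lat, lon = pi_gps_location()           # look up own public IP first
#   lat, lon = pi_gps_location('8.8.8.8')  # or geolocate a given IP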
class SmartRequest(Request, object):
"""
The class for request objects returned by our web server.
This child version has methods for easily grabbing params safely.
Usage:
#If you just want the first value
sunset = request["sunset"]
sunset = request.get_param("sunset")
#You can even test the water with multiple values, it will stop at the first valid one
sunset = request.get_param(["sunset","ss","twilight"])
#If you want a whole list of values
jump = request.get_list("jump")
See docs: https://twistedmatrix.com/documents/8.0.0/api/twisted.web.server.Request.html
"""
def __init__(self, *args, **kwargs):
super(SmartRequest, self).__init__(*args, **kwargs)
def get_param_values(self, name, default=None):
"""
Failsafe way of getting querystring get and post params from the Request object
If not provided, will return default
@return: ["val1","val2"] LIST of arguments, or the default
"""
return self.args.get(name, default)
get_params = get_param_values #Alias
get_list = get_param_values #Alias
get_params_list = get_param_values #Alias
def get_param(self, names, default=None, force=None):
"""
Failsafe way of getting a single querystring value. Will only return one (the first) value if found
@param names: <str> The name of the param to fetch, or a list of candidate names to try
@keyword default: The default value to return if we cannot get a valid value
@keyword force: <type> A class / type to force the output into. Default is returned if we cannot force the value into this type
"""
if isinstance(names,(str, unicode)):
names = [names]
for name in names:
val = self.get_param_values(name=name, default=NOT_SET)
if val is not NOT_SET: #Once we find a valid value, continue
break
#If we have no valid value, then bail
if val is NOT_SET:
return default
try:
if len(val)==1:
single_val = val[0]
if force is not None:
return force(single_val)
return single_val
else:
mult_val = val
if force is not None:
mult_val = [force(ii) for ii in val]
return mult_val
except (IndexError, ValueError, TypeError):
pass
return default
get_value = get_param
param = get_param
def has_params(self, *param_names):
"""
Returns True or the value if any of the param names given by args exist
"""
for param_name in param_names:
try:
return self.args[param_name] or True
except KeyError:
pass
return False
has_param = has_params
has_key = has_params
def __getitem__(self, name):
"""
Lazy way of getting a param list, with the fallback default being None
"""
return self.get_param(name)
class RaspiledControlSite(Site, object):
"""
Site thread which initialises the RaspiledControlResource properly
"""
def __init__(self, *args, **kwargs):
self.clients=[]
resource = kwargs.pop("resource",RaspiledControlResource())
super(RaspiledControlSite, self).__init__(resource=resource, requestFactory=SmartRequest, *args, **kwargs)
def stopFactory(self):
"""
Called automatically when exiting the reactor. Here we tell the LEDstrip to tear down its resources
"""
self.resource.teardown()
def get_matching_pids(name, exclude_self=True):
"""
Checks the process ID of the specified processes matching name, having excluded itself
check_output(["pidof", str]) will return a space delimited list of all process ids
@param name: <str> The process name to search for
@keyword exclude_self: <Bool> Whether to remove own ID from returned list (e.g. if searching for a python script!)
@return: <list [<str>,]> List of PIDs
"""
#Get all matching PIDs
try:
pids_str = check_output(["pidof",name])
except CalledProcessError: #No matches
pids_str = ""
#Process string-list into python list
pids = pids_str.strip().split(" ")
#Remove self if required:
if exclude_self:
my_pid = str(os.getpid()) #Own PID - getpid() returns integer
try:
pids.remove(my_pid) #Remove my PID string:
except ValueError:
pass
return pids
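# Usage sketch (illustrative): check whether another instance of this
# script is already running before starting a new listener.
#
#   other_pids = get_matching_pids("python", exclude_self=True)
#   if other_pids:
#       print("Already running as PID(s): %s" % ", ".join(other_pids))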
def checkClientAgainstWhitelist(ip, user,token):
IPS = {
'IP1' : '127.0.0.1',
}
config_path = os.path.expanduser(RASPILED_DIR+'/.whitelist')
parser = configparser.ConfigParser(defaults=IPS)
if os.path.exists(config_path):
parser.read(config_path)
else:
with open(config_path, 'w') as f:
parser.write(f)
whitelist=parser.defaults()
for ii in whitelist.keys():
if ip == whitelist[ii]:
logging.info('Client registered')
connection = True
break
else:
connection = False
return connection
def start_if_not_running():
"""
Checks if the process is running, if not, starts it!
"""
pids = get_matching_pids(APP_NAME, exclude_self=True) #Will remove own PID
pids = [pid for pid in pids if pid] # Drop empty strings
if not pids: #No match! Implies we need to fire up the listener
logging.info("[STARTING] Raspiled Listener with PID %s" % str(os.getpid()))
factory = RaspiledControlSite(timeout=8) #8s timeout
endpoint = endpoints.TCP4ServerEndpoint(reactor, RESOLVED_USER_SETTINGS['pi_port'])
endpoint.listen(factory)
reactor.run()
else:
logging.info("Raspiled Listener already running with PID %s" % ", ".join(pids))
if __name__=="__main__":
start_if_not_running()
```
{
"source": "josuemartinezsv/joitek-video2text-converter",
"score": 3
} |
#### File: josuemartinezsv/joitek-video2text-converter/console_ui.py
```python
from pathlib import Path
from typing import Optional
import typer
from converter import video_to_text_converter
def version_callback(value: bool):
if value:
print("Joitek-Video2Text Converter 0.0.1")
raise typer.Exit()
def interactive_callback(value: bool) -> None:
if value:
typer.clear()
typer.echo("=======================================================================")
typer.echo("Joitek-Video2Text Converter")
typer.echo("Welcome to interactive mode!")
typer.echo("=======================================================================")
typer.echo()
typer.echo(
"The program will divide the video into chunks according to the exact number of minutes it has.\n"
"1 chunk = 1 minute\n\n"
"The remaining seconds will be ignored. It will be fixed in new versions ;)\n\n"
"e.g. a video with a duration of 1h: 00m: 18s, those 18 seconds will be ignored.\n"
"The rest will turn into minutes.\n\n"
"Start with the basis\n\n"
"For ignore something, press ENTER\n"
"For exit, press Ctrl + C or any combination. We make sure you don't have any problems"
)
typer.echo("=======================================================================\n\n")
try:
v: Path = typer.prompt("Path of the video to convert.\n"
"e.g. C:\\myfolder\\myvideo.mp4\n\n"
"Path of the video to convert", type=Path)
typer.echo()
typer.echo()
while not v.exists() or v.is_dir():
if v.is_dir():
typer.echo(f"[{v}] points to a dir, not a file\n\n")
else:
typer.echo(f"[{v}] not exist\n\n")
v = typer.prompt("Path of the video to convert", type=Path)
typer.echo()
typer.echo()
v = v.resolve()
o: Path = typer.prompt("Directory path where the file with the transcript will be saved.\n"
f"If ignored, the current directory will be used.\n\n"
"Output dir path",
type=Path,
default=Path.cwd(),
show_default=False)
while not o.exists() or o.is_file():
if o.is_file():
typer.echo(f"[{o}] points to a file, not a dir\n\n")
else:
typer.echo(f"[{o}] not exist\n\n")
o = typer.prompt("Output dir path", type=Path)
typer.echo()
typer.echo()
o = o.resolve()
typer.echo()
typer.echo()
name: str = typer.prompt("Transcription file name (without extension).\n"
"If ignored, the following template will be used:\n"
f"\ttranscription-of-{v.stem}-[date]__[finished-conversion-time]\n\n"
f"Transcription file name",
default=f"transcription-of-{v.stem}-",
show_default=False)
retries: int = typer.prompt("Attempts to be made in case bad chunk are found.\n"
"If ignored, the following default will be used: 10\n\n"
"Retries",
type=int, default=10,
show_default=False)
while retries < 1 or retries > 100:
retries = typer.prompt("Not in range [1-100].\n"
"Try again\n\n"
"Retries",
type=int,
show_default=False)
typer.echo()
typer.echo()
lang: str = typer.prompt("Language spoken in video in 'xx-YY' format. "
"'YY' can be the same as 'xx'. "
"'YY' is the regional language. "
"For example, if you want to convert a video "
"where they speak British English, use en-BR instead of en-US, "
"for greater precision. "
"See [https://cloud.google.com/speech-to-text/docs/languages] for "
"supported languages\n\n"
"Language")
skip_bf: bool = typer.confirm("Skip bad chunks. "
"This will skip the number of attempts to make and "
"proceed to the next one.\n\n"
"Confirm?")
typer.echo()
typer.echo()
abort_on_bf: bool = typer.confirm("Aborts the conversion to the first bad fragment "
"found. This will save what has been converted.\n\n"
"Confirm?")
typer.echo()
typer.echo()
video_to_text_converter(path_of_video=v,
transcription_file_dir_path=o,
transcription_file_name=name,
abort_on_bad_chunk=abort_on_bf,
skip_bad_chunks=skip_bf,
retries=retries,
language=lang)
raise typer.Exit()
# Catch the broad Exception because many different exception types can occur here.
# Bad practice in general, but it does what is required.
except Exception as e:
typer.echo()
typer.echo("Exiting...")
typer.echo("Thanks!")
typer.echo(e)
raise typer.Exit()
def main(pov: Optional[Path] = typer.Argument(..., exists=True,
file_okay=True,
resolve_path=True,
help="Path of the video to convert."),
lang: Optional[str] = typer.Argument(...,
help="Language spoken in video in 'xx-YY' format. "
"'YY' can be the same as 'xx'. "
"'YY' is the regional language. "
"For example, if you want to convert a video "
"where they speak British English, use en-BR instead of en-US, "
"for greater precision. "
"See [https://cloud.google.com/speech-to-text/docs/languages] for "
"supported languages."),
tfdp: Optional[Path] = typer.Option(Path.cwd(),
"-o",
"--output",
resolve_path=True,
is_flag=True,
show_default=False,
help="Directory path where the file with the "
"transcript will be saved. "
"If it is ignored, the directory where this "
"program was invoked will be used by default."
),
tfn: Optional[str] = typer.Option(
f"transcription-of-[video-name]-[date]__[finished-conversion-time]",
"-tfn",
"--transcription-file-name",
show_default=False,
is_flag=True,
help="Transcription file name only (without file extension). "
"By default: autogenerated"
),
retries: Optional[int] = typer.Option(10, "-R",
"--retries",
min=1,
max=100,
show_default=False,
help="Attempts to be made in case bad chunk are found. By default: 10"
),
interactive: Optional[bool] = typer.Option(False,
"-i",
"--interactive",
show_default=False,
is_eager=True,
is_flag=True,
callback=interactive_callback,
help="Enter and start interactive mode. Useful for newest."),
sbc: Optional[bool] = typer.Option(False,
"--skip-bad-fragments",
is_flag=True,
show_default=False,
help="Skip bad fragments. "
"This will skip the number of attempts to make and "
"proceed to the next one."),
aobc: Optional[bool] = typer.Option(False,
"--abort-on-bad-fragment",
is_flag=True,
show_default=False,
help="Aborts the conversion to the first bad chunk "
"found. This will save what has been converted."
),
version: Optional[bool] = typer.Option(None,
"-v",
"--version",
is_flag=True,
is_eager=True,
callback=version_callback,
help="Show version and exit.")
) -> None:
if version or interactive:
typer.echo(None)
return video_to_text_converter(path_of_video=pov,
language=lang,
transcription_file_dir_path=tfdp,
transcription_file_name=tfn,
abort_on_bad_chunk=aobc,
skip_bad_chunks=sbc,
retries=retries)
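# Usage sketch (illustrative CLI invocations, assuming this module is wired
# up with typer.run(main) or a typer.Typer() app; the flag names come from
# the options defined above):
#
#   python console_ui.py ./myvideo.mp4 en-US -o ./transcripts -R 20
#   python console_ui.py --interactive
#   python console_ui.py --version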
```
{
"source": "josuemontano/API-platform",
"score": 2
} |
#### File: canopus/models/user.py
```python
from sqlalchemy import Boolean, Column, DateTime, Integer, String
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy_utils import ChoiceType, EmailType, PhoneNumberType
from .base import BaseModel
from .lib import OrderedEnum
from .meta import Base
class Role(OrderedEnum):
USER = 10
ADMIN = 20
class User(Base, BaseModel):
__tablename__ = 'users'
id = Column(Integer, primary_key=True, autoincrement=True)
first_name = Column(String(100), nullable=False)
last_name = Column(String(100), nullable=False)
email = Column(EmailType)
phone = Column(PhoneNumberType())
role = Column(ChoiceType(Role, impl=Integer()), nullable=False)
is_enabled = Column(Boolean, nullable=False, default=True)
last_signed_in_at = Column(DateTime)
@hybrid_property
def is_admin(self):
role = self.role
return role and role >= Role.ADMIN
@hybrid_property
def name(self):
return f'{self.first_name} {self.last_name}'
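# Usage sketch (illustrative): hybrid properties work on plain instances,
# and the ordered Role enum makes the is_admin comparison meaningful.
#
#   user = User(first_name='Ada', last_name='Lovelace', role=Role.ADMIN)
#   assert user.is_admin and user.name == 'Ada Lovelace'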
```
#### File: tests/models/test_user.py
```python
from tests import factories
class TestUser:
def test_name(self):
user = factories.UserFactory(first_name='John', last_name='Smith')
assert user.name == "<NAME>"
```
{
"source": "josuemontano/blender_wrapper",
"score": 3
} |
#### File: blender_wrapper/api/camera.py
```python
import bpy
from .base import BlenderObject
class Camera(BlenderObject):
"""Camera"""
def add_to_current_scene(self):
bpy.ops.object.camera_add(location=self.location,
rotation=self.rotation,
view_align=self.view_align,
layers=self.layers)
```
#### File: josuemontano/blender_wrapper/example.py
```python
from blender_wrapper.api import Scene
from blender_wrapper.api import Camera
from blender_wrapper.api import SunLamp
from blender_wrapper.api import Cone, Cube, Cylinder, Monkey, Plane
from blender_wrapper.api.constants import ORIGIN, SUBSURF
def main():
scene = Scene(1500, 1000, filepath="./")
scene.setup()
camera = Camera((1, 0, 1), (90, 0, 0), view_align=True)
camera.add_to_current_scene()
lamp = SunLamp(10, (0, 0, 3), ORIGIN)
lamp.add_to_current_scene()
floor = Plane(ORIGIN, ORIGIN, radius=5.0)
floor.add_to_current_scene()
cube = Cube((2.5, 2.5, 0), (0, 0, 45))
cube.add_to_current_scene()
monkey = Monkey((0, 0, 1.25), (10, 0, 45), radius=1.25)
monkey.add_to_current_scene()
monkey.add_modifier(SUBSURF)
monkey.shade_smooth()
scene.render(samples=75, resolution_percentage=5)
scene.export_to_blend_file('./scene.blend')
# Execute running:
# blender --background -P ./test.py
if __name__ == "__main__":
main()
```
{
"source": "josuemtzmo/blendernc",
"score": 2
} |
#### File: blendernc/geonodes/panels.py
```python
import bpy
class GeoNodes_UI_PT_3dview(bpy.types.Panel):
bl_idname = "NCLOAD_PT_Panel"
bl_label = "GeoNodes"
bl_category = "GeoNodes"
bl_space_type = "VIEW_3D"
bl_region_type = "UI"
def draw(self, context):
self.layout.operator('geonodes.ncload', text="Load NetCDF")
```
{
"source": "josuemtzmo/mitgcm_offline_flt",
"score": 2
} |
#### File: mitgcm_offline_flt/input/read_particles.py
```python
import os
import re
import numpy as np
import pandas as pd
import pylab as plt
import struct
import time
class ReadParticles():
'''
Class to read particle output from MITgcm.
'''
def __init__(self, filename, ffile=None):
if ffile==None:
ffile = filename.split('/')[-1][0:-1]
if '*' in filename:
folder = filename.split('*')[0]
folder = folder.split(folder.split('/')[-1])[0]
files = sorted([os.path.join(folder, f) for f in os.listdir(folder) if os.path.isfile(os.path.join(folder, f)) and ('.data' in f and ffile in f)])
self.filename=files
self.load_mftracks()
else:
self.filename=filename
self.load_tracks()
def load_mftracks(self):
printProgressBar(0, len(self.filename), prefix = 'Progress:', suffix = '', length = 10)
count=0
tic=time.time()
for file in self.filename:
if count==0:
tmp_tracks=self.load_tracks(file)
else:
pd0=self.load_tracks(file)
tmp_tracks=pd.concat([tmp_tracks,pd0])
toc=time.time()
printProgressBar(count + 1, len(self.filename), prefix = 'Progress:', suffix = 'Ellapsed time: {0}'.format(round(toc-tic,2)), length = 10)
count+=1
self.df_tracks = tmp_tracks
return self.df_tracks
def load_tracks(self,file=None):
'''
Read particle binary file (modified mds.rdmds function)
'''
if file is None and not isinstance(self.filename, list):
file=self.filename
elif isinstance(self.filename, list) and file is None:
raise ValueError('Use the function load_mftracks to load multiple files.')
dimList = [ 1, 1, 1, 1, 1, 1, 16, 1, 16 ]
self.records=int(dimList[-1])
byterate = 8
tp = '>d{0}'
recshape = self.records
nrecords, = [ int(os.path.getsize(file)//(byterate)) ]
tileshape = (nrecords//recshape, recshape)
lev=()
if not isinstance(lev,tuple):
lev = (lev,)
levs = tuple( aslist(l) for l in lev )
levdims = tuple(len(l) for l in levs)
reclist = range(nrecords)
fmt = tp.format('d'*(nrecords-1))
struct_size = struct.calcsize(fmt)
with open(file, mode='rb') as file:
fileContent = file.read()
arrtile=np.array(struct.unpack(fmt, fileContent[:struct_size]))
self.arrtile=arrtile.reshape(tileshape)
self.bin2pd()
return self.df_tracks
def bin2pd(self,numcols=16):
if self.records < numcols:
numcols = self.records
cols_name=['x','y','z','i','j','k','p','u','v','t','s','vort','ke','t2ave'
,'c2','c3','c4','c5','c6','c7','c8','c9','c10','c11','c12','c13','c14']
if numcols > len(cols_name):
raise ValueError('''Most of the tracking data is contained in the
first 13 collumns, if you need more outputs,
modify the cols_name variable in this function.''')
index = pd.MultiIndex.from_arrays(self.arrtile[:,0:2].T, names=('particle_id', 'time'))
df_tracks = pd.DataFrame(np.array(self.arrtile[:,2:numcols],dtype=np.float64),
columns=cols_name[0:numcols-2],index=index)
self.df_tracks=df_tracks.sort_index()
def plot_tracks(self,latlon=True,**kargs):
if latlon==True:
import cartopy.crs as ccrs
transform=ccrs.PlateCarree()
else:
transform=None
fig = plt.figure(figsize=(10, 5))
ax = fig.add_subplot(1, 1, 1,projection=transform)
index = np.sort(self.df_tracks.index.get_level_values(0).unique())
printProgressBar(0, len(index), prefix = 'Progress:', suffix = '', length = 10)
count=0
for ii in index:
x=self.df_tracks.loc[ii]['x']
y=self.df_tracks.loc[ii]['y']
plt.plot(x,y,transform=transform)
printProgressBar(count+1, len(index), prefix = 'Progress:', suffix = 'Track: {0}'.format(ii), length = 10)
count+=1
if latlon==True:
ax.coastlines()
def plot_initp(self,latlon=True,**kargs):
if latlon==True:
import cartopy.crs as ccrs
transform=ccrs.PlateCarree()
else:
transform=None
fig = plt.figure(figsize=(10, 5))
ax = fig.add_subplot(1, 1, 1,projection=transform)
for ii in np.sort(self.df_tracks.index.get_level_values(0).unique()):
x=self.df_tracks.loc[ii]['x'].iloc[0]
y=self.df_tracks.loc[ii]['y'].iloc[0]
plt.plot(x,y,'og',transform=transform)
if latlon==True:
ax.coastlines()
def savedf(self,path):
self.df_tracks.to_csv(path)
def LAVD(self):
timediff=(self.df_tracks.loc[1].index[-1]-self.df_tracks.loc[1].index[0])/86400.0
LAVD = (1/timediff) * self.df_tracks.sum(level='particle_id')
return LAVD
def printProgressBar(iteration, total, prefix = '', suffix = '', decimals = 1, length = 100, fill = '█'):
"""
Call in a loop to create terminal progress bar
@params:
iteration - Required : current iteration (Int)
total - Required : total iterations (Int)
prefix - Optional : prefix string (Str)
suffix - Optional : suffix string (Str)
decimals - Optional : positive number of decimals in percent complete (Int)
length - Optional : character length of bar (Int)
fill - Optional : bar fill character (Str)
"""
percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
filledLength = int(length * iteration // total)
bar = fill * filledLength + '-' * (length - filledLength)
print('\r%s |%s| %s%% %s' % (prefix, bar, percent, suffix), end = '\r')
# Print New Line on Complete
if iteration == total:
print()
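# Usage sketch (illustrative): call once per iteration; the bar redraws in
# place via the carriage return and prints a final newline when complete.
#
#   import time
#   total = 20
#   for i in range(total):
#       time.sleep(0.05)
#       printProgressBar(i + 1, total, prefix='Progress:', length=10)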
# if __name__ == "__main__":
# p_file = ReadParticles('./float_profiles.001.001.data')
# dic_file= p_file.readBinary()
# p_file.plot_tracks()
```
{
"source": "josuemtzmo/tcoasts",
"score": 3
} |
#### File: tcoasts/tcoasts/tcoasts.py
```python
import gsw
import xarray as xr
import subprocess
import numpy as np
import os
import pylab as plt
# Import utils and decorators
from tcoasts.utils.utils import *
from tcoasts.utils.decorators import _file_exists
class TransportAlongCoast(object):
'''
'''
def __init__(self,path,initpos,contour_file,distance=np.arange(-400,400,100),length=100):
self.path = path # Data path
self.initpos = initpos # Init location lat lon coordinates.
self.dist = distance # Units kilometers
self.length = length # Units kilometers
self.n = 4 # self.length/self.n corresponds to the segments
# on the perpendicular vector.
self.contour_file = contour_file # Contour filename.
self.tmpfile= 'tmp_interp_transects.nc' # Temporal file to store interpolated fields.
# Load required data
self.extract_contour()
self.initindex=find2d(self.coastline,initpos)
def extract_contour(self):
# Load contour file.
if './' not in self.contour_file:
self.contour_file=os.path.join(self.path,self.contour_file)
if os.path.isfile(self.contour_file):
self.coastline=np.loadtxt(self.contour_file)
else:
raise ValueError('''
Make sure the file path is correct.
The path should be relative to the location of
the running script, or relative to self.path.
''')
def coords2dist(self,lonlat,p=0):
'''
This function follows the GSW computation.
'''
distance=gsw.distance(lonlat[:,0],lonlat[:,1],p)
return distance
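# Usage sketch (illustrative): gsw.distance takes lon and lat arrays and
# returns distances in metres between consecutive points, so an N-point
# polyline yields N-1 segment lengths. `tac` is assumed to be a
# TransportAlongCoast instance.
#
#   import numpy as np
#   lonlat = np.array([[-90.0, 25.0], [-89.5, 25.2], [-89.0, 25.4]])
#   segments = tac.coords2dist(lonlat)   # shape (2,)
#   along_track = np.cumsum(segments)    # cumulative along-track distance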
def distancefrominit(self):
'''
The distance definition points positive to the east.
'''
if self.initindex != 0:
# Compute cumulative distance to right of index [location,location]
postinit=np.cumsum(self.coords2dist(self.coastline[self.initindex:]))
# Compute cumulative distance to left of index [location,location]
neginit=-1*np.cumsum(np.flipud(self.coords2dist(self.coastline[:self.initindex])))
# Join cumulative distances.
cumdistance=np.hstack((np.flipud(neginit),postinit))
else:
# Compute cumulative distance starting from the index [0,0]
cumdistance=np.cumsum(self.coords2dist(self.coastline))
return cumdistance
def perploc(self):
#Find the user defined locations for the perpendicular vectors.
dist_coast=self.distancefrominit()
index_perp=[find(dist_coast,dis*1000) for dis in self.dist]
return index_perp
def perp2coast(self,method='smooth',x=10):
'''
Input:
method: [ mean ]
smooth - computes the mean over X number of slopes and
projects the perpendicular vector
byseg - computes the mean over each segment of the slope
local - computes the the perpendicular vector using the 2
adjacent locations
ext - computes the perpendicular vector using the slope at
x cells to the left and right of the desired
perpendicular location.
'''
index_perp=self.perploc()
# Method to find the location perpendicular vector.
if method == 'local' or method == 'ext':
# Compute slope from adjacent locations [loc-x,loc+x]
if method=='local':
x=1
slopes=np.array([slope(self.coastline[ii-x,0],self.coastline[ii+x,0],
self.coastline[ii-x,1],self.coastline[ii+x,1])
for ii in index_perp])
elif method == 'smooth':
# Compute average slope from all the indexes contained between locations [loc-x,loc+x]
slopes=np.array([np.mean([slope(self.coastline[ii-xx,0],self.coastline[ii+xx,0],
self.coastline[ii-xx,1],self.coastline[ii+xx,1])
for xx in range(1,x)])
for ii in index_perp])
else:
# Compute average slope from all segments from [loc-x,loc-x+(2x-1)]
slopes=np.array([np.mean([slope(self.coastline[ii-x,0],self.coastline[ii-x+xx,0],
self.coastline[ii-x,1],self.coastline[ii-x+xx,1])
for xx in range(1,(2*x-1))])
for ii in index_perp])
#Compute angles from slopes
angles=slope2angle(slopes)
#Shift angles to be perpendicular
perp_angle=angles+(np.pi/2)
#Normal vector
self.x_norm = np.squeeze(np.cos(angles))
self.y_norm = np.squeeze(np.sin(angles))
#Perpendicualar vector
self.x_perp = np.squeeze(np.cos(perp_angle))
self.y_perp = np.squeeze(np.sin(perp_angle))
# Return dictionary containing vector information
return {'Nvector':{'x':self.x_norm,'y':self.y_norm,'angle':angles,'slope':slopes},
'Pvector':{'x':self.x_perp,'y':self.y_perp,'angles':perp_angle,'slope':-1/slopes}}
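# Usage sketch (illustrative): compute coast-normal and coast-perpendicular
# unit vectors using slopes averaged over 10 neighbouring points. `tac` is
# assumed to be a TransportAlongCoast instance.
#
#   vectors = tac.perp2coast(method='smooth', x=10)
#   angles = vectors['Pvector']['angles']                 # radians
#   nx, ny = vectors['Nvector']['x'], vectors['Nvector']['y']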
def perpvecdist(self,index_perp,perp_angle):
#compute distances to scale perpendicular vectors.
### Note this will produce an error of 1e-4.
x=np.array([[self.coastline[index_perp][ii,0],
np.cos(perp_angle[ii])+self.coastline[index_perp][ii,0]]
for ii in range(len(index_perp))])
y=np.array([[self.coastline[index_perp][ii,1],
np.sin(perp_angle[ii])+self.coastline[index_perp][ii,1]]
for ii in range(len(index_perp))])
distances = gsw.distance(x,y)
return distances
# _file_exists will test if the temporary file containing the interpolated
# data exists. If the file exists it will load the contents; otherwise, it
# will interpolate the data.
@_file_exists
def inter2vector(self,ufiles='U.*.nc',vfiles='V.*.nc',tracerfile=None,dataset=None,save=True,shift=360,**kwargs):
'''
**kwargs inter2vector supports the all the kwargs of xr.open_mfdataset.
'''
# xr load parameters
xr_openmf_defaults={}
if '*' in ufiles and '*' in vfiles:
xr_openmf_defaults = {'concat_dim':'time','parallel':True,'combine':'nested'}
xr_openmf_defaults.update(kwargs)
print('Opening velocity files')
# Load data.
u = self.loaddata(file=ufiles,var='U',dataset=dataset,**xr_openmf_defaults)
v = self.loaddata(file=vfiles,var='V',dataset=dataset,**xr_openmf_defaults)
# Make sure the shape of the velocity fields are the same.
if u.shape != v.shape:
raise ValueError('The velocity fields should have the same shape.')
# Compute perpendicular vectors.
x_norm,y_norm,x_perp,y_perp,x_perp_all,y_perp_all=self.vertor_perp()
# Define locations to interpolate interpolation.
# !Important:
# x_perp,y_perp is defined in the center of the cells
x = xr.DataArray(x_perp, dims=('transect','n'))
y = xr.DataArray(y_perp, dims=('transect','n'))
# Define limits to slice data.
deltax = 2*max((abs(x_perp[:,0]-x_perp[:,1])))
slicevalx = [shift+x_perp.min()-deltax,shift+x_perp.max()+deltax]
deltay = 2*max((abs(y_perp[:,0]-y_perp[:,1])))
slicevaly = [y_perp.min()-deltay,y_perp.max()+deltay]
# Slice data to reduce memory issues.
u = u.sel({'lon':slice(slicevalx[0],slicevalx[1]),'lat':slice(slicevaly[0],slicevaly[1])})
v = v.sel({'lon':slice(slicevalx[0],slicevalx[1]),'lat':slice(slicevaly[0],slicevaly[1])})
# Interpolate data using xarray,
# Note that fields can not contain nans
# TO DO: Add support for data containing nans.
print('Interpolating velocity fields')
interp_u = u.chunk({'time':10,'depth':25,'lat':len(u.lat),'lon':len(u.lon)}).interp(lon=shift+x,lat=y).compute()
interp_u = interp_u.where(interp_u!=0,np.nan)
interp_v = v.chunk({'time':10,'depth':25,'lat':len(v.lat),'lon':len(v.lon)}).interp(lon=shift+x,lat=y).compute()
interp_v = interp_v.where(interp_v!=0,np.nan)
# Merge datasets
self.interp_data=xr.merge([interp_u.to_dataset(name='u'), interp_v.to_dataset(name='v')])
# Interpolate tracer fields to constrain transport.
if tracerfile != None:
print('Loading and interpolating tracer')
tracer = self.loaddata(file=tracerfile,var='Tracer',dataset=dataset,**xr_openmf_defaults)
tracer = tracer.sel({'lon':slice(slicevalx[0],slicevalx[1]),'lat':slice(slicevaly[0],slicevaly[1])})
interp_tracer = tracer.interp(lon=shift+x,lat=y).compute()
interp_tracer = interp_tracer.where(interp_tracer!=0,np.nan)
self.interp_data = xr.merge([interp_u.to_dataset(name='u'), interp_v.to_dataset(name='v'),
interp_tracer.to_dataset(name='tracer')])
# Save data.
if save==True:
self.interp_data.to_netcdf(self.tmpfile)
return self.interp_data
def depth_profiles(self,bottom_vel):
'''
'''
# Maximum depth from interpolated field.
depth_index=self.interp_data.depth[np.isfinite(self.interp_data.u.where(abs(self.interp_data.u)>bottom_vel,np.nan).isel({'time':0})).argmin('depth')]
# xr.DataArray 2 multiply with field.
depth=(xr.zeros_like(self.interp_data.u.isel(time=0))+self.interp_data.depth)
# Mask depth to only contain values larger than index.
depth=depth.where(depth > depth_index,np.nan)
# Delta depth to compute area
delta_depth=depth.diff(dim='depth')
return delta_depth
def vel_magnitude(self):
# Magnitude of interpolated vectors.
magnitude = np.sqrt(self.interp_data.u**2+self.interp_data.v**2)
return magnitude
def dot_product(self):
# Dot product between interpolated vectors and normal vector
# from perpendicular transect to the coast.
return self.interp_data.u*self.x_norm[np.newaxis,np.newaxis,:,np.newaxis]+self.interp_data.v*self.y_norm[np.newaxis,np.newaxis,:,np.newaxis]
def compute_transport(self,bottom_vel=1e-5):
# Scalar projection of interpolated data
dotproduct = self.dot_product()
# Projected data over normal vectors to surface.
u_normal = dotproduct*self.x_norm[np.newaxis,np.newaxis,:,np.newaxis]
v_normal = dotproduct*self.y_norm[np.newaxis,np.newaxis,:,np.newaxis]
# Area of each grid cell.
dA = self.delta_area(bottom_vel)
# Multiplication of vector sum and the dA. Flux integral.
self.transport=(u_normal+v_normal)*dA
return self.transport.sum(dim={'depth','n'})
def delta_area(self,bottom_vel):
# Compute perpendicular vectors.
x_norm,y_norm,x_perp,y_perp,x_perp_all,y_perp_all=self.vertor_perp()
# Depth at each section of the transect.
delta_z=abs(self.depth_profiles(bottom_vel=bottom_vel))
# Distance between lon,lat points of transect.
delta_x=gsw.distance(x_perp_all,y_perp_all)
return delta_z*delta_x
def mask_transport(self,threshold,method='greater'):
'''
threshold [ float / list ]
Threshold used to constrain the transport with the tracer field.
method [ string ]
'greater' will compute the transport for all the values larger
than the threshold in the tracer field.
'smaller' will compute the transport for all the values smaller
than the threshold in the tracer field.
'both' will compute the transport for all the values within
the threshold interval in the tracer field.
'''
if type(threshold)==list:
threshold=np.array(threshold)
# TO DO: If u vertical grid != tracer vertical grid then interpolate tracer to velocity grid.
if method=='smaller' and type(threshold)==float:
scaled_transport=self.transport.where(self.interp_data.tracer.isel(depth=slice(0,-1))<threshold)
elif method=='greater' and type(threshold)==float:
scaled_transport=self.transport.where(self.interp_data.tracer.isel(depth=slice(0,-1))>threshold)
elif method=='both' and type(threshold)==np.ndarray:
scaled_transport=self.transport.where(self.interp_data.tracer.isel(depth=slice(0,-1))>threshold.min()).where(self.interp_data.tracer<threshold.max())
else:
raise ValueError('''Threshold must be an float or list/array in which the
min and max value will define the threshold interval.''')
return scaled_transport.sum(dim={'depth','n'})
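# Usage sketch (illustrative): integrate the transport, then constrain it
# to water fresher than 35, assuming the interpolated tracer is salinity.
# `tac` is assumed to be a TransportAlongCoast instance.
#
#   total = tac.compute_transport(bottom_vel=1e-5)
#   fresh = tac.mask_transport(35.0, method='smaller')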
def loaddata(self,file=None,var='U',dataset=None,**kwargs):
# Check if file or dataset is defined.
if file == None and dataset==None:
raise ValueError('''file should be the path to the netCDF files or
dataset should contain a dataset with a variable
containing the string defined as var.
''')
elif file is not None and dataset is None:
results = subprocess.check_output(['find', self.path, '-name', file])
results=[s for s in results.decode('utf-8').split()]
results.sort()
data=xr.open_mfdataset(results,**kwargs)
elif dataset is not None:
data=dataset
else:
raise ValueError('Only one of the arguments [file or dataset] can be defined.')
# Extract variables from dataset
varname= [key for key,items in data.data_vars.items()]
# Rename variable for easier manipulation.
if len(varname)==1:
variable=data.rename({varname[0]:var})
else:
varname=[var for varn in varname if var in varn]
variable=data.rename({varname[0]:var})
# Extract only the variable of interest.
data=variable[var]
if type(data) != xr.core.dataarray.DataArray:
raise ValueError('The provided data should be a xr.DataArray.')
else:
return data
def vector_scale(self,index_perp,perp_angle):
'''
Scale vector to desired distance self.length
'''
# Scale perpendicular vector to distance self.length
return np.squeeze((self.length*1000)/self.perpvecdist(index_perp,perp_angle))
def vertor_perp(self,shift=0):
# Nearest location of perpendicular vectors from coastline grid.
index_perp=self.perploc()
# Compute perpendicular vectors.
perp_dict=self.perp2coast()
# Scale perpendicular vector to desired distance self.length.
scale=self.vector_scale(index_perp,perp_dict['Pvector']['angles'])
# Gridded normal vector
x_norm=(np.squeeze(np.linspace(0,scale,self.length//self.n)[:,np.newaxis]*self.x_norm)
+self.coastline[index_perp][:,0]).T+shift
y_norm=(np.squeeze(np.linspace(0,scale,self.length//self.n)[:,np.newaxis]*self.y_norm)
+self.coastline[index_perp][:,1]).T
# Gridded perpendicular vector at [x,y]
x_perp_all=(np.squeeze(np.linspace(0,scale,self.length//self.n)[:,np.newaxis]*self.x_perp)
+self.coastline[index_perp][:,0]).T+shift
y_perp_all=(np.squeeze(np.linspace(0,scale,self.length//self.n )[:,np.newaxis]*self.y_perp)
+self.coastline[index_perp][:,1]).T
# Gridded perpendicular vector at [x+diff(x)/2,y+diff(y)/2]
x_perp = x_perp_all[:,:-1]+np.diff(x_perp_all)/2
y_perp = y_perp_all[:,:-1]+np.diff(y_perp_all)/2
return x_norm,y_norm,x_perp,y_perp,x_perp_all,y_perp_all
def plotperp_vect(self,shift=0,transect=None,**kwargs):
'''
transect [int] zooms in to the transect
'''
fig,ax = plt.subplots(1,1,figsize=(5,5),**kwargs)
# Plot coastline
plt.plot(self.coastline[:,0]+shift,self.coastline[:,1])
# Compute perpendicular vectors.
x_norm,y_norm,x_perp,y_perp,x_perp_all,y_perp_all=self.vertor_perp(shift)
# Plot perpendicular vectors.
plt.plot(x_norm.T,y_norm.T,'-r')
plt.plot(x_perp.T,y_perp.T,'--k')
# Zoom in on the transect; useful when the angle is significantly
# different from n*{0, np.pi/2}.
if transect != None:
xdelta=2*abs(x_perp[transect][0]-x_perp[transect][1])
plt.xlim(x_perp[transect].min()-xdelta,x_perp[transect].max()+xdelta)
ydelta=2*abs(y_perp[transect][0]-y_perp[transect][1])
plt.ylim(y_perp[transect].min()-ydelta,y_perp[transect].max()+ydelta)
plt.gca().set_aspect('equal', adjustable='box')
return fig,ax
```
#### File: tcoasts/utils/decorators.py
```python
import functools
import warnings
import os
import xarray as xr
def _file_exists(func):
@functools.wraps(func)
def wrap(self,*args, **kwargs):
if os.path.isfile(self.tmpfile):
warnings.filterwarnings('default',module='tcoasts')
warnings.warn('Loading previous saved data.', Warning)
interp=xr.open_dataset(self.tmpfile)
self.interp_data=interp.load()
else:
func(self, *args, **kwargs)
return wrap
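# Usage sketch (illustrative): the decorated method is skipped entirely when
# self.tmpfile already exists; the cached interpolation is loaded instead.
#
#   class Transects(object):
#       tmpfile = 'tmp_interp_transects.nc'
#       @_file_exists
#       def inter2vector(self, *args, **kwargs):
#           ...  # expensive interpolation, only runs when no cache exists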
```
{
"source": "josuemtzmo/tracpy",
"score": 3
} |
#### File: tracpy/tracpy/inout.py
```python
from __future__ import absolute_import
import netCDF4 as netCDF
import glob
import numpy as np
from scipy.spatial import Delaunay
import matplotlib.tri as mtri
import octant
import time
from . import op
import os
import tracpy
from matplotlib.mlab import find
def setupROMSfiles(loc, date, ff, tout, time_units, tstride=1):
"""
setupROMSfiles()
<NAME>, March 2013
Figures out necessary files to read in for track times and what
model output indices within those files to use.
Args:
loc: File location. loc can be a thredds server web address, a single
string of a file location, a list of strings of multiple file
locations to be searched through.
date: datetime format start date
ff: Time direction. ff=1 forward, ff=-1 backward
tout: Number of model outputs to use
time_units: To convert to datetime
tstride: Stride in time, in case want to use less model output than
is available. Default is 1, using all output.
Returns:
* nc - NetCDF object for relevant files
* tinds - Indices of outputs to use from fname files
"""
# For thredds server where all information is available in one place
# or for a single file
if 'http' in loc or type(loc) == str:
nc = netCDF.Dataset(loc)
# This is for the case when we have a bunch of files to sort through
else:
# the globbing should happen ahead of time so this case looks
# different than the single file case
# files in fname are in chronological order
nc = netCDF.MFDataset(loc)
# Convert date to number
# dates = netCDF.num2date(nc.variables['ocean_time'][:], time_units)
# The calendar definition extends dates to before the year 1582 for use
# with idealized simulations without meaningful dates.
if 'time' in nc.variables:
dates = netCDF.num2date(nc.variables['time'][:], time_units,
calendar='proleptic_gregorian')
elif 'ocean_time' in nc.variables:
dates = netCDF.num2date(nc.variables['ocean_time'][:], time_units,
calendar='proleptic_gregorian')
# time index with time value just below date (relative to file ifile)
istart = find(dates <= date)[-1]
# Select indices
if ff == 1:
# indices of model outputs desired
tinds = range(istart, istart+tout, tstride)
else: # backward in time
# have to shift istart since there are now new indices behind since
# going backward
tinds = range(istart, istart-tout, -tstride)
return nc, tinds
def readgrid(grid_filename, proj, vert_filename=None, usespherical=True):
"""
readgrid(loc)
<NAME>, March 2013
This function should be read in at the beginnind of a run.py call.
It reads in all necessary grid information that won't change in time
and stores it in a dictionary called grid.
All arrays are changed to Fortran ordering (from Python ordering)
and to tracmass variables ordering from ROMS ordering
i.e. from [t,k,j,i] to [i,j,k,t]
right away after reading in.
Args:
grid_filename: File name (with extension) where grid information is
stored
vert_filename (optional): File name (with extension) where vertical
grid information is stored, if not in grid_loc. Can also skip
this if don't need vertical grid info. also optional prjection
box parameters. Default is None.
proj: Projection object.
usespherical: Use spherical geometric coordinates (lat/lon) or not.
Returns:
* grid - Dictionary containing all necessary time-independent grid fields
grid dictionary contains: (array sizing is for tracmass ordering)
* imt,jmt,km: Grid index sizing constants in (x,y,z), are for horizontal
rho grid [scalar]
* dxv: Horizontal grid cell walls areas in x direction [imt,jmt-1]
* dyu: Horizontal grid cell walls areas in y direction [imt-1,jmt]
* dxdy: Horizontal area of cells defined at cell centers [imt,jmt]
* mask: Land/sea mask [imt,jmt]
* pm,pn: Difference in horizontal grid spacing in x and y [imt,jmt]
* kmt: Number of vertical levels in horizontal space [imt,jmt]
* dzt0: Thickness in meters of grid at each k-level with
time-independent free surface. Surface is at km [imt,jmt,km].
* zrt0: Depth in meters of grid at each k-level on vertical rho grid
with time-independent free surface. Surface is at km [imt,jmt,km]
* zwt0: Depth in meters of grid at each k-level on vertical w grid with
time-independent free surface. Surface is at km [imt,jmt,km]
* xr, yr: Rho grid zonal (x) and meriodional (y) coordinates [imt,jmt]
* xu, yu: U grid zonal (x) and meriodional (y) coordinates [imt,jmt]
* xv, yv: V grid zonal (x) and meriodional (y) coordinates [imt,jmt]
* xpsi, ypsi: Psi grid zonal (x) and meriodional (y) coordinates
[imt, jmt]
* X, Y: Grid index arrays
* tri, trir: Delaunay triangulations
* Cs_r, sc_r: Vertical grid streching paramters [km-1]
* hc: Critical depth [scalar]
* h: Depths [imt,jmt]
* theta_s: Vertical stretching parameter [scalar]. A parameter
(typically 0.0 <= theta_s < 5.0) that defines the amount of grid
focusing. A higher value for theta_s will focus the grid more.
* theta_b: Vertical stretching parameter [scalar]. A parameter (0.0 <
theta_b < 1.0) that says whether the coordinate will be focused at the
surface (theta_b -> 1.0) or split evenly between surface and bottom
(theta_b -> 0)
* basemap: Basemap object
Note: all are in fortran ordering and tracmass ordering except for X, Y,
tri, and tric
To test: [array].flags['F_CONTIGUOUS'] will return true if it is fortran
ordering
"""
# Read in grid parameters and find x and y in domain on different grids
# use full dataset to get grid information
gridfile = netCDF.Dataset(grid_filename)
if usespherical:
try:
lon_vert = gridfile.variables['lon_vert'][:]
lat_vert = gridfile.variables['lat_vert'][:]
except KeyError:
lon_rho = gridfile.variables['lon_rho'][:]
lat_rho = gridfile.variables['lat_rho'][:]
x_rho, y_rho = proj(lon_rho, lat_rho)
# get vertex locations
try:
angle = gridfile.variables['angle'][:]
except KeyError:
angle = np.zeros(x_rho.shape)
x_vert, y_vert = octant.grid.rho_to_vert(x_rho, y_rho,
gridfile.variables['pm'][:],
gridfile.variables['pn'][:],
angle)
lon_vert, lat_vert = proj(x_vert, y_vert, inverse=True)
try:
mask_rho = gridfile.variables['mask'][:]
except KeyError:
mask_rho = gridfile.variables['mask_rho'][:]
grid = octant.grid.CGrid_geo(lon_vert, lat_vert, proj)
grid.mask_rho = mask_rho
else: # read cartesian data
try:
x_vert = gridfile.variables['x_vert'][:]
y_vert = gridfile.variables['y_vert'][:]
except KeyError:
x_rho = gridfile.variables['x_rho'][:]
y_rho = gridfile.variables['y_rho'][:]
# get vertex locations
try:
angle = gridfile.variables['angle'][:]
except KeyError:
angle = np.zeros(x_rho.shape)
x_vert, y_vert = octant.grid.rho_to_vert(x_rho, y_rho,
gridfile.variables['pm'][:],
gridfile.variables['pn'][:],
angle)
grid = octant.grid.CGrid(x_vert, y_vert)
try:
mask_rho = gridfile.variables['mask'][:]
grid.mask_rho = mask_rho
except KeyError:
print('No mask.')
        # Add into grid spherical coord variables so they are available as
# expected for the code but set them equal to the projected coords.
# Make this better in the future.
grid.lon_rho = grid.x_rho
grid.lat_rho = grid.y_rho
grid.lon_psi = grid.x_psi
grid.lat_psi = grid.y_psi
grid.lon_u = grid.x_u
grid.lat_u = grid.y_u
grid.lon_v = grid.x_v
grid.lat_v = grid.y_v
# vertical grid info
if (vert_filename is not None) or ('s_w' in gridfile.variables):
if 's_w' in gridfile.variables: # test for presence of vertical info
nc = gridfile
else:
nc = netCDF.Dataset(vert_filename)
if 's_w' in nc.variables:
grid.sc_r = nc.variables['s_w'][:] # sigma coords, 31 layers
else:
            grid.sc_r = nc.variables['sc_w'][:]  # sigma coords, 31 layers
# stretching curve in sigma coords, 31 layers
grid.Cs_r = nc.variables['Cs_w'][:]
grid.hc = nc.variables['hc'][:]
grid.theta_s = nc.variables['theta_s'][:]
grid.theta_b = nc.variables['theta_b'][:]
if 'Vtransform' in nc.variables:
grid.Vtransform = nc.variables['Vtransform'][:]
grid.Vstretching = nc.variables['Vstretching'][:]
else:
grid.Vtransform = 1
grid.Vstretching = 1
# Basing this on setupgrid.f95 for rutgersNWA example project from Bror
grid.h = gridfile.variables['h'][:]
# Grid sizes
grid.imt = grid.h.shape[1] # 191
grid.jmt = grid.h.shape[0] # 671
if hasattr(grid, 'sc_r'):
grid.km = grid.sc_r.shape[0]-1 # 30 NOT SURE ON THIS ONE YET
# Index grid, for interpolation between real and grid space
# This is for rho
# X goes from 0 to imt-1 and Y goes from 0 to jmt-1
# grid in index coordinates, without ghost cells
grid.X, grid.Y = np.meshgrid(np.arange(grid.imt), np.arange(grid.jmt))
# Triangulation for grid space to curvilinear space
pts = np.column_stack((grid.X.flatten(), grid.Y.flatten()))
tess = Delaunay(pts)
grid.tri = mtri.Triangulation(grid.X.flatten(), grid.Y.flatten(),
tess.simplices.copy())
# Triangulation for curvilinear space to grid space
# Have to use SciPy's Triangulation to be more robust.
# http://matveichev.blogspot.com/2014/02/matplotlibs-tricontour-interesting.html
    if isinstance(grid.x_rho, np.ma.MaskedArray):
pts = np.column_stack((grid.x_rho.data.flatten(), grid.y_rho.data.flatten()))
else:
pts = np.column_stack((grid.x_rho.flatten(), grid.y_rho.flatten()))
tess = Delaunay(pts)
grid.trir = mtri.Triangulation(grid.x_rho.flatten(),
grid.y_rho.flatten(),
tess.simplices.copy())
# For the two triangulations that are not integer based, need to
# preprocess the mask to get rid of potential flat triangles at the
# boundaries
# http://matplotlib.org/1.3.1/api/tri_api.html#matplotlib.tri.TriAnalyzer
# Hopefully these will work for other cases too: for the xy spherical
# unit test cases, I needed these both for the triangulation to be valid.
mask = mtri.TriAnalyzer(grid.trir).get_flat_tri_mask(0.01, rescale=True)
grid.trir.set_mask(mask)
mask = mtri.TriAnalyzer(grid.trir).get_flat_tri_mask(0.01, rescale=False)
grid.trir.set_mask(mask)
    if isinstance(grid.x_rho, np.ma.MaskedArray):
pts = np.column_stack((grid.lon_rho.data.flatten(), grid.lat_rho.data.flatten()))
else:
pts = np.column_stack((grid.lon_rho.flatten(), grid.lat_rho.flatten()))
tess = Delaunay(pts)
grid.trirllrho = mtri.Triangulation(grid.lon_rho.flatten(),
grid.lat_rho.flatten(),
tess.simplices.copy())
mask = mtri.TriAnalyzer(grid.trirllrho).get_flat_tri_mask(0.01, rescale=True)
grid.trirllrho.set_mask(mask)
mask = mtri.TriAnalyzer(grid.trirllrho).get_flat_tri_mask(0.01, rescale=False)
grid.trirllrho.set_mask(mask)
# tracmass ordering.
# Not sure how to convert this to pm, pn with appropriate shift
grid.dxv = 1/grid.pm # pm is 1/\Delta x at cell centers
grid.dyu = 1/grid.pn # pn is 1/\Delta y at cell centers
grid.dxdy = grid.dyu*grid.dxv
# Change dxv,dyu to be correct u and v grid size after having
# them be too big for dxdy calculation. This is not in the
# rutgersNWA example and I am not sure why. [i,j]
grid.dxv = 0.5*(grid.dxv[:-1, :] + grid.dxv[1:, :])
grid.dyu = 0.5*(grid.dyu[:, :-1] + grid.dyu[:, 1:])
# Adjust masking according to setupgrid.f95 for rutgersNWA example
# project from Bror
if hasattr(grid, 'sc_r'):
mask2 = grid.mask.copy()
grid.kmt = np.ones((grid.jmt, grid.imt))*grid.km
ind = (mask2 == 1)
ind[0:grid.jmt-1, :] = ind[1:grid.jmt, :]
mask2[ind] = 1
ind = (mask2 == 1)
ind[:, 0:grid.imt-1] = ind[:, 1:grid.imt]
mask2[ind] = 1
ind = (mask2 == 0)
grid.kmt[ind] = 0
# Use octant to calculate depths/thicknesses for the appropriate
# vertical grid parameters have to transform a few back to ROMS
# coordinates and python ordering for this
grid.zwt0 = octant.depths.get_zw(grid.Vtransform, grid.Vstretching,
grid.km+1, grid.theta_s,
grid.theta_b, grid.h, grid.hc,
zeta=0, Hscale=3)
grid.zrt0 = octant.depths.get_zrho(grid.Vtransform, grid.Vstretching,
grid.km, grid.theta_s,
grid.theta_b, grid.h, grid.hc,
zeta=0, Hscale=3)
# this should be the base grid layer thickness that doesn't change in
# time because it is for the reference vertical level
grid.dzt0 = grid.zwt0[1:, :, :] - grid.zwt0[:-1, :, :]
gridfile.close()
return grid
def readfields(tind, grid, nc, z0=None, zpar=None, zparuv=None):
"""
readfields()
<NAME>, March 2013
Reads in model output in order to calculate fluxes and z grid
properties to send into step.f95.
Should be called initially and then subsequently each time loop.
All arrays are changed to Fortran ordering (from Python ordering)
and to tracmass variables ordering from ROMS ordering
i.e. from [t,k,j,i] to [i,j,k,t]
right away after reading in.
Args:
tind: Single time index for model output to read in
grid: Dictionary containing all necessary time-independent grid
fields
nc: NetCDF object for relevant files
z0 (Optional): if doing 2d isoslice, z0 contains string saying which
kind
zpar (Optional): if doing 2d isoslice, zpar is the
depth/level/density at which we are to get the level
zparuv (Optional): Use this if the k index for the model output
fields (e.g, u, v) is different from the k index in the grid. This
might happen if, for example, only the surface current were saved,
but the model run originally did have many layers. This parameter
represents the k index for the u and v output, not for the grid.
Returns:
* uflux1 - Zonal (x) flux at tind
    * vflux1 - Meridional (y) flux at tind
* dzt - Height of k-cells in 3 dim in meters on rho vertical grid.
[imt,jmt,km]
* zrt - Time-dependent depths of cells on vertical rho grid (meters).
For the isoslice case, zrt ends up with 1 vertical level which
contains the depths for the vertical center of the cell for that
level.
* zwt - Time-dependent depths of cells on vertical w grid (meters).
zwt always contains the depths at the vertical cell edges for the
whole 3D grid and the correct depths can be accessed using the
drifter indices.
Array descriptions:
* u,v - Zonal (x) and meridional (y) velocities [imt,jmt,km] (m/s)
* ssh - Free surface [imt,jmt] (m)
* dz - Height of k-cells in 1 dim [km]
From coord.f95: z coordinates (z>0 going up) for layers in meters
bottom layer: k=0; surface layer: k=KM and zw=0
dz = layer thickness
* zt - Depths (negative) in meters of w vertical grid [imt,jmt,km+1]
* dzt - Height of k-cells in 3 dim in meters on rho vertical grid.
[imt,jmt,km]
* dzt0 - Height of k-cells in 2 dim. [imt,jmt]
* dzu - Height of each u grid cell [imt-1,jmt,km]
* dzv - Height of each v grid cell [imt,jmt-1,km]
* uflux1 - Zonal (x) fluxes [imt-1,jmt,km] (m^3/s)?
    * vflux1 - Meridional (y) fluxes [imt,jmt-1,km] (m^3/s)?
"""
# this parameter is in case there is less model output available
# vertically than was actually run on the grid
if zparuv is None:
zparuv = zpar
# Read in model output for index tind
if z0 == 's': # read in less model output to begin with, to save time
if nc.variables['u'].ndim == 4:
u = nc.variables['u'][tind, zparuv, :, :]
v = nc.variables['v'][tind, zparuv, :, :]
elif nc.variables['u'].ndim == 3:
u = nc.variables['u'][tind, :, :]
v = nc.variables['v'][tind, :, :]
if 'zeta' in nc.variables:
# [t,j,i], ssh in tracmass
ssh = nc.variables['zeta'][tind, :, :]
sshread = True
else:
sshread = False
else:
u = nc.variables['u'][tind, :, :, :]
v = nc.variables['v'][tind, :, :, :]
if 'zeta' in nc.variables:
# [t,j,i], ssh in tracmass
ssh = nc.variables['zeta'][tind, :, :]
sshread = True
else:
sshread = False
# Use octant to calculate depths for the appropriate vertical grid
# parameters have to transform a few back to ROMS coordinates and python
# ordering for this
if sshread:
zwt = octant.depths.get_zw(grid.Vtransform, grid.Vstretching,
grid.km+1, grid.theta_s,
grid.theta_b, grid.h, grid.hc, zeta=ssh,
Hscale=3)
else: # if ssh isn't available, approximate as 0
zwt = octant.depths.get_zw(grid.Vtransform, grid.Vstretching,
grid.km+1, grid.theta_s,
grid.theta_b, grid.h, grid.hc, zeta=0,
Hscale=3)
# Change dzt to tracmass/fortran ordering
dzt = zwt[1:, :, :] - zwt[:-1, :, :]
# also want depths on rho grid
if sshread:
zrt = octant.depths.get_zrho(grid.Vtransform, grid.Vstretching,
grid.km, grid.theta_s,
grid.theta_b, grid.h, grid.hc,
zeta=ssh, Hscale=3)
else:
zrt = octant.depths.get_zrho(grid.Vtransform, grid.Vstretching,
grid.km, grid.theta_s,
grid.theta_b, grid.h, grid.hc, zeta=0,
Hscale=3)
dzu = .5*(dzt[:, :, 0:grid.imt-1] + dzt[:, :, 1:grid.imt])
dzv = .5*(dzt[:, 0:grid.jmt-1, :] + dzt[:, 1:grid.jmt, :])
# I think I can avoid this loop for the isoslice case
if z0 is None: # 3d case
uflux1 = u*dzu*grid.dyu
vflux1 = v*dzv*grid.dxv
elif z0 == 's': # want a specific s level zpar
uflux1 = u*dzu[zpar, :, :]*grid.dyu
vflux1 = v*dzv[zpar, :, :]*grid.dxv
dzt = dzt[zpar, :, :]
zrt = zrt[zpar, :, :]
elif z0 == 'rho' or z0 == 'salt' or z0 == 'temp':
# the vertical setup we're selecting an isovalue of
vert = nc.variables[z0][tind, :, :, :]
# Calculate flux and then take slice
uflux1 = octant.tools.isoslice(u*dzu*grid.dyu, op.resize(vert, 2), zpar)
vflux1 = octant.tools.isoslice(v*dzv*grid.dxv, op.resize(vert, 1), zpar)
dzt = octant.tools.isoslice(dzt, vert, zpar)
zrt = octant.tools.isoslice(zrt, vert, zpar)
elif z0 == 'z':
# Calculate flux and then take slice
uflux1 = octant.tools.isoslice(u*dzu*grid.dyu, op.resize(zrt, 2), zpar)
vflux1 = octant.tools.isoslice(v*dzv*grid.dxv, op.resize(zrt, 1), zpar)
dzt = octant.tools.isoslice(dzt, zrt, zpar)
zrt = np.ones(uflux1.shape)*zpar # array of the input desired depth
# make sure that all fluxes have a placeholder for depth
if isinstance(z0, str):
uflux1 = uflux1.reshape(np.append(1, uflux1.shape))
vflux1 = vflux1.reshape(np.append(1, vflux1.shape))
dzt = dzt.reshape(np.append(1, dzt.shape))
zrt = zrt.reshape(np.append(1, zrt.shape))
return uflux1, vflux1, dzt, zrt, zwt
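# A minimal, self-contained sketch (illustrative sizes and random stand-in
# arrays, not real model output) of how the flux products above shape up:
# flux = velocity * cell height * wall width, with heights averaged onto the
# u grid just as in readfields().
def _demo_flux_shapes():
    km, jmt, imt = 3, 5, 4
    u = np.random.rand(km, jmt, imt - 1)            # zonal velocity on u grid
    dzt = np.random.rand(km, jmt, imt)              # cell heights on rho grid
    dyu = np.random.rand(jmt, imt - 1)              # y-wall widths on u grid
    dzu = .5*(dzt[:, :, 0:imt-1] + dzt[:, :, 1:imt])  # heights onto u grid
    uflux = u*dzu*dyu                               # [km, jmt, imt-1]
    return uflux.shape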
def savetracks(xin, yin, zpin, tpin, name, nstepsin, Nin, ffin, tseasin,
ahin, avin, do3din, doturbin, locin, doperiodicin,
time_unitsin, T0in=None, Uin=None, Vin=None, savell=True):
"""
Save tracks that have been calculated by tracmass into a netcdf file.
Args:
xin,yin,zpin: Drifter track positions [drifter x time]
tpin: Time vector for drifters [drifter x time]
name: Name of simulation, to use for saving file
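        nstepsin: Max number of time steps between model outputs
        Nin: Number of samplings of the drifter tracks
        ffin: Forward (1) or backward (-1) in time
        tseasin: Time between model outputs [s]
        ahin, avin: Horizontal and vertical diffusion [m^2/s]
        do3din: Flag for running in 3d (1) or 2d (0)
        doturbin: Flag for the subgrid parameterization used
        locin: Model output location
        doperiodicin: Flag for periodic boundary conditions
        time_unitsin: Units string for the drifter time variable
        T0in, Uin, Vin (Optional): Initial and aggregated volume transports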
savell: Whether saving in latlon (True) or grid coords (False).
Default True.
"""
# name for ll is basic, otherwise add 'gc' to indicate as grid indices
if not savell:
name += 'gc'
ntrac = xin.shape[0] # number of drifters
# number of time steps (with interpolation steps and starting point)
nt = xin.shape[1]
# SAVE VERSION OF TRACPY USED
# Save file into a local directory called tracks. Make directory if it
# doesn't exist.
if 'tracks' not in name:
if not os.path.exists('tracks'):
os.makedirs('tracks')
name = 'tracks/' + name
# Open file for writing.
# Using netCDF3-Classic because the ROMS output does and
# MFDataset does not work with NetCDF4
# Can't save variables over 2GB without special format:
# http://www.ncl.ucar.edu/Support/talk_archives/2011/0599.html
# rootgrp = netCDF.Dataset('tracks/' + name + '.nc','w',format='NETCDF3_CLASSIC')
# # Hoping that this format will both allow large variables and aggregation
# rootgrp = netCDF.Dataset('tracks/' + name + '.nc','w',format='NETCDF3_64BIT')
# Really useful netCDF4 resource:
# http://www.unidata.ucar.edu/software/netcdf/workshops/2012/netcdf_python/netcdf4python.pdf
# Looks like I might still be able to use MFDataset (with netCDF4_CLASSIC files)
# Apply compression at the createVariable stage with zlib
# Info about classic: http://www.unidata.ucar.edu/software/netcdf/docs/netcdf/NetCDF_002d4-Classic-Model-Format.html
# Looks like I might be able to use this, still use MFDataset, have large variables, and compress too
# 4-Classic can still only have 1 unlimited dimension
rootgrp = netCDF.Dataset(name + '.nc', 'w', format='NETCDF4_CLASSIC')
# Define dimensions
rootgrp.createDimension('ntrac', ntrac)
rootgrp.createDimension('nt', nt)
if Uin is not None:
xul = Uin.shape[0]
yul = Uin.shape[1]
rootgrp.createDimension('xul', xul)
rootgrp.createDimension('yul', yul)
xvl = Vin.shape[0]
yvl = Vin.shape[1]
rootgrp.createDimension('xvl', xvl)
rootgrp.createDimension('yvl', yvl)
# Do the rest of this by variable so they can be deleted as I go for memory.
if savell: # if saving in latlon
# Create variable
# 64-bit floating point, with lossless compression
lonp = rootgrp.createVariable('lonp', 'f8', ('ntrac', 'nt'),
zlib=True)
# Set some attributes
lonp.long_name = 'longitudinal position of drifter'
lonp.units = 'degrees'
lonp.time = 'tp'
# Write data to netCDF variables
lonp[:] = xin
# Delete to save space
del(xin)
# 64-bit floating point, with lossless compression
latp = rootgrp.createVariable('latp', 'f8', ('ntrac', 'nt'),
zlib=True)
latp.long_name = 'latitudinal position of drifter'
latp.units = 'degrees'
latp.time = 'tp'
latp[:] = yin
del(yin)
else: # then saving in grid coordinates
# Create variable
# 64-bit floating point, with lossless compression
xg = rootgrp.createVariable('xg', 'f8', ('ntrac', 'nt'), zlib=True)
# Set some attributes
xg.long_name = 'x grid position of drifter'
xg.units = 'grid units'
xg.time = 'tp'
# Write data to netCDF variables
xg[:] = xin
# Delete to save space
del(xin)
# 64-bit floating point, with lossless compression
yg = rootgrp.createVariable('yg', 'f8', ('ntrac', 'nt'), zlib=True)
yg.long_name = 'y grid position of drifter'
yg.units = 'grid units'
yg.time = 'tp'
yg[:] = yin
del(yin)
if do3din:
# 64-bit floating point, with lossless compression
zp = rootgrp.createVariable('zp', 'f8', ('ntrac', 'nt'),
zlib=True)
zp.long_name = 'vertical position of drifter (negative is downward from surface)'
zp.units = 'meter'
zp.time = 'tp'
zp[:] = zpin
del(zpin)
else:
del(zpin)
# 64-bit floating point, with lossless compression
tp = rootgrp.createVariable('tp', 'f8', ('ntrac', 'nt'),
zlib=True)
tp.long_name = 'time at drifter locations'
tp.units = time_unitsin
tp[:] = tpin
del(tpin)
if Uin is not None:
# 64-bit floating point, with lossless compression
T0 = rootgrp.createVariable('T0', 'f8', ('ntrac'), zlib=True)
U = rootgrp.createVariable('U', 'f8', ('xul', 'yul'), zlib=True)
V = rootgrp.createVariable('V', 'f8', ('xvl', 'yvl'), zlib=True)
T0.long_name = 'Initial volume transport associated with each drifter'
U.long_name = 'Aggregation of x volume transports of drifters'
V.long_name = 'Aggregation of y volume transports of drifters'
T0.units = 'meter3 second-1'
U.units = 'meter3 second-1'
V.units = 'meter3 second-1'
T0[:] = T0in
U[:] = Uin
V[:] = Vin
del(T0in, Uin, Vin)
# Create variables
# Main track information
# Include other run details
nsteps = rootgrp.createVariable('nsteps', 'i4')
N = rootgrp.createVariable('N', 'i4')
ff = rootgrp.createVariable('ff', 'i4')
tseas = rootgrp.createVariable('tseas', 'f8')
ah = rootgrp.createVariable('ah', 'f8')
av = rootgrp.createVariable('av', 'f8')
do3d = rootgrp.createVariable('do3d', 'i4')
doturb = rootgrp.createVariable('doturb', 'i4')
doperiodic = rootgrp.createVariable('doperiodic', 'i4')
# Set some attributes
nsteps.long_name = 'sets max time steps between time interpolations \
between model outputs'
N.long_name = 'sets number of samplings of drifter track'
ff.long_name = 'forward (1) or backward (-1) in time'
tseas.long_name = 'time between model outputs'
ah.long_name = 'horizontal diffusion'
av.long_name = 'vertical diffusion'
do3d.long_name = 'flag for running in 3d (1) or 2d (0)'
doturb.long_name = 'flag for using no subgrid parameterization (0), \
added turbulent velocities (1), displacement to \
particle position on a circle (2), displacement to \
particle position on an ellipse (3)'
doperiodic.long_name = 'flag for using periodic boundary conditions: \
none (0), in x-direction (1), in y-direction (2)'
tseas.units = 'second'
ah.units = 'meter2 second-1'
av.units = 'meter2 second-1'
# Write data to netCDF variables
nsteps[:] = nstepsin
N[:] = Nin
ff[:] = ffin
tseas[:] = tseasin
ah[:] = ahin
av[:] = avin
do3d[:] = do3din
doturb[:] = doturbin
doperiodic[:] = doperiodicin
rootgrp.close()
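# Hedged usage sketch for savetracks (hypothetical arrays; names and values
# are illustrative only, not taken from a real simulation):
# xg = np.random.rand(10, 5); yg = np.random.rand(10, 5)
# tp = np.tile(np.arange(5.), (10, 1))
# savetracks(xg, yg, xg, tp, 'demo', 5, 5, 1, 3600., 0., 0., 0, 0, '', 0,
#            'seconds since 1970-01-01', savell=False)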
def loadtracks(name, loc=None):
"""
Load in track info from netcdf file.
Args:
name (str): Name of tracks file
loc (Optional): Tracks file is assumed to be in local tracks
directory. Use this to give location if it is not.
"""
if loc is None:
nc = netCDF.Dataset('tracks/' + name + '.nc')
else:
nc = netCDF.Dataset(loc + '/' + name + '.nc')
lonp = nc.variables['lonp'][:]
latp = nc.variables['latp'][:]
zp = nc.variables['zp'][:]
tp = nc.variables['tp'][:]
return lonp, latp, zp, tp
def loadtransport(name, fmod=None):
"""
Args:
name: Name of project
fmod: File modifier: a way to choose a subset of the file in the
project directory instead of all. Should be a string and can include
asterisks as wildcards.
Returns:
* U, V - Transport of drifter volume in x and y directions over all
used simulation files
* lon0 - Initial lon location for drifters
* lat0 - Initial lat location for drifters
    * T0 - Overall initial volume transport, summed over all drifters
"""
# Which files to read in.
if fmod is None:
Files = glob.glob('tracks/' + name + '/*.nc')
    elif isinstance(fmod, list):
Files = []
for i in range(len(fmod)):
Files = Files + glob.glob('tracks/' + fmod[i])
else:
Files = glob.glob('tracks/' + name + '/' + fmod + '.nc')
Files.sort()
# Load in U and V volume transports of drifters and add together for
# all files
for i, File in enumerate(Files):
d = netCDF.Dataset(File)
if i == 0: # initialize U and V transports from first file
U = d.variables['U'][:]
V = d.variables['V'][:]
T0 = np.sum(d.variables['T0'][:])
else: # add in transports from subsequent simulations
U = U + d.variables['U'][:]
V = V + d.variables['V'][:]
T0 = T0 + np.sum(d.variables['T0'][:])
# Add initial drifter location (all drifters start at the same
# location)
lon0 = d.variables['lonp'][:, 0]
lat0 = d.variables['latp'][:, 0]
d.close()
return U, V, lon0, lat0, T0
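# Hedged usage sketch (hypothetical project and file-modifier names):
# U, V, lon0, lat0, T0 = loadtransport('myproject', fmod='run2010*')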
def save_ll2grid(name, grid, loc=None):
"""
    Input drifter tracks from a file saved in lat/lon coordinates and save a
    new file with the drifter tracks in grid coordinates instead.
Example:
>>> loc = 'http://barataria.tamu.edu:8080/thredds/dodsC/NcML/txla_nesting6.nc' # TXLA model/grid output location
>>> grid = tracpy.inout.readgrid(loc)
>>> tracpy.inout.save_ll2grid([trackfile], grid, loc=loc)
Note:
[trackfile] should be the name of the drifter tracks files,
including .nc extension, and any location prefix after 'tracks/'
Note:
input a loc value if the drifter files do not have it saved (those
run on hafen, for example)
"""
# load in tracks
d = netCDF.Dataset(name)
lonp = d.variables['lonp'][:]
latp = d.variables['latp'][:]
# Convert to grid coords
x, y, dt = tracpy.tools.interpolate2d(lonp, latp, grid, 'd_ll2ij')
del(lonp, latp, grid)
if 'loc' in d.variables:
loc = d.variables['loc'][:]
else:
print('will use input loc value for saving to file')
# save new file
# transport calculation included
if 'U' in d.variables:
if d.variables['do3d'][:]:
            # the doperiodic flag and time units are read back from the file
            savetracks(x, y, d.variables['zp'][:], d.variables['tp'][:],
                       name.split('/')[-1][:-3], d.variables['nsteps'][:],
                       d.variables['N'][:], d.variables['ff'][:],
                       d.variables['tseas'][:], d.variables['ah'][:],
                       d.variables['av'][:], d.variables['do3d'][:],
                       d.variables['doturb'][:], loc,
                       d.variables['doperiodic'][:], d.variables['tp'].units,
                       d.variables['T0'][:], d.variables['U'][:],
                       d.variables['V'][:], savell=False)
else: # have to input something for z but it won't be saved
            savetracks(x, y, y, d.variables['tp'][:],
                       name.split('/')[-1][:-3], d.variables['nsteps'][:],
                       d.variables['N'][:], d.variables['ff'][:],
                       d.variables['tseas'][:], d.variables['ah'][:],
                       d.variables['av'][:], d.variables['do3d'][:],
                       d.variables['doturb'][:], loc,
                       d.variables['doperiodic'][:], d.variables['tp'].units,
                       d.variables['T0'][:], d.variables['U'][:],
                       d.variables['V'][:], savell=False)
else:
if d.variables['do3d'][:]:
            savetracks(x, y, d.variables['zp'][:], d.variables['tp'][:],
                       name.split('/')[-1][:-3], d.variables['nsteps'][:],
                       d.variables['N'][:], d.variables['ff'][:],
                       d.variables['tseas'][:], d.variables['ah'][:],
                       d.variables['av'][:], d.variables['do3d'][:],
                       d.variables['doturb'][:], loc,
                       d.variables['doperiodic'][:], d.variables['tp'].units,
                       savell=False)
else: # have to input something for z but it won't be saved
            savetracks(x, y, y, d.variables['tp'][:],
                       name.split('/')[-1][:-3], d.variables['nsteps'][:],
                       d.variables['N'][:], d.variables['ff'][:],
                       d.variables['tseas'][:], d.variables['ah'][:],
                       d.variables['av'][:], d.variables['do3d'][:],
                       d.variables['doturb'][:], loc,
                       d.variables['doperiodic'][:], d.variables['tp'].units,
                       savell=False)
d.close()
``` |
{
"source": "josueortc/adelson_motion_model",
"score": 3
} |
#### File: josueortc/adelson_motion_model/adelson_motion_model.py
```python
import numpy as np
import torch
def filter_generation():
#Define the space axis of the filters
    nx = 16  # np.linspace's num argument must be an integer
max_x = 2.0
dx = (max_x*2)/nx
#A row vector holding spatial sampling intervals
x_filt = np.linspace(-max_x, max_x, num=nx)
#Spatial filter parameters
sx = 0.5
sf = 1.1
#Spatial filter response
gauss = np.exp(-x_filt**2/(sx**2))
even_x = np.cos(2*np.pi*sf*x_filt)*gauss
odd_x = np.sin(2*np.pi*sf*x_filt)*gauss
#Define the time axis of the filters
nt = 3
max_t = 0.5
dt = max_t/nt
#A column vector holding the temporal sampling intervals
t_filt = np.linspace(0, max_t, nt)
#Temporal filter parameters
k = 100
slow_n = 9
fast_n = 6
beta = 0.9
    #Temporal filter response: (k*t)**n weighting, per the Adelson-Bergen
    #temporal impulse response
    slow_t = (k*t_filt)**slow_n*np.exp(-k*t_filt)*(1/np.math.factorial(slow_n) - beta*((k*t_filt)**2)/np.math.factorial(slow_n+2))
    fast_t = (k*t_filt)**fast_n*np.exp(-k*t_filt)*(1/np.math.factorial(fast_n) - beta*((k*t_filt)**2)/np.math.factorial(fast_n+2))
#Step 1b
even_xx = np.outer(even_x, even_x)
odd_xx = np.outer(odd_x, odd_x)
e_slow = np.random.random((slow_t.shape[0], even_x.shape[0], even_x.shape[0]))
e_fast = np.random.random((slow_t.shape[0], even_x.shape[0], even_x.shape[0]))
o_slow = np.random.random((slow_t.shape[0], even_x.shape[0], even_x.shape[0]))
o_fast = np.random.random((slow_t.shape[0], even_x.shape[0], even_x.shape[0]))
#Step 1c
for i in range(even_x.shape[0]):
e_slow[:,:,i] = np.outer(slow_t, even_xx[:,i])
e_fast[:,:,i] = np.outer(fast_t, even_xx[:,i])
o_slow[:,:,i] = np.outer(slow_t, odd_xx[:,i])
o_fast[:,:,i] = np.outer(fast_t, odd_xx[:,i])
    #Step 2: opponent quadrature pairs; the opposite signs make the left and
    #right filters selective for opposite directions of motion
    left_1 = o_fast + e_slow
    left_2 = -o_slow + e_fast
    right_1 = -o_fast + e_slow
    right_2 = o_slow + e_fast
left_1 = torch.from_numpy(left_1).float()
left_2 = torch.from_numpy(left_2).float()
right_1 = torch.from_numpy(right_1).float()
right_2 = torch.from_numpy(right_2).float()
left_1 = left_1.unsqueeze(0).unsqueeze(0)
left_2 = left_2.unsqueeze(0).unsqueeze(0)
right_1 = right_1.unsqueeze(0).unsqueeze(0)
right_2 = right_2.unsqueeze(0).unsqueeze(0)
return left_1, left_2, right_1, right_2
def motion_model(input):
assert len(input.size()) == 5
    left_1, left_2, right_1, right_2 = filter_generation()
#Convolution with input
resp_right_1 = torch.nn.functional.conv3d(input, right_1)
resp_right_2 = torch.nn.functional.conv3d(input, right_2)
resp_left_1 = torch.nn.functional.conv3d(input, left_1)
resp_left_2 = torch.nn.functional.conv3d(input, left_2)
resp_right_1 = resp_right_1 ** 2
resp_right_2 = resp_right_2 ** 2
resp_left_1 = resp_left_1 ** 2
resp_left_2 = resp_left_2 ** 2
energy_right= resp_right_1 + resp_right_2
energy_left= resp_left_1 + resp_left_2
total_energy = energy_right.sum() + energy_left.sum()
#Normalization
RR1 = resp_right_1/total_energy
RR2 = resp_right_2/total_energy
LR1 = resp_left_1/total_energy
LR2 = resp_left_2/total_energy
motion = torch.cat([RR1,RR2,LR1, LR2], 1)
return motion
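# A minimal, hedged sketch of running the model on a random stimulus.
# Sizes are illustrative only: (batch, channel, time, height, width).
if __name__ == '__main__':
    stim = torch.rand(1, 1, 10, 32, 32)  # random stand-in stimulus
    out = motion_model(stim)
    # 4 channels: the right/left energy pairs, e.g. (1, 4, 8, 17, 17) here
    print(out.size())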
``` |
{
"source": "Josuepma/BilletesFalsosPythonMexico",
"score": 3
} |
#### File: BilletesFalsosPythonMexico/tools/billete50.py
```python
import csv
import numpy as np
import cv2 as cv
from matplotlib import pyplot as plt
def validacion(edge,edgec2):
MIN_MATCH_COUNT = 8
resultados=[]
kp1, des1 = sift.detectAndCompute(edge,None)
kp2, des2 = sift.detectAndCompute(edgec2,None)
FLANN_INDEX_KDTREE = 1
index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5)
search_params = dict(checks = 50)
flann = cv.FlannBasedMatcher(index_params, search_params)
matches = flann.knnMatch(des1,des2,k=2)
# store all the good matches as per Lowe's ratio test.
good = []
for m,n in matches:
if m.distance < 0.6*n.distance:
good.append(m)
if len(good)>MIN_MATCH_COUNT:
src_pts = np.float32([ kp1[m.queryIdx].pt for m in good ]).reshape(-1,1,2)
dst_pts = np.float32([ kp2[m.trainIdx].pt for m in good ]).reshape(-1,1,2)
M, mask = cv.findHomography(src_pts, dst_pts, cv.RANSAC,5.0)
matchesMask = mask.ravel().tolist()
h,w = edge.shape
pts = np.float32([ [0,0],[0,h-1],[w-1,h-1],[w-1,0] ]).reshape(-1,1,2)
dst = cv.perspectiveTransform(pts,M)
img2 = cv.polylines(edgec2,[np.int32(dst)],True,255,3, cv.LINE_AA)
else:
#print( "Not enough matches are found - {}/{}".format(len(good), MIN_MATCH_COUNT) )
matchesMask = None
draw_params = dict(matchColor = (0,255,0), # draw matches in green color
singlePointColor = None,
matchesMask = matchesMask, # draw only inliers
flags = 2)
    if matchesMask is None:
        matchesMask = [0, 0]  # sentinel when too few matches were found
if(len(matchesMask)>5):
resultados+=["1"]
else:
resultados+=["0"]
#print(draw_params)
edgec2 = cv.drawMatches(edge,kp1,edgec2,kp2,good,None,**draw_params)
#plt.imshow(edgec2, 'gray'),plt.show()
return resultados
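# Note: validacion() returns ["1"] when the RANSAC homography inlier mask has
# more than 5 entries and ["0"] otherwise; the lists below accumulate one
# flag per banknote image and security feature.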
MIN_MATCH_COUNT = 8
resultados = []
resultadosc2 = []
resultadosc3 = []
resultadoFinal = []
billetes=[]
from pathlib import Path
p = Path('images/Billetes_de_50')
for child in p.iterdir():
#print(child)
billetes.append(child.__str__())
p = Path('images/billeteF_50')
for child in p.iterdir():
#print(child)
billetes.append(child.__str__())
#print(billetes)
sift = cv.SIFT_create()  # create the detector once; validacion() uses this module-level instance
for i in billetes:
print(i)
img1 = cv.imread(i,0) # queryImage
img4 = cv.imread('images/Caracteristica_50/C1.jpg',0) # trainImage
img3 = cv.imread('images/Caracteristica_50/C2.jpg',0) # trainImage
img5 = cv.imread('images/Caracteristica_50/CAR3.jpg',0) # trainImage
edge=cv.Canny(img1,100,200)
edgec2=cv.Canny(img3,100,200)
edgec4=cv.Canny(img4,100,200)
edgec5=cv.Canny(img5,100,200)
resultados+=validacion(edge,edgec2)
resultadosc2+=validacion(edge,edgec4)
resultadosc3+=validacion(edge,edgec5)
if(int(resultados[-1]) == 1 and int(resultadosc2[-1]) == 1):
resultadoFinal+=["1"]
else:
resultadoFinal+=["0"]
print(resultados)
print(resultadosc2)
columnas = ['c1', 'c2', 'c3', 'b']
with open("Billetes50C1.csv", 'w', newline="") as file:
writer = csv.DictWriter(file, fieldnames=columnas)
writer.writeheader()
    for i in range(len(resultados)):
        writer.writerow({'c1': resultados[i], 'c2': resultadosc2[i],
                         'c3': resultadosc3[i], 'b': resultadoFinal[i]})
``` |
{
"source": "Josue-Rodriguez-98/WhatDidYouMean",
"score": 3
} |
#### File: Josue-Rodriguez-98/WhatDidYouMean/DidYouMean.py
```python
import subprocess as sp
import re
import os
#Code block that executes the command
def execute(args):
try:
#print("trying...")
if(args[0] == "ping" and len(args) == 2):
args = [args[0],"-c","4",args[1]]
sp.call(args)
except:
#print("failing...")
print(f"Command '{args}' not defined")
#----------------------------------------
#Code block that analyzes the erroneous input
def analyze(args, correct, rcvdList):
    #Check whether the command is already in the list
if(args[0] in rcvdList):
args[0] = correct
execute(args)
else:
        #If it is not, ask the user what they meant and store it (or not)
answer = input(f"Did you mean '{correct}'? [Y/n]: ")
if(answer):
answer = answer.lower()
if(not answer or answer[0] == 'y'):
print("Ok! I'll remember that!")
rcvdList.append(args[0])
args[0] = correct
execute(args)
elif(answer[0] == 'n'):
print("Ok then!")
else:
print("I'll take that as a NO...")
return rcvdList
#------------------------------------------
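# Hedged illustration (hypothetical inputs): the keyboard-neighborhood
# regexes used in main() accept one-key-off typos for known commands,
# e.g. "ka" or "ld" match r'^[jklñ{][asdf]$' as candidates for "ls".
def _demo_match():
    return bool(re.match(r'^[jklñ{][asdf]$', 'ka'))  # True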
def getPath():
currentPath = os.getcwd()[1:].split("/")
newPath = []
retVal = ""
for path in currentPath:
#print(path)
if(path != "home"):
newPath.append(path)
for path in newPath:
if(path == currentPath[1]):
retVal = path + ":~"
else:
retVal += "/" + path
return retVal
#-------------------------------------------
def writeFile(lsList, dfList, pingList):
content = ""
i = 0
for i in range(len(lsList)):
#print(f" lsList in {i} is {lsList[i]}")
if(lsList[i] != ""):
content += lsList[i]
if(i + 1 < len(lsList)):
content += "|"
content += "#"
i = 0
for i in range(len(dfList)):
#print(f" dfList in {i} is {dfList[i]}")
if(dfList[i] != ""):
content += dfList[i]
if(i + 1 < len(dfList)):
content += "|"
content += "#"
i = 0
for i in range(len(pingList)):
#print(f" pingList in {i} is {pingList[i]}")
if(pingList[i] != ""):
content += pingList[i]
if(i + 1 < len(pingList)):
content += "|"
#print(content)
file = open("filiberto.txt","w+")
file.write(content)
file.close()
#-------------------------------------------
def main():
lsList = []
dfList = []
pingList = []
alive = True
content = ""
try:
        #If the file exists, read it
file = open("filiberto.txt","r")
content = file.readline()
file.close()
#print(content)
arrayContent = content.split("#")
lsList = arrayContent[0].split("|")
dfList = arrayContent[1].split("|")
pingList = arrayContent[2].split("|")
except IOError:
        #If the file does not exist, create it
file = open("filiberto.txt","w+")
file.close()
path = getPath()
while alive:
command = input(f"{path}:> ")
if(re.match(r'^[eE][xX][iI][tT]$',command)):
print("See you later!")
            #Write the stored suggestions to a txt file
writeFile(lsList, dfList, pingList)
alive = False
elif(command):
args = command.split()
if(len(args) > 0):
if(args[0] == "ls" or args[0] == "df" or args[0] == "ping"):
execute(args)
else:
if(re.match(r'^[jklñ{][asdf]$',args[0])):
                        #check for ls
analyze(args,"ls",lsList)
elif(re.match(r'^[asdfg][sdfgh]$',args[0])):
                        #check for df
analyze(args,"df",dfList)
elif(re.match(r'^[iop][yuiop][vbnm,][dfghj]$', args[0])):
                        #check for ping
analyze(args,"ping",pingList)
else:
print("We really don't know what you mean... :s")
else:
print("We really don't know what you mean... :s")
#-----------------------------------------
if '__main__' == __name__:
main()
``` |
{
"source": "josuerojasq/netacad_python",
"score": 2
} |
#### File: extra/good/alpha.py
```python
def FunA():
return "Alpha"
if __name__ == "__main__":
print("I prefer to be a module")
```
#### File: netacad_python/103_manejoErrores/errores6.py
```python
def badFun(n):
try:
return 1 / n
except ArithmeticError:
print("¡Problema aritmético!")
return None
x = int(input("Ingresa un numero: "))
print("1 /",x,"=",badFun(x))
print("FIN.")
```
#### File: netacad_python/105_POO/bases.py
```python
class SuperUno:
pass
class SuperDos:
pass
class Sub(SuperUno, SuperDos):
pass
def printBases(cls):
print('( ', end='')
for x in cls.__bases__:
print(x.__name__, end=' ')
print(')')
printBases(SuperUno)
printBases(SuperDos)
printBases(Sub)
```
#### File: netacad_python/105_POO/clasePila.py
```python
class Pila:
    #Define the constructor function...
    #The constructor's name is always "__init__"
    def __init__(self):
        #Adding a property to the Pila class.
        #When a property name starts with two underscores (__), it becomes private
        self.__listaPila = [] #Encapsulation
    #Functions: such a component is called "public", so its name cannot start
    # with two (or more) underscores and must have at most one trailing underscore.
def push(self, val):
self.__listaPila.append(val)
    #All methods must take this "self" parameter, as in the constructor.
    # It lets the method access the object's entities (properties and methods).
def pop(self):
val = self.__listaPila[-1]
del self.__listaPila[-1]
return val
#Define a new subclass pointing to the class to be used as the superclass.
class SumarPila(Pila):
def __init__(self):
Pila.__init__(self)
self.__sum = 0
    #Change the methods' functionality, not their names.
def push(self, val):
self.__sum += val
Pila.push(self, val)
def pop(self):
val = Pila.pop(self)
self.__sum -= val
return val
def getSuma(self):
return self.__sum
#Instantiating the Pila object
objetoPila = Pila()
objetoPila.push(3)
objetoPila.push(2)
objetoPila.push(1)
print(objetoPila.pop())
print(objetoPila.pop())
print(objetoPila.pop())
objPila = SumarPila()
for i in range(5):
objPila.push(i)
print(objPila.getSuma())
for i in range(5):
print(objPila.pop())
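# Hedged note: double-underscore "privacy" is name mangling, not true hiding;
# e.g. objetoPila._Pila__listaPila would still reach the underlying list.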
```
#### File: netacad_python/105_POO/existenciaAtributo.py
```python
class ClaseEjemplo:
def __init__(self, val):
if val % 2 != 0:
self.a = 1
else:
self.b = 1
objetoEjemplo = ClaseEjemplo(1)
print(objetoEjemplo.a)
#This raises an "AttributeError"
#print(objetoEjemplo.b)
#You can avoid the problem with (not recommended):
try:
print(objetoEjemplo.b)
except AttributeError:
pass
```
#### File: netacad_python/105_POO/variablesClase2.py
```python
class ClaseEjemplo:
__contador = 0
def __init__(self, val = 1):
self.__primera = val
ClaseEjemplo.__contador += 1
objetoEjemplo1 = ClaseEjemplo()
objetoEjemplo2 = ClaseEjemplo(2)
objetoEjemplo3 = ClaseEjemplo(4)
print(objetoEjemplo1.__dict__, objetoEjemplo1._ClaseEjemplo__contador)
print(objetoEjemplo2.__dict__, objetoEjemplo2._ClaseEjemplo__contador)
print(objetoEjemplo3.__dict__, objetoEjemplo3._ClaseEjemplo__contador)
```
#### File: netacad_python/105_POO/variablesInstancia.py
```python
class ClaseEjemplo:
def __init__(self, val = 1):
self.primera = val
def setSegunda(self, val):
self.segunda = val
objetoEjemplo1 = ClaseEjemplo()
objetoEjemplo2 = ClaseEjemplo(2)
objetoEjemplo2.setSegunda(3)
objetoEjemplo3 = ClaseEjemplo(4)
objetoEjemplo3.tercera = 5
#Python objects, when created, come with a small set of predefined properties and
# methods. Every object has them, whether you want them or not. One of them is a
# variable named "__dict__" (it is a dictionary). The variable holds the names and
# values of all the properties (variables) the object currently contains
print(objetoEjemplo1.__dict__)
print(objetoEjemplo2.__dict__)
print(objetoEjemplo3.__dict__)
```
#### File: josuerojasq/netacad_python/78.funcionesPtsClave.py
```python
def multiply(a, b):
return a * b
print(multiply(3, 4)) # output: 12
#El resultado de una función se puede asignar fácilmente a una variable
def deseos():
return "¡Felíz Cumpleaños!"
d = deseos()
print(d) # output: ¡Felíz Cumpleaños!
# Example 1
def deseos2():
print("Mis deseos")
return "¡Felíz Cumpleaños!"
deseos2() # output: Mis deseos
# Example 2
print(deseos2()) # outputs: Mis deseos
# ¡Feliz Cumpleaños!
#You can use a list as a function argument
def HolaaTodos(myList):
for nombre in myList:
print("Hola,", nombre)
HolaaTodos(["Adam", "John", "Lucy"])
#A list can also be a function's result
def createList(n):
myList = []
for i in range(n):
myList.append(i)
return myList
print(createList(5))
#Exercise 1
def isInt(data):
if type(data) == int:
return True
elif type(data) == float:
return False
print(isInt(5))
print(isInt(5.0))
print(isInt("5"))
#Exercise 2
def evenNumLst(ran):
lst = []
for num in range(ran):
if num % 2 == 0:
lst.append(num)
return lst
print(evenNumLst(11))
#Exercise 3
def listUpdater(lst):
updList = []
for elem in lst:
elem **= 2
updList.append(elem)
return updList
l = [1, 2, 3, 4, 5]
print(listUpdater(l))
```
#### File: josuerojasq/netacad_python/79.funcionesScopes.py
```python
def miFuncion():
print("¿Conozco a la variable?", var)
var = 1
miFuncion()
print(var)
#A variable that exists outside a function is in scope inside the function body, except when the function defines a variable of the same name.
def miFuncion2():
var2 = 2
print("¿Conozco a la variable?", var2)
var2 = 1
miFuncion2()
print(var2)
#The keyword "global" can extend the scope of a variable to include
#the bodies of functions, so that they can not only read the variables' values
#but also modify them.
def miFuncion3():
global var3
var3 = 2
print("¿Conozco a aquella variable?", var3)
var3 = 1
miFuncion3()
print(var3)
#Changing the parameter's value does not propagate outside the function.
#This also means that a function receives the argument's value,
#not the argument itself.
def miFuncion4(n):
print("Yo obtuve", n)
n += 1
print("Yo ahora tengo", n)
varn = 1
miFuncion4(varn)
print(varn)
def miFuncionl(miLista1):
print(miLista1)
miLista1 = [0, 1]
miLista2 = [2, 3]
miFuncionl(miLista2)
print(miLista2)
def miFuncionl2(miLista1):
print(miLista1)
del miLista1[0]
miLista2 = [2, 3]
miFuncionl2(miLista2)
print(miLista2)
```
#### File: josuerojasq/netacad_python/81.verificaTriangulo.py
```python
def esUnTriangulo (a, b, c):
return a + b > c and b + c > a and c + a > b
#print(esUnTriangulo (1, 1, 1))
#print(esUnTriangulo (1, 1, 3))
a = float(input("Ingresa la longitud del primer lado: "))
b = float(input("Ingresa la longitud del segundo lado: "))
c = float(input("Ingresa la longitud del tercer lado: "))
if esUnTriangulo(a, b, c):
print("Felicidades, puede ser un triángulo.")
else:
print("Lo siento, no puede ser un triángulo.")
#Check whether a triangle is a right triangle.
def esUnTrianguloRectangulo(a, b, c):
if not esUnTriangulo (a, b, c):
return False
if c > a and c > b:
return c ** 2 == a ** 2 + b ** 2
if a > b and a > c:
return a ** 2 == b ** 2 + c ** 2
else:
return b ** 2 == a ** 2 + c ** 2
print(esUnTrianguloRectangulo(5, 3, 4))
print(esUnTrianguloRectangulo(1, 3, 4))
print(esUnTrianguloRectangulo(3, 5, 4))
#Compute the area of a triangle with Heron's formula
def heron(a, b, c):
p = (a + b + c) / 2
return (p * (p - a) * (p - b) * (p - c)) ** 0.5
def campoTriangulo(a, b, c):
if not esUnTriangulo(a, b, c):
return None
return heron(a, b, c)
print(campoTriangulo(1., 1., 2. ** .5))
```
#### File: josuerojasq/netacad_python/92.importModulos.py
```python
import math
#how two namespaces (yours and the module's) can coexist.
def sin(x):
if 2 * x == pi:
return 0.99999999
else:
return None
pi = 3.14
print(sin(pi/2))
#Access names from the math module (namespace)
print(math.sin(math.pi/2))
``` |
{
"source": "josue-rosa/Interface-grafica-PySimpleGUI",
"score": 3
} |
#### File: Interface-grafica-PySimpleGUI/Notepy/MyNotepy.py
```python
import PySimpleGUI as psg
# -*- coding: utf-8 -*-
"""
Author: <NAME>
Version: 0.1
"""
psg.theme('Dark')
# psg.theme('BrownBlue')
largura = 90
altura = 25
bar_menu_layout = (
["Editar", ["Converter para MAIÚSCULA", "Converter para Title"]],
["Sobre", ["Autor ", "Créditos"]],
)
layout_window = [
[psg.MenuBar(bar_menu_layout)],
[psg.Multiline(font="Roboto 14", size=(largura, altura), right_click_menu=['Contexto', ['Converter para MAIÚSCULA', 'Converter Title']], key="_body_multiline_")],
[psg.Text('Localização do arquivo: ', size=(largura, 1), key='caminho_arquivo')],
]
window = psg.Window('Notepy - Programa escrito em Python 3', layout_window, resizable=True, margins=(0, 0))
window.read(timeout=1)
window.maximize()
window["_body_multiline_"].expand(expand_x=True, expand_y=True)
def tornar_texto_caixa_alta():
window["_body_multiline_"].update(value=str(values["_body_multiline_"]).upper())
def tornar_texto_title():
window["_body_multiline_"].update(value=str(values["_body_multiline_"]).title())
def autor():
texto_autor = """
Nome: <NAME>
Orgulhosamente escrito em Python e PySimpleGUI
"""
psg.popup(texto_autor, title='Autor', grab_anywhere=True)
def creditos():
texto_creditos = """
<NAME>
youtube.com/channel/UCz1ipXWkAYjcS4jie_IKs6g.
<NAME> - <NAME>
"""
psg.popup(texto_creditos, no_titlebar=False, title='Créditos', grab_anywhere=True)
while True:
event, values = window.read()
if event == psg.WIN_CLOSED:
break
if event == "Converter para MAIÚSCULA":
tornar_texto_caixa_alta()
if event == 'Converter para Title':
tornar_texto_title()
if event == "Converter para MAIÚSCULA":
menu_contexto_caixa_alta()
if event == "Converter Title":
menu_contexto_title()
if event == "Autor ":
autor()
if event == "Créditos":
creditos()
``` |
{
"source": "Josue-sch/proyecto_introduccion_a_la_programacion",
"score": 3
} |
#### File: Josue-sch/proyecto_introduccion_a_la_programacion/Proyecto.py
```python
from flask import Flask, make_response, redirect, request, jsonify, render_template
from flask_cors import CORS
from hangman import init_game_data
import hangman.config.constants as constants
import hangman.utils as utils
import hangman.validations as validations
from config.db import DB
from config.api import WordApi
app = Flask(__name__) # referencing current file
CORS(app)
db_instance = DB(app)
crud_methods = ['GET', 'POST']
Word = db_instance.models['word']
WORD = WordApi.get_word()
#Word.add_word(word_here, redirect('/')) add word to db
GAME_DATA = init_game_data(WORD)
@app.route('/', methods=crud_methods)
def index():
global WORD
if request.method == 'POST':
req = request.form
# TODO handle game logic functionality different
# handle different scenarios
# handle errors
# handle tries
        letter = req['letter']  # avoid shadowing the built-in input()
        print(letter)
return redirect('/')
else:
GAME_DATA['words'] = Word.get_played_words()
return render_template(
'index.html',
instructions=constants.INSTRUCTIONS,
game=GAME_DATA)
@app.route('/restart', methods=['GET'])
def restart():
# TODO make restart functionality
return redirect('/')
if __name__ == '__main__':
db_instance.init_db()
app.run(debug=True)
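    # Hedged usage note (default Flask dev server address assumed):
    #   GET  http://127.0.0.1:5000/   renders the board with instructions
    #   POST http://127.0.0.1:5000/   submits the form field "letter"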
``` |
{
"source": "JosueTorresM/StockMarketAlerts",
"score": 3
} |
#### File: JosueTorresM/StockMarketAlerts/StockMarketAlerts.py
```python
import os
import requests
import json
import time
from iexfinance.stocks import Stock
from forex_python.converter import CurrencyRates
from datetime import datetime
TELEGRAM_BOT_TOKEN = os.getenv('TELEGRAM_BOT_TOKEN')
TELEGRAM_CHAT_ID = os.getenv('TELEGRAM_CHAT_ID')
RTCER_KEY = os.getenv('RTCER_KEY')
stock_list = ["AAPL","AXP"]
average_cost = {"AAPL":6000.00,
"AXP":2200.00}
def RealTimeCurrencyExchangeRate(from_currency, to_currency, api_key):
base_url = "https://www.alphavantage.co/query?function=CURRENCY_EXCHANGE_RATE"
main_url = base_url + "&from_currency=" + from_currency + "&to_currency=" + to_currency + "&apikey=" + api_key
req_ob = requests.get(main_url)
result = req_ob.json()
return float(result["Realtime Currency Exchange Rate"]['5. Exchange Rate'])
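# Per the keys used above, the Alpha Vantage payload looks roughly like:
# {"Realtime Currency Exchange Rate": {..., "5. Exchange Rate": "17.05", ...}}
# (the "17.05" figure is illustrative only).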
def telegram_bot_sendtext(bot_message):
send_text = 'https://api.telegram.org/bot' + TELEGRAM_BOT_TOKEN + '/sendMessage?chat_id=' + TELEGRAM_CHAT_ID + '&parse_mode=Markdown&text=' + bot_message
requests.get(send_text)
def check_stocks(exchange_rate):
for stock in stock_list:
symbol = Stock(stock)
mxn_average_cost = average_cost[stock]
symbol_data = symbol.get_quote()
usd_price = symbol_data["latestPrice"]
mxn_price = usd_price * exchange_rate
print(stock + ": " + str(usd_price) + " USD/ " + str("{:.2f}".format(mxn_price)) + " MXN")
if mxn_price<mxn_average_cost:
telegram_bot_sendtext(stock + ":" +
"\nCurrent price: " + str(usd_price) + " USD/ " + str("{:.2f}".format(mxn_price)) + " MXN" +
"\nAverage cost: " + str(mxn_average_cost) + " MXN"
)
if __name__ == "__main__":
now = datetime.now()
market_open_time = now.replace(hour=8, minute=30, second=0, microsecond=0)
market_close_time = now.replace(hour=15, minute=0, second=0, microsecond=0)
if now.weekday() < 5 and now > market_open_time and now < market_close_time:
exchange_rate = RealTimeCurrencyExchangeRate("USD", "MXN", RTCER_KEY)
check_stocks(exchange_rate)
``` |
{
"source": "josuetsm/gshell",
"score": 2
} |
#### File: gshell/gshell/__init__.py
```python
from __future__ import print_function
import os
import os.path as osp
import pkg_resources
import platform
import re
import subprocess
import sys
import click
import yaml
from . import util
__author__ = '<NAME> <<EMAIL>>'
__version__ = pkg_resources.get_distribution('gshell').version
this_dir = osp.dirname(osp.abspath(osp.realpath(__file__)))
CONFIG_FILE = osp.expanduser('~/.gshell')
if platform.uname()[0] == 'Linux':
DRIVE_EXE = osp.join(this_dir, 'bin/_gshell_drive-linux-x64')
elif platform.uname()[0] == 'Darwin':
DRIVE_EXE = osp.join(this_dir, 'bin/_gshell_drive-osx-x64')
else:
sys.stderr.write('Not supported os\n')
sys.exit(1)
@click.group(context_settings={'help_option_names': ['-h', '--help']})
@click.version_option(__version__, '-V', '--version')
def cli():
pass
def init(name='system'):
home_dir = osp.expanduser('~')
if name == 'system':
config_dir = osp.join(home_dir, '.gdrive')
else:
config_dir = osp.join(home_dir, '.gshell_configs', name)
if not osp.exists(osp.join(config_dir, 'token_v2.json')):
print('Initializing {name} ({config})'
.format(name=name, config=config_dir))
cmd = '{exe} --config {config} about'.format(
exe=DRIVE_EXE, config=config_dir)
subprocess.call(cmd, shell=True)
if not osp.exists(CONFIG_FILE):
init_config()
return config_dir
def _get_current_config_dir():
default_config_dir = osp.expanduser('~/.gdrive')
if osp.exists(CONFIG_FILE):
config = yaml.safe_load(open(CONFIG_FILE))
name = config.get('name', 'system')
else:
name = 'system'
if name == 'system':
config_dir = default_config_dir
else:
config_dir = osp.join(osp.expanduser('~/.gshell_configs'), name)
if not osp.exists(osp.join(config_dir, 'token_v2.json')):
config_dir = init(name)
return config_dir
def _get_home_id():
config_dir = _get_current_config_dir()
cmd = '{exe} --config {config} list'.format(
exe=DRIVE_EXE, config=config_dir)
stdout = util.check_output(cmd)
lines = stdout.splitlines()
header = lines[0]
start = re.search('Id', header).start()
end = re.search('Name', header).start()
parent_id = lines[1][start:end].strip()
while parent_id is not None:
child_id = parent_id
parent_id = get_parent_id(child_id)
return child_id
def init_config(name='system'):
home_id = _get_home_id()
config = {'home_id': home_id, 'id': home_id, 'name': name}
yaml.safe_dump(config, open(CONFIG_FILE, 'w'), default_flow_style=False)
return config
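# Per the keys written above, ~/.gshell looks roughly like (ids illustrative):
#   home_id: <root folder id>
#   id: <current folder id>
#   name: system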
def getcwd():
if osp.exists(CONFIG_FILE):
return yaml.safe_load(open(CONFIG_FILE))
return init_config()
def get_path_by_id(id):
cwd = getcwd()
if cwd['id'] == cwd['home_id']:
return '/'
config_dir = _get_current_config_dir()
cmd = '{exe} --config {config} info {id}'.format(
exe=DRIVE_EXE, config=config_dir, id=id)
stdout = util.check_output(cmd)
for line in stdout.splitlines():
if line.startswith('Path: '):
path = line[len('Path: '):]
elif line.startswith('Parents: '):
path = osp.join('/', path)
break
return path
@cli.command(name='init', help='initialize gshell')
@click.argument('name', default='system', required=False)
def cmd_init(name):
config_dir = init(name)
print('{name}: {config}'.format(name=name, config=config_dir))
@cli.command(name='switch', help='switch user')
@click.argument('name', required=False)
def cmd_switch(name):
config_list_dir = osp.expanduser('~/.gshell_configs')
if not osp.exists(config_list_dir):
os.mkdir(config_list_dir)
if name is None:
config = yaml.safe_load(open(CONFIG_FILE, 'r'))
current_name = config.get('name', 'system')
config_list = {name: osp.join(config_list_dir, name)
for name in os.listdir(config_list_dir)}
config_list['system'] = osp.expanduser('~/.gdrive')
for name, config_dir in config_list.items():
flag = '*' if current_name == name else ' '
print('{flag} {name} ({config})'
.format(flag=flag, name=name, config=config_dir))
else:
config_dir = osp.join(config_list_dir, name)
if name == 'system' or osp.exists(config_dir):
config = yaml.safe_load(open(CONFIG_FILE, 'r'))
config['name'] = name
yaml.safe_dump(config, open(CONFIG_FILE, 'w'),
default_flow_style=True)
init_config(name)
print('Switched to {name} ({config}).'
.format(name=name, config=config_dir))
else:
print('Config for {name} ({config}) does not exist.'
.format(name=name, config=config_dir))
@cli.command(name='about', help='show account status')
def cmd_about():
config_dir = _get_current_config_dir()
cmd = '{exe} --config {config} about'.format(
exe=DRIVE_EXE, config=config_dir)
subprocess.call(cmd, shell=True)
@cli.command(name='upload', help='upload file')
@click.argument('filenames', required=True,
type=click.Path(exists=True), nargs=-1)
@click.option('-r', '--recursive', is_flag=True,
help='Upload files recursively.')
@click.option('-p', '--parent', help='Parent dir id of uploaded file.')
def cmd_upload(filenames, recursive, parent):
config_dir = _get_current_config_dir()
cwd = getcwd()
if parent is None:
parent = cwd['id']
for fname in filenames:
cmd = '{exe} --config {config} upload {file} --parent {pid}'.format(
exe=DRIVE_EXE, config=config_dir, file=fname, pid=parent)
if recursive:
cmd += ' --recursive'
subprocess.call(cmd, shell=True)
@cli.command(name='download', help='download file')
@click.argument('filename', required=True)
@click.option('-r', '--recursive', is_flag=True,
help='download directory recursively')
@click.option('-i', '--with-id', default=False, is_flag=True,
help='change directory with folder id')
def cmd_download(filename, recursive, with_id):
if with_id:
id = filename
else:
id = get_id_by_path(filename)
config_dir = _get_current_config_dir()
cmd = '{exe} --config {config} download {id}'.format(
exe=DRIVE_EXE, config=config_dir, id=id)
if recursive:
cmd += ' --recursive'
subprocess.call(cmd, shell=True)
@cli.command(name='rm', help='remove file')
@click.argument('filename', required=True)
@click.option('-r', '--recursive', is_flag=True,
help='remove files recursively')
def cmd_rm(filename, recursive):
config_dir = _get_current_config_dir()
id = get_id_by_path(filename)
cmd = '{exe} --config {config} delete'.format(
exe=DRIVE_EXE, config=config_dir)
if recursive:
cmd += ' --recursive'
cmd += ' {id}'.format(id=id)
subprocess.call(cmd, shell=True)
@cli.command(name='ll', help='list files in detail')
@click.argument('path', required=False)
def cmd_ll(path):
if path is None:
cwd = getcwd()
id = cwd['id']
else:
id = get_id_by_path(path)
config_dir = _get_current_config_dir()
cmd = '{exe} --config {config} list'
cmd += ''' --query "trashed = false and '{pid}' in parents"'''
cmd += ' --max 100 --name-width 0'
cmd = cmd.format(exe=DRIVE_EXE, config=config_dir, pid=id)
subprocess.call(cmd, shell=True)
@cli.command(name='ls', help='list files')
@click.argument('path', required=False)
@click.option('-i', '--with-id', default=False, is_flag=True,
help='change directory with folder id')
def cmd_ls(path, with_id):
if path is None:
if with_id:
            print('Id must be specified.', file=sys.stderr)
            sys.exit(1)
else:
cwd = getcwd()
id = cwd['id']
else:
if with_id:
id = path
else:
id = get_id_by_path(path)
config_dir = _get_current_config_dir()
cmd = '{exe} --config {config} list'
cmd += ''' --query "trashed = false and '{pid}' in parents"'''
cmd += ' --max 100 --name-width 0'
cmd = cmd.format(exe=DRIVE_EXE, config=config_dir, pid=id)
stdout = util.check_output(cmd)
lines = stdout.splitlines()
header = lines[0]
start = re.search('Name', header).start()
end = re.search('Type', header).start()
if path is None or with_id:
path = ''
print('\n'.join(osp.join(path, l[start:end].strip())
for l in stdout.splitlines()[1:]))
@cli.command(name='mkdir', help='make directory')
@click.argument('dirname', required=True)
def cmd_mkdir(dirname):
cwd = getcwd()
config_dir = _get_current_config_dir()
cmd = '{exe} --config {config} mkdir {name} --parent {pid}'.format(
exe=DRIVE_EXE, config=config_dir, name=dirname, pid=cwd['id'])
subprocess.call(cmd, shell=True)
@cli.command(name='pwd', help='print current working directory')
@click.option('--show-id', default=False, is_flag=True,
help='show current directory id')
def cmd_pwd(show_id):
cwd = getcwd()
if show_id:
print(cwd['id'])
return
print(get_path_by_id(cwd['id']))
def get_id_by_path(path):
cwd = getcwd()
if path.startswith('/'):
cwd['id'] = cwd['home_id']
for d in path.split('/'):
if not d:
continue
if d == '..':
id = get_parent_id(cwd['id']) or cwd['id']
elif d == '.':
id = cwd['id']
else:
id = get_id_by_name(d, cwd=cwd)
if id is None:
sys.stderr.write('directory {name} does not exist\n'
.format(name=d))
sys.exit(1)
cwd['id'] = id
return cwd['id']
def get_id_by_name(name, cwd=None):
cwd = cwd or getcwd()
config_dir = _get_current_config_dir()
cmd = '{exe} --config {config} list'
cmd += ''' --query "trashed = false and '{pid}' in parents"'''
cmd += ' --max 100 --name-width 0'
cmd = cmd.format(exe=DRIVE_EXE, config=config_dir, pid=cwd['id'])
stdout = util.check_output(cmd)
lines = stdout.splitlines()
header = lines[0]
start = re.search('Name', header).start()
end = re.search('Type', header).start()
for l in stdout.splitlines()[1:]:
id, title = l[:start].strip(), l[start:end].strip()
if name == title:
return id
def get_parent_id(id):
config = _get_current_config_dir()
cmd = '{exe} --config {config} info {id}'.format(
exe=DRIVE_EXE, config=config, id=id)
stdout = util.check_output(cmd)
for l in stdout.splitlines():
if l.startswith('Parents: '):
return l.split()[-1]
@cli.command(name='cd', help='change directory')
@click.argument('dirname', required=False)
@click.option('-i', '--with-id', default=False, is_flag=True,
help='change directory with folder id')
def cmd_cd(dirname, with_id):
cwd = getcwd()
if with_id:
id = dirname
cwd['id'] = id
elif dirname is None:
cwd['id'] = cwd['home_id']
else:
cwd['id'] = get_id_by_path(dirname)
yaml.safe_dump(cwd, open(CONFIG_FILE, 'w'), default_flow_style=False)
@cli.command(name='open', help='open current site on browser')
@click.argument('filename', required=False)
@click.option('-i', '--with-id', default=False, is_flag=True,
help='Specify id instead of name.')
def cmd_open(filename, with_id):
cwd = getcwd()
if platform.uname()[0] == 'Linux':
open_exe = 'gnome-open'
elif platform.uname()[0] == 'Darwin':
open_exe = 'open'
else:
sys.stderr.write('Not supported os\n')
sys.exit(1)
if filename is None:
file_id = cwd['id']
else:
file_id = filename if with_id else get_id_by_path(filename)
cmd = "{exe} 'https://drive.google.com/drive/u/1/folders/{id}'"\
.format(exe=open_exe, id=file_id)
subprocess.call(cmd, shell=True)
@cli.command(name='share', help='share file')
@click.argument('filename', required=True)
@click.option('--role', default='reader',
type=click.Choice(['owner', 'writer', 'commenter', 'reader']),
show_default=True)
@click.option('--type', default='anyone',
type=click.Choice(['user', 'group', 'domain', 'anyone']),
show_default=True)
@click.option('--email', help='email address for user or group type')
@click.option('--domain', help='domain for domain type')
@click.option('--discoverable', is_flag=True, help='flag for searchablity')
# FIXME: --revoke does not work at current
# @click.option('--revoke', is_flag=True, help='flag to revoke access')
def cmd_share(filename, role, type, email, domain, discoverable):
if type in ['user', 'group'] and email is None:
print('--email is required for user or group type.')
sys.exit(1)
elif type == 'domain' and domain is None:
print('--domain is required for domain type.')
sys.exit(1)
config_dir = _get_current_config_dir()
id = get_id_by_path(filename)
cmd = '{exe} --config {config} share'.format(
exe=DRIVE_EXE, config=config_dir)
if role:
cmd += ' --role {role}'.format(role=role)
if type:
cmd += ' --type {type}'.format(type=type)
if email:
cmd += ' --email {email}'.format(email=email)
if domain:
cmd += ' --domain {domain}'.format(domain=domain)
if discoverable:
cmd += ' --discoverable'
# if revoke:
# cmd += ' --revoke'
cmd += ' {id}'.format(id=id)
subprocess.call(cmd, shell=True)
@cli.command(name='info', help='show file information')
@click.argument('filename', required=True)
@click.option('-i', '--with-id', default=False, is_flag=True,
help='change directory with folder id')
def cmd_info(filename, with_id):
if with_id:
id = filename
else:
id = get_id_by_path(filename)
config_dir = _get_current_config_dir()
cmd = '{exe} --config {config} info {id}'.format(
exe=DRIVE_EXE, config=config_dir, id=id)
stdout = util.check_output(cmd)
for line in stdout.splitlines():
if line.startswith('ViewUrl:'):
print('ViewUrl: https://drive.google.com/open?id={id}'
.format(id=id))
elif line.startswith('DownloadUrl:'):
url = 'https://drive.google.com/uc?id={id}'.format(id=id)
print('DownloadUrl: {url}'.format(url=url))
print('DownloadCommand: gdown {url}'.format(url=url))
else:
print(line)
def main():
cli()
if __name__ == '__main__':
main()
``` |
{
"source": "JOSUEXLION/prog-IV",
"score": 4
} |
#### File: prog-IV/tareas/tarea6.py
```python
import os
j=0
aje=0
atle=0
bal=0
fut=0
kar=0
nat=0
voll=0
fla=0
pin=0
otro=0
def mostrarMenu():
print('\n1. Ajedrez')
print('2. Atletismo')
print('3. Baloncesto')
print('4. Futbol')
print('5. Karate')
print('6. Natacion')
print('7. Volleyball')
print('8. Flag')
print('9. Ping Pong')
print('10. Otros')
print()
for i in range(0, 10):
os.system('cls')
j= (j+1)
mostrarMenu()
print("Persona #"+str(j))
opc = int(input("\nElija una opcion: "))
if opc == 1:
aje= (aje+1)
elif opc == 2:
atle= (atle+1)
elif opc == 3:
bal= (bal+1)
elif opc ==4:
fut= (fut+1)
elif opc ==5:
kar= (kar+1)
elif opc ==6:
nat=(nat+1)
    elif opc == 7:
voll= (voll+1)
elif opc == 8:
fla= (fla+1)
elif opc == 9:
pin= (pin+1)
elif opc ==10:
otro= (otro+1)
elif (opc<1) or (opc>10):
print('introduzca numero entre las opciones')
input()
os.system('cls')
print("PORCENTAJES:")
print("1.Ajedrez: ", str((aje/10)*100)+"%")
print("2.Atletismo: ", str((atle/10)*100)+"%")
print("3.Baloncesto: ",str((bal/10)*100)+"%")
print("4.Futbol: ", str((fut/10)*100)+"%")
print("5.Karate: ", str((kar/10)*100)+"%")
print("6.Natacion: ", str((nat/10)*100)+"%")
print("7.Volleyball: ",str((voll/10)*100)+"%")
print("8.Flag: ", str((fla/10)*100)+"%")
print("9.Pin Pong: ", str((pin/10)*100)+"%")
print("10.Otros: ", str((otro/10)*100)+"%")
``` |
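The ten parallel counters and the duplicated if/elif ladder above can be collapsed into a single dictionary keyed by option number. A hedged sketch (the sport names are copied from the menu; `tally` is a hypothetical helper, not part of the original script):
```python
# Hypothetical dict-based rewrite of the survey tally.
SPORTS = {1: 'Ajedrez', 2: 'Atletismo', 3: 'Baloncesto', 4: 'Futbol', 5: 'Karate',
          6: 'Natacion', 7: 'Volleyball', 8: 'Flag', 9: 'Ping Pong', 10: 'Otros'}

def tally(votes):
    counts = {name: 0 for name in SPORTS.values()}
    for v in votes:
        if v in SPORTS:              # out-of-range answers are simply ignored
            counts[SPORTS[v]] += 1
    total = len(votes)
    return {name: 100 * n / total for name, n in counts.items()}

print(tally([1, 4, 4, 7, 10, 4, 2, 9, 9, 6]))  # percentages per sport
```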
{
"source": "Josue-Zea/tytus",
"score": 4
} |
#### File: fase2/team04/campo.py
```python
import tkinter as tk
from tkinter import Menu, Tk, Text, DISABLED, RAISED,Frame, FLAT, Button, Scrollbar, Canvas, END
from tkinter import messagebox as MessageBox
from tkinter import ttk
import tkinter.simpledialog
from tkinter import *
from tkinter import font as tkFont
# Class that draws the line numbers
class TextLineNumbers(Canvas):
def __init__(self, *args, **kwargs):
Canvas.__init__(self, *args, **kwargs)
self.textwidget = None
def attach(self, text_widget):
self.textwidget = text_widget
def redraw(self, *args):
'''redraw line numbers'''
self.delete("all")
i = self.textwidget.index("@0,0")
while True :
dline= self.textwidget.dlineinfo(i)
if dline is None: break
y = dline[1]
linenum = str(i).split(".")[0]
self.create_text(2,y,anchor="nw", text=linenum)
i = self.textwidget.index("%s+1line" % i)
# Class for the text field (reports edits through a Tcl proxy)
class CustomText(Text):
def __init__(self, *args, **kwargs):
Text.__init__(self, *args, **kwargs)
# create a proxy for the underlying widget
self._orig = self._w + "_orig"
self.tk.call("rename", self._w, self._orig)
self.tk.createcommand(self._w, self._proxy)
def _proxy(self, *args):
# let the actual widget perform the requested action
cmd = (self._orig,) + args
result = self.tk.call(cmd)
# generate an event if something was added or deleted,
# or the cursor position changed
if (args[0] in ("insert", "replace", "delete") or
args[0:3] == ("mark", "set", "insert") or
args[0:2] == ("xview", "moveto") or
args[0:2] == ("xview", "scroll") or
args[0:2] == ("yview", "moveto") or
args[0:2] == ("yview", "scroll")
):
self.event_generate("<<Change>>", when="tail")
# return what the actual widget returned
return result
# Frame that ties everything together
class Campo(Frame):
def __init__(self, *args, **kwargs):
Frame.__init__(self, *args, **kwargs)
self.text = CustomText(self)
self.linenumbers = TextLineNumbers(self, width=30)
self.linenumbers.attach(self.text)
self.linenumbers.pack(side="left", fill="y")
self.text.pack(side="right", fill="both", expand=True)
self.text.bind("<<Change>>", self._on_change)
self.text.bind("<Configure>", self._on_change)
# clone the text widget font and use it as a basis for some tag
bold_font = tkFont.Font(self.text, self.text.cget("font"))
bold_font.configure(weight="bold")
self.text.tag_configure("bold", font=bold_font)
self.text.tag_configure("reserve", foreground="blue", underline=False)
def _on_change(self, event):
self.linenumbers.redraw()
# Dialog class for creating a user
class MyDialog(tkinter.simpledialog.Dialog):
def body(self, master):
Label(master, text="Username:").grid(row=0)
Label(master, text="Password:").grid(row=1)
self.result = []
self.accept = False
self.e1 = Entry(master)
self.e2 = Entry(master, show="*")
self.e1.grid(row=0, column=1)
self.e2.grid(row=1, column=1)
return self.e1 # initial focus
def apply(self):
first = self.e1.get()
second = self.e2.get()
self.accept = True
self.result = [first, second]
```
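A short driver for the widget above; none of this is in the original file, but it shows how `Campo` is meant to be mounted if appended to the same module:
```python
# Hypothetical usage of the Campo editor defined above.
if __name__ == '__main__':
    root = Tk()
    root.title('Editor with line numbers')
    editor = Campo(root)
    editor.pack(side='top', fill='both', expand=True)
    # CustomText fires <<Change>> on insert, so the gutter redraws itself
    editor.text.insert('end', 'SELECT * FROM clientes;\n')
    root.mainloop()
```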
#### File: team01/Grupo1/reporteIndexGen.py
```python
from imports import *
def reporteTablaIndices(datos):
dic=[]
cad=""
    # walk the list of created indices
for i in index_create.indices:
nombreindice=""
namecom=""
tablaname=""
unique = ""
colname = ""
tipoAscDes =""
specs = ""
tipoindice = ""
        # read this index's entry straight from the symbol table by its key
        nombreindice = datos.tablaSimbolos[i]['nombreindice']
        namecom = datos.tablaSimbolos[i]['namecom']
        tablaname = datos.tablaSimbolos[i]['tablaname']
        unique = datos.tablaSimbolos[i]['unique']
        colname = datos.tablaSimbolos[i]['colname']
        tipoAscDes = datos.tablaSimbolos[i]['tipoAscDes']
        specs = datos.tablaSimbolos[i]['specs']
        tipoindice = datos.tablaSimbolos[i]['tipoindice']
#para sacar los parametros
# for p in range(0,len(datos.tablaSimbolos[i]['parametros'])):
# #recorro la lista de parametros hago esplit por coma
# param=str(datos.tablaSimbolos[i]['parametros'][p]).split(",")
# #dependiendo el numero de parametros recorremos la clave
# param1+=","+str(param[0][9:])
#para sacar el tipo
# t=str(datos.tablaSimbolos[i]['tipo'])[1 : -1].split(",")
# #Clave tipo split por coma y hagarro el primero y le quito 7 caracteres 'type':
# tip=str(t[0][7:])
cad=str(nombreindice)+":"+str(namecom)+":"+str(tablaname)+":"+str(unique) +":"+str(colname) +":"+str(tipoAscDes ) +":"+str(specs) +":"+str(tipoindice)
param1=""
dic.append(cad)
cad=""
print(dic)
f = open("./Reportes/Reporte_TablaSimbolosIndices.html", "w")
f.write('<!DOCTYPE HTML5>\n')
f.write('<html>\n')
f.write('<head>\n')
f.write('<title>Indices</title>\n')
f.write('<style type="text/css">\n')
f.write('.styled-table {\n')
f.write('border-collapse: collapse;\n')
f.write('margin:0 auto;\n')
f.write('font-size: 0.9em;\n')
f.write('font-family: sans-serif;\n')
f.write('min-width: 400px;\n')
f.write('box-shadow: 0 0 20px rgba(0, 0, 0, 0.15);}\n')
f.write('.styled-table thead tr {\n')
f.write('background-color: #009879;\n')
f.write('color: #ffffff;\n')
f.write('text-align: left;}\n')
f.write('.styled-table th,\n')
f.write('.styled-table td {\n')
f.write('padding: 12px 15px;}\n')
f.write('.styled-table tbody tr {\n')
f.write('border-bottom: 1px solid #dddddd;}\n')
f.write('.styled-table tbody tr:nth-of-type(even) {\n')
f.write('background-color: #f3f3f3;}\n')
f.write('.styled-table tbody tr:last-of-type {\n')
f.write('border-bottom:4px solid #009879;}\n')
f.write('.styled-table tbody tr.active-row {\n')
f.write('font-weight: bold;\n')
f.write('color: black;}\n')
f.write('H2 { text-align: center}\n')
f.write('</style>\n')
f.write('</head>\n')
f.write('<body style="background-color:grey;">\n')
f.write('<h2>Indices en la Tabla de simbolos</h2>\n')
f.write('<div style="text-align:center;">\n')
f.write('<table class="styled-table">\n')
f.write('<thead>\n')
f.write('<tr>\n')
f.write('<th>INSTRUCCION</th>\n')
f.write('<th>NOMBRE INDICE</th>\n')
f.write('<th>TABLA</th>\n')
f.write('<th>UNIQUE</th>\n')
f.write('<th>COLUMNA</th>\n')
f.write('<th>TIPO ASC/DESC</th>\n')
f.write('<th>ORDER</th>\n')
f.write('<th>TIPO INDICE</th>\n')
f.write('</tr>\n')
f.write('</thead>\n')
f.write('<tbody>\n')
# nombreindice=datos.tablaSimbolos[i]['nombreindice']
# namecom=datos.tablaSimbolos[i]['namecom']
# tablaname=datos.tablaSimbolos[i]['tablaname']
# unique=datos.tablaSimbolos[i]['unique']
# colname=datos.tablaSimbolos[i]['colname']
# tipoAscDes=datos.tablaSimbolos[i]['tipoAscDes']
# specs=datos.tablaSimbolos[i]['specs']
# tipoindice=datos.tablaSimbolos[i]['tipoindice']
    # iterate the created indices, alternating row styles
p1=0
for i in index_create.indices:
if p1%2==0:
f.write('<tr>\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['namecom'])+'</td>\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['nombreindice'])+'</td>\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['tablaname'])+'</td>\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['unique'])+'</td>\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['colname'])+'</td>\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['tipoAscDes'])+'</td>\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['specs'])+'</td>\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['tipoindice'])+'</td>\n')
f.write('</tr>\n')
else:
f.write('<tr class="active-row">\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['namecom'])+'</td>\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['nombreindice'])+'</td>\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['tablaname'])+'</td>\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['unique'])+'</td>\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['colname'])+'</td>\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['tipoAscDes'])+'</td>\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['specs'])+'</td>\n')
f.write('<td>'+str( datos.tablaSimbolos[i]['tipoindice'])+'</td>\n')
f.write('</tr>\n')
f.write('</tbody>\n')
f.write('</table>\n')
f.write('</div>\n')
#Termina procedimiento
f.write('</body>\n')
f.write('</html> \n')
f.close()
```
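The long run of `f.write` calls above can be shrunk considerably with a row template; a sketch under the assumption that each symbol-table entry is a dict with exactly these eight keys, as the lookups in `reporteTablaIndices` suggest:
```python
# Hypothetical template-based row renderer for the index report.
ROW = ('<tr><td>{namecom}</td><td>{nombreindice}</td><td>{tablaname}</td>'
       '<td>{unique}</td><td>{colname}</td><td>{tipoAscDes}</td>'
       '<td>{specs}</td><td>{tipoindice}</td></tr>')

KEYS = ['namecom', 'nombreindice', 'tablaname', 'unique',
        'colname', 'tipoAscDes', 'specs', 'tipoindice']

def render_rows(symbols):
    """symbols: iterable of dicts shaped like the tablaSimbolos entries."""
    return '\n'.join(ROW.format(**{k: s[k] for k in KEYS}) for s in symbols)

print(render_rows([{k: 'demo' for k in KEYS}]))
```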
#### File: fase2/team03/query_tool.py
```python
from tkinter import *
import shutil
from tkinter import ttk
from tkinter.filedialog import askopenfilename
from tkinter import messagebox as mb
from tkinter import Tk, Menu, messagebox, filedialog, ttk, Label, scrolledtext, INSERT, END, Button, Scrollbar, RIGHT, \
Y, Frame, Canvas, HORIZONTAL, VERTICAL, simpledialog, font
import tkinter as tk
from tabulate import tabulate
import os
from grammarReview import *
class query_tool:
def __init__(self, window):
self.vp = window
self.rutaArchivo = ""
self.nombreArchivo = ""
self.extension = ""
self.texto = ""
self.txtConsola = ""
self.inputText = ""
        ###################################################### GRAPHICAL INTERFACE ######################################################
# Ventana Principal
self.vp.title("Tytus - Query Tool")
self.barraMenu = Menu(self.vp)
self.vp.configure(menu=self.barraMenu)
# Labels
tk.Label(self.vp, fg="white", bg="#154360", text="TYTUS - Query Tool", font=("Arial Bold", 15)).grid(row=0,
column=1,
sticky=E + W)
tk.Label(self.vp, fg="white", bg="#154360", text="Linea : 1 Columna : 1", font=("Arial Bold", 10)).grid(row=2,
column=1,
sticky=E + W)
# Canvas
self.canvas1 = tk.Canvas(self.vp, width=1300, height=300)
self.canvas1.grid(row=4, column=1, sticky=E + W)
self.canvas2 = tk.Canvas(self.vp, width=1200, height=275)
self.canvas2.grid(row=5, column=1, sticky='news')
# Frames
self.frame1 = tk.LabelFrame(self.canvas1, bg="#154360", text="Entrada:", font=("Arial Bold", 10),
foreground="white")
self.frame1.place(relx=0, rely=0, relwidth=1, relheight=1)
self.frame2 = tk.LabelFrame(self.canvas2, bg="#154360", text="Resultados:", font=("Arial Bold", 10),
foreground="white")
self.frame2.place(relx=0, rely=0, relwidth=1, relheight=1)
# TextArea
self.entrada = tk.Text(self.frame1, font=("Courier New", 10), foreground="#154360")
self.entrada.place(relx=0.02, rely=0.05, relwidth=0.96, relheight=0.9)
self.scrollbarEntradaX = tk.Scrollbar(self.entrada, orient=tk.HORIZONTAL)
self.scrollbarEntradaX.pack(side="bottom", fill="x")
self.scrollbarEntradaY = tk.Scrollbar(self.entrada)
self.scrollbarEntradaY.pack(side="right", fill="y")
self.entrada.config(wrap="none", xscrollcommand=self.scrollbarEntradaX.set,
yscrollcommand=self.scrollbarEntradaY.set)
self.scrollbarEntradaY.config(command=self.entrada.yview)
self.scrollbarEntradaX.config(command=self.entrada.xview)
# Consola
self.consola = tk.Text(self.frame2, font=("Courier New", 9), background="black", foreground="yellow")
self.consola.place(relx=0.02, rely=0.05, relwidth=0.96, relheight=0.9)
self.scrollbarConsolaX = tk.Scrollbar(self.consola, orient=tk.HORIZONTAL)
self.scrollbarConsolaX.pack(side="bottom", fill="x")
self.scrollbarConsolaY = tk.Scrollbar(self.consola)
self.scrollbarConsolaY.pack(side="right", fill="y")
self.consola.config(wrap="none", xscrollcommand=self.scrollbarConsolaX.set,
yscrollcommand=self.scrollbarConsolaY.set)
self.scrollbarConsolaY.config(command=self.consola.yview)
self.scrollbarConsolaX.config(command=self.consola.xview)
# Menu Archivo
self.archivoMenu = Menu(self.barraMenu, tearoff=0)
self.archivoMenu.add_command(label="New", command=self.NewFile)
self.archivoMenu.add_separator()
self.archivoMenu.add_command(label="Open file", command=self.OpenFile)
self.archivoMenu.add_separator()
self.archivoMenu.add_command(label="Save", command=self.Save)
self.archivoMenu.add_command(label="Save As...", command=self.SaveAs)
self.archivoMenu.add_separator()
self.archivoMenu.add_command(label="Exit", command=self.vp.quit)
# Menu Edit
self.editMenu = Menu(self.barraMenu, tearoff=0)
self.editMenu.add_command(label="Cut", \
accelerator="Ctrl+X", \
command=lambda: \
self.entrada.event_generate('<<Cut>>'))
self.editMenu.add_command(label="Copy", \
accelerator="Ctrl+C", \
command=lambda: \
self.entrada.event_generate('<<Copy>>'))
self.editMenu.add_command(label="Paste", \
accelerator="Ctrl+V", \
command=lambda: \
self.entrada.event_generate('<<Paste>>'))
# Menu Run
self.runMenu = Menu(self.barraMenu, tearoff=0)
self.runMenu.add_command(label="Run", command=self.Run, accelerator="Ctrl+R")
self.runMenu.add_command(label="Generate", command=self.generate, accelerator="Ctrl+G")
self.runMenu.add_command(label="Clear Execution", command=self.clear_files, accelerator="Ctrl+Z")
# Menu Archivo
self.reportsMenu = Menu(self.barraMenu, tearoff=0)
self.reportsMenu.add_command(label="Symbol Table", command=self.getST)
self.reportsMenu.add_command(label="Optimizations", command=self.getOptimization)
self.reportsMenu.add_separator()
self.reportsMenu.add_command(label="Errors", command=self.openErrors)
self.reportsMenu.add_separator()
self.reportsMenu.add_command(label="AST", command=self.openAST)
self.reportsMenu.add_separator()
self.reportsMenu.add_command(label="DDS", command=self.openBNF)
self.reportsMenu.add_separator()
self.reportsMenu.add_command(label="Asc Grammar", command=self.openBNFasc)
self.reportsMenu.add_command(label="Desc Grammar", command=self.openBNFdesc)
self.reportsMenu.add_command(label="Grammar Analysis", command=self.openBNFanalysis)
# Menu Help
self.helpMenu = Menu(self.barraMenu, tearoff=0)
self.helpMenu.add_command(label="About", command=self.seeAbout)
self.helpMenu.add_separator()
self.helpMenu.add_command(label="Technical Manual", command=self.openTechnical)
self.helpMenu.add_command(label="User Manual", command=self.openUser)
# Barra de Menú
self.barraMenu.add_cascade(label="File", menu=self.archivoMenu)
self.barraMenu.add_cascade(label="Edit", menu=self.editMenu)
self.barraMenu.add_cascade(label="Run", menu=self.runMenu)
self.barraMenu.add_cascade(label="Reports", menu=self.reportsMenu)
self.barraMenu.add_cascade(label="Help", menu=self.helpMenu)
self.vp.columnconfigure(0, weight=0)
self.vp.columnconfigure(1, weight=1)
window.bind('<Control-r>', self.run_listener)
window.bind('<Control-g>', self.generate_listener)
window.bind('<Control-z>', self.clear_listener)
def callback(event):
            '''
            Update the caret position label under the input text area.
            '''
puntero = self.entrada.index(tk.INSERT)
p = puntero.split(".")
col = p[1]
t = "Linea: " + p[0] + " Columna: " + str(int(col) + 1)
tk.Label(self.vp, fg="white", bg="#154360", text=t, font=("Arial Bold", 10)).grid(row=2, column=1,
sticky=E + W)
self.entrada.bind("<Button-1>", callback)
self.entrada.bind("<Return>", callback)
self.entrada.bind("<Any-KeyPress>", callback)
self.entrada.bind("<Motion>", callback)
self.entrada.bind("<FocusIn>", callback)
self.entrada.focus()
    ###################################################### METHODS ######################################################
def NewFile(self):
        '''
        Create a new blank file.
        '''
self.rutaArchivo = ""
self.texto = ""
self.extension = ""
self.entrada.delete(1.0, END)
self.consola.delete(1.0, END)
def OpenFile(self):
        '''
        Open a local plain-text file and load its contents into the input area.
        '''
self.txtConsola = ""
self.consola.delete(1.0, END)
self.rutaArchivo = filedialog.askopenfilename(title="Open File")
getNameAndExtensionFile(self)
fileAbierto = open(self.rutaArchivo, encoding="utf-8")
self.texto = fileAbierto.read()
self.entrada.delete(1.0, END)
self.entrada.insert(INSERT, self.texto)
        fileAbierto.close()
def SaveAs(self):
        '''
        Save the contents of the input area to a new file at a user-chosen path.
        '''
ruta = filedialog.asksaveasfilename(title="Save As...")
fguardar = open(ruta, "w+", encoding="utf-8")
fguardar.write(self.entrada.get(1.0, END))
fguardar.close()
self.rutaArchivo = ruta
getNameAndExtensionFile(self)
def Save(self):
        '''
        Save the current text to the open file.
        '''
        if self.rutaArchivo == "":
            self.SaveAs()
else:
archivoA = open(self.rutaArchivo, "w", encoding="utf-8")
archivoA.write(self.entrada.get(1.0, END))
archivoA.close()
def run_listener(self, event):
self.Run()
def generate_listener(self, event):
self.generate()
def clear_listener(self, event):
self.clear_files()
def clear_files(self):
if os.path.exists('data'):
shutil.rmtree('data')
if os.path.exists('opt_report.txt'):
os.remove('opt_report.txt')
def Run(self):
        '''
        Send the text in the input area to the project's main analyzer.
        '''
self.consola.delete(1.0, END)
self.inputText = self.entrada.get("1.0", "end")
grammarReview(self.inputText)
gr = grammarReview.get_result(self)
self.consola.insert(INSERT, gr)
def generate(self):
self.consola.delete(1.0, END)
self.inputText = self.entrada.get("1.0", "end")
generation = GrammarGenerate(self.inputText)
result = generation.get_result()
self.consola.insert(INSERT, result)
def openErrors(self):
ST0 = '\n\n\n============== ERROR REPORT ==============\n'
ST1 = grammarReview.report_errors(self)
ST2 = '\n'
self.consola.insert(INSERT, ST0)
self.consola.insert(INSERT, ST1)
self.consola.insert(INSERT, ST2)
def getST(self):
ST0 = '\n\n\n+------------- SYMBOL TABLE REPORT --------------+\n'
ST1 = grammarReview.getTablaTabulada(self)
ST2 = '\n'
self.consola.insert(INSERT, ST0)
self.consola.insert(INSERT, ST1)
self.consola.insert(INSERT, ST2)
def getOptimization(self):
ST0 = '\n\n\n+------------- OPTIMIZATION REPORT --------------+\n'
# ST1 = grammarReview.getTablaTabulada(self)
file = open('opt_report.txt', 'r')
ST1 = file.read()
ST2 = '\n'
self.consola.insert(INSERT, ST0)
self.consola.insert(INSERT, ST1)
self.consola.insert(INSERT, ST2)
def seeAbout(self):
mb.showinfo("About",
"TYTUS\n Universidad de San Carlos de Guatemala \nOLC 2\nCuso de vacaciones \nDiciembre \nAño 2020\nCoautores: \n\t201020126 - <NAME> \n\t201020252 - <NAME> \n\t201020260 - <NAME> \n\t201020697 - <NAME> ")
def openAST(self):
a = os.popen('grap.png')
def openBNF(self):
a = os.popen('reportGrammar.md')
    def openBNFasc(self):
        a = os.popen('docs\\grammars\\gramatica-ascendente.md')
    def openBNFdesc(self):
        a = os.popen('docs\\grammars\\gramatica-descendente.md')
    def openBNFanalysis(self):
        a = os.popen('docs\\grammars\\grammar-analysis.md')
def openTechnical(self):
a = os.popen('docs\\manuals\\technical-manual.md')
def openUser(self):
a = os.popen('docs\\manuals\\user-manual.md')
def getNameAndExtensionFile(self):
rutaSpliteada = self.rutaArchivo.split("/")
ultimaPos = len(rutaSpliteada) - 1
self.nombreArchivo = rutaSpliteada[ultimaPos]
ext = self.nombreArchivo.split(".")
self.extension = ext[1]
def CleanConsole(self):
self.consola.delete(1.0, END)
self.consola.insert(INSERT, self.txtConsola)
if __name__ == '__main__':
window = Tk()
window.resizable(1, 0)
app = query_tool(window)
window.mainloop()
```
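One detail of the menu wiring above deserves a note: in Tkinter, `accelerator='Ctrl+R'` only draws the shortcut text next to the menu entry; the `window.bind('<Control-r>', ...)` calls are what actually dispatch the key. A minimal sketch of the same pattern in isolation:
```python
# Stand-alone sketch of the accelerator + bind pattern used by query_tool.
import tkinter as tk

def run_query(event=None):
    print('run triggered')  # stands in for query_tool.Run

root = tk.Tk()
menubar = tk.Menu(root)
run_menu = tk.Menu(menubar, tearoff=0)
run_menu.add_command(label='Run', command=run_query, accelerator='Ctrl+R')
menubar.add_cascade(label='Run', menu=run_menu)
root.config(menu=menubar)
root.bind('<Control-r>', run_query)  # the label alone does not bind the key
root.mainloop()
```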
#### File: Instrucciones/FunctionTrigonometric/Acosd.py
```python
import math
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.TablaSimbolos.Tipo import Tipo_Dato, Tipo
from Instrucciones.Excepcion import Excepcion
class Acosd(Instruccion):
def __init__(self, valor, strGram, linea, columna):
Instruccion.__init__(self,Tipo(Tipo_Dato.DOUBLE_PRECISION),linea,columna,strGram)
self.valor = valor
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
""" resultado = self.valor.ejecutar(tabla,arbol)
if isinstance(resultado, Excepcion):
return resultado
if self.valor.tipo.tipo != Tipo_Dato.SMALLINT and self.valor.tipo.tipo != Tipo_Dato.INTEGER and self.valor.tipo.tipo != Tipo_Dato.BIGINT and self.valor.tipo.tipo != Tipo_Dato.DECIMAL and self.valor.tipo.tipo != Tipo_Dato.NUMERIC and self.valor.tipo.tipo != Tipo_Dato.REAL and self.valor.tipo.tipo != Tipo_Dato.DOUBLE_PRECISION:
error = Excepcion('42883',"Semántico","No existe la función acosd("+self.valor.tipo.toString()+")",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return error
try:
if resultado == 1: return 0
if resultado == 0: return 90
if resultado == -1: return 180
return math.degrees(math.acos(resultado))
except ValueError as c:
error = Excepcion('22003',"Semántico","La entrada está fuera de rango",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return error """
return math.degrees(math.acos(self.valor))
def getCodigo(self, tabla, arbol):
result = self.valor.getCodigo(tabla, arbol)
value_list = []
value_list.append(result['dir'])
value_list.append(f"\"{self.strGram}\"")
value_list.append(self.linea)
value_list.append(self.columna)
native_result = arbol.getExpressionCode(value_list, 'acosd')
codigo = result['codigo']
codigo += native_result['codigo']
return {'codigo': codigo, 'dir': native_result['dir']}
def toString(self):
return f"ACOS({self.valor})"
```
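The commented-out special cases for 1, 0 and -1 exist because chaining `math.acos` with `math.degrees` picks up floating-point noise near exact angles; a quick check:
```python
import math

def acosd(x):
    """Degrees version of acos, mirroring the Acosd node above."""
    return math.degrees(math.acos(x))

print(acosd(1.0), acosd(-1.0))  # 0.0 180.0
print(acosd(0.5))               # ~60, typically with a tiny rounding tail
```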
#### File: Instrucciones/plsql/clase_if.py
```python
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.Excepcion import Excepcion
class clase_if(Instruccion):  # block and _else_block are lists
def __init__(self, expresion, block, _else_block,strGram, linea, columna):
self.expresion = expresion
self.block = block
self._else_block = _else_block
self.linea = linea
self.columna = columna
def ejecutar(self, tabla, arbol):
pass
def getCodigo(self, tabla, arbol):
label_true = arbol.getLabel()
label_false = arbol.getLabel()
label_salida = arbol.getLabel()
result = self.expresion.getCodigo(tabla, arbol)
codigo = result['codigo']
codigo += f"\tif({result['dir']}): goto .{label_true}\n"
codigo += f"\tgoto .{label_false}\n"
codigo += f"\tlabel .{label_true}\n"
codigo += self.block.getCodigo(tabla, arbol)
codigo += f"\tgoto .{label_salida}\n"
codigo += f"\tlabel .{label_false}\n"
codigo += self._else_block.getCodigo(tabla, arbol) if self._else_block else ""
codigo += f"\tlabel .{label_salida}\n"
return codigo
```
#### File: Instrucciones/plsql/statement.py
```python
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.Excepcion import Excepcion
class Statement(Instruccion):
def __init__(self, dec, expresion, strGram,linea, columna):
self.dec = dec
self.expresion = expresion
self.linea = linea
self.columna = columna
def ejecutar(self, tabla, arbol):
pass
def getCodigo(self, tabla, arbol):
return ""
```
#### File: Instrucciones/Sql_index/Dropindex.py
```python
from storageManager.jsonMode import *
from Instrucciones.Tablas.Tablas import Tablas
from Instrucciones.TablaSimbolos.Tipo import Tipo
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.TablaSimbolos.Tabla import Tabla
class DropIndex(Instruccion):
def __init__(self,num,nombre,tipo,col,opcion,rest,linea,columna,strGram):
Instruccion.__init__(self,tipo,linea,columna,strGram)
self.num = num
self.nombre = nombre
self.tipo = tipo
self.col = col
self.opcion = opcion
self.rest = rest
self.linea = linea
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
db = arbol.getBaseDatos()
bandera = 0
index = 0
lista_index = arbol.getIndex()
for x in range(0,len(lista_index)):
for item in lista_index[x]:
if item['Base'] == db:
if item['Nombre'] == self.col:
bandera = 1
index = x
break
if(bandera == 1):
break
if bandera == 1 :
lista_index.pop(index)
print(f"DROP INDEX : {self.col} SE ELIMINO CORRECTAMENTE")
else:
print(f"ERROR DROP: El index no existe o es una base de datos equivocada")
def getCodigo(self, tabla, arbol):
db = arbol.getBaseDatos()
bandera = 0
index = 0
lista_index = arbol.getIndex()
for x in range(0,len(lista_index)):
for item in lista_index[x]:
if item['Base'] == db:
if item['Nombre'] == self.col:
bandera = 1
index = x
break
if(bandera == 1):
break
if bandera == 1 :
lista_index.pop(index)
print(f"DROP INDEX : {self.col} SE ELIMINO CORRECTAMENTE")
else:
print(f"ERROR DROP: El index no existe o es una base de datos equivocada")
return ""
```
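`ejecutar` and `getCodigo` above repeat the same index scan verbatim; a hedged refactor sketch that both could call (the helper name is hypothetical):
```python
# Hypothetical shared helper for DropIndex's duplicated search loop.
def _find_index(lista_index, db, col):
    """Return the position of the index named `col` in database `db`, or -1."""
    for x, lista in enumerate(lista_index):
        for item in lista:
            if item['Base'] == db and item['Nombre'] == col:
                return x
    return -1
```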
#### File: Instrucciones/TablaSimbolos/Arbol.py
```python
from storageManager.jsonMode import *
class Arbol():
    'Stores every instruction, error and message produced by the run.'
def __init__(self, instrucciones):
self.instrucciones = instrucciones
self.excepciones = []
self.consola = []
self.bdUsar = None
self.listaBd = []
self.where = False
self.update = False
self.relaciones = False
self.nombreTabla = None
self.tablaActual = []
self.columnasActual = []
self.lEnum = []
self.lRepDin = []
self.comprobacionCreate = False
self.columnaCheck = None
self.order = None
self.temporal = -1
self.label = -1
self.index = []
self.ts = {}
self.scope = None
self.topt = []
def setEnum(self, nuevo):
self.lEnum.append(nuevo)
#devuelve un objeto enum
def getEnum(self, nombre):
for x in range(0, len(self.lEnum)):
if nombre == self.lEnum[x].id:
return self.lEnum[x]
return None
def setListaBd(self, nueva):
self.listaBd.append(nueva)
#esto es para la base de datos actual
def setBaseDatos(self,datos):
self.bdUsar = datos
#print("la tabla a usar es "+self.bdUsar)
#retornar la base de datos
def getBaseDatos(self):
return self.bdUsar
def devolverBaseDeDatos(self):
nombre = self.getBaseDatos()
for x in range(0,len(self.listaBd)):
if(self.listaBd[x].nombreTabla == nombre):
#print(self.listaBd[x])
return self.listaBd[x]
def existeBd(self,nombre):
for x in range(0,len(self.listaBd)):
if(self.listaBd[x].nombreTabla == nombre):
return 1
return 0
def eliminarBD(self,nombre):
for x in range(0,len(self.listaBd)):
if(self.listaBd[x].nombreTabla == nombre):
self.listaBd.pop(x)
return 1
return 0
    def renombrarBd(self, nombre1, nombre2):
        # rename database nombre1 to nombre2 and return it
        for x in range(0, len(self.listaBd)):
            if self.listaBd[x].nombreTabla == nombre1:
                self.listaBd[x].nombreTabla = nombre2
                return self.listaBd[x]
        return None
def eliminarTabla(self, nombreT):
res = self.devolverBaseDeDatos()
res.eliminarTabla(nombreT)
def agregarTablaABd(self, nueva):
#devolver tabla
res = self.devolverBaseDeDatos()
res.agregarTabla(nueva)
def llenarTablas(self,nombre):
#agregar las tablas
tablas = showTables(nombre)
self.devolverBaseDeDatos()
self.agregarTablaABd(tablas)
def devolviendoTablaDeBase(self, nombreTabla):
nombreBd = self.getBaseDatos()
if(self.existeBd(nombreBd) == 1):
base = self.devolverBaseDeDatos()
tabla = base.devolverTabla(nombreTabla)
if( tabla == 0):
print("No se encontro la tabla")
return 0
else:
return tabla
def devolverColumnasTabla(self,nombreTabla):
print(nombreTabla)
tabla = self.devolviendoTablaDeBase(nombreTabla)
if(tabla == 0):
print("No se encontro la tabla")
return 0
else:
return tabla.devolverTodasLasColumnas()
def devolverOrdenDeColumna(self, nombreTabla, nombreColumna):
nombreBd = self.getBaseDatos()
if(self.existeBd(nombreBd) == 1):
base = self.devolverBaseDeDatos()
tabla = base.devolverTabla(nombreTabla)
if( tabla == 0):
print("No se encontro la tabla")
else:
res = tabla.devolverColumna(nombreColumna)
if(res==-1):
print("No se encontro el ide")
return -1
return res
else:
print("No existe bd en uso")
return -1
def devolverTipoColumna(self, nombreTabla, nombreColumna):
nombreBd = self.getBaseDatos()
if(self.existeBd(nombreBd) == 1):
base = self.devolverBaseDeDatos()
tabla = base.devolverTabla(nombreTabla)
if( tabla == 0):
print("No se encontro la tabla")
else:
res = tabla.devolverTipo(nombreColumna)
if(res==-1):
print("No se encontro el ide")
return -1
return res
else:
print("No existe bd en uso")
return -1
def getMensajeTabla(self, columnas, tuplas):
lf = []
for i in range(0,len(columnas)):
temporal = []
temporal.append(len(columnas[i]))
for l in tuplas:
temporal.append(len(str(l[i])))
lf.append(max(temporal))
# Encabezado
cad = ''
for s in range(0,len(lf)):
cad += '+---'+'-'*lf[s]
cad += '+\n'
for s in range(0,len(lf)):
cad += '| ' +str(columnas[s]) + ' ' *((lf[s]+4)-(2+len(str(columnas[s]))))
cad += '|\n'
cad += '|'
for s in range(0,len(lf)):
cad += '---'+'-'*lf[s]+ '+'
size = len(cad)
cad = cad[:size - 1] + "|\n"
# Valores
for i in tuplas:
for j in range(0,len(lf)):
cad += '| ' + str(i[j]) + ' ' *((lf[j]+4)-(2+len(str(i[j]))))
cad += "|\n"
# Línea final
for s in range(0,len(columnas)):
cad += '+---'+'-'*lf[s]
cad += '+\n'
self.consola.append(cad)
print(cad)
def setColumnasActual(self, valor):
self.columnasActual = valor
def getColumnasActual(self):
return self.columnasActual
def setWhere(self, valor):
self.where = valor
def getWhere(self):
return self.where
def setTablaActual(self, valor):
self.tablaActual = valor
def getTablaActual(self):
return self.tablaActual
def setRelaciones(self, valor):
self.relaciones = valor
def getRelaciones(self):
return self.relaciones
def setUpdate(self):
self.update = not self.update
def getUpdate(self):
return self.update
def getNombreTabla(self):
return self.nombreTabla
def setNombreTabla(self, valor):
self.nombreTabla = valor
def devolverTamanio(self, nombreTabla):
tabla = self.devolviendoTablaDeBase(nombreTabla)
can = tabla.devolverTodasLasColumnas()
return len(can)
def setOrder(self, order):
self.order = order
def getOrder(self):
return self.order
def getTemporal(self):
self.temporal += 1
return f"t{self.temporal}"
def getLabel(self):
self.label += 1
return f"L{self.label}"
def setIndex(self, nueva):
self.index.append(nueva)
def getIndex(self):
return self.index
    def getExists(self, nombre):
        # return 1 only after scanning every index list, not on the first miss
        for lista in self.index:
            for items in lista:
                if items['Nombre'] == nombre:
                    return 1
        return 0
    def removeIndex(self, nombre):
        for i in range(0, len(self.index)):
            for r in self.index[i]:
                # getBaseDatos must be called, not compared as a bound method
                if self.getBaseDatos() == r['Base'] and nombre == r['Nombre']:
                    self.index.pop(i)
                    return 1
        return 0
def addSymbol(self, name, dic):
self.ts[name] = dic
    def deleteSymbol(self, name, scope):
        # iterate over a copy of the keys so entries can be deleted safely
        for key in list(self.ts):
            symbol = self.ts[key]
            if symbol['name'] == name or symbol['scope'] == scope:
                del self.ts[key]
def getSymbol(self, name, scope):
for key in self.ts:
symbol = self.ts[key]
if symbol['name'] == name and symbol['scope'] == scope:
return self.ts[name]
return {}
def get_ts(self):
return self.ts
def setScope(self, scope):
self.scope = scope
def getScope(self):
return self.scope
def get_topt(self):
return self.topt
def addOpt(self, dic):
self.topt.append(dic)
def getExpressionCode(self, value_list, call_name):
codigo = f"\t#{call_name} 3D\n"
size = len(value_list)
temp_param_list = []
i = 0
while i < size:
param = self.getTemporal()
temp_param_list.append(param)
codigo += f"\t{param} = {value_list[i]}\n"
i += 1
temp_tam_func = self.getTemporal()
codigo += f"\t{temp_tam_func} = pointer + {size}\n"
i = 1
while i <= size:
index = self.getTemporal()
codigo += f"\t{index} = {temp_tam_func} + {i}\n"
codigo += f"\tstack[{index}] = {temp_param_list[i-1]}\n"
i += 1
temp_return = self.getTemporal()
temp_result = self.getTemporal()
codigo += f"\tpointer = pointer + {size}\n"
codigo += f"\tinter_{call_name}()\n"
codigo += f"\t{temp_return} = pointer + 0\n"
codigo += f"\t{temp_result} = stack[{temp_return}]\n"
codigo += f"\tpointer = pointer - {size}\n"
return {'codigo': codigo, 'dir': temp_result}
```
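`getMensajeTabla` is self-contained enough to smoke-test on its own; a sketch assuming the `Arbol` class above is importable:
```python
# Hypothetical smoke test for the ASCII table renderer.
arbol = Arbol(instrucciones=[])
arbol.getMensajeTabla(['id', 'nombre'], [[1, 'ana'], [2, 'benito']])
# prints a boxed ASCII table and appends the same string to arbol.consola
print(len(arbol.consola))  # 1
```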
#### File: Tytus/reportes/reporteoptimizacion.py
```python
import os.path
from os import path
import webbrowser
def crear_reporte(topt):
filename = "Optimizacion.html"
file = open(filename,"w",encoding='utf-8')
file.write(reporte_optimizacion(topt))
file.close()
webbrowser.open_new_tab(filename)
def reporte_optimizacion(topt):
cadena = ''
cadena += "<html><head><meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/><title>Reporte</title><style> \n"
cadena += "table{ \n"
cadena += "width:100%;"
cadena += "} \n"
cadena += "table, th, td {\n"
cadena += "border: 1px solid black;\n"
cadena += "border-collapse: collapse;\n"
cadena += "}\n"
cadena += "th, td {\n"
cadena += "padding: 5px;\n"
cadena += "text-align: left;\n"
cadena += "}\n"
cadena += "table#t01 tr:nth-child(even) {\n"
cadena += "background-color: #eee;\n"
cadena += "}\n"
cadena += "table#t01 tr:nth-child(odd) {\n"
cadena += "background-color:#fff;\n"
cadena += "}\n"
cadena += "table#t01 th {\n"
cadena += "background-color: black;\n"
cadena += "color: white;\n"
cadena += "}\n"
cadena += "</style></head><body><h1><center>Tabla de Símbolos</center></h1>\n"
cadena += "<table id=\"t01\">\n"
cadena += "<tr>\n"
cadena += "<th><center>#</center></th>\n"
cadena += "<th><center>Tipo</center></th>\n"
cadena += "<th><center>Normal</center></th>\n"
cadena += "<th><center>Optimizado</center></th>\n"
cadena += "<th><center>Línea</center></th>\n"
cadena += "</tr>\n"
contador = 1
for symbol in topt:
cadena += "<tr>\n"
cadena += "<td><center>" + str(contador) + "</center></td>\n"
cadena += "<td><center>" + symbol['type'] + "</center></td>\n"
cadena += "<td><center>" + symbol['before'] + "</center></td>\n"
cadena += "<td><center>" + symbol['opt'] + "</center></td>\n"
cadena += "<td><center>" + symbol['line'] + "</center></td>\n"
cadena += "</tr>\n"
contador += 1
cadena += "</table>\n"
cadena += "</body>\n"
cadena += "</html>"
return cadena
```
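A usage sketch for the report above; the four dict keys are the ones the row loop reads, while the rule names and values here are made up for illustration:
```python
# Hypothetical input for crear_reporte; values must be strings,
# since the writer concatenates them with '+'.
topt = [
    {'type': 'regla 8', 'before': 't1 = a + 0', 'opt': 't1 = a', 'line': '12'},
    {'type': 'regla 9', 'before': 't2 = b * 1', 'opt': 't2 = b', 'line': '15'},
]
crear_reporte(topt)  # writes Optimizacion.html and opens it in the browser
```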
#### File: analizadorFase2/Abstractas/Primitivo.py
```python
from analizadorFase2.Abstractas.Expresion import Expresion
from analizadorFase2.Abstractas.Expresion import Tipos
class Primitivo(Expresion):
def __init__(self, type : Tipos, valor):
Expresion.__init__(self)
self.tipo = type
self.valor = valor
```
#### File: Tytus_SQLPARSER_G8/Instrucciones/Declaracion.py
```python
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.Excepcion import Excepcion
from lexico import columas
from tkinter.constants import FALSE
from Instrucciones.Sql_create.ShowDatabases import ShowDatabases
from Instrucciones.TablaSimbolos.Instruccion import *
from Instrucciones.Tablas.BaseDeDatos import BaseDeDatos
from Instrucciones.TablaSimbolos.Simbolo import Simbolo
from Instrucciones.Expresiones.Primitivo import Primitivo
from Instrucciones.TablaSimbolos.Tipo import Tipo, Tipo_Dato
from storageManager.jsonMode import *
class Declaracion(Instruccion):
def __init__(self, nombre, tipo, expresion):
Instruccion.__init__(self,tipo,0,0,"strGram")
self.nombre=nombre
self.tipo=tipo
self.expresion = expresion
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
prim = None
strGram = ""
if self.tipo.tipo == Tipo_Dato.SMALLINT:
prim = Primitivo(0, Tipo("",Tipo_Dato.SMALLINT), strGram,0,0)
elif self.tipo.tipo == Tipo_Dato.INTEGER:
prim = Primitivo(0, Tipo("",Tipo_Dato.INTEGER), strGram,0,0)
elif self.tipo.tipo == Tipo_Dato.BIGINT:
prim = Primitivo(0, Tipo("",Tipo_Dato.BIGINT), strGram, 0,0)
elif self.tipo.tipo == Tipo_Dato.DECIMAL:
prim = Primitivo(0, Tipo("",Tipo_Dato.DECIMAL),strGram, 0,0)
elif self.tipo.tipo == Tipo_Dato.NUMERIC:
prim = Primitivo(0, Tipo("",Tipo_Dato.NUMERIC), strGram,0,0)
elif self.tipo.tipo == Tipo_Dato.REAL:
prim = Primitivo(0, Tipo("",Tipo_Dato.REAL), strGram,0,0)
elif self.tipo.tipo == Tipo_Dato.DOUBLE_PRECISION:
prim = Primitivo(0, Tipo("",Tipo_Dato.DOUBLE_PRECISION),strGram, 0,0)
elif self.tipo.tipo == Tipo_Dato.MONEY:
prim = Primitivo(0, Tipo("",Tipo_Dato.MONEY),strGram, 0,0)
elif self.tipo.tipo == Tipo_Dato.DATE:
prim = Primitivo('1900-01-01', Tipo("",Tipo_Dato.DATE),strGram, 0,0)
elif self.tipo.tipo == Tipo_Dato.TIMESTAMP:
prim = Primitivo('1900-01-01', Tipo("",Tipo_Dato.TIMESTAMP),strGram, 0,0)
        elif self.tipo.tipo == Tipo_Dato.TIME:
            prim = Primitivo('00:00:00', Tipo("",Tipo_Dato.TIME),strGram, 0,0)
elif self.tipo.tipo == Tipo_Dato.BOOLEAN:
prim = Primitivo(True, Tipo("",Tipo_Dato.BOOLEAN),strGram, 0,0)
variable = Simbolo(self.nombre,self.tipo,prim.valor,0,0)
resultadoInsertar = tabla.setVariable(variable)
if resultadoInsertar != None:
error = Excepcion("100","Semantico","La columna "+self.nombre+" yo existe",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return error
if self.expresion != None:
resultado = self.expresion.ejecutar(tabla, arbol)
if isinstance(resultado, Excepcion):
return resultado
return True
```
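The elif ladder that assigns a default per type could also be table-driven; a sketch with the mapping read off the branches above (the helper is hypothetical, and the `Primitivo` construction is left out for brevity):
```python
# Hypothetical table-driven version of the default-value ladder.
DEFAULTS = {
    'SMALLINT': 0, 'INTEGER': 0, 'BIGINT': 0, 'DECIMAL': 0, 'NUMERIC': 0,
    'REAL': 0, 'DOUBLE_PRECISION': 0, 'MONEY': 0,
    'DATE': '1900-01-01', 'TIMESTAMP': '1900-01-01', 'TIME': '00:00:00',
    'BOOLEAN': True,
}

def default_for(tipo_name):
    return DEFAULTS.get(tipo_name)

print(default_for('DATE'), default_for('BOOLEAN'))  # 1900-01-01 True
```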
#### File: Instrucciones/Expresiones/Primitivo.py
```python
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.TablaSimbolos.Simbolo3D import Simbolo3d
from Instrucciones.TablaSimbolos.Tipo import Tipo_Dato, Tipo
class Primitivo(Instruccion):
def __init__(self, valor, tipo, strGram, linea, columna, strSent):
Instruccion.__init__(self,tipo,linea,columna, strGram, strSent)
self.valor = valor
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
return self.valor
def traducir(self,tabla,arbol,cadenaTraducida):
super().ejecutar(tabla,arbol)
temporal = arbol.generaTemporal()
codigo = "\t" + temporal + " = " + str(self.valor) + "\n"
nuevo = Simbolo3d(self.tipo,temporal,codigo,None,None)
return nuevo
'''
p = Primitivo(1,Tipo(Tipo_Dato.INTEGER),1,2)
print(p.tipo.toString())
res = p.ejecutar(None,None)
print(res)
'''
```
#### File: Instrucciones/plpgsql/condicional_case.py
```python
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.TablaSimbolos.Tipo import Tipo_Dato
from Instrucciones.Excepcion import Excepcion
from Instrucciones.Expresiones import Relacional
class Case(Instruccion):
    '''
    The Case class receives a list of conditions; each condition carries its own list of instructions.
    '''
def __init__(self,l_condiciones,strGram, linea, columna, strSent):
Instruccion.__init__(self,None,linea,columna,strGram,strSent)
self.l_condiciones = l_condiciones
#desde aqui voy a mandar la etiqueta de escape
def ejecutar(self, tabla, arbol):
pass
def traducir(self, tabla, arbol,cadenaTraducida):
codigo = ""
etiquetaSalida = arbol.generaEtiqueta()
#Si existe algun error en la condicion se devuelve el error
for condicion in self.l_condiciones:
condicion_case = condicion.traducir(tabla, arbol,etiquetaSalida)
if isinstance(condicion_case, Excepcion):
return condicion_case
codigo += condicion_case
codigo += "\tlabel ." + etiquetaSalida + "\n"
return codigo
class condicion_case(Instruccion):
    '''
    Receives a logical expression and a list of instructions.
    '''
def __init__(self, expLogica,instrucciones,strGram, linea, columna, strSent):
Instruccion.__init__(self,None,linea,columna,strGram,strSent)
self.expLogica = expLogica
self.instrucciones = instrucciones
def ejecutar(self, tabla, arbol):
pass
def traducir(self, tabla, arbol,cadenaTraducida):
#Si existe algun error en la expresion logica se devuelve el error
expresion_logica = self.expLogica.traducir(tabla, arbol,cadenaTraducida)
if isinstance(expresion_logica, Excepcion):
return expresion_logica
if expresion_logica.tipo.tipo == Tipo_Dato.BOOLEAN or expresion_logica.tipo.tipo == Tipo_Dato.ID:
#Inicia traduccion
codigo = expresion_logica.codigo
etiquetaSalida = cadenaTraducida
codigo += "\tlabel " + expresion_logica.etiquetaV.replace(":","") + "\n"
for i in self.instrucciones:
instruccion_if = i.traducir(tabla, arbol,cadenaTraducida)
if isinstance(instruccion_if, Excepcion):
return instruccion_if
codigo += instruccion_if
codigo += "\tgoto ." + etiquetaSalida + "\n"
codigo += "\tlabel " + expresion_logica.etiquetaF.replace(":","") + "\n"
return codigo
# ...
# if temporal_logico:
# goto L1
# goto L2
# label L1
# instrucciones_if
# goto Lsalida
# label L2
# ...
else:
error = Excepcion('42804',"Semántico","La expresion logica debe ser de tipo boolean",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return error
class CaseElse(Instruccion):
    '''
    Receives a list of conditions (each with its instruction list) plus the
    instructions to execute when every condition is false.
    '''
def __init__(self,l_condiciones,instrCaseFalso,strGram, linea, columna, strSent):
Instruccion.__init__(self,None,linea,columna,strGram,strSent)
self.l_condiciones = l_condiciones
self.instrCaseFalso = instrCaseFalso
#desde aqui voy a mandar la etiqueta de escape
def ejecutar(self, tabla, arbol):
pass
def traducir(self, tabla, arbol,cadenaTraducida):
codigo = ""
etiquetaSalida = arbol.generaEtiqueta()
#Si existe algun error en la condicion se devuelve el error
for condicion in self.l_condiciones:
condicion_case = condicion.traducir(tabla, arbol,etiquetaSalida)
if isinstance(condicion_case, Excepcion):
return condicion_case
codigo += condicion_case
for i in self.instrCaseFalso:
instruccion_if = i.traducir(tabla, arbol,cadenaTraducida)
if isinstance(instruccion_if, Excepcion):
return instruccion_if
codigo += instruccion_if
codigo += "\tlabel " + etiquetaSalida + "\n"
return codigo
class CaseID(Instruccion):
    '''
    Receives a variable to evaluate and a list of conditions, each with its
    own list of instructions.
    '''
def __init__(self,identificador,l_condiciones,strGram, linea, columna, strSent):
Instruccion.__init__(self,None,linea,columna,strGram,strSent)
self.identificador = identificador
self.l_condiciones = l_condiciones
#desde aqui voy a mandar la etiqueta de escape
def ejecutar(self, tabla, arbol):
pass
def traducir(self, tabla, arbol,cadenaTraducida):
temporal = arbol.generaTemporal()
codigo = "\t" + temporal + " = " + self.identificador + "\n"
etiquetaSalida = arbol.generaEtiqueta()
#Si existe algun error en la condicion se devuelve el error
for condicion in self.l_condiciones:
condicion_case = condicion.traducir(tabla, arbol,etiquetaSalida,temporal)
if isinstance(condicion_case, Excepcion):
return condicion_case
codigo += condicion_case
codigo += "\tlabel ." + etiquetaSalida + "\n"
return codigo
class condicion_caseID(Instruccion):
    '''
    Receives a list of expressions and a list of instructions.
    '''
def __init__(self, expLogica,instrucciones,strGram, linea, columna, strSent):
Instruccion.__init__(self,None,linea,columna,strGram,strSent)
self.expLogica = expLogica
self.instrucciones = instrucciones
def ejecutar(self, tabla, arbol):
pass
def traducir(self, tabla, arbol,cadenaTraducida,temporal):
        # if any expression in the list fails, propagate the error
        codigo = ""
for expre in self.expLogica:
expresion_logica = expre.traducir(tabla, arbol,cadenaTraducida)
if isinstance(expresion_logica, Excepcion):
return expresion_logica
#expresion logica contiene un simbolo3D codigo = temporal = valor
codigo += expresion_logica.codigo
#Inicia traduccion
etiquetaV = arbol.generaEtiqueta()
etiquetaF = arbol.generaEtiqueta()
etiquetaSalida = cadenaTraducida
codigo += "\tif (" + temporal + "==" + expresion_logica.temporal + "):\n"
codigo += "\t\tgoto ." + etiquetaV + "\n"
codigo += "\tgoto ." + etiquetaF + "\n"
codigo += "\tlabel ." + etiquetaV + "\n"
for i in self.instrucciones:
instruccion_if = i.traducir(tabla, arbol,cadenaTraducida)
if isinstance(instruccion_if, Excepcion):
return instruccion_if
codigo += instruccion_if
codigo += "\tgoto ." + etiquetaSalida + "\n"
codigo += "\tlabel ." + etiquetaF + "\n"
return codigo
# ...
# if temporal_logico:
# goto L1
# goto L2
# label L1
# instrucciones_if
# goto Lsalida
# label L2
# ...
class CaseIDElse(Instruccion):
    '''
    Receives a variable to evaluate, a list of conditions (each with its
    instruction list), and the instructions to execute when all are false.
    '''
def __init__(self,identificador,l_condiciones,instrCaseFalso,strGram, linea, columna, strSent):
Instruccion.__init__(self,None,linea,columna,strGram,strSent)
self.identificador = identificador
self.l_condiciones = l_condiciones
self.instrCaseFalso = instrCaseFalso
#desde aqui voy a mandar la etiqueta de escape
def ejecutar(self, tabla, arbol):
pass
def traducir(self, tabla, arbol,cadenaTraducida):
temporal = arbol.generaTemporal()
codigo = "\t" + temporal + " = " + self.identificador + "\n"
etiquetaSalida = arbol.generaEtiqueta()
#Si existe algun error en la condicion se devuelve el error
for condicion in self.l_condiciones:
condicion_case = condicion.traducir(tabla, arbol,etiquetaSalida,temporal)
if isinstance(condicion_case, Excepcion):
return condicion_case
codigo += condicion_case
for i in self.instrCaseFalso:
instruccion_if = i.traducir(tabla, arbol,cadenaTraducida)
if isinstance(instruccion_if, Excepcion):
return instruccion_if
codigo += instruccion_if
codigo += "\tlabel ." + etiquetaSalida + "\n"
return codigo
```
#### File: Instrucciones/plpgsql/DeclaracionRetorno.py
```python
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
class DeclaracionRetorno(Instruccion):
def __init__(self, exprecion, strGram, linea, columna, strSent):
Instruccion.__init__(self,None,linea,columna, strGram, strSent)
self.exprecion = exprecion
def ejecutar(self, tabla, arbol):
pass
def traducir(self,tabla,arbol,cadenaTraducida):
codigo = ""
if self.exprecion is None:
codigo += "\treturn\n"
else:
simbolo = self.exprecion.traducir(tabla,arbol,cadenaTraducida)
codigo += simbolo.codigo
codigo += "\treturn " + simbolo.temporal + "\n"
return codigo
```
#### File: Instrucciones/plpgsql/DropProcedimiento.py
```python
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
class DropProcedimiento(Instruccion):
def __init__(self, id, strGram, linea, columna, strSent):
Instruccion.__init__(self,None,linea,columna, strGram, strSent)
self.id = id
def ejecutar(self, tabla, arbol):
tabla.dropProcedimiento(self, arbol)
def traducir(self,tabla,arbol,cadenaTraducida):
tabla.dropProcedimiento(self, arbol)
codigo = ""
        # emit the deletion of the procedure by name
codigo += "\tdel " + self.id + "\n"
return codigo
```
#### File: Instrucciones/Sql_alter/AlterDBOwner.py
```python
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
# For every definition that includes OWNER, accept it in the syntax only and do nothing with it
class AlterDBOwner(Instruccion):
def __init__(self, id, owner, strGram,linea, columna, strSent):
Instruccion.__init__(self,None,linea,columna,strGram, strSent)
self.id = id
self.owner = owner
def ejecutar(self, tabla, arbol):
#super().ejecutar(tabla,arbol)
arbol.consola.append("Consulta devuelta correctamente.")
def traducir(self,tabla,arbol,cadenaTraducida):
temporal = arbol.generaTemporal()
codigo = "\t" + temporal + " = " + "\"" + self.strSent + "\"\n"
codigo += "\tFuncionesPara3D.ejecutarsentecia(" + temporal + ")\n\n"
return codigo
```
#### File: Instrucciones/Sql_create/Set.py
```python
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
class Set(Instruccion):
def __init__(self, id, tipo, id2, strGram,linea, columna, strSent):
Instruccion.__init__(self,tipo,linea,columna, strGram, strSent)
self.valor = id
self.id2 = id2
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
print(self.valor + " linea: " + str(self.linea) + " columna: " + str(self.columna))
def traducir(self,tabla,arbol,cadenaTraducida):
temporal = arbol.generaTemporal()
codigo = "\t" + temporal + " = " + "\"" + self.strSent + "\"\n"
codigo += "\tFuncionesPara3D.ejecutarsentecia(" + temporal + ")\n\n"
return codigo
'''
instruccion = Use("hola mundo",None, 1,2)
instruccion.ejecutar(None,None)
'''
```
#### File: team07/Tytus_SQLPARSER_G8/sintacticoGraph.py
```python
from Instrucciones.Excepcion import Excepcion
from Instrucciones.Identificador import Identificador
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from tkinter.constants import HORIZONTAL
from ply import *
from lexico import *
#tokens= lexico.tokens
from Instrucciones.TablaSimbolos.Tipo import Tipo, Tipo_Dato
from Instrucciones.FunctionAgregate import Avg, Count, Greatest, Least, Max, Min, Sum, Top
from Instrucciones.FunctionMathematical import Abs, Cbrt, Ceil, Ceiling, Degrees, Div, Exp, Factorial, Floor, Gcd, Lcm, Ln, Log, Log10, MinScale, Mod, PI, Power, Radians, Random, Round, Scale, SetSeed, Sign, Sqrt, TrimScale, Trunc, WidthBucket
from Instrucciones.FunctionTrigonometric import Acos, Acosd, Acosh, Asin, Asind, Asinh, Atan, Atan2, Atan2d, Atand, Atanh, Cos, Cosd, Cosh, Cot, Cotd, Sin, Sind, Sinh, Tan, Tand, Tanh
from Instrucciones.FunctionBinaryString import Convert, Decode, Encode, GetByte, Length, Md5, SetByte, Sha256, Substr, Substring, Trim
from Instrucciones.Expresiones import Aritmetica, Logica, Primitivo, Relacional, Between
from Instrucciones.DateTimeTypes import Case , CurrentDate, CurrentTime, DatePart, Extract, Now, Por, TimeStamp
from Instrucciones.Sql_alter import AlterDatabase, AlterTable, AlterDBOwner, AlterTableAddColumn, AlterTableAddConstraintFK, Columna, AlterTableDropColumn, AlterTableAddConstraint, AlterTableAddFK, AlterTableAlterColumn, AlterTableDropConstraint, AlterTableAlterColumnType, AlterTableAddCheck, AlterIndex
from Instrucciones.Sql_create import CreateDatabase, CreateFunction, CreateOrReplace, CreateTable, CreateType, Use, ShowDatabases,Set, CreateIndex
from Instrucciones.Sql_declare import Declare
from Instrucciones.Sql_delete import DeleteTable
from Instrucciones.Sql_drop import DropDatabase, DropTable, DropIndex
from Instrucciones.Sql_insert import insertTable
from Instrucciones.Sql_Joins import Join, JoinFull, JoinInner, JoinLeft, JoinRight
from Instrucciones.Sql_select import GroupBy, Having, Limit, OrderBy, Select, Where, SelectLista
from Instrucciones.Sql_truncate import Truncate
from Instrucciones.Sql_update import UpdateTable
from Instrucciones.Sql_create import Columna as CColumna
from Instrucciones import Relaciones, LlamadoFuncion
import nodoGeneral
from Instrucciones.plpgsql import condicional_if, Funcion, DeclaracionVariable, DeclaracionAlias, condicional_case, Procedimiento, DeclaracionRetorno, AsignacionVariable
# IMPORT THE STORAGE MANAGER
from storageManager import jsonMode as storage
from Instrucciones.Sql_create.Tipo_Constraint import *
lista_lexicos=lista_errores_lexico
# SYNTACTIC ANALYSIS STARTS HERE
global numNodo
numNodo = 0
def incNodo(valor):
global numNodo
numNodo = numNodo + 1
return numNodo
def crear_nodo_general(nombre, valor, fila, column):
nNodo = incNodo(numNodo)
hijos = []
nodoEnviar = nodoGeneral.NodoGeneral(fila, column, nombre, nNodo, valor, hijos)
return nodoEnviar
# Operator associativity and precedence
precedence = (
('left', 'CHECK'),
('left', 'OR'),
('left', 'AND'),
('left', 'IS', 'FROM','DISTINCT'),
('left', 'LIKE', 'BETWEEN', 'IN'),
('left', 'NOT'),
('left', 'IGUAL', 'MAYORQ', 'MENORQ', 'MAYOR_IGUALQ', 'MENOR_IGUALQ', 'DISTINTO'),
('left', 'MAS', 'MENOS'),
('left', 'EXPONENCIACION'),
('left', 'POR', 'DIVIDIDO'),
('left', 'MODULO'),
('left', 'AS', 'ASC', 'DESC'),
('left', 'COUNT'),
('left', 'UNION', 'INTERSECT', 'EXCEPT'),
('left', 'PARIZQ', 'PARDER'),
('right', 'UMENOS')
)
# Grammar definition
def p_init(t):
'init : instrucciones'
t[0] = t[1]
def p_instrucciones_lista1(t):
'instrucciones : instrucciones instruccion '
nodo = t[1]
nodo.hijos.append(t[2])
t[0] = nodo
def p_instrucciones_lista2(t):
'instrucciones : instruccion '
nodo = crear_nodo_general("init", "", t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
# CREATE DATABASE
def p_instruccion_create_database1(t):
'''instruccion : CREATE DATABASE if_not_exists ID PUNTO_COMA
'''
# ID tipo opcion ID2 ENTERO
nodoId = crear_nodo_general("ID",t[4],t.lexer.lineno, t.lexer.lexpos)
nodoE = t[3]
nodo = crear_nodo_general("CREATE DATABASE","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoE)
nodo.hijos.append(nodoId)
t[0] = nodo
def p_instruccion_create_database2(t):
'''instruccion : CREATE DATABASE if_not_exists ID OWNER IGUAL cowner PUNTO_COMA
'''
# ID tipo opcion ID2 ENTERO
nodoId = crear_nodo_general("ID",t[4],t.lexer.lineno, t.lexer.lexpos)
nodoE = t[3]
nodoO = t[7]
nodo = crear_nodo_general("CREATE DATABASE","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoE)
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoO)
t[0] = nodo
def p_instruccion_create_database3(t):
'''instruccion : CREATE DATABASE if_not_exists ID OWNER IGUAL cowner MODE IGUAL ENTERO PUNTO_COMA
'''
# ID tipo opcion ID2 ENTERO
nodoId = crear_nodo_general("ID",t[4],t.lexer.lineno, t.lexer.lexpos)
nodoE = t[3]
nodoO = t[7]
nodoM = crear_nodo_general("Mode",t[10],t.lexer.lineno, t.lexer.lexpos)
nodo = crear_nodo_general("CREATE DATABASE","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoE)
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoO)
nodo.hijos.append(nodoM)
t[0] = nodo
def p_instruccion_create_database4(t):
'''instruccion : CREATE DATABASE if_not_exists ID MODE IGUAL ENTERO PUNTO_COMA
'''
# ID tipo opcion ID2 ENTERO
nodoId = crear_nodo_general("ID",t[4],t.lexer.lineno, t.lexer.lexpos)
nodoE = t[3]
nodoM = crear_nodo_general("Mode",t[7],t.lexer.lineno, t.lexer.lexpos)
nodo = crear_nodo_general("CREATE DATABASE","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoE)
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoM)
t[0] = nodo
# CREATE OR REPLACE DATABASE
def p_instruccion_create_or_database1(t):
'''instruccion : CREATE OR REPLACE DATABASE if_not_exists ID PUNTO_COMA
'''
nodoR = crear_nodo_general("OR REPLACE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[6],t.lexer.lineno, t.lexer.lexpos)
nodoE = t[5]
nodo = crear_nodo_general("CREATE DATABASE","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoR)
nodo.hijos.append(nodoE)
nodo.hijos.append(nodoId)
t[0] = nodo
def p_instruccion_create_or_database2(t):
'''instruccion : CREATE OR REPLACE DATABASE if_not_exists ID OWNER IGUAL cowner PUNTO_COMA
'''
nodoR = crear_nodo_general("OR REPLACE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[6],t.lexer.lineno, t.lexer.lexpos)
nodoE = t[5]
nodoO = t[9]
nodo = crear_nodo_general("CREATE DATABASE","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoR)
nodo.hijos.append(nodoE)
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoO)
t[0] = nodo
def p_instruccion_create_or_database3(t):
'''instruccion : CREATE OR REPLACE DATABASE if_not_exists ID OWNER IGUAL cowner MODE IGUAL ENTERO PUNTO_COMA
'''
nodoR = crear_nodo_general("OR REPLACE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[6],t.lexer.lineno, t.lexer.lexpos)
nodoE = t[5]
nodoO = t[9]
nodoM = crear_nodo_general("Mode",t[12],t.lexer.lineno, t.lexer.lexpos)
nodo = crear_nodo_general("CREATE DATABASE","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoR)
nodo.hijos.append(nodoE)
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoO)
nodo.hijos.append(nodoM)
t[0] = nodo
def p_instruccion_create_or_database4(t):
'''instruccion : CREATE OR REPLACE DATABASE if_not_exists ID MODE IGUAL ENTERO PUNTO_COMA
'''
nodoR = crear_nodo_general("OR REPLACE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[6],t.lexer.lineno, t.lexer.lexpos)
nodoE = t[5]
nodoM = crear_nodo_general("Mode",t[9],t.lexer.lineno, t.lexer.lexpos)
nodo = crear_nodo_general("CREATE DATABASE","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoR)
nodo.hijos.append(nodoE)
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoM)
t[0] = nodo
def p_owner(t):
'''cowner : ID
| CARACTER
| CADENA
'''
t[0] = crear_nodo_general("Owner",t[1],t.lexer.lineno, t.lexer.lexpos)
def p_if_not_exists(t):
'''if_not_exists : IF NOT EXISTS
'''
t[0] = crear_nodo_general("IF NOT EXISTS","",t.lexer.lineno, t.lexer.lexpos)
def p_if_not_exists1(t):
'''if_not_exists :
'''
t[0] = None
def p_instruccion_create1(t):
'''instruccion : CREATE TABLE ID PARIZQ campos PARDER PUNTO_COMA
'''
nodo = crear_nodo_general("CREATE TABLE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoCampos = t[5]
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoCampos)
t[0] = nodo
def p_instruccion_create2(t):
'''instruccion : CREATE TABLE ID PARIZQ campos PARDER INHERITS PARIZQ ID PARDER PUNTO_COMA
'''
nodo = crear_nodo_general("CREATE TABLE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoCampos = t[5]
nodoI = crear_nodo_general("INHERITS",t[9],t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoCampos)
nodo.hijos.append(nodoI)
t[0] = nodo
def p_instruccion_use(t):
'''instruccion : USE ID PUNTO_COMA
'''
t[0] = crear_nodo_general("USE",t[2],t.lexer.lineno, t.lexer.lexpos)
def p_instruccion_show_database1(t):
'''instruccion : SHOW DATABASES PUNTO_COMA
'''
t[0] = crear_nodo_general("SHOW DATABASES","",t.lexer.lineno, t.lexer.lexpos)
def p_instruccion_show_database2(t):
'''instruccion : SHOW DATABASES LIKE cadena_o_caracter PUNTO_COMA
'''
nodo = crear_nodo_general("SHOW DATABASES","",t.lexer.lineno, t.lexer.lexpos)
nodoL = crear_nodo_general("LIKE","",t.lexer.lineno, t.lexer.lexpos)
nodoC = t[4]
nodo.hijos.append(nodoL)
nodo.hijos.append(nodoC)
t[0] = nodo
def p_instruccion_create_enumerated_type(t):
'''instruccion : CREATE TYPE ID AS ENUM PARIZQ l_expresiones PARDER PUNTO_COMA
'''
nodo = crear_nodo_general("CREATE TYPE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoE = t[7]
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoE)
t[0] = nodo
def p_instruccion_truncate(t):
'''instruccion : TRUNCATE TABLE ID PUNTO_COMA
'''
nodo = crear_nodo_general("TRUNCATE TABLE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoId)
t[0] = nodo
# DROP DATABASE
def p_instruccion_drop_database1(t):
'''instruccion : DROP DATABASE ID PUNTO_COMA
'''
nodo = crear_nodo_general("DROP DATABASE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoId)
t[0] = nodo
def p_instruccion_drop_database2(t):
'''instruccion : DROP DATABASE IF EXISTS ID PUNTO_COMA
'''
nodo = crear_nodo_general("DROP DATABASE","",t.lexer.lineno, t.lexer.lexpos)
nodoI = crear_nodo_general("IF EXISTS","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[5],t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoI)
nodo.hijos.append(nodoId)
t[0] = nodo
# DROP TABLE
def p_instruccion_drop(t):
'''instruccion : DROP TABLE ID PUNTO_COMA
'''
nodo = crear_nodo_general("DROP TABLE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoId)
t[0] = nodo
def p_instruccion_drop2(t):
'''instruccion : DROP ID
'''
nodo = crear_nodo_general("DROP ID",t[2],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_instruccion_where(t):
'''
instructionWhere : WHERE expre
'''
nodo = crear_nodo_general("WHERE","",t.lexer.lineno, t.lexer.lexpos)
nodoE = t[2]
nodo.hijos.append(nodoE)
t[0] = nodo
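# instructionWhere is shared by UPDATE, DELETE and the SELECT variants, so a
# WHERE clause produces the same subtree shape no matter which statement uses it.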
# update table set field = value, field2 = value where condition
def p_instruccion_update(t):
'''instruccion : UPDATE ID SET lcol instructionWhere PUNTO_COMA
'''
nodo = crear_nodo_general("UPDATE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[2],t.lexer.lineno, t.lexer.lexpos)
nodoL = t[4]
nodoInstr = t[5]
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoL)
nodo.hijos.append(nodoInstr)
t[0] = nodo
# update table set field = value, field2 = value;
def p_instruccion_update2(t):
'''instruccion : UPDATE ID SET lcol PUNTO_COMA
'''
nodo = crear_nodo_general("UPDATE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[2],t.lexer.lineno, t.lexer.lexpos)
nodoL = t[4]
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoL)
t[0] = nodo
# DELETE FROM Customers WHERE CustomerName='<NAME>';
def p_columunas_delete(t):
'''
instruccion : DELETE FROM ID instructionWhere PUNTO_COMA
'''
nodo = crear_nodo_general("DELETE FROM","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoInstr = t[4]
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoInstr)
t[0] = nodo
# FUNCTIONS
#def p_funciones(t):
#'''
# instruccion : CREATE FUNCTION ID BEGIN instrucciones END PUNTO_COMA
#'''
# strGram = "<instruccion> ::= CREATE FUNCTION ID BEGIN <instrucciones> END PUNTO_COMA"
# t[0] = CreateFunction.CreateFunction(t[3],None, None, None, t[5], strGram, t.lexer.lineno, t.lexer.lexpos)
#def p_funciones2(t):
#'''
# instruccion : CREATE FUNCTION ID PARIZQ lcol PARDER BEGIN instrucciones END PUNTO_COMA
#'''
# strGram = "<instruccion> ::= CREATE FUNCTION ID PARIZQ <lcol> PARDER BEGIN <instrucciones> END PUNTO_COMA"
# t[0] = CreateFunction.CreateFunction(t[3],None, t[5], None, t[8], strGram, t.lexer.lineno, t.lexer.lexpos)
#def p_funciones3(t):
#'''
# instruccion : CREATE FUNCTION ID PARIZQ lcol PARDER AS expresion BEGIN instrucciones END PUNTO_COMA
#'''
# strGram = "<instruccion> ::= CREATE FUNCTION ID PARIZQ <lcol> PARDER AS <expresion> BEGIN <instrucciones> END PUNTO_COMA"
# t[0] = CreateFunction.CreateFunction(t[3],None, t[5], t[8], t[10], strGram, t.lexer.lineno, t.lexer.lexpos)
def p_declaracion(t):
'''
instruccion : DECLARE expresion AS expresion PUNTO_COMA
'''
nodo = crear_nodo_general("DECLARE","",t.lexer.lineno, t.lexer.lexpos)
nodoE = t[2]
nodoA = crear_nodo_general("AS","",t.lexer.lineno, t.lexer.lexpos)
nodoE2 = t[4]
nodo.hijos.append(nodoE)
nodo.hijos.append(nodoA)
nodo.hijos.append(nodoE2)
t[0] = nodo
def p_declaracion1(t):
'''
instruccion : DECLARE expresion tipo PUNTO_COMA
'''
nodo = crear_nodo_general("DECLARE","",t.lexer.lineno, t.lexer.lexpos)
nodoE = t[2]
nodoT = t[3]
nodo.hijos.append(nodoE)
nodo.hijos.append(nodoT)
t[0] = nodo
def p_set(t):
'''
instruccion : SET expresion IGUAL expre PUNTO_COMA
'''
nodo = crear_nodo_general("SET","",t.lexer.lineno, t.lexer.lexpos)
nodoE = t[2]
nodoI = crear_nodo_general("=","",t.lexer.lineno, t.lexer.lexpos)
nodoE2 = t[4]
nodo.hijos.append(nodoE)
nodo.hijos.append(nodoI)
nodo.hijos.append(nodoE2)
t[0] = nodo
# ALTER DATABASE name RENAME TO new_name
def p_instruccion_alter_database1(t):
'''instruccion : ALTER DATABASE ID RENAME TO ID PUNTO_COMA
'''
nodo = crear_nodo_general("ALTER DATABASE","",t.lexer.lineno, t.lexer.lexpos)
    nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoR = crear_nodo_general("RENAME TO","",t.lexer.lineno, t.lexer.lexpos)
nodoId2 = crear_nodo_general("ID",t[6],t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoR)
nodo.hijos.append(nodoId2)
t[0] = nodo
# ALTER DATABASE name OWNER TO { new_owner | CURRENT_USER | SESSION_USER }
def p_instruccion_alter_database2(t):
'''instruccion : ALTER DATABASE ID OWNER TO list_owner PUNTO_COMA
'''
nodo = crear_nodo_general("ALTER DATABASE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoOwner = crear_nodo_general("OWNER TO","",t.lexer.lineno, t.lexer.lexpos)
nodoL = t[6]
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoOwner)
nodo.hijos.append(nodoL)
t[0] = nodo
# { new_owner | CURRENT_USER | SESSION_USER }
def p_list_owner(t):
'''list_owner : ID
| CURRENT_USER
| SESSION_USER
'''
t[0] = crear_nodo_general("OWNER",t[1],t.lexer.lineno, t.lexer.lexpos)
# ALTER TABLE 'NOMBRE_TABLA' ADD COLUMN NOMBRE_COLUMNA TIPO;
def p_instruccion_alter1(t):
'''instruccion : ALTER TABLE ID l_add_column PUNTO_COMA
'''
nodo = crear_nodo_general("ALTER TABLE","",t.lexer.lineno, t.lexer.lexpos)
    nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoL = t[4]
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoL)
t[0] = nodo
def p_l_add_column1(t):
'''l_add_column : l_add_column COMA add_column
'''
nodo = t[1]
nodo.hijos.append(t[3])
t[0] = nodo
def p_l_add_column2(t):
'''l_add_column : add_column
'''
nodo = crear_nodo_general("l_add_column","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_add_column(t):
'''add_column : ADD COLUMN ID tipo'''
nodo = crear_nodo_general("ADD COLUMN","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoT = t[4]
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoT)
t[0] = nodo
# ALTER TABLE 'NOMBRE_TABLA' DROP COLUMN NOMBRE_COLUMNA;
def p_instruccion_alter2(t):
'''instruccion : ALTER TABLE ID l_drop_column PUNTO_COMA
'''
nodo = crear_nodo_general("ALTER TABLE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoL = t[4]
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoL)
t[0] = nodo
def p_l_drop_column1(t):
'''l_drop_column : l_drop_column COMA drop_column'''
nodo = t[1]
nodo.hijos.append(t[3])
t[0] = nodo
def p_l_drop_column2(t):
'''l_drop_column : drop_column'''
nodo = crear_nodo_general("l_drop_column","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_drop_column(t):
'drop_column : DROP COLUMN ID'
nodo = crear_nodo_general("DROP COLUMN","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoId)
t[0] = nodo
# ALTER TABLE 'NOMBRE_TABLA' ADD CHECK EXP;
def p_instruccion_alter3(t):
'''instruccion : ALTER TABLE ID ADD CHECK expre PUNTO_COMA
'''
nodo = crear_nodo_general("ALTER TABLE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoC = crear_nodo_general("ADD CHECK","",t.lexer.lineno, t.lexer.lexpos)
nodoE = t[6]
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoC)
nodo.hijos.append(nodoE)
t[0] = nodo
# ALTER TABLE 'NOMBRE_TABLA' ADD CONSTRAINT 'NOMBRE' UNIQUE (LISTA_ID);
def p_instruccion_alter4(t):
'''instruccion : ALTER TABLE ID ADD CONSTRAINT ID UNIQUE PARIZQ lista_id PARDER PUNTO_COMA
'''
nodo = crear_nodo_general("ALTER TABLE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoConst = crear_nodo_general("ADD CONSTRAINT","",t.lexer.lineno, t.lexer.lexpos)
nodoId2 = crear_nodo_general("ID",t[6],t.lexer.lineno, t.lexer.lexpos)
nodoU = crear_nodo_general("UNIQUE","",t.lexer.lineno, t.lexer.lexpos)
nodoL = t[9]
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoConst)
nodo.hijos.append(nodoId2)
nodo.hijos.append(nodoU)
nodo.hijos.append(nodoL)
t[0] = nodo
def p_instruccion_altercfk(t):
'''instruccion : ALTER TABLE ID ADD CONSTRAINT ID FOREIGN KEY PARIZQ lista_id PARDER REFERENCES ID PARIZQ lista_id PARDER PUNTO_COMA
'''
nodo = crear_nodo_general("ALTER TABLE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoConst = crear_nodo_general("ADD CONSTRAINT","",t.lexer.lineno, t.lexer.lexpos)
nodoId2 = crear_nodo_general("ID",t[6],t.lexer.lineno, t.lexer.lexpos)
nodoU = crear_nodo_general("FOREIGN KEY","",t.lexer.lineno, t.lexer.lexpos)
nodoLI = t[10]
nodoR = crear_nodo_general("REFERENCES","",t.lexer.lineno, t.lexer.lexpos)
    nodoId3 = crear_nodo_general("ID",t[13],t.lexer.lineno, t.lexer.lexpos)
nodoLI2 = t[15]
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoConst)
nodo.hijos.append(nodoId2)
nodo.hijos.append(nodoU)
nodo.hijos.append(nodoLI)
nodo.hijos.append(nodoR)
nodo.hijos.append(nodoId3)
nodo.hijos.append(nodoLI2)
t[0] = nodo
# ALTER TABLE child_table ADD FOREIGN KEY (fk_columns) REFERENCES parent_table (parent_key_columns);
def p_instruccion_alter5(t):
'''instruccion : ALTER TABLE ID ADD FOREIGN KEY PARIZQ lista_id PARDER REFERENCES ID PARIZQ lista_id PARDER PUNTO_COMA
'''
nodo = crear_nodo_general("ALTER TABLE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoF = crear_nodo_general("ADD FOREIGN KEY","",t.lexer.lineno, t.lexer.lexpos)
    nodoLI = t[8]
    nodoR = crear_nodo_general("REFERENCES","",t.lexer.lineno, t.lexer.lexpos)
    nodoId2 = crear_nodo_general("ID",t[11],t.lexer.lineno, t.lexer.lexpos)
    nodoLI2 = t[13]
    nodo.hijos.append(nodoId)
    nodo.hijos.append(nodoF)
    nodo.hijos.append(nodoLI)
    nodo.hijos.append(nodoR)
    nodo.hijos.append(nodoId2)
    nodo.hijos.append(nodoLI2)
t[0] = nodo
# ALTER TABLE 'NOMBRE_TABLA' ALTER COLUMN 'NOMBRE' SET NOT NULL;
def p_instruccion_alter6(t):
'''instruccion : ALTER TABLE ID ALTER COLUMN ID SET NOT NULL PUNTO_COMA
'''
nodo = crear_nodo_general("ALTER TABLE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoA = crear_nodo_general("ALTER COLUMN","",t.lexer.lineno, t.lexer.lexpos)
nodoId2 = crear_nodo_general("ID",t[6],t.lexer.lineno, t.lexer.lexpos)
nodoS = crear_nodo_general("SET NOT NULL","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoA)
nodo.hijos.append(nodoId2)
nodo.hijos.append(nodoS)
t[0] = nodo
# ALTER TABLE 'NOMBRE_TABLA' DROP CONSTRAINT 'NOMBRE';
def p_instruccion_alter7(t):
'''instruccion : ALTER TABLE ID DROP CONSTRAINT ID PUNTO_COMA
'''
nodo = crear_nodo_general("ALTER TABLE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoC = crear_nodo_general("DROP CONSTRAINT","",t.lexer.lineno, t.lexer.lexpos)
nodoId2 = crear_nodo_general("ID",t[6],t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoC)
nodo.hijos.append(nodoId2)
t[0] = nodo
# ALTER TABLE 'NOMBRE_TABLA' ADD CONSTRAINT 'NOMBRE' CHECK expre;
def p_instruccion_alter8(t):
'''instruccion : ALTER TABLE ID l_alter PUNTO_COMA
'''
nodo = crear_nodo_general("ALTER TABLE","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoL = t[4]
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoL)
t[0] = nodo
def p_l_alter1(t):
'l_alter : l_alter COMA alter_column'
nodo = t[1]
nodo.hijos.append(t[3])
t[0] = nodo
def p_l_alter2(t):
'l_alter : alter_column'
nodo = crear_nodo_general("l_alter","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_alter_column(t):
    'alter_column : ALTER COLUMN ID TYPE tipo'
    nodo = crear_nodo_general("ALTER COLUMN","",t.lexer.lineno, t.lexer.lexpos)
    nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
    nodoT = crear_nodo_general("TYPE","",t.lexer.lineno, t.lexer.lexpos)
    nodoT.hijos.append(t[5])
    nodo.hijos.append(nodoId)
    nodo.hijos.append(nodoT)
    t[0] = nodo
# insert into table (col1,col2,col3,col4) values (val1, val2, val3, val4)
# only validate that the column list and the value list have the same length
def p_instruccion_insert(t):
'''instruccion : INSERT INTO ID PARIZQ lista_id PARDER VALUES PARIZQ l_expresiones PARDER PUNTO_COMA
'''
nodo = crear_nodo_general("INSERT INTO","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoL = t[5]
nodoV = crear_nodo_general("VALUES","",t.lexer.lineno, t.lexer.lexpos)
nodoLE = t[9]
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoL)
nodo.hijos.append(nodoV)
nodo.hijos.append(nodoLE)
t[0] = nodo
# insert into table values (val1,val2,val3)
# must validate that the number of values matches the table's column count and each data type
def p_instruccion_insert2(t):
'''
instruccion : INSERT INTO ID VALUES PARIZQ l_expresiones PARDER PUNTO_COMA
'''
nodo = crear_nodo_general("INSERT INTO","",t.lexer.lineno, t.lexer.lexpos)
nodoId = crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos)
nodoV = crear_nodo_general("VALUES","",t.lexer.lineno, t.lexer.lexpos)
nodoLE = t[6]
nodo.hijos.append(nodoId)
nodo.hijos.append(nodoV)
nodo.hijos.append(nodoLE)
t[0] = nodo
# SELECT col, col FROM id;
# SELECT * from id;
def p_instruccion_query(t):
'''
instruccion : lquery PUNTO_COMA
'''
t[0] = t[1]
def p_lista_querys(t):
'''lquery : lquery relaciones query
'''
nodo = crear_nodo_general("lquery","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
nodo.hijos.append(t[2])
nodo.hijos.append(t[3])
t[0] = nodo
def p_lista_querys2(t):
'''
lquery : query
'''
t[0] = t[1]
def p_tipo_relaciones(t):
    '''relaciones : UNION
                  | INTERSECT
                  | EXCEPT
    '''
    # The three set operators build identical nodes, so no branching is needed.
    t[0] = crear_nodo_general("relaciones",t[1],t.lexer.lineno, t.lexer.lexpos)
def p_tipo_relaciones2(t):
    '''relaciones : UNION ALL
                  | INTERSECT ALL
                  | EXCEPT ALL
    '''
    t[0] = crear_nodo_general("relaciones",t[1] + " ALL",t.lexer.lineno, t.lexer.lexpos)
def p_instruccion_select(t):
'''
query : SELECT dist lcol FROM lcol
'''
nodo = crear_nodo_general("SELECT","",t.lexer.lineno, t.lexer.lexpos)
nodoD = t[2]
nodoL = t[3]
nodoF = crear_nodo_general("FROM","",t.lexer.lineno, t.lexer.lexpos)
nodoLC = t[5]
nodo.hijos.append(nodoD)
nodo.hijos.append(nodoL)
nodo.hijos.append(nodoF)
nodo.hijos.append(nodoLC)
t[0] = nodo
def p_instruccion_select1(t):
'''
query : SELECT dist lcol FROM lcol instructionWhere lrows
'''
# dist tipo lcol lcol linners where lrows
nodo = crear_nodo_general("SELECT","",t.lexer.lineno, t.lexer.lexpos)
nodoD = t[2]
nodoL = t[3]
nodoF = crear_nodo_general("FROM","",t.lexer.lineno, t.lexer.lexpos)
nodoLC = t[5]
nodoW = t[6]
nodoLR = t[7]
nodo.hijos.append(nodoD)
nodo.hijos.append(nodoL)
nodo.hijos.append(nodoF)
nodo.hijos.append(nodoLC)
nodo.hijos.append(nodoW)
nodo.hijos.append(nodoLR)
t[0] = nodo
def p_instruccion_select2(t):
'''
query : SELECT dist lcol FROM lcol instructionWhere
'''
# dist tipo lcol lcol linners where lrows
nodo = crear_nodo_general("SELECT","",t.lexer.lineno, t.lexer.lexpos)
nodoD = t[2]
nodoL = t[3]
nodoF = crear_nodo_general("FROM","",t.lexer.lineno, t.lexer.lexpos)
nodoLC = t[5]
nodoW = t[6]
nodo.hijos.append(nodoD)
nodo.hijos.append(nodoL)
nodo.hijos.append(nodoF)
nodo.hijos.append(nodoLC)
nodo.hijos.append(nodoW)
t[0] = nodo
def p_instruccion_select3(t):
'''
query : SELECT dist lcol FROM lcol linners
'''
# dist tipo lcol lcol linners where lrows
nodo = crear_nodo_general("SELECT","",t.lexer.lineno, t.lexer.lexpos)
nodoD = t[2]
nodoL = t[3]
nodoF = crear_nodo_general("FROM","",t.lexer.lineno, t.lexer.lexpos)
nodoLC = t[5]
nodoLi = t[6]
nodo.hijos.append(nodoD)
nodo.hijos.append(nodoL)
nodo.hijos.append(nodoF)
nodo.hijos.append(nodoLC)
nodo.hijos.append(nodoLi)
t[0] = nodo
def p_instruccion_select4(t):
'''
query : SELECT dist lcol FROM lcol linners instructionWhere lrows
'''
# dist tipo lcol lcol linners where lrows
nodo = crear_nodo_general("SELECT","",t.lexer.lineno, t.lexer.lexpos)
nodoD = t[2]
nodoL = t[3]
nodoF = crear_nodo_general("FROM","",t.lexer.lineno, t.lexer.lexpos)
nodoLC = t[5]
nodoLi = t[6]
nodoW = t[7]
nodoLR = t[8]
nodo.hijos.append(nodoD)
nodo.hijos.append(nodoL)
nodo.hijos.append(nodoF)
nodo.hijos.append(nodoLC)
nodo.hijos.append(nodoLi)
nodo.hijos.append(nodoW)
nodo.hijos.append(nodoLR)
t[0] = nodo
def p_instruccion_select5(t):
'''
query : SELECT dist lcol FROM lcol linners instructionWhere
'''
# dist tipo lcol lcol linners where lrows
nodo = crear_nodo_general("SELECT","",t.lexer.lineno, t.lexer.lexpos)
nodoD = t[2]
nodoL = t[3]
nodoF = crear_nodo_general("FROM","",t.lexer.lineno, t.lexer.lexpos)
nodoLC = t[5]
nodoLi = t[6]
nodoW = t[7]
nodo.hijos.append(nodoD)
nodo.hijos.append(nodoL)
nodo.hijos.append(nodoF)
nodo.hijos.append(nodoLC)
nodo.hijos.append(nodoLi)
nodo.hijos.append(nodoW)
t[0] = nodo
def p_instruccion_select6(t):
'''
query : SELECT dist lcol
'''
# dist tipo lcol lcol linners where lrows
nodo = crear_nodo_general("SELECT","",t.lexer.lineno, t.lexer.lexpos)
nodoD = t[2]
nodoL = t[3]
nodo.hijos.append(nodoD)
nodo.hijos.append(nodoL)
t[0] = nodo
def p_instruccion_select7(t):
'''
query : SELECT dist lcol FROM lcol lrows
'''
# dist tipo lcol lcol linners where lrows
nodo = crear_nodo_general("SELECT","",t.lexer.lineno, t.lexer.lexpos)
nodoD = t[2]
nodoL = t[3]
nodoF = crear_nodo_general("FROM","",t.lexer.lineno, t.lexer.lexpos)
nodoLC = t[5]
nodoLr = t[6]
nodo.hijos.append(nodoD)
nodo.hijos.append(nodoL)
nodo.hijos.append(nodoF)
nodo.hijos.append(nodoLC)
nodo.hijos.append(nodoLr)
t[0] = nodo
def p_lista_case(t):
'''lcase : lcase case
'''
nodo = t[1]
nodo.hijos.append(t[2])
t[0] = nodo
def p_lista_case_case(t):
'''lcase : case
'''
nodo = crear_nodo_general("lcase","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_instruccion_case(t):
    '''
    case : WHEN expre THEN expre
         | ELSE expre
    '''
    # Attach the WHEN/THEN (or ELSE) expressions as children, mirroring the
    # sibling rules.
    nodo = crear_nodo_general("CASE",t[1],t.lexer.lineno, t.lexer.lexpos)
    nodo.hijos.append(t[2])
    if len(t) == 5:
        nodo.hijos.append(t[4])
    t[0] = nodo
def p_instruccion_lrows(t):
'''lrows : lrows rows
'''
nodo = t[1]
nodo.hijos.append(t[2])
t[0] = nodo
def p_instruccion_lrows2(t):
'''lrows : rows
'''
nodo = crear_nodo_general("lrows","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_dist(t):
'''dist : DISTINCT
'''
t[0] = crear_nodo_general("DISTINCT",t[1],t.lexer.lineno, t.lexer.lexpos)
def p_dist1(t):
'''dist :
'''
t[0] = None
def p_instruccion_rows(t):
'''
rows : ORDER BY l_expresiones
| ORDER BY l_expresiones DESC
| ORDER BY l_expresiones ASC
| ORDER BY l_expresiones NULLS FIRST
| ORDER BY l_expresiones NULLS LAST
| GROUP BY l_expresiones
| HAVING lcol
| LIMIT ENTERO
'''
if t[1] == "ORDER":
nodo = crear_nodo_general("ORDER BY","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[3])
if len(t) == 5:
nodo.hijos.append(crear_nodo_general("ORDER",t[4],t.lexer.lineno, t.lexer.lexpos))
if len(t) == 6:
nodo.hijos.append(crear_nodo_general("NULLS",t[5],t.lexer.lineno, t.lexer.lexpos))
if t[1] == "GROUP":
nodo = crear_nodo_general("GROUP BY","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[3])
if t[1] == "HAVING":
nodo = crear_nodo_general("HAVING","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[2])
if t[1] == "LIMIT":
nodo = crear_nodo_general("LIMIT","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ENTERO",t[2],t.lexer.lineno, t.lexer.lexpos))
def p_instruccion_row2(t):
'''rows : LIMIT ENTERO OFFSET ENTERO'''
    # LIMIT(limit, rows_to_skip, line, column)
nodo = crear_nodo_general("LIMIT","",t.lexer.lineno, t.lexer.lexpos)
nodoE = crear_nodo_general("ENTERO",t[2],t.lexer.lineno, t.lexer.lexpos)
nodoO = crear_nodo_general("OFFSET","",t.lexer.lineno, t.lexer.lexpos)
nodoEn = crear_nodo_general("ENTERO",t[4],t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(nodoE)
nodo.hijos.append(nodoO)
nodo.hijos.append(nodoEn)
t[0] = nodo
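# Example accepted by this rule: SELECT * FROM t LIMIT 10 OFFSET 5;
# (at most 10 rows, after skipping the first 5).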
def p_linner_join(t):
'''linners : linners inners
'''
nodo = t[1]
nodo.hijos.append(t[2])
t[0] = nodo
def p_linner_join2(t):
'''linners : inners
'''
nodo = crear_nodo_general("linners","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_inner_join(t):
    '''
    inners : INNER JOIN expre nombre ON expre
           | LEFT JOIN expre nombre ON expre
           | FULL OUTER JOIN expre nombre ON expre
           | JOIN expre nombre ON expre
           | RIGHT JOIN expre nombre ON expre
    '''
    # Build a generic join node from the non-keyword children (expre/nombre);
    # this shape is an assumption mirroring the sibling rules, since the Join*
    # classes imported above are not wired in here.
    nodo = crear_nodo_general("inners","JOIN",t.lexer.lineno, t.lexer.lexpos)
    for i in range(1, len(t)):
        if not isinstance(t[i], str):
            nodo.hijos.append(t[i])
    t[0] = nodo
def p_operadores_logicos(t):
''' expre : expre OR expre
| expre AND expre
'''
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
if t[2] == "OR":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("OR","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
if t[2] == "AND":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("AND","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
def p_operadores_unarios(t):
''' expre : NOT expre
'''
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("NOT","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[2])
t[0] = nodo
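# NOTE: the binary-operator actions here compare t[2] against token *names*
# ("IGUAL", "MAS", "OR", ...). This assumes the lexer stores the token name,
# or the uppercase keyword text, as the token value; if it yields the raw
# lexeme ("=", "+"), those branches never match and the node gets no children.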
def p_operadores_relacionales(t):
''' expre : expre IGUAL expre
| expre MAYORQ expre
| expre MENORQ expre
| expre MAYOR_IGUALQ expre
| expre MENOR_IGUALQ expre
| expre DISTINTO expre
'''
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
if t[2] == "IGUAL":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("IGUAL","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
if t[2] == "MAYORQ":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("MAYORQ","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
if t[2] == "MENORQ":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("MENORQ","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
if t[2] == "MAYOR_IGUALQ":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("MAYOR_IGUALQ","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
if t[2] == "MENOR_IGUALQ":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("MENOR_IGUALQ","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
if t[2] == "DISTINTO":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("DISTINTO","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
def p_operadores_aritmeticos(t):
'''expre : expre MAS expre
| expre MENOS expre
| expre POR expre
| expre DIVIDIDO expre
| expre EXPONENCIACION expre
| expre MODULO expre
'''
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
if t[2] == "MAS":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("MAS","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
if t[2] == "MENOS":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("MENOS","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
if t[2] == "POR":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("POR","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
if t[2] == "DIVIDIDO":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("DIVIDIDO","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
if t[2] == "EXPONENCIACION":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("EXPONENCIACION","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
if t[2] == "MODULO":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("MODULO","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
def p_operador_unario(t):
'expre : MENOS expre %prec UMENOS'
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
nodoM = crear_nodo_general("MENOS",t[1],t.lexer.lineno, t.lexer.lexpos)
nodoE = t[2]
    nodo.hijos.append(nodoM)
    nodo.hijos.append(nodoE)
t[0] = nodo
def p_operadores_like(t):
'''expre : expre LIKE expre
| expre NOT LIKE expre
'''
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
if t[2] == "LIKE":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("LIKE","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
else:
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("NOT LIKE","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[4])
t[0] = nodo
#t[0] = PatternMatching(t[1], t[3], 'LIKE', t.lexer.lineno, t.lexer.lexpos) if t[2] == 'LIKE' else PatternMatching(t[1], t[3], 'NOT_LIKE', t.lexer.lineno, t.lexer.lexpos)
def p_operadores_between(t):
'''expre : expre BETWEEN expresion AND expresion
| expre NOT BETWEEN expresion AND expresion
'''
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
if t[2] == "NOT":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("NOT BETWEEN","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[4])
nodo.hijos.append(crear_nodo_general("AND","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[6])
t[0] = nodo
else:
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("BETWEEN","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(crear_nodo_general("AND","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[5])
t[0] = nodo
#t[0] = Between(t[1], t[3], t[5], 'BETWEEN', t.lexer.lineno, t.lexer.lexpos) if t[2] == 'LIKE' else Between(t[1], t[4], t[5], 'NOT_BETWEEN', t.lexer.lineno, t.lexer.lexpos)
def p_operadores_in(t):
'''expre : expre IN expre
| expre NOT IN expre
'''
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
if t[2] == "NOT":
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("NOT IN","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[4])
t[0] = nodo
else:
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("IN","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
def p_operadores_is(t):
'''expre : expre IS NULL
| expre IS NOT NULL
| expre IS DISTINCT FROM expre
| expre IS NOT DISTINCT FROM expre
'''
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
if t[3] == 'NULL':
nodo.hijos.append(crear_nodo_general("IS NULL","",t.lexer.lineno, t.lexer.lexpos))
if t[3] == 'DISTINCT':
nodo.hijos.append(crear_nodo_general("IS DISTINCT FROM","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[5])
elif t[3] == 'NOT' and t[4] == 'NULL':
nodo.hijos.append(crear_nodo_general("IS NOT NULL","",t.lexer.lineno, t.lexer.lexpos))
elif t[3] == 'NOT' and t[4] == 'DISTINCT':
nodo.hijos.append(crear_nodo_general("IS NOT DISTINCT FROM","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[6])
t[0] = nodo
def p_operadores_agregacion(t):
'''expre : AVG PARIZQ expre PARDER
| COUNT PARIZQ expre PARDER
| GREATEST PARIZQ lcol PARDER
| LEAST PARIZQ lcol PARDER
| MAX PARIZQ expre PARDER
| MIN PARIZQ expre PARDER
| SUM PARIZQ expre PARDER
| TOP PARIZQ expre PARDER
'''
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
if t[1] == 'AVG':
nodo.hijos.append(crear_nodo_general("AVG",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
pass
elif t[1] == 'COUNT':
nodo.hijos.append(crear_nodo_general("COUNT",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
pass
elif t[1] == 'GREATEST':
nodo.hijos.append(crear_nodo_general("GREATEST",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
pass
elif t[1] == 'LEAST':
nodo.hijos.append(crear_nodo_general("LEAST",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
pass
elif t[1] == 'MAX':
nodo.hijos.append(crear_nodo_general("MAX",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
pass
elif t[1] == 'MIN':
nodo.hijos.append(crear_nodo_general("MIN",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
pass
elif t[1] == 'SUM':
nodo.hijos.append(crear_nodo_general("SUM",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
pass
elif t[1] == 'TOP':
nodo.hijos.append(crear_nodo_general("TOP",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
pass
def p_operadores_matematica(t):
'''expre : ABS PARIZQ expre PARDER
| CBRT PARIZQ expre PARDER
| CEIL PARIZQ expre PARDER
| CEILING PARIZQ expre PARDER
| DEGREES PARIZQ expre PARDER
| DIV PARIZQ expre COMA expre PARDER
| EXP PARIZQ expre PARDER
| FACTORIAL PARIZQ expre PARDER
| FLOOR PARIZQ expre PARDER
| GCD PARIZQ expre COMA expre PARDER
| LCM PARIZQ expre PARDER
| LN PARIZQ expre PARDER
| LOG PARIZQ expre PARDER
| LOG10 PARIZQ expre PARDER
| MIN_SCALE PARIZQ expre PARDER
| MOD PARIZQ expre COMA expre PARDER
| PI PARIZQ PARDER
| POWER PARIZQ expre COMA expre PARDER
| RADIANS PARIZQ expre PARDER
| RANDOM PARIZQ PARDER
| ROUND PARIZQ expre PARDER
| SCALE PARIZQ expre PARDER
| SETSEED PARIZQ expre PARDER
| SIGN PARIZQ expre PARDER
| SQRT PARIZQ expre PARDER
| TRIM_SCALE PARIZQ expre PARDER
| TRUNC PARIZQ expre PARDER
| WIDTH_BUCKET PARIZQ expresion COMA expresion COMA expresion COMA expresion PARDER
'''
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
if t[1] == 'ABS':
nodo.hijos.append(crear_nodo_general("ABS",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'CBRT':
nodo.hijos.append(crear_nodo_general("CBRT",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'CEIL':
nodo.hijos.append(crear_nodo_general("CEIL",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'CEILING':
nodo.hijos.append(crear_nodo_general("CEILING",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'DEGREES':
nodo.hijos.append(crear_nodo_general("DEGREES",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'DIV':
nodo.hijos.append(crear_nodo_general("DIV",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(t[5])
t[0] = nodo
elif t[1] == 'EXP':
nodo.hijos.append(crear_nodo_general("EXP",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'FACTORIAL':
nodo.hijos.append(crear_nodo_general("FACTORIAL",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'FLOOR':
nodo.hijos.append(crear_nodo_general("FLOOR",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'GCD':
nodo.hijos.append(crear_nodo_general("GCD",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(t[5])
t[0] = nodo
elif t[1] == 'LCM':
nodo.hijos.append(crear_nodo_general("LCM",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'LN':
nodo.hijos.append(crear_nodo_general("LN",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'LOG':
nodo.hijos.append(crear_nodo_general("LOG",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'LOG10':
nodo.hijos.append(crear_nodo_general("LOG10",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'MIN_SCALE':
nodo.hijos.append(crear_nodo_general("MIN_SCALE",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'MOD':
nodo.hijos.append(crear_nodo_general("MOD",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(t[5])
t[0] = nodo
elif t[1] == 'PI':
nodo.hijos.append(crear_nodo_general("PI",t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
elif t[1] == 'POWER':
nodo.hijos.append(crear_nodo_general("POWER",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(t[5])
t[0] = nodo
elif t[1] == 'RADIANS':
nodo.hijos.append(crear_nodo_general("RADIANS",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'RANDOM':
nodo.hijos.append(crear_nodo_general("RANDOM",t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
elif t[1] == 'ROUND':
nodo.hijos.append(crear_nodo_general("ROUND",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'SCALE':
nodo.hijos.append(crear_nodo_general("SCALE",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'SETSEED':
nodo.hijos.append(crear_nodo_general("SETSEED",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'SIGN':
nodo.hijos.append(crear_nodo_general("SIGN",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'SQRT':
nodo.hijos.append(crear_nodo_general("SQRT",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'TRIM_SCALE':
nodo.hijos.append(crear_nodo_general("TRIM_SCALE",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'TRUNC':
nodo.hijos.append(crear_nodo_general("TRUNC",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'WIDTH_BUCKET':
nodo.hijos.append(crear_nodo_general("WIDTH_BUCKET",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(t[5])
nodo.hijos.append(t[7])
nodo.hijos.append(t[9])
t[0] = nodo
def p_operadores_binarias(t):
''' expre : CONVERT PARIZQ expre AS tipo PARDER
| DECODE PARIZQ expre COMA expre PARDER
| ENCODE PARIZQ expre COMA expre PARDER
| GET_BYTE PARIZQ expre COMA ENTERO PARDER
| LENGTH PARIZQ expre PARDER
| MD5 PARIZQ expre PARDER
| SET_BYTE PARIZQ expre COMA ENTERO COMA ENTERO PARDER
| SHA256 PARIZQ expre PARDER
| SUBSTR PARIZQ expre COMA ENTERO COMA ENTERO PARDER
| SUBSTRING PARIZQ expre COMA ENTERO COMA ENTERO PARDER
| TRIM PARIZQ expre PARDER
'''
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
if t[1] == 'CONVERT':
nodo.hijos.append(crear_nodo_general("CONVERT",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(t[5])
t[0] = nodo
    elif t[1] == 'DECODE':
        nodo.hijos.append(crear_nodo_general("DECODE",t[1],t.lexer.lineno, t.lexer.lexpos))
        nodo.hijos.append(t[3])
        nodo.hijos.append(t[5])
        t[0] = nodo
    elif t[1] == 'ENCODE':
        nodo.hijos.append(crear_nodo_general("ENCODE",t[1],t.lexer.lineno, t.lexer.lexpos))
        nodo.hijos.append(t[3])
        nodo.hijos.append(t[5])
        t[0] = nodo
elif t[1] == 'GET_BYTE':
nodo.hijos.append(crear_nodo_general("GET_BYTE",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(crear_nodo_general("ENTERO",t[5],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
elif t[1] == 'LENGTH':
nodo.hijos.append(crear_nodo_general("LENGTH",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'MD5':
nodo.hijos.append(crear_nodo_general("MD5",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'SET_BYTE':
nodo.hijos.append(crear_nodo_general("GET_BYTE",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(crear_nodo_general("ENTERO",t[5],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("ENTERO",t[7],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
elif t[1] == 'SHA256':
nodo.hijos.append(crear_nodo_general("SHA256",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
elif t[1] == 'SUBSTR':
nodo.hijos.append(crear_nodo_general("SUBSTR",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(crear_nodo_general("ENTERO",t[5],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("ENTERO",t[7],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
elif t[1] == 'SUBSTRING':
nodo.hijos.append(crear_nodo_general("SUBSTRING",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(crear_nodo_general("ENTERO",t[5],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("ENTERO",t[7],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
elif t[1] == 'TRIM':
nodo.hijos.append(crear_nodo_general("TRIM",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
def p_operadores_trigonometricas(t):
''' expre : ACOS PARIZQ expre PARDER
| ACOSD PARIZQ expre PARDER
| ACOSH PARIZQ expre PARDER
| ASIN PARIZQ expre PARDER
| ASIND PARIZQ expre PARDER
| ASINH PARIZQ expre PARDER
| ATAN PARIZQ expre PARDER
| ATAN2 PARIZQ expre COMA expre PARDER
| ATAN2D PARIZQ expre COMA expre PARDER
| ATAND PARIZQ expre PARDER
| ATANH PARIZQ expre PARDER
| COS PARIZQ expre PARDER
| COSD PARIZQ expre PARDER
| COSH PARIZQ expre PARDER
| COT PARIZQ expre PARDER
| COTD PARIZQ expre PARDER
| SIN PARIZQ expre PARDER
| SIND PARIZQ expre PARDER
| SINH PARIZQ expre PARDER
| TAN PARIZQ expre PARDER
| TAND PARIZQ expre PARDER
| TANH PARIZQ expre PARDER
'''
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
if len(t) == 5:
nodo.hijos.append(crear_nodo_general(t[1],t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
else:
nodo.hijos.append(crear_nodo_general(t[1],t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(t[5])
t[0] = nodo
def p_cadena_o_caracter(t):
    '''
    cadena_o_caracter : CADENA
                      | CARACTER
    '''
    # Wrap the literal in a node so callers can append it as a child.
    t[0] = crear_nodo_general("cadena_o_caracter",t[1],t.lexer.lineno, t.lexer.lexpos)
def p_operadores_otros(t):
''' expre : EXTRACT PARIZQ tiempo FROM TIMESTAMP cadena_o_caracter PARDER
| NOW PARIZQ PARDER
| DATE_PART PARIZQ cadena_o_caracter COMA INTERVAL cadena_o_caracter PARDER
| CURRENT_DATE
| CURRENT_TIME
| TIMESTAMP cadena_o_caracter
| CASE lcase END
'''
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
if t[1] == 'EXTRACT':
nodo.hijos.append(crear_nodo_general("EXTRACT",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(crear_nodo_general("FROM TIMESTAMP","",t.lexer.lineno, t.lexer.lexpos))
        nodo.hijos.append(t[6])
t[0] = nodo
elif t[1] == 'NOW':
nodo.hijos.append(crear_nodo_general("NOW",t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
elif t[1] == 'DATE_PART':
nodo.hijos.append(crear_nodo_general("DATE_PART",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(t[6])
t[0] = nodo
elif t[1] == 'CURRENT_DATE':
nodo.hijos.append(crear_nodo_general("CURRENT_DATE",t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
elif t[1] == 'CURRENT_TIME':
nodo.hijos.append(crear_nodo_general("CURRENT_TIME",t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
elif t[1] == 'TIMESTAMP':
nodo.hijos.append(crear_nodo_general("TIMESTAMP",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[2])
t[0] = nodo
elif t[1] == 'CASE':
nodo.hijos.append(crear_nodo_general("CASE","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[2])
t[0] = nodo
def p_operadores_parentesis(t):
''' expre : PARIZQ expre PARDER
| PARIZQ query PARDER
'''
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[2])
t[0] = nodo
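# The second alternative allows a full query between parentheses, which is what
# lets scalar subqueries such as WHERE id = (SELECT MAX(id) FROM t) parse.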
def p_operadores_logicos5(t):
''' expre : expresion
'''
nodo = crear_nodo_general("expre","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_tiempo(t):
    ''' tiempo : YEAR
               | MONTH
               | DAY
               | HOUR
               | MINUTE
               | SECOND
    '''
    t[0] = crear_nodo_general("tiempo",t[1],t.lexer.lineno, t.lexer.lexpos)
def p_campos_tablas(t):
'''campos : campos COMA ID tipo lista_op
'''
    # WORK IN PROGRESS on this rule
nodo = t[1]
nodo.hijos.append(crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[4])
nodo.hijos.append(t[5])
t[0] = nodo
def p_campos_tablas1(t):
'''campos : campos COMA ID tipo
'''
nodo = t[1]
nodo.hijos.append(crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[4])
t[0] = nodo
#def p_campos_tablas2(t):
# '''campos : campos COMA CHECK expre
# '''
#STILL WORKING HERE
# t[1].append(Tipo_Constraint(None,Tipo_Dato_Constraint.CHECK,t[4]))
# t[0] = t[1]
#def p_campos_tablas3(t):
# '''campos : campos COMA CONSTRAINT ID CHECK expre
# '''
# t[1].append(Tipo_Constraint(t[4],Tipo_Dato_Constraint.CHECK,t[4]))
# t[0] = t[1]
def p_campos_tablas4(t):
'''campos : campos COMA UNIQUE PARIZQ lista_id PARDER
'''
nodo = t[1]
nodo.hijos.append(crear_nodo_general("UNIQUE","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[5])
t[0] = nodo
def p_campos_tablas5(t):
'''campos : campos COMA FOREIGN KEY PARIZQ lista_id PARDER REFERENCES ID PARIZQ lista_id PARDER
'''
nodo = t[1]
nodo.hijos.append(crear_nodo_general("FOREIGN KEY","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[6])
nodo.hijos.append(crear_nodo_general("REFERENCES","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("ID",t[9],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[11])
t[0] = nodo
def p_campos_tablas6(t):
'''campos : campos COMA PRIMARY KEY PARIZQ lista_id PARDER
'''
nodo = t[1]
nodo.hijos.append(crear_nodo_general("PRIMARY KEY","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[6])
t[0] = nodo
def p_campos_tablas7(t):
'''campos : ID tipo lista_op
'''
nodo = crear_nodo_general("campos","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ID",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[2])
nodo.hijos.append(t[3])
t[0] = nodo
def p_campos_tablas8(t):
'''campos : ID tipo
'''
nodo = crear_nodo_general("campos","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ID",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[2])
t[0] = nodo
def p_lista_id1(t):
'''lista_id : lista_id COMA ID
'''
nodo = t[1]
nodo.hijos.append(t[3])
t[0] = nodo
def p_lista_id2(t):
'''lista_id : ID
'''
nodo = crear_nodo_general("lista_id","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_lista_op1(t):
'''lista_op : lista_op opcion
'''
nodo = t[1]
nodo.hijos.append(t[2])
t[0] = nodo
def p_lista_op2(t):
'''lista_op : opcion
'''
nodo = crear_nodo_general("lista_op","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_opcion(t):
'''opcion : PRIMARY KEY
'''
nodo = crear_nodo_general("opcion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("PRIMARY KEY","",t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_opcion1(t):
'''opcion : REFERENCES ID PARIZQ lista_id PARDER
'''
nodo = crear_nodo_general("opcion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("REFERENCES","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("ID",t[2],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[4])
t[0] = nodo
def p_opcion2(t):
'''opcion : DEFAULT expresion
'''
nodo = crear_nodo_general("opcion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("DEFAULT","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[2])
t[0] = nodo
def p_opcion3(t):
'''opcion : NOT NULL
'''
nodo = crear_nodo_general("opcion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("NOT NULL","",t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_opcion4(t):
'''opcion : NULL
'''
nodo = crear_nodo_general("opcion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("NULL","",t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_opcion5(t):
'''opcion : UNIQUE
'''
nodo = crear_nodo_general("opcion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("UNIQUE","",t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_opcion6(t):
'''opcion : CONSTRAINT ID UNIQUE
'''
nodo = crear_nodo_general("opcion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("CONSTRAINT","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("ID",t[2],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("UNIQUE","",t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_opcion7(t):
'''opcion : CONSTRAINT ID CHECK expre
'''
nodo = crear_nodo_general("opcion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("CONSTRAINT","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("ID",t[2],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("CHECK","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[4])
t[0] = nodo
def p_opcion8(t):
'''opcion : CHECK expre
'''
nodo = crear_nodo_general("opcion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("CHECK","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[2])
t[0] = nodo
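# Together these options cover the inline column constraints, e.g.:
#   id INTEGER PRIMARY KEY NOT NULL,
#   precio DECIMAL CHECK (precio > 0) DEFAULT 0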
def p_lista_expresiones(t):
'''
l_expresiones : l_expresiones COMA expre
'''
nodo = t[1]
nodo.hijos.append(t[3])
t[0] = nodo
def p_lista_expresiones1(t):
'''
l_expresiones : expre
'''
nodo = crear_nodo_general("l_expresiones","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_expresion(t):
'''
expresion : CADENA
'''
nodo = crear_nodo_general("expresion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("CADENA",t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_expresion1(t):
'''expresion : CARACTER
'''
nodo = crear_nodo_general("expresion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("CARACTER",t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_expresion2(t):
'''expresion : ENTERO
'''
nodo = crear_nodo_general("expresion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ENTERO",t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_expresion3(t):
'''expresion : FDECIMAL
'''
nodo = crear_nodo_general("expresion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("FDECIMAL",t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_expresion4(t):
'''expresion : DOUBLE
'''
nodo = crear_nodo_general("expresion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("DOUBLE",t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_expresion5(t):
'''expresion : ID
'''
nodo = crear_nodo_general("expresion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ID",t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_expresion61(t):
'''expresion : ID PUNTO ID
'''
nodo = crear_nodo_general("expresion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ID.ID",t[1] + "." + t[3],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_expresion62(t):
'''expresion : ID PUNTO POR
'''
nodo = crear_nodo_general("expresion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ID.*",t[1] + ".*",t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_expresion7(t):
'''expresion : ARROBA ID
'''
nodo = crear_nodo_general("expresion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("@ID","@" + t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_expresion8(t):
'''expresion : ID PARIZQ lcol PARDER
'''
nodo = crear_nodo_general("expresion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ID",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
def p_expresion9(t):
'''expresion : TRUE
'''
nodo = crear_nodo_general("expresion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("TRUE",t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_expresion10(t):
'''expresion : FALSE
'''
nodo = crear_nodo_general("expresion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("FALSE",t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_lista_columas(t):
'''lcol : lcol COMA expre
'''
nodo = t[1]
nodo.hijos.append(t[3])
t[0] = nodo
def p_lista_columas1(t):
'''lcol : lcol COMA expre nombre
'''
nodo = t[1]
nodo.hijos.append(t[3])
nodo.hijos.append(t[4])
t[0] = nodo
def p_lista_columas2(t):
'''lcol : lcol COMA expre AS nombre
'''
nodo = t[1]
nodo.hijos.append(t[3])
nodo.hijos.append(t[5])
t[0] = nodo
def p_lista_columas01(t):
'''lcol : POR
'''
nodo = crear_nodo_general("lcol",t[1],t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("POR",t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_lista_columas3(t):
'''lcol : expre
'''
nodo = crear_nodo_general("lcol",t[1],t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_lista_columas4(t):
'''lcol : expre nombre
'''
nodo = crear_nodo_general("lcol",t[1],t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
nodo.hijos.append(t[2])
t[0] = nodo
def p_lista_columas5(t):
'''lcol : expre AS nombre
'''
nodo = crear_nodo_general("lcol",t[1],t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
    nodo.hijos.append(t[3])
t[0] = nodo
def p_nombre(t):
'''nombre : ID
| CADENA
| CARACTER
'''
t[0] = crear_nodo_general("nombre",t[1],t.lexer.lineno, t.lexer.lexpos)
#---------------------- DATA TYPES ---------------------------------
def p_tipo_datos(t):
'''tipo : INT
'''
nodo = crear_nodo_general("tipo",t[1],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos1(t):
'''tipo : DATE
'''
nodo = crear_nodo_general("tipo",t[1],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
# I don't remember why we implemented this part, so I commented it out
#def p_tipo_datos2(t):
# '''tipo : ID PARIZQ ID PARDER
# '''
# t[0]=t[1]
def p_tipo_datos_varchar(t):
'''tipo : VARCHAR PARIZQ ENTERO PARDER
'''
nodo = crear_nodo_general("tipo",str(t[1]) + str(t[2]) + str(t[3]) + str(t[4]),t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos_varchar1(t):
'''tipo : CHAR PARIZQ ENTERO PARDER
'''
nodo = crear_nodo_general("tipo",str(t[1]) + str(t[2]) + str(t[3]) + str(t[4]),t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos_varchar2(t):
'''tipo : CHARACTER VARYING PARIZQ ENTERO PARDER
'''
nodo = crear_nodo_general("tipo","CHARACTER VARYING" + str(t[3]) + str(t[4]) + str(t[5]),t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos_varchar3(t):
'''tipo : CHARACTER PARIZQ ENTERO PARDER
'''
nodo = crear_nodo_general("tipo",str(t[1]) + str(t[2]) + str(t[3]) + str(t[4]),t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos_varchar4(t):
'''tipo : TEXT
'''
nodo = crear_nodo_general("tipo",t[1],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
#THIS CASE WAS NOT COVERED IN MAEDA'S GRAMMAR
def p_tipo_datos_decimal(t):
'''tipo : DECIMAL PARIZQ ENTERO COMA ENTERO PARDER
'''
nodo = crear_nodo_general("tipo",str(t[1]) + str(t[2]) + str(t[3]) + str(t[4]) + str(t[5]) + str(t[6]),t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
#def p_tipo_datos_decimal1(t):
# '''tipo : DOUBLE
# '''
# t[0] = Tipo("",Tipo_Dato.DOUBLE_PRECISION)
def p_tipo_datos_decimal2(t):
'''tipo : DECIMAL
'''
nodo = crear_nodo_general("tipo",t[1],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
#THIS CASE WAS NOT COVERED IN THE GRAMMAR
#def p_tipo_datos_decimal3(t):
# '''tipo : FLOAT PARIZQ ENTERO COMA ENTERO PARDER
# '''
# t[0]=
#THIS ONE NEEDS REVIEW; I THINK IT IS REDUNDANT HERE
#def p_tipo_datos_int(t):
# '''tipo : ENTERO
# '''
# t[0]=Tipo("",Tipo_Dato.INTEGER)
def p_tipo_datos_int1(t):
'''tipo : SMALLINT
'''
nodo = crear_nodo_general("tipo",t[1],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos_int2(t):
'''tipo : INTEGER
'''
nodo = crear_nodo_general("tipo",t[1],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos_int3(t):
'''tipo : BIGINT
'''
nodo = crear_nodo_general("tipo",t[1],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos_int4(t):
'''tipo : NUMERIC
'''
nodo = crear_nodo_general("tipo",t[1],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos_int5(t):
'''tipo : REAL
'''
nodo = crear_nodo_general("tipo",t[1],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos_int6(t):
'''tipo : DOUBLE PRECISION
'''
nodo = crear_nodo_general("tipo","DOUBLE PRECISION",t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos_int7(t):
'''tipo : MONEY
'''
nodo = crear_nodo_general("tipo",t[1],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos_int8(t):
'''tipo : BOOLEAN
'''
nodo = crear_nodo_general("tipo",t[1],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos_date(t):
'''tipo : TIMESTAMP
'''
nodo = crear_nodo_general("tipo",t[1],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos_date1(t):
'''tipo : TIME
'''
nodo = crear_nodo_general("tipo",t[1],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos_date2(t):
'''tipo : INTERVAL
'''
nodo = crear_nodo_general("tipo",t[1],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
def p_tipo_datos2(t):
'''tipo : ID
'''
nodo = crear_nodo_general("tipo",t[1],t.lexer.lineno, t.lexer.lexpos)
t[0] = nodo
########################################### GRAMMAR PHASE 2 ########################################
def p_exect_func_pro(t):
'''
instruccion : EXECUTE ID PARIZQ l_expresiones PARDER PUNTO_COMA
'''
nodo = crear_nodo_general("EXECUTE","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ID",t[2],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[4])
t[0] = nodo
def p_procedimiento(t):
'''
instruccion : CREATE PROCEDURE ID PARIZQ parametros_funcion PARDER LANGUAGE PLPGSQL AS DOLLAR DOLLAR declaraciones_funcion BEGIN contenido_funcion END PUNTO_COMA DOLLAR DOLLAR
'''
nodo = crear_nodo_general("CREATE PROCEDURE","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[5])
nodo.hijos.append(t[12])
nodo.hijos.append(t[14])
t[0] = nodo
#DECLARATION OF A FUNCTION
def p_funciones(t):
'''
instruccion : CREATE FUNCTION ID PARIZQ parametros_funcion PARDER returns_n retorno_funcion declaraciones_funcion BEGIN contenido_funcion END PUNTO_COMA DOLLAR DOLLAR LANGUAGE PLPGSQL PUNTO_COMA
'''
nodo = crear_nodo_general("CREATE FUNCTION","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[5])
nodo.hijos.append(t[7])
nodo.hijos.append(t[8])
nodo.hijos.append(t[9])
nodo.hijos.append(t[11])
t[0] = nodo
def p_funciones_drop(t):
'''
instruccion : DROP FUNCTION if_op ID PUNTO_COMA
'''
nodo = crear_nodo_general("DROP FUNCTION","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[3])
nodo.hijos.append(crear_nodo_general("ID",t[4],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_procedimientos_drop(t):
'''
instruccion : DROP PROCEDURE if_op ID PUNTO_COMA
'''
nodo = crear_nodo_general("DROP PROCEDURE","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[3])
nodo.hijos.append(crear_nodo_general("ID",t[4],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_parametros_funcion(t):
'''
parametros_funcion : lista_parametros_funcion
'''
t[0] = t[1]
def p_parametros_funcion_e(t):
'''
parametros_funcion :
'''
t[0] = None
def p_lista_parametros_funcion(t):
'''
lista_parametros_funcion : lista_parametros_funcion COMA parametro_fucion
'''
nodo = t[1]
nodo.hijos.append(t[3])
t[0] = nodo
def p_lista_parametros_funcion2(t):
'''
lista_parametros_funcion : parametro_fucion
'''
nodo = crear_nodo_general("lista_parametros_funcion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_parametro_fucion(t):
'''
parametro_fucion : ID tipo
| tipo
'''
nodo = crear_nodo_general("parametro_fucion","",t.lexer.lineno, t.lexer.lexpos)
if len(t) == 3:
nodo.hijos.append(crear_nodo_general("ID",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[2])
else:
nodo.hijos.append(t[1])
t[0] = nodo
def p_returns(t):
'''
returns_n : RETURNS
'''
t[0] = crear_nodo_general("RETURNS","",t.lexer.lineno, t.lexer.lexpos)
def p_returns_e(t):
'''
returns_n :
'''
t[0] = None
def p_retorno_funcion(t):
'''
retorno_funcion : tipo AS DOLLAR DOLLAR
| TABLE PARIZQ lista_campos_tabla PARDER AS DOLLAR DOLLAR
| AS DOLLAR DOLLAR
'''
nodo = crear_nodo_general("retorno_funcion","",t.lexer.lineno, t.lexer.lexpos)
if len(t) == 4:
nodo.hijos.append(crear_nodo_general("AS $$","",t.lexer.lineno, t.lexer.lexpos))
elif len(t) == 5:
nodo.hijos.append(t[1])
nodo.hijos.append(crear_nodo_general("AS $$","",t.lexer.lineno, t.lexer.lexpos))
else:
nodo.hijos.append(crear_nodo_general("TABLA","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(crear_nodo_general("AS $$","",t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_lista_campos_tabla(t):
'''
lista_campos_tabla : lista_campos_tabla COMA ID tipo
'''
nodo = t[1]
nodo.hijos.append(crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[4])
t[0] = nodo
def p_lista_campos_tabla2(t):
'''
lista_campos_tabla : ID tipo
'''
nodo = crear_nodo_general("lista_campos_tabla","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ID",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[2])
t[0] = nodo
def p_declaraciones_funcion(t):
'''
declaraciones_funcion : DECLARE list_dec_var_funcion
'''
nodo = crear_nodo_general("DECLARE","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[2])
t[0] = nodo
def p_declaraciones_funcion_e(t):
'''
declaraciones_funcion :
'''
t[0] = None
def p_list_dec_var_funcion(t):
'''
list_dec_var_funcion : list_dec_var_funcion dec_var_funcion PUNTO_COMA
'''
nodo = t[1]
nodo.hijos.append(t[2])
t[0] = nodo
def p_list_dec_var_funcion2(t):
'''
list_dec_var_funcion : dec_var_funcion PUNTO_COMA
'''
nodo = crear_nodo_general("list_dec_var_funcion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_dec_var_funcion(t):
'''
dec_var_funcion : ID constant_n tipo nnull aisgnacion_valor
'''
nodo = crear_nodo_general("dec_var_funcion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ID",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[2])
nodo.hijos.append(t[3])
nodo.hijos.append(t[4])
nodo.hijos.append(t[5])
t[0] = nodo
def p_dec_var_funcion2(t):
'''
dec_var_funcion : ID ALIAS FOR DOLLAR ENTERO
| ID ALIAS FOR ID
'''
nodo = crear_nodo_general("dec_var_funcion","",t.lexer.lineno, t.lexer.lexpos)
if len(t) == 5:
nodo.hijos.append(crear_nodo_general("ID",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("ALIAS FOR","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("ID",t[4],t.lexer.lineno, t.lexer.lexpos))
else:
nodo.hijos.append(crear_nodo_general("ID",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("ALIAS FOR $","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("ENTERO",t[5],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_dec_var_funcion3(t):
'''
dec_var_funcion : ID tabla_typerow MODULO type_row
'''
nodo = crear_nodo_general("dec_var_funcion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ID",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[2])
nodo.hijos.append(crear_nodo_general("MODULO","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[4])
t[0] = nodo
def p_tabla_typerow(t):
'''
tabla_typerow : ID PUNTO ID
| ID
'''
nodo = crear_nodo_general("tabla_typerow","",t.lexer.lineno, t.lexer.lexpos)
if len(t) == 4:
nodo.hijos.append(crear_nodo_general("ID.ID",str(t[1]) + str(t[2]) + str(t[3]),t.lexer.lineno, t.lexer.lexpos))
else:
nodo.hijos.append(crear_nodo_general("ID",t[1],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_type_row(t):
'''
type_row : TYPE
| ROWTYPE
'''
t[0] = crear_nodo_general("type_row",t[1],t.lexer.lineno, t.lexer.lexpos)
def p_constant(t):
'''
constant_n : CONSTANT
'''
t[0] = crear_nodo_general("CONSTANT",t[1],t.lexer.lineno, t.lexer.lexpos)
def p_constant_e(t):
'''
constant_n :
'''
t[0] = None
def p_nnull(t):
'''
nnull : NOT NULL
'''
t[0] = crear_nodo_general("NOT NULL",t[1],t.lexer.lineno, t.lexer.lexpos)
def p_nnull_e(t):
'''
nnull :
'''
t[0] = None
def p_aisgnacion_valor(t):
'''
aisgnacion_valor : DEFAULT expre
| DOSP_IGUAL expre
| IGUAL expre
'''
nodo = crear_nodo_general("aisgnacion_valor","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general(t[1],"",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[2])
t[0] = nodo
def p_aisgnacion_valor_e(t):
'''
aisgnacion_valor :
'''
t[0] = None
def p_contenido_funcion(t):
'''
contenido_funcion : contenido_funcion cont_funcion'''
nodo = t[1]
nodo.hijos.append(t[2])
t[0] = nodo
def p_contenido_funcion2(t):
'''
contenido_funcion : cont_funcion '''
nodo = crear_nodo_general("contenido_funcion","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_cont_funcion(t):
'''
cont_funcion : sentencia_if
| instruccion
| sentencia_retorno
| asignacion_var
'''
t[0] = t[1]
def p_sentencia_retorno(t):
'''
sentencia_retorno : RETURN PUNTO_COMA
| RETURN expre PUNTO_COMA
'''
nodo = crear_nodo_general("sentencia_retorno","",t.lexer.lineno, t.lexer.lexpos)
if len(t) == 3:
nodo.hijos.append(crear_nodo_general("RETURN","",t.lexer.lineno, t.lexer.lexpos))
else:
nodo.hijos.append(crear_nodo_general("RETURN","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[2])
t[0] = nodo
def p_asignacion_var(t):
'''
asignacion_var : ID IGUAL expre PUNTO_COMA
| ID DOSP_IGUAL expre PUNTO_COMA
'''
nodo = crear_nodo_general("asignacion_var","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ID",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("IGUAL",t[2],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
t[0] = nodo
def p_sentencia_if(t):
'''
sentencia_if : IF expre THEN instrucciones_if condicionesif ELSE instrucciones_if END IF PUNTO_COMA
| IF expre THEN instrucciones_if condicionesif END IF PUNTO_COMA
| IF expre THEN instrucciones_if ELSE instrucciones_if END IF PUNTO_COMA
| IF expre THEN instrucciones_if END IF PUNTO_COMA
| CASE ID condiciones_cuando ELSE instrucciones_if END CASE PUNTO_COMA
| CASE ID condiciones_cuando END CASE PUNTO_COMA
| CASE condiciones_cuandoB ELSE instrucciones_if END CASE PUNTO_COMA
| CASE condiciones_cuandoB END CASE PUNTO_COMA
| BEGIN instrucciones_if EXCEPTION WHEN l_identificadores THEN instrucciones_if END PUNTO_COMA
| BEGIN instrucciones_if EXCEPTION WHEN sql_states THEN instrucciones_if END PUNTO_COMA
'''
nodo = crear_nodo_general("sentencia_if","",t.lexer.lineno, t.lexer.lexpos)
if t[1] == "IF" and len(t) == 11:
nodo.hijos.append(t[2])
nodo.hijos.append(t[4])
nodo.hijos.append(t[5])
nodo.hijos.append(crear_nodo_general("ELSE","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[7])
elif t[1] == "IF" and len(t) == 8:
nodo.hijos.append(t[2])
nodo.hijos.append(t[4])
elif t[1] == "IF" and len(t) == 10:
nodo.hijos.append(t[2])
nodo.hijos.append(t[4])
nodo.hijos.append(crear_nodo_general("ELSE","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[6])
elif t[1] == "IF" and len(t) == 9:
nodo.hijos.append(t[2])
nodo.hijos.append(t[4])
nodo.hijos.append(t[5])
elif t[1] == "CASE" and len(t) == 6:
nodo.hijos.append(t[2])
elif t[1] == "CASE" and len(t) == 8:
nodo.hijos.append(t[2])
nodo.hijos.append(crear_nodo_general("ELSE","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[4])
elif t[1] == "CASE" and len(t) == 7:
nodo.hijos.append(crear_nodo_general("ID",t[2],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
elif t[1] == "CASE" and len(t) == 9:
nodo.hijos.append(crear_nodo_general("ID",t[2],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[3])
nodo.hijos.append(crear_nodo_general("ELSE","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[5])
else:
nodo.hijos.append(crear_nodo_general("EXCEPTION","",t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_instrucciones_if(t):
'''
instrucciones_if : instrucciones_if instruccion_if
| instruccion_if
'''
if len(t) == 3:
nodo = t[1]
nodo.hijos.append(t[2])
t[0] = nodo
else:
nodo = crear_nodo_general("instrucciones_if","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_instruccion_if(t):
'''
instruccion_if : cont_funcion
| expre PUNTO_COMA
| RAISE NOTICE CADENA PUNTO_COMA
| RAISE NOTICE CADENA COMA ID PUNTO_COMA
| RAISE NOTICE CARACTER PUNTO_COMA
| RAISE NOTICE CARACTER COMA ID PUNTO_COMA
'''
t[0] = t[1]
def p_condiciones_if(t):
'''
condicionesif : condicionesif condicionif
| condicionif
'''
if len(t) == 3:
nodo = t[1]
nodo.hijos.append(t[2])
t[0] = nodo
else:
nodo = crear_nodo_general("condicionesif","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_condicion_if(t):
'''
condicionif : ELSIF expre THEN instrucciones_if
| ELSEIF expre THEN instrucciones_if
'''
nodo = crear_nodo_general(t[1],"",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[2])
nodo.hijos.append(t[4])
t[0] = nodo
def p_condiciones_cuando(t):
'''
condiciones_cuando : condiciones_cuando condicion_cuando
| condicion_cuando
'''
if len(t) == 3:
nodo = t[1]
nodo.hijos.append(t[2])
t[0] = nodo
else:
nodo = crear_nodo_general("condiciones_cuando","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_condicion_cuando(t):
'''
condicion_cuando : WHEN l_expresiones THEN instrucciones_if
'''
nodo = crear_nodo_general("condicion_cuando","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[2])
nodo.hijos.append(t[4])
t[0] = nodo
def p_condiciones_cuando_B(t):
'''
condiciones_cuandoB : condiciones_cuandoB condicion_cuandoB
| condicion_cuandoB
'''
if len(t) == 3:
nodo = t[1]
nodo.hijos.append(t[2])
t[0] = nodo
else:
nodo = crear_nodo_general("condiciones_cuandoB","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_condicion_cuando_B(t):
'''
condicion_cuandoB : WHEN expre THEN instrucciones_if
'''
nodo = crear_nodo_general("condicion_cuandoB","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[2])
nodo.hijos.append(t[4])
t[0] = nodo
def p_sql_states(t):
'''
sql_states : sql_states OR sql_state
| sql_state
'''
if len(t) == 4:
nodo = t[1]
nodo.hijos.append(t[3])
t[0] = nodo
else:
nodo = crear_nodo_general("sql_states","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_sql_state(t):
'''
sql_state : SQLSTATE CADENA
'''
t[0] = crear_nodo_general("SQLSTATE",t[2],t.lexer.lineno, t.lexer.lexpos)
def p_identificadores(t):
'''
l_identificadores : l_identificadores OR ID
| ID
'''
if len(t) == 4:
nodo = t[1]
nodo.hijos.append(t[3])
t[0] = nodo
else:
nodo = crear_nodo_general("l_identificadores","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[1])
t[0] = nodo
def p_instruccion_index(t):
'''
instruccion : CREATE unique_op INDEX nombre_op ON ID hash_op PARIZQ l_indexes PARDER where_op PUNTO_COMA
'''
nodo = crear_nodo_general("CREATE INDEX","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[2])
nodo.hijos.append(t[4])
nodo.hijos.append(crear_nodo_general("ID",t[6],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[7])
nodo.hijos.append(t[9])
nodo.hijos.append(t[11])
t[0] = nodo
def p_instruccion_del_index(t):
'''
instruccion : DROP INDEX if_op ID PUNTO_COMA
'''
nodo = crear_nodo_general("DROP INDEX","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[3])
nodo.hijos.append(crear_nodo_general("ID",t[4],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_instruccion_alter_index(t):
'''
instruccion : ALTER INDEX if_op ID ALTER column_op ID ID PUNTO_COMA
'''
nodo = crear_nodo_general("ALTER INDEX","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(t[3])
nodo.hijos.append(crear_nodo_general("ID",t[4],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("ALTER","",t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[6])
nodo.hijos.append(crear_nodo_general("ID",t[7],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("ID",t[8],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_index_column(t):
'''
column_op : COLUMN
'''
t[0] = crear_nodo_general("COLUMN","",t.lexer.lineno, t.lexer.lexpos)
def p_index_column_e(t):
'''
column_op :
'''
t[0] = None
def p_index_if_exists(t):
'''
if_op : IF EXISTS
'''
t[0] = crear_nodo_general("IF EXISTS","",t.lexer.lineno, t.lexer.lexpos)
def p_index_if_e(t):
'''
if_op :
'''
t[0] = None
def p_index_nombre(t):
'''
nombre_op : ID
'''
t[0] = crear_nodo_general("ID",t[1],t.lexer.lineno, t.lexer.lexpos)
def p_index_nombre_e(t):
'''
nombre_op :
'''
t[0] = None
def p_index_unique(t):
'''
unique_op : UNIQUE
'''
t[0] = crear_nodo_general("UNIQUE","",t.lexer.lineno, t.lexer.lexpos)
def p_index_unique_e(t):
'''
unique_op :
'''
t[0] = None
def p_index_hash(t):
'''
hash_op : USING HASH
'''
t[0] = crear_nodo_general("USING HASH","",t.lexer.lineno, t.lexer.lexpos)
def p_index_hash_e(t):
'''
hash_op :
'''
t[0] = None
def p_index_indexes(t):
'''
l_indexes : l_indexes COMA ID order_op null_op first_last_op
'''
nodo = t[1]
nodo.hijos.append(crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[4])
nodo.hijos.append(t[5])
nodo.hijos.append(t[6])
t[0] = nodo
def p_index_index(t):
'''
l_indexes : ID order_op null_op first_last_op
'''
nodo = crear_nodo_general("l_indexes","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ID",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(t[2])
nodo.hijos.append(t[3])
nodo.hijos.append(t[4])
t[0] = nodo
def p_index_func(t):
'''
l_indexes : ID PARIZQ ID PARDER
'''
nodo = crear_nodo_general("l_indexes","",t.lexer.lineno, t.lexer.lexpos)
nodo.hijos.append(crear_nodo_general("ID",t[1],t.lexer.lineno, t.lexer.lexpos))
nodo.hijos.append(crear_nodo_general("ID",t[3],t.lexer.lineno, t.lexer.lexpos))
t[0] = nodo
def p_index_order(t):
'''
order_op : ASC
| DESC
'''
t[0] = crear_nodo_general(t[1],"",t.lexer.lineno, t.lexer.lexpos)
def p_index_order_e(t):
'''
order_op :
'''
t[0] = None
def p_index_null(t):
'''
null_op : NULLS
'''
t[0] = crear_nodo_general("NULLS","",t.lexer.lineno, t.lexer.lexpos)
def p_index_null_e(t):
'''
null_op :
'''
t[0] = None
def p_index_first_last(t):
'''
first_last_op : FIRST
| LAST
'''
t[0] = crear_nodo_general(t[1],"",t.lexer.lineno, t.lexer.lexpos)
def p_index_first_last_e(t):
'''
first_last_op :
'''
t[0] = None
def p_index_where(t):
'''
where_op : instructionWhere
'''
t[0] = t[1]
def p_index_where_e(t):
'''
where_op :
'''
t[0] = None
#END OF THE GRAMMAR
# PANIC MODE ***************************************
def p_error(p):
if not p:
print("Fin del Archivo!")
return
dato = Excepcion(1,"Error Sintáctico", f"Se esperaba una instrucción y viene {p.value}", p.lexer.lineno, find_column(lexer.lexdata,p))
lista_lexicos.append(dato)
while True:
tok = parser.token() # Get the next token
if not tok or tok.type == 'PUNTO_COMA':
if not tok:
print("FIN DEL ARCHIVO")
return
else:
print("Se recupero con ;")
break
dato = Excepcion(1,"Error Sintáctico", f"Se esperaba una instrucción y viene {tok.value}", p.lexer.lineno, find_column(lexer.lexdata,tok))
lista_lexicos.append(dato)
parser.restart()
def find_column(input,token):
last_cr = str(input).rfind('\n',0,token.lexpos)
if last_cr < 0:
last_cr = 0
column = (token.lexpos - last_cr) + 1
return column
parser = yacc.yacc()
def ejecutar_analisis(texto):
#LIMPIAR VARIABLES
columna=0
lista_lexicos.clear()
#se limpia analisis lexico
lexer.input("")
lexer.lineno = 0
#se obtiene la acción de analisis sintactico
print("inicio")
return parser.parse(texto)
```
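Note: every rule above builds its AST through `crear_nodo_general`, whose definition is not part of this excerpt. A minimal sketch, assuming only the attributes the rules and the `recorrido_arbol` class further below actually use (`valor`, `hijos`, and an `id` starting at 0):

```python
# Hedged sketch of the AST node factory assumed by the grammar rules above.
# Attribute names come from how the rules and recorrido_arbol use the nodes;
# the project's real implementation may carry more information.
class NodoGeneral:
    def __init__(self, nombre, valor, linea, columna):
        self.nombre = nombre    # rule or token name, e.g. "tipo"
        self.valor = valor      # lexeme or label drawn in the AST
        self.linea = linea      # source line, for error reporting
        self.columna = columna  # source column (see find_column above)
        self.id = 0             # assigned later while drawing the tree
        self.hijos = []         # children appended by each grammar rule

def crear_nodo_general(nombre, valor, linea, columna):
    return NodoGeneral(nombre, valor, linea, columna)
```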
#### File: Instrucciones/FunctionAgregate/Min.py
```python
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.Excepcion import Excepcion
import numpy as np
class Min(Instruccion):
def __init__(self, valor, tipo, strGram, linea, columna):
Instruccion.__init__(self,tipo,linea,columna, strGram)
self.valor = valor
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
resultado = self.valor.ejecutar(tabla, arbol)
if isinstance(resultado , Excepcion):
return resultado
listaejemplo = []
for x in range(0, len(resultado)):
#print(f"posicion {x}")
#print(f"valor {resultado[x][0]}")
if str.isnumeric(str(resultado[x][0])):
listaejemplo.append(int(resultado[x][0]))
else:
# str.isdecimal() also returns False for strings like "3.5", so attempt a float conversion instead
try:
listaejemplo.append(float(resultado[x][0]))
except (TypeError, ValueError):
error = Excepcion("22023", "Semantico", "Parametro de evaluacion invalido", self.linea, self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return error
listaNums = np.array(listaejemplo)
minimo = np.amin(listaNums)
return np.array([[minimo]])
def analizar(self, tabla, arbol):
return super().analizar(tabla, arbol)
def traducir(self, tabla, arbol):
super().traducir(tabla, arbol)
cadena = "MIN("
cadena += self.valor.concatenar(tabla,arbol)
cadena += ")"
return cadena
```
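To sanity-check the numeric handling in `ejecutar`, the same reduction can be run outside the interpreter. A standalone sketch with the table/tree plumbing stripped out (the input mirrors the `resultado[x][0]` matrix shape above):

```python
# Standalone sketch of the reduction Min.ejecutar performs on a column.
import numpy as np

def min_de_columna(resultado):
    valores = []
    for fila in resultado:
        texto = str(fila[0])
        if texto.isnumeric():
            valores.append(int(texto))     # whole numbers
        else:
            valores.append(float(texto))   # raises ValueError for non-numbers
    return np.amin(np.array(valores))

print(min_de_columna([["7"], ["3.5"], ["10"]]))  # -> 3.5
```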
#### File: Instrucciones/FunctionMathematical/Scale.py
```python
import math
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.Expresiones.Aritmetica import Aritmetica
from Instrucciones.Expresiones.Primitivo import Primitivo
class Scale(Instruccion):
def __init__(self, valor, strGram,tipo, linea, columna):
Instruccion.__init__(self,tipo,linea,columna, strGram)
self.valor = valor
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
arbol.consola.append('Función en proceso...')
def analizar(self, tabla, arbol):
return super().analizar(tabla, arbol)
def traducir(self, tabla, arbol):
super().traducir(tabla, arbol)
if isinstance(self.valor, Primitivo):
return f"SCALE({self.valor.traducir(tabla,arbol).temporalAnterior})"
elif isinstance(self.valor, Aritmetica):
return f"SCALE({self.valor.concatenar(tabla,arbol)})"
return f"SCALE({self.valor.traducir(tabla,arbol)})"
```
#### File: Instrucciones/PL/Asignacion.py
```python
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.TablaSimbolos.Tipo import Tipo_Dato
from Instrucciones.TablaSimbolos.Nodo3D import Nodo3D
from Instrucciones.Sql_select.SelectLista import SelectLista2
from Instrucciones.Excepcion import Excepcion
class Asignacion(Instruccion):
def __init__(self, id, expresion, strGram, linea, columna):
Instruccion.__init__(self,None,linea,columna,strGram)
self.id = id
self.expresion = expresion
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
pass
def analizar(self, tabla, arbol):
super().analizar(tabla,arbol)
variable = tabla.getSimboloVariable(self.id)
if variable == None:
error = Excepcion("42723", "Semantico", f"La variable {self.id} no ha sido declarada", self.linea, self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return error
resultado = self.expresion.analizar(tabla,arbol)
if not isinstance(resultado, Excepcion):
self.tipo = resultado
comprobar = self.comprobarTipo(variable.tipo, resultado, None)
if not comprobar:
error = Excepcion('42804',"Semántico",f"La variable {self.id} es de tipo {variable.tipo.toString()} pero la expresión es de tipo {resultado.toString()}.",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
def traducir(self, tabla, arbol):
super().traducir(tabla,arbol)
retorno = Nodo3D()
variable = tabla.getSimboloVariable(self.id)
arbol.addComen(f"Inicia asignación: {self.id}")
temporal1 = tabla.getTemporal()
temporal2 = tabla.getTemporal()
arbol.addc3d(f"{temporal1} = P + {variable.posicion}")
arbol.addComen("Se obtiene el valor")
if isinstance(self.expresion, SelectLista2):
arbol.addc3d("arbol.expre_query = True")
exp = self.expresion.c3d(tabla, arbol)
else:
exp = self.expresion.traducir(tabla, arbol)
if variable.tipo.tipo == Tipo_Dato.BOOLEAN and exp.temporalAnterior != "1" and exp.temporalAnterior != "0":
retorno.imprimirEtiquetDestino(arbol, exp.etiquetaTrue)
arbol.addc3d(f"{temporal2} = 1")
etiqueta1 = tabla.getEtiqueta()
arbol.addc3d(f"goto .{etiqueta1}")
retorno.imprimirEtiquetDestino(arbol, exp.etiquetaFalse)
arbol.addc3d(f"{temporal2} = 0")
arbol.addc3d(f"label .{etiqueta1}")
else:
arbol.addc3d(f"{temporal2} = {exp.temporalAnterior}")
arbol.addc3d(f"Pila[{temporal1}] = {temporal2}")
if isinstance(self.expresion, SelectLista2):
arbol.addc3d("arbol.expre_query = False")
arbol.addComen("Fin Asignación")
def comprobarTipo(self, tipoColumna, tipoValor, val):
if tipoValor.tipo == Tipo_Dato.QUERY:
return True
if (tipoColumna.tipo == Tipo_Dato.MONEY) and (tipoValor.tipo == Tipo_Dato.CHAR):
if ',' in val:
val = val.replace(',','')
try:
val = float(val)
except:
return False
return True
if (tipoColumna.tipo == Tipo_Dato.CHAR or tipoColumna.tipo == Tipo_Dato.VARCHAR or tipoColumna.tipo == Tipo_Dato.VARYING or tipoColumna.tipo == Tipo_Dato.CHARACTER or tipoColumna.tipo == Tipo_Dato.TEXT) and (tipoValor.tipo == Tipo_Dato.CHAR or tipoValor.tipo == Tipo_Dato.VARCHAR or tipoValor.tipo == Tipo_Dato.VARYING or tipoValor.tipo == Tipo_Dato.CHARACTER or tipoValor.tipo == Tipo_Dato.TEXT):
if tipoColumna.dimension != None:
pass
return True
elif (tipoColumna.tipo == Tipo_Dato.SMALLINT or tipoColumna.tipo == Tipo_Dato.INTEGER or tipoColumna.tipo == Tipo_Dato.BIGINT or tipoColumna.tipo == Tipo_Dato.DECIMAL or tipoColumna.tipo == Tipo_Dato.NUMERIC or tipoColumna.tipo == Tipo_Dato.REAL or tipoColumna.tipo == Tipo_Dato.DOUBLE_PRECISION or tipoColumna.tipo == Tipo_Dato.MONEY) and (tipoValor.tipo == Tipo_Dato.SMALLINT or tipoValor.tipo == Tipo_Dato.INTEGER or tipoValor.tipo == Tipo_Dato.BIGINT or tipoValor.tipo == Tipo_Dato.DECIMAL or tipoValor.tipo == Tipo_Dato.NUMERIC or tipoValor.tipo == Tipo_Dato.REAL or tipoValor.tipo == Tipo_Dato.DOUBLE_PRECISION or tipoValor.tipo == Tipo_Dato.MONEY):
if tipoColumna.tipo == Tipo_Dato.SMALLINT:
pass
elif tipoColumna.tipo == Tipo_Dato.INTEGER:
pass
elif tipoColumna.tipo == Tipo_Dato.BIGINT:
pass
return True
elif (tipoColumna.tipo == Tipo_Dato.DATE or tipoColumna.tipo == Tipo_Dato.TIMESTAMP or tipoColumna.tipo == Tipo_Dato.TIME or tipoColumna.tipo == Tipo_Dato.INTERVAL or tipoColumna.tipo == Tipo_Dato.CHAR ) and (tipoValor.tipo == Tipo_Dato.DATE or tipoValor.tipo == Tipo_Dato.TIMESTAMP or tipoValor.tipo == Tipo_Dato.TIME or tipoValor.tipo == Tipo_Dato.INTERVAL or tipoValor.tipo == Tipo_Dato.CHAR):
return True
elif (tipoColumna.tipo == Tipo_Dato.BOOLEAN) and (tipoValor.tipo == Tipo_Dato.BOOLEAN):
return True
return False
```
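The three-address code emitted by `traducir` above targets a flat runtime: a global `Pila` array and a frame pointer `P`, with each local variable living at `P + posicion`. A toy model of that discipline (the array size is arbitrary here; the names follow the project's convention):

```python
# Toy model of the runtime the generated 3AC targets: Pila is the stack,
# P the current frame base, and a local variable lives at P + posicion.
Pila = [None] * 1024
P = 0

def asignar_local(posicion, valor):
    t1 = P + posicion   # same address computation as the emitted code
    Pila[t1] = valor

asignar_local(3, 42)
print(Pila[P + 3])  # 42
```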
#### File: Instrucciones/Sql_select/SelectLista.py
```python
from Instrucciones.PL.Llamada import Llamada
from Instrucciones.Expresiones import Aritmetica
from Instrucciones.Identificador import Identificador
import Instrucciones
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.TablaSimbolos.Tipo import Tipo_Dato, Tipo
from Instrucciones.Excepcion import Excepcion
from Instrucciones.Sql_select.Select import Select
from Instrucciones.Tablas.Tablas import Tablas
from Instrucciones.Expresiones.Primitivo import *
from Instrucciones.TablaSimbolos.Nodo3D import Nodo3D
from Instrucciones.Sql_select.Alias import Alias
import numpy as np
class SelectLista(Instruccion):
def __init__(self, lista, strGram, linea, columna):
Instruccion.__init__(self,Tipo(Tipo_Dato.QUERY),linea,columna,strGram)
self.lista = lista
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
columnas = []
valores = []
selectEncontrado = 0
tipoResultado = None
for ins in self.lista:
if isinstance(ins, Alias):
resultado = ins.expresion.ejecutar(tabla, arbol)
if isinstance(resultado, Excepcion):
return resultado
valores.append(str(resultado))
columnas.append(ins.id)
tipoResultado = ins.tipo
elif isinstance(ins, Select):
resultado = ins.ejecutar(tabla, arbol)
if isinstance(resultado, Excepcion):
return resultado
tipoResultado = ins.tipo
valores = resultado
columnas = ins.devolverColumnas(tabla,arbol)
if isinstance(columnas, Excepcion):
return columnas
selectEncontrado = 1
else:
resultado = ins.ejecutar(tabla, arbol)
if isinstance(resultado, Excepcion):
return resultado
valores.append(str(resultado))
columnas.append('col')
tipoResultado = ins.tipo
#print("COLUMNAS-------------------------->",columnas)
#print("VALORES-------------------------->",valores)
if arbol.expre_query:
if tipoResultado.tipo == Tipo_Dato.SMALLINT or tipoResultado.tipo == Tipo_Dato.INTEGER or tipoResultado.tipo == Tipo_Dato.BIGINT:
return int(valores[0])
elif tipoResultado.tipo == Tipo_Dato.NUMERIC or tipoResultado.tipo == Tipo_Dato.DECIMAL or tipoResultado.tipo == Tipo_Dato.REAL or tipoResultado.tipo == Tipo_Dato.DOUBLE_PRECISION:
return float(valores[0])
else:
if isinstance(valores[0], (np.ndarray, np.generic)):
#print(valores[0][0])
return valores[0][0]
else:
return valores[0]
if(selectEncontrado == 0):
valores = [valores]
if(arbol.getRelaciones() == False):
arbol.getMensajeTabla(columnas,valores)
else:
n = Tablas("tabla",None)
n.data = valores
n.lista_de_campos = columnas
return n
else:
if(arbol.getRelaciones() == False):
arbol.getMensajeTabla(columnas,valores)
else:
n = Tablas("tabla",None)
n.lista_de_campos = columnas
n.data = valores
return n
def analizar(self, tabla, arbol):
return self.tipo
def traducir(self, tabla, arbol):
super().traducir(tabla, arbol)
cadena = ""
cadena += "f\"SELECT "
for query in self.lista:
if isinstance(query, Llamada):
cadena += query.concatenar(tabla,arbol)
elif isinstance(query, Primitivo):
#print("SELECT",query.traducir(tabla, arbol).temporalAnterior)
cadena += query.traducir(tabla,arbol).temporalAnterior
#elif isinstance(query, Select):
#cadena +=f"{query.traducir(tabla,arbol)}"
elif isinstance(query, Aritmetica.Aritmetica):
cadena += f"{query.concatenar(tabla,arbol)}"
else:
cadena += f"{query.traducir(tabla,arbol)}"
if self.lista.index(query) == len(self.lista)-1:
cadena += " "
else:
cadena += ", "
cadena += ";\""
if(arbol.getRelacionales()==False):
arbol.addComen("Asignar cadena")
temporal1 = tabla.getTemporal()
arbol.addc3d(f"{temporal1} = { cadena }")
arbol.addComen("Entrar al ambito")
temporal2 = tabla.getTemporal()
arbol.addc3d(f"{temporal2} = P+2")
temporal3 = tabla.getTemporal()
arbol.addComen("parametro 1")
arbol.addc3d(f"{temporal3} = { temporal2}+1")
arbol.addComen("Asignacion de parametros")
arbol.addc3d(f"Pila[{temporal3}] = {temporal1}")
arbol.addComen("Llamada de funcion")
arbol.addc3d(f"P = P+2")
arbol.addc3d(f"funcionintermedia()")
arbol.addComen("obtener resultado")
temporalX = tabla.getTemporal()
arbol.addc3d(f"{temporalX} = P+2")
temporalR = tabla.getTemporal()
arbol.addc3d(f"{temporalR} = Pila[{ temporalX }]")
arbol.addComen("Salida de funcion")
arbol.addc3d(f"P = P-2")
else:
return cadena
def traducir2(self, tabla, arbol):
cadena = ""
cadena += "(SELECT "
for query in self.lista:
if isinstance(query, Primitivo):
#print("SELECT",query.traducir(tabla, arbol).temporalAnterior)
cadena += query.traducir(tabla,arbol).temporalAnterior
elif isinstance(query, Select):
cadena +=f"{query.traducir(tabla,arbol)}"
elif isinstance(query, Aritmetica.Aritmetica):
cadena += f"{query.concatenar(tabla,arbol)}"
else:
cadena += f"{query.traducir(tabla,arbol)}"
if self.lista.index(query) == len(self.lista)-1:
cadena += " "
else:
cadena += ", "
cadena = cadena.rstrip() + ")"
return cadena
def c3d(self, tabla, arbol):
retorno = Nodo3D()
#print("SELECT LISTA")
cadena = ""
cadena += "f\"SELECT "
for query in self.lista:
if isinstance(query, Llamada):
cadena += query.concatenar(tabla,arbol)
elif isinstance(query, Primitivo):
#print("SELECT",query.traducir(tabla, arbol).temporalAnterior)
cadena += query.traducir(tabla,arbol).temporalAnterior
elif isinstance(query, Select):
cadena +=f"{query.traducir(tabla,arbol)}"
elif isinstance(query, Aritmetica.Aritmetica):
cadena += f"{query.concatenar(tabla,arbol)}"
else:
cadena += f"{query.traducir(tabla,arbol)}"
if self.lista.index(query) == len(self.lista)-1:
cadena += " "
else:
cadena += ", "
cadena += ";\""
if(arbol.getRelacionales()==False):
arbol.addComen("Asignar cadena")
temporal1 = tabla.getTemporal()
arbol.addc3d(f"{temporal1} = { cadena }")
arbol.addComen("Entrar al ambito")
temporal2 = tabla.getTemporal()
arbol.addc3d(f"{temporal2} = P+2")
temporal3 = tabla.getTemporal()
arbol.addComen("parametro 1")
arbol.addc3d(f"{temporal3} = { temporal2}+1")
arbol.addComen("Asignacion de parametros")
arbol.addc3d(f"Pila[{temporal3}] = {temporal1}")
arbol.addComen("Llamada de funcion")
arbol.addc3d(f"P = P+2")
arbol.addc3d(f"funcionintermedia()")
arbol.addComen("obtener resultado")
temporalX = tabla.getTemporal()
arbol.addc3d(f"{temporalX} = P+2")
temporalR = tabla.getTemporal()
arbol.addc3d(f"{temporalR} = Pila[{ temporalX }]")
arbol.addComen("Salida de funcion")
arbol.addc3d(f"P = P-2")
retorno.temporalAnterior = temporalR
return retorno
else:
return cadena
class SelectLista2(Instruccion):
def __init__(self, lista, strGram, linea, columna):
Instruccion.__init__(self,Tipo(Tipo_Dato.QUERY),linea,columna,strGram)
self.lista = lista
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
columnas = []
valores = []
selectEncontrado = 0
for ins in self.lista:
if isinstance(ins, Alias):
resultado = ins.expresion.ejecutar(tabla, arbol)
if isinstance(resultado, Excepcion):
return resultado
valores.append(str(resultado))
columnas.append(ins.id)
elif isinstance(ins, Select):
resultado = ins.ejecutar(tabla, arbol)
if isinstance(resultado, Excepcion):
return resultado
valores = resultado
columnas = ins.devolverColumnas(tabla,arbol)
if isinstance(columnas, Excepcion):
return columnas
selectEncontrado = 1
else:
resultado = ins.ejecutar(tabla, arbol)
if isinstance(resultado, Excepcion):
return resultado
valores.append(str(resultado))
columnas.append('col')
#print("COLUMNAS-------------------------->",columnas)
#print("VALORES-------------------------->",valores)
return valores[0]
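# NOTE: the early return above makes the rest of this method unreachable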
if(selectEncontrado == 0):
valores = [valores]
if(arbol.getRelaciones() == False):
arbol.getMensajeTabla(columnas,valores)
else:
n = Tablas("tabla",None)
n.data = valores
n.lista_de_campos = columnas
return n
else:
if(arbol.getRelaciones() == False):
arbol.getMensajeTabla(columnas,valores)
else:
n = Tablas("tabla",None)
n.lista_de_campos = columnas
n.data = valores
return n
def analizar(self, tabla, arbol):
return self.tipo
def traducir(self, tabla, arbol):
super().traducir(tabla, arbol)
cadena = ""
cadena += "f\"SELECT "
for query in self.lista:
if isinstance(query, Llamada):
cadena += query.concatenar(tabla,arbol)
elif isinstance(query, Primitivo):
#print("SELECT",query.traducir(tabla, arbol).temporalAnterior)
cadena += query.traducir(tabla,arbol).temporalAnterior
elif isinstance(query, Select):
cadena +=f"{query.traducir(tabla,arbol)}"
elif isinstance(query, Aritmetica.Aritmetica):
cadena += f"{query.concatenar(tabla,arbol)}"
else:
cadena += f"{query.traducir(tabla,arbol)}"
if self.lista.index(query) == len(self.lista)-1:
cadena += " "
else:
cadena += ", "
cadena += ";\""
if(arbol.getRelacionales()==False):
arbol.addComen("Asignar cadena")
temporal1 = tabla.getTemporal()
arbol.addc3d(f"{temporal1} = { cadena }")
arbol.addComen("Entrar al ambito")
temporal2 = tabla.getTemporal()
arbol.addc3d(f"{temporal2} = P+2")
temporal3 = tabla.getTemporal()
arbol.addComen("parametro 1")
arbol.addc3d(f"{temporal3} = { temporal2}+1")
arbol.addComen("Asignacion de parametros")
arbol.addc3d(f"Pila[{temporal3}] = {temporal1}")
arbol.addComen("Llamada de funcion")
arbol.addc3d(f"P = P+2")
arbol.addc3d(f"funcionintermedia()")
arbol.addComen("obtener resultado")
temporalX = tabla.getTemporal()
arbol.addc3d(f"{temporalX} = P+2")
temporalR = tabla.getTemporal()
arbol.addc3d(f"{temporalR} = Pila[{ temporalX }]")
arbol.addComen("Salida de funcion")
arbol.addc3d(f"P = P-2")
else:
return cadena
def traducir2(self, tabla, arbol):
cadena = ""
cadena += "(SELECT "
for query in self.lista:
if isinstance(query, Primitivo):
#print("SELECT",query.traducir(tabla, arbol).temporalAnterior)
cadena += query.traducir(tabla,arbol).temporalAnterior
elif isinstance(query, Select):
cadena +=f"{query.traducir(tabla,arbol)}"
elif isinstance(query, Aritmetica.Aritmetica):
cadena += f"{query.concatenar(tabla,arbol)}"
else:
cadena += f"{query.traducir(tabla,arbol)}"
if self.lista.index(query) == len(self.lista)-1:
cadena += " "
else:
cadena += ", "
cadena = cadena.rstrip() + ")"
return cadena
def c3d(self, tabla, arbol):
retorno = Nodo3D()
#print("SELECT LISTA")
cadena = ""
cadena += "f\"SELECT "
for query in self.lista:
if isinstance(query, Llamada):
cadena += query.concatenar(tabla,arbol)
elif isinstance(query, Primitivo):
#print("SELECT",query.traducir(tabla, arbol).temporalAnterior)
cadena += query.traducir(tabla,arbol).temporalAnterior
#elif isinstance(query, Select):
# cadena +=f"{query.traducir(tabla,arbol)}"
elif isinstance(query, Aritmetica.Aritmetica):
cadena += f"{query.concatenar(tabla,arbol)}"
else:
cadena += f"{query.traducir(tabla,arbol)}"
if self.lista.index(query) == len(self.lista)-1:
cadena += " "
else:
cadena += ", "
cadena += ";\""
if(arbol.getRelacionales()==False):
if arbol.tamanio_actual == None:
arbol.addComen("Asignar cadena")
temporal1 = tabla.getTemporal()
arbol.addc3d(f"{temporal1} = { cadena }")
arbol.addComen("Entrar al ambito")
temporal2 = tabla.getTemporal()
arbol.addc3d(f"{temporal2} = P+2")
temporal3 = tabla.getTemporal()
arbol.addComen("parametro 1")
arbol.addc3d(f"{temporal3} = { temporal2}+1")
arbol.addComen("Asignacion de parametros")
arbol.addc3d(f"Pila[{temporal3}] = {temporal1}")
arbol.addComen("Llamada de funcion")
arbol.addc3d(f"P = P+2")
arbol.addc3d(f"funcionintermedia()")
arbol.addComen("obtener resultado")
temporalX = tabla.getTemporal()
arbol.addc3d(f"{temporalX} = P+2")
temporalR = tabla.getTemporal()
arbol.addc3d(f"{temporalR} = Pila[{ temporalX }]")
arbol.addComen("Salida de funcion")
arbol.addc3d(f"P = P-2")
retorno.temporalAnterior = temporalR
else:
arbol.addComen("Asignar cadena")
tempcadena = tabla.getTemporal()
arbol.addc3d(f"{tempcadena} = { cadena }")
arbol.addComen("Simulando el paso de parámetros")
temp1 = tabla.getTemporal()
arbol.addc3d(f"{temp1} = P + 2")
temporal1 = tabla.getTemporal()
arbol.addc3d(f"{temporal1} = {temp1} + {arbol.tamanio_actual}")
arbol.addComen("Asignación de parámetros")
temporal2 = tabla.getTemporal()
arbol.addComen("parametro 1")
arbol.addc3d(f"{temporal2} = { temporal1} + 1")
arbol.addComen("Asignacion de parametros")
arbol.addc3d(f"Pila[{temporal2}] = {tempcadena}")
temporal3 = tabla.getTemporal()
temporal4 = tabla.getTemporal()
arbol.addComen("Cambio de ámbito")
arbol.addc3d(f"P = P + 2")
arbol.addc3d(f"P = P + {arbol.tamanio_actual}")
arbol.addComen("Llamada a la función")
arbol.addc3d(f"funcionintermedia()")
arbol.addComen("Posición del return en el ámbito de la función")
arbol.addc3d(f"{temporal3} = {temporal1} + 2")
arbol.addc3d(f"{temporal4} = Pila[{temporal3}]")
arbol.addc3d(f"P = P - 2")
arbol.addc3d(f"P = P - {arbol.tamanio_actual}")
retorno.temporalAnterior = temporal4
return retorno
return retorno
else:
return cadena
'''
class Alias(Instruccion):
def __init__(self, id, expresion, pas):
Instruccion.__init__(self,None,None,None,None)
self.id = id
self.expresion = expresion
self.tipo = None
self.pas = pas
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
pass
def analizar(self, tabla, arbol):
super().analizar(tabla,arbol)
pass
def traducir(self, tabla, arbol):
super().traducir(tabla, arbol)
cadena=""
if isinstance(self.expresion,str):
if self.id != None:
cadena += f"{self.id}.{self.expresion} "
else:
cadena += self.expresion + " "
elif isinstance(self.expresion, Identificador):
if self.pas=="AS":
cadena += f"{self.expresion.concatenar(tabla,arbol)} {self.pas} {self.id}"
else:
cadena += f"{self.expresion.concatenar(tabla,arbol)} {self.id}"
else:
if self.pas=="AS":
cadena += f"{self.expresion.traducir(tabla,arbol)} {self.pas} {self.id}"
else:
cadena += f"{self.expresion.traducir(tabla,arbol)} {self.id}"
return cadena
'''
```
#### File: Instrucciones/C3D/DeclacionC3D.py
```python
from optimizacion.Instrucciones.TablaSimbolos.InstruccionC3D import InstruccionC3D
class DeclaracionC3D(InstruccionC3D):
def __init__(self,id, expre, linea, columna):
InstruccionC3D.__init__(self,linea,columna)
self.id = id
self.expresion = expre
print("ENTRO A destino")
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
print("linea" + str(self.linea) + " columna: " + str(self.columna))
```
#### File: Instrucciones/C3D/SentenciaIf.py
```python
from optimizacion.Instrucciones.TablaSimbolos.InstruccionC3D import InstruccionC3D
class SentenciaIf(InstruccionC3D):
def __init__(self,opIzq, relacional, opDer, instrucciones, linea, columna):
InstruccionC3D.__init__(self,linea,columna)
self.opIzq = opIzq
self.relacional = relacional
self.opDer = opDer
self.instrucciones = instrucciones
print("ENTRO A if")
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
print(" linea: " + str(self.linea) + " columna: " + str(self.columna))
if self.opDer == None :
return "\rif __name__ == \"__main__\":"
if(self.relacional == "="):
self.relacional = "=="
return "if (" + str(self.opIzq) + " " + self.relacional + " " + str(self.opDer) + "):"
```
#### File: team08/Tytus_SQLPARSER_G8/recorrido_arbol.py
```python
import os
class recorrido_arbol():
def __init__(self, nodo_arbol):
self.nodo_arbol = nodo_arbol
self.cadena = ""
self.cadenaAux = ""
self.id_n = 0
def recorrer_arbolito(self,nodo):
if nodo.id==0:
nodo.id = self.id_n
self.id_n = self.id_n+1
if nodo != None:
val = str(nodo.id) + " [label=\"" + str(nodo.valor) + "\" fillcolor=\"#d62728\" shape=\"circle\"];\n"
self.cadena += val
for x in nodo.hijos:
self.cadenaAux += str(nodo.id) + "->" + str(self.id_n) + ";\n"
self.recorrer_arbolito(x)
def imprimir(self):
self.recorrer_arbolito(self.nodo_arbol)
printCuerpo = "digraph arbolAST{\n" + self.cadena + self.cadenaAux + "}\n"
try:
f = open('Digraph.dot','w+')
f.write(printCuerpo)
f.close()
except:
print("No se pudo dibujar el árbol")
#here, substitute the path where Graphviz is installed
#cmd = '"D:\\Program Files (x86)\\Graphviz2.38\\bin\\dot.exe"' + " -Tpng Digraph.dot -o Digraph.png"
try:
#os.system(cmd)
#print('terminó!')
pass
except:
print("Excepción al ejecutar el archivo .dot")
```
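A hypothetical driver tying the parser and this class together. It assumes the `dot` binary is on PATH (the commented-out `cmd` above hard-codes a Windows install path instead); `entrada_sql` and the import paths are placeholders:

```python
# Hypothetical usage: parse, dump Digraph.dot, then render it with Graphviz.
import subprocess
from recorrido_arbol import recorrido_arbol

raiz = ejecutar_analisis(entrada_sql)  # AST root from the parser module above
recorrido_arbol(raiz).imprimir()       # writes Digraph.dot in the cwd
subprocess.run(["dot", "-Tpng", "Digraph.dot", "-o", "Digraph.png"], check=True)
```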
#### File: Instrucciones/Expresiones/Logica.py
```python
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.TablaSimbolos.Tipo import Tipo_Dato, Tipo
from Instrucciones.Excepcion import Excepcion
from Instrucciones.C3D.temporal import temporal
class Logica(Instruccion):
def __init__(self, opIzq, opDer, operador, strGram, linea, columna):
Instruccion.__init__(self,Tipo(Tipo_Dato.BOOLEAN),linea,columna,strGram)
self.opIzq = opIzq
self.opDer = opDer
self.operador = operador
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
# Operación con dos operadores
if(self.opDer != None):
# Si existe algún error en el operador izquierdo, retorno el error.
resultadoIzq = self.opIzq.ejecutar(tabla, arbol)
if isinstance(resultadoIzq, Excepcion):
return resultadoIzq
# Si existe algún error en el operador derecho, retorno el error.
resultadoDer = self.opDer.ejecutar(tabla, arbol)
if isinstance(resultadoDer, Excepcion):
return resultadoDer
# Comprobamos el tipo de operador
if self.operador == 'OR':
if self.opIzq.tipo.tipo == Tipo_Dato.BOOLEAN and self.opDer.tipo.tipo == Tipo_Dato.BOOLEAN:
return resultadoIzq or resultadoDer
else:
error = Excepcion('42804',"Semántico","El argumento de OR debe ser de tipo boolean",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return error
elif self.operador == 'AND':
if self.opIzq.tipo.tipo == Tipo_Dato.BOOLEAN and self.opDer.tipo.tipo == Tipo_Dato.BOOLEAN:
return resultadoIzq and resultadoDer
else:
error = Excepcion('42804',"Semántico","El argumento de AND debe ser de tipo boolean",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return error
else:
error = Excepcion('42804',"Semántico","Operador desconocido.",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return error
# Operación unaria
else:
# Si existe algún error en el operador izquierdo, retorno el error.
resultadoIzq = self.opIzq.ejecutar(tabla, arbol)
if isinstance(resultadoIzq, Excepcion):
return resultadoIzq
if self.operador == 'NOT':
if self.opIzq.tipo.tipo == Tipo_Dato.BOOLEAN:
return not resultadoIzq
else:
error = Excepcion('42804',"Semántico","Tipo de datos incorrectos en la operación lógica not",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return error
else:
error = Excepcion('42804',"Semántico","Operador desconocido.",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return error
#******************** traduccion fase 2 *****************
def traducir(self, tabla, controlador):
codigo =''
if(self.opDer != None):
# Si existe algún error en el operador izquierdo, retorno el error.
resultadoIzq = self.opIzq.traducir(tabla, controlador)
if isinstance(resultadoIzq, Excepcion):
return resultadoIzq
# Si existe algún error en el operador derecho, retorno el error.
resultadoDer = self.opDer.traducir(tabla, controlador)
if isinstance(resultadoDer, Excepcion):
return resultadoDer
#verificar tipos
if (self.opIzq.tipo.tipo != Tipo_Dato.BOOLEAN or self.opDer.tipo.tipo != Tipo_Dato.BOOLEAN):
error = Excepcion('42804',"Semántico","El argumento de OR debe ser de tipo boolean",self.linea,self.columna)
return error
#etiquetas para el c3d
cond1_lv = controlador.get_etiqueta()
cond1_lf = controlador.get_etiqueta()
cond2_lv = controlador.get_etiqueta()
cond2_lf = controlador.get_etiqueta()
temp_izq = resultadoIzq.get_temp()
temp_der = resultadoDer.get_temp()
temp_izq_c3d = temp_izq
temp_der_c3d = temp_der
#valores true o false en c3d seran 1 y 0
if temp_izq == True:
temp_izq_c3d = 1
elif temp_izq == False:
temp_izq_c3d = 0
if temp_der == True:
temp_der_c3d = 1
elif temp_der == False:
temp_der_c3d = 0
controlador.cont_temp = controlador.cont_temp + 1
temp_resultado = temporal(controlador.cont_temp,None)
# Comprobamos el tipo de operador
if self.operador == 'OR':
self.tipo = Tipo(Tipo_Dato.BOOLEAN)
temp_resultado.Tipo = Tipo(Tipo_Dato.BOOLEAN)
codigo += ' #operacion logica OR \n'
codigo += ' if('+str(temp_izq_c3d) + '== 1): \n'
codigo += ' goto .'+ cond1_lv +'\n'
codigo += ' goto .'+ cond1_lf +' \n'
codigo += ' label .'+cond1_lf + '\n'
codigo += ' if('+str(temp_der_c3d) + '== 1): \n'
codigo += ' goto .'+cond2_lv +'\n'
codigo += ' '+str(temp_resultado.get_temp())+' = 0 \n'
codigo += ' goto .'+cond2_lf+'\n'
codigo += ' label .'+cond1_lv+'\n'
codigo += ' label .'+cond2_lv+'\n'
codigo += ' '+str(temp_resultado.get_temp())+' = 1 \n'
codigo += ' label .'+cond2_lf+'\n'
codigo+= '\n'
controlador.append_3d(codigo)
return temp_resultado
elif self.operador == 'AND':
self.tipo = Tipo(Tipo_Dato.BOOLEAN)
temp_resultado.Tipo = Tipo(Tipo_Dato.BOOLEAN)
codigo += ' #operacion logica AND \n'
codigo += ' if('+str(temp_izq_c3d) + '== 1): \n'
codigo += ' goto .'+ cond1_lv +'\n'
codigo += ' '+str(temp_resultado.get_temp())+' = 0 \n'
codigo += ' goto .'+ cond1_lf +' \n'
codigo += ' label .'+cond1_lv+'\n'
codigo += ' if('+str(temp_der_c3d) + '== 1): \n'
codigo += ' goto .'+cond2_lv +'\n'
codigo += ' '+str(temp_resultado.get_temp())+' = 0 \n'
codigo += ' goto .'+cond2_lf+'\n'
codigo += ' label .'+cond2_lv+'\n'
codigo += ' '+str(temp_resultado.get_temp())+' = 1 \n'
codigo += ' label .'+cond1_lf + '\n'
codigo += ' label .'+cond2_lf+'\n'
codigo+= '\n'
controlador.append_3d(codigo)
return temp_resultado
else:
error = Excepcion('42804',"Semántico","Operador desconocido.",self.linea,self.columna)
return error
# Operación unaria
else:
# Si existe algún error en el operador izquierdo, retorno el error.
resultadoIzq = self.opIzq.traducir(tabla, controlador)
if isinstance(resultadoIzq, Excepcion):
return resultadoIzq
cond1_lv = controlador.get_etiqueta()
cond1_lf = controlador.get_etiqueta()
temp_izq = resultadoIzq.get_temp()
temp_izq_c3d = temp_izq  # default: keep the temporal when the operand is not a boolean literal
if temp_izq == True:
temp_izq_c3d = 1
elif temp_izq == False:
temp_izq_c3d = 0
controlador.cont_temp = controlador.cont_temp + 1
temp_resultado = temporal(controlador.cont_temp,None)
if self.operador == 'NOT':
if self.opIzq.tipo.tipo == Tipo_Dato.BOOLEAN:
temp_resultado.Tipo = Tipo(Tipo_Dato.BOOLEAN)
codigo += ' #operacion logica NOT \n'
codigo += ' if('+str(temp_izq_c3d)+ ' == 1): \n'
codigo += ' goto .'+cond1_lv+'\n'
codigo += ' '+str(temp_resultado.get_temp())+' = 1 \n'
codigo += ' goto .'+cond1_lf+'\n'
codigo += ' label .'+cond1_lv+'\n'
codigo += ' '+str(temp_resultado.get_temp())+' = 0 \n'
codigo += ' label .'+cond1_lf+'\n'
codigo+= '\n'
controlador.append_3d(codigo)
return temp_resultado
else:
error = Excepcion('42804',"Semántico","Tipo de datos incorrectos en la operación lógica not",self.linea,self.columna)
return error
else:
error = Excepcion('42804',"Semántico","Operador desconocido.",self.linea,self.columna)
return error
```
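For reference, the OR branch of `traducir` above emits short-circuit code of the following shape for `a OR b` (temporal and label numbers vary per run; 1 and 0 stand for true and false):

```
#operacion logica OR
if(t1 == 1):
    goto .L1
goto .L2
label .L2
if(t2 == 1):
    goto .L3
t3 = 0
goto .L4
label .L1
label .L3
t3 = 1
label .L4
```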
#### File: Instrucciones/PLpgSQL/CaseWhen.py
```python
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
class CaseWhen(Instruccion):
def __init__(self, when, sent, othe, strGram, linea, columna):
Instruccion.__init__(self, None, linea, columna, strGram)
self.when = when
self.sent = sent
self.othe = othe
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla, arbol)
def traducir(self, tabla, controlador):
codigo = ''
```
#### File: Instrucciones/PLpgSQL/Variable.py
```python
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
class Variable(Instruccion):
def __init__(self, nombre, const, tipo, nulo, valor, alias, strGram, linea, columna):
Instruccion.__init__(self, None, linea, columna, strGram)
self.nombre = nombre
self.const = const
self.tipo = tipo
self.nulo = nulo
self.valor = valor
self.alias = alias
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla, arbol)
```
#### File: fase2/team09/instrucciones.py
```python
import salto_incodicional as si
import salto_condicional as sc
import reglas as r
class Instrucciones():
def __init__(self, instrucciones):
self.instrucciones = instrucciones
def optimizacion(self, reglas, pendiente):
ins = self.instrucciones
i = 0
for i in range(len(ins)):
#print('el valor de inst[i] es ->\n' + str(ins[i]))
if ins[i][0] == 'sc':
try:
sig_incon = ins[i+1]
except:
sig_incon = None
try:
sig_salto = ins[i+2]
except:
sig_salto = None
if sig_incon != None and sig_salto != None:
if sig_incon[0] == 'sin' and sig_salto[0] == 'salto':
#Se cumple la regla 3
print('regla3')
if sig_incon[0] == 'sin':
#No se sabe que regla es, así que se activan las 2
r.Reglas.regla4 = True
r.Reglas.regla5 = True
cond = sc.Salto_con(ins[i][1], ins[i][2], ins[i][3]).optimizacion(reglas, pendiente)
incond = si.Salto_in(sig_incon[1]).optimizacion()
#Si las 2 reglas siguen siendo true
if r.Reglas.regla4 and r.Reglas.regla5:
codigo = cond + '\n' + incond
pendiente.append(codigo)
#Si solo la regla4 es true
elif r.Reglas.regla4:
nuevo = 'goto ' + ins[i][2]
codigo_regla4 = '#Se cumple la regla 4\n' + nuevo
pendiente.append(codigo_regla4)
anterior = cond + '<br>' + incond + '<br>'
linea = ''
if cond[0] == 'sc':
linea = cond[3]
elif cond[0] == 'sin' or cond[0] == 'salto':
linea = cond[2]
regla = '4,'+anterior+','+nuevo+','+linea
reglas.append(regla)
#Si sola la regla5 es true
elif r.Reglas.regla5:
codigo_regla5 = '#Se cumple la regla 5 \n'+ incond + '\n'
pendiente.append(codigo_regla5)
anterior = cond + '<br>' + incond
linea = ''
if cond[0] == 'sc':
linea = cond[3]
elif cond[0] == 'sin' or cond[0] == 'salto':
linea = cond[2]
regla = '5,'+anterior+','+incond+','+linea
reglas.append(regla)
```
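The detector above only recognizes instruction shapes; the rewrites themselves live in `salto_condicional`/`salto_incodicional` (the latter shown further below). As a hedged sketch, the rule-3 pattern it flags (conditional jump, then unconditional jump, then label) is classically rewritten by negating the condition; the helper and operator table below are illustrative, not the project's real classes:

```python
# Hedged sketch of the classic rule-3 rewrite this detector targets:
#   if <cond> goto Lv          if not <cond> goto Lf
#   goto Lf           ==>      label Lv
#   label Lv
NEGADO = {'>': '<=', '<': '>=', '>=': '<', '<=': '>', '==': '<>', '<>': '=='}

def regla3(izq, rel, der, etiq_verdadera, etiq_falsa):
    return f"if {izq} {NEGADO[rel]} {der} goto {etiq_falsa}\nlabel {etiq_verdadera}"

print(regla3('t1', '>', 't2', 'L1', 'L2'))
# if t1 <= t2 goto L2
# label L1
```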
#### File: Instrucciones/Sql_alter/AlterIndex.py
```python
from Instrucciones.Excepcion import Excepcion
from Instrucciones.Sql_create.Campo import Campo
from Instrucciones.Sql_create.CreateIndex import Cons
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.Undefined.Empty import Empty
class AlterIndex(Instruccion):
def __init__(self, existe, nombre, vcolum, ncolum, strGram, linea, columna):
Instruccion.__init__(self, None, linea, columna, strGram)
self.nombre = nombre
self.existe = existe
self.vcolum = vcolum
self.ncolum = ncolum
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla, arbol)
encontrado = False
ntabla = None
for db in arbol.listaBd:
for t in db.tablas:
for i in range(len(t.lista_de_campos)):
c = t.lista_de_campos[i]
if c.tipo.toString() == "index":
if c.nombre == self.nombre:
encontrado = True
ntabla = t.nombreDeTabla
break
if not encontrado and self.existe:
error = Excepcion('INX03', "Semántico", "No existe un índice llamado «" + self.nombre + "»", self.linea, self.columna)
arbol.excepciones.append(error)
arbol.consola.append("\n" + error.toString())
return
elif not encontrado:
arbol.consola.append("\nNo se ha encontrado el índice «" + self.nombre + "».")
return
else:
indice = t.lista_de_campos[i]
try:
self.obtenerCampo(int(self.ncolum), ntabla, db.tablas)  # pass an int so obtenerCampo's actual == indice comparison can match
if isinstance(self.ncolum, Excepcion):
arbol.excepciones.append(self.ncolum)
arbol.consola.append("\n" + self.ncolum.toString())
return
except:
ncolum = self.ncolum
self.ncolum = None
for c in t.lista_de_campos:
if c.nombre == ncolum:
self.ncolum = c
break
if self.ncolum is None:
error = Excepcion("INX01", "Semántico", "No existe el campo «" + ncolum + "» en la tabla «" + ntabla + "»", self.linea, self.columna)
arbol.excepciones.append(error)
arbol.consola.append("\n" + error.toString())
return
vacio = Empty(None, None, None, None, None)
self.ncolum = Campo(self.ncolum.nombre, False, vacio, vacio, "", 0, 0)
for j in range(len(indice.campos)):
if indice.campos[j].nombre == self.vcolum:
self.ncolum.restricciones = indice.campos[j].restricciones
indice.campos[j] = self.ncolum
break
restricciones = ""
for l in indice.campos:
restricciones = restricciones + " " + l.nombre + l.restricciones
nCons = None
if len(indice.constraint) == 1:
nCons = Cons(restricciones, "campo(s)")
else:
nCons = Cons(restricciones, "<br>campos(s)")
indice.constraint[len(indice.constraint) - 1] = nCons
arbol.consola.append("\nSe ha modificado el índice «" + self.nombre + "» correctamente.")
return
def obtenerCampo(self, indice, ntabla, lista):
actual = 0
for tabla in lista:
if tabla.nombreDeTabla == ntabla:
for atributo in tabla.lista_de_campos:
if atributo.tipo.toString() != "index":
actual = actual + 1
if actual == indice:
self.ncolum = atributo
return
self.ncolum = Excepcion("INX04", "Semántico", "El número de columna «" + str(indice) + "» no se encuentra en el rango de campos de la tabla «" + ntabla + "».", self.linea, self.columna)
return
```
#### File: fase2/team09/salto_incodicional.py
```python
import reglas as r
class Salto_in():
def __init__(self, etiqueta):
self.etiqueta = etiqueta
def optimizacion(self):
if r.Reglas.regla3:
print('regla2')
elif r.Reglas.regla4:
return 'goto ' + str(self.etiqueta)
elif r.Reglas.regla5:
return 'goto ' + str(self.etiqueta)
elif r.Reglas.regla2:
r.Reglas.pendiente = r.Reglas.pendiente + 'goto ' + str(self.etiqueta) + '\n'
return 'goto ' + str(self.etiqueta)
return 'goto ' + str(self.etiqueta)
```
#### File: InstruccionesPL/CreatePL/CreateOrReplacePL.py
```python
from InstruccionesPL.TablaSimbolosPL.InstruccionPL import InstruccionPL
from InstruccionesPL.TablaSimbolosPL.ArbolPL import Cuadruplo
class CreateOrReplacePL(InstruccionPL):
def __init__(self, id , parametros, retornos,declare, begin, tipo, linea, columna, strGram):
InstruccionPL.__init__(self, tipo, linea, columna, strGram)
self.id = id
self.parametros = parametros
self.retornos = retornos
self.begin = begin
self.declare = declare
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
#ejecucion de una funcion
def traducir(self, tabla, arbol):
super().traducir(tabla, arbol)
ret = ''
arbol.declararDiccionario(self.id)
arbol.setDefEtiqueta(self.id)
if self.parametros != None:
for par in self.parametros:
par.traducir(tabla, arbol)
ret += 'def {0}():\n'.format(self.id)
arbol.add3D(['def {0}():\n'.format(self.id)])
arbol.agregarGeneral(0,'Metodo', self.id, '')
if self.retornos !=None:
for rets in self.retornos:
rets.traducir(tabla, arbol)
#Definir el modelo de return variable a regresar aqui se obtiene el tipo de variable pero no se define su ID
if self.declare != None:
for declas in self.declare:
declas.traducir(tabla, arbol)
if self.begin != None:
for begs in self.begin:
if type(begs) == list:
for beg in begs:
beg.traducir(tabla, arbol)
else:
begs.traducir(tabla, arbol)
#print(ret)
# Notes on triple generation for `def`: the function header cannot go into the
# triples directly, only the function's ID, so the triple at index (0) stays empty.
# Option 1: fill a temporary list while translating the body (say it grows to 11
# entries), then append ('op', op1, index 11) to the current list followed by
# every element of the temporary list.
# Option 2: pass the lists themselves down.
'''
(op, op1, op2, resultado)
resultado = an ID or a temporal label tn
'''
```
#### File: InstruccionesPL/Ends/Ends.py
```python
from InstruccionesPL.TablaSimbolosPL.InstruccionPL import InstruccionPL
class Ends(InstruccionPL):
def __init__(self, tipo, linea, columna, strGram):
InstruccionPL.__init__(self, tipo, linea, columna, strGram)
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
# execution of a function
def traducir(self, tabla, arbol):
super().traducir(tabla, arbol)
print(';')
```
#### File: team10/InstruccionesPL/InstruccionPL.py
```python
from abc import ABC, abstractmethod
class InstruccionPL(ABC): # inherit ABC so @abstractmethod is actually enforced
@abstractmethod
def ejecutar(self, tabla, arbol):
#print('Ejecutando...?')
if self.strGram:
arbol.lRepDin.append(self.strGram)
pass
def __init__(self, tipo, linea, columna, strGram):
self.tipo = tipo
self.linea = linea
self.columna = columna
self.nodoPadre = None
self.nodosLista = []
self.strGram = strGram
```
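A minimal subclass sketch (hypothetical `Nop` node, assuming `InstruccionPL` above is in scope): concrete nodes pass position data to the base `__init__` and call `super().ejecutar` so their `strGram` string is appended to `arbol.lRepDin` for the dynamic grammar report:
```python
class Nop(InstruccionPL):
    def __init__(self, linea, columna):
        InstruccionPL.__init__(self, 'nop', linea, columna, '<nop> ::= ;')

    def ejecutar(self, tabla, arbol):
        super().ejecutar(tabla, arbol)  # records '<nop> ::= ;' in arbol.lRepDin
        return None
```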
#### File: InstruccionesPL/ObtenerSql/ObtenerSql.py
```python
from InstruccionesPL.TablaSimbolosPL.InstruccionPL import InstruccionPL
class ObtenerSql(InstruccionPL):
def __init__(self, cadena , tipo, linea, columna, strGram):
InstruccionPL.__init__(self, tipo, linea, columna, strGram)
self.cadena = cadena
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
# execution of a function
def traducir(self, tabla, arbol):
super().traducir(tabla,arbol)
print('ejecutar cadena desde sql: Crear Metodo ')
```
#### File: InstruccionesPL/Returns/ReturnsNext.py
```python
from InstruccionesPL.TablaSimbolosPL.InstruccionPL import InstruccionPL
class ReturnsNext(InstruccionPL):
def __init__(self, OperacionLogica, tipo, linea, columna, strGram):
InstruccionPL.__init__(self, tipo, linea, columna, strGram)
self.OperacionLogica = OperacionLogica
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
# execution of a function
def traducir(self, tabla, arbol):
print('translation pending')
```
#### File: SELECT/NODE_SELECT/Response_Exp.py
```python
class Response():
def __init__(self):
self.encabezados = []
self.data = []
self.tipos = []
self.tipoUnico = None
self.valorUnico = None
```
#### File: src/SENTENCIA_IF/Sentencia_Else.py
```python
import sys, os.path
nodo_dir = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + '\\AST\\')
sys.path.append(nodo_dir)
c3d_dir = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + '\\C3D\\')
sys.path.append(c3d_dir)
ent_dir = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + '\\ENTORNO\\')
sys.path.append(ent_dir)
from Nodo import Nodo
from Tipo_Expresion import *
from Entorno import Entorno
from Label import *
class Sentencia_Else(Nodo):
def __init__(self, nombreNodo, fila = -1, columna = -1, valor = None):
Nodo.__init__(self,nombreNodo, fila, columna, valor)
def execute(self, enviroment):
bloque = self.hijos[0]
# Entorno Local
entornoLocal = Entorno(enviroment)
entornoLocal.nombreEntorno = 'else ' + enviroment.nombreEntorno
entornoLocal.Global = enviroment.Global
enviroment.entornosLocales.append(entornoLocal)
return bloque.execute(entornoLocal)
def compile(self,enviroment):
bloque = self.hijos[0]
# Entorno Local
entornoLocal = Entorno(enviroment)
entornoLocal.nombreEntorno = 'else ' + enviroment.nombreEntorno
entornoLocal.Global = enviroment.Global
enviroment.entornosLocales.append(entornoLocal)
return bloque.compile(entornoLocal) # compile into the local scope, mirroring execute (was enviroment)
def getText(self):
pass
```
#### File: team14/Instrucciones/Bloque.py
```python
from Instrucciones.Instruccion import Instruccion
class Bloque(Instruccion):
def __init__(self,instrucciones):
self.instrucciones=instrucciones
def ejecutar(self, ent):
'execute the block'
for inst in self.instrucciones:
val= inst.ejecutar(ent)
if val!=None:
return val
def traducir(self,ent):
'translate the block'
cad=''
strsql=''
for inst in self.instrucciones:
obj= inst.traducir(ent)
cad+=obj.codigo3d
strsql+=obj.stringsql
self.codigo3d = cad
self.stringsql=self.stringsql.replace('LISTACONTENIDO',strsql)
return self
```
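The `LISTACONTENIDO` placeholder that `traducir` substitutes is seeded by the enclosing statement's `stringsql`; a standalone illustration of the convention (the template and fragments below are made up):
```python
template = 'CREATE FUNCTION f() AS $$ LISTACONTENIDO $$;'   # parent's stringsql
child_sql = ['raise notice msg;', 'return 1;']              # children's stringsql pieces

print(template.replace('LISTACONTENIDO', ' '.join(child_sql)))
# CREATE FUNCTION f() AS $$ raise notice msg; return 1; $$;
```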
#### File: team14/Instrucciones/Raise.py
```python
from Instrucciones.Instruccion import Instruccion
from Expresion.Relacional import *
class Raise(Instruccion):
def __init__(self,level,exp):
self.level=level
self.exp=exp
def ejecutar(self, ent):
'execute raise'
if self.level == 'notice':
variables.consola.insert(INSERT, 'NOTIFICACION: '+str(self.exp.getval(ent).valor))
variables.consola.insert(INSERT, "\n")
def traducir(self,entorno):
'translate raise'
if self.level=='notice':
exp=self.exp.traducir(entorno)
cad = exp.codigo3d
cad += 'print (' + exp.temp + ') \n'
self.codigo3d = cad
self.stringsql=' raise notice '+self.exp.traducir(entorno).stringsql+';'
return self
```
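For reference, the three-address text `Raise.traducir` assembles for `raise notice <exp>` is the child's code followed by a `print` of its temporal; an illustrative trace with a made-up child translation:
```python
exp_codigo3d = 't0 = mensaje\n'   # hypothetical child expression C3D
exp_temp = 't0'

cad = exp_codigo3d + 'print (' + exp_temp + ') \n'
print(cad, end='')
# t0 = mensaje
# print (t0)
```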
#### File: InterpreteF2/Reporteria/ReporteTS_Indice.py
```python
from typing import List
class ReportIndice():
def __init__(self, alias, nombre, tipo,columnas:List[str], consideracion, fila, columna):
self.alias = alias
self.nombre = nombre
self.tipo = tipo
self.columnas:List[str] = columnas
self.consideracion = consideracion
self.fila = fila
self.columna = columna
```
#### File: fase2/team18/CD3.py
```python
from storageManager import jsonMode as EDD
from expresiones import *
from instrucciones import *
import json
import copy
# module state
listaSalida=[]
listaMemoria=[]
listaoptimizaciones=[]
memoriTrue=0
contT=-1
contOP=-1
# helpers that generate three-address code
def numT():
global contT
global contOP
contT+=1
contOP+=1
return contOP
def reinicar_contOP():
global contOP
if contOP != -1 or contT != -1:
regla = "1 - Se reutilizo temporal"
noOp = "t"+str(contT)
Op = "t"+str(contOP)
agregarOptimizacion(regla,noOp,Op)
contOP=-1
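#NOTE: contOP numbers the temporals that appear in the emitted code and is
#rewound to -1 between statements so the names t0,t1,... are reused (logged
#as rule 1, "temporal reused"); contT keeps increasing for the whole run and
#is used to make label suffixes unique.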
def agregarOptimizacion(regla,codnoOp,condOp):
global listaoptimizaciones
listaoptimizaciones.append([regla,codnoOp,condOp])
def agregarInstr(datoMemoria,instruccion):
#agregar a la lista de parametros
global listaMemoria
if datoMemoria != '':
listaMemoria.append(datoMemoria)
#agregar a la lista de salida
global listaSalida
if(instruccion!=''):
listaSalida.append(instruccion)
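#NOTE: code generation keeps two parallel streams: listaSalida collects the
#emitted three-address Python lines, and listaMemoria collects the runtime
#operands that those lines will pop, in the same order, from memoria.json
#through the E* handlers below (one heap entry per CD3.E*() call).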
def PCreateDatabase(nombreBase,result):
reinicar_contOP()
txt="\t#Create DataBase\n"
txt+="\tt"+str(numT())+"='"+nombreBase+"'\n"
varT="t"+str(numT())
txt+="\t"+varT+"=CD3.EReplace()\n"
txt+="\tif("+varT+"):\n"
txt+="\t\tgoto .dropDB"+str(contT)+"\n"
txt+="\t\tlabel.dropDB"+str(contT)+"\n"
txt+="\t\tCD3.EDropDatabase()"
replac=False
if(result==1):
#'eliminar, luego crear'
replac=True
agregarInstr(replac,txt)#agregar replace
agregarInstr(nombreBase,'')#agregar Drop
else:
agregarInstr(replac,txt)#agregar replace
#crear tabla
txt3="\tCD3.ECreateDatabase()\n"
agregarInstr(nombreBase,txt3)#agregar create
def PDropDatabase(nombreBase):
reinicar_contOP()
txt="\t#Drop DataBase\n"
txt+="\tt"+str(numT())+"='"+nombreBase+"'\n"
txt+="\tCD3.EDropDatabase()\n"
agregarInstr(nombreBase,txt)
def PSelectFunciones(alias,resultado):
reinicar_contOP()
txt="\t#Select funcion\n"
varT="t"+str(numT())
txt+="\t"+varT+"='"+alias+"'\n"
varR="t"+str(numT())
txt+="\t"+varR+"='"+str(resultado)+"'\n"
txt+='\tprint("Cabecera: " + '+ varT + ' + " Resultado: "+ str('+ varR +'))\n'
agregarInstr("",txt)
def PSelectTablas(nombreTabla,cabeceras,filas,cantidadRegistros):
reinicar_contOP()
registros=[cantidadRegistros]
txt="\t#Select table\n"
varT="t"+str(numT())
txt+="\t"+varT+"="+str(nombreTabla)+"\n"
varC="t"+str(numT())
txt+="\t"+varC+"="+str(cabeceras)+"\n"
varR="t"+str(numT())
txt+="\t"+varR+"=CD3.ECantidadRegistros()\n"
varfilas="t"+str(numT())
txt+="\t"+varfilas+"="+str(filas)+"\n"
varCont="t"+str(numT())
txt+="\t"+varCont+"=0\n"
txt+="\tprint(\'tablas seleccionadas:\',str("+varT+"))\n"
txt+="\tprint(\'cabeceras:\',str("+varC+"))\n"
txt+="\tlabel.mostrarFila"+str(contT)+"\n"
txt+="\tif("+varCont+"<"+varR+"):"+"\n"
txt+="\t\tprint(\'\t\',"+varfilas+"["+varCont+"])\n"
txt+="\t\t"+varCont+"="+varCont+"+1\n"
txt+="\t\tgoto.mostrarFila"+str(contT)+"\n"
agregarInstr(registros,txt)
def PUseDatabase(nombreBase):
reinicar_contOP()
txt="\t#Use Database\n"
txt+="\tt"+str(numT())+"='"+nombreBase+"'\n"
txt+="\tCD3.EUseDatabase()\n"
agregarInstr(nombreBase,txt)
def PCreateType(nombreBase,nombreTabla,cantidadcol,valores):
'''
Emitted shape (pseudo-C3D):
var=nombreTipo # load the type name into memory
t3=CD3.ECreateTable() # create the backing table for the enum type
if(t3): goto .InsertarN
label .InsertarN
var=[v1,...,vn] # load the enum value list into memory
CD3.EInsert()
'''
reinicar_contOP()
txt="\t#Create Type\n"
txt+="\tt"+str(numT())+"='"+nombreTabla+"'\n"
var="t"+str(numT())
txt+="\t"+var+"= CD3.ECreateTable()"+"\n"
txt+="\tif("+var+"):"+"\n"
txt+="\t\tgoto .Insertar"+str(contT)+"\n"
txt+="\t\tlabel.Insertar"+str(contT)+"\n"
var="t"+str(numT())
txt+="\t\t"+var+"="+str(valores)+""
crearT=[nombreBase,nombreTabla,cantidadcol]
agregarInstr(crearT,txt)
txt="\t\t"+"CD3.EInsert()"+"\n"
inserT=[nombreBase,nombreTabla,valores]
agregarInstr(inserT,txt)
def PCreateTable(nombreBase,nombreTabla,cantidadcol,llaves,nombresC):
reinicar_contOP()
txt="\t#Create Table\n"
txt+="\tt"+str(numT())+"='"+nombreTabla+"'\n"
txt+="\tt"+str(numT())+"="+str(nombresC)+"\n"
var="t"+str(numT())
txt+="\t"+var+"=CD3.ECreateTable()"+"\n"
txt+="\tif("+var+"):"+"\n"
txt+="\t\tgoto .insPK"+str(contT)+"\n"
txt+="\t\tlabel.insPK"+str(contT)+"\n"
var="t"+str(numT())
txt+="\t\t"+var+"="+str(llaves)+""
crearT=[nombreBase,nombreTabla,cantidadcol]
agregarInstr(crearT,txt)
txt="\t\t"+"CD3.EAddPK()"+"\n"
pkT=[nombreBase,nombreTabla,llaves]
agregarInstr(pkT,txt)
def PInsert(nombreBase,nombreTabla,valores):
reinicar_contOP()
Data_insert=[nombreBase,nombreTabla,valores]
txt="\t#Insert\n"
txt+="\tt"+str(numT())+"='"+nombreTabla+"'\n"
varT="t"+str(numT())
txt+="\t"+varT+"=CD3.EExistT()\n"
txt+="\tif("+varT+"):\n"
txt+="\t\tgoto .insert"+str(contT)+"\n"
txt+="\t\tlabel.insert"+str(contT)+"\n"
agregarInstr(True,'')#agregar que si existe
varT="t"+str(numT())
txt+="\t\t"+varT+"="+str(valores)+"\n"
txt+="\t\tCD3.EInsert()\n"
agregarInstr(Data_insert,txt)
def PUpdate(nombreBase,nombreTabla,indice,valor,nvalores):
reinicar_contOP()
update_data=[nombreBase,nombreTabla,indice,valor,nvalores]
busqueda_tb=[nombreBase,nombreTabla]
txt="\t#Update Registro\n"
txt+="\tt"+str(numT())+"='"+nombreTabla+"'\n"
var="t"+str(numT())
txt+="\t"+var+"=CD3.EObtenerTabla()\n"
txt+="\tif("+var+"):\n"
txt+="\t\tgoto .tbencontrada"+str(contT)+"\n"
txt+="\t\tlabel.tbencontrada"+str(contT)
agregarInstr(busqueda_tb,txt)
txt="\t\tt"+str(numT())+"='"+str(valor)+"'\n"
txt+="\t\tCD3.EUpdate()\n"
agregarInstr(update_data,txt)
def PDelete(nombreBase,nombreTabla,cols):
reinicar_contOP()
delete_data=[nombreBase,nombreTabla,cols]
busqueda_tb=[nombreBase,nombreTabla]
txt="\t#Delete Registro\n"
txt+="\tt"+str(numT())+"='"+nombreTabla+"'\n"
var="t"+str(numT())
txt+="\t"+var+"=CD3.EObtenerTabla()\n"
txt+="\tif("+var+"):\n"
txt+="\t\tgoto .tbencontrada"+str(contT)+"\n"
txt+="\t\tlabel.tbencontrada"+str(contT)
agregarInstr(busqueda_tb,txt)
txt="\t\tt"+str(numT())+"="+str(cols)+"\n"
txt+="\t\tCD3.EDelete()\n"
agregarInstr(delete_data,txt)
def PShowDatabases(dataBases):
reinicar_contOP()
txt="\t#Show Databases\n"
txt+="\tt"+str(numT())+"="+str(dataBases)+"\n"
txt+="\tCD3.EShowDatabases()\n"
agregarInstr(dataBases,txt)
def PDropTable(nombreBase,nombreTabla):
reinicar_contOP()
drop_tb=[nombreBase,nombreTabla]
txt="\t#Drop Table\n"
txt+="\tt"+str(numT())+"='"+nombreTabla+"'\n"
txt+="\tCD3.EDropTable()\n"
agregarInstr(drop_tb,txt)
def PDropFuncion(nombres):
reinicar_contOP()
drop_funcion=[nombres]
txt="\t#Drop Funcion\n"
txt+="\tt"+str(numT())+"="+str(nombres)+"\n"
txt+="\tCD3.EDropFuncion()\n"
agregarInstr(drop_funcion,txt)
def PCreateFuncion(nombreF,tipoF,contenidoF,parametrosF,reemplazada):
reinicar_contOP()
txt="\t#Crear Funcion\n"
txt+="\tt"+str(numT())+"='"+nombreF+"'\n"
txt+="\tt"+str(numT())+"="+str(parametrosF)+"\n"
txt+="\tt"+str(numT())+"="+str(reemplazada)+" #Reemplazar funcion\n"
varT="t"+str(numT())
txt+="\t"+varT+"=CD3.ECreateFuncion()\n"
#------------------ optimization ---------------
regla="3 - se nego condicion para poder eliminar etiqueta"
msg="if("+varT+"):<br>"
msg+="\tgoto .bodyFun"+str(contT)+"<br>"
msg+="else:<br>"
msg+="\tgoto .endFun"+str(contT)+"<br>"
msg+="\tlabel.bodyFun"+str(contT)+"<br>"
msg+="\tlabel.endFun"+str(contT)+"<br>"
msg2="if(!"+varT+"):<br>"
msg2+="\tgoto .endFun"+str(contT)+"<br>"
msg2+="\tlabel.endFun"+str(contT)+"<br>"
#---------------------------------------------
txt2="\tif("+varT+"==False):\n"
fin=contT
txt2+="\t\tgoto .endFun"+str(fin)+"\n"
varT="t"+str(numT())
txt2+="\t"+varT+"=CD3.ExcuteFun()\n"
txt2+="\tif("+varT+"==False):\n"
txt2+="\t\tgoto .endFun"+str(fin)+"\n"
# declarations
txt2+="\tlabel.decFun"+str(contT)+" #Declaraciones funcion\n"
for i in contenidoF.declaraciones:
txt2+="\tt"+str(numT())+"='"+i.nombre+"'\n"
print("CD3------>",i)
# body
txt2+="\tlabel.bodyFun"+str(contT)+" #Contenido funcion\n"
txt2+=PInstrFun(contenidoF.contenido)+"\n"
txt2+="\tlabel.endFun"+str(fin)+"\n"
agregarOptimizacion(regla,msg,msg2)
txt+=txt2
dataC=[nombreF,tipoF,str(contenidoF),parametrosF,reemplazada]
agregarInstr(dataC,txt)
agregarInstr(False,'')
def PInstrFun(inst):
var=''
for i in inst:
if isinstance(i,Sentencia_IF):
var+="\t#sentencia IF\n"
var+=txtIF(i)
elif isinstance(i,Sentencia_Case):
var+="\t#sentencia case\n"
var+=txtCase(i)
elif isinstance(i,Operacion_Expresion):
var+=txtExpresion(i)
elif isinstance(i,Select_Asigacion):
var+="\t#sentencia select asignacion\n"
var+=txtSelectAsig(i)
return var
def txtIF(inst):
var=''
var+="\t#condicion IF\n"
mivar=txtC3dExp(inst.condicion)
varT=""
if(mivar[1]==''):
varT="t"+str(numT())
var+="\t"+varT+"="+mivar[0]+"\n"
else:
var+=mivar[0]+"\n"
varT="t"+str(mivar[1])
var+="\tif("+varT+"):\n"
var+="\t\tgoto .if"+str(contT)+"\n"
var+="\telse:\n"
fin=contT
var+="\t\tgoto .endif"+str(fin)+"\n"
var+="\tlabel.if"+str(contT)+"\n"
#contenido if
var+=PInstrFun(inst.sentencias) #"\tt"+str(numT())+"="+str(inst.sentencias)+"\n"
var+="\tlabel.endif"+str(fin)+"\n"
#contenido else
if(inst.elsif_else[0]!=False):
for elifX in inst.elsif_else:
print ("------------------",elifX)
if(elifX.condicion==None):
var+=PInstrFun(elifX.sentencias)
#elif
else:
elifAux=Sentencia_IF(elifX.condicion,elifX.sentencias,[False])
var+=txtIF(elifAux) # append: a plain '=' here discarded the code generated so far
return var
def txtCase(inst):
var=""
print(inst.busqueda,inst.sentencia_when)
if(inst.busqueda!=None):
valorB=inst.busqueda.id
finCase=contT
for wen in inst.sentencia_when:
finWen=contT
var+="\t#cabecera wen\n"
if(wen.condicion!=None):
mivar=txtC3dExp(wen.condicion)
varT=""
if(mivar[1]==''):
varT="t"+str(numT())
var+="\t"+varT+"="+mivar[0]+"!='"+valorB+"'\n"
else:
var+=mivar[0]+"\n"
varT="t"+str(mivar[1])
var+="\tif("+varT+"):\n"
var+="\t\tgoto.finWen"+str(finWen)+"\n"
var+=PInstrFun(wen.sentencias)
var+="\tgoto.finCase"+str(finCase)+"\n"
var+="\tlabel.finWen"+str(finWen)+"\n"
else:
var+=PInstrFun(wen.sentencias)
var+="\tlabel.finCase"+str(finCase)+"\n"
else:
finCase=contT
for wen in inst.sentencia_when:
finWen=contT
var+="\t#cabecera wen\n"
if(wen.condicion!=None):
mivar=txtC3dExp(wen.condicion)
varT=""
if(mivar[1]==''):
varT="t"+str(numT())
var+="\t"+varT+"="+mivar[0]+"\n"
else:
var+=mivar[0]+"\n"
varT="t"+str(mivar[1])
var+="\tif("+varT+"):\n"
var+="\t\tgoto.finWen"+str(finWen)+"\n"
var+=PInstrFun(wen.sentencias)
var+="\tgoto.finCase"+str(finCase)+"\n"
var+="\tlabel.finWen"+str(finWen)+"\n"
else:
var+=PInstrFun(wen.sentencias)
var+="\tlabel.finCase"+str(finCase)+"\n"
return var
def txtExpresion(inst):
var=''
if(inst.tipo=="return"):
var+="\t#Return\n"
mivar=txtC3dExp(inst.expresion)
if(mivar[1]==''):
varT="t"+str(numT())
var+="\t"+varT+"="+mivar[0]+"\n"
else:
var+=mivar[0]+"\n"
varT="t"+str(mivar[1])
var+="\t#return "+varT+"\n"
elif(inst.tipo=="asignacion"):
var+="\t#Asignacion\n"
mivar=txtC3dExp(inst.expresion)
if(mivar[1]==''):
#variable
varT="t"+str(numT())
var+="\t"+str(inst.variable.id)+"="+mivar[0]+"\n"
else:
varT="t"+str(numT())
var+=mivar[0]+"\n"
var+="\t"+str(inst.variable.id)+"=t"+str(mivar[1])+"\n"
elif(inst.tipo=="raise"):
var+="\t#Raise\n"
mivar=txtC3dExp(inst.expresion)
if(mivar[1]==''):
varT="t"+str(numT())
var+="\t"+varT+"="+mivar[0]+"\n"
else:
var+=mivar[0]+"\n"
varT="t"+str(mivar[1])
var+="\tprint("+varT+");\n"
return var
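#Example (illustrative): for an assignment `x := a + 1` txtExpresion first
#emits the child's C3D and then the final copy, roughly:
# t0=a + 1
# x=t0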
# generate three-address code for an expression, e.g. a+b+c*1+3
def txtC3dExp(inst):
result=''
if isinstance(inst,Operando_ID):
txt="'"+str(inst.id)+"'"
result=[txt,'']
elif isinstance(inst,Operando_Numerico):
txt=str(inst.valor)
result=[txt,'']
elif isinstance(inst,Operando_Cadena):
txt="'"+str(inst.valor)+"'"
result=[txt,'']
elif isinstance(inst,Operacion_Aritmetica):
var=numT()
simbolo=''
a=txtC3dExp(inst.op1)
b=txtC3dExp(inst.op2)
if(inst.operador==OPERACION_ARITMETICA.MAS):
simbolo=" + "
elif(inst.operador==OPERACION_ARITMETICA.MENOS):
simbolo=" - "
elif(inst.operador==OPERACION_ARITMETICA.POR):
simbolo=" * "
elif(inst.operador==OPERACION_ARITMETICA.DIVIDIDO):
simbolo=" / "
elif(inst.operador==OPERACION_ARITMETICA.POTENCIA):
simbolo=" ^ "
elif(inst.operador==OPERACION_ARITMETICA.MODULO):
simbolo=" % "
if(a[1]==''):
txt="\tt"+str(var)+"="+a[0]+simbolo
else:
txt2=a[0]+"\n"
txt="\tt"+str(var)+"="+"t"+str(a[1])+simbolo
txt=txt2+txt
if(b[1]==''):
txt+=b[0]
else:
txt2=b[0]+"\n"
txt+="t"+str(b[1])
txt=txt2+txt
result=[txt,var]
elif isinstance(inst,Operacion_Relacional) or isinstance(inst,Operacion_Logica_Binaria):
var=numT()
simbolo=''
a=txtC3dExp(inst.op1)
b=txtC3dExp(inst.op2)
if(inst.operador==OPERACION_RELACIONAL.IGUAL):
simbolo=" == "
elif(inst.operador==OPERACION_RELACIONAL.DIFERENTE):
simbolo=" != "
elif(inst.operador==OPERACION_RELACIONAL.MAYORIGUALQUE):
simbolo=" >= "
elif(inst.operador==OPERACION_RELACIONAL.MENORIGUALQUE):
simbolo=" <= "
elif(inst.operador==OPERACION_RELACIONAL.MAYOR_QUE):
simbolo=" > "
elif(inst.operador==OPERACION_RELACIONAL.MENOR_QUE):
simbolo=" < "
elif(inst.operador==OPERACION_LOGICA.AND):
simbolo=" and "
elif(inst.operador==OPERACION_LOGICA.OR):
simbolo=" or "
if(a[1]==''):
txt="\tt"+str(var)+"="+a[0]+simbolo
else:
txt2=a[0]+"\n"
txt="\tt"+str(var)+"="+"t"+str(a[1])+simbolo
txt=txt2+txt
if(b[1]==''):
txt+=b[0]
else:
txt2=b[0]+"\n"
txt+="t"+str(b[1])+"\n"
txt=txt2+txt
result=[txt,var]
else:
txt="'"+str(inst)+"'"
result=[txt,'']
result[0]=filtroC3DExp(result[0])
return result
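#NOTE: txtC3dExp returns a pair [code, temp_index]: leaves yield [literal, '']
#with no code, while operator nodes yield the accumulated assignments plus the
#index of the temporal holding the result, e.g. for a + b*2 something like
#["\tt1='b' * 2\n\tt0='a' + t1", 0].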
def filtroC3DExp(cadena):
listEx=[]
listEx=cadena.split(sep='\n')
newList=[]
for exp in listEx:
listAux=[]
# split on the assignment operator
listAux=exp.split(sep='=')
# addition rules
if " + " in exp and len(listAux)==2:
auxVal=[]
auxVal=listAux[1].split(sep=' + ')
#regla 12
if(auxVal[0]=='0'):
regla="12 - reduccion eliminando el valor 0"
txtold=exp
txtnew=listAux[0]+"="+auxVal[1]
agregarOptimizacion(regla,txtold,txtnew)
newList.append(txtnew)
elif(auxVal[1]=='0'):
regla="12 - reduccion eliminando el valor 0"
txtold=exp
txtnew=listAux[0]+"="+auxVal[0]
agregarOptimizacion(regla,txtold,txtnew)
newList.append(txtnew)
else:
newList.append(exp)
# multiplication rules
elif " * " in exp and len(listAux)==2:
auxVal=[]
auxVal=listAux[1].split(sep=' * ')
#regla 17
if(auxVal[0]=='0'):
regla="17 - se asigna 0"
txtold=exp
txtnew=listAux[0]+"="+auxVal[0]
agregarOptimizacion(regla,txtold,txtnew)
newList.append(txtnew)
elif(auxVal[1]=='0'):
regla="17 - se asigna 0"
txtold=exp
txtnew=listAux[0]+"="+auxVal[1]
agregarOptimizacion(regla,txtold,txtnew)
newList.append(txtnew)
#regla 14
elif(auxVal[0]=='1'):
regla="14 - reduccion eliminando el valor 1"
txtold=exp
txtnew=listAux[0]+"="+auxVal[1]
agregarOptimizacion(regla,txtold,txtnew)
newList.append(txtnew)
elif(auxVal[1]=='1'):
regla="14 - reduccion eliminando el valor 1"
txtold=exp
txtnew=listAux[0]+"="+auxVal[0]
agregarOptimizacion(regla,txtold,txtnew)
newList.append(txtnew)
#regla 16
elif(auxVal[0]=='2'):
regla="16 - se agrega la suma del mismo valor"
txtold=exp
txtnew=listAux[0]+"="+auxVal[1]+" + "+auxVal[1]
agregarOptimizacion(regla,txtold,txtnew)
newList.append(txtnew)
elif(auxVal[1]=='2'):
regla="16 - se agrega la suma del mismo valor"
txtold=exp
txtnew=listAux[0]+"="+auxVal[0]+" + "+auxVal[0]
agregarOptimizacion(regla,txtold,txtnew)
newList.append(txtnew)
else:
newList.append(exp)
# subtraction rules
elif " - " in exp and len(listAux)==2:
auxVal=[]
auxVal=listAux[1].split(sep=' - ')
#regla 13
if(auxVal[0]=='0'):
regla="13 - reduccion eliminando el valor 0"
txtold=exp
txtnew=listAux[0]+"="+auxVal[1]
agregarOptimizacion(regla,txtold,txtnew)
newList.append(txtnew)
elif(auxVal[1]=='0'):
regla="13 - reduccion eliminando el valor 0"
txtold=exp
txtnew=listAux[0]+"="+auxVal[0]
agregarOptimizacion(regla,txtold,txtnew)
newList.append(txtnew)
else:
newList.append(exp)
# division rules
elif " / " in exp and len(listAux)==2:
auxVal=[]
auxVal=listAux[1].split(sep=' / ')
#regla 15
if(auxVal[1]=='1'):
regla="15 - reduccion eliminando el valor 1"
txtold=exp
txtnew=listAux[0]+"="+auxVal[0]
agregarOptimizacion(regla,txtold,txtnew)
newList.append(txtnew)
#regla 18
elif(auxVal[0]=='0'):
regla="18 - se asigna 0"
txtold=exp
txtnew=listAux[0]+"="+auxVal[0]
agregarOptimizacion(regla,txtold,txtnew)
newList.append(txtnew)
else:
newList.append(exp)
else:
newList.append(exp)
res=""
finfor=0
for sal in newList:
finfor+=1
if len(newList)>finfor:
res+=sal+"\n"
else:
res+=sal
if(res==""):
res=cadena
print("Salida antigua\n",cadena)
print("Salida nueva\n",res)
return res
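#Example (illustrative): filtroC3DExp("\tt0=x + 0") matches rule 12, returns
#"\tt0=x", and records the before/after pair via agregarOptimizacion for the
#HTML optimization report.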
def txtSelectAsig(inst):
return ""
# Stored procedures
def PDropProcedimientos(nombres):
reinicar_contOP()
drop_procedimientos=[nombres]
txt="\t#Drop Procedure\n"
txt+="\tt"+str(numT())+"="+str(nombres)+"\n"
txt+="\tCD3.EDropProcedure()\n"
agregarInstr(drop_procedimientos,txt)
def PCreateProcedure(nombre,cuerpo,parametros,reemplazada):
reinicar_contOP()
txt="\t#Crear Stored Procedure\n"
txt+="\tt"+str(numT())+"='"+nombre+"'\n"
txt+="\tt"+str(numT())+"='"+str(parametros)+"'\n"
txt+="\tt"+str(numT())+"="+str(reemplazada)+" #Reemplazar procedure\n"
varT="t"+str(numT())
txt+="\t"+varT+"=CD3.ECreateProcedure()\n"
#------------------optimizacion---------------
regla="3 - se nego condicion para poder eliminar etiqueta"
msg="if("+varT+"):\n"
msg+="\tgoto .bodyProc"+str(contT)+"\n"
msg+="else:\n"
msg+="\tgoto .endProc"+str(contT)
msg2=""
#---------------------------------------------
txt2="\tif("+varT+"==0):\n"
fin=contT
txt2+="\t\tgoto .endProc"+str(fin)+"\n"
varT="t"+str(numT())
txt2+="\t"+varT+"=CD3.ExecuteProc()\n"
txt2+="\tif("+varT+"==0):\n"
txt2+="\t\tgoto .endProc"+str(fin)+"\n"
#declaraciones
txt2+="\tlabel.decProc"+str(contT)+" #Declaraciones Procedure\n"
for i in cuerpo.declaraciones:
txt2+="\tt"+str(numT())+"='"+i.nombre+"'\n"
print("CD3------>",i)
#Body procedure
txt2+="\tlabel.bodyProc"+str(contT)+" #Cuerpo Procedure\n"
txt2+=PInstrProcedure(cuerpo.contenido)+"\n"
txt2+="\tlabel.endProc"+str(fin)+"\n"
agregarOptimizacion(regla,msg,txt2)
txt+=txt2
data=[nombre,str(cuerpo),str(parametros),reemplazada]
agregarInstr(data,txt)
agregarInstr(False,'')
def PInstrProcedure(inst):
print("Imprimir procedure", inst)
var=''
'''for i in inst:
if isinstance(i,Insertar):
var+="\t#Instruccion INSERT\n"
txtInsert(i)
elif isinstance(i,Actualizar):
var+="\t#Instruccion update\n"
txtUpdate(i)
elif isinstance(i,Operacion_Expresion):
var+="\t#sentencia EXPRESION, Return,Raise y asignacion\n"
elif isinstance(i,Eliminar):
var+="\t#Instruccion delete\n"
txtEliminar(i)'''
return var
def txtInsert(instr):
print("Insert")
return ""
def txtUpdate(instr):
print("Update")
return ""
def txtEliminar(instr):
print("Delete")
return ""
# end stored procedures
TTv=""
def PAlterRenameDatabase(nombreBaseOld,nombreBaseNew):
global TTv
reinicar_contOP()
valores=[nombreBaseOld,nombreBaseNew]
TTv=""
addLine("#ALTER Rename Database")
addLine("t"+str(numT())+"=\'"+nombreBaseOld+"\'")
addLine("t"+str(numT())+"=\'"+nombreBaseNew+"\'")
addLine("CD3.EAltRenameDatabase()")
txt=copy.deepcopy(TTv)
agregarInstr(valores,txt)
def PAlterTbRenameConst(NombreTabla,ID1,ID2):
global TTv
reinicar_contOP()
valores=[NombreTabla,ID1,ID2]
TTv=""
addLine("#ALTER TABLE RENAME Constraint")
addLine("t"+str(numT())+"=\'"+NombreTabla+"\'")
addLine("#Old Name")
addLine("t"+str(numT())+"=\'"+ID1+"\'")
addLine("#New Name")
addLine("t"+str(numT())+"=\'"+ID2+"\'")
addLine("CD3.EAltTbRenameConst()")
txt=copy.deepcopy(TTv)
agregarInstr(valores,txt)
def PAlterTbRenameTable(baseActiva, NombreTabla,ID1):
global TTv
reinicar_contOP()
valores=[baseActiva, NombreTabla,ID1]
TTv=""
addLine("#ALTER TABLE RENAME Table")
addLine("#Old Table Name")
addLine("t"+str(numT())+"=\'"+NombreTabla+"\'")
addLine("#New Table Name")
addLine("t"+str(numT())+"=\'"+ID1+"\'")
addLine("CD3.EAltTbRenameTable()")
txt=copy.deepcopy(TTv)
agregarInstr(valores,txt)
def PAlterTbRenameColum(baseActiva,NombreTabla,ID1,ID2):
global TTv
reinicar_contOP()
valores=[baseActiva,NombreTabla,ID1,ID2]
TTv=""
addLine("#ALTER TABLE RENAME Column")
addLine("#Table Name")
addLine("t"+str(numT())+"=\'"+NombreTabla+"\'")
addLine("#Old Name")
addLine("t"+str(numT())+"=\'"+ID1+"\'")
addLine("#New Name")
addLine("t"+str(numT())+"=\'"+ID2+"\'")
addLine("CD3.EAltTbRenameColum()")
txt=copy.deepcopy(TTv)
agregarInstr(valores,txt)
def PAlterTbAlterSNN(baseActiva,NombreTabla,ID):
global TTv
reinicar_contOP()
valores=[baseActiva,NombreTabla,ID]
TTv=""
addLine("#ALTER TABLE ALTER COLUMN set not null")
addLine("#Table Name")
addLine("t"+str(numT())+"=\'"+NombreTabla+"\'")
addLine("#Columna:"+ID+" SET NOT NULL")
addLine("t"+str(numT())+"=\'"+ID+"\'")
addLine("CD3.EAltTbAlterSNN()")
txt=copy.deepcopy(TTv)
agregarInstr(valores,txt)
def PAlterTbAlterSDT(baseActiva,NombreTabla,ID,OPEE1):
global TTv
reinicar_contOP()
valores=[baseActiva,NombreTabla,ID,OPEE1]
TTv=""
addLine("#ALTER TABLE ALTER COLUMN set data type")
addLine("#Table Name")
addLine("t"+str(numT())+"=\'"+NombreTabla+"\'")
addLine("#Columna:"+ID+" SET DATA TYPE")
addLine("t"+str(numT())+"=\'"+ID+"\'")
addLine("#New Type")
addLine("t"+str(numT())+"=\'"+OPEE1+"\'")
addLine("CD3.EAltTbAlterSDT()")
txt=copy.deepcopy(TTv)
agregarInstr(valores,txt)
def PAlterTbAlterSDef(baseActiva,NombreTabla,ID,valCOL):
global TTv
reinicar_contOP()
valores=[baseActiva,NombreTabla,ID,valCOL]
TTv=""
addLine("#ALTER TABLE ALTER COLUMN set default")
addLine("#Table Name")
addLine("t"+str(numT())+"=\'"+NombreTabla+"\'")
addLine("#Columna:"+ID+" SET DEFAULT")
addLine("t"+str(numT())+"=\'"+ID+"\'")
addLine("#New Default")
addLine("t"+str(numT())+"=\'"+str(valCOL)+"\'")
addLine("CD3.EAltTbAlterSDef()")
txt=copy.deepcopy(TTv)
agregarInstr(valores,txt)
def PAlterTbAlterDNN(baseActiva,NombreTabla,ID):
global TTv
reinicar_contOP()
valores=[baseActiva,NombreTabla,ID]
TTv=""
addLine("#ALTER TABLE ALTER COLUMN drop not null")
addLine("#Table Name")
addLine("t"+str(numT())+"=\'"+NombreTabla+"\'")
addLine("#Columna:"+ID+" DROP NOT NULL")
addLine("t"+str(numT())+"=\'"+ID+"\'")
addLine("CD3.EAltTbAlterDNN()")
txt=copy.deepcopy(TTv)
agregarInstr(valores,txt)
def PAlterTbAlterDDef(baseActiva,NombreTabla,ID):
global TTv
reinicar_contOP()
valores=[baseActiva,NombreTabla,ID]
TTv=""
addLine("#ALTER TABLE ALTER COLUMN drop default")
addLine("#Table Name")
addLine("t"+str(numT())+"=\'"+NombreTabla+"\'")
addLine("#Columna:"+ID+" DROP DEFAULT")
addLine("t"+str(numT())+"=\'"+ID+"\'")
addLine("CD3.EAltTbAlterDDef()")
txt=copy.deepcopy(TTv)
agregarInstr(valores,txt)
def PAlterTbAlterDropCol(baseActiva,NombreTabla,ID,No_col):
global TTv
reinicar_contOP()
valores=[baseActiva,NombreTabla,ID,No_col]
TTv=""
addLine("#ALTER TABLE ALTER COLUMN drop column")
addLine("#Table Name")
addLine("t"+str(numT())+"=\'"+NombreTabla+"\'")
addLine("#Columna:"+ID+" DROP COLUMN")
addLine("t"+str(numT())+"=\'"+ID+"\'")
addLine("CD3.EAltTbAlterDropCol()")
txt=copy.deepcopy(TTv)
agregarInstr(valores,txt)
def PAlterTbAlterDropConst(baseActiva,NombreTabla,ID):
global TTv
reinicar_contOP()
valores=[baseActiva,NombreTabla,ID]
TTv=""
addLine("#ALTER TABLE ALTER COLUMN drop constraint")
addLine("#Table Name")
addLine("t"+str(numT())+"=\'"+NombreTabla+"\'")
addLine("#Constraint:"+ID+" DROP constraint")
addLine("CD3.EAltTbAlterDropConst()")
txt=copy.deepcopy(TTv)
agregarInstr(valores,txt)
def PAlterTbAlterAddConstUni(baseActiva,NombreTabla,ColN,ID):
global TTv
reinicar_contOP()
valores=[baseActiva,NombreTabla,ColN,ID]
TTv=""
addLine("#ALTER TABLE ALTER COLUMN add constraint unique")
addLine("#Table Name")
addLine("t"+str(numT())+"=\'"+NombreTabla+"\'")
addLine("#Column Name")
addLine("t"+str(numT())+"=\'"+ColN+"\'")
addLine("#Constraint:"+ID+" ADD constraint unique")
addLine("CD3.EAltTbAlterAddConstUni()")
txt=copy.deepcopy(TTv)
agregarInstr(valores,txt)
def PAlterTbAlterAddConstPrim(baseActiva,NombreTabla,ColN,ID):
global TTv
reinicar_contOP()
valores=[baseActiva,NombreTabla,ColN,ID]
TTv=""
addLine("#ALTER TABLE ALTER COLUMN add constraint primary")
addLine("#Table Name")
addLine("t"+str(numT())+"=\'"+NombreTabla+"\'")
addLine("#Column Name")
addLine("t"+str(numT())+"=\'"+ColN+"\'")
addLine("#Constraint:"+ID+" ADD constraint primary")
addLine("CD3.EAltTbAlterAddConstPrim()")
txt=copy.deepcopy(TTv)
agregarInstr(valores,txt)
def PAlterTbAlterAddConstFor(baseActiva,NombreTabla,ColN,ID):
global TTv
reinicar_contOP()
valores=[baseActiva,NombreTabla,ColN,ID]
TTv=""
addLine("#ALTER TABLE ALTER COLUMN add constraint foreign")
addLine("#Table Name")
addLine("t"+str(numT())+"=\'"+NombreTabla+"\'")
addLine("#Column Name")
addLine("t"+str(numT())+"=\'"+ColN+"\'")
addLine("#Constraint:"+ID+" ADD constraint foreign")
addLine("CD3.EAltTbAlterAddConstFor()")
txt=copy.deepcopy(TTv)
agregarInstr(valores,txt)
def PAlterTbAlterAddCol(baseActiva,NombreTabla,ID,TIPO):
global TTv
reinicar_contOP()
valores=[baseActiva,NombreTabla,ID,TIPO]
TTv=""
addLine("#ALTER TABLE ALTER COLUMN add column")
addLine("#Table Name")
addLine("t"+str(numT())+"=\'"+NombreTabla+"\'")
addLine("#Column Name")
addLine("t"+str(numT())+"=\'"+ID+"\'")
addLine("#Column Type:"+TIPO)
addLine("CD3.EAltTbAlterAddCol()")
txt=copy.deepcopy(TTv)
agregarInstr(valores,txt)
def addLine(cadena):
global TTv
TTv+=("\t"+cadena+"\n")
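#NOTE: TTv buffers one ALTER statement's C3D text; every PAlter* helper resets
#it, appends lines through addLine, deep-copies the result and pairs it with
#its heap values via agregarInstr.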
# write the output file
def CrearArchivo():
# build the output driver SalidaCD3.py
nombre="SalidaCD3.py"
f=open(nombre,"w")
f.write("#importar modulos")
f.write("\n")
f.write("import CD3 as CD3 #modulo codigo 3 direcciones")
f.write("\n")
f.write("from goto import with_goto #modulo goto")
f.write("\n")
f.write("\n")
f.write("@with_goto # Decorador necesario \ndef main():\n")
f.write("#Codigo Resultante")
f.write("\n")
for x in listaSalida:
f.write(x)
f.write("\n")
f.write("main()")
f.close()
'''
print("\n-------------------Optimizacion----------------")
for i in listaoptimizaciones:
print(i[0],i[1])
print("-------------------------------------------------\n")
'''
# generate the optimization report
Reporte_Optimizaciones()
# persist the heap to memoria.json
with open('memoria.json','w') as file:
json.dump(listaMemoria,file,indent=4)
# reset module state for the next run; `global` is required here, otherwise
# the assignments below create locals and the counters are never reset
global memoriTrue, contT, contOP
listaMemoria.clear()
listaSalida.clear()
listaoptimizaciones.clear()
memoriTrue=0
contT=-1
contOP=-1
# runtime handlers that execute the three-address code
def cargarMemoria():
# lazy one-shot load of the temp heap (memoria.json)
global memoriTrue
global listaMemoria
if(memoriTrue==0):
print("----------Verificando Heap-------")
memoriTrue=1
with open('memoria.json') as file:
data = json.load(file)
listaMemoria=data
def ECreateDatabase():
cargarMemoria()
if(len(listaMemoria)>0):
print("base de datos creada:",listaMemoria[0])
EDD.createDatabase(listaMemoria[0])
listaMemoria.pop(0)
def EDropDatabase():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
print("base de datos eliminada:",listaMemoria[0])
EDD.dropDatabase(listaMemoria[0])
listaMemoria.pop(0)
def EUseDatabase():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
print("selecionada base de datos:",listaMemoria[0])
listaMemoria.pop(0)
def ECreateType():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
crear_type=listaMemoria[0]
EDD.createTable(crear_type[0],crear_type[1],crear_type[2])
EDD.insert(crear_type[0],crear_type[1],crear_type[3])
print("creado type ",crear_type[1]," con valores ",crear_type[3])
listaMemoria.pop(0)
def ECreateTable():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
crear_tabla=listaMemoria[0]
EDD.createTable(crear_tabla[0],crear_tabla[1],crear_tabla[2])
print("creando Tabla ",crear_tabla[1])
listaMemoria.pop(0)
return True
return False
def EAddPK():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
crear_tabla=listaMemoria[0]
if(len(crear_tabla[2])>0):
EDD.alterAddPK(crear_tabla[0],crear_tabla[1],crear_tabla[2])
print("\tllave primaria:",crear_tabla[2])
listaMemoria.pop(0)
def EReplace():
cargarMemoria()
#llamar la funcion de EDD
result=False
if(len(listaMemoria)>0):
result=listaMemoria[0]
listaMemoria.pop(0)
return result
def EExistT():
cargarMemoria()
#llamar la funcion de EDD
result=False
if(len(listaMemoria)>0):
result=listaMemoria[0]
listaMemoria.pop(0)
return result
def ESelectFuncion():
print("Select funcion")
def EInsert():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
Data_insert=listaMemoria[0]
EDD.insert(Data_insert[0],Data_insert[1],Data_insert[2])
print("insert en tabla ",Data_insert[1]," \n\tvalores ",Data_insert[2])
listaMemoria.pop(0)
def EObtenerTabla():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
get_tb=listaMemoria[0]
result=EDD.showTables(get_tb[0])
if get_tb[1] in result:
listaMemoria.pop(0)
return True
return False
def EUpdate():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
update_registro=listaMemoria[0]
indice=update_registro[2]
valor=update_registro[3]
col={}
col[indice]=valor
EDD.update(update_registro[0],update_registro[1],col,update_registro[4])
print("update en tabla: ",update_registro[1])
print("\tregistro actualizado: valor ",valor)
listaMemoria.pop(0)
def EDelete():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
delete_registro=listaMemoria[0]
EDD.delete(delete_registro[0],delete_registro[1],delete_registro[2])
print("delete en tabla: ",delete_registro[1])
print("\tregistro eliminado: llave primaria:",delete_registro[2])
listaMemoria.pop(0)
def EShowDatabases():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
databases=listaMemoria[0]
print("databases: ",str(databases))
listaMemoria.pop(0)
def EDropTable():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
drop_tb=listaMemoria[0]
EDD.dropTable(drop_tb[0],drop_tb[1])
print("tabla eliminada: ",drop_tb[1])
listaMemoria.pop(0)
def EDropFuncion():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
drop_fn=listaMemoria[0]
for fn in drop_fn:
print("funcion eliminada: ",fn)
listaMemoria.pop(0)
def ECreateFuncion():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
creaF=listaMemoria[0]
if(creaF[4]):
print("funcion ",creaF[0]," reemplazada de tipo:",creaF[1])
print("\tparametros:",creaF[3])
else:
print("funcion ",creaF[0]," creada de tipo:",creaF[1])
print("\tparametros:",creaF[3])
listaMemoria.pop(0)
def ExcuteFun():
cargarMemoria()
#llamar la funcion de EDD
result=False
if(len(listaMemoria)>0):
result=listaMemoria[0]
listaMemoria.pop(0)
return result
def ECantidadRegistros():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
registros=listaMemoria[0]
listaMemoria.pop(0)
return registros[0]
return 0
# procedure execution handlers
def EDropProcedure():
cargarMemoria()
if(len(listaMemoria)>0):
drop_Proc=listaMemoria[0]
for i in drop_Proc:
print("Procedimiento eliminada: ",i.lower())
listaMemoria.pop(0)
def ECreateProcedure():
cargarMemoria()
if(len(listaMemoria)>0):
crea=listaMemoria[0]
print(crea)
print("Procedure ",crea[0])
print("\tparametros:",crea[3])
listaMemoria.pop(0)
def ExecuteProc():
cargarMemoria()
#llamar la funcion de EDD
result=False
if(len(listaMemoria)>0):
result=listaMemoria[0]
listaMemoria.pop(0)
return result
# end procedure execution
def EAltRenameDatabase():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
EDD.alterDatabase(listaMemoria[0][0],listaMemoria[0][1])
print("Base de datos Renombrada Exitosamente")
listaMemoria.pop(0)
def EAltTbAlterAddCol():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
res=EDD.alterAddColumn(listaMemoria[0][0],listaMemoria[0][1],"")
print("Agregando en Tabla:"+listaMemoria[0][1])
print("\tADD COLUMN:"+listaMemoria[0][2])
print("\tResultado de la creacion de la columna:"+str(res))
listaMemoria.pop(0)
def EAltTbAlterAddConstPrim():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
res=EDD.alterAddPK(listaMemoria[0][0],listaMemoria[0][1],[listaMemoria[0][2]])
print("Agregando en Tabla:"+listaMemoria[0][1])
print("\tADD Primary Key:"+listaMemoria[0][3])
print("\tResultado de la creacion de primary key:"+str(res))
listaMemoria.pop(0)
def EAltTbAlterDropCol():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
res=EDD.alterDropColumn(listaMemoria[0][0],listaMemoria[0][1],listaMemoria[0][3])
print("Eliminando en Tabla:"+listaMemoria[0][1])
print("\tDROP COLUMN:"+listaMemoria[0][2])
print("\tResultado de la eliminacion de la columna:"+str(res))
listaMemoria.pop(0)
def EAltTbRenameTable():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
res=EDD.alterTable(listaMemoria[0][0],listaMemoria[0][1],listaMemoria[0][2])
print("Modificando en Tabla:"+listaMemoria[0][1])
print("\tRENAME TABLE:"+listaMemoria[0][2])
print("\tResultado de renombrar tabla:"+str(res))
listaMemoria.pop(0)
# the handlers below only print; they make no storage call
def EAltTbRenameConst():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
print("Modificando en Tabla:"+listaMemoria[0][0])
print("\tRENAME Constraint:"+listaMemoria[0][1])
print("\tTO:"+listaMemoria[0][2])
listaMemoria.pop(0)
def EAltTbRenameColum():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
print("Modificando en Tabla:"+listaMemoria[0][1])
print("\tRENAME Column:"+listaMemoria[0][2])
print("\tTO:"+listaMemoria[0][3])
listaMemoria.pop(0)
def EAltTbAlterSNN():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
print("Agregando en Tabla:"+listaMemoria[0][1])
print("\tSET NOT NULL:"+listaMemoria[0][2])
listaMemoria.pop(0)
def EAltTbAlterSDT():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
print("Agregando en Tabla:"+listaMemoria[0][1])
print("\tSET DATA TYPE:"+listaMemoria[0][3])
listaMemoria.pop(0)
def EAltTbAlterSDef():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
print("Agregando en Tabla:"+listaMemoria[0][1])
print("\tSET DEFAULT:"+listaMemoria[0][2])
listaMemoria.pop(0)
def EAltTbAlterDNN():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
print("Eliminando en Tabla:"+listaMemoria[0][1])
print("\tNOT NULL:"+listaMemoria[0][2])
listaMemoria.pop(0)
def EAltTbAlterDDef():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
print("Eliminando en Tabla:"+listaMemoria[0][1])
print("\tDEFAULT:"+listaMemoria[0][2])
listaMemoria.pop(0)
def EAltTbAlterDropConst():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
print("Eliminando en Tabla:"+listaMemoria[0][1])
print("\tConstraint:"+listaMemoria[0][2])
listaMemoria.pop(0)
def EAltTbAlterAddConstUni():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
print("Agregando en Tabla:"+listaMemoria[0][1])
print("\tConstraint Unique:"+listaMemoria[0][3])
listaMemoria.pop(0)
def EAltTbAlterAddConstFor():
cargarMemoria()
#llamar la funcion de EDD
if(len(listaMemoria)>0):
print("Agregando en Tabla:"+listaMemoria[0][1])
print("\tConstraint Foreign:"+listaMemoria[0][3])
listaMemoria.pop(0)
# Optimization report
Line=""
def Reporte_Optimizaciones():
global listaoptimizaciones
global Line
#listaoptimizaciones.append([regla,msg])
Line=""
ag("<html>")
ag("<head>")
ag("<title>")
ag("Reporte Optimizaciones")
ag("</title>")
ag("<link rel=\"stylesheet\" href=\"styles.css\">")
ag("</head>")
ag("<body>")
ag("<div id=\"divs\">")
ag("<table id=\"tab\" >")
ag("<tr>")
ag("<td id=\"td1\">")
ag("<h3>")
ag("Listado de Optimizaciones de Codigo")
ag("</h3>")
ag("</td>")
ag("</tr>")
for val in listaoptimizaciones:
cuerpoR(val)
ag("</table>")
ag("</div>")
ag("</body>")
ag("</html>")
gen_Arch()
def cuerpoR(a):
ag2("<tr>")
ag2("<td id=\"td2\">")
ag2("<table id=\"tabF\" >")
ag3("<tr>")
ag3("<td id=\"td3\">")
#Titulo1
ag3("<h4>Regla:</h4>")
ag3("</td>")
#ag3("</tr>")
#ag3("<tr>")
ag3("<td id=\"td4\">")
#ag3("<textarea id=\"tex\"readonly>")
#cuerpo1
ag0(a[0])
#ag3("</textarea>")
ag3("</td>")
ag3("</tr>")
ag3("<tr>")
ag3("<td id=\"td3\">")
#Titulo2
ag3("<h4>Codigo Sin Optimizar:</h4>")
ag3("</td>")
#ag3("</tr>")
#ag3("<tr>")
ag3("<td id=\"td4\">")
#ag3("<textarea id=\"tex\"readonly>")
#cuerpo1
ag0(a[1])
#ag3("</textarea>")
ag3("</td>")
ag3("</tr>")
ag3("<tr>")
ag3("<td id=\"td3\">")
#Titulo3
ag3("<h4>Codigo Optimizado:</h4>")
ag3("</td>")
#ag3("</tr>")
#ag3("<tr>")
ag3("<td id=\"td4\">")
#ag3("<textarea id=\"tex\"readonly>")
#cuerpo1
ag0(a[2])
#ag3("</textarea>")
ag3("</td>")
ag3("</tr>")
ag2("</table>")
ag2("</td>")
ag2("</tr>")
def ag0(nueva):
global Line
arregla=str(nueva)+"\n"
Line=Line+arregla
return arregla
def ag(nueva):
global Line
arregla="\t"+str(nueva)+"\n"
Line=Line+arregla
return arregla
def ag2(nueva):
global Line
arregla="\t\t"+str(nueva)+"\n"
Line=Line+arregla
return arregla
def ag3(nueva):
global Line
arregla="\t\t\t"+str(nueva)+"\n"
Line=Line+arregla
return arregla
def gen_Arch():
global Line
nombre="Reporte_Optimizacion.html"
f=open(nombre,"w")
f.write("\n")
f.write(Line)
f.write("\n")
f.close()
# end optimization report
```
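For reference, `CrearArchivo` writes a driver of roughly this shape; the snippet below is an illustrative `SalidaCD3.py` for a single `CREATE OR REPLACE DATABASE` (the database name and temporal numbers are made up and depend on the input script):
```python
#importar modulos
import CD3 as CD3 #modulo codigo 3 direcciones
from goto import with_goto #modulo goto

@with_goto # Decorador necesario 
def main():
#Codigo Resultante
	#Create DataBase
	t0='ventas'
	t1=CD3.EReplace()
	if(t1):
		goto .dropDB1
		label .dropDB1
		CD3.EDropDatabase()
	CD3.ECreateDatabase()

main()
```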
#### File: fase2/team18/reporte_g.py
```python
from expresiones import *
from instrucciones import *
import tablasimbolos as TS
#import markdown
class Reporte_Gramaticas:
textoAsc = ''
def __init__(self):
print("Reportes gramaticales ASC y DSC")
def grammarASC(self,instrucciones):
lista_instrucciones = instrucciones
# note: this rebinds a module-level global, shadowing the unused class attribute textoAsc
global textoAsc
textoAsc = '# Reporte Gramatical Ascendente \n'
textoAsc += '```sh \n'
textoAsc += '<init> ::= <l_sentencias> { init.val = l_sentencias.val } \n'
textoAsc += '''<l_sentencias> ::= <l_sentencias sentencias> { l_sentencias.val = l_sentencias.append(sentencias.val) }
| <sentencias> \n'''
textoAsc += '<sentencias> ::= <sentencia> ";" { sentencias.val = sentencia.val } \n'
textoAsc += '''<sentencia> ::= <sentencia_ddl> { sentencia.val = sentencia_ddl.val}
| <sentencia_dml> { sentencia = sentencia_dml.val} \n'''
textoAsc += '''<sentencia_ddl> ::= <crear> { sentencia_ddl.val = crear.val }
| <liberar> { sentencia_ddl.val = liberar.val } \n'''
textoAsc += '''<sentencia_dml> ::= <insertar> { sentencia_dml.val = insertar.val }
| <actualizar> { sentencia_dml.val = actualizar.val }
| <eliminar> { sentencia_dml.val = eliminar.val }
| <seleccionH> { sentencia_dml.val = seleccionH.val }
| <mostrar> { sentencia_dml.val = mostrar.val}
| <altert> { sentencia_dml.val = altert.val}
| <usar> { sentencia_dml.val = usar.val } \n'''
tam = 0
while tam < len(lista_instrucciones):
instruccion = lista_instrucciones[tam]
if isinstance(instruccion, CrearBD):
textoAsc += '<crear> ::= "CREATE" <reemplazar> "DATABASE" <verificacion> <ID> <propietario> <modo> { crear.val = CrearBD(t[2], t[4], Operando_ID( %s ), t[6], t[7]) } \n ' %(str(instruccion.nombre.id))
elif isinstance(instruccion, CrearTabla):
textoAsc += '<crear> ::= "CREATE" "TABLE" <ID> "(" <columnas> ")" <herencia> { crear.val = CrearTabla(Operando_ID( %s ),t[7],t[5]) } \n' %(str(instruccion.nombre.id))
elif isinstance(instruccion,CrearType):
textoAsc += '<crear> ::= "CREATE" "TYPE" <ID> "AS" "ENUM" ""(""<lista_exp> "")"" { crear.val =CrearType(Operando_ID( %s ),t[7]) } \n' %(str(instruccion.nombre.id))
elif isinstance(instruccion,EliminarDB):
textoAsc += '<liberar> ::= "DROP" "DATABASE" <existencia> <ID> { liberar.val =EliminarDB(t[3],Operando_ID( %s )) } \n' %(str(instruccion.nombre.id))
elif isinstance(instruccion,EliminarTabla):
textoAsc += '<liberar> ::= "DROP" "TABLE" <existencia> <ID> { liberar.val = EliminarTabla(t[3],Operando_ID( %s )) } \n' %(str(instruccion.nombre.id))
elif isinstance(instruccion,Insertar):
textoAsc += '<insertar> ::= "INSERT" "INTO" <ID> <para_op> "VALUES" "(" <lista_exp> ")" \n'
textoAsc += '<para_op> ::= PAR_A <lnombres> PAR_C { para_op.val = lnombres.val } \n'
elif isinstance(instruccion,Actualizar):
textoAsc += '<actualizar> ::= "UPDATE" <ID> "SET" <lista_update> "WHERE" <exp> { actualizar = Actualizar(Operando_ID(t[2]),t[6],t[4]) } \n'
textoAsc += '<lista_update> ::= <lista_update> "," campoupdate \n'
textoAsc += ' | campoupdate { listaupdate.append(campoupdate) listaupdate.val = campoupdate.val} \n'
elif isinstance(instruccion,DBElegida):
textoAsc += '<usar> ::= "USE" <ID> { usar.val = DBElegida(Operando_ID( %s ))} \n' %(str(instruccion.nombre.id))
elif isinstance(instruccion,MostrarDB):
textoAsc += '<mostrar> ::= "SHOW" "DATABASES" { mostrar.val = MostrarDB() } \n'
elif isinstance(instruccion,MostrarTB):
textoAsc += '<mostrar> ::= "SHOW" "TABLE" { mostrar.val = MostrarTB() } \n'
elif isinstance(instruccion,Indice):
textoAsc += '<crear> ::= "CREATE" <unicidad_index> "INDEX" ID "ON" ID <tipo_index> "(" <lista_exp> <value_direction> <value_rang> ")" <cond_where> { crear.val = Indice(Operando_ID(t[4]),Operando_ID(t[6]),Operando_Booleano(t[7]),Operando_Booleano(t[2]),t[9],t[10])} \n'
textoAsc += '<unicidad_index> ::= "UNIQUE" { unicidad_index.val = true; } \n'
textoAsc += '<unicidad_index> ::= "empty" { unicidad_index.val = false; } \n'
textoAsc += '<cond_where> ::= "WHERE" <exp>{ cond_where.val = t[2] } '
textoAsc += '<value_direction> : ASC | DESC \n'
elif isinstance(instruccion,Funcion):
textoAsc += '<crear> ::= "CREATE" <reemplazar> "FUNCTION" <ID> "(" <lparametros> ")" "RETURNS" <tipo> <lenguaje_funcion> "AS" <dollar_var> <cuerpo_funcion> <dollar_var> <lenguaje_funcion>{ crear.val = Funcion(t[2],t[4],t[6],t[9],t[12],t[13]) } \n'
textoAsc += '<reemplazar> ::= "OR" "REPLACE" \n'
textoAsc += ' | empty { reemplazar.val = t[0] } \n'
textoAsc += '<lparametros> ::= <lparametros> "," <parametro> \n'
textoAsc += ' | <parametro> { lparametros.append(t[3]); lparametros.val = t[1]; } \n'
textoAsc += '<lenguaje_funcion> ::= "LANGUAGE" "PLPSQL" \n'
textoAsc += ' | "LANGUAGE" "SQL" { lenguaje_funcion.val = t[2]} \n'
elif isinstance(instruccion,Drop_Function):
textoAsc += '<liberar> ::= "DROP" "FUNCTION" <lnombres> { liberar.val = Drop_Function(t[3]) } \n'
textoAsc += '<lnombres> ::= <lnombres> "," "ID" \n'
textoAsc += ' | "ID" { lnombres.append(t[3]); lnombres.val = t[1]; } \n'
elif isinstance(instruccion,Drop_Procedure):
textoAsc += '<liberar> ::= "DROP" "PROCEDURE" <lnombres> { liberar.val = Drop_Procedure(t[3]) } \n'
textoAsc += '<lnombres> ::= <lnombres> "," "ID" \n'
textoAsc += ' | "ID" { lnombres.append(t[3]); lnombres.val = t[1]; } \n'
elif isinstance(instruccion,Procedimiento):
textoAsc += '<crear> ::= "CREATE" <reemplazar> "PROCEDURE" <ID> "(" <lparametros> ")" <lenguaje_funcion> "AS" <dollar_var> <cuerpo_funcion> <dollar_var> { crear.val = Procedimiento(t[2],t[4],t[6],t[9],t[12],t[13]) } \n'
textoAsc += '<reemplazar> ::= "OR" "REPLACE" \n'
textoAsc += ' | empty { reemplazar.val = t[0] } \n'
textoAsc += '<lparametros> ::= <lparametros> "," <parametro> \n'
textoAsc += ' | <parametro> { lparametros.append(t[3]); lparametros.val = t[1]; } \n'
textoAsc += '<lenguaje_funcion> ::= "LANGUAGE" "PLPSQL" \n'
textoAsc += ' | "LANGUAGE" "SQL" { lenguaje_funcion.val = t[2]} \n'
else:
print("No se incluyo en el reporte")
tam = tam + 1
textoAsc += '``` \n '
#html = markdown.markdown(textoAsc)
try:
with open('gramatica_ASC.md','w') as rep:
rep.write(textoAsc)
except Exception as e:
print("No fue posible generar el reporte gramatical ASC: "+ str(e))
```
#### File: Analisis_Ascendente/reportes/Reportes.py
```python
from enum import Enum
class Error:
def __init__(self, TIPO, LEXEMA, FIL ,COL):
self.TIPO = TIPO
self.LEXEMA = LEXEMA
self.COL = COL
self.FIL =FIL
class TipoOptimizacion(Enum):
REGLA1 = 1
REGLA2 = 2
REGLA3 = 3
REGLA4 = 4
REGLA5 = 5
REGLA6 = 6
REGLA7 = 7
REGLA8 = 8
REGLA9 = 9
REGLA10 = 10
REGLA11 = 11
REGLA12 = 12
REGLA13 = 13
REGLA14 = 14
REGLA15 = 15
REGLA16 = 16
REGLA17 = 17
REGLA18 = 18
class ListaOptimizacion:
def __init__(self, c3d_original, c3d_optimizado, numero_regla):
self.c3d_original = c3d_original
self.c3d_optimizado = c3d_optimizado
self.numero_regla = numero_regla
self.nombre_regla = ''
if numero_regla == TipoOptimizacion.REGLA1:
self.nombre_regla = "Eliminación de instrucciones redundantes de carga y almacenamiento"
elif numero_regla == TipoOptimizacion.REGLA2 or numero_regla == TipoOptimizacion.REGLA3 or numero_regla == TipoOptimizacion.REGLA4 or numero_regla == TipoOptimizacion.REGLA5:
self.nombre_regla = "Eliminación de código inalcanzable"
elif numero_regla == TipoOptimizacion.REGLA6 or numero_regla == TipoOptimizacion.REGLA7:
self.nombre_regla = "Optimizaciones de flujo de control"
else:
self.nombre_regla = "Simplificación algebraica y por fuerza"
class RealizarReportes:
def generar_reporte_lexicos(self,lista):
nombre = "ErroresLexicos.html"
texto = ""
texto += "<!DOCTYPE html>"
texto += "<head>"
texto += "<title>Lexico</title>"
texto += "<style>"
texto +='''body {
background-color: #d0efb141;
font-family: calibri, Helvetica, Arial;
}
h1 {
text-align: center;
font-size: 100px;
}
table {
width: 100%;
border-collapse: collapse;
font-size: 25px;
font-weight: bold;
}
table td, table th {
border: 0px dashed #77A6B6;
padding: 10px;
}
table tr:nth-child(even){ background-color: #9DC3C2; }
table tr:nth-child(odd){ background-color: #B3D89C; }
table tr:hover {
background-color: #77A6B6;
color: #feffff;
}
table th {
color: white;
background-color: #4d7298;
text-align: left;
padding-top: 12px;
padding-bottom: 12px;
}
.content {
width: 90%;
margin: 0 auto;
}'''
texto += "</style>"
texto += "</head>"
texto += "<body>"
texto += "<h2>Reporte analísis lexico</h2>"
texto += '<div class="content"><table>'
texto += "<tr>"
texto += "<th>#</th>"
texto += "<th>Tipo de Error</th>"
texto += "<th>Lexema o caracter</th>"
texto += "<th>Fila</th>"
texto += "<th>Columna</th>"
texto += "</tr>"
texto += "<tr>"
i = 1
for token in lista:
texto += "<td>" + str(i) + "</td>"
texto += "<td>" + token.TIPO + "</td>"
texto += "<td>" + token.LEXEMA + "</td>"
texto += "<td>" + token.FIL+ "</td>"
texto += "<td>" + token.COL + "</td>"
texto += "</tr>"
i += 1
texto += "</table></div>"
f = open(nombre, 'w')
f.write(texto)
f.close()
def generar_reporte_sintactico(self,lista):
nombre = "ErroresSintacticos.html"
texto = ""
texto += "<!DOCTYPE html>"
texto += "<head>"
texto += "<title>Sintactico</title>"
texto += "<style>"
texto += '''body {
background-color: #d0efb141;
font-family: calibri, Helvetica, Arial;
}
h1 {
text-align: center;
font-size: 100px;
}
table {
width: 100%;
border-collapse: collapse;
font-size: 25px;
font-weight: bold;
}
table td, table th {
border: 0px dashed #77A6B6;
padding: 10px;
}
table tr:nth-child(even){ background-color: #9DC3C2; }
table tr:nth-child(odd){ background-color: #B3D89C; }
table tr:hover {
background-color: #77A6B6;
color: #feffff;
}
table th {
color: white;
background-color: #4d7298;
text-align: left;
padding-top: 12px;
padding-bottom: 12px;
}
.content {
width: 90%;
margin: 0 auto;
}'''
texto += "</style>"
texto += "</head>"
texto += "<body>"
texto += "<h2>Reporte analisis sintactico</h2>"
texto += '<div class="content"><table>'
texto += "<tr>"
texto += "<th>#</th>"
texto += "<th>Tipo de Error</th>"
texto += "<th>Lexema o caracter</th>"
texto += "<th>Fila</th>"
texto += "<th>Columna</th>"
texto += "</tr>"
texto += "<tr>"
i = 1
for token in lista:
texto += "<td>" + str(i) + "</td>"
texto += "<td>" + token.TIPO + "</td>"
texto += "<td>" + token.LEXEMA + "</td>"
texto += "<td>" + token.FIL+ "</td>"
texto += "<td>" + token.COL + "</td>"
texto += "</tr>"
i=i+1
texto += "</table></div>"
f = open(nombre, 'w')
f.write(texto)
f.close()
def generar_reporte_tablaSimbolos(self,lista):
nombre = "Simbolos.html"
texto = ""
texto += "<!DOCTYPE html>"
texto += "<head>"
texto += "<title>Simbolos</title>"
texto += "<style>"
texto += '''body {
background-color: #d0efb141;
font-family: calibri, Helvetica, Arial;
}
h1 {
text-align: center;
font-size: 100px;
}
table {
width: 100%;
border-collapse: collapse;
font-size: 25px;
font-weight: bold;
}
table td, table th {
border: 0px dashed #77A6B6;
padding: 10px;
}
table tr:nth-child(even){ background-color: #9DC3C2; }
table tr:nth-child(odd){ background-color: #B3D89C; }
table tr:hover {
background-color: #77A6B6;
color: #feffff;
}
table th {
color: white;
background-color: #4d7298;
text-align: left;
padding-top: 12px;
padding-bottom: 12px;
}
.content {
width: 90%;
margin: 0 auto;
}'''
texto += "</style>"
texto += "</head>"
texto += "<body>"
texto += "<h2>Reporte entornos/tabla de simbolos</h2>"
texto += "<h5>Entorno global</h5>"
texto += '<div class="content"><table>'
texto += "<tr>"
texto += "<th>#</th>"
texto += "<th>Categoria</th>"
texto += "<th>id</th>"
texto += "<th>tipo</th>"
texto += "<th>valor</th>"
texto += "<th>entorno</th>"
texto += "</tr>"
texto += "<tr>"
i = 1
for data in lista:
texto += "<td>" + str(i) + "</td>"
texto += "<td>" + str(lista.get(data).categoria) + "</td>"
texto += "<td>" + str(lista.get(data).id) + "</td>"
texto += "<td>" + str(lista.get(data).tipo) + "</td>"
texto += "<td>" + str(lista.get(data).valor) + "</td>"
if str(lista.get(data).Entorno) != str(None):
texto += "<td>" + "Entorno BD" + "</td>"
else:
texto += "<td>" + "None" + "</td>"
texto += "</tr>"
i=i+1
texto += "</table>"
#------------------------------------------------------------------------------------------------
#sub entornos
for data in lista:
if str(lista.get(data).Entorno) != str(None):
entornoBD = lista.get(data).Entorno
#print(entornoBD.simbolos)
lista2=entornoBD.simbolos
texto += "<h5>Entorno "+lista.get(data).id+"</h5>"
texto += "<table>"
texto += "<tr>"
texto += "<th>#</th>"
texto += "<th>Categoria</th>"
texto += "<th>id</th>"
texto += "<th>tipo</th>"
texto += "<th>valor</th>"
texto += "<th>entorno</th>"
texto += "</tr>"
texto += "<tr>"
i = 1
for data in lista2:
texto += "<td>" + str(i) + "</td>"
texto += "<td>" + str(lista2.get(data).categoria) + "</td>"
texto += "<td>" + str(lista2.get(data).id) + "</td>"
texto += "<td>" + str(lista2.get(data).tipo) + "</td>"
texto += "<td>" + str(lista2.get(data).valor) + "</td>"
if str(lista2.get(data).Entorno) != str(None):
texto += "<td>" + "Entorno Tabla" + "</td>"
entornoTB = lista2.get(data).Entorno
lista3 = entornoTB.simbolos
j = 1
for campos in lista3:
texto += "<tr>"
texto += "<th>"+str(j)+"</th>"
texto += "<th>"+str(lista3.get(campos).categoria)+"</th>"
texto += "<th>"+str(lista3.get(campos).id)+"</th>"
texto += "<th>"+str(lista3.get(campos).tipo)+"</th>"
texto += "<th>"+str(lista3.get(campos).valor)+"</th>"
texto += "<td>"+str(lista2.get(data).id)+"</td>"
texto += "</tr>"
j=j+1
else:
texto += "<td>" + "None" + "</td>"
texto += "</tr>"
i = i + 1
texto += "</table>"
else:
pass
texto += "</div>"
f = open(nombre, 'w')
f.write(texto)
f.close()
def generar_reporte_semanticos(self, lista):
nombre = "ErroresSemanticos.html"
texto = ""
texto += "<!DOCTYPE html>"
texto += "<head>"
texto += "<title>Semantico</title>"
texto += "<style>"
texto += '''body {
background-color: #d0efb141;
font-family: calibri, Helvetica, Arial;
}
h1 {
text-align: center;
font-size: 100px;
}
table {
width: 100%;
border-collapse: collapse;
font-size: 25px;
font-weight: bold;
}
table td, table th {
border: 0px dashed #77A6B6;
padding: 10px;
}
table tr:nth-child(even){ background-color: #9DC3C2; }
table tr:nth-child(odd){ background-color: #B3D89C; }
table tr:hover {
background-color: #77A6B6;
color: #feffff;
}
table th {
color: white;
background-color: #4d7298;
text-align: left;
padding-top: 12px;
padding-bottom: 12px;
}
.content {
width: 90%;
margin: 0 auto;
}'''
texto += "</style>"
texto += "</head>"
texto += "<body>"
texto += "<h2>Reporte analísis semantico</h2>"
texto += '<div class="content"><table>'
texto += "<tr>"
texto += "<th>#</th>"
texto += "<th>Errores semantico- codigo- descrpcion - fila - columna</th>"
texto += "</tr>"
texto += "<tr>"
i = 1
for token in lista:
texto += "<td>" + str(i) + "</td>"
texto += "<td>" + token + "</td>"
texto += "</tr>"
i = i + 1
texto += "</table></div>"
f = open(nombre, 'w')
f.write(texto)
f.close()
def generar_reporte_optimizacion(self, lista):
nombre = "ReporteOptimizacion.html"
texto = '''
<!DOCTYPE html>
<head>
<title>Optimizacion</title>
<style>
body {
background-color: #d0efb141;
font-family: calibri, Helvetica, Arial;
}
h1 {
text-align: center;
font-size: 100px;
}
table {
width: 100%;
border-collapse: collapse;
font-size: 25px;
font-weight: bold;
}
table td, table th {
border: 0px dashed #77A6B6;
padding: 10px;
}
table tr:nth-child(even){ background-color: #9DC3C2; }
table tr:nth-child(odd){ background-color: #B3D89C; }
table tr:hover {
background-color: #77A6B6;
color: #feffff;
}
table th {
color: white;
background-color: #4d7298;
text-align: left;
padding-top: 12px;
padding-bottom: 12px;
}
.content {
width: 90%;
margin: 0 auto;
}
</style>
</head>
<body>
<h2>Reporte Optimizacion C3D</h2>
<div class="content"><table>
<tr>
<th>#</th>
<th>C3D Original</th>
<th>C3D Optimizado</th>
<th>Nombre Mirilla</th>
<th>Regla</th>
</tr>
        '''
        for i, token in enumerate(lista):
            texto += "<tr>"
            texto += "<td>" + str(i + 1) + "</td>"
texto += "<td>" + token.c3d_original + "</td>"
texto += "<td>" + token.c3d_optimizado + "</td>"
texto += "<td>" + token.nombre_regla + "</td>"
texto += "<td>" + str(token.numero_regla) + "</td>"
texto += "</tr>"
texto += "</table></div></body></html>"
f = open(nombre, 'w')
f.write(texto)
f.close()
```
#### File: team19/Analisis_Descendente/gramatica.py
```python
from graphviz import Digraph
import ply.lex as lex
import ply.yacc as yacc
import re
import Analisis_Descendente.ReporteGramatical as ReporteGramatical
# Lexical analysis
lista = []
palabras_reservadas = (
# NUMERIC TYPES
'SMALLINT', 'INTEGER', 'BIGINT', 'DECIMAL', 'NUMERIC', 'REAL',
'DOUBLE', 'PRECISION', 'MONEY',
# CHARACTER TYPES
'CHARACTER', 'VARYING', 'VARCHAR', 'CHAR', 'TEXT',
# DATA TIME TYPES
'TIMESTAMP', 'OUT', 'WITH', 'WITHOUT', 'TIME', 'ZONE', 'DATE',
'INTERVAL',
'YEAR', 'MONTH', 'DAY', 'HOUR', 'MINUTE', 'SECOND', 'TO',
# BOOLEAN TYPES
'BOOLEAN',
# ENUMERATED TYPES
'CREATE', 'TYPE', 'AS', 'ENUM',
# OPERATORS
'BETWEEN', 'IN', 'LIKE', 'ILIKE', 'SIMILAR',
'IS', 'NOT', 'NULL', 'AND', 'OR',
# DEFINITION CREATE
'REPLACE', 'IF', 'EXISTS', 'OWNER', 'MODE', 'DATABASE',
# SHOW DATABASES
'SHOW', 'DATABASES',
# ALTER DATABASE
'ALTER', 'RENAME', 'CURRENT_USER', 'SESSION_USER',
# DROP DARTABASE
'DROP',
# CREATE TABLE
'TABLE', 'CONSTRAINT', 'CHECK', 'DEFAULT', 'PRIMARY', 'REFERENCES', 'KEY',
'FOREIGN', 'UNIQUE',
# alter table
'ADD', 'SET', 'COLUMN', 'INHERITS',
# DML
'INSERT', 'INTO', 'VALUES',
'UPDATE', 'WHERE', 'DELETE', 'FROM',
# SELECT
'SELECT', 'EXTRACT', 'DATE_PART', 'NOW', 'GREATEST', 'LEAST',
'GROUP', 'BY', 'SUM', 'CURRENT_TIME', 'CURRENT_DATE', 'DISTINCT',
'HAVING'
)
tokens = palabras_reservadas +\
(
# OPERADORES COMPARADORES
'PUNTO',
'PORCENTAJE',
'PARIZQ',
'PARDER',
'CORIZQ',
'CORDER',
'IGUAL',
'DIFERENTEQ',
'MAYORQ',
'MENORQ',
'MAYORIGUALQ',
'MENORIGUALQ',
'MAS',
'LLAVEA',
'LLAVEC',
'MENOS',
'POR',
'DIVISION',
'NOENTERO',
'NODECIMAL',
'PTCOMA',
'COMA',
'IDENTIFICADOR',
'UMENOS',
'CADENA',
'CARACTER_O_CADENA',
)
# SIMPLE REGULAR EXPRESSIONS
t_LLAVEC = r'\}'
t_LLAVEA = r'\{'
t_PARIZQ = r'\('
t_PARDER = r'\)'
t_CORIZQ = r'\['
t_CORDER = r'\]'
t_PTCOMA = r';'
t_COMA = r','
t_PUNTO = r'\.'
# ARITHMETIC OPERATORS
t_MAS = r'\+'
t_MENOS = r'-'
t_POR = r'\*'
t_DIVISION = r'/'
t_PORCENTAJE = r'%'
# RELATIONAL OPERATORS
t_IGUAL = r'\='
t_MAYORQ = r'\>'
t_MENORQ = r'\<'
t_MAYORIGUALQ = r'\>='
t_MENORIGUALQ = r'\<='
t_DIFERENTEQ = r'\<>'
# COMPOUND REGULAR EXPRESSIONS
# identifier recognition
def t_ID(t):
r'[_a-zA-Z][a-zA-Z_0-9_]*'
if (t.value.upper()) in palabras_reservadas:
t.type = t.value.upper()
#print(t.type)
else:
t.type = 'IDENTIFICADOR'
return t
# decimal number
def t_NODECIMAL(t):
r'(\d+\.\d+)|(\.\d+)'
try:
#print("numero decimal : ", t.value, " - ", float(t.value))
#print("tipo: ", t.type)
t.value = float(t.value)
except ValueError:
#print("Floaat value too large %d", t.value)
t.value = 0
return t
# integer number
def t_NOENTERO(t):
r'\d+'
try:
t.value = int(t.value)
except ValueError:
#print("Integer value too large %d", t.value)
t.value = 0
return t
# double-quoted string
def t_CADENA(t):
r'\".*?\"'
t.value = t.value[1:-1]
return t
# single-quoted string
def t_CARACTER_O_CADENA(t):
r'\'.*?\''
t.value = t.value[1:-1]
return t
# Ignored characters (newline is left out so t_newline can track line numbers)
t_ignore = " \b\f\r\t"
# MULTILINE COMMENT /* */
def t_COMENMUL(t):
r'/\*(.|\n)*?\*/'
t.lexer.lineno += t.value.count('\n')
# SINGLE-LINE COMMENT --
def t_COMENSIM(t):
r'--.*\n'
t.lexer.lineno += 1
def t_newline(t):
r'\n+'
t.lexer.lineno += t.value.count("\n")
def t_error(t):
#print("error lexico '%s'" % t.value)
t.lexer.skip(1)
# ----------------------------------------------------------------------------------------------------------------------
# Operator associativity and precedence
'''
precedence = (
('left', 'OR'),
('left', 'AND','BETWEEN','NOT','LIKE','ILIKE','IN'),
('left', 'DIFERENTEQ','IGUAL', 'MAYORQ',
'MENORQ', 'MAYORIGUALQ', 'MENORIGUALQ'),
('left', 'MAS', 'MENOS'),
('left', 'POR', 'DIVISION','PORCENTAJE'),
('left', 'PARDER', 'PARIZQ'),
('left', 'AS'),
('right', 'UMENOS','UMAS',),
)'''
def listado(bandera, produccion):
    # Returns the accumulated grammar listing when bandera == 1
    if bandera == 1:
        return lista
# grammar start symbol
def p_inicio(t):
'''
s : instrucciones
'''
#print("Analisis sintactico exitoso")
gramatica = 's::= instrucciones'
lista.append(gramatica)
def p_instrucciones_lista(t):
'''instrucciones : instruccion instruccionesp
|
'''
try:
if t[2]:
gramatica = 'instrucciones ::= instruccion instrucciones\''
lista.append(gramatica)
except:
gramatica = 'instrucciones ::= epsilon'
lista.append(gramatica)
pass
def p_instrucciones_lista1(t):
'''instruccionesp : instruccion instrucciones
|
'''
try:
if t[1]:
gramatica = 'instrucciones\' ::= instruccion instrucciones'
lista.append(gramatica)
t[0] = t[1]
except:
gramatica = 'instrucciones\' ::= epsilon'
lista.append(gramatica)
t[0] = []
pass
# statement productions start here
def p_instruccion_create(t):
'''instruccion : CREATE createp PTCOMA
| ALTER factorizar_alter PTCOMA
| DROP droptp PTCOMA
| SELECT selectp PTCOMA
| INSERT INTO IDENTIFICADOR VALUES PARIZQ expresion PARDER PTCOMA
| UPDATE IDENTIFICADOR SET expresion WHERE expresion PTCOMA
| DELETE FROM IDENTIFICADOR WHERE expresion PTCOMA
'''
if str(t[1]).lower() == 'create':
gramatica = 'instruccion ::= \'CREATE\' createp \';\''
lista.append(gramatica)
elif str(t[1]).lower() == 'alter':
gramatica = 'instruccion ::= \'ALTER\' factorizar_alter \';\''
lista.append(gramatica)
elif str(t[1]).lower() == 'drop':
gramatica = 'instruccion ::= \'DROP\' droptp \';\''
lista.append(gramatica)
elif str(t[1]).lower() == 'select':
gramatica = 'instruccion ::= \'SELECT\' selectp \';\''
lista.append(gramatica)
elif str(t[1]).lower() == 'insert':
gramatica = 'instruccion ::= \'INSERT\' \'INTO\' \'' + \
t[3]+'\' \'VALUES\' \'(\' expresion \')\' \';\''
lista.append(gramatica)
elif str(t[1]).lower() == 'update':
gramatica = 'instruccion ::= \'UPDATE\' \'' + \
t[2]+'\' \'SET\' expresion \'WHERE\' expresion \';\''
lista.append(gramatica)
elif str(t[1]).lower() == 'delete':
gramatica = 'instruccion ::= \'DELETE\' \'FROM\' \'' + \
t[3]+'\' \'WHERE\' expresion \';\''
lista.append(gramatica)
t[0] = t[1]
# I may still have time to add the missing SELECT features; if so, they will go
# at the very bottom, so just keep adding those new productions there.
# thanks mindi
def p_instruccion_showdatabase(t):
'''instruccion : SHOW DATABASES opcional3 PTCOMA
'''
gramatica = 'instruccion ::= \'SHOW\' \'DATABASES\' opcional3 \';\''
lista.append(gramatica)
t[0] = t[1]
def p_alterfacotizar(t):
''' factorizar_alter : DATABASE alterp
| TABLE l_campo
'''
if str(t[1]).lower() == 'database':
gramatica = 'factorizar_alter ::= \'DATABASE\' alterp '
lista.append(gramatica)
elif str(t[1]).lower() == 'table':
gramatica = 'factorizar_alter ::= \'TABLE\' l_campo '
lista.append(gramatica)
t[0] = t[1]
def p_selectprima(t):
''' selectp : EXTRACT PARIZQ l_campo PARDER
| DATE_PART PARIZQ expresion l_campo PARDER
| NOW PARIZQ PARDER
| GREATEST PARIZQ expresion PARDER
| LEAST PARIZQ expresion PARDER
| expresion FROM expresion where
| CURRENT_TIME
| CURRENT_DATE
| TIMESTAMP CARACTER_O_CADENA
| DISTINCT expresion FROM expresion where
'''
if str(t[1]).lower() == 'extract':
gramatica = 'selectp ::= \'EXTRACT\' \'(\' l_campo \')\''
lista.append(gramatica)
elif str(t[1]).lower() == 'date_part':
gramatica = 'selectp ::= \'date_part\' \'(\' expresion l_campo \')\''
lista.append(gramatica)
elif str(t[1]).lower() == 'now':
gramatica = 'selectp ::= \'now\' \'(\' \')\''
lista.append(gramatica)
elif str(t[1]).lower() == 'greatest':
gramatica = 'selectp ::= \'GREATEST\' \'(\' expresion \')\''
lista.append(gramatica)
elif str(t[1]).lower() == 'least':
gramatica = 'selectp ::= \'LEAST\' \'(\' expresion \')\''
lista.append(gramatica)
else:
gramatica = 'selectp ::= expresion \'FROM\''
lista.append(gramatica)
def p_wherprod(t):
'''where : WHERE expresion group
| group
| '''
def p_groupBy(t):
'''group : GROUP BY expresion hav'''
def p_havingprod(t):
'''hav : HAVING expresion
| '''
def p_drop_triprima(t):
'''droptp : DATABASE dropp IDENTIFICADOR
| TABLE IDENTIFICADOR
'''
if str(t[1]).lower() == 'database':
gramatica = 'droptp ::= \'DATABASE\' dropp \''+t[3]+'\''
lista.append(gramatica)
elif str(t[1]).lower() == 'table':
gramatica = 'droptp ::= \'TABLE\' \''+t[2]+'\''
lista.append(gramatica)
def p_dropprima(t):
'''dropp : IF EXISTS'''
# #print('-->'+str(t[1]))
gramatica = 'dropp ::= \'IF\' \'EXISTS\''
lista.append(gramatica)
def p_dropprima1(t):
'''dropp : '''
# #print('-->'+str(t[1]))
gramatica = 'dropp ::= epsilon'
lista.append(gramatica)
def p_alterprima(t):
'''alterp : IDENTIFICADOR alterpp
'''
gramatica = 'alterp ::= \''+t[1]+'\' alterpp'
lista.append(gramatica)
def p_alterprima1(t):
'''alterpp : RENAME TO alterppp
| OWNER TO alterppp
'''
if str(t[1]).lower() == 'rename':
gramatica = 'alterpp ::= \'RENAME\' \'TO\' alterpp '
lista.append(gramatica)
elif str(t[1]).lower() == 'owner':
gramatica = 'alterpp ::= \'OWNER\' \'TO\' alterpp '
lista.append(gramatica)
def p_alterprima2(t):
'''
alterppp : IDENTIFICADOR
| CURRENT_USER
| SESSION_USER
'''
gramatica = 'alterppp ::= \'' + t[1] + '\''
lista.append(gramatica)
def p_createprima(t):
'''
createp : OR REPLACE DATABASE opcional IDENTIFICADOR opcional
| TYPE createpp
| DATABASE createpp
| TABLE createpp
'''
if str(t[1]).lower() == 'or':
gramatica = 'createp ::= \'OR\' \'REPLACE\' \'DATABASE\' opcional \'' + \
t[5] + '\' opcional'
lista.append(gramatica)
elif str(t[1]).lower() == 'type':
gramatica = 'createp ::= \'TYPE\' createpp '
lista.append(gramatica)
elif str(t[1]).lower() == 'database':
gramatica = 'createp ::= \'DATABASE\' createpp '
lista.append(gramatica)
elif str(t[1]).lower() == 'table':
gramatica = 'createp ::= \'TABLE\' createpp '
lista.append(gramatica)
def p_createbiprima(t):
'''
createpp : IDENTIFICADOR createtp
'''
gramatica = 'createpp ::= \''+t[1]+'\' createtp'
lista.append(gramatica)
def p_createtriprima(t):
'''
createtp : AS ENUM PARIZQ l_cadenas PARDER
| opcional
| PARIZQ l_campos PARDER createqp
'''
if str(t[1]).lower() == 'as':
gramatica = 'createtp ::= \'AS\' \'ENUM\' \'(\' l_cadenas \')\''
lista.append(gramatica)
elif str(t[1]).lower() == '(':
gramatica = 'createtp ::= \'(\' l_campos \')\' createqp '
lista.append(gramatica)
else:
gramatica = 'createtp ::= opcional '
lista.append(gramatica)
def p_createquitoprima(t):
''' createqp : INHERITS PARIZQ IDENTIFICADOR PARDER
'''
gramatica = 'createqp ::= \'INHERITS\' \'(\' \''+t[3]+'\' \')\''
lista.append(gramatica)
def p_createquitoprima1(t):
''' createqp : '''
gramatica = 'createqp ::= epsilon '
lista.append(gramatica)
def p_create_campos_tablas(t):
'''l_campos : IDENTIFICADOR l_campo l_campos
'''
gramatica = 'l_campos ::= \''+t[1]+'\' l_campo l_campos'
lista.append(gramatica)
def p_create_campos_tablas1(t):
'''l_campos : '''
gramatica = 'l_campos ::= epsilon'
lista.append(gramatica)
def p_create_campos_tablas2(t):
'''l_campos : COMA IDENTIFICADOR l_campo l_campos
'''
gramatica = 'l_campos ::= \',\' \''+t[2]+'\' l_campo l_campos'
lista.append(gramatica)
def p_create_campos_tablas3(t):
'''l_campos : COMA l_campo l_campos
'''
gramatica = 'l_campos ::= \',\' l_campo l_campos'
lista.append(gramatica)
def p_create_campo_tabla(t):
'''l_campo : tipo l_campo'''
gramatica = 'l_campo ::= tipo l_campo'
lista.append(gramatica)
def p_create_campo_tabla1(t):
'''l_campo : '''
gramatica = 'l_campo ::= epsilon'
lista.append(gramatica)
def p_alterlistacolumn(t):
'''l_altercolumn : IDENTIFICADOR TYPE l_campo l_altercolumn
'''
gramatica = 'l_altercolumn ::= \'' + \
t[1]+'\' \'TYPE\' l_campo l_altercolumn'
lista.append(gramatica)
def p_alterlistacolumn1(t):
'''l_altercolumn : IDENTIFICADOR SET NOT NULL
'''
gramatica = 'l_altercolumn ::= \''+t[1]+'\' \'SET\' \'NOT\' \'NULL\''
lista.append(gramatica)
def p_alterlistacolumn2(t):
'''l_altercolumn : COMA ALTER COLUMN IDENTIFICADOR TYPE l_campo l_altercolumn
'''
gramatica = 'l_altercolumn ::= \',\' \'ALTER\' \'COLUMN\' \'' + \
t[4]+'\' \'TYPE\' l_campo l_altercolumn'
lista.append(gramatica)
def p_alterlistacolumn3(t):
'''l_altercolumn : COMA ALTER COLUMN IDENTIFICADOR SET NOT NULL
'''
gramatica = 'l_altercolumn ::= \',\' \'ALTER\' \'COLUMN\' \'' + \
t[4]+'\' \'SET\' \'NOT\' \'NULL\''
lista.append(gramatica)
# -----------------------------------------------------------------
# agregar tipo de datos se usen en el create table
def p_tipo_datos(t):
'''tipo : INTEGER
| ADD
| RENAME
| DATE
| SET
| NOT
| NULL
| PRIMARY KEY
| FOREIGN KEY
| CONSTRAINT
| UNIQUE
| IDENTIFICADOR
| REFERENCES
| ALTER COLUMN l_altercolumn
| DROP
| PARIZQ l_cadenas PARDER
| YEAR
| FROM
| TIMESTAMP
| HOUR
| SECOND
| MINUTE
| DAY
| MONTH
| IDENTIFICADOR PUNTO IDENTIFICADOR
'''
if str(t[1]).lower() == 'primary' or str(t[1]).lower() == 'foreign':
gramatica = 'tipo ::= \''+t[1]+'\' \'KEY\''
lista.append(gramatica)
elif str(t[1]).lower() == 'alter':
gramatica = 'tipo ::= \'ALTER\' \'COLUMN\' l_altercolumn'
lista.append(gramatica)
elif str(t[1]).lower() == '(':
gramatica = 'tipo ::= \'(\' l_cadenas \')\''
lista.append(gramatica)
else:
gramatica = 'tipo ::= \''+t[1]+'\''
lista.append(gramatica)
t[0] = t[1]
def p_tipo_datos1(t):
'''tipo : MONEY
| SMALLINT
| BIGINT
| DECIMAL
| NUMERIC
| REAL
| CARACTER_O_CADENA
'''
#print("varchar print")
gramatica = 'tipo ::= \''+str(t[1])+'\''
lista.append(gramatica)
t[0] = t[1]
def p_tipo_datos2(t):
'''tipo : DOUBLE PRECISION
'''
#print("varchar print")
gramatica = 'tipo ::= \''+str(t[1])+'\' \''+str(t[2])+'\''
lista.append(gramatica)
t[0] = t[1]
def p_tipo_datos3(t):
'''tipo : VARCHAR PARIZQ NOENTERO PARDER
| CHAR PARIZQ NOENTERO PARDER
| CHECK PARIZQ expresion PARDER
| CHARACTER PARIZQ NOENTERO PARDER
'''
#print("varchar print")
gramatica = 'tipo ::= \''+t[1]+'\' \'' + \
t[2]+'\' \''+str(t[3])+'\' \''+t[4]+'\''
lista.append(gramatica)
t[0] = t[1]
def p_tipo_datos4(t):
'''tipo : CHARACTER VARYING PARIZQ NOENTERO PARDER
'''
#print("varchar print")
gramatica = 'tipo ::= \''+t[1]+'\' \''+t[2] + \
'\' \''+t[3]+'\' \''+str(t[4])+'\' \''+t[5]+'\''
lista.append(gramatica)
t[0] = t[1]
def p_tipo_datos5(t):
'''tipo : DOUBLE
| NOENTERO
| TEXT
| BOOLEAN
'''
gramatica = 'tipo ::= \''+t[1]+'\''
lista.append(gramatica)
t[0] = t[1]
def p_tipo_datos6(t):
'''tipo : DECIMAL PARIZQ NOENTERO COMA NOENTERO PARDER
'''
gramatica = 'tipo ::= \''+str(t[1])+'\' \''+t[2] + '\' \'' + \
str(t[3])+'\' \''+t[4]+'\' \''+str(t[5])+'\' \''+t[6]+'\''
lista.append(gramatica)
t[0] = t[1]
def p_listaCadenas(t):
''' l_cadenas : CARACTER_O_CADENA l_cadenasp
| IDENTIFICADOR l_cadenasp
'''
gramatica = 'l_cadenas ::= \''+t[1]+'\' l_cadenasp'
lista.append(gramatica)
def p_listaCadenas2(t):
''' l_cadenasp : COMA CARACTER_O_CADENA l_cadenasp
| COMA IDENTIFICADOR l_cadenasp
'''
gramatica = 'l_cadenasp ::= \''+t[1]+'\' \''+t[2]+'\' l_cadenasp'
lista.append(gramatica)
def p_listaCadenas3(t):
''' l_cadenasp : '''
gramatica = 'l_cadenasp ::= epsilon'
lista.append(gramatica)
# These clauses may or may not appear
def p_opcional(t):
'''opcional : IF NOT EXISTS
| OWNER opcional1 IDENTIFICADOR opcional2
'''
if str(t[1]).lower() == 'if':
gramatica = 'opcional ::= \''+t[1]+'\' \'NOT\' \'EXISTS\''
lista.append(gramatica)
elif str(t[1]).lower() == 'owner':
gramatica = 'opcional ::= \'OWNER\' opcional1 \''+t[3]+'\' opcional2'
lista.append(gramatica)
def p_opcional_1(t):
'''opcional : '''
gramatica = 'opcional ::= epsilon'
lista.append(gramatica)
def p_opcional1(t):
'''opcional1 : IGUAL'''
gramatica = 'opcional1 ::= \''+t[1]+'\''
lista.append(gramatica)
def p_opcional1_1(t):
'''opcional1 : '''
gramatica = 'opcional1 ::= epsilon'
lista.append(gramatica)
def p_opcional2(t):
''' opcional2 : MODE opcional1 NOENTERO
'''
gramatica = 'opcional2 ::= \'MODE\' opcional1 \''+str(t[3])+'\''
lista.append(gramatica)
def p_opcional2_1(t):
''' opcional2 : '''
gramatica = 'opcional2 ::= epsilon'
lista.append(gramatica)
def p_opcional3(t):
'''opcional3 : LIKE CARACTER_O_CADENA
'''
#print(t[2])
gramatica = 'opcional3 ::= \'LIKE\' \''+t[2]+'\''
lista.append(gramatica)
def p_opcional3_1(t):
'''opcional3 : '''
#print(t[2])
gramatica = 'opcional3 ::= epsilon'
lista.append(gramatica)
def p_expresion(t):
'''expresion : w
'''
gramatica = 'expresion ::= w'
lista.append(gramatica)
def p_expresion16(t):
'''w : x wp
'''
gramatica = 'w ::= x wp'
lista.append(gramatica)
def p_expresion15(t):
'''wp : IGUAL x wp
'''
gramatica = 'wp ::= \'=\' x wp'
lista.append(gramatica)
def p_expresion15_1(t):
'''wp : '''
gramatica = 'wp ::= epsilon'
lista.append(gramatica)
def p_expresion10(t):
'''x : y xp
'''
gramatica = 'x ::= y xp'
lista.append(gramatica)
def p_expresion11(t):
'''xp : OR y xp
'''
gramatica = 'xp ::= \'OR\' y xp'
lista.append(gramatica)
def p_expresion11_1(t):
'''xp : '''
gramatica = 'xp ::= epsilon'
lista.append(gramatica)
def p_expresion8(t):
'''y : z yp
'''
gramatica = 'y ::= z yp'
lista.append(gramatica)
def p_expresion9(t):
'''yp : AND z yp
'''
gramatica = 'yp ::= \'AND\' z yp'
lista.append(gramatica)
def p_expresion9_1(t):
'''yp : '''
gramatica = 'yp ::= epsilon'
lista.append(gramatica)
def p_expresion6(t):
'''z : a zp
'''
gramatica = 'z ::= a zp'
lista.append(gramatica)
def p_expresion7(t):
'''zp : DIFERENTEQ a zp
| MAYORQ a zp
| MAYORIGUALQ a zp
| MENORQ a zp
| MENORIGUALQ a zp
'''
gramatica = 'zp ::= \''+t[1]+'\' a zp'
lista.append(gramatica)
def p_expresion7_1(t):
'''zp : '''
gramatica = 'zp ::= epsilon'
lista.append(gramatica)
def p_expresion1(t):
'''a : b ap
'''
gramatica = 'a ::= b ap'
lista.append(gramatica)
def p_expresion2(t):
'''ap : MAS b ap
| MENOS b ap
'''
gramatica = 'ap ::= \''+t[1]+'\' b ap'
lista.append(gramatica)
def p_expresion2_1(t):
'''ap : '''
gramatica = 'ap ::= epsilon'
lista.append(gramatica)
def p_expresion3(t):
'''b : c bp
'''
gramatica = 'b ::= c bp'
lista.append(gramatica)
def p_expresion4(t):
'''bp : POR c bp
| DIVISION c bp
'''
gramatica = 'bp ::= \''+t[1]+'\' c bp'
lista.append(gramatica)
def p_expresion4_1(t):
'''bp : '''
gramatica = 'bp ::= epsilon'
lista.append(gramatica)
def p_expresion12(t):
'''c : d dp
'''
gramatica = 'c ::= d dp'
lista.append(gramatica)
def p_expresion13(t):
'''dp : COMA d dp
'''
gramatica = 'dp ::= \',\' d dp'
lista.append(gramatica)
def p_expresion13_1(t):
'''dp : '''
gramatica = 'dp ::= epsilon'
lista.append(gramatica)
def p_expresion5(t):
'''d : PARIZQ a PARDER
| IDENTIFICADOR
| CADENA
| CARACTER_O_CADENA
| NOENTERO
| NODECIMAL
| BOOLEAN
| INTERVAL
| NOW PARIZQ PARDER
| SUM PARIZQ tipo PARDER
| IDENTIFICADOR PUNTO IDENTIFICADOR
'''
    if str(t[1]).lower() == 'now':
gramatica = 'd ::= \''+t[1]+'\' \'(\' \')\''
lista.append(gramatica)
elif str(t[1]).lower() == '(':
gramatica = 'd ::= \'(\' a \')\''
lista.append(gramatica)
elif str(t[1]).lower() == 'sum':
gramatica = 'd ::= \'SUM\' \'(\''
lista.append(gramatica)
else:
gramatica = 'd ::= \''+str(t[1])+'\''
lista.append(gramatica)
def p_error(t):
    # print("Syntax error at '%s'" % t.value)
    pass
def ejecutar_analisis(entrada):
lexer = lex.lex(reflags=re.IGNORECASE)
parser = yacc.yacc()
#print(entrada)
parser.parse(entrada)
#print("Se interpreto todo")
ReporteGramatical.ReporteGramatical.generarReporte(listado(1, None))
# entry point: read the input file and run the analysis
f = open("./entrada.txt", "r")
entrada = f.read()
f.close()
ejecutar_analisis(entrada)
```
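A minimal driver sketch for the lexer above, assuming PLY is installed and that this file is importable as `gramatica` (the module name and the sample SQL string are assumptions; note that importing the module also runs the analysis over `./entrada.txt`, since that call sits at module level).
```python
# Hypothetical smoke test for the token rules above; requires ./entrada.txt to
# exist because importing gramatica executes the module-level driver.
import re
import ply.lex as lex
import gramatica

lexer = lex.lex(module=gramatica, reflags=re.IGNORECASE)
lexer.input("CREATE DATABASE IF NOT EXISTS prueba;")
for tok in iter(lexer.token, None):  # lexer.token() returns None at end of input
    print(tok.type, tok.value)
```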
#### File: team20/execution/executeOptimization_result.py
```python
class executeOptimization_result:
def __init__(self, c3d_optimized, print_optimization_table):
self.c3d_optimized = c3d_optimized
self.print_optimization_table = print_optimization_table
```
#### File: team25/optimization/optLexer.py
```python
reservadas = {
    'def': 'DEF',
    'if': 'IF',
    # CUSTOM
    'goto': 'GOTO',
    'label': 'LABEL',
    # SPECIAL
    'False': 'FALSE',
    'True': 'TRUE',
    'or': 'OR',
    'and': 'AND',
    # Pass
    'pass': 'PASS'
}
tokens = [
    # LITERALS
    "ID",
    "ENTERO",
    "DECIMAL",
    "COMMENT",
    "CADENA",
    # ASSIGNMENT
    "ASIGNACION",
    # ARITHMETIC OPERATORS
    "SUMA",
    "RESTA",
    "MULTI",
    "DIV",
    "POTENCIA",
    # RELATIONAL OPERATORS
    "MAYORQUE",
    "MENORQUE",
    "IGUAL",
    "DIFERENTE",
    "IGMAYORQUE",
    "IGMENORQUE",
    # BRACKETS
    "PARIZQ",
    "PARDER",
    "CORIZQ",
    "CORDER",
    # PUNCTUATION
    "DOSPUNTOS",
    "PUNTO",
    "COMA",
    # SPECIAL
    "WITHGOTO",
    "ETIQUETA",
    # IGNORED
    "IMPORTACIONES"
] + list(reservadas.values())
def t_IMPORTACIONES(t):
r"from(.*)\n"
t.lexer.lineno += t.value.count("\n")
t.lexer.skip(0)
t_ETIQUETA = r"\.L\d+"
t_WITHGOTO = r"@with_goto"
# NORMAL
t_ASIGNACION = r"="
# ARITHMETIC OPERATIONS
t_POTENCIA = r"\*\*"
t_SUMA = r"\+"
t_RESTA = r"-"
t_MULTI = r"\*"
t_DIV = r"/"
# RELATIONAL OPERATIONS
t_MAYORQUE = r">"
t_MENORQUE = r"<"
t_IGMAYORQUE = r">="
t_IGMENORQUE = r"<="
t_IGUAL = r"=="
t_DIFERENTE = r"!="
# BRACKETS
t_PARIZQ = r"\("
t_PARDER = r"\)"
t_CORIZQ = r"\["
t_CORDER = r"\]"
# PUNCTUATION
t_DOSPUNTOS = r":"
t_PUNTO = r"\."
t_COMA = r","
t_CADENA = r"(\'.*?\'|\".*?\")"
def t_DECIMAL(t):
r"-?\d+\.\d+(e(-|\+)?\d+)?|\d+(e(-|\+)?\d+)"
try:
t.value = float(t.value)
except ValueError:
print("No se pudo convertir %d", t.value)
t.value = 0
return t
def t_ENTERO(t):
r"-?\d+"
try:
t.value = int(t.value)
except ValueError:
print("Integer value too large %d", t.value)
t.value = 0
return t
def t_ID(t):
r"[a-zA-Z_][a-zA-Z_0-9]*"
    # Check whether it is a reserved word
t.type = reservadas.get(t.value, "ID")
return t
def t_COMMENT(t):
r"\#(.*)\n"
t.lexer.lineno += t.value.count("\n")
t.lexer.skip(0)
def t_newline(t):
r"\n+"
t.lexer.lineno += t.value.count("\n")
# Error handler for the lexer
def t_error(t):
print("Illegal character '%s'" % t.value[0])
t.lexer.skip(1)
t_ignore = " \t"
"""import ply.lex as lex
lexer = lex.lex()
#para debugger los nuevos tokens
lexer.input('''
-5 - -9
''')
while not False:
token = lexer.token()
if not token:
break
print(f'tipo: {token.type} valor: {token.value} linea:{token.lineno} col:{token.lexpos}')"""
```
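The string literal at the end of the file is a disabled test harness; a runnable sketch along the same lines, meant to be appended at the bottom of this module so that `lex.lex()` can pick up the `t_*` rules (the input program is invented):
```python
# Minimal lexer smoke test mirroring the disabled harness above.
import ply.lex as lex

lexer = lex.lex()
lexer.input("t1 = a + b * 2\nif t1 > 10: goto .L1\nlabel .L1\npass\n")
while True:
    token = lexer.token()
    if not token:
        break
    print(f"type: {token.type} value: {token.value!r} line: {token.lineno}")
```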
#### File: team26/G26/reporte.py
```python
import os
from Utils.fila import fila
def reporteTabla( datos):
f = open("./Reportes/Reporte_TablaSimbolos.html", "w")
f.write("<!DOCTYPE html>\n")
f.write("<html>\n")
f.write(" <head>\n")
f.write(' <meta charset="UTF-8">\n')
f.write(' <meta name="viewport" content="width=device-width, initial-scale=1.0">')
f.write(" <title>Reporte de tabla simbolos</title>\n")
f.write(' <link rel="stylesheet" href="style.css">\n')
f.write(" </head>\n")
f.write(" <body>\n")
f.write(" <p><b>Reporte Tabla de Simbolos<b></p>\n")
f.write(" <div>\n")
for a in datos.tablaSimbolos:
if a == 'funciones_':
f.write("<div>\n")
f.write("<p class='base'>Funciones/Procedimientos</p>")
f.write("<center>\n")
f.write("<table>\n")
f.write("<tr class='titulo'> <td><b>Nombre</b></td> <td><b>Return</b></td> <td><b>Tipo</b></td></tr>\n")
for func in datos.tablaSimbolos['funciones_']:
if func['drop'] == 1:
f.write(" <tr><td>")
f.write(func['name'])
f.write("</td><td>")
if func['return'] == None or func['return'] == '':
f.write("None")
else:
f.write(func['return'])
f.write("</td><td>")
f.write(func['tipo'])
f.write("</td></tr>\n")
f.write("</table>\n")
f.write("</center>\n")
continue
f.write(" <div>\n")
f.write("<p>BASE DE DATOS: ")
f.write(a)
f.write("</p>\n")
owner = datos.tablaSimbolos[a]['owner']
for table in datos.tablaSimbolos[a]['tablas']:
columnas = []
for column in datos.tablaSimbolos[a]['tablas'][table]['columns']:
cc = ""
try:
cc = column['name']
except:
cc = column.name
nombre = cc
tt = ""
try:
tt = column.type
except:
tt = column['type']
tipo = tt
yy = ""
try:
yy = column.size
except:
yy = column['size']
size = yy
c = fila(nombre, tipo, size)
ff = ""
try:
ff = column['pk']
except:
ff = column.pk
if ff != None:
c.setPK()
gg = ""
try:
for fkAS in column['fk']:
if fkAS == None:
gg = False
else:
gg = True
except:
for fkAS in column.fk:
if fkAS == None:
gg = False
else:
gg = True
c.setFK()
c.FK = gg
aa = ""
try:
aa = column['unique']
except:
aa = column.unique
if aa != None:
c.setUnique()
bb = ""
try:
bb = column['default']
except:
bb = column.default
                if bb == None:
                    c.setDefault('None')
                else:
                    c.setDefault(str(bb))
columnas.append(c)
f.write("<p class='tabla'>Tabla: ")
f.write(table)
f.write("</p>")
f.write("<center>")
f.write(" <table>\n")
f.write(" <tr class='titulo'> <td><b>Nombre</b></td> <td><b>Tipo</b></td> <td><b>Size</b></td> <td><b>PK</b></td> <td><b>FK</b></td> <td><b>Unique</b></td> <td><b>Default</b></td> </tr>\n")
for col in columnas:
f.write(" <tr><td>")
f.write(col.nombre)
f.write("</td><td>")
f.write(col.tipo)
f.write("</td><td>")
f.write(str(col.size))
f.write("</td><td>")
if col.PK == False:
f.write("False")
else:
f.write("True")
f.write("</td><td>")
if col.FK == False:
f.write("False")
else:
f.write("True")
f.write("</td><td>")
if col.unique == False:
f.write("False")
else:
f.write("True")
f.write("</td><td>")
f.write(col.default)
f.write("</td></tr>\n")
f.write(" </table>\n")
f.write("</center>\n")
f.write("</div>")
if 'index' in datos.tablaSimbolos[a]:
f.write("<div>")
f.write("<center>\n")
for column in datos.tablaSimbolos[a]['index']:
f.write("<p class='i'>Indice :")
f.write(column.name)
f.write("<li>")
f.write("<ol>Nombre: ")
f.write(column.name)
f.write("</ol></li><li>Columnas: ")
try:
tc = ("<ul>")
tc += ("Tabla ->")
tc += (column.table)
tc += (" Columna ->")
tc += (column.columns.id)
tc += (" Tipo ->")
if column.columns.option:
tc += ('Hash')
else:
tc += ('lower')
tc += ("</ul>\n")
f.write(tc)
except:
for h in column.columns:
tc = ("<ul>")
tc += ("Tabla ->")
tc += (column.table)
tc += (" Columna ->")
tc += (h.column)
tc += ("</ul>\n")
f.write(tc)
f.write("</li><li>Orden: ")
f.write(column.order)
f.write("</p>\n")
f.write("/<center>\n")
f.write(" </div>\n")
f.write(" </div>\n")
f.write(" </body>\n")
f.write("</html>\n")
f.close()
def hacerReporteGramatica(gramatica):
if gramatica != None:
f = open("./Reportes/GramaticaAutomatica.md", "w")
f.write("# Gramatica Generada Automaticamente\n")
f.write("La gramatica que se genero en el analisis realizado es la siguiente:\n")
f.write("******************************************************************\n")
f.write(gramatica)
f.write("\n******************************************************************")
f.close()
    else:
        f = open("./Reportes/GramaticaAutomatica.md", "w")
        f.write("# Gramatica Generada Automaticamente\n")
        f.write("No se detecto")
        f.close()
def Rerrores(errores, semanticos, nombre):
nombre = "./Reportes/" + nombre
f = open(nombre, "w")
f.write("<!DOCTYPE html>\n")
f.write("<html>\n")
f.write(" <head>\n")
f.write(' <meta charset="UTF-8">\n')
f.write(' <meta name="viewport" content="width=device-width, initial-scale=1.0">')
f.write(" <title>Reporte de errores</title>\n")
f.write(' <link rel="stylesheet" href="style.css">\n')
f.write(" </head>\n")
f.write(" <body>\n")
f.write(" <p><b>Reporte de Errores<b></p>")
f.write(" <div>")
f.write(" <center>")
f.write(" <table>\n")
f.write(" <tr class='titulo'> <td><b>Tipo</b></td> <td><b>Descripcion</b></td> <td><b>Linea</b></td> </tr>\n")
for error in errores:
f.write(" <tr> <td>" + error.getTipo() + "</td> <td>" + error.getDescripcion() + "</td> <td>"+ str(error.getLinea()) + "</td> </tr>\n")
for semantico in semanticos:
f.write(" <tr> <td>Semantico" + "</td> <td>" + semantico.desc + "</td> <td>" + str(semantico.line) + "</td> </tr>\n")
f.write(" </table>\n")
f.write(" </center>")
f.write(" </div>")
f.write(" </body>\n")
f.write("</html>\n")
f.close()
```
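A hedged usage sketch for the simplest of the three helpers; it assumes the file is importable as `reporte` (which also requires the project's `Utils.fila` module to resolve) and creates the hard-coded `./Reportes` output directory up front:
```python
# Usage sketch for hacerReporteGramatica; the module path is an assumption.
import os
from reporte import hacerReporteGramatica

os.makedirs("./Reportes", exist_ok=True)
hacerReporteGramatica("s ::= instrucciones\ninstrucciones ::= instruccion instrucciones'")
with open("./Reportes/GramaticaAutomatica.md") as f:
    print(f.read())
```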
#### File: Types/Validations/Character.py
```python
syntaxPostgreErrors = []
def validateVarchar(n, val):
    if len(val) <= n:
return None
syntaxPostgreErrors.append("Error: 22026: Excede el limite de caracteres")
return {"Type": "varchar", "Descripción": "Excede el limite de caracteres"}
def validateChar(n, val):
if len(val) == n:
return None
syntaxPostgreErrors.append("Error: 22026: Restriccion de caracteres")
return {"Type": "char", "Descripción": "Restriccion de caracteres"}
def validateBoolean(val):
s = str(val).lower()
if s == "true" or s == "false":
return None
elif val == 1 or val == 0:
return None
syntaxPostgreErrors.append("Error: 22000: Tipo invalido (Boolean)")
return {"Type": "boolean", "Descripción": "invalido"}
```
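The validators return `None` on success and a small error dict on failure, appending a message to `syntaxPostgreErrors` as a side effect. A quick check, assuming the functions above are in scope:
```python
# Sanity checks; None means the value was accepted.
print(validateVarchar(10, "abc"))   # None: fits within varchar(10)
print(validateVarchar(2, "abc"))    # error dict: exceeds the limit
print(validateChar(3, "abc"))       # None: this check requires an exact length
print(validateBoolean("TRUE"))      # None: the comparison is case-insensitive
print(validateBoolean("maybe"))     # error dict: not a boolean
print(syntaxPostgreErrors)          # accumulated error messages
```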
#### File: fase2/team07/principal.py
```python
import gramatica as g
import Utils.TablaSimbolos as table
import Utils.Lista as l
import Librerias.storageManager.jsonMode as storage
import Utils.Error as error
import Instrucciones.DML.select as select
storage.dropAll()
datos = l.Lista({}, '')
def parsear(input_x):
res = []
fx = open("entrada.txt", "w")
fx.write(input_x)
fx.close()
    ruta = 'entrada.txt'
    f = open(ruta, "r")
    input = f.read()
    f.close()
instrucciones = g.parse(input)
for instr in instrucciones['ast']:
if instr == None:
continue
result = instr.execute(datos)
if isinstance(result, error.Error):
print(result)
elif isinstance(instr, select.Select):
res.append(instr.ImprimirTabla(result))
#print(instr.ImprimirTabla(result))
else:
res.append(result)
#print(input)
return '\n '.join([(str(elem)) for elem in res])
#print('\n\nTABLA DE SIMBOLOS')
#print(datos)
def otro():
ruta = '../G26/entrada.txt'
f = open(ruta, "r")
input = f.read()
instrucciones = g.parse(input)
for instr in instrucciones['ast']:
if instr == None:
continue
result = instr.execute(datos)
if isinstance(result, error.Error):
print(result)
elif isinstance(instr, select.Select):
print(instr.ImprimirTabla(result))
else:
print(result)
'''
ruta = '../G26/entrada.txt'
f = open(ruta, "r")
input = f.read()
instrucciones = g.parse(input)
for instr in instrucciones['ast']:
if instr == None:
continue
result = instr.execute(datos)
if isinstance(result, error.Error):
print(result)
elif isinstance(instr, select.Select):
print(instr.ImprimirTabla(result))
else:
print(result)
print('\n\nTABLA DE SIMBOLOS')
print(datos)
'''
```
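A sketch of how this entry point is meant to be driven; it assumes every project import (gramatica, the storage manager, Utils) resolves, and the SQL text is illustrative only:
```python
# Hypothetical call: parsear() writes the text to entrada.txt, parses it, runs
# each instruction against the shared symbol table, and returns the joined results.
consulta = "CREATE DATABASE prueba;"
print(parsear(consulta))
```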
#### File: fase2/team05/checksum.py
```python
import hashlib
import os
from pathlib import Path
import storageManager as u
hashmodes=['MD5','SHA256']
class Checksum:
def __init__(self):
self.chksum_db={}
self.chksum_tables={}
self.chksum_test={}
self.default_dir='data/'
    def checksumDatabase(self, database: str, mode: str) -> str:  # returns the checksum string
chksum=None
encoded_as=u.getCodificacionDatabase(database)
#print(database,'encoded as:',encoded_as)
paths=self.setDirectory(database,None)
string2chksum=''
try:
if paths is not None:
for p in paths:
with open(str(p),'r',encoding=encoded_as) as f:
string2chksum=string2chksum+str(f.read())
if mode in hashmodes:
if mode=='MD5':
chksum=str(self.md5(string2chksum))
self.chksum_db[str(database+';md5')]=str(chksum+';md5')
if mode=='SHA256':
chksum=str(self.sha256(string2chksum))
self.chksum_db[str(database+';sha256')]=str(chksum+';sha256')
else:
return None
except:
return None
return chksum
    def checksumTable(self, database: str, table: str, mode: str) -> str:  # returns the checksum string
chksum=None
encoded_as=u.getCodificacionDatabase(database)
#print(database,'encoded as:',encoded_as)
paths=self.setDirectory(database,table)
try:
if paths is not None:
string2chksum=''
for p in paths:
with open(str(p),'r',encoding=encoded_as) as f:
string2chksum=string2chksum+str(f.read())+'\n'
if mode in hashmodes:
if mode=='MD5':
chksum=str(self.md5(string2chksum))
self.chksum_tables[str(database+'-'+table+';md5')]=str(chksum+';md5')
if mode=='SHA256':
chksum=str(self.sha256(string2chksum))
self.chksum_tables[str(database+'-'+table+';sha256')]=str(chksum+';sha256')
else:
return None
except:
return None
return chksum
def md5(self, cadena:str) -> str:
string_md5=''
m=hashlib.md5()
m.update(cadena.encode())
string_md5=m.digest()
del m
return string_md5
def sha256(self, cadena:str) -> str:
string_sha256=''
m=hashlib.sha256()
m.update(cadena.encode())
string_sha256=m.digest()
del m
return string_sha256
def stringtest(self, cadena:str, mode:str) -> str:
print('CHECKSUM')
chksum=''
if mode in hashmodes:
if mode=='MD5':
chksum=str(self.md5(cadena))
self.chksum_test[str(cadena+';md5')]=str(chksum+';md5')
if mode=='SHA256':
chksum=str(self.sha256(cadena))
self.chksum_test[str(cadena+';sha256')]=str(chksum+';sha256')
else:
return None
else:
return None
return chksum
def printDict(self, dic:dict):
print('DICCIONARIO: ')
for t in dic:
print(t)
def setDirectory(self,database:str,table:str):
modo=u.getModoBaseDatos(database)
if modo !=None:
#print('Modo DB:',database,'|',modo)
self.default_dir=''
paths=[]
if modo=='avl':
self.default_dir='data/avlMode/'
with os.scandir(self.default_dir) as ficheros:
for f in ficheros:
if table==None:
if f.name.startswith(str(database)):
paths.append(f.path)
else:
if f.name.endswith(str(table)+'.tbl') and f.name.startswith(str(database)):
paths.append(f.path)
if modo=='b':
self.default_dir='data/b/'
with os.scandir(self.default_dir) as ficheros:
for f in ficheros:
if f.name.startswith(str(database)):
if table==None:
paths.append(f.path)
else:
if f.name.endswith(str(table)+'-b.bin'):
paths.append(f.path)
if modo=='bplus':
self.default_dir='data/BPlusMode/'+str(database)+'/'
if table==None:
with os.scandir(self.default_dir) as ficheros:
for f in ficheros:
if f.name.startswith(database):
paths.append(f.path)
else:
lista=u.showTables(database)
for l in lista:
with os.scandir(self.default_dir+str(l)+'/') as folders:
for c in folders:
paths.append(c.path)
if modo=='dict':
self.default_dir='data/dictMode/'+str(database)+'/'
if table==None:
with os.scandir(self.default_dir) as ficheros:
for f in ficheros:
paths.append(f.path)
else:
with os.scandir(self.default_dir) as ficheros:
for f in ficheros:
if f.name.startswith(str(table)):
paths.append(f.path)
if modo=='isam':
self.default_dir='data/ISAMMode/tables/'
with os.scandir(self.default_dir) as ficheros:
for f in ficheros:
if f.name.startswith(str(database)):
if table==None:
paths.append(f.path)
else:
if f.name.endswith(str(table)+'.bin'):
paths.append(f.path)
if modo=='json':
self.default_dir='data/json/'
with os.scandir(self.default_dir) as ficheros:
for f in ficheros:
if f.name.startswith(str(database)):
if table==None:
paths.append(f.path)
else:
if f.name.endswith(str(table)):
paths.append(f.path)
if modo=='hash':
self.default_dir='data/hash/'+str(database)+'/'
with os.scandir(self.default_dir) as ficheros:
for f in ficheros:
if table==None:
paths.append(f.path)
else:
if f.name.endswith(str(table)+'.bin'):
paths.append(f.path)
return paths
else:
return None
chk=Checksum()
print(chk.checksumDatabase('BD5','MD5'))
print(chk.checksumTable('BD5','Year','MD5'))
print('--------------')
#print('BD1')
##print('MD5',chk.checksumDatabase('BD1','MD5'))
#print('SHA256',chk.checksumDatabase('BD1','SHA256'))
#print('--------------')
#print('BD3')
#print('MD5',chk.checksumDatabase('BD3','MD5'))
#print('SHA256',chk.checksumDatabase('BD3','SHA256'))
#print('--------------')
#print('BD4')
#print('MD5',chk.checksumDatabase('BD4','MD5'))
#print('SHA256',chk.checksumDatabase('BD4','SHA256'))
#print('--------------')
#print('BD5')
#print('MD5',chk.checksumDatabase('BD5','MD5'))
#print('SHA256',chk.checksumDatabase('BD5','SHA256'))
#print('--------------')
#print('BD6')
#print('MD5',chk.checksumDatabase('BD6','MD5'))
#print('SHA256',chk.checksumDatabase('BD6','SHA256'))
#print('--------------')
#print('BD7')
#print('MD5',chk.checksumDatabase('BD7','MD5'))
#print('SHA256',chk.checksumDatabase('BD7','SHA256'))
#print('--------------')
#print('BD8')
#print('MD5',chk.checksumDatabase('BD8','MD5'))
#print('SHA256',chk.checksumDatabase('BD8','SHA256'))
#print(chk.checksumTable('BD6','Cliente','MD5'))
```
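`stringtest` hashes a literal without touching the `data/` directory, so it is the safest smoke test for the two supported modes; a short sketch assuming the `Checksum` class above is in scope:
```python
# Filesystem-free check of both hash modes; an unsupported mode returns None.
chk2 = Checksum()
print(chk2.stringtest("hello world", "MD5"))
print(chk2.stringtest("hello world", "SHA256"))
print(chk2.stringtest("hello world", "CRC32"))  # not in hashmodes, so None
```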
#### File: fase2/team10/Grafos.py
```python
from graphviz import Digraph
from arbolNario import ArbolN
class Nodo:
def __init__(self, valor ):
self.valor = valor
self.conexiones = {}
class Grafo:
def __init__(self):
self.nodosAgregados = []
def returnValores(self):
return [nodo.valor for nodo in self.nodosAgregados]
def insertIndependiente(self, ind):
self.nodosAgregados.append(ind)
def insertar(self, nodoPadre, distancia, nodoHijo):
if len(self.nodosAgregados) == 0:
nodoP = Nodo(nodoPadre)
nodoH = Nodo(nodoHijo)
nodoP.conexiones[nodoH.valor] = distancia
nodoH.conexiones[nodoP.valor] = distancia
self.nodosAgregados.append(nodoP)
self.nodosAgregados.append(nodoH)
else:
valores = self.returnValores()
if nodoPadre in valores :
indice = valores.index(nodoPadre)
nodoP = self.nodosAgregados[indice]
if nodoHijo in valores:
index2 =valores.index(nodoHijo)
nodoH = self.nodosAgregados[index2]
nodoP.conexiones[nodoH.valor] = distancia
nodoH.conexiones[nodoP.valor] = distancia
else:
nodoH = Nodo(nodoHijo)
nodoP.conexiones[nodoH.valor] = distancia
nodoH.conexiones[nodoP.valor] = distancia
self.nodosAgregados.append(nodoH)
else:
nodoP = Nodo(nodoPadre)
if nodoHijo in valores:
index2 =valores.index(nodoHijo)
nodoH = self.nodosAgregados[index2]
nodoP.conexiones[nodoH.valor] = distancia
nodoH.conexiones[nodoP.valor] = distancia
self.nodosAgregados.append(nodoP)
else:
nodoH = Nodo(nodoHijo)
nodoP.conexiones[nodoH.valor] = distancia
nodoH.conexiones[nodoP.valor] = distancia
self.nodosAgregados.append(nodoH)
self.nodosAgregados.append(nodoP)
def retornaGrafo(self):
for i in self.nodosAgregados:
print("Nodo: " + str(i.valor) )
for j in i.conexiones.keys():
print(str(i.valor) +"<----"+ str(i.conexiones[j])+ "----> "+ str(j) )
def graficar(self):
f = Digraph(name='grafo' , filename= 'grafo.gv', format = "svg")
f.attr(rankdir='LR')
f.attr('node', shape='box')
for i in self.nodosAgregados:
f.node(i.valor)
nodosvisitados =[]
for i in self.nodosAgregados:
nodosvisitados.append(i.valor)
if list(i.conexiones.keys()) !=[]:
for j in i.conexiones.keys():
if j not in nodosvisitados:
f.edge_attr["arrowhead"] ='vee'
f.edge(i.valor, j, label= str(i.conexiones[j]) )
f.view()
def ArbolGenerador(self, Raiz):
tree = ArbolN()
nodos_visitados = []
nodos = [nodo.valor for nodo in self.nodosAgregados]
NodoRaiz = self.nodosAgregados[nodos.index(Raiz)]
nodos_visitados.append(NodoRaiz.valor)
tree.insertar(NodoRaiz.valor, 0)
nodosXvisitar = []
posicion = 0
for i in NodoRaiz.conexiones.keys():
tree.insertar(i ,NodoRaiz.conexiones[i], NodoRaiz.valor,posicion )
nodos_visitados.append(i)
nodosXvisitar.append(self.nodosAgregados[nodos.index(i)])
posicion +=1
for j in nodosXvisitar:
position = 0
for k in j.conexiones.keys():
if k not in nodos_visitados:
tree.insertar(k, j.conexiones[k],j.valor,position )
nodos_visitados.append(j.valor)
nodos_visitados.append(k)
nodosXvisitar.append(self.nodosAgregados[nodos.index(k)])
position +=1
tree.GenGraph()
```
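A small smoke test for the weighted graph above; `retornaGrafo` only prints the adjacency, so unlike `graficar` and `ArbolGenerador` it does not require graphviz to be installed:
```python
# Build a tiny triangle graph and dump its edges; insertar links both
# directions, so each edge is reported from both endpoints.
g = Grafo()
g.insertar("A", 3, "B")
g.insertar("A", 5, "C")
g.insertar("B", 2, "C")
g.retornaGrafo()
```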
#### File: team14/storage/storage.py
```python
import pickle
from .BPlusMode import BPlusMode as BPlusM
from .BMode import BMode as BM
from .ISAMMode import ISAMMode as ISAMM
from .HashMode import HashMode as HashM
from .AVLMode import avlMode as AVLM
from .jsonMode import jsonMode as jsonM
from .DictMode import DictMode as DictM
from .encryption import _decrypt, _encrypt
from .Blockchain import *
import os
import hashlib
import zlib
#*---------------------------------------others----------------------------------------------*
# Checks that the required data directories exist
def initcheck():
if not os.path.exists('data'):
os.makedirs('data')
if not os.path.exists("data/Info"):
os.makedirs('data/Info')
if not os.path.exists('data/Info/databasesInfo.bin'):
Info = [{}, {}, {}]
commit(Info, 'databasesInfo')
if not os.path.exists("data/Info/safeModeTables"):
os.makedirs('data/Info/safeModeTables')
# saves an object to a binary file
def commit(objeto, fileName):
file = open("data/Info/" + fileName + ".bin", "wb+")
file.write(pickle.dumps(objeto))
file.close()
# reads an object back from a binary file
def rollback(fileName):
file = open("data/Info/" + fileName + ".bin", "rb")
b = file.read()
file.close()
return pickle.loads(b)
initcheck()
databasesinfo = rollback('databasesInfo')
# *----------------------------------databases CRUD-------------------------------------------*
# creates a database instance and stores it in the catalog
def createDatabase(database: str, mode: str, encoding: str) -> int:
result = 0
coding = ['ascii', 'iso-8859-1', 'utf8']
if encoding.lower() not in coding:
return 4
if database in databasesinfo[0]:
return 2
else:
if mode == 'avl':
result = AVLM.createDatabase(database)
elif mode == 'b':
result = BM.createDatabase(database)
elif mode == 'bplus':
result = BPlusM.createDatabase(database)
elif mode == 'dict':
result = DictM.createDatabase(database)
elif mode == 'isam':
result = ISAMM.createDatabase(database)
elif mode == 'json':
result = jsonM.createDatabase(database)
elif mode == 'hash':
result = HashM.createDatabase(database)
else:
result = 3
if result == 0:
databasesinfo[0].update({database: {'mode': mode, 'encoding': encoding}})
databasesinfo[1].update({database: {}})
commit(databasesinfo, 'databasesInfo')
return result
# returns a list with the names of the existing databases
def showDatabases() -> list:
dbsi = databasesinfo[0].keys()
dbs = []
for k in dbsi:
dbs.append(k)
return dbs
# renames a database
def alterDatabase(databaseOld: str, databaseNew: str) -> int:
result = 0
if databaseOld not in databasesinfo[0]:
result = 2
elif databaseNew in databasesinfo[0]:
result = 3
else:
if databasesinfo[0][databaseOld]['mode'] == 'avl':
result = AVLM.alterDatabase(databaseOld, databaseNew)
elif databasesinfo[0][databaseOld]['mode'] == 'b':
result = BM.alterDatabase(databaseOld, databaseNew)
elif databasesinfo[0][databaseOld]['mode'] == 'bplus':
result = BPlusM.alterDatabase(databaseOld, databaseNew)
elif databasesinfo[0][databaseOld]['mode'] == 'dict':
result = DictM.alterDatabase(databaseOld, databaseNew)
elif databasesinfo[0][databaseOld]['mode'] == 'isam':
result = ISAMM.alterDatabase(databaseOld, databaseNew)
elif databasesinfo[0][databaseOld]['mode'] == 'json':
result = jsonM.alterDatabase(databaseOld, databaseNew)
elif databasesinfo[0][databaseOld]['mode'] == 'hash':
result = HashM.alterDatabase(databaseOld, databaseNew)
else:
result = 4
if result == 0:
databasesinfo[0][databaseNew] = databasesinfo[0][databaseOld]
del databasesinfo[0][databaseOld]
databasesinfo[1][databaseNew] = databasesinfo[1][databaseOld]
del databasesinfo[1][databaseOld]
if databaseOld in databasesinfo[2]:
databasesinfo[2][databaseNew] = databasesinfo[2][databaseOld]
del databasesinfo[2][databaseOld]
commit(databasesinfo, 'databasesInfo')
return result
# drops a database
def dropDatabase(database: str) -> int:
result = 0
if database not in databasesinfo[0]:
return 2
else:
if database in databasesinfo[2]:
for i in databasesinfo[2][database]:
result = deleteFunctions(database, i)
if result != 0:
break
else:
result = deleteFunctions(database, databasesinfo[0][database]['mode'])
if result == 0:
del databasesinfo[0][database]
del databasesinfo[1][database]
if database in databasesinfo[2]:
del databasesinfo[2][database]
            commit(databasesinfo, 'databasesInfo')
return result
# removes the database from the storage mode's own records
def deleteFunctions(database, mode):
if mode == 'avl':
return AVLM.dropDatabase(database)
elif mode == 'b':
return BM.dropDatabase(database)
elif mode == 'bplus':
return BPlusM.dropDatabase(database)
elif mode == 'dict':
return DictM.dropDatabase(database)
elif mode == 'isam':
return ISAMM.dropDatabase(database)
elif mode == 'json':
return jsonM.dropDatabase(database)
elif mode == 'hash':
return HashM.dropDatabase(database)
# switches an entire database to a different storage mode
def alterDatabaseMode(database: str, mode: str) -> int:
modes = ['avl', 'b', 'bplus', 'dict', 'isam', 'json', 'hash']
try:
if database not in databasesinfo[0]:
return 2
elif mode not in modes:
return 4
else:
if databasesinfo[0][database]['mode'] == mode:
return 1
else:
createDatabase('temporal_name', mode, 'utf8')
for key in databasesinfo[1][database].keys():
createTable('temporal_name', key, databasesinfo[1][database][key]['numberColumns'])
if databasesinfo[1][database][key]['PK'] is not None:
alterAddPK('temporal_name', key, databasesinfo[1][database][key]['PK'])
registers = extractTable(database, key)
for register in registers:
insert('temporal_name', key, register)
dropDatabase(database)
alterDatabase('temporal_name', database)
return 0
except:
return 1
# changes the encoding accepted by the database
def alterDatabaseEncoding(database: str, encoding: str) -> int:
result = 0
coding = ['ascii', 'iso-8859-1', 'utf8']
if encoding not in coding:
return 3
elif database not in databasesinfo[0]:
return 2
elif encoding == databasesinfo[0][database]["encoding"]:
return 1
else:
try:
tables = []
tuples = []
copy = {}
tables = showTables(database)
for i in tables:
tuples = extractTable(database, i)
copy.update({i: tuples[:]})
for j in copy[i]:
try:
decoding = databasesinfo[0][database]['encoding']
for k in j:
if isinstance(k, str):
ind = j.index(k)
x = k.encode(decoding)
j[ind] = x.decode(encoding)
except:
return 1
            # every row re-encoded cleanly: switch the encoding and rewrite the tables
            databasesinfo[0][database]['encoding'] = encoding
            for i in tables:
                truncate(database, i)
                for j in copy[i]:
                    result = insert(database, i, j)
                    if result != 0:
                        return result
            commit(databasesinfo, 'databasesInfo')
            return 0
except:
return 1
#*----------------------------------tables-------------------------------------------*
# creates a Table instance and stores it in the database's table catalog
def createTable(database: str, table: str, numberColumns: int) -> int:
result = 0
if database not in databasesinfo[0]:
return 2
else:
        # check whether every existing table in the database is compressed
        tablas = showTables(database)
        if len(tablas) > 0:
            descompresos = 0
            for tabla in tablas:
                if databasesinfo[1][database][tabla]['Compress'] == False:
                    descompresos += 1
        else:
            descompresos = 1
        # create the table in the active storage mode
if databasesinfo[0][database]['mode'] == 'avl':
result = AVLM.createTable(database, table, numberColumns)
elif databasesinfo[0][database]['mode'] == 'b':
result = BM.createTable(database, table, numberColumns)
elif databasesinfo[0][database]['mode'] == 'bplus':
result = BPlusM.createTable(database, table, numberColumns)
elif databasesinfo[0][database]['mode'] == 'dict':
result = DictM.createTable(database, table, numberColumns)
elif databasesinfo[0][database]['mode'] == 'isam':
result = ISAMM.createTable(database, table, numberColumns)
elif databasesinfo[0][database]['mode'] == 'json':
result = jsonM.createTable(database, table, numberColumns)
elif databasesinfo[0][database]['mode'] == 'hash':
result = HashM.createTable(database, table, numberColumns)
        if result == 0:
            # persist the new table's metadata
            if descompresos != 0:
                databasesinfo[1][database].update(
                    {table: {'mode': databasesinfo[0][database]['mode'], 'numberColumns': numberColumns, 'PK': None,
                             'safeMode': False, 'Compress': False}})
                commit(databasesinfo, 'databasesInfo')
            else:
                databasesinfo[1][database].update(
                    {table: {'mode': databasesinfo[0][database]['mode'], 'numberColumns': numberColumns, 'PK': None,
                             'safeMode': False, 'Compress': True}})
                commit(databasesinfo, 'databasesInfo')
return result
# returns a list of every table stored in a database
def showTables(database: str) -> list:
try:
databasetables2 = []
databasetables = databasesinfo[1][database].keys()
for k in databasetables:
databasetables2.append(k)
return databasetables2
except:
return []
# extracts and returns every record in a table
def extractTable(database: str, table: str) -> list:
tuples = []
if database not in databasesinfo[0]:
return []
if table not in databasesinfo[1][database]:
return []
if databasesinfo[1][database][table]['mode'] == 'avl':
tuples = AVLM.extractTable(database, table)
elif databasesinfo[1][database][table]['mode'] == 'b':
tuples = BM.extractTable(database, table)
elif databasesinfo[1][database][table]['mode'] == 'bplus':
tuples = BPlusM.extractTable(database, table)
elif databasesinfo[1][database][table]['mode'] == 'dict':
tuples = DictM.extractTable(database, table)
elif databasesinfo[1][database][table]['mode'] == 'isam':
tuples = ISAMM.extractTable(database, table)
elif databasesinfo[1][database][table]['mode'] == 'json':
tuples = jsonM.extractTable(database, table)
elif databasesinfo[1][database][table]['mode'] == 'hash':
tuples = HashM.extractTable(database, table)
if databasesinfo[1][database][table]['Compress'] == True:
tabla = tuples
for i in range(0, len(tabla)):
tupla = tabla[i]
for j in range(0, len(tupla)):
# print(tupla[j])
if type(tupla[j]) == bytes:
tupla[j] = zlib.decompress(tupla[j]).decode()
tabla[i] = tupla
tuples=tabla
return tuples
# extracts and returns the records that fall within the given range
def extractRangeTable(database: str, table: str, columnNumber: int, lower: any, upper: any) -> list:
try:
encoding = databasesinfo[0][database]['encoding']
try:
lower = lower.encode()
lower = lower.decode(encoding)
upper = upper.encode()
upper = upper.decode(encoding)
except:
return []
tuples = []
if database not in databasesinfo[0]:
return []
if table not in databasesinfo[1][database]:
return []
if databasesinfo[1][database][table]['mode'] == 'avl':
tuples = AVLM.extractRangeTable(database, table, columnNumber, lower, upper)
elif databasesinfo[1][database][table]['mode'] == 'b':
tuples = BM.extractRangeTable(database, table, columnNumber, lower, upper)
elif databasesinfo[1][database][table]['mode'] == 'bplus':
tuples = BPlusM.extractRangeTable(database, table, columnNumber, lower, upper)
elif databasesinfo[1][database][table]['mode'] == 'dict':
tuples = DictM.extractRangeTable(database, table, columnNumber, lower, upper)
elif databasesinfo[1][database][table]['mode'] == 'isam':
tuples = ISAMM.extractRangeTable(database, table, columnNumber, lower, upper)
elif databasesinfo[1][database][table]['mode'] == 'json':
tuples = jsonM.extractRangeTable(database, table, lower, upper)
elif databasesinfo[1][database][table]['mode'] == 'hash':
tuples = HashM.extractRangeTable(database, table, columnNumber, lower, upper)
if databasesinfo[1][database][table]['Compress'] == True:
tabla = tuples
for i in range(0, len(tabla)):
tupla = tabla[i]
for j in range(0, len(tupla)):
if type(tupla[j]) == bytes:
tupla[j] = zlib.decompress(tupla[j]).decode()
tabla[i] = tupla
tuples=tabla
return tuples
except:
return []
# binds a new PK to the table and all of its records
def alterAddPK(database: str, table: str, columns: list) -> int:
try:
result = 0
if database not in databasesinfo[0]:
result = 2
elif table not in databasesinfo[1][database]:
result = 3
elif databasesinfo[1][database][table]['mode'] == 'avl':
result = AVLM.alterAddPK(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'b':
result = BM.alterAddPK(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'bplus':
result = BPlusM.alterAddPK(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'dict':
result = DictM.alterAddPK(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'isam':
result = ISAMM.alterAddPK(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'json':
result = jsonM.alterAddPK(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'hash':
result = HashM.alterAddPK(database, table, columns)
if result == 0:
databasesinfo[1][database][table]['PK'] = columns
return result
except:
return 1
# removes the PK binding
def alterDropPK(database: str, table: str) -> int:
try:
result = 0
if database not in databasesinfo[0]:
result = 2
elif table not in databasesinfo[1][database]:
result = 3
elif databasesinfo[1][database][table]['mode'] == 'avl':
result = AVLM.alterDropPK(database, table)
elif databasesinfo[1][database][table]['mode'] == 'b':
result = BM.alterDropPK(database, table)
elif databasesinfo[1][database][table]['mode'] == 'bplus':
result = BPlusM.alterDropPK(database, table)
elif databasesinfo[1][database][table]['mode'] == 'dict':
result = DictM.alterDropPK(database, table)
elif databasesinfo[1][database][table]['mode'] == 'isam':
result = ISAMM.alterDropPK(database, table)
elif databasesinfo[1][database][table]['mode'] == 'json':
result = jsonM.alterDropPK(database, table)
elif databasesinfo[1][database][table]['mode'] == 'hash':
result = HashM.alterDropPK(database, table)
if result == 0:
databasesinfo[1][database][table]['PK'] = None
return result
except:
return 1
# binds a FK between two tables
def alterTableAddFK(database: str, table: str, indexName: str, columns: list, tableRef: str, columnsRef: list) -> int:
try:
result = 0
if database not in databasesinfo[0]:
result = 2
elif table not in databasesinfo[1][database] or tableRef not in databasesinfo[1][database]:
result = 3
else:
if len(columns) >= 1 and len(columnsRef)>= 1:
if len(columns) == len(columnsRef):
tableColumns = databasesinfo[1][database][table]['numberColumns']
tableColumnsRef = databasesinfo[1][database][tableRef]['numberColumns']
col1 = True
col2 = True
for values in columns:
if values >= tableColumns:
col1 = False
break
for values in columnsRef:
if values >= tableColumnsRef:
col2 = False
break
if col1 and col2:
register1 = extractTable(database, table)
register2 = extractTable(database, tableRef)
if len(register1) == 0 and len(register2) == 0:
res = createTable(database, table + 'FK', 2)
if res == 0:
res1 = insert(database, table + 'FK', [tableRef, columnsRef])
if res1 == 0:
dictFK = {indexName: {'columns': columns}}
FKey = {'FK': dictFK}
databasesinfo[1][database][table].update(FKey)
commit(databasesinfo, 'databasesInfo')
else:
result = 1
else:
result = 1
else:
if len(register1) > 0 and len(register2) == 0:
result = 1
else:
Values1 = []
Rep = True
for value in register2:
Fk1 = ''
for i in columns:
if i == len(value) - 1:
Fk1 = Fk1 + value[i]
else:
Fk1 = Fk1 + value[i] + '_'
if Fk1 in Values1:
Rep = False
break
else:
Values1.append(Fk1)
if Rep:
Val1 = True
for value in register1:
Fk2 = ''
for i in columnsRef:
if i == len(value) - 1:
Fk2 = Fk2 + value[i]
else:
Fk2 = Fk2 + value[i] + '_'
if Fk2 not in Values1:
Val1 = False
break
if Val1:
res = createTable(database,table + 'FK',3)
if res == 0:
res1 = insert(database,table+'FK',[indexName,tableRef,columnsRef])
if res1 == 0:
dictFK = {indexName: {'columns':columns}}
FKey = {'FK': dictFK}
databasesinfo[1][database][table].update(FKey)
commit(databasesinfo,'databasesInfo')
else: result = 1
else:
result = 1
else:
result = 5
else:
result = 1
else:
result = 1
else:
result = 4
else:
result = 1
return result
except:
return 1
# removes a FK binding between the tables
def alterTableDropFK(database: str, table: str, indexName: str) -> int:
try:
result = 0
if database not in databasesinfo[0]:
result = 2
elif table not in databasesinfo[1][database]:
result = 3
else:
if 'FK' in databasesinfo[1][database][table]:
if indexName in databasesinfo[1][database][table]['FK']:
res = dropTable(database, table+'FK')
if res == 0:
del databasesinfo[1][database][table]['FK'][indexName]
                        commit(databasesinfo, 'databasesInfo')
result = 0
else:
result = 1
else:
result = 4
else:
result = 1
return result
except:
return 1
# binds a unique index to the table
def alterTableAddUnique(database: str, table: str, indexName: str, columns: list) -> int:
try:
result = 0
if database not in databasesinfo[0]:
result = 2
elif table not in databasesinfo[1][database]:
result = 3
else:
if len(columns) >= 1:
tableColumns = databasesinfo[1][database][table]['numberColumns']
col1 = True
for values in columns:
if values >= tableColumns:
col1 = False
break
if col1:
registers = extractTable(database,table)
if len(registers) == 0:
res = createTable(database, table + 'IndexUnique', 2)
if res == 0:
res1 = insert(database, table + 'IndexUnique', [indexName,columns])
if res1 == 0:
dictIU = {indexName: {'columns': columns}}
IndexU = {'IndexUnique': dictIU}
databasesinfo[1][database][table].update(IndexU)
commit(databasesinfo, 'databasesInfo')
else:
result = 1
else:
result = 1
else:
tableColumns = databasesinfo[1][database][table]['numberColumns']
col1 = True
for values in columns:
if values >= tableColumns:
col1 = False
break
if col1:
Values1 = []
Rep = True
for value in registers:
Fk1 = ''
for i in columns:
if i == len(value) - 1:
Fk1 = Fk1 + value[i]
else:
Fk1 = Fk1 + value[i] + '_'
if Fk1 in Values1:
Rep = False
break
else:
Values1.append(Fk1)
if Rep:
res = createTable(database, table + 'IndexUnique', 2)
if res == 0:
res1 = insert(database, table + 'IndexUnique', [indexName, columns])
if res1 == 0:
dictIU = {indexName: {'columns': columns}}
IndexU = {'IndexUnique': dictIU}
databasesinfo[1][database][table].update(IndexU)
commit(databasesinfo, 'databasesInfo')
else:
result = 1
else:
result = 1
else:
result = 5
else:
result = 1
else:
result = 1
else:
result = 1
return result
except:
return 1
# removes the unique index link from the table
def alterTableDropUnique(database: str, table: str, indexName: str) -> int:
try:
result = 0
if database not in databasesinfo[0]:
result = 2
elif table not in databasesinfo[1][database]:
result = 3
else:
if 'IndexUnique' in databasesinfo[1][database][table]:
if indexName in databasesinfo[1][database][table]['IndexUnique']:
res = dropTable(database, table + 'IndexUnique')
if res == 0:
del databasesinfo[1][database][table]['IndexUnique'][indexName]
commit(databasesinfo, 'databasesinfo')
result = 0
else:
result = 1
else:
result = 4
else:
result = 1
return result
except:
return 1
# attaches an index to the columns of a table
def alterTableAddIndex(database: str, table: str, indexName: str, columns: list) -> int:
try:
result = 0
if database not in databasesinfo[0]:
result = 2
elif table not in databasesinfo[1][database]:
result = 3
else:
if len(columns) >= 1:
tableColumns = databasesinfo[1][database][table]['numberColumns']
col1 = True
for values in columns:
if values >= tableColumns:
col1 = False
break
if col1:
res = createTable(database, table + 'Index', 2)
if res == 0:
res1 = insert(database, table + 'Index', [indexName, columns])
if res1 == 0:
dictI = {indexName: {'columns': columns}}
Index = {'Index': dictI}
databasesinfo[1][database][table].update(Index)
commit(databasesinfo, 'databasesInfo')
else:
result = 1
else:
result = 1
else:
result = 1
else:
result = 1
return result
except:
return 1
# removes the index from a table
def alterTableDropIndex(database: str, table: str, indexName: str) -> int:
try:
result = 0
if database not in databasesinfo[0]:
result = 2
elif table not in databasesinfo[1][database]:
result = 3
else:
if 'Index' in databasesinfo[1][database][table]:
if indexName in databasesinfo[1][database][table]['Index']:
res = dropTable(database, table + 'Index')
if res == 0:
del databasesinfo[1][database][table]['Index'][indexName]
commit(databasesinfo, 'databasesinfo')
result = 0
else:
result = 1
else:
result = 4
else:
result = 1
return result
except:
return 1
# renames a table
def alterTable(database: str, tableOld: str, tableNew: str) -> int:
try:
result = 0
if database not in databasesinfo[0]:
result = 2
elif tableOld not in databasesinfo[1][database]:
result = 3
elif tableNew in databasesinfo[1][database]:
result = 4
elif databasesinfo[1][database][tableOld]['mode'] == 'avl':
result = AVLM.alterTable(database, tableOld, tableNew)
elif databasesinfo[1][database][tableOld]['mode'] == 'b':
result = BM.alterTable(database, tableOld, tableNew)
elif databasesinfo[1][database][tableOld]['mode'] == 'bplus':
result = BPlusM.alterTable(database, tableOld, tableNew)
elif databasesinfo[1][database][tableOld]['mode'] == 'dict':
result = DictM.alterTable(database, tableOld, tableNew)
elif databasesinfo[1][database][tableOld]['mode'] == 'isam':
result = ISAMM.alterTable(database, tableOld, tableNew)
elif databasesinfo[1][database][tableOld]['mode'] == 'json':
result = jsonM.alterTable(database, tableOld, tableNew)
elif databasesinfo[1][database][tableOld]['mode'] == 'hash':
result = HashM.alterTable(database, tableOld, tableNew)
if result == 0:
databasesinfo[1][database][tableNew] = databasesinfo[1][database][tableOld]
del databasesinfo[1][database][tableOld]
commit(databasesinfo, 'databasesinfo')
return result
except:
return 1
# changes the storage mode of a specific table
def alterTableMode(database: str, table: str, mode: str) -> int:
modes = ['avl', 'b', 'bplus', 'dict', 'isam', 'json', 'hash']
try:
if database not in databasesinfo[0]:
return 2
elif table not in databasesinfo[1][database]:
return 3
elif mode not in modes:
return 4
else:
if databasesinfo[1][database][table]['mode'] == mode:
return 1
else:
registers = extractTable(database, table)
numberColumns = databasesinfo[1][database][table]['numberColumns']
PK = None
if databasesinfo[1][database][table]['PK'] is not None:
PK = databasesinfo[1][database][table]['PK']
dropTable(database, table)
if mode == 'avl':
AVLM.createDatabase(database)
AVLM.createTable(database, table, numberColumns)
elif mode == 'b':
BM.createDatabase(database)
BM.createTable(database, table, numberColumns)
elif mode == 'bplus':
BPlusM.createDatabase(database)
BPlusM.createTable(database, table, numberColumns)
elif mode == 'dict':
DictM.createDatabase(database)
DictM.createTable(database, table, numberColumns)
elif mode == 'isam':
ISAMM.createDatabase(database)
ISAMM.createTable(database, table, numberColumns)
elif mode == 'json':
jsonM.createDatabase(database)
jsonM.createTable(database, table, numberColumns)
elif mode == 'hash':
HashM.createDatabase(database)
HashM.createTable(database, table, numberColumns)
databasesinfo[1][database].update(
{table: {'mode': mode, 'numberColumns': numberColumns, 'PK': None, 'safeMode': False, 'Compress': False}})
commit(databasesinfo, 'databasesinfo')
if PK is not None:
alterAddPK(database, table, PK)
for register in registers:
insert(database, table, register)
databasesinfo[2].update({database: []})
if len(databasesinfo[2][database]) == 0:
databasesinfo[2][database].append(databasesinfo[0][database]['mode'])
if mode not in databasesinfo[2][database]:
databasesinfo[2][database].append(mode)
return 0
except:
return 1
# adds a column to a table
def alterAddColumn(database: str, table: str, default: any) -> int:
try:
result = 0
if database not in databasesinfo[0]:
result = 2
elif table not in databasesinfo[1][database]:
result = 3
else:
try:
encoding = databasesinfo[0][database]['encoding']
if isinstance(default, str):
default = default.encode()
default = default.decode(encoding)
except:
return 1
if databasesinfo[1][database][table]['mode'] == 'avl':
result = AVLM.alterAddColumn(database, table, default)
elif databasesinfo[1][database][table]['mode'] == 'b':
result = BM.alterAddColumn(database, table, default)
elif databasesinfo[1][database][table]['mode'] == 'bplus':
result = BPlusM.alterAddColumn(database, table, default)
elif databasesinfo[1][database][table]['mode'] == 'dict':
result = DictM.alterAddColumn(database, table, default)
elif databasesinfo[1][database][table]['mode'] == 'isam':
result = ISAMM.alterAddColumn(database, table, default)
elif databasesinfo[1][database][table]['mode'] == 'json':
result = jsonM.alterAddColumn(database, table, default)
elif databasesinfo[1][database][table]['mode'] == 'hash':
result = HashM.alterAddColumn(database, table, default)
return result
except:
return 1
# drops a column
def alterDropColumn(database: str, table: str, columnNumber: int) -> int:
try:
result = 0
if database not in databasesinfo[0]:
result = 2
elif table not in databasesinfo[1][database]:
result = 3
elif databasesinfo[1][database][table]['mode'] == 'avl':
result = AVLM.alterDropColumn(database, table, columnNumber)
elif databasesinfo[1][database][table]['mode'] == 'b':
result = BM.alterDropColumn(database, table, columnNumber)
elif databasesinfo[1][database][table]['mode'] == 'bplus':
result = BPlusM.alterDropColumn(database, table, columnNumber)
elif databasesinfo[1][database][table]['mode'] == 'dict':
result = DictM.alterDropColumn(database, table, columnNumber)
elif databasesinfo[1][database][table]['mode'] == 'isam':
result = ISAMM.alterDropColumn(database, table, columnNumber)
elif databasesinfo[1][database][table]['mode'] == 'json':
result = jsonM.alterDropColumn(database, table, columnNumber)
elif databasesinfo[1][database][table]['mode'] == 'hash':
result = HashM.alterDropColumn(database, table, columnNumber)
return result
except:
return 1
# drops the table
def dropTable(database: str, table: str) -> int:
try:
result = 0
if database not in databasesinfo[0]:
result = 2
elif table not in databasesinfo[1][database]:
result = 3
elif databasesinfo[1][database][table]['mode'] == 'avl':
result = AVLM.dropTable(database, table)
elif databasesinfo[1][database][table]['mode'] == 'b':
result = BM.dropTable(database, table)
elif databasesinfo[1][database][table]['mode'] == 'bplus':
result = BPlusM.dropTable(database, table)
elif databasesinfo[1][database][table]['mode'] == 'dict':
result = DictM.dropTable(database, table)
elif databasesinfo[1][database][table]['mode'] == 'isam':
result = ISAMM.dropTable(database, table)
elif databasesinfo[1][database][table]['mode'] == 'json':
result = jsonM.dropTable(database, table)
elif databasesinfo[1][database][table]['mode'] == 'hash':
result = HashM.dropTable(database, table)
if result == 0:
del databasesinfo[1][database][table]
commit(databasesinfo, 'databasesInfo')
return result
except:
return 1
# inserts records
def insert(database: str, table: str, register: list) -> int:
result = 0
if database not in databasesinfo[0]:
result = 2
elif table not in databasesinfo[1][database]:
result = 3
else:
try:
encoding = databasesinfo[0][database]['encoding']
for i in register:
if isinstance(i, str):
ind = register.index(i)
x = i.encode()
register[ind] = x.decode(encoding)
except:
return 1
if databasesinfo[1][database][table]['Compress'] == True:
for i in range(0, len(register)):
if type(register[i]) == str:
register[i] = zlib.compress(bytes(register[i].encode()))
if databasesinfo[1][database][table]['mode'] == 'avl':
result = AVLM.insert(database, table, register)
elif databasesinfo[1][database][table]['mode'] == 'b':
result = BM.insert(database, table, register)
elif databasesinfo[1][database][table]['mode'] == 'bplus':
result = BPlusM.insert(database, table, register)
elif databasesinfo[1][database][table]['mode'] == 'dict':
result = DictM.insert(database, table, register)
elif databasesinfo[1][database][table]['mode'] == 'isam':
result = ISAMM.insert(database, table, register)
elif databasesinfo[1][database][table]['mode'] == 'json':
result = jsonM.insert(database, table, register)
elif databasesinfo[1][database][table]['mode'] == 'hash':
result = HashM.insert(database, table, register)
if result == 0 and databasesinfo[1][database][table]['safeMode']:
insert_block(database, table, register)
return result
# bulk-loads CSV files into the tables
def loadCSV(file: str, database: str, table: str) -> list:
try:
res = []
import csv
with open(file, 'r') as f:
reader = csv.reader(f, delimiter=',')
for row in reader:
res.append(insert(database, table, row))
return res
except:
return []
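# Usage sketch (hypothetical file/table names; both must already exist):
#   codes = loadCSV('clients.csv', 'db1', 'clients')
#   # codes holds one insert() return value per CSV row; 0 means the row was stored.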
# method that returns the information of a single record
def extractRow(database: str, table: str, columns: list) -> list:
try:
try:
encoding = databasesinfo[0][database]['encoding']
for i in columns:
if isinstance(i, str):
ind = columns.index(i)
x = i.encode()
columns[ind] = x.decode(encoding)
except:
return []
result = []
if database not in databasesinfo[0]:
result = []
elif table not in databasesinfo[1][database]:
result = []
elif databasesinfo[1][database][table]['mode'] == 'avl':
result = AVLM.extractRow(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'b':
result = BM.extractRow(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'bplus':
result = BPlusM.extractRow(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'dict':
result = DictM.extractRow(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'isam':
result = ISAMM.extractRow(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'json':
result = jsonM.extractRow(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'hash':
result = HashM.extractRow(database, table, columns)
if databasesinfo[1][database][table]['Compress'] == True:
tabla = result
for i in range(0, len(tabla)):
tupla = tabla[i]
for j in range(0, len(tupla)):
# print(tupla[j])
if type(tupla[j]) == bytes:
tupla[j] = zlib.decompress(tupla[j]).decode()
tabla[i] = tupla
tuples = result
return result
except:
return []
# method that updates the values of a record
def update(database: str, table: str, register: dict, columns: list) -> int:
try:
result = 0
if database not in databasesinfo[0]:
result = 2
elif table not in databasesinfo[1][database]:
result = 3
elif databasesinfo[1][database][table]['Compress'] == True:
return 1
else:
try:
encoding = databasesinfo[0][database]['encoding']
for i in register:
if isinstance(register[i], str):
x = register[i].encode()
register[i] = x.decode(encoding)
except:
return 1
oldRegister = None
if databasesinfo[1][database][table]['safeMode']:
oldRegister = extractRow(database, table, columns)
if databasesinfo[1][database][table]['mode'] == 'avl':
result = AVLM.update(database, table, register, columns)
elif databasesinfo[1][database][table]['mode'] == 'b':
result = BM.update(database, table, register, columns)
elif databasesinfo[1][database][table]['mode'] == 'bplus':
result = BPlusM.update(database, table, register, columns)
elif databasesinfo[1][database][table]['mode'] == 'dict':
result = DictM.update(database, table, register, columns)
elif databasesinfo[1][database][table]['mode'] == 'isam':
result = ISAMM.update(database, table, register, columns)
elif databasesinfo[1][database][table]['mode'] == 'json':
result = jsonM.update(database, table, register, columns)
elif databasesinfo[1][database][table]['mode'] == 'hash':
result = HashM.update(database, table, register, columns)
if databasesinfo[1][database][table]['safeMode'] and result == 0:
newRegister = extractRow(database, table, columns)
update_block(database,table, newRegister, oldRegister)
return result
except:
return 1
# method that deletes a record
def delete(database: str, table: str, columns: list) -> int:
try:
result = 0
if database not in databasesinfo[0]:
result = 2
elif table not in databasesinfo[1][database]:
result = 3
elif databasesinfo[1][database][table]['Compress'] == True:
return 1
else:
try:
encoding = databasesinfo[0][database]['encoding']
for i in columns:
ind = columns.index(i)
if isinstance(columns[ind], str):
x = columns[ind].encode()
columns[ind] = x.decode(encoding)
except:
return 1
if databasesinfo[1][database][table]['mode'] == 'avl':
result = AVLM.delete(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'b':
result = BM.delete(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'bplus':
result = BPlusM.delete(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'dict':
result = DictM.delete(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'isam':
result = ISAMM.delete(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'json':
result = jsonM.delete(database, table, columns)
elif databasesinfo[1][database][table]['mode'] == 'hash':
result = HashM.delete(database, table, columns)
return result
except:
return 1
# method that deletes every record in a table
def truncate(database: str, table: str) -> int:
try:
result = 0
if database not in databasesinfo[0]:
result = 2
elif table not in databasesinfo[1][database]:
result = 3
elif databasesinfo[1][database][table]['mode'] == 'avl':
result = AVLM.truncate(database, table)
elif databasesinfo[1][database][table]['mode'] == 'b':
result = BM.truncate(database, table)
elif databasesinfo[1][database][table]['mode'] == 'bplus':
result = BPlusM.truncate(database, table)
elif databasesinfo[1][database][table]['mode'] == 'dict':
result = DictM.truncate(database, table)
elif databasesinfo[1][database][table]['mode'] == 'isam':
result = ISAMM.truncate(database, table)
elif databasesinfo[1][database][table]['mode'] == 'json':
result = jsonM.truncate(database, table)
elif databasesinfo[1][database][table]['mode'] == 'hash':
result = HashM.truncate(database, table)
return result
except:
return 1
# generates the checksum of a database
def checksumDatabase(database: str, mode: str) -> str:
try:
if database in databasesinfo[0]:
if mode.lower() == 'md5':
hash = hashlib.md5()
elif mode.lower() == 'sha256':
hash = hashlib.sha256()
else:
return None
for key, value in list(databasesinfo[1][database].items()):
if value['mode'] == 'avl':
hash.update(open('data/avlMode/' + database + '_' + key + '.tbl', 'rb').read())
elif value['mode'] == 'b':
hash.update(open('data/BMode/' + database + '-' + key + '-' + 'b'+'.bin', 'rb').read())
elif value['mode'] == 'bplus':
hash.update(open('data/BPlusMode/' + database + '/' + key + '/' + key + '.bin', 'rb').read())
elif value['mode'] == 'dict':
hash.update(open('data/' + database + '/' + key + '.bin', 'rb').read())
elif value['mode'] == 'isam':
hash.update(open('data/ISAMMode/tables/' + database + key + '.bin', 'rb').read())
elif value['mode'] == 'json':
hash.update(open('data/json/' + database + '-' + key, 'rb').read())
elif value['mode'] == 'hash':
hash.update(open('data/hash/' + database + '/' + key + '.bin', 'rb').read())
return hash.hexdigest()
except:
return None
# generates the checksum of a specific table
def checksumTable(database: str, table: str, mode: str) -> str:
try:
if database in databasesinfo[0]:
if mode.lower() == 'md5':
hash = hashlib.md5()
elif mode.lower() == 'sha256':
hash = hashlib.sha256()
else:
return None
if databasesinfo[1][database][table]['mode'] == 'avl':
hash.update(open('data/avlMode/' + database + '_' + table+'.tbl', 'rb').read())
elif databasesinfo[1][database][table]['mode'] == 'b':
hash.update(open('data/BMode/' + database + '-' + table + '-b' + '.bin', 'rb').read())
elif databasesinfo[1][database][table]['mode'] == 'bplus':
hash.update(open('data/BPlusMode/' + database + '/' + table + '/' + table + '.bin', 'rb').read())
elif databasesinfo[1][database][table]['mode'] == 'dict':
hash.update(open('data/' + database + '/' + table + '.bin', 'rb').read())
elif databasesinfo[1][database][table]['mode'] == 'isam':
hash.update(open('data/ISAMMode/tables/' + database + table + '.bin', 'rb').read())
elif databasesinfo[1][database][table]['mode'] == 'json':
hash.update(open('data/json/' + database + '-' + table, 'rb').read())
elif databasesinfo[1][database][table]['mode'] == 'hash':
hash.update(open('data/hash/' + database + '/' + table +'.bin', 'rb').read())
return hash.hexdigest()
except:
return None
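# Usage sketch: detect table changes by comparing digests (hypothetical names):
#   before = checksumTable('db1', 'clients', 'sha256')
#   insert('db1', 'clients', ['1', 'Alice'])
#   after = checksumTable('db1', 'clients', 'sha256')
#   # before != after once the table's storage file has changed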
# compresses a table
def alterTableCompress(database: str, table: str, level: int) -> int:
if database not in databasesinfo[0]:
return 2
if level<-1 or level>9:
return 4
if databasesinfo[1][database][table]['mode'] == 'json':
return 1
tablas = showTables(database)
if table not in tablas:
return 2
try:
        tabla = extractTable(database, table)
        for i in range(0, len(tabla)):
            tupla = tabla[i]
            for j in range(0, len(tupla)):
                if type(tupla[j]) == str:
                    # ---- this is where the compression happens ----
                    tupla[j] = zlib.compress(bytes(tupla[j].encode()), level)
                elif type(tupla[j]) == bytes:
                    tupla[j] = zlib.compress(tupla[j], level)
            tabla[i] = tupla
        databasesinfo[1][database][table]['Compress'] = True
        commit(databasesinfo, 'databasesinfo')
        truncate(database, table)
        for tupla in tabla:
            insert(database, table, tupla)
return 0
except:
return 1
# compresses an entire database
def alterDatabaseCompress(database: str, level: int) -> int:
if database not in databasesinfo[0]:
return 2
if level<-1 or level>9:
return 4
try:
        tablas = showTables(database)
        compreso = 0
        for tabla in tablas:
            compreso += alterTableCompress(database, tabla, level)
        if compreso == 0:
            return 0
        else:
            return 1
except:
return 1
# decompresses a data table
def alterTableDecompress(database: str, table: str) -> int:
if database not in databasesinfo[0]:
return 2
    if databasesinfo[1][database][table]['Compress'] != True:
return 3
if databasesinfo[1][database][table]['mode'] == 'json':
return 1
tablas = showTables(database)
if table not in tablas:
return 1
try:
        tabla = extractTable(database, table)
        for i in range(0, len(tabla)):
            tupla = tabla[i]
            for j in range(0, len(tupla)):
                if type(tupla[j]) == bytes:
                    # ---- this is where the decompression happens ----
                    tupla[j] = zlib.decompress(tupla[j]).decode()
            tabla[i] = tupla
        databasesinfo[1][database][table]['Compress'] = False
        commit(databasesinfo, 'databasesinfo')
        truncate(database, table)
        for tupla in tabla:
            insert(database, table, tupla)
return 0
except:
return 1
# decompresses an entire database
def alterDatabaseDecompress(database: str) -> int:
if database not in databasesinfo[0]:
return 2
    tablas = showTables(database)
    compresion = False
    for table in tablas:
        if databasesinfo[1][database][table]['Compress'] == True:
            compresion = True
    if compresion == False:
        return 3
try:
for table in tablas:
if databasesinfo[1][database][table]['Compress'] == True:
alterTableDecompress(database,table)
return 0
except:
return 1
# returns an encrypted text
def encrypt(backup: str, password: str) -> str:
return _encrypt(backup, password)
# returns a decrypted text
def decrypt(cipherBackup: str, password: str) -> str:
return _decrypt(cipherBackup, password)
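# Round-trip sketch, assuming _encrypt/_decrypt are symmetric for one password:
#   token = encrypt('backup-contents', 's3cret')
#   assert decrypt(token, 's3cret') == 'backup-contents'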
# enables a table's safe mode and creates a json file for it
def safeModeOn(database: str, table: str) -> int:
try:
databasesinfo[1][database][table]['safeMode'] = True
turn_on_safe_mode(database, table)
return 0
except:
return 1
# disables the table's safe mode and deletes its json file
def safeModeOff(database: str, table: str) -> int:
try:
databasesinfo[1][database][table]['safeMode'] = False
turn_off_safe_mode(database, table)
return 0
except:
return 1
# draws the structure diagram (DSD) of a database
def graphDSD(database: str) -> str:
try:
result = 0
if database not in databasesinfo[0]:
result = None
else:
content = 'digraph GraphDatabase{\n' \
' rankdir=LR\n' \
' nodesep=.05;\n' \
' node [shape=record,width=.1,height=.1];\n' \
' subgraph cluster0{\n'\
' label="'+ database +'";\n'
tables = showTables(database)
for table in tables:
newTB = table + 'FK'
if newTB in tables:
tables.remove(newTB)
for table in tables:
if table+'FK' not in tables:
content += ' '+table +'[label= "'+ table +'"]\n'
for table in tables:
if 'FK' in databasesinfo[1][database][table] and len(databasesinfo[1][database][table]['FK']) > 0:
references = extractTable(database,table+'FK')
for num in references:
ref = ' '+str(num[1]) + '->' + table +'\n'
content += ref
content += ' }\n' \
'}'
diagram = open(database+'DSD.dot','w')
diagram.write(content)
result = diagram.name
diagram.close()
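            # Rendering to PNG requires the Graphviz 'dot' binary to be on PATH.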
os.system("dot -Tpng "+ database +"DSD.dot -o "+ database +"DSD.png")
return result
except:
return 1
# draws the dependency diagram of a table
def graphDF(database: str, table: str) -> str:
try:
result = 0
if database not in databasesinfo[0]:
result = None
elif table not in databasesinfo[1][database]:
result = None
else:
content = 'digraph GraphDatabase{\n' \
' rankdir=LR\n' \
' nodesep=.05;\n' \
' node [shape=record,width=.1,height=.1];\n' \
' subgraph cluster0{\n' \
' label="' + database.upper() + '-' + table.upper() + '";\n'
nodos = []
cols = []
columns = databasesinfo[1][database][table]['numberColumns']
PK = databasesinfo[1][database][table]['PK']
IndexUnique = None
if 'IndexUnique' in databasesinfo[1][database][table]:
for indexU in databasesinfo[1][database][table]['IndexUnique']:
IndexUnique = databasesinfo[1][database][table]['IndexUnique'][indexU]['columns']
if PK is not None:
content += ' subgraph cluster1{\n' \
' label = "PK"\n'
label = '[label = "'
for i in PK:
nodoName = 'nodo' + str(i) + '_PK'
nodos.append(nodoName)
if i == PK[-1]:
label += '<' + nodoName + '>'+ str(i) + ''
label += '"];'
content += ' PK' + label + '\n'
content += ' }\n'
else:
label += '<' + nodoName + '>'+ str(i) + '|'
if IndexUnique is not None:
content += ' subgraph cluster3{\n' \
' label = "IndexUnique"\n'
label = '[label = "'
for i in IndexUnique:
nodoName = 'nodo' + str(i) + '_IndexUnique'
nodos.append(nodoName)
if i == IndexUnique[-1]:
label += '<' + nodoName + '>' + str(i) + ''
label += '"];'
content += ' IndexUnique' + label + '\n'
content += ' }\n'
else:
label += '<' + nodoName + '>' + str(i) + '|'
for i in range(columns):
cols.append(i)
reg = []
content += ' subgraph cluster4{\n' \
' label = "Registers"\n'
label = '[label = "'
for i in cols:
nodePK = 'nodo' + str(i) + '_PK'
nodeIndexUnique = 'nodo' + str(i) + '_IndexUnique'
nodoName = 'nodo' + str(i) +'_Reg'
if nodePK not in nodos and nodeIndexUnique not in nodos:
reg.append(nodoName)
if i == cols[-1]:
label += '<' + nodoName + '>' + str(i) + ''
label += '"];'
content += ' Register' + label + '\n'
content += ' }\n'
else:
label += '<' + nodoName + '>' + str(i) + '|'
for arrows in nodos:
arr1 = arrows.split('_')
if arr1[1] == 'PK':
for tuple in reg:
direction = 'PK:' + arrows + '->' + 'Register:' + tuple + '\n'
content += direction
if arr1[1] == 'IndexUnique':
for tuple in reg:
direction = 'IndexUnique:' + arrows + '->' + 'Register:' + tuple + '\n'
content += direction
content += ' }\n' \
'}'
diagram = open(database + '-' + table + 'DF.dot', 'w')
diagram.write(content)
result = diagram.name
diagram.close()
os.system("dot -Tpng " + database + '-' + table + "DF.dot -o " + database + '-' + table + "DF.png")
return result
except:
return 1
```
#### File: storage/misc/checksum.py
```python
import hashlib
from storage import TytusStorage as h
def checksumDatabase(database, mode):
try:
temp=""
if h._database(database):
temp += "[-"+database+"-]:"
if h.showTables(database):
for i in h.showTables(database):
temp += "["+i+"]:"
if h.extractTable(database,i):
for j in h.extractTable(database,i):
if j:
for k in j:
temp += str(k) + ","
else:
temp+="None,"
else:
return None
if temp:
if mode == "MD5":
hash = hashlib.md5(temp.encode(h._database(database)["encoding"])).hexdigest()
return hash
elif mode == "SHA256":
hash = hashlib.sha256(temp.encode(h._database(database)["encoding"])).hexdigest()
return hash
else:
return None #return 3
else:
return None
else:
return None #return 2
except:
return None #return 1
def checksumTable(database, table, mode):
try:
temp = ""
if h._database(database):
temp += "[-" + database + "-]:"
for i in h.showTables(database):
if i == table:
temp += "[-" + i + "-]:"
if h.extractTable(database, i):
for j in h.extractTable(database, i):
for k in j:
temp += str(k)+","
else:
temp+="None"
if mode == "MD5":
hash = hashlib.md5(temp.encode(h._database(database)["encoding"])).hexdigest()
return hash
elif mode == "SHA256":
hash = hashlib.sha256(temp.encode(h._database(database)["encoding"])).hexdigest()
return hash
else:
return None
else:
return None
except:
return None
``` |
{
"source": "josugoar/human-benchmark",
"score": 2
} |
#### File: model/scripts/fetch.py
```python
import asyncio
import glob
import pathlib
import random
from os import path
from typing import Callable, Optional, Union
import chess
import numpy as np
from chess import engine, pgn
from utils import bitboard, moves, synchronize, tanh
_path: Callable[[str], str] = lambda p, /: path.join(path.dirname(path.realpath(__file__)), p)
async def fetch(file: Union[str, bytes, int], /, *, checkpoint: Optional[int] = None) -> None:
kwds = {x: [] for x in ("X", "y_1", "y_2")}
_, uci_protocol = await engine.popen_uci(_path("../lib/stockfish/stockfish"))
with open(file) as f:
savez = lambda: np.savez(_path(f"../data/npz/{pathlib.PurePath(f.name).stem}"), **kwds)
while True:
try:
try:
play_result = await uci_protocol.play(
board := random.choice(tuple(pgn.read_game(f).mainline())).board(),
limit=engine.Limit(time=.1),
info=engine.INFO_SCORE
)
except AttributeError:
break
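                # When it is black to move, the move is rotated 180 degrees
                # (63 - square), apparently so every training example is encoded
                # from the side to move's point of view.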
for kwd, x in zip(kwds.values(), (
bitboard(
board,
dtype=int
),
moves.index(
(play_result.move if board.turn else chess.Move(
*(len(chess.SQUARES) - np.array((
play_result.move.from_square,
play_result.move.to_square
)) - 1),
promotion=play_result.move.promotion
)).uci()
),
tanh(
play_result.info["score"].relative.score(
mate_score=7625
),
k=.0025
)
)):
kwd.append(x)
except (AttributeError, IndexError, ValueError):
continue
if checkpoint and not len(kwds["X"]) % checkpoint:
savez()
savez()
await uci_protocol.quit()
async def main() -> None:
semaphore = asyncio.Semaphore(value=3)
await asyncio.gather(*(
synchronize(semaphore)(fetch)(
file, checkpoint=10000
) for file in glob.glob(_path("../data/*.pgn"))
))
if __name__ == "__main__":
asyncio.set_event_loop_policy(engine.EventLoopPolicy())
asyncio.run(main())
``` |
{
"source": "JosunLP/OSZ_chat_ITS",
"score": 3
} |
#### File: JosunLP/OSZ_chat_ITS/echoclient.py
```python
import socket
import sys
import threading
from datetime import datetime
from helper import MessageParser
from helper import const
from model import Message
HOST = const.HOST
PORT = const.PORT
CURSOR_UP_ONE = '\x1b[1A'
ERASE_LINE = '\x1b[2K'
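# ANSI escape sequences used to erase the locally echoed input line after sending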
def message_listener(s: socket.socket):
try:
while True:
data = s.recv(1024)
if not data:
break
m: Message.Message = MessageParser.byte_array_to_message(data)
print(m.msg)
except (ConnectionAbortedError, ConnectionResetError):
print("Connection with server closed!")
print("Welcome to ChatZ")
UNAME = input("What's your name? :")
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.connect((HOST, PORT))
thread = threading.Thread(target=message_listener, args=[s])
thread.start()
m = Message.Message(datetime.now().strftime(const.TIME_FILTER), "LOGIN", UNAME)
send = MessageParser.message_to_byte_array(m)
s.send(send)
while True:
string = input("$~ ")
sys.stdout.write(CURSOR_UP_ONE)
sys.stdout.write(ERASE_LINE)
if "!logout" in string:
m = Message.Message(datetime.now().strftime(const.TIME_FILTER), "LOGOUT", UNAME)
send = MessageParser.message_to_byte_array(m)
s.send(send)
break
m = Message.Message(datetime.now().strftime(const.TIME_FILTER), string, UNAME)
send = MessageParser.message_to_byte_array(m)
s.send(send)
except (ConnectionAbortedError, ConnectionResetError):
print("Connection with server closed!")
```
#### File: OSZ_chat_ITS/helper/MessageParser.py
```python
from model import Message
import json
def message_to_byte_array(msg: Message.Message):
x = json.dumps(msg.__dict__)
return bytearray(x.encode())
def byte_array_to_message(b: bytearray):
d = json.loads(b.decode("utf8"))
return Message.Message(d["timestamp"], d["msg"], d["sender"])
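# Round-trip sketch: a Message survives serialization to bytes and back.
#   m = Message.Message('12:00:00', 'hello', 'alice')
#   assert byte_array_to_message(message_to_byte_array(m)).msg == 'hello'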
```
#### File: OSZ_chat_ITS/model/Message.py
```python
class Message(object):
def __init__(self, timestamp, msg, sender):
super(Message, self).__init__()
self.timestamp = timestamp
self.msg = msg
self.sender = sender
``` |
{
"source": "josusky/pywiscat",
"score": 2
} |
#### File: pywiscat/wis1/report.py
```python
import json
import logging
import click
from pywiscat.cli_helpers import cli_callbacks
from pywiscat.wis1.util import (create_file_list, search_files_by_term, group_by_originator)
LOGGER = logging.getLogger(__name__)
@click.group()
def report():
"""Reporting functions"""
pass
def group_search_results_by_organization(directory: str, terms: list) -> dict:
"""
Searches directory tree of metadata for matching search terms and
and groups by organization
:param directory: directory to metadata files
:param terms: list of terms
:returns: dict of results grouped by organization
"""
matches = search_files_by_term(directory, terms)
matches_by_org = group_by_originator(matches)
return matches_by_org
@click.command()
@click.pass_context
@cli_callbacks
@click.option('--directory', '-d', required=False,
help='Directory with metadata files to process',
type=click.Path(resolve_path=True, file_okay=False))
@click.option('--term', '-t', 'terms', multiple=True, required=True)
@click.option('--file-list', '-f', 'file_list_file',
type=click.Path(exists=True, resolve_path=True), required=False,
help='File containing JSON list with metadata files to process, alternative to "-d"')
def terms_by_org(ctx, terms, directory, file_list_file, verbosity):
"""Analyze term searches by organization"""
if file_list_file is None and directory is None:
raise click.UsageError('Missing --file-list or --directory option')
results = {}
if not file_list_file:
click.echo(f'Analyzing records in {directory} for terms {terms}')
results = group_search_results_by_organization(directory, terms)
else:
file_list = []
with open(file_list_file, "r", encoding="utf-8") as file_list_json:
try:
file_list = json.load(file_list_json)
except Exception as err:
LOGGER.error(f'Failed to read file list {file_list_file}: {err}')
return
results = group_by_originator(file_list)
if results:
click.echo(json.dumps(results, indent=4))
else:
click.echo('No results')
@click.command()
@click.pass_context
@cli_callbacks
@click.option('--directory', '-d', required=False,
help='Directory with metadata files to process',
type=click.Path(resolve_path=True, file_okay=False))
@click.option('--file-list', '-f', 'file_list_file',
type=click.Path(exists=True, resolve_path=True), required=False,
help='File containing JSON list with metadata files to process, alternative to "-d"')
def records_by_org(ctx, directory, file_list_file, verbosity):
"""Report number of records by organization / originator"""
if file_list_file is None and directory is None:
raise click.UsageError('Missing --file-list or --directory option')
results = {}
if not file_list_file:
click.echo(f'Analyzing records in {directory}')
file_list = create_file_list(directory)
results = group_by_originator(file_list)
else:
file_list = []
with open(file_list_file, "r", encoding="utf-8") as file_list_json:
try:
file_list = json.load(file_list_json)
except Exception as err:
LOGGER.error(f'Failed to read file list {file_list_file}: {err}')
return
results = group_by_originator(file_list)
if results:
click.echo(json.dumps(results, indent=4))
else:
click.echo('No results')
report.add_command(terms_by_org)
report.add_command(records_by_org)
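# Minimal smoke-test sketch (an illustrative assumption, not part of pywiscat):
# Click >= 7 derives the subcommand name "records-by-org" from records_by_org.
if __name__ == '__main__':
    from click.testing import CliRunner
    runner = CliRunner()
    result = runner.invoke(report, ['records-by-org', '--directory', 'metadata'])
    print(result.output)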
``` |
{
"source": "josusky/qpid-proton",
"score": 2
} |
#### File: c/tests/fdlimit.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import subprocess
import time
import test_subprocess
from test_unittest import unittest
# Check if we can run prlimit to control resources
try:
assert subprocess.check_call(["prlimit"], stdout=open(os.devnull, 'w')) == 0, 'prlimit is present, but broken'
prlimit_available = True
except OSError:
prlimit_available = False
class PRLimitedBroker(test_subprocess.Server):
def __init__(self, fdlimit, *args, **kwargs):
super(PRLimitedBroker, self).__init__(
['prlimit', '-n{0:d}:'.format(fdlimit), "broker", "", "0"], # `-n 256:` sets only soft limit to 256
stdout=subprocess.PIPE, universal_newlines=True, *args, **kwargs)
self.fdlimit = fdlimit
class FdLimitTest(unittest.TestCase):
devnull = open(os.devnull, 'w')
@classmethod
def tearDownClass(cls):
if cls.devnull:
cls.devnull.close()
# @unittest.skipUnless(prlimit_available, "prlimit not available")
@unittest.skip("temporarily disabled (epoll fix pending)")
def test_fd_limit_broker(self):
"""Check behaviour when running out of file descriptors on accept"""
# Not too many FDs but not too few either, some are used for system purposes.
fdlimit = 256
with PRLimitedBroker(fdlimit, kill_me=True) as b:
receivers = []
# Start enough receivers to use all FDs
# NOTE: broker does not log a file descriptor related error at any point in the test, only
# PN_TRANSPORT_CLOSED: amqp:connection:framing-error: connection aborted
# PN_TRANSPORT_CLOSED: proton:io: Connection reset by peer - disconnected :5672 (connection aborted)
for i in range(fdlimit + 1):
receiver = test_subprocess.Popen(["receive", "", b.port, str(i)], stdout=self.devnull)
receivers.append(receiver)
# All FDs are now in use, send attempt will (with present implementation) hang
with test_subprocess.Popen(["send", "", b.port, "x"],
stdout=self.devnull, stderr=subprocess.STDOUT) as sender:
time.sleep(1) # polling for None immediately would always succeed, regardless whether send hangs or not
self.assertIsNone(sender.poll())
# Kill receivers to free up FDs
for r in receivers:
r.kill()
for r in receivers:
r.wait()
# Sender now succeeded and exited
self.assertEqual(sender.wait(), 0)
# Additional send/receive should succeed now
self.assertIn("10 messages sent", test_subprocess.check_output(["send", "", b.port], universal_newlines=True))
self.assertIn("10 messages received", test_subprocess.check_output(["receive", "", b.port], universal_newlines=True))
if __name__ == "__main__":
unittest.main()
``` |
{
"source": "josuuribe/sapereaude",
"score": 3
} |
#### File: backend/kernel/Neural.py
```python
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from sklearn import model_selection
from sklearn import preprocessing
from sklearn.metrics import mean_squared_error
import random
import kernel
import kernel.Interfaces.IStoreManager
import os
import warnings
import tensorflow.python.util.deprecation as deprecation
deprecation._PRINT_DEPRECATION_WARNINGS = False
class Neural:
"""
This class computes and manages the neural network.
"""
def __init__(self, store_manager: kernel.Interfaces.IStoreManager):
"""
Constructor that initializes entity
        :param store_manager: Store manager used to read and write data.
"""
warnings.filterwarnings('ignore')
self.__store_manager__ = store_manager
self.__activation_function__ = tf.nn.relu
self.__learning_rate__ = kernel.get_learning_rate()
self.__session__ = tf.Session()
self.__batches__ = kernel.get_batch_size()
self.__epochs__ = kernel.get_epochs()
self.__data_train__ = None
self.__target_train__ = None
self.__data_test__ = None
self.__target_test__ = None
self.__input_size__ = 0
self.__output_size__ = 0
def process(self, data, target):
"""
        Prepares the data to be used for training and testing in training mode.
:param data: Data to be trained.
:param target: Target for these data.
"""
if len(data.shape) == 1:
self.__input_size__ = 1
data = data.reshape(1, -1)
else:
self.__input_size__ = len(data[0])
self.__output_size__ = max(target) + 1
        data = np.nan_to_num(data)  # replace NaN/inf so scaling and training stay finite
        self.__data_train__ = data
self.__target_train__ = target
x_train, x_test, y_train, y_test = model_selection.train_test_split(self.__data_train__,
self.__target_train__,
test_size=0.20,
random_state=42)
scaler = preprocessing.StandardScaler().fit(x_train)
self.__data_train__ = scaler.transform(x_train)
self.__target_train__ = y_train
scaler = preprocessing.StandardScaler().fit(x_test)
self.__data_test__ = scaler.transform(x_test)
self.__target_test__ = y_test
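        # Note: the train and test splits are standardized with separately
        # fitted StandardScaler instances, as the two fit/transform calls show.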
def create_softmax(self):
"""
Creates softmax architecture for this neural network.
"""
tf.reset_default_graph()
self.input_pl = tf.placeholder(
dtype=tf.float32,
shape=[None, self.__input_size__],
name="inputplaceholder")
self.output_pl = tf.placeholder(
dtype=tf.int16,
shape=[None, self.__output_size__],
name="userdefinedoutput")
dense2 = tf.layers.dense(inputs=self.input_pl,
units=2048,
activation=self.__activation_function__,
name="2_dense_layer")
dense3 = tf.layers.dense(inputs=dense2,
units=512,
activation=self.__activation_function__,
name="3_dense_layer")
dense4 = tf.layers.dense(inputs=dense3,
units=64,
activation=self.__activation_function__,
name="4_dense_layer")
self.network_prediction = tf.layers.dense(inputs=dense4,
units=self.__output_size__,
activation=None,
name="prediction_dense_layer")
cross_entropy = tf.nn.softmax_cross_entropy_with_logits_v2(
labels=self.output_pl,
logits=self.network_prediction)
self.loss_tensor = tf.reduce_mean(cross_entropy)
self.optimizer = tf.train.AdamOptimizer(self.__learning_rate__).minimize(self.loss_tensor)
def create_rnn(self):
"""
        Demo of how to build an RNN.
"""
rnn_size = 10
self.dropout_keep_prob = tf.placeholder(tf.float32)
self.input_pl = tf.placeholder(tf.float64, [None, self.__input_size__])
self.output_pl = tf.placeholder(tf.int32, [None, self.__output_size__])
embedding_matrix = tf.Variable(tf.random_uniform([200, 20], -1.0, 1.0))
embedding_output = tf.nn.embedding_lookup(embedding_matrix, self.__data_train__)
cell = tf.nn.rnn_cell.BasicRNNCell(num_units=rnn_size)
output, state = tf.nn.dynamic_rnn(cell, embedding_output, dtype=tf.float32)
output = tf.nn.dropout(output, self.dropout_keep_prob)
output = tf.transpose(output, [1, 0, 2])
last = tf.gather(output, int(output.get_shape()[0]) - 1)
weight = tf.Variable(tf.truncated_normal([rnn_size, 2], stddev=0.1))
bias = tf.Variable(tf.constant(0.1, shape=[self.__output_size__]))
logits_out = tf.nn.softmax(tf.add(tf.matmul(last, weight), bias))
self.loss = tf.reduce_mean(
tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits_out, labels=self.output_pl))
self.accuracy = tf.reduce_mean(
tf.cast(tf.equal(tf.argmax(logits_out, 1), tf.cast(self.output_pl, tf.int64)), tf.float32))
self.optimizer = tf.train.RMSPropOptimizer(self.__learning_rate__).minimize(self.loss)
def reset_and_train_network_rnn(self,
batch_size):
"""
        Demo of how to train an RNN.
        :param batch_size: Mini-batch size used for each training step.
"""
train_loss = []
test_loss = []
train_acc = []
test_acc = []
        for epoch in range(batch_size):
            shuffle_ix = np.random.permutation(np.arange(len(self.__data_train__)))
            x_train = self.__data_train__[shuffle_ix]
            y_train = self.__target_train__[shuffle_ix]
num_baches = int(len(x_train) / batch_size) + 1
for i in range(num_baches):
min_ix = i * batch_size
max_ix = np.min([len(x_train), ((i + 1) * batch_size)])
x_train_batch = x_train[min_ix:max_ix]
y_train_batch = y_train[min_ix:max_ix]
                train_dic = {self.input_pl: x_train_batch, self.output_pl: y_train_batch,
                             self.dropout_keep_prob: 0.5}
self.__session__.run(self.optimizer, feed_dict=train_dic)
temp_train_loss, temp_train_acc = self.__session__.run([self.loss, self.accuracy], feed_dict=train_dic)
train_loss.append(temp_train_loss)
train_acc.append((temp_train_acc))
test_dict = {self.input_pl: self.__data_test__, self.output_pl: self.__target_test__,
self.dropout_keep_prob: 1.0}
temp_test_loss, temp_test_acc = self.__session__.run([self.loss, self.accuracy], feed_dict=test_dict)
test_loss.append(temp_test_loss)
test_acc.append(temp_test_acc)
print("Epoch {} completado, Loss: {:.3f}, Acc: {:.3f}".format(i + 1, temp_test_loss, temp_test_acc))
def reset_and_train_network(self,
verbose=True):
"""
        Resets the TensorFlow session and trains the softmax network.
:param verbose: If debugging information about training should be printed.
:return: Loss history, very useful to see network benchmark.
"""
self.__session__ = tf.Session()
init = tf.global_variables_initializer() # https://www.tensorflow.org/api_docs/python/tf/global_variables_initializer
self.__session__.run(init)
zipped = list(zip(self.__data_train__, self.__target_train__))
loss_history = list()
for _ in range(self.__epochs__):
datax = list()
datay = list()
for _ in range(self.__batches__):
samp = random.choice(zipped)
datax.append(samp[0])
one_hot = [0] * self.__output_size__
one_hot[samp[1]] = 1
datay.append(one_hot)
_, l = self.__session__.run([self.optimizer, self.loss_tensor],
feed_dict={self.input_pl: datax,
self.output_pl: datay})
if verbose:
print(l)
loss_history.append(l)
return loss_history
def predict(self, data):
"""
Predicts a value with given data.
:param data: Data used to predict value.
:return: Predicted value.
"""
        # Reuse the session prepared by load_model() or by training; re-running
        # the global initializer here would overwrite the learned weights.
predicted_values = self.__session__.run(self.network_prediction,
feed_dict={
self.input_pl: data})
return np.argmax(predicted_values, axis=1)
def evaluate_network(self, data, target):
"""
Evaluates network, useful to test network efficiency.
:param data: Data to evaluate.
:param target: Target value.
:return: Predicted values and loss for each predicted value.
"""
datay = list()
for x in target:
datasetY2 = [0] * self.__output_size__
datasetY2[x] = 1.0
datay.append(datasetY2)
predicted_values, loss_dataset = self.__session__.run([self.network_prediction, self.loss_tensor],
feed_dict={
self.input_pl: data,
self.output_pl: datay})
return predicted_values, loss_dataset
def squared_error(self, data, target):
"""
        Computes the squared error, useful to test network reliability.
:param data: Data to evaluate.
:param target: Target value.
:return: Mean squared error.
"""
datay = list()
for x in target:
datasetY2 = [0] * self.__output_size__
datasetY2[x] = 1.0
datay.append(datasetY2)
predicted_values = self.__session__.run(self.network_prediction,
feed_dict={self.input_pl: data})
mse = mean_squared_error(predicted_values, datay)
return mse
def compute_success(self, target, predicted):
"""
Computes success % based on correct targets and predicted targets.
:param target: Target value.
:param predicted: Predicted values.
:return: Number of correct matches vs wrong between range 0-100, 100 is 100% success.
"""
ok = 0
for i in range(len(target)):
if (np.argmax(predicted[i]) == target[i]):
ok += 1
return ok / len(target)
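    # Worked example: target = [1, 0], predicted = [[0.1, 0.9], [0.8, 0.2]]
    # -> argmaxes are [1, 0], both match, and the method returns 1.0 (i.e. 100%).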
def visualize_function(self, function, name):
"""
Visualizes different functions to see performance.
:param function: Function to evaluate.
:param name: Function name for print in graph.
"""
inputdata = np.arange(-5.0, 5.0, 0.1)
mydatainput = tf.Variable(inputdata)
functionoutput = function(mydatainput)
with tf.Session() as temp_session:
init = tf.global_variables_initializer()
temp_session.run(init)
activationdata = functionoutput.eval(session=temp_session)
plt.plot(inputdata, activationdata)
plt.xlabel("input")
plt.ylabel("activation")
plt.title(name)
plt.show()
def save_model(self, user_id):
"""
Saves the model for this session in user folder.
:param user_id: User id to get the folder to be used to save.
"""
root = self.__store_manager__.get_model_folder(user_id)
f_tf = os.path.join(root, str(user_id) + ".model")
saver = tf.train.Saver()
saver.save(self.__session__, f_tf)
def load_model(self, user_id):
"""
Loads the model in this user session.
:param user_id: User id to get the model to be loaded.
"""
root = self.__store_manager__.get_model_folder(user_id)
f_tf = os.path.join(root, str(user_id) + ".model.meta")
saver = tf.train.import_meta_graph(f_tf)
saver.restore(self.__session__, tf.train.latest_checkpoint(root))
graph = tf.get_default_graph()
self.input_pl = graph.get_tensor_by_name("inputplaceholder:0")
self.network_prediction = graph.get_tensor_by_name("prediction_dense_layer/BiasAdd:0")
```
#### File: backend/kernel/Warden.py
```python
from kernel.Interfaces.IConsumer import IConsumer
from kernel.Interfaces.IProducer import IProducer
from kernel.Interfaces.IStoreManager import IStoreManager
from kernel.Neural import Neural
import kernel
from ctypes import c_int32
import multiprocessing
import numpy as np
import subprocess
class Warden:
"""
This component is the central component that manages all dependencies among others.
"""
def __init__(self, producer: IProducer, consumer: IConsumer, store_manager: IStoreManager):
"""
Constructor that initializes entity
:param producer: Producer that creates data.
:param consumer: Consumer that consumes data.
:param store_manager: Store manager to get and set data.
"""
self.__producer__ = producer(self)
self.__consumer__ = consumer(self)
self.__store_manager__ = store_manager()
self.__queue__ = multiprocessing.Queue()
self.__event__ = multiprocessing.Event()
self.__external_event__ = multiprocessing.Event()
self.__internal_event__ = multiprocessing.Event()
self.__command_id__: multiprocessing.Value = multiprocessing.Value(c_int32, 0)
self.__user_id__: multiprocessing.Value = multiprocessing.Value(c_int32, 0)
self.__threshold__ = kernel.get_threshold()
self.__mode__ = 0
self.__neural__ = None
self.__commands__ = None
def set_train_mode(self):
"""
Set mode as train.
:return:
"""
self.__mode__ = 0
def set_inference_mode(self):
"""
Sets mode as inference.
:return:
"""
self.__mode__ = 1
def is_train_mode(self):
"""
        Tells whether the current mode is train.
        :return: True if in train mode.
"""
return self.__mode__ == 0
def is_inference_mode(self):
"""
        Tells whether the current mode is inference.
        :return: True if in inference mode.
"""
return self.__mode__ == 1
def get_active_command(self):
"""
Get the active command, the command that is being trained.
:return: The command in training mode.
"""
return self.__command_id__
def set_active_command(self, command_id):
"""
Set the active command, the command that is going to be trained.
:param command_id: Command id to be trained.
"""
self.__command_id__ = command_id
def get_active_user(self):
"""
Get the user that is being trained.
:return: The user in training mode.
"""
return self.__user_id__
def set_active_user(self, user_id):
"""
Set the user that is in active mode.
        :param user_id: The user id to set as active.
"""
self.__user_id__ = user_id
def lock(self):
"""
Locks the producer-consumer relation.
"""
self.__consumer__.lock()
def wait_internal(self):
"""
Waits until a process be signaled.
"""
self.__internal_event__.wait()
def lock_internal_process(self):
"""
Locks a process waiting to continue.
:return:
"""
self.__internal_event__.clear()
def unlock_internal_process(self):
"""
Unlocks a signaled process.
"""
self.__internal_event__.set()
def wait_process(self):
"""
Waits until a process be signaled.
"""
self.__external_event__.wait()
def lock_external_process(self):
"""
Locks a process waiting to continue.
:return:
"""
self.__external_event__.clear()
def unlock_external_process(self):
"""
Unlocks a signaled process.
"""
self.__external_event__.set()
def start(self):
"""
        Starts Warden and its associated processes; execution differs depending on whether it is in train or inference mode.
"""
print("Arrancando componente productor")
producer_process = multiprocessing.Process(target=self.__producer__.start)
producer_process.daemon = True
producer_process.start()
print("Arrancando componente consumidor")
if self.is_inference_mode():
print("Cargando datos del modelo")
self.__store_manager__.refresh()
self.__neural__ = Neural(self.__store_manager__)
self.__neural__.load_model(self.__user_id__)
print("Cargando datos del usuario")
user = self.__store_manager__.get_user(self.__user_id__)
commands = user.get_commands()
self.__commands__ = {}
for command in commands:
parameters = command.get_parameters()
if parameters is None:
parameters = [command.get_action()]
else:
parameters.insert(0, command.get_action())
self.__commands__[command.get_id()] = parameters
self.__consumer__.start()
else:
self.__event__.clear()
consumer_process = multiprocessing.Process(target=self.__consumer__.start)
consumer_process.daemon = True
consumer_process.start()
def execute(self, data):
"""
        Executes the inferred command.
        :param data: Data used by the neural network to infer the command.
"""
try:
            command_id = int(self.__neural__.predict(data)[0]) + 1
            if command_id in self.__commands__:
                subprocess.run(self.__commands__[command_id])
except Exception as e:
print(e)
def train(self):
"""
Executes the train mode
"""
print("Preparando ML")
self.__store_manager__.refresh()
self.__neural__ = Neural(self.__store_manager__)
user = self.__store_manager__.get_user(self.__user_id__)
data = None
target = np.array([])
cmds = user.get_commands()
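        # Stack every command's EEG rows into one matrix, labelling each row
        # with its command id, to build the (data, target) training pair.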
for cmd in cmds:
cmd = self.__store_manager__.load_command(self.__user_id__, cmd)
if cmd is not None:
d_res = cmd.get_eeg()
if d_res is not None:
t_res = np.repeat(cmd.get_id(), len(d_res)).astype(np.int)
if data is None:
data = d_res
else:
if len(data.shape) == 1:
data = data[0].reshape(1, -1)
try:
data = np.append(data, d_res, axis=0)
except Exception:
print("exp")
target = np.concatenate((target, t_res), axis=0).astype(np.int)
print("Empezando ML")
self.__neural__.process(data, target)
self.__neural__.create_softmax()
train_loss = self.__neural__.reset_and_train_network(False)
predicted_values, test_loss = self.__neural__.evaluate_network(self.__neural__.__data_train__,
self.__neural__.__target_train__)
percentage = self.__neural__.compute_success(self.__neural__.__target_train__, predicted_values) * 100
print("Se ha conseguido un {0:.2f}% de acierto con un error de {1:.2f}% y una perdida de {2:.2f}.".format(
percentage, test_loss, min(train_loss)))
if percentage > self.__threshold__:
print("Se va a guardar este modelo.")
self.__neural__.save_model(self.__user_id__)
else:
print("La calidad es demasiado baja y se va a descartar este modelo.")
self.unlock_external_process()
def stop(self):
"""
Stops Warden and all processes.
"""
self.__producer__.stop()
self.__consumer__.stop()
```
#### File: backend/managers/__init__.py
```python
from kernel.Interfaces import IStoreManager, IProducer, IConsumer
import pkgutil
import importlib
import inspect
import managers
import os
import json
import backend
global __processed__
__processed__ = False
__store_manager__ = None
__consumer_manager__ = None
__producer_manager__ = None
if not __processed__:
imported_package = __import__(managers.__name__, fromlist=['blah'])
ui_manager = None
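    # Scan every module in the managers package and bind the first concrete
    # class found for each backend interface (store, consumer, producer).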
for _, pluginname, ispkg in pkgutil.iter_modules(imported_package.__path__):
name = managers.__name__ + "." + pluginname
plugin_module = importlib.import_module(name)
for (key, value) in inspect.getmembers(plugin_module, inspect.isclass):
if issubclass(value, IStoreManager.IStoreManager) & (value is not IStoreManager.IStoreManager):
# print("Encontrado IUserManager: "+str(value))
__store_manager__ = value
if issubclass(value, IConsumer.IConsumer) & (value is not IConsumer.IConsumer):
# print("Encontrado IConsumer: "+str(value))
__consumer_manager__ = value
if issubclass(value, IProducer.IProducer) & (value is not IProducer.IProducer):
# print("Encontrado IProducer: "+str(value))
__producer_manager__ = value
data = None
path = os.path.join(backend.CONFIG_PATH)
with open(path) as json_file:
data = json.load(json_file)
__processed__ = True
def get_store_manager():
return __store_manager__
def get_consumer_manager():
return __consumer_manager__
def get_producer_manager():
return __producer_manager__
def get_host():
return data['CyKitServer']['host']
def get_port():
return data['CyKitServer']['port']
def get_buffer_size():
return data['CyKitServer']['buffer_size']
def get_delay():
return data['Consumer']['delay']
def get_duration():
return data['Consumer']['duration']
```
#### File: backend/managers/JsonStoreManager.py
```python
import kernel
from kernel.Interfaces.IStoreManager import IStoreManager
import jsonpickle
import os
import numpy as np
class JsonStoreManager(IStoreManager):
def __init__(self):
"""
Constructor that initializes entity
"""
super().__init__()
self.refresh()
def refresh(self):
"""
Reloads file with new information, in particular recreates the user collection and all info associated.
"""
root = kernel.get_connection_string()
path_to_user = os.path.join(root, "users.json")
try:
f = open(path_to_user, "r")
json = f.read()
frozen = jsonpickle.decode(json)
self.__users__ = frozen.__users__
        except Exception as e:
            self.__users__ = []
            print("Initial users file not found")
finally:
if 'f' in locals() and f is not None:
f.close()
@staticmethod
def version():
return 1.0
def get_user(self, user_id):
try:
for user_to_search in self.get_users():
if user_to_search.get_id() == int(user_id):
return user_to_search
except Exception as e:
return None
return None
def get_users(self):
return self.__users__
def add_user(self, user):
max_id = 0
if len(self.__users__) > 0:
max_id = max(self.__users__, key=lambda t: t.get_id()).get_id()
user.set_id(max_id + 1)
self.__users__.append(user)
def remove_user(self, user):
self.__users__.remove(user)
def update_user(self, user):
for i, user_to_update in enumerate(self.get_users()):
if user_to_update.get_id() == user.get_id():
self.__users__[i] = user
def get_user_folders(self, user_id):
"""
Get the user folder and creates them if necessary.
:param user_id: User id related to these folders.
:return: 3 folders, the user folder, the command folder to store EEG information and the model folder.
"""
root = kernel.get_connection_string()
user = self.get_user(user_id)
path_to_user = os.path.join(root, str(user.get_id()) + "_" + str(user.get_name()))
os.makedirs(path_to_user, exist_ok=True)
path_to_command = os.path.join(path_to_user, "cmd")
os.makedirs(path_to_command, exist_ok=True)
path_to_model = os.path.join(path_to_user, "model")
os.makedirs(path_to_model, exist_ok=True)
return path_to_user, path_to_command, path_to_model
def save_command(self, user_id, command):
"""
Saves command in command folder.
:param user_id: User id to find user folder.
:param command: Command folder to save.
"""
path_to_user, path_to_command, _ = self.get_user_folders(user_id)
path_data = os.path.join(path_to_command, str(command.get_id()))
np.savetxt(path_data, command.get_eeg())
def load_command(self, user_id, command):
"""
Loads a command using EEG saved file data.
:param user_id: User id to find user folder.
:param command: Command folder to save.
:return:
"""
_, path_to_command, _ = self.get_user_folders(user_id)
path_data = os.path.join(path_to_command, str(command.get_id()))
try:
data = np.loadtxt(path_data)
command.set_eeg(data)
except Exception as e:
print("No se ha podido cargar el archivo EEG")
return None
return command
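    # Usage sketch (hypothetical ids): persist then reload a command's EEG matrix.
    #   store.save_command(1, cmd)        # writes <root>/1_<name>/cmd/<cmd_id>
    #   cmd = store.load_command(1, cmd)  # returns None if the EEG file is missing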
def save(self):
"""
Saves all information in users.json file.
"""
try:
frozen = jsonpickle.encode(self)
root = kernel.get_connection_string()
path_to_user = os.path.join(root, "users.json")
f = open(path_to_user, "w+")
f.write(frozen)
f.close()
except Exception:
print("Error guardando datos")
def get_model_folder(self, user_id):
"""
Returns model folder.
:param user_id: User id to find user folder.
:return: Model folder.
"""
_, _, path_to_model = self.get_user_folders(user_id)
return path_to_model
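

if __name__ == '__main__':
    # Illustrative sketch, not part of the original file: assumes
    # kernel.get_connection_string() resolves to a writable data directory
    # that already contains a users.json produced by a previous save().
    store = JsonStoreManager()
    user = store.get_user(1)                 # None when id 1 is unknown
    if user is not None:
        _, cmd_dir, model_dir = store.get_user_folders(user.get_id())
        print("command dir:", cmd_dir, "model dir:", model_dir)
    store.save()                             # writes users.json back out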
``` |
{
"source": "JosuVicente/facial_and_characteristics_recognition_with_speech_support",
"score": 3
} |
#### File: facial_and_characteristics_recognition_with_speech_support/src/model_utils.py
```python
import matplotlib.pyplot as plt
from scipy.misc import imread
from scipy.misc import imresize
from random import shuffle
import numpy as np
import cv2
from keras.models import load_model
import numpy as np
from statistics import mode
import glob
import os
import face_recognition
import string
from random import *
from gtts import gTTS
def get_labels(dataset_name):
if dataset_name == 'fer2013':
return {0:'angry',1:'disgust',2:'sad',3:'happy',
4:'sad',5:'surprise',6:'neutral'}
elif dataset_name == 'imdb':
return {0:'woman', 1:'man'}
else:
raise Exception('Invalid dataset name')
def preprocess_input(images):
images = images/255.0
return images
def _imread(image_name):
return imread(image_name)
def _imresize(image_array, size):
return imresize(image_array, size)
def split_data(ground_truth_data, training_ratio=.8, do_shuffle=False):
ground_truth_keys = sorted(ground_truth_data.keys())
if do_shuffle == True:
shuffle(ground_truth_keys)
num_train = int(round(training_ratio * len(ground_truth_keys)))
train_keys = ground_truth_keys[:num_train]
validation_keys = ground_truth_keys[num_train:]
return train_keys, validation_keys
def display_image(image_array):
image_array = np.squeeze(image_array).astype('uint8')
plt.imshow(image_array)
plt.show()
def to_categorical(integer_classes, num_classes=2):
integer_classes = np.asarray(integer_classes, dtype='int')
num_samples = integer_classes.shape[0]
categorical = np.zeros((num_samples, num_classes))
categorical[np.arange(num_samples), integer_classes] = 1
return categorical
# parameters
detection_model_path = '../models/face/haarcascade_frontalface_default.xml'
emotion_model_path = '../models/emotion/simple_CNN.530-0.65.hdf5'
gender_model_path = '../models/gender/simple_CNN.81-0.96.hdf5'
emotion_labels = get_labels('fer2013')
gender_labels = get_labels('imdb')
frame_window = 10
x_offset_emotion = 20
y_offset_emotion = 40
x_offset = 30
y_offset = 60
class Model_Helper:
def __init__(self, detection_model_path, emotion_model_path, current_language, audio_path, image_path):
self.audio_path = audio_path
self.image_path = image_path
print('Loading gender detector...')
self.gender_classifier = load_model(gender_model_path)
print('Loading face detector...')
self.face_detection = cv2.CascadeClassifier(detection_model_path)
print('Loading emotion detector...')
self.emotion_classifier = load_model(emotion_model_path)
print('Loading known faces...')
self.known_faces = []
for filepath in glob.iglob(self.image_path + 'known/*.*', recursive=True):
try:
filename = os.path.splitext(os.path.basename(filepath))[0]
name = os.path.splitext(filename)[0].split('-')[0]
picture = face_recognition.load_image_file(filepath)
encoding = face_recognition.face_encodings(picture)[0]
self.known_faces.append([name, filename, encoding])
except Exception as e:
try:
os.remove(self.image_path + 'known/' + filename+'.jpg')
os.remove(self.audio_path + 'known/' + filename+'.mp3')
except Exception as e:
print(e)
print(str(len(self.known_faces)) + ' faces loaded')
def update_known_faces(self, name, audio_file_name, face_encoding, current_encoding):
temp_faces = []
# Remove previous faces with same encoding
for i in range(len(self.known_faces)):
match = face_recognition.compare_faces([self.known_faces[i][2]], current_encoding)
if match[0]:
print(self.known_faces[i][1] + ' is match')
image_file = self.image_path + 'known/' + self.known_faces[i][1]+'.jpg'
audio_file = self.audio_path + 'known/' + self.known_faces[i][1]+'.mp3'
os.remove(image_file)
print(image_file + ' deleted')
os.remove(audio_file)
print(audio_file + ' deleted')
else:
print(self.known_faces[i][1] + ' no match')
temp_faces.append(self.known_faces[i])
# Add new encoding and data to known faces
temp_faces.append([name, audio_file_name, face_encoding])
print(name + ' added')
self.known_faces = temp_faces
def save_face(self, name, language, face, current_encoding):
try:
rand = "".join(choice(string.ascii_letters) for x in range(randint(8, 8)))
full_name = name + '-' + rand
path_audio = self.audio_path + 'known/' + full_name + '.mp3'
path_image = self.image_path + 'known/' + full_name + '.jpg'
#Convert transcript to standard audio
tts = gTTS(text=name, lang=language, slow=False)
tts.save(path_audio)
#cv2.imshow('image',face)
cv2.imwrite(path_image, face)
#Get face encoding
picture = face_recognition.load_image_file(path_image)
face_encoding = face_recognition.face_encodings(picture)[0]
self.update_known_faces(name, full_name, face_encoding, current_encoding)
return full_name
except Exception as e:
print('**s****')
print(e)
print('**s****')
return ''
```
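The two pure helpers near the top are easy to check in isolation; a minimal sketch (importing `model_utils` also pulls in its heavy dependencies: keras, cv2, face_recognition and gtts, so it assumes those are installed):

```python
import numpy as np
from model_utils import to_categorical, get_labels

# One-hot encode three gender labels (0 = woman, 1 = man in the imdb mapping).
one_hot = to_categorical([0, 1, 1], num_classes=2)
print(one_hot)                    # [[1. 0.] [0. 1.] [0. 1.]]
print(get_labels('imdb')[1])      # 'man'
```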
#### File: facial_and_characteristics_recognition_with_speech_support/src/speech_utils.py
```python
import speech_recognition as sr
from google.cloud import speech
import io
import os
#######################
GOOGLE_CLOUD_SPEECH_CREDENTIALS_PATH = '../files/GoogleCloudSpeechKey.json'
#######################
def transcript_audio(filepath, language, use_cloud):
transcript = '##NONE##'
# The name of the audio file to transcribe
file_name = os.path.join(os.path.dirname(''), filepath)
if use_cloud:
try:
# Instantiates a client
speech_client = speech.Client.from_service_account_json(GOOGLE_CLOUD_SPEECH_CREDENTIALS_PATH)
# Loads the audio into memory
with io.open(file_name, 'rb') as audio_file:
content = audio_file.read()
sample = speech_client.sample(
content,
source_uri=None,
encoding='LINEAR16',
sample_rate_hertz=16000)
# Detects speech in the audio file
alternatives = sample.recognize(language)
if (len(alternatives)>0):
transcript = alternatives[0].transcript
except Exception as e:
print(e)
if (transcript == '##NONE##'):
try:
r = sr.Recognizer()
with sr.AudioFile(file_name) as source:
audio = r.record(source)
# for testing purposes, we're just using the default API key
# to use another API key, use `r.recognize_google(audio, key="GOOGLE_SPEECH_RECOGNITION_API_KEY", show_all=True)`
# instead of `r.recognize_google(audio, show_all=True)`
alternatives = r.recognize_google(audio, show_all=False)
if (len(alternatives)>0):
transcript = alternatives
except sr.UnknownValueError:
print("Google Speech Recognition could not understand audio")
except sr.RequestError as e:
print("Could not request results from Google Speech Recognition service; {0}".format(e))
return transcript
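

# Illustrative call (hypothetical file name): with use_cloud=False, or when the
# cloud call fails, the function falls back to the free recognize_google() API.
# text = transcript_audio('../files/sample.wav', 'en-US', use_cloud=False)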
``` |
{
"source": "JosuX/Albie",
"score": 2
} |
#### File: script/compression/compress.py
```python
import tinify as tf
def compress(file):
'''Jofer's Tinify API Key'''
tf.key = "<KEY>"
result_data = tf.from_buffer(file).to_buffer()
return result_data
```
#### File: script/crypt/crypt.py
```python
import sys
from cryptography.fernet import Fernet
import csv
from script.compression.compress import compress
maxInt = sys.maxsize
while True:
# decrease the maxInt value by factor 10
# as long as the OverflowError occurs.
try:
csv.field_size_limit(maxInt)
break
except OverflowError:
maxInt = int(maxInt/10)
def load_key():
    key = b'<KEY>'
return key
def encrypt(filename):
key = load_key()
f = Fernet(key)
with open(filename, 'rb') as file:
file_data = file.read()
compressed_data = compress(file_data)
encrypted_data = f.encrypt(compressed_data)
file.close()
return encrypted_data
def decrypt(crypt):
key = load_key()
f = Fernet(key)
decrypted = f.decrypt(crypt)
return decrypted
def encrypt_bytes(crypt):
key = load_key()
f = Fernet(key)
encrypted = f.encrypt(crypt)
return encrypted
```
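A quick round trip with the byte-level helpers above; a sketch, assuming it runs from the project root so the package import resolves (note that encrypt() itself also routes through the Tinify-backed compress(), which needs network access and a valid API key, so it is avoided here):

```python
from script.crypt.crypt import encrypt_bytes, decrypt

payload = b"hello album"
token = encrypt_bytes(payload)       # Fernet token, safe to persist
assert decrypt(token) == payload     # decrypt() reverses encrypt_bytes()
```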
#### File: script/gui/createView.py
```python
import os
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import Qt
sourcepath = os.path.dirname(os.path.abspath(__file__))
class Ui_Win_Create(object):
def setupUi(self, AlbiePhotography):
AlbiePhotography.setObjectName("AlbiePhotography")
AlbiePhotography.resize(960, 540)
AlbiePhotography.setWindowTitle("Albie")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("../../python_case_study/logoooo/logo1.jpg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
AlbiePhotography.setWindowIcon(icon)
AlbiePhotography.setWindowFlag(Qt.FramelessWindowHint, True)
AlbiePhotography.setStyleSheet("QMainWindow{\n"
"image:url(script/gui/img/overlay no resize .png);\n"
"}")
self.centralwidget = QtWidgets.QWidget(AlbiePhotography)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout_2 = QtWidgets.QGridLayout(self.centralwidget)
self.gridLayout_2.setObjectName("gridLayout_2")
self.Frame_All_Attr = QtWidgets.QFrame(self.centralwidget)
self.Frame_All_Attr.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.Frame_All_Attr.setFrameShadow(QtWidgets.QFrame.Raised)
self.Frame_All_Attr.setObjectName("Frame_All_Attr")
self.gridLayout = QtWidgets.QGridLayout(self.Frame_All_Attr)
self.gridLayout.setObjectName("gridLayout")
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem, 2, 0, 1, 1)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem1, 4, 0, 1, 1)
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem2, 5, 0, 1, 1)
spacerItem3 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem3, 0, 0, 1, 1)
self.Container_button = QtWidgets.QHBoxLayout()
self.Container_button.setObjectName("Container_button")
spacerItem4 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem4)
spacerItem5 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem5)
spacerItem6 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem6)
spacerItem7 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem7)
spacerItem8 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem8)
spacerItem9 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem9)
self.Button_create = QtWidgets.QPushButton(self.Frame_All_Attr)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Button_create.sizePolicy().hasHeightForWidth())
self.Button_create.setSizePolicy(sizePolicy)
self.Button_create.setMinimumSize(QtCore.QSize(0, 332))
self.Button_create.setMaximumSize(QtCore.QSize(237, 332))
self.Button_create.setStyleSheet("QPushButton:hover { color: white }\n"
"QPushButton:hover {font-size:32px}\n"
"QPushButton{\n"
"background-color: rgba(255, 255, 255, 0);\n"
"\n"
"}\n"
"QPushButton{\n"
"image:url(script/gui/img/create_album_icon.png);\n"
" color: rgb(255, 255, 255);\n"
" border-radius: 50px;\n"
" border-color: beige;\n"
" font: 18pt \"Phenomena\";\n"
" min-width: 0em;\n"
" padding: 0px;\n"
"}")
self.Button_create.setObjectName("Button_Create")
self.Container_button.addWidget(self.Button_create)
spacerItem10 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem10)
spacerItem11 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem11)
spacerItem12 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem12)
spacerItem13 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem13)
spacerItem14 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem14)
self.Button_view = QtWidgets.QPushButton(self.Frame_All_Attr)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Button_view.sizePolicy().hasHeightForWidth())
self.Button_view.setSizePolicy(sizePolicy)
self.Button_view.setMinimumSize(QtCore.QSize(0, 332))
self.Button_view.setMaximumSize(QtCore.QSize(237, 332))
self.Button_view.setStyleSheet("QPushButton:hover { color: white }\n"
"QPushButton:hover {font-size:32px}\n"
"QPushButton{\n"
"background-color: rgba(255, 255, 255, 0);\n"
"font-color:( white); \n"
"}\n"
"QPushButton{\n"
"image:url(script/gui/img/view_icon.png);\n"
" color: rgb(255, 255, 255);\n"
" border-radius: 50px;\n"
" border-color: beige;\n"
" font: 18pt \"Phenomena\";\n"
" min-width: 0em;\n"
" padding: 0px;\n"
"}")
self.Button_view.setObjectName("Button_View")
self.Container_button.addWidget(self.Button_view)
spacerItem15 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem15)
spacerItem16 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem16)
spacerItem17 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem17)
spacerItem18 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem18)
spacerItem19 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.Container_button.addItem(spacerItem19)
self.gridLayout.addLayout(self.Container_button, 3, 0, 1, 1)
spacerItem20 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem20, 1, 0, 1, 1)
self.gridLayout_2.addWidget(self.Frame_All_Attr, 0, 0, 1, 1)
AlbiePhotography.setCentralWidget(self.centralwidget)
self.statusbar = QtWidgets.QStatusBar(AlbiePhotography)
self.statusbar.setObjectName("statusbar")
AlbiePhotography.setStatusBar(self.statusbar)
self.retranslateUi(AlbiePhotography)
QtCore.QMetaObject.connectSlotsByName(AlbiePhotography)
def retranslateUi(self, AlbiePhotography):
_translate = QtCore.QCoreApplication.translate
AlbiePhotography.setWindowTitle(_translate("AlbiePhotography", "Albie"))
self.Button_create.setText(_translate("AlbiePhotography", "\n"
"\n"
"\n"
"\n"
"Create Album"))
self.Button_create.setShortcut(_translate("AlbiePhotography", "Ctrl+C"))
self.Button_view.setText(_translate("AlbiePhotography", "\n"
"\n"
"\n"
"\n"
"View Album"))
self.Button_view.setShortcut(_translate("AlbiePhotography", "Ctrl+V"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
AlbiePhotography = QtWidgets.QMainWindow()
ui = Ui_Win_Create()
ui.setupUi(AlbiePhotography)
AlbiePhotography.show()
sys.exit(app.exec_())
```
#### File: script/gui/imageview_widget.py
```python
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPalette, QPainter
from PyQt5.QtPrintSupport import QPrintDialog, QPrinter
from PyQt5.QtWidgets import QLabel, QSizePolicy, QScrollArea, QMainWindow, QAction
class QImageViewer(QMainWindow):
def __init__(self):
super().__init__()
self.zoomCounter = 0
self.printer = QPrinter()
self.scaleFactor = 0.0
self.imageLabel = QLabel()
self.imageLabel.setBackgroundRole(QPalette.Base)
self.imageLabel.setSizePolicy(QSizePolicy.Ignored, QSizePolicy.Ignored)
self.imageLabel.setScaledContents(True)
self.scrollArea = QScrollArea()
self.scrollArea.setBackgroundRole(QPalette.Dark)
self.scrollArea.setWidget(self.imageLabel)
self.scrollArea.setVisible(False)
self.createActions()
self.setCentralWidget(self.scrollArea)
def keyReleaseEvent(self, event):
if event.key() == Qt.Key_Plus:
self.zoomIn()
if event.key() == Qt.Key_Minus:
self.zoomOut()
def open(self, pixmap):
self.imageLabel.setPixmap(pixmap)
self.scaleFactor = 1.0
self.scrollArea.setVisible(True)
self.printAct.setEnabled(True)
self.fitToWindowAct.setEnabled(True)
self.updateActions()
if not self.fitToWindowAct.isChecked():
self.imageLabel.adjustSize()
def print_(self):
dialog = QPrintDialog(self.printer, self)
if dialog.exec_():
painter = QPainter(self.printer)
rect = painter.viewport()
size = self.imageLabel.pixmap().size()
size.scale(rect.size(), Qt.KeepAspectRatio)
painter.setViewport(rect.x(), rect.y(), size.width(), size.height())
painter.setWindow(self.imageLabel.pixmap().rect())
painter.drawPixmap(0, 0, self.imageLabel.pixmap())
def zoomIn(self):
if self.zoomCounter < 7:
self.zoomCounter += 1
if self.zoomCounter == 0:
self.normalSize()
else:
self.scaleImage(1.2)
def zoomOut(self):
if self.zoomCounter > -7:
self.zoomCounter -= 1
if self.zoomCounter == 0:
self.normalSize()
else:
self.scaleImage(0.8)
def normalSize(self):
self.imageLabel.adjustSize()
self.scaleFactor = 1.0
def fitToWindow(self):
fitToWindow = self.fitToWindowAct.isChecked()
self.scrollArea.setWidgetResizable(fitToWindow)
if not fitToWindow:
self.normalSize()
self.updateActions()
def createActions(self):
self.printAct = QAction("&Print...", self, shortcut="Ctrl+P", enabled=False, triggered=self.print_)
self.exitAct = QAction("E&xit", self, shortcut="Ctrl+Q", triggered=self.close)
self.zoomInAct = QAction("Zoom &In (25%)", self, shortcut="Ctrl++", enabled=False, triggered=self.zoomIn)
self.zoomOutAct = QAction("Zoom &Out (25%)", self, shortcut="Ctrl+-", enabled=False, triggered=self.zoomOut)
self.normalSizeAct = QAction("&Normal Size", self, shortcut="Ctrl+S", enabled=False, triggered=self.normalSize)
self.fitToWindowAct = QAction("&Fit to Window", self, enabled=False, checkable=True, shortcut="Ctrl+F",
triggered=self.fitToWindow)
def updateActions(self):
self.zoomInAct.setEnabled(not self.fitToWindowAct.isChecked())
self.zoomOutAct.setEnabled(not self.fitToWindowAct.isChecked())
self.normalSizeAct.setEnabled(not self.fitToWindowAct.isChecked())
def scaleImage(self, factor):
self.scaleFactor *= factor
self.imageLabel.resize(self.scaleFactor * self.imageLabel.pixmap().size())
self.adjustScrollBar(self.scrollArea.horizontalScrollBar(), factor)
self.adjustScrollBar(self.scrollArea.verticalScrollBar(), factor)
self.zoomInAct.setEnabled(self.scaleFactor < 2.0)
self.zoomOutAct.setEnabled(self.scaleFactor > 0.5)
def adjustScrollBar(self, scrollBar, factor):
scrollBar.setValue(int(factor * scrollBar.value()
+ ((factor - 1) * scrollBar.pageStep() / 2)))
if __name__ == '__main__':
import sys
from PyQt5.QtWidgets import QApplication
app = QApplication(sys.argv)
imageViewer = QImageViewer()
imageViewer.show()
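    # To actually display something, pass a QPixmap (hypothetical path):
    # imageViewer.open(QPixmap('photo.jpg'))   # QPixmap lives in PyQt5.QtGui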
sys.exit(app.exec_())
```
#### File: script/gui/instalog.py
```python
from PyQt5 import QtCore, QtGui, QtWidgets
class InstaLog(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(960, 540)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
MainWindow.setMinimumSize(QtCore.QSize(960, 540))
MainWindow.setMaximumSize(QtCore.QSize(960, 540))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("script/gui/img/logo1.jpg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
MainWindow.setWindowIcon(icon)
MainWindow.setStyleSheet("background-color: rgb(245, 245, 245);")
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.label_2 = QtWidgets.QLabel(self.centralwidget)
self.label_2.setGeometry(QtCore.QRect(320, 80, 300, 120))
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2.sizePolicy().hasHeightForWidth())
self.label_2.setSizePolicy(sizePolicy)
self.label_2.setMinimumSize(QtCore.QSize(200, 120))
self.label_2.setMaximumSize(QtCore.QSize(300, 300))
self.label_2.setStyleSheet("image:url(script/gui/img/instagram letter icon1.png);")
self.label_2.setText("")
self.label_2.setScaledContents(True)
self.label_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_2.setObjectName("label_2")
self.textEdit_3 = QtWidgets.QLineEdit(self.centralwidget)
self.textEdit_3.setGeometry(QtCore.QRect(280, 205, 400, 40))
self.textEdit_3.setMinimumSize(QtCore.QSize(400, 40))
self.textEdit_3.setMaximumSize(QtCore.QSize(400, 40))
self.textEdit_3.setStyleSheet(" border-radius: 10px;\n"
"\n"
"background-color: rgb(234, 234, 234);")
self.textEdit_3.setObjectName("textEdit_3")
self.textEdit_4 = QtWidgets.QLineEdit(self.centralwidget)
self.textEdit_4.setGeometry(QtCore.QRect(280, 270, 400, 40))
self.textEdit_4.setMinimumSize(QtCore.QSize(400, 40))
self.textEdit_4.setMaximumSize(QtCore.QSize(400, 40))
self.textEdit_4.setEchoMode(QtWidgets.QLineEdit.Password)
self.textEdit_4.setStyleSheet("\n"
"border-radius: 10px;\n"
"background-color: rgb(234, 234, 234);")
self.textEdit_4.setObjectName("textEdit_4")
self.pushButton = QtWidgets.QPushButton(self.centralwidget)
self.pushButton.setGeometry(QtCore.QRect(350, 330, 251, 41))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.pushButton.setFont(font)
self.pushButton.setStyleSheet("background-color: rgb(16, 140, 255);\n"
"color: rgb(255, 255, 255);\n"
"border-radius: 10px;")
self.pushButton.setAutoDefault(False)
self.pushButton.setDefault(False)
self.pushButton.setFlat(False)
self.pushButton.setObjectName("pushButton")
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "Albie"))
self.textEdit_3.setPlaceholderText(_translate("MainWindow", "Username"))
self.textEdit_4.setPlaceholderText(_translate("MainWindow", "Password"))
self.pushButton.setText(_translate("MainWindow", "Log in"))
if __name__ == "__main__":
import sys
import os
app = QtWidgets.QApplication(sys.argv)
MainWindow = QtWidgets.QMainWindow()
ui = InstaLog()
ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_())
```
#### File: script/gui/mainWindow.py
```python
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import Qt, QTimer
from PyQt5.QtGui import QPixmap, QPainter, QMovie, QMouseEvent, QIcon
from PyQt5.QtWidgets import QMdiArea, QLabel, QMenu, QAction
import os
sourcepath = os.path.dirname(os.path.abspath(__file__))
class paintedcentral(QMdiArea):
def __init__(self, parent=None):
QMdiArea.__init__(self, parent=parent)
def paintEvent(self, event):
QMdiArea.paintEvent(self, event)
painter = QPainter(self.viewport())
background = QPixmap(sourcepath + "/img/mainscreen_background.png")
painter.drawPixmap(self.rect(), background)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(960, 540)
MainWindow.setWindowFlag(Qt.FramelessWindowHint,True)
MainWindow.setMinimumSize(QtCore.QSize(960, 540))
MainWindow.setMaximumSize(QtCore.QSize(960, 540))
font = QtGui.QFont()
font.setFamily("Monospac821 BT")
MainWindow.setFont(font)
self.centralwidget = paintedcentral(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName("gridLayout")
self.frame = QtWidgets.QFrame(self.centralwidget)
self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame.setObjectName("frame")
self.gridLayout_2 = QtWidgets.QGridLayout(self.frame)
self.gridLayout_2.setObjectName("gridLayout_2")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem)
self.menu = QMenu()
self.facebook = QIcon(sourcepath+"/img/social media icons/facebook icon.png")
self.instagram = QIcon(sourcepath+"/img/social media icons/instagram icon.png")
self.twitter = QIcon(sourcepath+"/img/social media icons/twitter icon.png")
self.logo = QIcon(sourcepath+"/img/logo1.jpg")
self.fb = QAction(self.facebook, "Facebook Page")
self.insta = QAction(self.instagram, "Instagram")
self.twit =QAction(self.twitter, "Twitter")
self.site = QAction(self.logo, "Website")
self.menu.addAction(self.fb)
self.menu.addAction(self.insta)
self.menu.addAction(self.twit)
self.menu.addAction(self.site)
menuicon = QIcon()
self.menu.setIcon(menuicon)
self.toolButton = QtWidgets.QPushButton(self.frame)
self.menu.stackUnder(self.frame)
self.toolButton.setMinimumSize(QtCore.QSize(150, 25))
self.toolButton.setMaximumSize(QtCore.QSize(150, 25))
self.toolButton.setMenu(self.menu)
self.toolButton.clicked.connect(self.toolButton.showMenu)
font = QtGui.QFont()
font.setFamily("Phenomena Light")
font.setPointSize(10)
self.toolButton.setFont(font)
stylesheet = """QPushButton{
background-color: rgb(208, 208, 208);
border-radius: 10px;}
QPushButton::menu-indicator{width:0px;}
"""
self.toolButton.setStyleSheet(stylesheet)
self.toolButton.setObjectName("toolButton")
self.horizontalLayout_2.addWidget(self.toolButton)
self.gridLayout_2.addLayout(self.horizontalLayout_2, 0, 0, 1, 1)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.frame_2 = QtWidgets.QFrame(self.frame)
self.frame_2.setMinimumSize(QtCore.QSize(0, 0))
self.frame_2.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_2.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_2.setObjectName("frame_2")
self.horizontalLayout.addWidget(self.frame_2)
self.gridLayout_2.addLayout(self.horizontalLayout, 1, 0, 1, 1)
self.gridLayout.addWidget(self.frame, 0, 0, 1, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "Albie"))
self.toolButton.setText(_translate("MainWindow", "CONNECT WITH US!"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
MainWindow = QtWidgets.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_())
``` |
{
"source": "JosuX/Amplify",
"score": 2
} |
#### File: JosuX/Amplify/alldone.py
```python
from PyQt5 import uic
from PyQt5.QtWidgets import QWidget
class doneUI(QWidget):
def __init__(self):
super().__init__()
uic.loadUi("UI Files/confirm_message.ui", self)
self.background.setStyleSheet("""
[objectName^="background"]
{
background-image: url(Resources/Misc/alldone_bg.png)
}
QPushButton
{
background-color: none;
border: none;
}
""")
```
#### File: JosuX/Amplify/firstflag.py
```python
from PyQt5 import uic, QtGui
from PyQt5.QtWidgets import QWidget
class firstUI(QWidget):
def __init__(self):
super().__init__()
uic.loadUi("UI Files/firstflag.ui", self)
self.background.setStyleSheet("""
[objectName^="background"]
{
background-image: url(Resources/Misc/myflag_bg.png)
}
""")
``` |
{
"source": "josvalbae10/Selenium2Library",
"score": 3
} |
#### File: utils/events/__init__.py
```python
from scope_event import ScopeStart, ScopeEnd
_registered_events = [ ScopeStart, ScopeEnd ]
_events = []
__all__ = [
"on",
"dispatch",
"register_event"
]
def on(event_name, *args, **kwargs):
for event in _registered_events:
if event.name == event_name:
_events.append(event(*args, **kwargs))
return
def dispatch(event_name, *args, **kwargs):
for event in _events:
if event.name == event_name:
event.trigger(*args, **kwargs)
def register_event(event):
for registered_event in _registered_events:
if event.name == registered_event.name:
raise AttributeError("An event with the name " + event.name + " already exists.")
_registered_events.append(event)
```
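A sketch of how the trio above fits together; the event interface (a class-level `name`, a constructor that captures handler state, and a `trigger()` method) is inferred from how ScopeStart/ScopeEnd are used, and the import path assumes the package root is on sys.path:

```python
from Selenium2Library.utils import events

class BrowserClosed(object):
    name = 'browser closed'
    def __init__(self, callback):
        self.callback = callback
    def trigger(self, *args, **kwargs):
        self.callback(*args, **kwargs)

def say_bye():
    print('bye')

events.register_event(BrowserClosed)     # make the event type known
events.on('browser closed', say_bye)     # instantiate it with a handler
events.dispatch('browser closed')        # fire every matching instance
```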
#### File: lib/mockito/matchers.py
```python
__maintainer__ = "Mockito Maintainers"
__email__ = "<EMAIL>"
__all__ = ['any', 'contains', 'times']
class Matcher:
def matches(self, arg):
pass
class Any(Matcher):
def __init__(self, wanted_type=None):
self.wanted_type = wanted_type
def matches(self, arg):
if self.wanted_type:
return isinstance(arg, self.wanted_type)
else:
return True
def __repr__(self):
return "<Any: %s>" % self.wanted_type
class Contains(Matcher):
def __init__(self, sub):
self.sub = sub
def matches(self, arg):
if not hasattr(arg, 'find'):
return
return self.sub and len(self.sub) > 0 and arg.find(self.sub) > -1
def __repr__(self):
return "<Contains: '%s'>" % self.sub
def any(wanted_type=None):
"""Matches any() argument OR any(SomeClass) argument
Examples:
when(mock).foo(any()).thenReturn(1)
verify(mock).foo(any(int))
"""
return Any(wanted_type)
def contains(sub):
return Contains(sub)
def times(count):
return count
```
#### File: lib/mockito/mocking.py
```python
import inspect
import invocation
from mock_registry import mock_registry
import warnings
__copyright__ = "Copyright 2008-2010, Mockito Contributors"
__license__ = "MIT"
__maintainer__ = "Mockito Maintainers"
__email__ = "<EMAIL>"
__all__ = ['mock', 'Mock']
class _Dummy(object): pass
class TestDouble(object): pass
class mock(TestDouble):
def __init__(self, mocked_obj=None, strict=True):
self.invocations = []
self.stubbed_invocations = []
self.original_methods = []
self.stubbing = None
self.verification = None
if mocked_obj is None:
mocked_obj = _Dummy()
strict = False
self.mocked_obj = mocked_obj
self.strict = strict
self.stubbing_real_object = False
mock_registry.register(self)
def __getattr__(self, method_name):
if self.stubbing is not None:
return invocation.StubbedInvocation(self, method_name)
if self.verification is not None:
return invocation.VerifiableInvocation(self, method_name)
return invocation.RememberedInvocation(self, method_name)
def remember(self, invocation):
self.invocations.insert(0, invocation)
def finish_stubbing(self, stubbed_invocation):
self.stubbed_invocations.insert(0, stubbed_invocation)
self.stubbing = None
def expect_stubbing(self):
self.stubbing = True
def pull_verification(self):
v = self.verification
self.verification = None
return v
def has_method(self, method_name):
return hasattr(self.mocked_obj, method_name)
def get_method(self, method_name):
return self.mocked_obj.__dict__.get(method_name)
def set_method(self, method_name, new_method):
setattr(self.mocked_obj, method_name, new_method)
def replace_method(self, method_name, original_method):
def new_mocked_method(*args, **kwargs):
# we throw away the first argument, if it's either self or cls
if inspect.isclass(self.mocked_obj) and not isinstance(original_method, staticmethod):
args = args[1:]
call = self.__getattr__(method_name) # that is: invocation.RememberedInvocation(self, method_name)
return call(*args, **kwargs)
if isinstance(original_method, staticmethod):
new_mocked_method = staticmethod(new_mocked_method)
elif isinstance(original_method, classmethod):
new_mocked_method = classmethod(new_mocked_method)
self.set_method(method_name, new_mocked_method)
def stub(self, method_name):
original_method = self.get_method(method_name)
original = (method_name, original_method)
self.original_methods.append(original)
# If we're trying to stub real object(not a generated mock), then we should patch object to use our mock method.
# TODO: Polymorphism was invented long time ago. Refactor this.
if self.stubbing_real_object:
self.replace_method(method_name, original_method)
def unstub(self):
while self.original_methods:
method_name, original_method = self.original_methods.pop()
self.set_method(method_name, original_method)
def Mock(*args, **kwargs):
'''A ``mock``() alias.
Alias for compatibility. To be removed in version 1.0.
'''
warnings.warn("\n`Mock()` is deprecated, please use `mock()` (lower 'm') instead.", DeprecationWarning)
return mock(*args, **kwargs)
```
#### File: lib/mockito/mock_registry.py
```python
class MockRegistry:
"""Registers mock()s, ensures that we only have one mock() per mocked_obj, and
iterates over them to unstub each stubbed method. """
def __init__(self):
self.mocks = {}
def register(self, mock):
self.mocks[mock.mocked_obj] = mock
def mock_for(self, cls):
return self.mocks.get(cls, None)
def unstub_all(self):
for mock in self.mocks.itervalues():
mock.unstub()
self.mocks.clear()
mock_registry = MockRegistry()
```
#### File: test/resources/statuschecker.py
```python
import re
from robot.result import ExecutionResult
def process_output(inpath, outpath=None):
result = ExecutionResult(inpath)
_process_suite(result.suite)
result.save(outpath)
return result.return_code
def _process_suite(suite):
for subsuite in suite.suites:
_process_suite(subsuite)
for test in suite.tests:
_process_test(test)
def _process_test(test):
exp = _Expected(test.doc)
_check_status(test, exp)
if test.status == 'PASS':
_check_logs(test, exp)
def _check_status(test, exp):
if exp.status != test.status:
test.status = 'FAIL'
if exp.status == 'PASS':
test.message = ("Test was expected to PASS but it FAILED. "
"Error message:\n") + test.message
else:
test.message = ("Test was expected to FAIL but it PASSED. "
"Expected message:\n") + exp.message
elif not _message_matches(test.message, exp.message):
test.status = 'FAIL'
test.message = ("Wrong error message.\n\nExpected:\n%s\n\nActual:\n%s\n"
% (exp.message, test.message))
elif test.status == 'FAIL':
test.status = 'PASS'
test.message = 'Original test failed as expected.'
def _message_matches(actual, expected):
if actual == expected:
return True
if expected.startswith('REGEXP:'):
pattern = '^%s$' % expected.replace('REGEXP:', '', 1).strip()
if re.match(pattern, actual, re.DOTALL):
return True
if expected.startswith('STARTS:'):
start = expected.replace('STARTS:', '', 1).strip()
if actual.startswith(start):
return True
return False
def _check_logs(test, exp):
for kw_indices, msg_index, level, message in exp.logs:
try:
kw = test.keywords[kw_indices[0]]
for index in kw_indices[1:]:
kw = kw.keywords[index]
except IndexError:
indices = '.'.join(str(i+1) for i in kw_indices)
test.status = 'FAIL'
test.message = ("Test '%s' does not have keyword with index '%s'"
% (test.name, indices))
return
if len(kw.messages) <= msg_index:
if message != 'NONE':
test.status = 'FAIL'
test.message = ("Keyword '%s' should have had at least %d "
"messages" % (kw.name, msg_index+1))
else:
if _check_log_level(level, test, kw, msg_index):
_check_log_message(message, test, kw, msg_index)
def _check_log_level(expected, test, kw, index):
actual = kw.messages[index].level
if actual == expected:
return True
test.status = 'FAIL'
test.message = ("Wrong level for message %d of keyword '%s'.\n\n"
"Expected: %s\nActual: %s.\n%s"
% (index+1, kw.name, expected,
actual, kw.messages[index].message))
return False
def _check_log_message(expected, test, kw, index):
actual = kw.messages[index].message.strip()
if _message_matches(actual, expected):
return True
test.status = 'FAIL'
test.message = ("Wrong content for message %d of keyword '%s'.\n\n"
"Expected:\n%s\n\nActual:\n%s"
% (index+1, kw.name, expected, actual))
return False
class _Expected:
def __init__(self, doc):
self.status, self.message = self._get_status_and_message(doc)
self.logs = self._get_logs(doc)
def _get_status_and_message(self, doc):
if 'FAIL' in doc:
return 'FAIL', doc.split('FAIL', 1)[1].split('LOG', 1)[0].strip()
return 'PASS', ''
def _get_logs(self, doc):
logs = []
for item in doc.split('LOG')[1:]:
index_str, msg_str = item.strip().split(' ', 1)
kw_indices, msg_index = self._get_indices(index_str)
level, message = self._get_log_message(msg_str)
logs.append((kw_indices, msg_index, level, message))
return logs
def _get_indices(self, index_str):
try:
kw_indices, msg_index = index_str.split(':')
except ValueError:
kw_indices, msg_index = index_str, '1'
kw_indices = [int(index) - 1 for index in kw_indices.split('.')]
return kw_indices, int(msg_index) - 1
def _get_log_message(self, msg_str):
try:
level, message = msg_str.split(' ', 1)
if level not in ['TRACE', 'DEBUG', 'INFO', 'WARN', 'FAIL']:
raise ValueError
except ValueError:
level, message = 'INFO', msg_str
return level, message
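

# For reference (inferred from the parsing above): a test [Documentation] like
#     FAIL Expected error text
#     LOG 2.1:3 WARN Some message
# means the test must FAIL with "Expected error text", and the first child
# keyword of keyword 2 must log "Some message" at WARN level as its message 3.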
if __name__=='__main__':
import sys
import os
if not 2 <= len(sys.argv) <= 3 or '--help' in sys.argv:
print __doc__
sys.exit(1)
infile = sys.argv[1]
outfile = sys.argv[2] if len(sys.argv) == 3 else None
print "Checking %s" % os.path.abspath(infile)
rc = process_output(infile, outfile)
if outfile:
print "Output: %s" % os.path.abspath(outfile)
if rc > 255:
rc = 255
sys.exit(rc)
```
#### File: Selenium2Library/test/run_unit_tests.py
```python
import env
import os, sys
import unittest
from Selenium2Library import utils
def run_unit_tests(modules_to_run=[]):
(test_module_names, test_modules) = utils.import_modules_under(
env.UNIT_TEST_DIR, include_root_package_name = False, pattern="test*.py")
bad_modules_to_run = [module_to_run for module_to_run in modules_to_run
if module_to_run not in test_module_names]
if bad_modules_to_run:
print "Specified test module%s not exist: %s" % (
' does' if len(bad_modules_to_run) == 1 else 's do',
', '.join(bad_modules_to_run))
return -1
tests = [unittest.defaultTestLoader.loadTestsFromModule(test_module)
for test_module in test_modules]
runner = unittest.TextTestRunner()
result = runner.run(unittest.TestSuite(tests))
rc = len(result.failures) + len(result.errors)
if rc > 255: rc = 255
return rc
if __name__ == '__main__':
sys.exit(run_unit_tests(sys.argv[1:]))
```
#### File: test/unit/test_webdrivermonkeypatches.py
```python
import unittest
from selenium.webdriver.remote.webelement import WebElement
from selenium.webdriver.remote.webdriver import WebDriver as RemoteWebDriver
from mockito import *
SCRIPT = "return [ window.id, window.name, document.title, document.URL ];"
HANDLE = "17c3dc18-0443-478b-aec6-ed7e2a5da7e1"
class MockWebDriver(RemoteWebDriver):
def __init__(self):
pass
current_window_handle = HANDLE
class WebDriverMonkeyPatchesTests(unittest.TestCase):
def test_window_info_values_are_strings(self):
driver = MockWebDriver()
when(driver).execute_script(SCRIPT).thenReturn(['id', 'name', 'title', 'url'])
info = driver.get_current_window_info()
self.assertEqual(info, (HANDLE, 'id', 'name', 'title', 'url'))
def test_window_info_values_are_empty_strings(self):
driver = MockWebDriver()
when(driver).execute_script(SCRIPT).thenReturn([''] * 4)
info = driver.get_current_window_info()
self.assertEqual(info, (HANDLE, '', 'undefined', 'undefined', 'undefined'))
def test_window_info_values_are_none(self):
driver = MockWebDriver()
when(driver).execute_script(SCRIPT).thenReturn([None] * 4)
info = driver.get_current_window_info()
self.assertEqual(info, (HANDLE, 'undefined', 'undefined', 'undefined', 'undefined'))
def test_window_id_is_bool(self):
driver = MockWebDriver()
when(driver).execute_script(SCRIPT).thenReturn([True, '', '', '']).thenReturn([False, '', '', ''])
info = driver.get_current_window_info()
self.assertEqual(info[1], True)
info = driver.get_current_window_info()
self.assertEqual(info[1], False)
def test_window_id_is_web_element(self):
driver = MockWebDriver()
elem = WebElement(None, '052b083c-0d6e-45ca-bda6-73ca13c42561')
when(driver).execute_script(SCRIPT).thenReturn([elem, '', '', ''])
info = driver.get_current_window_info()
self.assertEqual(info[1], elem)
def test_window_id_is_container(self):
driver = MockWebDriver()
when(driver).execute_script(SCRIPT).thenReturn([['1'], '', '', '']).thenReturn([{'a': 2}, '', '', ''])
info = driver.get_current_window_info()
self.assertEqual(info[1], ['1'])
info = driver.get_current_window_info()
self.assertEqual(info[1], {'a': 2})
def test_window_id_is_empty_container(self):
driver = MockWebDriver()
when(driver).execute_script(SCRIPT).thenReturn([[], '', '', '']).thenReturn([{}, '', '', ''])
info = driver.get_current_window_info()
self.assertEqual(info[1], [])
info = driver.get_current_window_info()
self.assertEqual(info[1], {})
``` |
{
"source": "josvan0/PromoCarDjango",
"score": 2
} |
#### File: PromoCarDjango/cars/models.py
```python
from django.db import models
from company.models import Image
# Create your models here.
class Color(models.Model):
name = models.CharField(max_length=30)
finished = models.CharField(max_length=30, null=True)
hex_code = models.CharField(max_length=6)
def __str__(self):
return f'[#{self.hex_code}] {self.name} {self.finished}'
class Car(models.Model):
model = models.CharField(max_length=50)
year = models.SmallIntegerField()
colors = models.ManyToManyField(Color, related_name='cars')
doors = models.SmallIntegerField(null=True)
tyres = models.SmallIntegerField(null=True)
standard = models.BooleanField(default=False)
electric = models.BooleanField(default=False)
details = models.CharField(max_length=1000, null=True)
photos = models.ManyToManyField(Image, related_name='cars')
def __str__(self):
return f'{self.model} ({self.year})'
def first_photo(self):
try:
return self.photos.all()[0].content.url
except IndexError:
return ''
```
#### File: PromoCarDjango/cars/tests.py
```python
from django.test import TestCase
from django.db.models import F
from django.core.files import File
from django.urls import reverse
from .models import Color, Car
from company.models import Image, Banner
# Create your tests here.
class ColorModelTests(TestCase):
def test_null_data(self):
orange = Color.objects.create(name='Orange', hex_code='FB8500')
Color.objects.create(name='Light Cornflower Blue',
finished='Brillant', hex_code='8ECAE6')
self.assertEqual(len(Color.objects.all()), 2)
self.assertEqual(orange.finished, None)
class CarModelTests(TestCase):
def test_null_data(self):
Car.objects.create(model='BMW M3 Coupe', year=2012)
stored = Car.objects.all()[0]
self.assertEqual(stored.doors, None)
self.assertEqual(stored.electric, False)
self.assertEqual(stored.details, None)
self.assertCountEqual(stored.colors.all(), [])
self.assertCountEqual(stored.photos.all(), [])
def test_incomplete_data(self):
car = Car(model='Lambo', year=2017, tyres=4, standard=True)
car.save()
car.colors.create(name='Orange', hex_code='FB8500')
img = Image()
img.content.save('lambo.jpg',
File(open('path\\lambo.jpg', 'rb')))
car.photos.add(Image.objects.get(id=img.id))
stored = Car.objects.get(id=car.id)
self.assertEqual(stored.tyres, 4)
self.assertEqual(stored.standard, True)
self.assertEqual(stored.electric, False)
self.assertCountEqual(stored.colors.all(), car.colors.all())
self.assertCountEqual(stored.photos.all(), car.photos.all())
class CarsViewsTests(TestCase):
def load_test_data(test):
def wrapper(*args, **kwargs):
Car.objects.create(model='Chevy', year=2001, doors=2)
Car.objects.create(model='<NAME>', year=1990,
doors=4, tyres=4, standard=True)
Car.objects.create(model='<NAME>', year=1996,
doors=4, standard=True,
details='Use for tax in Mexico')
mini = Car(model='Mini Cooper', year=2001, doors=2,
tyres=4, standard=True, details='Special edition')
mini.save()
mini.colors.create(name='Light Cornflower Blue',
finished='Brillant', hex_code='8ECAE6')
mini.colors.create(name='Orange', hex_code='FB8500')
cizeta = Car(model='Cizeta', year=1996, doors=2, tyres=4)
cizeta.save()
cizeta.colors.create(name='Cizeta RED', hex_code='e63946')
cizeta.colors.create(name='Cizeta BLACK', hex_code='000001')
cizeta.colors.create(name='Cizeta WHITE', hex_code='fffffe')
cizeta_img = Image()
cizeta_img.content.save('cizeta.jpg',
File(open('path\\cizeta.jpg', 'rb')))
cizeta.photos.add(cizeta_img)
Banner.objects.create(title='Cizeta!', img=cizeta_img)
return test(*args, **kwargs)
return wrapper
@load_test_data
def test_car_list(self):
response = self.client.get(reverse('cars:index'))
all_cars = Car.objects.only('model', 'year',
'photos').order_by(F('year').desc())[:10]
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Cizeta!')
self.assertQuerysetEqual(response.context['car_list'], all_cars)
@load_test_data
def test_cars_by_year(self):
response_2001 = self.client.get(reverse('cars:by_year',
kwargs={'year': 2001}))
cars_2001 = Car.objects.only('model', 'year',
'photos').filter(year=2001)
response_1996 = self.client.get(reverse('cars:by_year',
kwargs={'year': 1996}))
cars_1996 = Car.objects.only('model', 'year',
'photos').filter(year=1996)
self.assertEqual(response_2001.status_code, 200)
self.assertEqual(response_1996.status_code, 200)
self.assertContains(response_2001, '2001 cars')
self.assertContains(response_1996, '1996 cars')
self.assertQuerysetEqual(response_2001.context['car_list'],
list(cars_2001), ordered=False)
self.assertQuerysetEqual(response_1996.context['car_list'],
list(cars_1996), ordered=False)
@load_test_data
def test_car_detail(self):
car_ids = Car.objects.values('id')
response = self.client.get(reverse('cars:detail',
kwargs={'car_id': car_ids[0]['id']}))
bad_response = self.client.get(reverse('cars:detail',
kwargs={'car_id': 0}))
self.assertEqual(bad_response.status_code, 404)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['car'],
Car.objects.get(id=car_ids[0]['id']))
``` |
{
"source": "josven/opensearchmock",
"score": 2
} |
#### File: tests/fake_elasticsearch/test_suggest.py
```python
from opensearchpy.exceptions import NotFoundError
from tests import TestOpensearchmock, INDEX_NAME, DOC_TYPE, BODY
class TestSuggest(TestOpensearchmock):
def test_should_raise_notfounderror_when_nonindexed_id_is_used_for_suggest(self):
with self.assertRaises(NotFoundError):
self.os.suggest(body={}, index=INDEX_NAME)
def test_should_return_suggestions(self):
self.os.index(index=INDEX_NAME, doc_type=DOC_TYPE, body=BODY)
suggestion_body = {
'suggestion-string': {
'text': 'test_text',
'term': {
'field': 'string'
}
},
'suggestion-id': {
'text': 1234567,
'term': {
'field': 'id'
}
}
}
suggestion = self.os.suggest(body=suggestion_body, index=INDEX_NAME)
self.assertIsNotNone(suggestion)
self.assertDictEqual({
'suggestion-string': [
{
'text': 'test_text',
'length': 1,
'options': [
{
'text': 'test_text_suggestion',
'freq': 1,
'score': 1.0
}
],
'offset': 0
}
],
'suggestion-id': [
{
'text': 1234567,
'length': 1,
'options': [
{
'text': 1234568,
'freq': 1,
'score': 1.0
}
],
'offset': 0
}
],
}, suggestion)
``` |
{
"source": "Josverl/micropy-cli",
"score": 2
} |
#### File: micropy/packages/source_package.py
```python
import shutil
from pathlib import Path
from tempfile import mkdtemp
from typing import Any, Callable, List, Optional, Tuple, Union
from micropy import utils
from micropy.exceptions import RequirementNotFound
from .package import Package
from .source import DependencySource
class PackageDependencySource(DependencySource):
"""Dependency Source for pypi packages.
Args:
package (Package): Package source points too.
format_desc: Callback to format progress bar description.
Defaults to None.
"""
repo: str = "https://pypi.org/pypi/{name}/json"
def __init__(self, package: Package,
format_desc: Optional[Callable[..., Any]] = None):
super().__init__(package)
try:
utils.ensure_valid_url(self.repo_url)
except Exception:
raise RequirementNotFound(
f"{self.repo_url} is not a valid url!", package=self.package)
else:
self._meta: dict = utils.get_package_meta(
str(self.package),
self.repo_url
)
self.format_desc = format_desc or (lambda n: n)
@property
def repo_url(self) -> str:
_url = self.repo.format(name=self.package.name)
return _url
@property
def source_url(self) -> str:
return self._meta.get('url', None)
@property
def file_name(self) -> str:
return utils.get_url_filename(self.source_url)
def fetch(self) -> bytes:
"""Fetch package contents into memory.
Returns:
bytes: Package archive contents.
"""
self.log.debug(f"fetching package: {self.file_name}")
desc = self.format_desc(self.file_name)
content = utils.stream_download(self.source_url, desc=desc)
return content
def __enter__(self) -> Union[Path, List[Tuple[Path, Path]]]:
"""Prepare Pypi package for installation.
Extracts the package into a temporary directory then
generates stubs for type hinting.
This helps with intellisense.
If the dependency is a module, a list
of tuples with the file and stub path, respectively,
will be returned. Otherwise, the path to the package
root will be returned.
Returns:
Root package path or list of files.
"""
self.tmp_path = Path(mkdtemp())
with self.handle_cleanup():
path = utils.extract_tarbytes(self.fetch(), self.tmp_path)
stubs = self.generate_stubs(path)
pkg_root = self.get_root(path)
return pkg_root or stubs
def __exit__(self, *args):
shutil.rmtree(self.tmp_path, ignore_errors=True)
return super().__exit__(*args)
```
#### File: micropy/packages/source_path.py
```python
from pathlib import Path
from typing import List, Optional, Tuple, Union
from .package import Package
from .source import DependencySource
class LocalDependencySource(DependencySource):
"""Dependency Source that is available locally.
Args:
package (Package): Package source points too.
path (Path): Path to package.
"""
def __init__(self, package: Package, path: Path):
super().__init__(package)
self._path = path
self.is_local = True
@property
def path(self) -> Path:
return self._path
def __enter__(self) -> Union[Path, List[Tuple[Path, Optional[Path]]]]:
"""Determines appropriate path.
Returns:
Path to package root or list of files.
"""
return self.path
```
#### File: micropy/project/checks.py
```python
import subprocess as subproc
from functools import partial as _p
from packaging import version
from micropy.logger import Log
VSCODE_MS_PY_MINVER = "2019.9.34474"
log = Log.get_logger('MicroPy')
def iter_vscode_ext(name=None):
"""Iterates over installed VSCode Extensions.
Args:
name (str, optional): Name of Extension to Yield
"""
_cmd = "code --list-extensions --show-versions"
proc = subproc.run(_cmd, stdout=subproc.PIPE, stderr=subproc.PIPE, shell=True)
results = [e.strip() for e in proc.stdout.splitlines()]
for ext in results:
ename, vers = ext.split('@')
if not name:
yield (ename, vers)
if name and ename == name:
yield (ename, vers)
def vscode_ext_min_version(ext, min_version=VSCODE_MS_PY_MINVER, info=None):
"""Check if installed VScode Extension meets requirements.
Args:
ext (str): Name of Extension to Test
min_version (str, optional): Minimum version.
Defaults to VSCODE_MS_PY_MINVER.
info (str, optional): Additional information to output.
Defaults to None.
Returns:
bool: True if requirement is satisfied, False otherwise.
"""
try:
name, vers = next(iter_vscode_ext(name=ext), (ext, '0.0.0'))
except Exception as e:
log.debug(f"vscode check failed to run: {e}")
log.debug("skipping...")
return True
else:
cur_vers = version.parse(vers)
min_vers = version.parse(min_version)
if cur_vers >= min_vers:
return True
log.error(
f"\nVSCode Extension {ext} failed to satisfy requirements!", bold=True)
log.error(f"$[Min Required Version]: {min_vers}")
log.error(f"$[Current Version:] {cur_vers}")
if info:
log.warn(info)
return False
TEMPLATE_CHECKS = {
'ms-python': _p(vscode_ext_min_version,
'ms-python.python',
info=(
"VSCode Integration will fail! "
"See $[BradenM/micropy-cli#50] for details.\n"
)
),
}
```
#### File: micropy/utils/decorators.py
```python
__all__ = ['lazy_property']
def lazy_property(fn):
attr = '_lazy__' + fn.__name__
@property
def _lazy_property(self):
if not hasattr(self, attr):
setattr(self, attr, fn(self))
return getattr(self, attr)
return _lazy_property
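

if __name__ == '__main__':
    # Minimal sketch, not part of the original module: the wrapped method runs
    # once, after which the value cached on the instance is returned directly.
    class Report:
        @lazy_property
        def data(self):
            print('computing...')        # printed only on first access
            return [1, 2, 3]

    r = Report()
    print(r.data)    # computing... then [1, 2, 3]
    print(r.data)    # cached: just [1, 2, 3]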
``` |
{
"source": "Josverl/MicroPython-Bootcamp",
"score": 3
} |
#### File: Demos/Demo-3 Led and PWM/demo31.py
```python
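# REPL walkthrough: run these snippets one block at a time at the prompt;
# several of the while-loops below run forever until interrupted with Ctrl-C.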
import machine
BlueLED = machine.Pin(26, machine.Pin.OUT)
BlueLED.value(1)
BlueLED.value(0)
while True:
BlueLED.value(1)
BlueLED.value(0)
import time
while True:
BlueLED.value(1)
time.sleep(0.5)
BlueLED.value(0)
time.sleep(0.5)
# PWM
import machine
BlueLED = machine.PWM(machine.Pin(26), freq=1, duty=50)
BlueLED.deinit()
# Fade LED
import time
import machine
BlueLED = machine.PWM(machine.Pin(26), freq=5000, duty = 0)
while True:
for i in range(100):
BlueLED.duty(i)
time.sleep(0.01)
for i in range(100, 0, -1):
BlueLED.duty(i)
time.sleep(0.01)
# Multicolor LED
import machine
RedLED = machine.PWM(machine.Pin(22), duty = 0)
GreenLED = machine.PWM(machine.Pin(21), duty = 0)
BlueLED = machine.PWM(machine.Pin(26), duty = 0)
RedLED.duty(100*1/5)
GreenLED.duty(100*3/5)
BlueLED.duty(100*4/5)
RedLED.deinit()
GreenLED.deinit()
BlueLED.deinit()
# Rainbow
import time
import machine
def fade(led, begin=0, end=100, step=1):
for i in range(begin, end, step):
led.duty(i)
time.sleep(0.01)
RedLED = machine.PWM(machine.Pin(22), duty = 0)
GreenLED = machine.PWM(machine.Pin(21), duty = 0)
BlueLED = machine.PWM(machine.Pin(26), duty = 0)
while True:
fade(GreenLED) # Ramp up green
fade(RedLED, begin=100,end=0,step=-1) # Ramp down red
fade(BlueLED) # Ramp up blue
fade(GreenLED, begin=100,end=0,step=-1) # Ramp down green
fade(RedLED) # Ramp up red
fade(BlueLED, begin=100,end=0,step=-1) # Ramp down blue
RedLED.deinit()
GreenLED.deinit()
BlueLED.deinit()
# L9110 Fan Motor
import machine
inA = machine.Pin(21, machine.Pin.OUT)
inB = machine.Pin(22, machine.Pin.OUT)
inA.value(0)
inB.value(1) # Forward
inB.value(0) # Stop
inA.value(1) # Reverse
import machine
pwmFan = machine.PWM(machine.Pin(21))
reverseFan = machine.Pin(22, machine.Pin.OUT)
pwmFan.duty(70)
pwmFan.duty(50)
reverseFan.value(1)
pwmFan.deinit()
# SG90 MicroServo
import machine
pwmServo = machine.PWM(machine.Pin(26), freq=50, duty=8)
pwmServo.duty(4)
pwmServo.duty(13)
```
#### File: Demos/Demo-4.2 Modules/myprinter.py
```python
def printit(text):
    # use ANSI escape sequences to print inverted, bold, red text
# https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit
print("\033[;7m\033[1;31m{}\033[0;0m".format(str(text) ) )
```
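Usage is a one-liner; on any ANSI-capable terminal the text comes out inverted, bold and red, then the attributes are reset:

```python
from myprinter import printit

printit("ALERT: sensor offline")
```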
#### File: Demos/Demo-4.3 Display/mandel.py
```python
import sys
import time
import display
import machine
import micropython
from micropython import const
TFT_WIDTH = const(320)
TFT_HEIGHT = const(240)
def mandelbrot(tft, width, height, left, right, top, bottom, iterations):
for y in range(height):
for x in range(width):
z = complex(0, 0)
c = complex(left + x * (right - left) / width, top + y * (bottom - top) / height)
norm = abs(z) ** 2
for count in range(iterations):
if norm <= 4:
z = z * z + c
norm = abs(z * z)
else:
break
if count <= 4:
color = tft.DARKGREY
elif count <= 8:
color = tft.GREEN
elif count <= 10:
color = tft.BLUE
elif count <= 12:
color = tft.RED
elif count <= 15:
color = tft.YELLOW
else:
color = tft.BLACK
tft.pixel(x, y, color)
def show(tft):
# https://github.com/loboris/MicroPython_ESP32_psRAM_LoBo/wiki/machine#machinewdtenable
start_time = time.time()
_ = machine.WDT(False)
mandelbrot(
tft,
width=TFT_WIDTH,
height=TFT_HEIGHT,
left=const(-2),
right=0.5,
top=const(1.25),
bottom=const(0),
iterations=const(40),
)
_ = machine.WDT(True)
duration = time.time() - start_time
print("rendered in %.1f sec." % duration)
```
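A usage sketch for show() on the Loboris ESP32 port this demo targets; the tft.init() arguments below (controller constant, width, height) are placeholders that must match the actual board wiring:

```python
import display
import mandel

tft = display.TFT()
# Hypothetical init call: adjust controller/pins for your board.
tft.init(tft.ST7789, width=320, height=240)
mandel.show(tft)   # renders pixel-by-pixel; takes a while on an ESP32
```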
#### File: Demos/Demo-5.1 Cortana/1_FaceAPI-URL.py
```python
import gc
import urequests #pylint: disable=import-error
subscription_key = '0366c4649003438ea99891d9006809bd'
assert subscription_key
# Next, verify `face_api_url` and make sure it corresponds to the region you used when generating the subscription key. If you are using a trial key, you don't need to make any changes.
face_api_url = 'https://westeurope.api.cognitive.microsoft.com/face/v1.0/detect'
# The next few lines of code call into the Face API to detect the faces in the image. In this instance, the image is specified via a publically visible URL. You can also pass an image directly as part of the request body. For more information, see the [API reference](https://westus.dev.cognitive.microsoft.com/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
hdr_key = { 'Ocp-Apim-Subscription-Key': subscription_key }
# For MicroPython we must add the query-string parameters to the URL ourselves
def post(url,params=None, **kw):
if params!=None:
#todo: urllib.urlencode the parameters
glue = "?"
for k in params:
url=url+"{}{}={}".format(glue,k,params[k])
glue="&"
return urequests.request("POST", url, **kw)
#set up a few different sets of parameters to the API
param_all = {
'returnFaceId': 'true',
'returnFaceLandmarks': 'true',
'returnFaceAttributes': 'age,gender,headPose,smile,facialHair,glasses,emotion,hair,makeup,occlusion,accessories,blur,exposure,noise'
}
param_some = {
'returnFaceId': 'true',
'returnFaceLandmarks': 'false',
'returnFaceAttributes': 'age,gender,emotion,hair'
}
param_simple = {
'returnFaceId': 'true',
'returnFaceLandmarks': 'false'
}
#The core functionality is in processing an image, and all faces in it
def detect_faces(image_url= None):
if image_url == None:
return None
jsonbody = {"url": image_url}
    faces = None
    response = None
    try:
        response = post(face_api_url, json=jsonbody, headers=hdr_key, params=param_some)
        if response.status_code >= 400:
            print('An error has occurred : HTTP Error {}'.format(response.status_code))
        elif response.status_code == 200:
            #print(response.json())
            faces = response.json()
    finally:
        # guard so a failed request does not leave response/faces unbound
        if response:
            response.close()
        gc.collect()
    return faces
def process_faces(faces):
print("Detected {} faces in the image".format(len(faces)))
for face in faces:
print ("a {} year old {}".format( face['faceAttributes']['age'], face['faceAttributes']['gender']) )
# You can experiment with different images by changing ``image_url`` to point
# to a different image and rerunning this code.
for i in range(9):
image_url = 'https://how-old.net/Images/faces2/main00{}.jpg'.format(i)
print("Detecting image {}".format(image_url ))
faces = detect_faces( image_url )
process_faces ( faces)
print('---------------------------------------------------------------')
#
image_url = 'http://photonshouse.com/photo/c6/c60bfd79e990878486374b7d833ccd8e.jpg'
faces = detect_faces( image_url )
process_faces ( faces)
image_url = 'https://secure.i.telegraph.co.uk/multimedia/archive/01827/Lakshmi-Mittal_1827147b.jpg'
faces = detect_faces( image_url )
process_faces ( faces)
image_url = 'https://pix.avaxnews.com/avaxnews/4a/42/0004424a.jpeg'
faces = detect_faces( image_url )
process_faces ( faces)
```
#### File: Demos/Demo-5.1 Cortana/2_FaceAPI-IMAGE.py
```python
import ujson
import usocket
import logging
import gc
import uos as os
#Logging
log = logging.getLogger("POST")
log.setLevel(logging.ERROR)
#only during debug
#log.setLevel(logging.DEBUG)
#Utility
def internet_connected():
"test actual internet connectivity"
    socket = None
    resp = None
    try:
        # assumption: the original called an undefined urlopenbin; the
        # urlopen helper defined below is the closest match in this module
        socket = urlopen('http://www.msftncsi.com/ncsi.txt')
        resp = socket.readline()
    finally:
        if socket:
            socket.close()
    return (resp == b'Microsoft NCSI')
def filesize(fname):
try:
s = os.stat(fname)[6]
except:
s = 0
return s
#derived from urllib.urequests module
def urlopen(url, data=None, method="GET", datafile=None):
if data is not None and method == "GET":
method = "POST"
try:
proto, dummy, host, path = url.split("/", 3)
except ValueError:
proto, dummy, host = url.split("/", 2)
path = ""
if proto == "http:":
port = 80
elif proto == "https:":
import ussl
port = 443
else:
raise ValueError("Unsupported protocol: " + proto)
if ":" in host:
host, port = host.split(":", 1)
port = int(port)
ai = usocket.getaddrinfo(host, port , 0, usocket.SOCK_STREAM)
if ai==[] :
print('Error: No internet connectivity')
return None
log.debug(ai)
ai = ai[0]
s = usocket.socket(ai[0], ai[1], ai[2])
try:
log.debug('Connecting')
s.connect(ai[-1])
if proto == "https:":
log.debug('Wrap SSL')
s = ussl.wrap_socket(s, server_hostname=host)
#HTTP Start
s.write(b"{} /{} HTTP/1.0\r\n".format(method,path))
#Headers todo: Pass in headers
s.write(b"Ocp-Apim-Subscription-Key: 0366c4649003438ea99891d9006809bd\r\n")
s.write(b"Accept: application/json\r\n")
s.write(b"Host: {}:{}\r\n".format(host,port))
if data:
log.debug('Send Data')
s.write(b"Content-Length: ")
s.write(str(len(data)))
s.write(b"\r\n")
elif datafile:
log.debug('Send binary Data File')
s.write(b"Content-Type: application/octet-stream\r\n")
s.write(b"cache-control: no-cache\r\n")
s.write(b"content-length: {}\r\n".format(str(filesize(datafile)) ))
s.write(b"Connection: keep-alive\r\n")
s.write(b"\r\n")
if data:
s.write(data)
elif datafile:
with open(datafile,'rb') as f:
while True:
data = f.read(512)
if not data:
break
log.debug('write')
s.write(data)
log.debug('---sent->>')
l = s.readline()
log.debug('<<-hdrs--')
log.info(l)
log.debug('==')
#Read the first status returned.
l = l.split(None, 2)
log.debug(l)
status = int(l[1])
#read through returned headers
while True:
l = s.readline()
if not l or l == b"\r\n":
break
log.debug(l)
if l.startswith(b"Transfer-Encoding:"):
if b"chunked" in l:
raise ValueError("Unsupported " + l)
elif l.startswith(b"Location:"):
raise NotImplementedError("Redirects not yet supported")
except OSError:
log.debug("Error, closing socket")
s.close()
raise
s.setblocking(False)
return s
def detect_faces_binary (fname = None):
url = 'https://westeurope.api.cognitive.microsoft.com/face/v1.0/detect?returnFaceId=true&returnFaceAttributes=age,gender,headPose,smile,facialHair,glasses,emotion,hair,makeup,occlusion,accessories,blur,exposure,noise'
#url = 'http://192.168.137.1:8888/face/v1.0/detect?returnFaceId=true&returnFaceAttributes=age,gender,headPose,smile,facialHair,glasses,emotion,hair,makeup,occlusion,accessories,blur,exposure,noise'
s2 = urlopen(url, method='POST',datafile=fname)
log.debug("Returned")
#Readline does not work :-(
resp_body=b""
while True:
block = s2.read(512)
if block:
            log.debug('receiving body..')
resp_body+=block
else:
log.debug( 'done')
break
s2.close()
gc.collect() #Free memory
try:
log.info(resp_body)
faces = ujson.loads(resp_body)
#print(response)
except :
raise RuntimeError("Problem communicating with Cortana.")
return faces
def process_faces(faces):
for face in faces:
print ("I see a {} year old {}".format(
face['faceAttributes']['age'],
face['faceAttributes']['gender']
), end=' ')
#print( face['faceId'] )
#print( face['faceAttributes'] )
#print( face['faceAttributes']['gender'] )
#print( face['faceAttributes']['age'] )
print( "with {} hair, {} and {}".format(
face['faceAttributes']['hair']['hairColor'][0]['color'], #main haircolor
", ".join( face['faceAttributes']['facialHair'] ),
face['faceAttributes']['glasses']
))
emotion = face['faceAttributes']['emotion']
        # take the strongest emotion; the original sliced [:1], which picks
        # the weakest score, so [-1:] is used here to pick the highest
        print(sorted(emotion, key=emotion.__getitem__)[-1:])
#Demo from /flash/foto
if True:
for fname in os.listdir('/flash/foto'):
print('---------------------------------------------------------------')
print( "Foto : /flash/foto/{}".format(fname) )
faces = detect_faces_binary( "/flash/foto/{}".format(fname))
process_faces(faces)
print('---------------------------------------------------------------')
#demo from /sd/foto
if False:
os.sdconfig(os.SDMODE_SPI, clk=18, mosi=23, miso=19, cs=4)
os.mountsd()
for fname in os.listdir('/sd/foto2'):
print('---------------------------------------------------------------')
print( "Foto : /sd/foto/{}".format(fname) )
faces = detect_faces_binary( "/sd/foto/{}".format(fname) )
process_faces(faces)
print('---------------------------------------------------------------')
```
#### File: Demos/Demo-6.1 PIR/PIRDisplay.py
```python
from machine import Pin, Timer
from time import sleep_ms
import logging
logging.basicConfig(level=logging.DEBUG)
#log = logging.getLogger(__name__) #in module
log = logging.getLogger('menu')
#M5Fire - White lead on Port B
PIR_PIN = 26
demo = 1
if demo==1 :
pir = Pin(PIR_PIN,Pin.IN)
while True:
if pir.value():
print('Human detected')
else :
print('.',end='')
sleep_ms(500)
#=================================================================
# This is the function to be executed
# when the PIR sensor first sees movement
def pir_cb(p):
print('Human detected')
log.debug( p)
log.info('Turn the display on')
tft.backlight(1)
#t1.reshoot()
if demo ==2:
#M5Fire - White lead on Port B
# Optional parameters handler and trigger defines the event
pir = Pin(PIR_PIN, Pin.IN, handler=pir_cb, trigger=Pin.IRQ_RISING)
#=====================================================================
def pir_cb2(p):
print('Human detected')
log.debug( p)
if p.irqvalue() == p.IRQ_RISING:
print('Turn the display on')
tft.backlight(1)
#and pause the timer to avoid it turning off
t1.pause()
else:
log.debug('Start timer to turn the display off')
t1.reshoot()
def displayoffcb(timer):
log.debug("[tcb] timer: {}".format(timer.timernum()))
#sanity check as things might have changed ...
if pir.value() == 0:
log.info("Turn the display off")
tft.backlight(0)
else:
log.info("People still around")
t1.reshoot()
if demo ==3:
pir = Pin(PIR_PIN, Pin.IN, handler=pir_cb2, trigger=Pin.IRQ_ANYEDGE)
if not ('t1' in dir()):
t1 = Timer(1)
t1.init(period=5000, mode=t1.ONE_SHOT, callback=displayoffcb)
tft.backlight(1)
```
#### File: Labs/Lab-4.0 WiFi/5_wifi_logging.py
```python
import network,utime #pylint: disable=import-error
# ----------------------------------------------------------
# Define callback function used for monitoring wifi activity
# ----------------------------------------------------------
'''
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
'''
def wifi_cb(info):
_red = "\033[31m"
_cyan= "\033[36m"
_norm = "\033[00m"
if (info[2]):
msg = ", info: {}".format(info[2])
else:
msg = ""
print(_cyan+"I [WiFi] event: {} ({}){}".format( info[0], info[1], msg)+_norm)
# Enable callbacks
network.WLANcallback(wifi_cb)
# ----------------------------------------------------------
# create station interface - Standard WiFi client
wlan = network.WLAN(network.STA_IF)
wlan.active(False)
# activate the interface
wlan.active(True)
# connect to a known WiFi
wlan.connect('IOTBOOTCAMP', 'MicroPython')
# Note that this may take some time, so we need to wait
# Wait 5 sec or until connected
tmo = 50
while not wlan.isconnected():
utime.sleep_ms(100)
tmo -= 1
if tmo == 0:
break
# check if the station is connected to an AP
if wlan.isconnected():
print("=== Station Connected to WiFi \n")
else:
print("!!! Not able to connect to WiFi")
# gets or sets the interface's IP/netmask/gw/DNS addresses
# 'Raw'
print( wlan.ifconfig() )
#pretty
c = wlan.ifconfig()
print("IP:{0}, Network mask:{1}, Router:{2}, DNS: {3}".format( *c ))
```
#### File: Labs/Lab-4.3 Display/x_touch.py
```python
import time

def test_touch():
    # assumes the LoBo build's preloaded tft object
    lastx = 0
    lasty = 0
    while True:
        t, x, y = tft.gettouch()
        if t:
            dx = abs(x - lastx)
            dy = abs(y - lasty)
            if (dx > 2) and (dy > 2):
                tft.circle(x, y, 4, tft.RED)
                # remember the last touch so movement is measured between
                # touches (the original reset lastx/lasty to 0 each pass)
                lastx, lasty = x, y
        time.sleep_ms(50)
```
#### File: Labs/lab-4.6 motion tracking/ShockDetector.py
```python
import machine,time
from machine import Pin, Signal, PWM
import m5stack
#blue led on pin 21
#green = Signal( Pin(18,Pin.OUT) )
#red = Signal( Pin(19,Pin.OUT) )
#blue = Signal( Pin(23,Pin.OUT) )
def pwmled(pin):
#all leds use the same timer
led=PWM(pin,timer=1)
led.init(freq=10000)
led.duty(100)
return led
grn_pwm=pwmled(18)
red_pwm=pwmled(19)
blu_pwm=pwmled(23)
if not 'i2c' in dir():
#avoid recreating the same bus
i2c = machine.I2C(0, sda=21, scl=22)
if 104 in i2c.scan():
print('motion sensor detected on i2cbus')
#init only one time
if not 'tft' in dir():
tft = m5stack.Display()
import machine,time
if not 'i2c' in dir():
i2c = machine.I2C(0, sda=21, scl=22)
MOTION_ID = const(104)
if MOTION_ID in i2c.scan():
print('motion sensor detected on i2cbus')
# load motion sensor logic,
# two different devices share the same ID, try and retry
try:
from mpu6050 import MPU6050
motion = MPU6050(i2c, accel_sf=10)
print("Gyro+Accelerometer/Compass MPU id: " + hex(motion.whoami))
except:
from mpu9250 import MPU9250
motion = MPU9250(i2c)
print("Gyro+Accelerometer/Compass {} id: {}".format(motion.__class__.__name__, hex(motion.whoami)))
else:
print('No motion sensor detected')
#shock detector
SENSITIVITY = 11
blu_pwm.duty(0);grn_pwm.duty(0);red_pwm.duty(0)
while 1:
x,y,z = motion.acceleration
if x > SENSITIVITY or y > SENSITIVITY or z > SENSITIVITY :
print('TILT')
#m5stack.Beep(1000)
## Make Beep
## Send Alert to MQTT
## Send Alert via Microsoft Flow
x1 = min( abs(int( x *10 )),100)
y1 = min( abs(int( y *10 )),100)
z1 = min( abs(int( z *10 )),100)
blu_pwm.duty(x1)
grn_pwm.duty(y1)
red_pwm.duty(z1)
time.sleep_ms(100)
print('Done')
```
#### File: Labs/Lab-6.3 Air quality (MCU680)/timer.py
```python
windows.header('Lab 6.3 Air Q - Timer')
#Example using a timer
import machine
from MCU680 import *
#Timer call back function
#This will be called repeatedly by a timer
def sensor_cb(timer):
#external inputs / outputs
global readings
while data_ready():
read_data()
readings = process_data()
#output the results to the serial
print("Temperature: {Temp:4.1f} C, Humidity: {Humi:2.0f}%, Altitude: {Alt} meters, Pressure: {Pres:7.2f} HPa".format( **readings))
#print("IAQ Accuracy: {IAQa} , IAQ : {IAQ}, Gas {Gas} Ohm".format( **readings))
print('')
init_sensor()
#create a timer
t3 = machine.Timer(3)
# call the sensor_cb function every 5 seconds
t3.init(period=5*1000, mode=t3.PERIODIC, callback=sensor_cb)
done= False
if done:
t3.stop()
t3.deinit()
```
#### File: Labs/Lab-7.1 WeatherStation/MCU680.py
```python
import machine, time, ustruct
# Initialize serial
uart = machine.UART(1, tx=26, rx=36, baudrate=9600)
# Variables
measurements = bytearray(20)
def testBit(int_type, offset):
mask = 1 << offset
return(int_type & mask)
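# e.g. testBit(0b00100101, 2) returns 4 (non-zero means bit 2 is set)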
# Initialize GY_MCU680 to output all data
time.sleep(4)
uart.write(b'\xa5\x55\x3f\x39')
# Initialize GY_MCU680 in continuous output mode
time.sleep(1)
uart.write(b'\xa5\x56\x02\xfd')
uart.flush()
while True:
buffer_size = uart.any()
# print(" Number in the buffer:", buffer_size)
# to do fix this: "get" 20 bytes at a time"
if buffer_size == 20:
measurements = uart.read()
print("Full: ",measurements)
# print("Flags: ",measurements[2])
print()
# for i in range(buffer_size):
# print(i, ": ",measurements[i])
# print()
# temp2=(Re_buf[4]<<8|Re_buf[5]);
Temp = measurements[4] << 8 | measurements[5]
# temp1=(Re_buf[6]<<8|Re_buf[7]);
Humi = measurements[6] << 8 | measurements[7]
# Pressure=((uint32_t)Re_buf[8]<<16)|((uint16_t)Re_buf[9]<<8)|Re_buf[10];
Pres = measurements[8] << 16 | measurements[9] << 8 | measurements[10]
# IAQ_accuracy= (Re_buf[11]&0xf0)>>4;
        IAQa = (measurements[11] & 0xF0) >> 4
# IAQ=((Re_buf[11]&0x0F)<<8)|Re_buf[12];
        IAQ = ((measurements[11] & 0x0F) << 8) | measurements[12]
# Gas=((uint32_t)Re_buf[13]<<24)|((uint32_t)Re_buf[14]<<16)|((uint16_t)Re_buf[15]<<8)|Re_buf[16];
Gas = measurements[13] << 24 | measurements[14] << 16 | measurements[15] << 8 | measurements[16]
# Altitude=(Re_buf[17]<<8)|Re_buf[18];
Alt = measurements[17] << 8 | measurements[18]
print("Temperature: ", Temp/100)
print("Humidity: ", Humi/100)
print("Pressure: ", Pres)
print("IAQ Accuracy: ", IAQa)
print("IAQ: ", IAQ)
print("Gas: ", Gas)
print("Altitude: ", Alt)
time.sleep(0.1)
# print("Type1: ", type(measurements[2][2])) -> 'str'
# print("Type2: ", type(ustruct.unpack('b',measurements[2][2]))) -> 'tuple'
# print("Type3: ", type(ustruct.unpack('b',measurements[2][2])[0] )) -> 'int'
# Meaning of the Byte0:
# Bit7 Bit6 Bit5 Bit4 Bit3 Bit2 Bit1 Bit0
# NC NC altitude Gas IAQ Air pressure Humidity Temperature
#
# Bit6~Bit7 Reserved
# Bit5 This bit is 1 for output altitude data and 0 for no output.
# (The data type is signed 16 bits: -32768----- 32767, unit m)
# Bit4 The position 1 indicates the output Gas data, 0 has no output;
# Gas indicates the gas resistance resistance value, which decreases with the increase of the
# gas concentration. small. (The data type is unsigned 32 bits: 0----- 4294967296, unit ohm)
# Bit3 This bit is 1 to indicate IAQ data output, 0 is no output;
# IAQ is for indoor air quality. The range of IAQ is 0~500. The larger the value, the worse
# the air quality. The IAQ data type is unsigned 16 bits, with the first 4 bits indicating
# the accuracy of the sensor's IAQ and the last 12 bits representing the IAQ value.
# Bit2 This position 1 indicates the output air pressure data, 0 has no output;
# Data range: 300~110000, unit Pa; (The data type is unsigned 24 bits)
# Bit1 This bit is set to 1 to indicate the humidity data after the output is magnified 100 times.
# Data range: 0~100, unit %rH (ie relative humidity);
# (The data type is unsigned 16 bits)
# Bit0 The position 1 indicates the temperature data after the output is amplified 100 times
# and 0 has no output; Temperature range: -40~85, unit °C;
# (The data type is signed 16 bits: -32768----- 32767)
# I tried to test byte 0 but the value is always the same :-(
# if testBit(ustruct.unpack('B',measurements[2][0])[0], 0) != 0:
# print("Has Temperature (range -40 - 85 °C)")
# if testBit(ustruct.unpack('B',measurements[2][0])[0], 1) != 0:
# print("Has Relative humidity (range 0 - 100%")
# if testBit(ustruct.unpack('B',measurements[2][0])[0], 2) != 0:
# print("Has Air pressure")
# if testBit(ustruct.unpack('B',measurements[2][0])[0], 3) != 0:
# print("Has Indoor Air Quality (IAQ range is 0~500. The larger the value, the worse the air quality qualidade)")
# if testBit(ustruct.unpack('B',measurements[2][0])[0], 4) != 0:
# print(" Tem resistancia de Gas")
# if testBit(ustruct.unpack('B',measurements[2][0])[0], 5) != 0:
# print(" Tem Altitute")
# Arduino code
# temp2=(Re_buf[4]<<8|Re_buf[5]);
# Temperature=(float)temp2/100;
# temp1=(Re_buf[6]<<8|Re_buf[7]);
# Humidity=(float)temp1/100;
# Pressure=((uint32_t)Re_buf[8]<<16)|((uint16_t)Re_buf[9]<<8)|Re_buf[10];
# IAQ_accuracy= (Re_buf[11]&0xf0)>>4;
# IAQ=((Re_buf[11]&0x0F)<<8)|Re_buf[12];
# Gas=((uint32_t)Re_buf[13]<<24)|((uint32_t)Re_buf[14]<<16)|((uint16_t)Re_buf[15]<<8)|Re_buf[16];
# Altitude=(Re_buf[17]<<8)|Re_buf[18];
```
#### File: Labs/Lab-7.1 WeatherStation/windows.py
```python
if not 'tft' in dir():
import display
tft = display.TFT()
tft.init(tft.M5STACK, width=240, height=320, rst_pin=33, backl_pin=32, miso=19, mosi=23, clk=18, cs=14, dc=27, bgr=True, backl_on=1)
screen_w, screen_h = tft.screensize()
header_h = 32
#draw borders
def borders():
tft.resetwin()
tft.clear(tft.NAVY)
#around screen
tft.rect(0,0,screen_w, screen_h, tft.RED)
#border around header
tft.rect(0,0,screen_w, header_h, tft.RED)
header()
def header(text=''):
#draw header
tft.setwin(1, 1, screen_w-2, header_h-2)
tft.clearwin(tft.MAROON)
tft.font(tft.FONT_Comic, transparent = True )
tft.text(0,0,text,tft.YELLOW)
def mainwindow(clear=True,color=tft.BLUE):
#Activate main Window
#print(1, header_h+1, screen_w-2, screen_h-2)
tft.setwin(1, header_h+1, screen_w-2, screen_h-2)
#tft.font(tft.FONT_Minya, transparent = True )
tft.font(tft.FONT_DejaVu18, transparent = True )
if clear:
tft.clearwin(color)
if color != tft.WHITE:
tft.text(0,0,'',tft.WHITE)
else :
tft.text(0,0,'',tft.BLACK)
def home():
tft.text(0,0,"")
def write(text):
tft.text(tft.LASTX ,tft.LASTY,text)
def writeln(text):
tft.text(tft.LASTX ,tft.LASTY,text+'\n')
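# A hedged usage sketch of this module (assumes the tft init above succeeded):
# import windows
# windows.borders()
# windows.header('My App')
# windows.mainwindow()
# windows.writeln('hello world')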
```
#### File: Labs/Lab-7 Weather/boot.py
```python
import sys
import machine
# Set default path
# Needed for importing modules and upip
sys.path[1] = '/flash/lib'
def get_networktime():
my_timezone = "CET-1CEST" # found in second field, text before the coma, in https://github.com/loboris/MicroPython_ESP32_psRAM_LoBo/blob/master/MicroPython_BUILD/components/micropython/docs/zones.csv
rtc = machine.RTC()
    rtc.init((2018, 1, 1, 12, 12, 12))  # leading zeros are a syntax error in Python 3
rtc.ntp_sync(server= "", tz=my_timezone, update_period=3600)
# Auto Connect to the network, starts autoconfig if needed
import wifisetup
wifisetup.auto_connect()
get_networktime()
import network
network.telnet.start(user="micro", password="<PASSWORD>")
# uncomment for file access functions
# from upysh import *
```
#### File: Labs/Lab-7 Weather/mystation.py
```python
#My Station
import time, gc
import windows;tft=windows.tft
from machine import Pin, Timer
import micropython
micropython.alloc_emergency_exception_buf(100)
#----------------------
# wiring
#----------------------
PIR_PIN = 2
#----------------------
# Initialisation
#----------------------
import logging
logging.basicConfig(level=logging.DEBUG)
#log = logging.getLogger(__name__) #in module
log = logging.getLogger('station')
#-------------------
windows.mainwindow()
#----------------------
#connect to the sensors
#----------------------
log.info('Loading drivers')
import MCU680
from openweathermap import *
#----------------------
INTERVAL = const(3) #seconds
KEEP_ON = const(1) #seconds
# List of city ID city.list.json.gz can be downloaded here
# http://bulk.openweathermap.org/sample/
CityID='2960316'
APIKey='126c5aa86fcedeb3bb3876202a8afc7c'
#----------------------
#Automatic Display activation
#----------------------
def pir_cb2(p):
log.debug("Change detected : {}".format(p))
if p.irqvalue() == p.IRQ_RISING:
log.info('Turn the display on')
tft.backlight(1)
#and pause the timer to avoid it turning off
t1.pause()
else:
log.debug('Start timer to turn the display off')
t1.reshoot()
def displayoffcb(timer):
log.debug("[tcb] timer: {}".format(timer.timernum()))
#sanity check as things might have changed ...
if pir.value() == 0:
log.info("Turn the display off")
tft.backlight(0)
else:
log.info("People still around")
t1.reshoot()
#----------------------
# with the M5Fire connections the
# PIR cannot be connected at the same time as the Sensor :<
#----------------------
if False:
log.info('Start Automatic Display Activation')
pir = Pin(PIR_PIN, Pin.IN, handler=pir_cb2, trigger=Pin.IRQ_ANYEDGE)
if not ('t1' in dir()):
t1 = Timer(1)
t1.init(period=KEEP_ON*1000, mode=t1.ONE_SHOT, callback=displayoffcb)
tft.backlight(1)
#----------------------
#Timer call back function from the air and temp sensor
#This will be called repeatedly by a timer
#----------------------
readings={}
def sensor_cb(timer):
#external inputs / outputs
global readings
while MCU680.data_ready():
MCU680.read_data()
readings = MCU680.process_data()
log.info("Recieved readings from sensor")
#output the results to the serial
print("Temperature: {Temp:4.1f} C, Humidity: {Humi:2.0f}%, Altitude: {Alt} meters, Pressure: {Pres:7.2f} HPa".format( **readings))
#print("IAQ Accuracy: {IAQa} , IAQ : {IAQ}, Gas {Gas} Ohm".format( **readings))
print('')
MCU680.init_sensor()
#----------------------
#create a timer to poll the sensor automatically
t3 = machine.Timer(3)
#----------------------
# call the sensor_cb function every SENSOR_INTERVAL seconds
SENSOR_INTERVAL = const(10)
t3.init(period=SENSOR_INTERVAL*1000, mode=t3.PERIODIC, callback=sensor_cb)
#----------------------
# unitinalized values
#----------------------
country, city ,forecast, weather , temp = '?','?','?','?', 0
#----------------------
# Now continue to poll, send , and update
# every x seconds
#----------------------
x=0
while True:
#Clean main window
    #avoid fetching updates too frequently; once every 10 minutes
x+=1
if x%(int(600/INTERVAL)) ==1:
windows.mainwindow()
log.info('updating...')
#----------------------
#Load openweathermap forecast functions
#----------------------
try:
windows.writeln('Retrieve the weather forecast')
#get the weather forecast
country, city ,forecast = getforecast(CityID,APIKey)
windows.writeln("Get reported current weather")
country, city ,weather, temp = getcurrentweather(CityID,APIKey)
            windows.writeln('Received weather and forecast')
except:
            windows.writeln('Error')
pass
windows.mainwindow()
windows.writeln("Today's observation for \n{}, {}:".format(country,city) )
windows.writeln('{}C , {}'.format(temp, weather) )
windows.writeln("Tomorow's forecast : " )
windows.writeln('{}\n'.format(forecast) )
#-------------------
if readings != {}:
windows.writeln("Temperature: {Temp:4.1f} C".format( **readings))
windows.writeln("Humidity: {Humi:2.0f}%".format( **readings))
windows.writeln("Pressure: {Pres:7.2f} HPa".format( **readings))
#------------
#clean memory , and wait for a while
gc.collect()
log.debug('sleeping...')
time.sleep(INTERVAL)
log.debug('awake!')
```
#### File: Labs/Lab-7 Weather/wunderground.py
```python
import socket
import ujson
import urequests
def getforecast(lat='49.53892899',lng='6.12860155'):
#load information for location
url = 'http://api.wunderground.com/api/35bb891b4697284b/geolookup/forecast/q/{},{}.json'.format(lat,lng)
    response = None
    try:
        #ask for forecast
        response = urequests.get(url)
        #extract the json , and convert it to a dict in one go
        forecast = response.json()
    except:
        print('Could not retrieve the weather forecast')
        forecast = {}
    finally:
        # guard so a failed request does not raise on an unbound response
        if response:
            response.close()
try:
country = forecast['location']['country_name']
city = forecast['location']['city']
#Just one day
day=forecast['forecast']['txt_forecast']['forecastday'][0]
forecast = day['icon']
return country,city,forecast
except:
return "unknown","unknown","unknown"
def getcurrentweather(lat='49.53892899',lng='6.12860155'):
#load information for location
url = 'http://api.wunderground.com/api/35bb891b4697284b/geolookup/conditions/q/{},{}.json'.format(lat,lng)
    response = None
    try:
        #ask for forecast
        response = urequests.get(url)
        #extract the json , and convert it to a dict in one go
        info = response.json()
    except:
        print('Could not retrieve the weather forecast')
        info = {}
    finally:
        # guard so a failed request does not raise on an unbound response
        if response:
            response.close()
try:
country = info['location']['country_name']
city = info['location']['city']
weather = info['current_observation']['weather']
temp_c = info['current_observation']['temp_c']
return country,city,weather,temp_c
except:
return "unknown","unknown","unknown", None
``` |
{
"source": "Josverl/micropython-stubber",
"score": 2
} |
#### File: micropython-stubber/board/main.py
```python
import uos as os
import time
def countdown():
for i in range(5, 0, -1):
print("start stubbing in {}...".format(i))
time.sleep(1)
import createstubs
# import stub_lvgl
try:
# only run import if no stubs yet
os.listdir("stubs")
print("stub folder was found, stubbing is not automatically started")
except OSError:
countdown()
```
#### File: Josverl/micropython-stubber/process.py
```python
import argparse
import itertools
import re
import sys
from optparse import Values
from pathlib import Path
# Pyminifier Dep
token_utils = None
minification = None
try:
from pyminifier import token_utils, minification
except ImportError:
pass
ROOT = Path(__file__).parent
SCRIPT = ROOT / "board" / "createstubs.py"
DEST = ROOT / "minified" / "createstubs.py"
PATCHES = ROOT / "patches"
def apply_patch(s, patch, revert=False):
"""
Apply patch to string s to recover newer string.
If revert is True, treat s as the newer string, recover older string.
Credits:
<NAME> 2016/12/05
https://gist.github.com/noporpoise/16e731849eb1231e86d78f9dfeca3abc
"""
_hdr_pat = re.compile(r"@@ -(\d+),?(\d+)? \+(\d+),?(\d+)? @@")
s = s.splitlines(True)
p = patch.splitlines(True)
t = ""
i = sl = 0
(midx, sign) = (1, "+") if not revert else (3, "-")
while i < len(p) and not p[i].startswith("@@"):
i += 1 # skip header lines
while i < len(p):
m = _hdr_pat.match(p[i])
if not m:
raise Exception("Bad patch -- regex mismatch [line " + str(i) + "]")
l = int(m.group(midx)) - 1 + (m.group(midx + 1) == "0") # noqa
if sl > l or l > len(s):
raise Exception("Bad patch -- bad line num [line " + str(i) + "]")
t += "".join(s[sl:l])
sl = l
i += 1
while i < len(p) and p[i][0] != "@":
if i + 1 < len(p) and p[i + 1][0] == "\\":
line = p[i][:-1]
i += 2
else:
line = p[i]
i += 1
if len(line) > 0:
if line[0] == sign or line[0] == " ":
t += line[1:]
sl += line[0] != sign
t += "".join(s[sl:])
return t
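# Illustrative example of apply_patch (values assumed, not from the repo):
#   apply_patch("a\nb\n", "--- a\n+++ b\n@@ -1,2 +1,2 @@\n a\n-b\n+c\n")
#   returns "a\nc\n"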
def edit_lines(content, edits, show_diff=False):
"""Edit string by list of edits
Args:
content (str): content to edit
edits ([(str, str)]): List of edits to make.
The first string in the tuple represents
the type of edit to make, can be either:
comment - comment text out (removed on minify)
rprint - replace text with print
rpass - replace text with pass
The second string is the matching text to replace
show_diff (bool, optional): Prints diff of each edit.
Defaults to False.
Returns:
str: edited string
"""
def comment(l, x):
return l.replace(x, f"# {x}")
def rprint(l, x): # lgtm [py/unused-local-variable] pylint: disable= unused-variable
split = l.split("(")
if len(split) > 1:
return l.replace(split[0].strip(), "print")
return l.replace(x, f"print")
def rpass(l, x): # lgtm [py/unused-local-variable] pylint: disable= unused-variable
return l.replace(x, f"pass")
def get_whitespace_context(content, index):
"""Get whitespace count of lines surrounding index"""
def count_ws(line):
return sum(1 for _ in itertools.takewhile(str.isspace, line))
lines = content[index - 1 : index + 2]
context = (count_ws(l) for l in lines)
return context
def handle_multiline(content, index):
"""Handles edits that require multiline comments
Example:
self._log.debug("info: {} {}".format(
1,
2
))
Here, only commenting out the first self._log line will raise
an error. So this function returns all lines that need to
be commented out instead.
It also checks for situations such as this:
if condition:
self._log.debug('message')
Here, since the only functionality of the conditional is the call log,
both lines would be returned to comment out.
"""
line = content[index]
open_cnt = line.count("(")
close_cnt = line.count(")")
ahead_index = 1
look_ahead = 0
while not open_cnt == close_cnt:
            look_ahead = index + ahead_index  # index equals the enclosing l_index at the call site
ahead_index += 1
next_l = content[look_ahead]
open_cnt += next_l.count("(")
close_cnt += next_l.count(")")
if ahead_index > 1:
return range(index, look_ahead + 1)
prev = content[index - 1]
_, line_ws, post_ws = get_whitespace_context(content, index)
prev_words = prev.strip().strip(":").split()
check = any(
t
in (
"if",
"else",
)
for t in prev_words
)
if check and line_ws != post_ws:
return range(index - 1, index + 1)
def handle_try_except(content, index):
"""Checks if line at index is in try/except block
Handles situations like this:
try:
something()
except:
self._log.debug('some message')
Simply removing the self._log call would create a syntax error,
which is what this function checks for.
"""
prev = content[index - 1]
_, line_ws, post_ws = get_whitespace_context(content, index)
if "except" in prev and line_ws != post_ws:
return True
lines = []
multilines = set()
content = content.splitlines(keepends=True)
for line in content:
_line = line
for edit, text in edits:
if text in line:
if edit == "comment":
l_index = content.index(line)
# Check if edit spans multiple lines
mline = handle_multiline(content, l_index)
if mline:
multilines.update(mline)
break
# Check if line is only statement in try/except
if handle_try_except(content, l_index):
edit = "rpass"
text = line.strip()
func = eval(edit) # pylint: disable= eval-used
line = func(line, text)
if line != _line:
if show_diff:
print(f"\n- {_line.strip()}")
print(f"+ {line.strip()}")
break
lines.append(line)
for line_num in multilines:
# Go back and comment out multilines
line = lines[line_num]
lines[line_num] = comment(line, line.strip())
stripped = "".join(lines)
return stripped
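# Illustrative call (mirrors the docstring's edit tuples; values assumed):
#   edit_lines(source, [("comment", "print"), ("rpass", "self._log.debug")])
# comments out lines containing print and replaces the matched log calls
# with pass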
def minify_script(patches=None, keep_report=True, show_diff=False):
"""minifies createstubs.py
Args:
patches ([PathLike], optional): List of paths to patches to apply.
Defaults to None.
keep_report (bool, optional): Keeps single report line in createstubs
            Defaults to True.
show_diff (bool, optional): Print diff from edits. Defaults to False.
Returns:
str: minified source text
"""
patches = patches or []
edits = [
("comment", "print"),
("comment", "import logging"),
("comment", "self._log ="),
("comment", "self._log.debug"),
("comment", "self._log.warning"),
("comment", "self._log.info"),
("comment", "self._log.error"),
]
if keep_report:
report = (
"rprint",
(
'self._log.info("Stub module: {:<20} to file:'
' {:<55} mem:{:>5}".'
"format(module_name, file_name, m1))"
),
)
clean = (
"rprint",
'self._log.info("Clean/remove files in folder: {}".format(path))',
)
edits.insert(0, report)
edits.insert(1, clean)
minopts = Values({"tabs": False})
with SCRIPT.open("r") as f:
content = f.read()
for path in patches:
path = Path(path)
content = apply_patch(content, path.read_text())
content = edit_lines(content, edits, show_diff=show_diff)
tokens = token_utils.listified_tokenizer(content)
source = minification.minify(tokens, minopts)
return source
def get_patches():
"""Iterate patch files"""
for f in PATCHES.iterdir():
yield (f.stem, f.resolve())
def resolve_patches(patch_names):
"""Validates/Provides help for patches"""
patch_files = list(get_patches())
patches = [
next((p for p in patch_files if p[0] == n), (n, None)) for n in patch_names
]
paths = []
for name, path in patches:
if path is None:
print(f"Cannot find patch: {name}")
print("\nAvailable Patches:")
print("\n".join(p[0] for p in get_patches()))
sys.exit(0)
print(f"Applying Patch: {name}")
paths.append(path)
return paths
def cli_patch(**kwargs):
"""apply patch cli handler"""
print("Patching createstubs.py...")
out = kwargs.get("output")
patch_names = kwargs.pop("patches")
paths = resolve_patches(patch_names)
with SCRIPT.open("r") as f:
source = f.read()
for p in paths:
content = apply_patch(source, p.read_text())
with out.open("w+") as o:
o.write(content)
print("\nDone!")
print("Patched file written to:", out)
def cli_minify(**kwargs):
"""minify cli handler"""
print("\nMinifying createstubs.py...")
out = kwargs.pop("output")
patches = kwargs.pop("patch")
if not minification:
print("pyminifier is required to minify createstubs.py\n")
print("Please install via:\n pip install pyminifier")
sys.exit(1)
patch_paths = resolve_patches(patches)
with out.open("w+") as f:
report = kwargs.pop("no_report")
diff = kwargs.pop("diff")
source = minify_script(patches=patch_paths, keep_report=report, show_diff=diff)
f.write(source)
print("\nDone!")
print("Minified file written to:", out)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Pre/Post Processing for createstubs.py"
)
parser.set_defaults(func=None)
parser.add_argument(
"-o",
"--output",
help="Specify file to output to. Defaults to ./minified/createstubs.py",
type=Path,
default=DEST,
)
subparsers = parser.add_subparsers(help="Command to execute")
minify_parser = subparsers.add_parser(
"minify", help=("Create minified version of" " createstubs.py")
)
minify_parser.add_argument(
"-p",
"--patch",
action="append",
help="Apply patch before minification",
default=[],
)
minify_parser.add_argument(
"-d", "--diff", help="Print diff report from minify", action="store_true"
)
minify_parser.add_argument(
"-n",
"--no-report",
help=(
"Disables all output from createstubs.py."
" Use if your having memory related issues."
),
action="store_false",
)
minify_parser.set_defaults(func=cli_minify)
patch_parser = subparsers.add_parser(
"patch", help=("Apply a patch to createstubs.py")
)
patch_parser.add_argument(
"patches",
help="List of patches to apply, seperated by a space.",
action="append",
)
patch_parser.set_defaults(func=cli_patch)
args = parser.parse_args()
if not args.func:
parser.print_help()
else:
args.func(**vars(args))
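# Illustrative invocations (patch names are assumptions; list the real ones
# via get_patches()):
#   python process.py minify --diff
#   python process.py -o ./build/createstubs.py minify -p some_patch
#   python process.py patch some_patch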
```
#### File: micropython-stubber/src/get_cpython.py
```python
import os
import glob
import shutil
import subprocess
import logging
import json
import utils
from version import VERSION
log = logging.getLogger(__name__)
family = "common"
def get_core(requirements, stub_path=None):
"Download MicroPython compatibility modules"
if not stub_path:
stub_path = "./all-stubs/cpython-core"
    # use pip to download the requirements file to the build folder in one go
# pip install --no-compile --no-cache-dir --target ./scratch/test --upgrade -r ./src/micropython.txt
build_path = os.path.abspath("./build")
os.makedirs(stub_path, exist_ok=True)
os.makedirs(build_path, exist_ok=True)
mod_manifest = None
try:
subprocess.run(
[
"pip",
"install",
"--target",
build_path,
"-r",
requirements,
"--no-cache-dir",
"--no-compile",
"--upgrade",
"--no-binary=:all:",
],
capture_output=False,
check=True,
)
# build modules.json
mod_manifest = utils.manifest(machine=family, version=VERSION)
# copy *.py files in build folder to stub_path
for filename in glob.glob(os.path.join(build_path, "*.py")):
log.info("pipped : {}".format(filename))
f_name, f_ext = os.path.splitext(
os.path.basename(filename)
) # pylint: disable=unused-variable
mod_manifest["modules"].append(
{"file": os.path.basename(filename), "module": f_name}
)
try:
shutil.copy2(filename, stub_path)
except OSError as err:
log.exception(err)
except OSError as err:
log.error(
"An error occurred while trying to run pip to download the MicroPython compatibility modules from PyPi: {}".format(
err
)
)
finally:
# remove build folder
shutil.rmtree(build_path, ignore_errors=True)
if mod_manifest:
        #write the module manifest
with open(stub_path+"/modules.json", "w") as outfile:
json.dump(mod_manifest, outfile, indent=4, sort_keys=True)
if __name__ == "__main__":
# just run a quick test
logging.basicConfig(format="%(levelname)-8s:%(message)s", level=logging.INFO)
get_core(
requirements="./src/reqs-cpython-mpy.txt", stub_path="./scratch/cpython_common"
)
```
#### File: micropython-stubber/src/make_stub_files.py
```python
import ast
# from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
from collections import OrderedDict
# Requires Python 2.7 or above. Without OrderedDict
# the configparser will give random order for patterns.
import configparser # Python 3
import glob
# Todo: Deprecated, replace with argparse https://docs.python.org/3/library/argparse.html
import optparse
import os
import re
import subprocess
import sys
import time
import types
import io # Python 3
isPython3 = sys.version_info >= (3, 0, 0)
debug_flag = False
def is_known_type(s):
"""
Return True if s is nothing but a single known type.
Recursively test inner types in square brackets.
"""
return ReduceTypes().is_known_type(s)
def merge_types(a1, a2):
"""
a1 and a2 may be strings or lists.
return a list containing both of them, flattened, without duplicates.
"""
# Only useful if visitors could return either lists or strings.
assert a1 is not None
assert a2 is not None
r1 = a1 if isinstance(a1, (list, tuple)) else [a1]
r2 = a2 if isinstance(a2, (list, tuple)) else [a2]
return sorted(set(r1 + r2))
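# e.g. merge_types("int", ["str", "int"]) returns ["int", "str"]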
def reduce_types(aList, name=None, trace=False):
"""
Return a string containing the reduction of all types in aList.
The --trace-reduce command-line option sets trace=True.
If present, name is the function name or class_name.method_name.
"""
return ReduceTypes(aList, name, trace).reduce_types()
# Top-level functions
def dump(title, s=None):
if s:
print("===== %s...\n%s\n" % (title, s.rstrip()))
else:
print("===== %s...\n" % title)
def dump_dict(title, d):
"""Dump a dictionary with a header."""
dump(title)
for z in sorted(d):
print("%30s %s" % (z, d.get(z)))
print("")
def dump_list(title, aList):
"""Dump a list with a header."""
dump(title)
for z in aList:
print(z)
print("")
def main():
"""
The driver for the stand-alone version of make-stub-files.
All options come from ~/stubs/make_stub_files.cfg.
"""
# g.cls()
controller = StandAloneMakeStubFile()
controller.scan_command_line()
controller.scan_options()
controller.run()
if not controller.silent:
print("done")
# def pdb(self):
# pass
# # '''Invoke a debugger during unit testing.'''
# # try:
# # import leo.core.leoGlobals as leo_g
# # assert leo_g
# # # leo_g.pdb()
# # except ImportError:
# # import pdb
# # pdb.set_trace()
def truncate(s, n):
"""Return s truncated to n characters."""
return s if len(s) <= n else s[: n - 3] + "..."
class AstFormatter:
"""
A class to recreate source code from an AST.
This does not have to be perfect, but it should be close.
"""
# pylint: disable=consider-using-enumerate
level = 0
# Entries...
def format(self, node):
"""Format the node (or list of nodes) and its descendants."""
self.level = 0
val = self.visit(node)
# pylint: disable=consider-using-ternary
return val and val.strip() or ""
def visit(self, node):
"""Return the formatted version of an Ast node, or list of Ast nodes."""
if isinstance(node, (list, tuple)):
return ",".join([self.visit(z) for z in node])
elif node is None:
return "None"
else:
assert isinstance(node, ast.AST), node.__class__.__name__
method_name = "do_" + node.__class__.__name__
try:
method = getattr(self, method_name)
except AttributeError:
return ""
s = method(node)
# assert type(s) == type('abc'), (node, type(s))
assert g.isString(s), type(s)
return s
# Contexts...
# 2: ClassDef(identifier name, expr* bases,
# stmt* body, expr* decorator_list)
# 3: ClassDef(identifier name, expr* bases,
# keyword* keywords, expr? starargs, expr? kwargs
# stmt* body, expr* decorator_list)
#
# keyword arguments supplied to call (NULL identifier for **kwargs)
# keyword = (identifier? arg, expr value)
def do_ClassDef(self, node):
result = []
name = node.name # Only a plain string is valid.
bases = [self.visit(z) for z in node.bases] if node.bases else []
if getattr(node, "keywords", None): # Python 3
for keyword in node.keywords:
bases.append("%s=%s" % (keyword.arg, self.visit(keyword.value)))
if getattr(node, "starargs", None): # Python 3
bases.append("*%s", self.visit(node.starargs))
if getattr(node, "kwargs", None): # Python 3
bases.append("*%s", self.visit(node.kwargs))
#
# Fix issue #2: look ahead to see if there are any functions in this class.
empty = not any(isinstance(z, ast.FunctionDef) for z in node.body)
tail = " ..." if empty else ""
if bases:
result.append(
self.indent("class %s(%s):%s\n" % (name, ",".join(bases), tail))
)
else:
result.append(self.indent("class %s:%s\n" % (name, tail)))
# Fix #2
for z in node.body:
self.level += 1
result.append(self.visit(z))
self.level -= 1
return "".join(result)
# 2: FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list)
# 3: FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list,
# expr? returns)
def do_FunctionDef(self, node):
"""Format a FunctionDef node."""
result = []
if node.decorator_list:
for z in node.decorator_list:
result.append("@%s\n" % self.visit(z))
name = node.name # Only a plain string is valid.
args = self.visit(node.args) if node.args else ""
if getattr(node, "returns", None): # Python 3.
returns = self.visit(node.returns)
result.append(self.indent("def %s(%s): -> %s\n" % (name, args, returns)))
else:
result.append(self.indent("def %s(%s):\n" % (name, args)))
for z in node.body:
self.level += 1
result.append(self.visit(z))
self.level -= 1
return "".join(result)
def do_Interactive(self, node):
for z in node.body:
self.visit(z)
def do_Module(self, node):
assert "body" in node._fields
result = "".join([self.visit(z) for z in node.body])
return result # 'module:\n%s' % (result)
def do_Lambda(self, node):
return self.indent(
"lambda %s: %s" % (self.visit(node.args), self.visit(node.body))
)
# Expressions...
def do_Expr(self, node):
"""An outer expression: must be indented."""
return self.indent("%s\n" % self.visit(node.value))
def do_Expression(self, node):
"""An inner expression: do not indent."""
return "%s\n" % self.visit(node.body)
def do_GeneratorExp(self, node):
elt = self.visit(node.elt) or ""
gens = [self.visit(z) for z in node.generators]
gens = [z if z else "<**None**>" for z in gens] # Kludge: probable bug.
return "<gen %s for %s>" % (elt, ",".join(gens))
def do_AugLoad(self, node):
return "AugLoad"
def do_Del(self, node):
return "Del"
def do_Load(self, node):
return "Load"
def do_Param(self, node):
return "Param"
def do_Store(self, node):
return "Store"
# Operands...
# 2: arguments = (expr* args, identifier? vararg, identifier? kwarg, expr* defaults)
# 3: arguments = (arg* args, arg? vararg,
# arg* kwonlyargs, expr* kw_defaults,
# arg? kwarg, expr* defaults)
def do_arguments(self, node):
"""Format the arguments node."""
kind = self.kind(node)
assert kind == "arguments", kind
args = [self.visit(z) for z in node.args]
defaults = [self.visit(z) for z in node.defaults]
# Assign default values to the last args.
args2 = []
n_plain = len(args) - len(defaults)
for i in range(len(args)):
if i < n_plain:
args2.append(args[i])
else:
args2.append("%s=%s" % (args[i], defaults[i - n_plain]))
if isPython3:
args = [self.visit(z) for z in node.kwonlyargs]
defaults = [self.visit(z) for z in node.kw_defaults]
n_plain = len(args) - len(defaults)
for i in range(len(args)):
if i < n_plain:
args2.append(args[i])
else:
args2.append("%s=%s" % (args[i], defaults[i - n_plain]))
# Add the vararg and kwarg expressions.
vararg = getattr(node, "vararg", None)
if vararg:
args2.append("*" + self.visit(vararg))
kwarg = getattr(node, "kwarg", None)
if kwarg:
args2.append("**" + self.visit(kwarg))
else: # OLD PYTHON 2.x code
# Add the vararg and kwarg names.
name = getattr(node, "vararg", None)
if name:
args2.append("*" + name)
name = getattr(node, "kwarg", None)
if name:
args2.append("**" + name)
return ",".join(args2)
# 3: arg = (identifier arg, expr? annotation)
def do_arg(self, node):
if getattr(node, "annotation", None):
return "%s: %s" % (node.arg, self.visit(node.annotation))
else:
return node.arg
# Attribute(expr value, identifier attr, expr_context ctx)
def do_Attribute(self, node):
return "%s.%s" % (
self.visit(node.value),
node.attr,
) # Don't visit node.attr: it is always a string.
def do_Bytes(self, node): # Python 3.x only.
return str(node.s)
# Call(expr func, expr* args, keyword* keywords, expr? starargs, expr? kwargs)
def do_Call(self, node):
func = self.visit(node.func)
args = [self.visit(z) for z in node.args]
for z in node.keywords:
# Calls f.do_keyword.
args.append(self.visit(z))
if getattr(node, "starargs", None):
args.append("*%s" % (self.visit(node.starargs)))
if getattr(node, "kwargs", None):
args.append("**%s" % (self.visit(node.kwargs)))
args = [z for z in args if z] # Kludge: Defensive coding.
return "%s(%s)" % (func, ",".join(args))
# keyword = (identifier arg, expr value)
def do_keyword(self, node):
# node.arg is a string.
value = self.visit(node.value)
# This is a keyword *arg*, not a Python keyword!
return "%s=%s" % (node.arg, value)
def do_comprehension(self, node):
result = []
name = self.visit(node.target) # A name.
it = self.visit(node.iter) # An attribute.
result.append("%s in %s" % (name, it))
ifs = [self.visit(z) for z in node.ifs]
if ifs:
result.append(" if %s" % ("".join(ifs)))
return "".join(result)
def do_Dict(self, node):
result = []
keys = [self.visit(z) for z in node.keys]
values = [self.visit(z) for z in node.values]
if len(keys) == len(values):
# result.append('{\n' if keys else '{')
result.append("{")
items = []
for i in range(len(keys)):
items.append("%s:%s" % (keys[i], values[i]))
result.append(", ".join(items))
result.append("}")
# result.append(',\n'.join(items))
# result.append('\n}' if keys else '}')
else:
print(
"Error: f.Dict: len(keys) != len(values)\nkeys: %s\nvals: %s"
% (repr(keys), repr(values))
)
return "".join(result)
def do_Ellipsis(self, node):
return "..."
def do_ExtSlice(self, node):
return ":".join([self.visit(z) for z in node.dims])
def do_Index(self, node):
return self.visit(node.value)
def do_List(self, node):
# Not used: list context.
# self.visit(node.ctx)
elts = [self.visit(z) for z in node.elts]
elts = [z for z in elts if z] # Defensive.
return "[%s]" % ",".join(elts)
def do_ListComp(self, node):
elt = self.visit(node.elt)
gens = [self.visit(z) for z in node.generators]
gens = [z if z else "<**None**>" for z in gens] # Kludge: probable bug.
return "%s for %s" % (elt, "".join(gens))
def do_Name(self, node):
return node.id
def do_NameConstant(self, node): # Python 3 only.
s = repr(node.value)
return "bool" if s in ("True", "False") else s
def do_Num(self, node):
return repr(node.n)
# Python 2.x only
def do_Repr(self, node):
return "repr(%s)" % self.visit(node.value)
def do_Slice(self, node):
lower, upper, step = "", "", ""
if getattr(node, "lower", None) is not None:
lower = self.visit(node.lower)
if getattr(node, "upper", None) is not None:
upper = self.visit(node.upper)
if getattr(node, "step", None) is not None:
step = self.visit(node.step)
if step:
return "%s:%s:%s" % (lower, upper, step)
else:
return "%s:%s" % (lower, upper)
def do_Str(self, node):
"""This represents a string constant."""
return repr(node.s)
# Subscript(expr value, slice slice, expr_context ctx)
def do_Subscript(self, node):
value = self.visit(node.value)
the_slice = self.visit(node.slice)
return "%s[%s]" % (value, the_slice)
def do_Tuple(self, node):
elts = [self.visit(z) for z in node.elts]
return "(%s)" % ", ".join(elts)
# Operators...
def do_BinOp(self, node):
return "%s%s%s" % (
self.visit(node.left),
self.op_name(node.op),
self.visit(node.right),
)
def do_BoolOp(self, node):
op_name = self.op_name(node.op)
values = [self.visit(z) for z in node.values]
return op_name.join(values)
def do_Compare(self, node):
result = []
lt = self.visit(node.left)
ops = [self.op_name(z) for z in node.ops]
comps = [self.visit(z) for z in node.comparators]
result.append(lt)
if len(ops) == len(comps):
for i in range(len(ops)):
result.append("%s%s" % (ops[i], comps[i]))
else:
print("can not happen: ops", repr(ops), "comparators", repr(comps))
return "".join(result)
def do_UnaryOp(self, node):
return "%s%s" % (self.op_name(node.op), self.visit(node.operand))
def do_IfExp(self, node):
return "%s if %s else %s " % (
self.visit(node.body),
self.visit(node.test),
self.visit(node.orelse),
)
# Statements...
def do_Assert(self, node):
test = self.visit(node.test)
if getattr(node, "msg", None):
message = self.visit(node.msg)
return self.indent("assert %s, %s" % (test, message))
else:
return self.indent("assert %s" % test)
def do_Assign(self, node):
return self.indent(
"%s=%s\n"
% ("=".join([self.visit(z) for z in node.targets]), self.visit(node.value))
)
def do_AugAssign(self, node):
return self.indent(
"%s%s=%s\n"
% (
self.visit(node.target),
self.op_name(node.op), # Bug fix: 2013/03/08.
self.visit(node.value),
)
)
def do_Break(self, node):
return self.indent("break\n")
def do_Continue(self, node):
return self.indent("continue\n")
def do_Delete(self, node):
targets = [self.visit(z) for z in node.targets]
return self.indent("del %s\n" % ",".join(targets))
def do_ExceptHandler(self, node):
result = []
result.append(self.indent("except"))
if getattr(node, "type", None):
result.append(" %s" % self.visit(node.type))
if getattr(node, "name", None):
if isinstance(node.name, ast.AST):
result.append(" as %s" % self.visit(node.name))
else:
result.append(" as %s" % node.name) # Python 3.x.
result.append(":\n")
for z in node.body:
self.level += 1
result.append(self.visit(z))
self.level -= 1
return "".join(result)
# Python 2.x only
def do_Exec(self, node):
body = self.visit(node.body)
args = [] # Globals before locals.
if getattr(node, "globals", None):
args.append(self.visit(node.globals))
if getattr(node, "locals", None):
args.append(self.visit(node.locals))
if args:
return self.indent("exec %s in %s\n" % (body, ",".join(args)))
else:
return self.indent("exec %s\n" % (body))
def do_For(self, node):
result = []
result.append(
self.indent(
"for %s in %s:\n" % (self.visit(node.target), self.visit(node.iter))
)
)
for z in node.body:
self.level += 1
result.append(self.visit(z))
self.level -= 1
if node.orelse:
result.append(self.indent("else:\n"))
for z in node.orelse:
self.level += 1
result.append(self.visit(z))
self.level -= 1
return "".join(result)
def do_Global(self, node):
return self.indent("global %s\n" % (",".join(node.names)))
def do_If(self, node):
result = []
result.append(self.indent("if %s:\n" % (self.visit(node.test))))
for z in node.body:
self.level += 1
result.append(self.visit(z))
self.level -= 1
if node.orelse:
result.append(self.indent("else:\n"))
for z in node.orelse:
self.level += 1
result.append(self.visit(z))
self.level -= 1
return "".join(result)
def do_Import(self, node):
names = []
for fn, asname in self.get_import_names(node):
if asname:
names.append("%s as %s" % (fn, asname))
else:
names.append(fn)
return self.indent("import %s\n" % (",".join(names)))
def get_import_names(self, node):
"""Return a list of the the full file names in the import statement."""
result = []
for ast2 in node.names:
if self.kind(ast2) == "alias":
data = ast2.name, ast2.asname
result.append(data)
else:
print("unsupported kind in Import.names list", self.kind(ast2))
return result
def do_ImportFrom(self, node):
names = []
for fn, asname in self.get_import_names(node):
if asname:
names.append("%s as %s" % (fn, asname))
else:
names.append(fn)
return self.indent("from %s import %s\n" % (node.module, ",".join(names)))
# Nonlocal(identifier* names)
def do_Nonlocal(self, node):
return self.indent("nonlocal %s\n" % ", ".join(node.names))
def do_Pass(self, node):
return self.indent("pass\n")
# Python 2.x only
def do_Print(self, node):
vals = []
for z in node.values:
vals.append(self.visit(z))
if getattr(node, "dest", None):
vals.append("dest=%s" % self.visit(node.dest))
if getattr(node, "nl", None):
vals.append("nl=%s" % node.nl)
return self.indent("print(%s)\n" % (",".join(vals)))
def do_Raise(self, node):
args = []
for attr in ("type", "inst", "tback"):
if getattr(node, attr, None) is not None:
args.append(self.visit(getattr(node, attr)))
if args:
return self.indent("raise %s\n" % (",".join(args)))
else:
return self.indent("raise\n")
def do_Return(self, node):
if node.value:
return self.indent("return %s\n" % (self.visit(node.value).strip()))
else:
return self.indent("return\n")
# Starred(expr value, expr_context ctx)
def do_Starred(self, node):
return "*" + self.visit(node.value)
# Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)
def do_Try(self, node): # Python 3
result = []
result.append(self.indent("try:\n"))
for z in node.body:
self.level += 1
result.append(self.visit(z))
self.level -= 1
if node.handlers:
for z in node.handlers:
result.append(self.visit(z))
if node.orelse:
result.append(self.indent("else:\n"))
for z in node.orelse:
self.level += 1
result.append(self.visit(z))
self.level -= 1
if node.finalbody:
result.append(self.indent("finally:\n"))
for z in node.finalbody:
self.level += 1
result.append(self.visit(z))
self.level -= 1
return "".join(result)
def do_TryExcept(self, node):
result = []
result.append(self.indent("try:\n"))
for z in node.body:
self.level += 1
result.append(self.visit(z))
self.level -= 1
if node.handlers:
for z in node.handlers:
result.append(self.visit(z))
if node.orelse:
result.append("else:\n")
for z in node.orelse:
self.level += 1
result.append(self.visit(z))
self.level -= 1
return "".join(result)
def do_TryFinally(self, node):
result = []
result.append(self.indent("try:\n"))
for z in node.body:
self.level += 1
result.append(self.visit(z))
self.level -= 1
result.append(self.indent("finally:\n"))
for z in node.finalbody:
self.level += 1
result.append(self.visit(z))
self.level -= 1
return "".join(result)
def do_While(self, node):
result = []
result.append(self.indent("while %s:\n" % (self.visit(node.test))))
for z in node.body:
self.level += 1
result.append(self.visit(z))
self.level -= 1
if node.orelse:
result.append("else:\n")
for z in node.orelse:
self.level += 1
result.append(self.visit(z))
self.level -= 1
return "".join(result)
# 2: With(expr context_expr, expr? optional_vars,
# stmt* body)
# 3: With(withitem* items,
# stmt* body)
# withitem = (expr context_expr, expr? optional_vars)
def do_With(self, node):
result = []
result.append(self.indent("with "))
vars_list = []
if getattr(node, "context_expression", None):
            result.append(self.visit(node.context_expression))
if getattr(node, "optional_vars", None):
try:
for z in node.optional_vars:
vars_list.append(self.visit(z))
except TypeError: # Not iterable.
vars_list.append(self.visit(node.optional_vars))
if getattr(node, "items", None): # Python 3.
for item in node.items:
result.append(self.visit(item.context_expr))
if getattr(item, "optional_vars", None):
try:
for z in item.optional_vars:
vars_list.append(self.visit(z))
except TypeError: # Not iterable.
vars_list.append(self.visit(item.optional_vars))
result.append(",".join(vars_list))
result.append(":\n")
for z in node.body:
self.level += 1
result.append(self.visit(z))
self.level -= 1
result.append("\n")
return "".join(result)
def do_Yield(self, node):
if getattr(node, "value", None):
return self.indent("yield %s\n" % (self.visit(node.value)))
else:
return self.indent("yield\n")
# YieldFrom(expr value)
def do_YieldFrom(self, node):
return self.indent("yield from %s\n" % (self.visit(node.value)))
# Utils...
def kind(self, node):
"""Return the name of node's class."""
return node.__class__.__name__
def indent(self, s):
return "%s%s" % (" " * 4 * self.level, s)
def op_name(self, node, strict=True):
"""Return the print name of an operator node."""
d = {
# Binary operators.
"Add": "+",
"BitAnd": "&",
"BitOr": "|",
"BitXor": "^",
"Div": "/",
"FloorDiv": "//",
"LShift": "<<",
"Mod": "%",
"Mult": "*",
"Pow": "**",
"RShift": ">>",
"Sub": "-",
# Boolean operators.
"And": " and ",
"Or": " or ",
# Comparison operators
"Eq": "==",
"Gt": ">",
"GtE": ">=",
"In": " in ",
"Is": " is ",
"IsNot": " is not ",
"Lt": "<",
"LtE": "<=",
"NotEq": "!=",
"NotIn": " not in ",
# Context operators.
"AugLoad": "<AugLoad>",
"AugStore": "<AugStore>",
"Del": "<Del>",
"Load": "<Load>",
"Param": "<Param>",
"Store": "<Store>",
# Unary operators.
"Invert": "~",
"Not": " not ",
"UAdd": "+",
"USub": "-",
}
name = d.get(self.kind(node), "<%s>" % node.__class__.__name__)
if strict:
assert name, self.kind(node)
return name
class AstArgFormatter(AstFormatter):
"""
Just like the AstFormatter class, except it prints the class
names of constants instead of actual values.
"""
# Return generic markers to allow better pattern matches.
def do_BoolOp(self, node): # Python 2.x only.
return "bool"
def do_Bytes(self, node): # Python 3.x only.
return "bytes" # return str(node.s)
def do_Name(self, node):
return "bool" if node.id in ("True", "False") else node.id
def do_Num(self, node):
return "number" # return repr(node.n)
def do_Str(self, node):
"""This represents a string constant."""
return "str" # return repr(node.s)
class LeoGlobals:
"""A class supporting g.pdb and g.trace for compatibility with Leo."""
class NullObject:
"""
An object that does nothing, and does it very well.
From the Python cookbook, recipe 5.23
"""
def __init__(self, *args, **keys):
pass
def __call__(self, *args, **keys):
return self
def __repr__(self):
return "NullObject"
def __str__(self):
return "NullObject"
def __bool__(self):
return False
def __nonzero__(self):
return 0
def __delattr__(self, attr):
return self
def __getattr__(self, attr):
return self
def __setattr__(self, attr, val):
return self
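# Hedged usage sketch (added for illustration, not in the original source):
# a NullObject swallows any attribute access or call and returns itself,
# so arbitrarily chained expressions never raise:
#   n = LeoGlobals.NullObject()
#   n.missing_attr.some_method(1, 2)  # still a NullObject; bool(n) is False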
# pylint: disable=protected-access
def _callerName(self, n=1, files=False):
# print('_callerName: %s %s' % (n,files))
try: # get the function name from the call stack.
f1 = sys._getframe(n) # The stack frame, n levels up.
code1 = f1.f_code # The code object
name = code1.co_name
if name == "__init__":
name = "__init__(%s,line %s)" % (
self.shortFileName(code1.co_filename),
code1.co_firstlineno,
)
if files:
return "%s:%s" % (self.shortFileName(code1.co_filename), name)
else:
return name # The code name
except ValueError:
# print('g._callerName: ValueError',n)
return "" # The stack is not deep enough.
except Exception:
# es_exception()
return "" # "<no caller name>"
def caller(self, i=1):
"""Return the caller name i levels up the stack."""
return self.callers(i + 1).split(",")[0]
def callers(self, n=4, count=0, excludeCaller=True, files=False):
"""Return a list containing the callers of the function that called g.callerList.
If the excludeCaller keyword is True (the default), g.callers is not on the list.
If the files keyword argument is True, filenames are included in the list.
"""
# sys._getframe throws ValueError in both cpython and jython if there are fewer than i entries.
# The jython stack often has less than 8 entries,
# so we must be careful to call g._callerName with smaller values of i first.
result = []
i = 3 if excludeCaller else 2
while 1:
s = self._callerName(i, files=files)
# print(i,s)
if s:
result.append(s)
if not s or len(result) >= n:
break
i += 1
result.reverse()
if count > 0:
result = result[:count]
sep = "\n" if files else ","
return sep.join(result)
def cls(self):
"""Clear the screen."""
if sys.platform.lower().startswith("win"):
os.system("cls") # pylint: disable=no-member
def execute_shell_commands(self, commands, trace=False):
"""
Execute each shell command in a separate process.
Wait for each command to complete, except those starting with '&'
"""
if g.isString(commands):
commands = [commands]
for command in commands:
wait = not command.startswith("&")
if command.startswith("&"):
command = command[1:].strip()
proc = subprocess.Popen(command, shell=True)
if wait:
proc.communicate()
def isString(self, s):
"""Return True if s is any string, but not bytes."""
# pylint: disable=no-member
if isPython3:
return isinstance(s, str)
else: # OLD Python 2
return isinstance(s, types.StringTypes)
def isUnicode(self, s):
"""Return True if s is a unicode string."""
# pylint: disable=no-member
if isPython3:
return isinstance(s, str)
else: # OLD Python 2
return isinstance(s, types.UnicodeType)
def objToString(self, obj, indent="", printCaller=False, tag=None):
"""Pretty print any Python object to a string."""
# pylint: disable=undefined-loop-variable
# Looks like a pylint bug.
#
# Compute s.
if isinstance(obj, dict):
s = self.dictToString(obj, indent=indent)
elif isinstance(obj, list):
s = self.listToString(obj, indent=indent)
elif isinstance(obj, tuple):
s = self.tupleToString(obj, indent=indent)
elif g.isString(obj):
# Print multi-line strings as lists.
s = obj
lines = g.splitLines(s)
if len(lines) > 1:
s = self.objToString(lines, indent=indent)
else:
s = repr(s)
else:
s = repr(obj)
#
# Compute the return value.
if printCaller and tag:
prefix = "%s: %s" % (g.caller(), tag)
elif printCaller or tag:
prefix = self.caller() if printCaller else tag
else:
prefix = None
return "%s...\n%s\n" % (prefix, s) if prefix else s
toString = objToString
def dictToString(self, d, indent="", tag=None):
"""Pretty print a Python dict to a string."""
# pylint: disable=unnecessary-lambda
if not d:
return "{}"
result = ["{\n"]
indent2 = indent + " " * 4
n = 2 + len(indent) + max([len(repr(z)) for z in d.keys()])
for i, key in enumerate(sorted(d, key=lambda z: repr(z))):
pad = " " * max(0, (n - len(repr(key))))
result.append("%s%s:" % (pad, key))
result.append(self.objToString(d.get(key), indent=indent2))
if i + 1 < len(d.keys()):
result.append(",")
result.append("\n")
result.append(indent + "}")
s = "".join(result)
return "%s...\n%s\n" % (tag, s) if tag else s
def listToString(self, obj, indent="", tag=None):
"""Pretty print a Python list to a string."""
if not obj:
return "[]"
result = ["["]
indent2 = indent + " " * 4
for i, obj2 in enumerate(obj):
if len(obj) > 1:
result.append("\n" + indent2)
result.append(self.objToString(obj2, indent=indent2))
if i + 1 < len(obj) > 1:
result.append(",")
elif len(obj) > 1:
result.append("\n" + indent)
result.append("]")
s = "".join(result)
return "%s...\n%s\n" % (tag, s) if tag else s
def tupleToString(self, obj, indent="", tag=None):
"""Pretty print a Python tuple to a string."""
if not obj:
return "(),"
result = ["("]
indent2 = indent + " " * 4
for i, obj2 in enumerate(obj):
if len(obj) > 1:
result.append("\n" + indent2)
result.append(self.objToString(obj2, indent=indent2))
if len(obj) == 1 or i + 1 < len(obj):
result.append(",")
elif len(obj) > 1:
result.append("\n" + indent)
result.append(")")
s = "".join(result)
return "%s...\n%s\n" % (tag, s) if tag else s
# def pdb(self):
# pass
# # try:
# # import leo.core.leoGlobals as leo_g
# # leo_g.pdb()
# # except ImportError:
# # import pdb
# # pdb.set_trace()
def printObj(self, obj, indent="", printCaller=False, tag=None):
"""Pretty print any Python object using g.pr."""
print(self.objToString(obj, indent=indent, printCaller=printCaller, tag=tag))
# printDict = printObj
# printList = printObj
# printTuple = printObj
def shortFileName(self, fileName, n=None):
# pylint: disable=invalid-unary-operand-type, no-member
if n is None or n < 1:
return os.path.basename(fileName)
else:
return "/".join(fileName.replace("\\", "/").split("/")[-n:])
def splitLines(self, s):
"""Split s into lines, preserving trailing newlines."""
return s.splitlines(True) if s else []
def trace(self, *args, **keys):
pass
# try:
# import leo.core.leoGlobals as leo_g
# leo_g.trace(caller_level=2, *args, **keys)
# except ImportError:
# print(args, keys)
class Pattern(object):
"""
A class representing regex or balanced patterns.
Sample matching code, for either kind of pattern:
for m in reversed(pattern.all_matches(s)):
s = pattern.replace(m, s)
"""
def __init__(self, find_s, repl_s=""):
"""Ctor for the Pattern class."""
self.find_s = find_s
self.repl_s = repl_s
if self.is_regex():
self.regex = re.compile(find_s)
elif self.is_balanced():
self.regex = None
else:
# Escape all dangerous characters.
result = []
for ch in find_s:
if ch == "_" or ch.isalnum():
result.append(ch)
else:
result.append("\\" + ch)
self.regex = re.compile("".join(result))
def __eq__(self, obj):
"""Return True if two Patterns are equivalent."""
if isinstance(obj, Pattern):
return self.find_s == obj.find_s and self.repl_s == obj.repl_s
else:
return NotImplemented
def __ne__(self, obj):
"""Return True if two Patterns are not equivalent."""
return not self.__eq__(obj)
def __hash__(self):
"""Pattern.__hash__"""
return len(self.find_s) + len(self.repl_s)
def __repr__(self):
"""Pattern.__repr__"""
return "%s: %s" % (self.find_s, self.repl_s)
__str__ = __repr__
def is_balanced(self):
"""Return True if self.find_s is a balanced pattern."""
s = self.find_s
if s.endswith("*"):
return True
for pattern in ("(*)", "[*]", "{*}"):
if s.find(pattern) > -1:
return True
return False
def is_regex(self):
"""
Return True if self.find_s is a regular pattern.
For now a kludgy convention suffices.
"""
return self.find_s.endswith("$")
# A dollar sign is not valid in any Python expression.
def all_matches(self, s):
"""
Return a list of match objects for all matches in s.
These are regex match objects or (start, end) for balanced searches.
"""
if self.is_balanced():
aList, i = [], 0
while i < len(s):
progress = i
j = self.full_balanced_match(s, i)
if j is None:
i += 1
else:
aList.append(
(i, j),
)
i = j
assert progress < i
return aList
else:
return list(self.regex.finditer(s))
def full_balanced_match(self, s, i):
"""Return the index of the end of the match found at s[i:] or None."""
i1 = i
trace = False
if trace:
g.trace(self.find_s, s[i:].rstrip())
pattern = self.find_s
j = 0 # index into pattern
while i < len(s) and j < len(pattern) and pattern[j] in ("*", s[i]):
progress = i
if pattern[j : j + 3] in ("(*)", "[*]", "{*}"):
delim = pattern[j]
i = self.match_balanced(delim, s, i)
j += 3
elif j == len(pattern) - 1 and pattern[j] == "*":
# A trailing * matches the rest of the string.
j += 1
i = len(s)
break
else:
i += 1
j += 1
assert progress < i
found = i <= len(s) and j == len(pattern)
if trace and found:
g.trace("%s -> %s" % (pattern, s[i1:i]))
return i if found else None
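# Hedged example: with find_s "repr(*)", full_balanced_match("repr(x) + 1", 0)
# matches the leading "repr(x)" and returns 7, the index just past it.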
def match_balanced(self, delim, s, i):
"""
delim == s[i] and delim is in '([{'
Return the index of the end of the balanced parenthesized string, or len(s)+1.
"""
trace = False
assert s[i] == delim, s[i]
assert delim in "([{"
delim2 = ")]}"["([{".index(delim)]
assert delim2 in ")]}"
i1, level = i, 0
while i < len(s):
progress = i
ch = s[i]
i += 1
if ch == delim:
level += 1
elif ch == delim2:
level -= 1
if level == 0:
if trace:
g.trace("found: %s" % s[i1:i])
return i
assert progress < i
# Unmatched: a syntax error.
g.trace("unmatched %s in %s" % (delim, s), g.callers(4))
return len(s) + 1
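# Hedged example (illustration only): with s = "f(a, (b))",
# match_balanced("(", s, 1) scans the nested parens and returns 9,
# the index just past the matching ")".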
def match(self, s, trace=False):
"""
Perform the match on the entire string if possible.
Return (found, new s)
"""
trace = False or trace
caller = g.callers(2).split(",")[0].strip()
# The caller of match_all.
s1 = truncate(s, 40)
if self.is_balanced():
j = self.full_balanced_match(s, 0)
if j is None:
return False, s
else:
start, end = 0, len(s)
s = self.replace_balanced(s, start, end)
if trace:
g.trace("%-16s %30s %40s ==> %s" % (caller, self, s1, s))
return True, s
else:
m = self.regex.match(s)
if m and m.group(0) == s:
s = self.replace_regex(m, s)
if trace:
g.trace("%-16s %30s %30s ==> %s" % (caller, self, s1, s))
return True, s
else:
return False, s
def match_entire_string(self, s):
"""Return True if s matches self.find_s"""
if self.is_balanced():
j = self.full_balanced_match(s, 0)
return j == len(s)
else:
m = self.regex.match(s)
return m and m.group(0) == s
def replace(self, m, s):
"""Perform any kind of replacement."""
if self.is_balanced():
start, end = m
return self.replace_balanced(s, start, end)
else:
return self.replace_regex(m, s)
def replace_balanced(self, s1, start, end):
"""
Use m (returned by all_matches) to replace s by the string implied by repr_s.
Within repr_s, * star matches corresponding * in find_s
"""
trace = False
s = s1[start:end]
f, r = self.find_s, self.repl_s
i1 = f.find("(*)")
i2 = f.find("[*]")
i3 = f.find("{*}")
if -1 == i1 == i2 == i3:
return s1[:start] + r + s1[end:]
j = r.find("*")
if j == -1:
return s1[:start] + r + s1[end:]
i = min([z for z in [i1, i2, i3] if z > -1])
assert i > -1 # i is an index into f AND s
delim = f[i]
if trace:
g.trace("head", s[:i], f[:i])
assert s[:i] == f[:i], (s[:i], f[:i])
if trace:
g.trace("delim", delim)
k = self.match_balanced(delim, s, i)
s_star = s[i + 1 : k - 1]
if trace:
g.trace("s_star", s_star)
repl = r[:j] + s_star + r[j + 1 :]
if trace:
g.trace("repl", self.repl_s, "==>", repl)
return s1[:start] + repl + s1[end:]
def replace_regex(self, m, s):
"""Do the replacement in s specified by m."""
s = self.repl_s
for i in range(9):
group = "\\%s" % i
if s.find(group) > -1:
# g.trace(i, m.group(i))
s = s.replace(group, m.group(i))
return s
class ReduceTypes:
"""
A helper class for the top-level reduce_types function.
This class reduces a list of type hints to a string containing the
reduction of all types in the list.
"""
def __init__(self, aList=None, name=None, trace=False):
"""Ctor for ReduceTypes class."""
self.aList = aList
self.name = name
self.optional = False
self.trace = trace
def is_known_type(self, s):
"""
Return True if s is nothing but a single known type.
It suits the other methods of this class *not* to test inside inner
brackets. This prevents unwanted Any types.
"""
# s1 = s
s = s.strip()
table = (
"",
"None", # Tricky.
"complex",
"float",
"int",
"long",
"number",
"dict",
"list",
"tuple",
"bool",
"bytes",
"str",
"unicode",
)
for s2 in table:
if s2 == s:
return True
elif Pattern(s2 + "(*)", s).match_entire_string(s):
return True
if s.startswith("[") and s.endswith("]"):
inner = s[1:-1]
return self.is_known_type(inner) if inner else True
elif s.startswith("(") and s.endswith(")"):
inner = s[1:-1]
return self.is_known_type(inner) if inner else True
elif s.startswith("{") and s.endswith("}"):
return True
# inner = s[1:-1]
# return self.is_known_type(inner) if inner else True
table = (
# Pep 484: https://www.python.org/dev/peps/pep-0484/
# typing module: https://docs.python.org/3/library/typing.html
# Test the most common types first.
"Any",
"Dict",
"List",
"Optional",
"Tuple",
"Union",
# Not generated by this program, but could arise from patterns.
"AbstractSet",
"AnyMeta",
"AnyStr",
"BinaryIO",
"ByteString",
"Callable",
"CallableMeta",
"Container",
"Final",
"Generic",
"GenericMeta",
"Hashable",
"IO",
"ItemsView",
"Iterable",
"Iterator",
"KT",
"KeysView",
"Mapping",
"MappingView",
"Match",
"MutableMapping",
"MutableSequence",
"MutableSet",
"NamedTuple",
"OptionalMeta",
# 'POSIX', 'PY2', 'PY3',
"Pattern",
"Reversible",
"Sequence",
"Set",
"Sized",
"SupportsAbs",
"SupportsFloat",
"SupportsInt",
"SupportsRound",
"T",
"TextIO",
"TupleMeta",
"TypeVar",
"TypingMeta",
"Undefined",
"UnionMeta",
"VT",
"ValuesView",
"VarBinding",
)
for s2 in table:
if s2 == s:
return True
else:
# Don't look inside brackets.
pattern = Pattern(s2 + "[*]", s)
if pattern.match_entire_string(s):
return True
return False
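# Hedged examples (illustration only):
#   is_known_type("List[int]")  -> True  (matches the List[*] pattern)
#   is_known_type("MyClass")    -> False (callers reduce unknowns to Any)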
def reduce_collection(self, aList, kind):
"""
Reduce the inner parts of a collection for the given kind.
Return a list with only collections of the given kind reduced.
"""
trace = False
if trace:
g.trace(kind, aList)
assert isinstance(aList, list)
assert None not in aList, aList
pattern = Pattern("%s[*]" % kind)
others, r1, r2 = [], [], []
for s in sorted(set(aList)):
if pattern.match_entire_string(s):
r1.append(s)
else:
others.append(s)
if trace:
g.trace("1", others, r1)
for s in sorted(set(r1)):
parts = []
s2 = s[len(kind) + 1 : -1]
for s3 in s2.split(","):
s3 = s3.strip()
if trace:
g.trace("*", self.is_known_type(s3), s3)
parts.append(s3 if self.is_known_type(s3) else "Any")
r2.append("%s[%s]" % (kind, ", ".join(parts)))
if trace:
g.trace("2", r2)
result = others
result.extend(r2)
result = sorted(set(result))
if trace:
g.trace("3", result)
return result
def reduce_numbers(self, aList):
"""
Return aList with all number types in aList replaced by the most
general numeric type in aList.
"""
trace = False
found = None
numbers = ("number", "complex", "float", "long", "int")
for kind in numbers:
for z in aList:
if z == kind:
found = kind
break
if found:
break
if found:
assert found in numbers, found
aList = [z for z in aList if z not in numbers]
aList.append(found)
if trace:
g.trace(aList)
return aList
def reduce_types(self):
"""
self.aList consists of arbitrarily many types because this method is
called from format_return_expressions.
Return a *string* containing the reduction of all types in this list.
Returning a string means that all traversers always return strings,
never lists.
"""
r = [("None" if z in ("", None) else z) for z in self.aList]
assert None not in r
self.optional = "None" in r
# self.show adds Optional if this flag is set.
r = [z for z in r if z != "None"]
if not r:
self.optional = False
return self.show("None")
r = sorted(set(r))
assert r
assert None not in r
r = self.reduce_numbers(r)
for kind in (
"Dict",
"List",
"Tuple",
):
r = self.reduce_collection(r, kind)
r = self.reduce_unknowns(r)
r = sorted(set(r))
assert r
assert "None" not in r
if len(r) == 1:
return self.show(r[0])
else:
return self.show("Union[%s]" % (", ".join(sorted(r))))
def reduce_unknowns(self, aList):
"""Replace all unknown types in aList with Any."""
return [z if self.is_known_type(z) else "Any" for z in aList]
def show(self, s, known=True):
"""Show the result of reduce_types."""
aList, name = self.aList, self.name
trace = False or self.trace
s = s.strip()
if self.optional:
s = "Optional[%s]" % s
if trace and (not known or len(aList) > 1):
if name:
if name.find(".") > -1:
context = "".join(name.split(".")[1:])
else:
context = name
else:
context = g.callers(3).split(",")[0].strip()
context = truncate(context, 26)
known = "" if known else "? "
pattern = sorted(set([z.replace("\n", " ") for z in aList]))
pattern = "[%s]" % truncate(", ".join(pattern), 53 - 2)
print("reduce_types: %-26s %53s ==> %s%s" % (context, pattern, known, s))
# widths above match the corresponding indents in match_all and match.
return s
def split_types(self, s):
"""Split types on *outer level* commas."""
aList, i1, level = [], 0, 0
for i, ch in enumerate(s):
if ch == "[":
level += 1
elif ch == "]":
level -= 1
elif ch == "," and level == 0:
aList.append(s[i1:i])
i1 = i + 1
aList.append(s[i1:].strip())
return aList
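# Hedged example: split_types('int, Dict[str, int]') splits only on the
# outer comma -> ['int', 'Dict[str, int]'].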
class StandAloneMakeStubFile:
"""
A class to make Python stub (.pyi) files in the ~/stubs directory for
every file mentioned in the [Source Files] section of
~/stubs/make_stub_files.cfg.
"""
parser = None
def __init__(self):
"""Ctor for StandAloneMakeStubFile class."""
self.options = {}
# Ivars set on the command line...
self.config_fn = None
# self.finalize('~/stubs/make_stub_files.cfg')
self.enable_unit_tests = False
self.files = [] # May also be set in the config file.
# Ivars set in the config file...
self.output_fn = None
self.output_directory = self.finalize(".")
# self.finalize('~/stubs')
self.overwrite = False
self.prefix_lines = []
self.silent = False
self.trace_matches = False
self.trace_patterns = False
self.trace_reduce = False
self.trace_visitors = False
self.update_flag = False
self.verbose = False # Trace config arguments.
self.warn = False
# Pattern lists, set by config sections...
self.section_names = ("Global", "Def Name Patterns", "General Patterns")
self.def_patterns = [] # [Def Name Patterns]
self.general_patterns = [] # [General Patterns]
self.names_dict = {}
self.op_name_dict = self.make_op_name_dict()
self.patterns_dict = {}
self.regex_patterns = []
def finalize(self, fn):
"""Finalize and regularize a filename."""
fn = os.path.expanduser(fn)
fn = os.path.abspath(fn)
fn = os.path.normpath(fn)
return fn
def make_stub_file(self, fn):
"""
Make a stub file in ~/stubs for all source files mentioned in the
[Source Files] section of ~/stubs/make_stub_files.cfg
"""
if not fn.endswith(".py"):
print("not a python file", fn)
return
if not os.path.exists(fn):
print("not found", fn)
return
# base_fn = os.path.basename(fn)
# out_fn = os.path.join(self.output_directory, base_fn)
# out_fn = out_fn[:-3] + '.pyi'
out_fn = fn + "i"
self.output_fn = os.path.normpath(out_fn)
try:
s = open(fn).read()
node = ast.parse(s, filename=fn, mode="exec")
StubTraverser(controller=self).run(node)
except:
print(
"Unexpected error occurred whilst parsing file %s:" % fn,
sys.exc_info()[0],
)
def run(self):
"""
Make stub files for all files.
Do nothing if the output directory does not exist.
"""
if self.enable_unit_tests:
self.run_all_unit_tests()
if self.files:
dir_ = self.output_directory
if dir_:
if os.path.exists(dir_):
for fn in self.files:
self.make_stub_file(fn)
else:
print("output directory not found: %s" % dir_)
else:
print("no output directory")
elif not self.enable_unit_tests:
print("no input files")
def run_all_unit_tests(self):
"""Run all unit tests in the make_stub_files/test directory."""
import unittest
loader = unittest.TestLoader()
suite = loader.discover(
os.path.abspath("."), pattern="test*.py", top_level_dir=None
)
unittest.TextTestRunner(verbosity=1).run(suite)
def scan_command_line(self):
"""Set ivars from command-line arguments."""
# This automatically implements the --help option.
usage = "usage: make_stub_files.py [options] file1, file2, ..."
parser = optparse.OptionParser(usage=usage)
add = parser.add_option
add("-c", "--config", dest="fn", help="full path to configuration file")
add("-d", "--dir", dest="dir", help="full path to the output directory")
add(
"-o",
"--overwrite",
action="store_true",
default=False,
help="overwrite existing stub (.pyi) files",
)
add(
"-s",
"--silent",
action="store_true",
default=False,
help="run without messages",
)
add(
"-t",
"--test",
action="store_true",
default=False,
help="run unit tests on startup",
)
add(
"--trace-matches",
action="store_true",
default=False,
help="trace Pattern.matches",
)
add(
"--trace-patterns",
action="store_true",
default=False,
help="trace pattern creation",
)
add(
"--trace-reduce",
action="store_true",
default=False,
help="trace st.reduce_types",
)
add(
"--trace-visitors",
action="store_true",
default=False,
help="trace visitor methods",
)
add(
"-u",
"--update",
action="store_true",
default=False,
help="update stubs in existing stub file",
)
add(
"-v",
"--verbose",
action="store_true",
default=False,
help="verbose output in .pyi file",
)
add(
"-w",
"--warn",
action="store_true",
default=False,
help="warn about unannotated args",
)
# Parse the options
options, args = parser.parse_args()
# Handle the options...
self.enable_unit_tests = options.test
self.overwrite = options.overwrite
self.silent = options.silent
self.trace_matches = options.trace_matches
self.trace_patterns = options.trace_patterns
self.trace_reduce = options.trace_reduce
self.trace_visitors = options.trace_visitors
self.update_flag = options.update
self.verbose = options.verbose
self.warn = options.warn
if options.fn:
self.config_fn = options.fn
if options.dir:
dir_ = options.dir
dir_ = self.finalize(dir_)
if os.path.exists(dir_):
self.output_directory = dir_
else:
print("--dir: directory does not exist: %s" % dir_)
print("exiting")
sys.exit(1)
# If any files remain, set self.files.
if args:
args = [self.finalize(z) for z in args]
if args:
self.files = args
def scan_options(self):
"""Set all configuration-related ivars."""
trace = False
if trace:
g.trace("config file", self.config_fn)
if not self.config_fn:
return
self.parser = parser = self.create_parser()
s = self.get_config_string()
self.init_parser(s)
if self.files:
files_source = "command-line"
files = self.files
elif parser.has_section("Global"):
files_source = "config file"
files = parser.get("Global", "files")
files = [z.strip() for z in files.split("\n") if z.strip()]
else:
return
files2 = []
for z in files:
files2.extend(glob.glob(self.finalize(z)))
self.files = [z for z in files2 if z and os.path.exists(z)]
if trace:
print("Files (from %s)...\n" % files_source)
for z in self.files:
print(z)
print("")
if "output_directory" in parser.options("Global"):
s = parser.get("Global", "output_directory")
output_dir = self.finalize(s)
if os.path.exists(output_dir):
self.output_directory = output_dir
if self.verbose:
print("output directory: %s\n" % output_dir)
else:
print("output directory not found: %s\n" % output_dir)
self.output_directory = None # inhibit run().
if "prefix_lines" in parser.options("Global"):
prefix = parser.get("Global", "prefix_lines")
self.prefix_lines = prefix.split("\n")
# The parser does not preserve leading whitespace.
if trace:
print("Prefix lines...\n")
for z in self.prefix_lines:
print(z)
print("")
self.def_patterns = self.scan_patterns("Def Name Patterns")
self.general_patterns = self.scan_patterns("General Patterns")
self.make_patterns_dict()
def make_op_name_dict(self):
"""
Make a dict whose keys are operators ('+', '+=', etc),
and whose values are lists of values of ast.Node.__class__.__name__.
"""
d = {
".": [
"Attr",
],
"(*)": [
"Call",
"Tuple",
],
"[*]": [
"List",
"Subscript",
],
"{*}": [
"???",
],
# 'and': 'BoolOp',
# 'or': 'BoolOp',
}
for op in (
"+",
"-",
"*",
"/",
"%",
"**",
"<<",
">>",
"|",
"^",
"&",
"//",
):
d[op] = [
"BinOp",
]
for op in (
"==",
"!=",
"<",
"<=",
">",
">=",
"is",
"is not",
"in",
"not in",
):
d[op] = [
"Compare",
]
return d
def create_parser(self):
"""Create a RawConfigParser and return it."""
parser = configparser.RawConfigParser(dict_type=OrderedDict)
# Requires Python 2.7
parser.optionxform = str
return parser
def find_pattern_ops(self, pattern):
"""Return a list of operators in pattern.find_s."""
trace = False or self.trace_patterns
if pattern.is_regex():
# Add the pattern to the regex patterns list.
g.trace(pattern)
self.regex_patterns.append(pattern)
return []
d = self.op_name_dict
keys1, keys2, keys3, keys9 = [], [], [], []
for op in d:
aList = d.get(op)
if op.replace(" ", "").isalnum():
# an alpha op, like 'not', 'not in', etc.
keys9.append(op)
elif len(op) == 3:
keys3.append(op)
elif len(op) == 2:
keys2.append(op)
elif len(op) == 1:
keys1.append(op)
else:
g.trace("bad op", op)
ops = []
s = s1 = pattern.find_s
for aList in (keys3, keys2, keys1):
for op in aList:
# Must match word here!
if s.find(op) > -1:
s = s.replace(op, "")
ops.append(op)
# Handle the keys9 list very carefully.
for op in keys9:
target = " %s " % op
if s.find(target) > -1:
ops.append(op)
break # Only one match allowed.
if trace and ops:
g.trace(s1, ops)
return ops
def get_config_string(self):
fn = self.finalize(self.config_fn)
if os.path.exists(fn):
if self.verbose:
print("\nconfiguration file: %s\n" % fn)
f = open(fn, "r")
s = f.read()
f.close()
return s
else:
print("\nconfiguration file not found: %s" % fn)
return ""
def init_parser(self, s):
"""Add double back-slashes to all patterns starting with '['."""
trace = False
if not s:
return
aList = []
for s in s.split("\n"):
if self.is_section_name(s):
aList.append(s)
elif s.strip().startswith("["):
aList.append(r"\\" + s[1:])
if trace:
g.trace("*** escaping:", s)
else:
aList.append(s)
s = "\n".join(aList) + "\n"
if trace:
g.trace(s)
file_object = io.StringIO(s)
self.parser.read_file(file_object)
def is_section_name(self, s):
def munge(s):
return s.strip().lower().replace(" ", "")
s = s.strip()
if s.startswith("[") and s.endswith("]"):
s = munge(s[1:-1])
for s2 in self.section_names:
if s == munge(s2):
return True
return False
def make_patterns_dict(self):
"""Assign all patterns to the appropriate ast.Node."""
for pattern in self.general_patterns:
ops = self.find_pattern_ops(pattern)
if ops:
for op in ops:
# Add the pattern to op's list.
op_names = self.op_name_dict.get(op)
for op_name in op_names:
aList = self.patterns_dict.get(op_name, [])
aList.append(pattern)
self.patterns_dict[op_name] = aList
else:
# Enter the name in self.names_dict.
name = pattern.find_s
# Special case for 'number'
if name == "number":
aList = self.patterns_dict.get("Num", [])
aList.append(pattern)
self.patterns_dict["Num"] = aList
elif name in self.names_dict:
g.trace("duplicate pattern", pattern)
else:
self.names_dict[name] = pattern.repl_s
if debug_flag:
g.trace("names_dict...")
for z in sorted(self.names_dict):
print(" %s: %s" % (z, self.names_dict.get(z)))
if debug_flag:
g.trace("patterns_dict...")
for z in sorted(self.patterns_dict):
aList = self.patterns_dict.get(z)
print(z)
for pattern in sorted(aList):
print(" " + repr(pattern))
# Note: retain self.general_patterns for use in argument lists.
def scan_patterns(self, section_name):
"""Parse the config section into a list of patterns, preserving order."""
trace = False or self.trace_patterns
parser = self.parser
aList = []
if parser.has_section(section_name):
seen = set()
for key in parser.options(section_name):
value = parser.get(section_name, key)
# A kludge: strip leading \\ from patterns.
if key.startswith(r"\\"):
key = "[" + key[2:]
if trace:
g.trace("removing escapes", key)
if key in seen:
g.trace("duplicate key", key)
else:
seen.add(key)
aList.append(Pattern(key, value))
if trace:
g.trace("%s...\n" % section_name)
for z in aList:
print(z)
print("")
# elif trace:
# print('no section: %s' % section_name)
# print(parser.sections())
# print('')
return aList
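# Hedged sample config (illustration only; the keys and values below are
# assumptions, but the section names match self.section_names above):
#   [Global]
#   files: make_stub_files.py
#   output_directory: ~/stubs
#   [Def Name Patterns]
#   main: None
#   [General Patterns]
#   aList: List[Any]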
class Stub(object):
"""
A class representing all the generated stub for a class or def.
stub.full_name should represent the complete context of a def.
"""
def __init__(self, kind, name, parent=None, stack=None):
"""Stub ctor. Equality depends only on full_name and kind."""
self.children = []
self.full_name = "%s.%s" % (".".join(stack), name) if stack else name
self.kind = kind
self.name = name
self.out_list = []
self.parent = parent
self.stack = stack # StubTraverser.context_stack.
if stack:
assert stack[-1] == parent.name, (stack[-1], parent.name)
if parent:
assert isinstance(parent, Stub)
parent.children.append(self)
def __eq__(self, obj):
"""
Stub.__eq__. Return whether two stubs refer to the same method.
Do *not* test parent links. That would interfere with --update logic.
"""
if isinstance(obj, Stub):
return self.full_name == obj.full_name and self.kind == obj.kind
else:
return NotImplemented
def __ne__(self, obj):
"""Stub.__ne__"""
return not self.__eq__(obj)
def __hash__(self):
"""Stub.__hash__. Equality depends *only* on full_name and kind."""
return len(self.kind) + sum([ord(z) for z in self.full_name])
def __repr__(self):
"""Stub.__repr__."""
# return 'Stub: %s %s' % (id(self), self.full_name)
return "Stub: %s\n%s" % (self.full_name, g.objToString(self.out_list))
__str__ = __repr__
def level(self):
"""Return the number of parents."""
return len(self.parents())
def parents(self):
"""Return a list of this stub's parents."""
return self.full_name.split(".")[:-1]
class StubFormatter(AstFormatter):
"""
Formats an ast.Node and its descendants,
making pattern substitutions in Name and operator nodes.
"""
def __init__(self, controller, traverser):
"""Ctor for StubFormatter class."""
self.controller = x = controller
self.traverser = traverser
# 2016/02/07: to give the formatter access to the class_stack.
self.def_patterns = x.def_patterns
self.general_patterns = x.general_patterns
self.names_dict = x.names_dict
self.patterns_dict = x.patterns_dict
self.raw_format = AstFormatter().format
self.regex_patterns = x.regex_patterns
self.trace_matches = x.trace_matches
self.trace_patterns = x.trace_patterns
self.trace_reduce = x.trace_reduce
self.trace_visitors = x.trace_visitors
self.verbose = x.verbose
# mypy workarounds
self.seen_names = []
matched_d = {}
def match_all(self, node, s, trace=False):
"""Match all the patterns for the given node."""
trace = False or trace or self.trace_matches
# verbose = True
d = self.matched_d
name = node.__class__.__name__
s1 = truncate(s, 40)
caller = g.callers(2).split(",")[1].strip()
# The direct caller of match_all.
patterns = self.patterns_dict.get(name, []) + self.regex_patterns
for pattern in patterns:
found, s = pattern.match(s, trace=False)
if found:
if trace:
aList = d.get(name, [])
if pattern not in aList:
aList.append(pattern)
d[name] = aList
print(
"match_all: %-12s %26s %40s ==> %s"
% (caller, pattern, s1, s)
)
break
return s
def visit(self, node):
"""StubFormatter.visit: supports --verbose tracing."""
s = AstFormatter.visit(self, node)
# g.trace('%12s %s' % (node.__class__.__name__,s))
return s
def trace_visitor(self, node, op, s):
"""Trace node's visitor."""
if self.trace_visitors:
caller = g.callers(2).split(",")[1]
s1 = AstFormatter().format(node).strip()
print("%12s op %-6s: %s ==> %s" % (caller, op.strip(), s1, s))
# StubFormatter visitors for operands...
# Attribute(expr value, identifier attr, expr_context ctx)
attrs_seen = [] # type: List[Any]
def do_Attribute(self, node):
"""StubFormatter.do_Attribute."""
trace = False
s = "%s.%s" % (
self.visit(node.value),
node.attr,
) # Don't visit node.attr: it is always a string.
s2 = self.names_dict.get(s)
if trace and s2 and s2 not in self.attrs_seen:
self.attrs_seen.append(s2)
g.trace(s, "==>", s2)
return s2 or s
# Return generic markers to allow better pattern matches.
def do_Bytes(self, node): # Python 3.x only.
return "bytes" # return str(node.s)
def do_Num(self, node):
# make_patterns_dict treats 'number' as a special case.
# return self.names_dict.get('number', 'number')
return "number" # return repr(node.n)
def do_Str(self, node):
"""This represents a string constant."""
return "str" # return repr(node.s)
def do_Dict(self, node):
result = []
keys = [self.visit(z) for z in node.keys]
values = [self.visit(z) for z in node.values]
if len(keys) == len(values):
result.append("{")
items = []
# pylint: disable=consider-using-enumerate
for i in range(len(keys)):
items.append("%s:%s" % (keys[i], values[i]))
result.append(", ".join(items))
result.append("}")
else:
print(
"Error: f.Dict: len(keys) != len(values)\nkeys: %s\nvals: %s"
% (repr(keys), repr(values))
)
# return ''.join(result)
if result:
return "Dict[%s]" % "".join(result)
else:
return "Dict"
def do_List(self, node):
"""StubFormatter.List."""
elts = [self.visit(z) for z in node.elts]
elts = [z for z in elts if z] # Defensive.
# g.trace('=====',elts)
if elts:
return "List[%s]" % ", ".join(elts)
else:
return "List"
# seen_names = [] # t--ype: List[str]
def do_Name(self, node):
"""StubFormatter ast.Name visitor."""
trace = False
d = self.names_dict
name = d.get(node.id, node.id)
s = "bool" if name in ("True", "False") else name
if trace and node.id not in self.seen_names:
self.seen_names.append(node.id)
if d.get(node.id):
g.trace(node.id, "==>", d.get(node.id))
elif node.id == "aList":
g.trace("**not found**", node.id)
return s
# pylint: disable=using-constant-test
def do_Tuple(self, node):
"""StubFormatter.Tuple."""
elts = [self.visit(z) for z in node.elts]
if 1:
return "Tuple[%s]" % ", ".join(elts)
else:
s = "(%s)" % ", ".join(elts)
return self.match_all(node, s)
# return 'Tuple[%s]' % ', '.join(elts)
# StubFormatter visitors for operators...
# BinOp(expr left, operator op, expr right)
def do_BinOp(self, node):
"""StubFormatter.BinOp visitor."""
trace = False or self.trace_reduce
verbose = False
numbers = [
"number",
"complex",
"float",
"long",
"int",
]
op = self.op_name(node.op)
lhs = self.visit(node.left)
rhs = self.visit(node.right)
if op.strip() in ("is", "is not", "in", "not in"):
s = "bool"
elif lhs == rhs:
s = lhs
# Perhaps not always right,
# but it is correct for Tuple, List, Dict.
elif lhs in numbers and rhs in numbers:
s = reduce_types([lhs, rhs], trace=trace)
# reduce_numbers would be wrong: it returns a list.
elif lhs == "str" and op in "%+*":
# str + any implies any is a string.
s = "str"
else:
if trace and verbose and lhs == "str":
g.trace("***** unknown string op", lhs, op, rhs)
# Fall back to the base-class behavior.
s = "%s%s%s" % (self.visit(node.left), op, self.visit(node.right))
s = self.match_all(node, s)
self.trace_visitor(node, op, s)
return s
# BoolOp(boolop op, expr* values)
def do_BoolOp(self, node): # Python 2.x only.
"""StubFormatter.BoolOp visitor for 'and' and 'or'."""
trace = False or self.trace_reduce
op = self.op_name(node.op)
values = [self.visit(z).strip() for z in node.values]
s = reduce_types(values, trace=trace)
s = self.match_all(node, s)
self.trace_visitor(node, op, s)
return s
# Call(expr func, expr* args, keyword* keywords, expr? starargs, expr? kwargs)
def do_Call(self, node):
"""StubFormatter.Call visitor."""
trace = False
func = self.visit(node.func)
args = [self.visit(z) for z in node.args]
for z in node.keywords:
# Calls f.do_keyword.
args.append(self.visit(z))
if getattr(node, "starargs", None):
args.append("*%s" % (self.visit(node.starargs)))
if getattr(node, "kwargs", None):
args.append("**%s" % (self.visit(node.kwargs)))
args = [z for z in args if z] # Kludge: Defensive coding.
# Explicit pattern:
if func in (
"dict",
"list",
"set",
"tuple",
):
if args:
s = "%s[%s]" % (func.capitalize(), ", ".join(args))
else:
s = "%s" % func.capitalize()
else:
s = "%s(%s)" % (func, ", ".join(args))
s = self.match_all(node, s, trace=trace)
self.trace_visitor(node, "call", s)
return s
# keyword = (identifier arg, expr value)
def do_keyword(self, node):
# node.arg is a string.
value = self.visit(node.value)
# This is a keyword *arg*, not a Python keyword!
return "%s=%s" % (node.arg, value)
# Compare(expr left, cmpop* ops, expr* comparators)
def do_Compare(self, node):
"""
StubFormatter ast.Compare visitor for these ops:
'==', '!=', '<', '<=', '>', '>=', 'is', 'is not', 'in', 'not in',
"""
s = "bool" # Correct regardless of arguments.
ops = ",".join([self.op_name(z) for z in node.ops])
self.trace_visitor(node, ops, s)
return s
# If(expr test, stmt* body, stmt* orelse)
def do_IfExp(self, node):
"""StubFormatterIfExp (ternary operator)."""
trace = False or self.trace_reduce
aList = [
self.match_all(node, self.visit(node.body)),
self.match_all(node, self.visit(node.orelse)),
]
s = reduce_types(aList, trace=trace)
s = self.match_all(node, s)
self.trace_visitor(node, "if", s)
return s
# Subscript(expr value, slice slice, expr_context ctx)
def do_Subscript(self, node):
"""StubFormatter.Subscript."""
s = "%s[%s]" % (self.visit(node.value), self.visit(node.slice))
s = self.match_all(node, s)
self.trace_visitor(node, "[]", s)
return s
# UnaryOp(unaryop op, expr operand)
def do_UnaryOp(self, node):
"""StubFormatter.UnaryOp for unary +, -, ~ and 'not' operators."""
op = self.op_name(node.op)
# g.trace(op.strip(), self.raw_format(node.operand))
if op.strip() == "not":
return "bool"
else:
s = self.visit(node.operand)
s = self.match_all(node, s)
self.trace_visitor(node, op, s)
return s
def do_Return(self, node):
"""
StubFormatter ast.Return visitor.
Return only the return expression itself.
"""
s = AstFormatter.do_Return(self, node)
assert s.startswith("return"), repr(s)
return s[len("return") :].strip()
class StubTraverser(ast.NodeVisitor):
"""
An ast.Node traverser class that outputs a stub for each class or def.
Names of visitors must start with visit_. The order of traversal does
not matter, because so few visitors do anything.
"""
def __init__(self, controller):
"""Ctor for StubTraverser class."""
self.controller = x = controller
# A StandAloneMakeStubFile instance.
# Internal state ivars...
self.class_name_stack = []
self.class_defs_count = 0
# The number of defs seen for this class.
self.context_stack = []
sf = StubFormatter(controller=controller, traverser=self)
self.format = sf.format
self.arg_format = AstArgFormatter().format
self.level = 0
self.output_file = None
self.parent_stub = None
self.raw_format = AstFormatter().format
self.returns = []
self.stubs_dict = {}
# Keys are stub.full_name's. Values are stubs.
self.warn_list = []
# Copies of controller ivars...
self.output_fn = x.output_fn
self.overwrite = x.overwrite
self.prefix_lines = x.prefix_lines
self.silent = x.silent
self.regex_patterns = x.regex_patterns
self.update_flag = x.update_flag
self.trace_matches = x.trace_matches
self.trace_patterns = x.trace_patterns
self.trace_reduce = x.trace_reduce
self.trace_visitors = x.trace_visitors
self.verbose = x.verbose
self.warn = x.warn
# Copies of controller patterns...
self.def_patterns = x.def_patterns
self.names_dict = x.names_dict
self.general_patterns = x.general_patterns
self.patterns_dict = x.patterns_dict
def add_stub(self, d, stub):
"""Add the stub to d, checking that it does not exist."""
trace = False
verbose = False
key = stub.full_name
assert key
if key in d:
caller = g.callers(2).split(",")[1]
g.trace("Ignoring duplicate entry for %s in %s" % (stub, caller))
else:
d[key] = stub
if trace and verbose:
caller = g.callers(2).split(",")[1]
g.trace("%17s %s" % (caller, stub.full_name))
elif trace:
g.trace(stub.full_name)
def indent(self, s):
"""Return s, properly indented."""
# This version of indent *is* used.
return "%s%s" % (" " * 4 * self.level, s)
def out(self, s):
"""Output the string to the console or the file."""
s = self.indent(s)
if self.parent_stub:
self.parent_stub.out_list.append(s)
elif self.output_file:
self.output_file.write(s + "\n")
else:
print(s)
# pylint: disable=using-constant-test
def run(self, node):
"""StubTraverser.run: write the stubs in node's tree to self.output_fn."""
fn = self.output_fn
dir_ = os.path.dirname(fn)
if os.path.exists(fn) and not self.overwrite:
print("file exists: %s" % fn)
elif not dir_ or os.path.exists(dir_):
# time.clock has been deprecated in Python 3.3 and will be removed from Python 3.8: use time.perf_counter or time.process_time instead
t1 = time.perf_counter()
# Delayed output allows sorting.
self.parent_stub = Stub(kind="root", name="<new-stubs>")
for z in self.prefix_lines or []:
self.parent_stub.out_list.append(z)
self.visit(node)
# Creates parent_stub.out_list.
if self.update_flag:
self.parent_stub = self.update(fn, new_root=self.parent_stub)
if 1:
self.output_file = open(fn, "w")
self.output_stubs(self.parent_stub)
self.output_file.close()
self.output_file = None
self.parent_stub = None
t2 = time.perf_counter()
# do some stuff
if not self.silent:
print("wrote: %s in %4.2f sec" % (fn, t2 - t1))
else:
print("output directory not not found: %s" % dir_)
def output_stubs(self, stub):
"""Output this stub and all its descendants."""
for s in stub.out_list or []:
# Indentation must be present when an item is added to stub.out_list.
if self.output_file:
self.output_file.write(s.rstrip() + "\n")
else:
print(s)
# Recursively print all children.
for child in stub.children:
self.output_stubs(child)
def output_time_stamp(self):
"""Put a time-stamp in the output file."""
if self.output_file:
self.output_file.write(
"# make_stub_files: %s\n" % time.strftime("%a %d %b %Y at %H:%M:%S")
)
def update(self, fn, new_root):
"""
Merge the new_root tree with the old_root tree in fn (a .pyi file).
new_root is the root of the stub tree from the .py file.
old_root (read below) is the root of stub tree from the .pyi file.
Return old_root, or new_root if there are any errors.
"""
s = self.get_stub_file(fn)
if not s or not s.strip():
return new_root
if "\t" in s:
# Tabs in stub files make it impossible to parse them reliably.
g.trace("Can not update stub files containing tabs.")
return new_root
# Read old_root from the .pyi file.
old_d, old_root = self.parse_stub_file(
s, root_name="<old-stubs>"
) # pylint: disable=unused-variable
if old_root:
# Merge new stubs into the old tree.
if debug_flag:
print(self.trace_stubs(old_root, header="old_root"))
print(self.trace_stubs(new_root, header="new_root"))
print("***** updating stubs from %s *****" % fn)
self.merge_stubs(self.stubs_dict.values(), old_root, new_root)
if debug_flag:
print(self.trace_stubs(old_root, header="updated_root"))
return old_root
else:
return new_root
def get_stub_file(self, fn):
"""Read the stub file into s."""
if os.path.exists(fn):
try:
s = open(fn, "r").read()
except Exception:
print("--update: error reading %s" % fn)
s = None
return s
else:
print("--update: not found: %s" % fn)
return None
def parse_stub_file(self, s, root_name):
"""
Parse s, the contents of a stub file, into a tree of Stubs.
Parse by hand, so that --update can be run with Python 2.
"""
trace = False
assert "\t" not in s
d = {}
root = Stub(kind="root", name=root_name)
indent_stack = [-1] # To prevent the root from being popped.
stub_stack = [root]
lines = []
pat = re.compile(r"^([ ]*)(def|class)\s+([a-zA-Z_]+)(.*)")
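# The pattern captures indentation, kind, and name; e.g. it matches
# '    def spam(self) -> None: ...' with groups ('    ', 'def', 'spam', ...).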
for line in g.splitLines(s):
m = pat.match(line)
if m:
indent, kind, name, rest = (
len(m.group(1)),
m.group(2),
m.group(3),
m.group(4),
)
old_indent = indent_stack[-1]
# Terminate any previous lines.
old_stub = stub_stack[-1]
old_stub.out_list.extend(lines)
if trace:
for s in lines:
g.trace(" " + s.rstrip())
lines = [line]
# Adjust the stacks.
if indent == old_indent:
stub_stack.pop()
elif indent > old_indent:
indent_stack.append(indent)
else: # indent < old_indent
# The indent_stack can't underflow because
# indent >= 0 and indent_stack[0] < 0
assert indent >= 0
while indent <= indent_stack[-1]:
indent_stack.pop()
old_stub = stub_stack.pop()
assert old_stub != root
indent_stack.append(indent)
# Create and push the new stub *after* adjusting the stacks.
assert stub_stack
parent = stub_stack[-1]
stack = [z.name for z in stub_stack[1:]]
stub = Stub(kind, name, parent, stack)
self.add_stub(d, stub)
stub_stack.append(stub)
if trace:
g.trace("%s%5s %s %s" % (" " * indent, kind, name, rest))
else:
parent = stub_stack[-1]
lines.append(line)
# Terminate the last stub.
old_stub = stub_stack[-1]
old_stub.out_list.extend(lines)
if trace:
for s in lines:
g.trace(" " + s.rstrip())
return d, root
def merge_stubs(self, new_stubs, old_root, new_root, trace=False):
"""
Merge the new_stubs *list* into the old_root *tree*.
- new_stubs is a list of Stubs from the .py file.
- old_root is the root of the stubs from the .pyi file.
- new_root is the root of the stubs from the .py file.
"""
trace = False or trace
verbose = False
# Part 1: Delete old stubs that do *not* exist in the *new* tree.
aList = self.check_delete(new_stubs, old_root, new_root, trace and verbose)
# Checks that all ancestors of deleted nodes will be deleted.
aList = list(reversed(self.sort_stubs_by_hierarchy(aList)))
# Sort old stubs so that children are deleted before parents.
if trace and verbose:
dump_list("ordered delete list", aList)
for stub in aList:
if trace:
g.trace("deleting %s" % stub)
parent = self.find_parent_stub(stub, old_root) or old_root
parent.children.remove(stub)
assert not self.find_stub(stub, old_root), stub
# Part 2: Insert new stubs that do *not* exist in the *old* tree.
aList = [z for z in new_stubs if not self.find_stub(z, old_root)]
aList = self.sort_stubs_by_hierarchy(aList)
# Sort new stubs so that parents are created before children.
for stub in aList:
if trace:
g.trace("inserting %s" % stub)
parent = self.find_parent_stub(stub, old_root) or old_root
parent.children.append(stub)
assert self.find_stub(stub, old_root), stub
def check_delete(self, new_stubs, old_root, new_root, trace):
"""Return a list of nodes that can be deleted."""
old_stubs = self.flatten_stubs(old_root)
old_stubs.remove(old_root)
aList = [z for z in old_stubs if z not in new_stubs]
if trace:
dump_list("old_stubs", old_stubs)
dump_list("new_stubs", new_stubs)
dump_list("to-be-deleted stubs", aList)
delete_list = []
# Check that all parents of to-be-deleted nodes will be deleted.
for z in aList:
z1 = z
for i in range(20): # pylint: disable=unused-variable
z = z.parent
if not z:
g.trace("can not append: new root not found", z)
break
elif z == old_root:
# if trace: g.trace('can delete', z1)
delete_list.append(z1)
break
elif z not in aList:
g.trace("can not delete %s because of %s" % (z1, z))
break
else:
g.trace("can not happen: parent loop")
if trace:
dump_list("delete_list", delete_list)
return delete_list
def flatten_stubs(self, root):
"""Return a flattened list of all stubs in root's tree."""
aList = [root]
for child in root.children:
self.flatten_stubs_helper(child, aList)
return aList
def flatten_stubs_helper(self, root, aList):
"""Append all stubs in root's tree to aList."""
aList.append(root)
for child in root.children:
self.flatten_stubs_helper(child, aList)
def find_parent_stub(self, stub, root):
"""Return stub's parent **in root's tree**."""
return self.find_stub(stub.parent, root) if stub.parent else None
def find_stub(self, stub, root):
"""Return the stub **in root's tree** that matches stub."""
if stub == root: # Must use Stub.__eq__!
return root # not stub!
for child in root.children:
stub2 = self.find_stub(stub, child)
if stub2:
return stub2
return None
def sort_stubs_by_hierarchy(self, stubs1):
"""
Sort the list of Stubs so that parents appear before all their
descendants.
"""
stubs, result = stubs1[:], []
for i in range(50):
if stubs:
# Add all stubs with i parents to the results.
found = [z for z in stubs if z.level() == i]
result.extend(found)
for z in found:
stubs.remove(z)
else:
return result
g.trace("can not happen: unbounded stub levels.")
return [] # Abort the merge.
def trace_stubs(self, stub, aList=None, header=None, level=-1):
"""Return a trace of the given stub and all its descendants."""
indent = " " * 4 * max(0, level)
if level == -1:
aList = ["===== %s...\n" % (header) if header else ""]
for s in stub.out_list:
aList.append("%s%s" % (indent, s.rstrip()))
for child in stub.children:
self.trace_stubs(child, level=level + 1, aList=aList)
if level == -1:
return "\n".join(aList) + "\n"
# 2: ClassDef(identifier name, expr* bases,
# stmt* body, expr* decorator_list)
# 3: ClassDef(identifier name, expr* bases,
# keyword* keywords, expr? starargs, expr? kwargs
# stmt* body, expr* decorator_list)
#
# keyword arguments supplied to call (NULL identifier for **kwargs)
# keyword = (identifier? arg, expr value)
def visit_ClassDef(self, node):
# Create the stub in the old context.
old_stub = self.parent_stub
self.class_defs_count = 0
self.parent_stub = Stub("class", node.name, old_stub, self.context_stack)
self.add_stub(self.stubs_dict, self.parent_stub)
# Enter the new context.
self.class_name_stack.append(node.name)
self.context_stack.append(node.name)
if self.trace_matches or self.trace_reduce:
print("\nclass %s\n" % node.name)
#
# Fix issue #2: look ahead to see if there are any functions in this class.
empty = not any(isinstance(z, ast.FunctionDef) for z in node.body)
tail = " ..." if empty else ""
#
# Format...
bases = [self.visit(z) for z in node.bases] if node.bases else []
if getattr(node, "keywords", None): # Python 3
for keyword in node.keywords:
bases.append("%s=%s" % (keyword.arg, self.visit(keyword.value)))
if getattr(node, "starargs", None): # Python 3
bases.append("*%s", self.visit(node.starargs))
if getattr(node, "kwargs", None): # Python 3
bases.append("*%s", self.visit(node.kwargs))
if not node.name.startswith("_"):
if node.bases:
s = "(%s)" % ", ".join([self.format(z) for z in node.bases])
else:
s = ""
self.out("class %s%s:%s" % (node.name, s, tail))
# Visit...
self.level += 1
for z in node.body:
self.visit(z)
# Restore the context
self.context_stack.pop()
self.class_name_stack.pop()
self.level -= 1
self.parent_stub = old_stub
# 2: FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list)
# 3: FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list,
# expr? returns)
def visit_FunctionDef(self, node):
# Create the stub in the old context.
old_stub = self.parent_stub
self.parent_stub = Stub("def", node.name, old_stub, self.context_stack)
self.add_stub(self.stubs_dict, self.parent_stub)
# Enter the new context.
self.returns = []
self.level += 1
self.context_stack.append(node.name)
for z in node.body:
self.visit(z)
self.context_stack.pop()
self.level -= 1
# Format *after* traversing
# if self.trace_matches or self.trace_reduce:
# if not self.class_name_stack:
# print('def %s\n' % node.name)
self.out(
"def %s(%s) -> %s"
% (node.name, self.format_arguments(node.args), self.format_returns(node))
)
self.parent_stub = old_stub
# arguments = (expr* args, identifier? vararg, identifier? kwarg, expr* defaults)
def format_arguments(self, node):
"""
Format the arguments node.
Similar to AstFormat.do_arguments, but it is not a visitor!
"""
assert isinstance(node, ast.arguments), node
args = [self.raw_format(z) for z in node.args]
defaults = [self.raw_format(z) for z in node.defaults]
# Assign default values to the last args.
result = []
n_plain = len(args) - len(defaults)
for i, arg in enumerate(args):
s = self.munge_arg(arg)
if i < n_plain:
result.append(s)
else:
result.append("%s=%s" % (s, defaults[i - n_plain]))
# Now add the vararg and kwarg args.
name = getattr(node, "vararg", None)
if name:
if hasattr(ast, "arg"): # python 3:
name = self.raw_format(name)
result.append("*" + name)
name = getattr(node, "kwarg", None)
if name:
if hasattr(ast, "arg"): # python 3:
name = self.raw_format(name)
result.append("**" + name)
return ", ".join(result)
type_pattern = re.compile(r".*:.*")
def munge_arg(self, s):
"""Add an annotation for s if possible."""
if s == "self":
return s
for pattern in self.general_patterns:
if pattern.match_entire_string(s):
return "%s: %s" % (s, pattern.repl_s)
if self.warn and s not in self.warn_list:
self.warn_list.append(s)
print("no annotation for %s" % s)
# Fix issue #3.
if self.type_pattern.match(s):
return s
return s + ": Any"
def format_returns(self, node):
"""
Calculate the return type:
- Return None if there are no return statements.
- Patterns in [Def Name Patterns] override all other patterns.
- Otherwise, return a list of return values.
"""
trace = False
name = self.get_def_name(node)
raw = [self.raw_format(z) for z in self.returns]
r = [self.format(z) for z in self.returns]
# Allow StubFormatter.do_Return to do the hack.
# Step 1: Return None if there are no return statements.
if trace and self.returns:
g.trace("name: %s r:\n%s" % (name, r))
if not [z for z in self.returns if z.value is not None]:
empty = not any(isinstance(z, ast.FunctionDef) for z in node.body)
tail = ": ..." if empty else ":"
return "None" + tail
# Step 2: [Def Name Patterns] override all other patterns.
for pattern in self.def_patterns:
found, s = pattern.match(name)
if found:
if trace:
g.trace("*name pattern %s: %s -> %s" % (pattern.find_s, name, s))
return s + ": ..."
# Step 3: remove recursive calls.
raw, r = self.remove_recursive_calls(name, raw, r)
# Step 4: Calculate return types.
return self.format_return_expressions(node, name, raw, r)
def format_return_expressions(self, node, name, raw_returns, reduced_returns):
"""
aList is a list of maximally reduced return expressions.
For each expression e in Alist:
- If e is a single known type, add e to the result.
- Otherwise, add Any # e to the result.
Return the properly indented result.
"""
assert len(raw_returns) == len(reduced_returns)
lws = "\n" + " " * 4
n = len(raw_returns)
known = all([is_known_type(e) for e in reduced_returns])
# g.trace(reduced_returns)
empty = not any(isinstance(z, ast.FunctionDef) for z in node.body)
tail = ": ..." if empty else ":"
if not known or self.verbose:
# First, generate the return lines.
aList = []
for i in range(n):
e, raw = reduced_returns[i], raw_returns[i]
known = " " if is_known_type(e) else "?"
aList.append("# %s %s: %s" % (" ", i, raw))
aList.append("# %s %s: return %s" % (known, i, e))
results = "".join([lws + self.indent(z) for z in aList])
# Put the return lines in their proper places.
if known:
s = reduce_types(reduced_returns, name=name, trace=self.trace_reduce)
return s + tail + results
else:
return "Any" + tail + results
else:
s = reduce_types(reduced_returns, name=name, trace=self.trace_reduce)
return s + tail
def get_def_name(self, node):
"""Return the representaion of a function or method name."""
if self.class_name_stack:
name = "%s.%s" % (self.class_name_stack[-1], node.name)
# All ctors should return None
if node.name == "__init__":
name = "None"
else:
name = node.name
return name
def remove_recursive_calls(self, name, raw, reduced):
"""Remove any recursive calls to name from both lists."""
# At present, this works *only* if the return is nothing but the recursive call.
trace = False
assert len(raw) == len(reduced)
pattern = Pattern("%s(*)" % name)
n = len(reduced)
raw_result, reduced_result = [], []
for i in range(n):
if pattern.match_entire_string(reduced[i]):
if trace:
g.trace("****", name, pattern, reduced[i])
else:
raw_result.append(raw[i])
reduced_result.append(reduced[i])
return raw_result, reduced_result
def visit_Return(self, node):
self.returns.append(node)
# New: return the entire node, not node.value.
# class TestClass:
# '''
# A class containing constructs that have caused difficulties.
# This is in the make_stub_files directory, not the test directory.
# '''
# # pylint: disable=no-member
# # pylint: disable=undefined-variable
# # pylint: disable=no-self-argument
# # pylint: disable=no-method-argument
# # pylint: disable=unsubscriptable-object
# # pylint: disable=undefined-variable
# def parse_group(group):
# # pylint: disable=unsupported-delete-operation
# if len(group) >= 3 and group[-2] == 'as':
# del group[-2:]
# ndots = 0
# i = 0
# while len(group) > i and group[i].startswith('.'):
# ndots += len(group[i])
# i += 1
# assert ''.join(group[:i]) == '.'*ndots, group
# del group[:i]
# assert all(g == '.' for g in group[1::2]), group
# return ndots, os.sep.join(group[::2])
# def return_all(self):
# return all([is_known_type(z) for z in s3.split(',')])
# # return all(['abc'])
# def return_array(self):
# return f(s[1:-1])
# def return_list(self, a):
# return [a]
# # pylint: disable=using-constant-test
# def return_two_lists(s):
# if 1:
# return aList
# else:
# return list(self.regex.finditer(s))
g = LeoGlobals() # For ekr.
if __name__ == "__main__":
main()
```
#### File: tests/checkout_repo/freezer_mpy_test.py
```python
import sys
import pytest
from pathlib import Path
# pylint: disable=wrong-import-position,import-error
import basicgit as git
# Module Under Test
import get_mpy
if not sys.warnoptions:
import os, warnings
warnings.simplefilter("default") # Change the filter in this process
os.environ["PYTHONWARNINGS"] = "default" # Also affect subprocesses
# No Mocks, does actual extraction from repro
# TODO: allow tests to work on any path, not just my own machine
@pytest.mark.parametrize(
"path, port, board",
[
(
"C:\\develop\\MyPython\\TESTREPO-micropython\\ports\\esp32\\modules\\_boot.py",
"esp32",
None,
),
(
"/develop/MyPython/TESTREPO-micropython/ports/esp32/modules/_boot.py",
"esp32",
None,
),
("../TESTREPO-micropython/ports/esp32/modules/_boot.py", "esp32", None),
(
"C:\\develop\\MyPython\\TESTREPO-micropython\\ports\\stm32\\boards\\PYBV11\\modules\\_boot.py",
"stm32",
"PYBV11",
),
(
"/develop/MyPython/TESTREPO-micropython/ports/stm32/boards/PYBV11/modules/_boot.py",
"stm32",
"PYBV11",
),
(
"../TESTREPO-micropython/ports/stm32/boards/PYBV11/modules/_boot.py",
"stm32",
"PYBV11",
),
],
)
def test_extract_target_names(path, port, board):
_port, _board = get_mpy.get_target_names(path)
assert _board == board
assert _port == port
@pytest.mark.basicgit
def test_freezer_mpy_manifest(tmp_path, testrepo_micropython, testrepo_micropython_lib):
"test if we can freeze source using manifest.py files"
# mpy_path = Path(testrepo_micropython)
# mpy_lib = Path(testrepo_micropython_lib)
mpy_path = testrepo_micropython
mpy_lib = testrepo_micropython_lib
# mpy version must be at 1.12 or newer
mpy_version = "v1.12"
version = git.get_tag(mpy_path)
if version < mpy_version:
git.checkout_tag(mpy_version, mpy_path)
version = git.get_tag(mpy_path)
assert version == mpy_version, "prep: could not checkout version {} of {}".format(mpy_version, mpy_path)
stub_path = Path(tmp_path)
get_mpy.get_frozen(str(stub_path), version=mpy_version, mpy_path=mpy_path, lib_path=mpy_lib)
scripts = list(stub_path.rglob("*.py"))
assert scripts is not None, "can freeze scripts from manifest"
assert len(scripts) > 10, "expect at least 10 files, only found {}".format(len(scripts))
@pytest.mark.basicgit
def test_freezer_mpy_folders(tmp_path, testrepo_micropython, testrepo_micropython_lib):
"test if we can freeze source using modules folders"
mpy_path = testrepo_micropython
# mpy version must be older than 1.12 (so use 1.10)
mpy_version = "v1.10"
version_x = version = git.get_tag(mpy_path)
if version != mpy_version:
git.checkout_tag(mpy_version, mpy_path)
version = git.get_tag(mpy_path)
assert version == mpy_version, "prep: could not checkout version {} of ./micropython".format(mpy_version)
stub_path = tmp_path
# freezer_mpy.get_frozen(stub_path, mpy_path, lib_path='./micropython-lib')
get_mpy.get_frozen_folders(stub_path, mpy_path, lib_path=str(testrepo_micropython_lib), version=mpy_version)
# restore original version
git.checkout_tag(version_x, mpy_path)
assert True
``` |
{
"source": "Josverl/micropython-stubs",
"score": 2
} |
#### File: stubs/lvgl-8_1_0_dev-esp32/lodepng.py
```python
from typing import Any
class C_Pointer:
''
class LCT:
''
GREY = 0 # type: int
GREY_ALPHA = 4 # type: int
MAX_OCTET_VALUE = 255 # type: int
PALETTE = 3 # type: int
RGB = 2 # type: int
RGBA = 6 # type: int
class LodePNGColorMode:
''
class LodePNGDecoderSettings:
''
class LodePNGDecompressSettings:
''
class LodePNGInfo:
''
class LodePNGState:
''
class LodePNGTime:
''
def add_itext(self, *args) -> Any:
...
def add_text(self, *args) -> Any:
...
def can_have_alpha(self, *args) -> Any:
...
def chunk_ancillary(self, *args) -> Any:
...
def chunk_append(self, *args) -> Any:
...
def chunk_check_crc(self, *args) -> Any:
...
def chunk_create(self, *args) -> Any:
...
def chunk_data(self, *args) -> Any:
...
def chunk_data_const(self, *args) -> Any:
...
def chunk_find(self, *args) -> Any:
...
def chunk_find_const(self, *args) -> Any:
...
def chunk_generate_crc(self, *args) -> Any:
...
def chunk_length(self, *args) -> Any:
...
def chunk_next(self, *args) -> Any:
...
def chunk_next_const(self, *args) -> Any:
...
def chunk_private(self, *args) -> Any:
...
def chunk_safetocopy(self, *args) -> Any:
...
def chunk_type(self, *args) -> Any:
...
def chunk_type_equals(self, *args) -> Any:
...
def clear_icc(self, *args) -> Any:
...
def clear_itext(self, *args) -> Any:
...
def clear_text(self, *args) -> Any:
...
def color_mode_cleanup(self, *args) -> Any:
...
def color_mode_copy(self, *args) -> Any:
...
def color_mode_init(self, *args) -> Any:
...
def color_mode_make(self, *args) -> Any:
...
def convert(self, *args) -> Any:
...
def crc32(self, *args) -> Any:
...
def decode(self, *args) -> Any:
...
def decode24(self, *args) -> Any:
...
def decode32(self, *args) -> Any:
...
def decode_memory(self, *args) -> Any:
...
def decoder_settings_init(self, *args) -> Any:
...
def decompress_settings_init(self, *args) -> Any:
...
default_decompress_settings: Any
def error_text(self, *args) -> Any:
...
def get_bpp(self, *args) -> Any:
...
def get_channels(self, *args) -> Any:
...
def get_raw_size(self, *args) -> Any:
...
def has_palette_alpha(self, *args) -> Any:
...
def inflate(self, *args) -> Any:
...
def info_cleanup(self, *args) -> Any:
...
def info_copy(self, *args) -> Any:
...
def info_init(self, *args) -> Any:
...
def inspect(self, *args) -> Any:
...
def inspect_chunk(self, *args) -> Any:
...
def is_alpha_type(self, *args) -> Any:
...
def is_greyscale_type(self, *args) -> Any:
...
def is_palette_type(self, *args) -> Any:
...
def palette_add(self, *args) -> Any:
...
def palette_clear(self, *args) -> Any:
...
def set_icc(self, *args) -> Any:
...
def state_cleanup(self, *args) -> Any:
...
def state_copy(self, *args) -> Any:
...
def state_init(self, *args) -> Any:
...
def zlib_decompress(self, *args) -> Any:
...
``` |
{
"source": "Josverl/mipystubber",
"score": 2
} |
#### File: tests/commandline/stubber_cli_test.py
```python
from typing import List
import pytest
from pytest_mock import MockerFixture
from mock import MagicMock
from pathlib import Path
from click.testing import CliRunner
# module under test :
import stubber.stubber as stubber
def test_stubber_help():
# check basic commandline sanity check
runner = CliRunner()
result = runner.invoke(stubber.stubber_cli, ["--help"])
assert result.exit_code == 0
assert "Usage:" in result.output
assert "Commands:" in result.output
##########################################################################################
# clone
##########################################################################################
def test_stubber_clone(mocker: MockerFixture, tmp_path: Path):
runner = CliRunner()
mock_clone: MagicMock = mocker.patch("stubber.stubber.git.clone", autospec=True, return_value=0)
mock_fetch: MagicMock = mocker.patch("stubber.stubber.git.fetch", autospec=True, return_value=0)
result = runner.invoke(stubber.stubber_cli, ["clone"])
assert result.exit_code == 0
# either clone or fetch
assert mock_clone.call_count + mock_fetch.call_count == 2
if mock_clone.call_count > 0:
mock_clone.assert_any_call(remote_repo="https://github.com/micropython/micropython.git", path=Path("repos/micropython"))
mock_clone.assert_any_call(remote_repo="https://github.com/micropython/micropython-lib.git", path=Path("repos/micropython-lib"))
else:
mock_fetch.assert_any_call(Path("repos/micropython"))
mock_fetch.assert_any_call(Path("repos/micropython-lib"))
def test_stubber_clone_path(mocker: MockerFixture, tmp_path: Path):
runner = CliRunner()
mock_clone: MagicMock = mocker.MagicMock(return_value=0)
mocker.patch("stubber.stubber.git.clone", mock_clone)
m_tag = mocker.patch("stubber.stubber.git.get_tag", autospec=True)
m_dir = mocker.patch("stubber.stubber.os.mkdir", autospec=True)
# now test with path specified
result = runner.invoke(stubber.stubber_cli, ["clone", "--path", "foobar"])
assert result.exit_code == 0
assert mock_clone.call_count >= 2
mock_clone.assert_any_call(remote_repo="https://github.com/micropython/micropython.git", path=Path("foobar/micropython"))
mock_clone.assert_any_call(remote_repo="https://github.com/micropython/micropython-lib.git", path=Path("foobar/micropython-lib"))
assert m_tag.call_count >= 2
##########################################################################################
# switch
##########################################################################################
@pytest.mark.parametrize(
"params",
[
["switch", "--version", "latest", "--path", "foobar"],
["switch", "--version", "v1.10", "--path", "foobar"],
],
)
def test_stubber_switch(mocker: MockerFixture, params: List[str]):
runner = CliRunner()
# mock_clone: MagicMock = mocker.patch("stubber.stubber.git.clone", autospec=True, return_value=0)
# Mock Path.exists
m_fetch: MagicMock = mocker.patch("stubber.stubber.git.fetch", autospec=True, return_value=0)
m_switch: MagicMock = mocker.patch("stubber.stubber.git.switch_branch", autospec=True, return_value=0)
m_checkout: MagicMock = mocker.patch("stubber.stubber.git.checkout_tag", autospec=True, return_value=0)
m_get_tag: MagicMock = mocker.patch("stubber.stubber.git.get_tag", autospec=True, return_value="v1.42")
m_match = mocker.patch("stubber.stubber.get_mpy.match_lib_with_mpy", autospec=True)
m_exists = mocker.patch("stubber.stubber.Path.exists", return_value=True)
result = runner.invoke(stubber.stubber_cli, params)
assert result.exit_code == 0
# fetch latest
assert m_fetch.call_count == 2
# "foobar" from params is used as the path
m_fetch.assert_any_call(Path("foobar/micropython"))
m_fetch.assert_any_call(Path("foobar/micropython-lib"))
# core
m_match.assert_called_once()
if "latest" in params:
m_switch.assert_called_once()
m_checkout.assert_not_called()
else:
m_switch.assert_not_called()
m_checkout.assert_called_once()
##########################################################################################
# minify
##########################################################################################
def test_stubber_minify(mocker: MockerFixture):
# check basic commandline sanity check
runner = CliRunner()
mock_minify: MagicMock = mocker.MagicMock(return_value=0)
mocker.patch("stubber.stubber.minify", mock_minify)
result = runner.invoke(stubber.stubber_cli, ["minify"])
assert result.exit_code == 0
mock_minify.assert_called_once_with("board/createstubs.py", "./minified", True, False, False)
def test_stubber_minify_all(mocker: MockerFixture):
# check basic commandline sanity check
runner = CliRunner()
mock_minify: MagicMock = mocker.MagicMock(return_value=0)
mocker.patch("stubber.stubber.minify", mock_minify)
result = runner.invoke(stubber.stubber_cli, ["minify", "--all"])
assert result.exit_code == 0
assert mock_minify.call_count == 3
mock_minify.assert_any_call("board/createstubs.py", "./minified", True, False, False)
mock_minify.assert_any_call("board/createstubs_db.py", "./minified", True, False, False)
mock_minify.assert_any_call("board/createstubs_mem.py", "./minified", True, False, False)
##########################################################################################
# stub
##########################################################################################
def test_stubber_stub(mocker: MockerFixture):
# check basic commandline sanity check
runner = CliRunner()
# mock: MagicMock = mocker.MagicMock(return_value=True)
mock: MagicMock = mocker.patch("stubber.stubber.utils.generate_pyi_files", autospec=True, return_value=True)
# fake run on current folder
result = runner.invoke(stubber.stubber_cli, ["stub", "--source", "."])
mock.assert_called_once_with(Path("."))
assert result.exit_code == 0
##########################################################################################
# get-frozen
##########################################################################################
def test_stubber_get_frozen(mocker: MockerFixture, tmp_path: Path):
# check basic commandline sanity check
runner = CliRunner()
mock_version: MagicMock = mocker.patch("stubber.stubber.git.get_tag", autospec=True, return_value="v1.42")
mock: MagicMock = mocker.patch("stubber.stubber.get_mpy.get_frozen", autospec=True)
mock_post: MagicMock = mocker.patch("stubber.stubber.utils.do_post_processing", autospec=True)
# fake run - need to ensure that there is a destination folder
result = runner.invoke(stubber.stubber_cli, ["get-frozen", "--stub-folder", tmp_path.as_posix()])
assert result.exit_code == 0
# FIXME: test fails in CI
mock.assert_called_once()
mock_version.assert_called_once()
mock_post.assert_called_once_with([tmp_path / "micropython-v1_42-frozen"], True, True)
##########################################################################################
# get-lobo
##########################################################################################
def test_stubber_get_lobo(mocker: MockerFixture, tmp_path: Path):
# check basic commandline sanity check
runner = CliRunner()
mock: MagicMock = mocker.patch("stubber.stubber.get_lobo.get_frozen", autospec=True)
mock_post: MagicMock = mocker.patch("stubber.stubber.utils.do_post_processing", autospec=True)
# fake run
result = runner.invoke(stubber.stubber_cli, ["get-lobo", "--stub-folder", tmp_path.as_posix()])
mock.assert_called_once()
mock_post.assert_called_once()
mock_post.assert_called_once_with([tmp_path / "loboris-v3_2_24-frozen"], True, True)
assert result.exit_code == 0
##########################################################################################
# get-core
##########################################################################################
def test_stubber_get_core(mocker: MockerFixture, tmp_path: Path):
# check basic commandline sanity check
runner = CliRunner()
mock: MagicMock = mocker.patch("stubber.stubber.get_cpython.get_core", autospec=True)
mock_post: MagicMock = mocker.patch("stubber.stubber.utils.do_post_processing", autospec=True)
# fake run
result = runner.invoke(stubber.stubber_cli, ["get-core", "--stub-folder", tmp_path.as_posix()])
assert result.exit_code == 0
# process is called twice
assert mock.call_count == 2
# post is called one
mock_post.assert_called_with([tmp_path / "cpython_core-pycopy", tmp_path / "cpython_core-micropython"], True, True)
##########################################################################################
# get-docstubs
##########################################################################################
def test_stubber_get_docstubs(mocker: MockerFixture, tmp_path: Path):
# check basic commandline sanity check
runner = CliRunner()
mock_version: MagicMock = mocker.patch("stubber.stubber.git.get_tag", autospec=True, return_value="v1.42")
mock: MagicMock = mocker.patch("stubber.stubber.generate_from_rst", autospec=True)
mock_post: MagicMock = mocker.patch("stubber.stubber.utils.do_post_processing", autospec=True)
# fake run
result = runner.invoke(stubber.stubber_cli, ["get-docstubs", "--stub-folder", tmp_path.as_posix()])
assert result.exit_code == 0
# process is called twice
assert mock.call_count == 1
mock.assert_called_once()
assert mock_version.call_count >= 1
# post is called one
mock_post.assert_called_with([tmp_path / "micropython-v1_42-docstubs"], False, True)
##########################################################################################
# get-lobo
##########################################################################################
def test_stubber_fallback(mocker: MockerFixture, tmp_path: Path):
# check basic commandline sanity check
runner = CliRunner()
mock: MagicMock = mocker.patch("stubber.stubber.update_fallback", autospec=True)
# mock2: MagicMock = mocker.patch("stubber.update_fallback.update_fallback", autospec=True)
# from .update_fallback import update_fallback,
# fake run
result = runner.invoke(stubber.stubber_cli, ["update-fallback", "--stub-folder", tmp_path.as_posix()])
mock.assert_called_once()
assert result.exit_code == 0
```
#### File: tests/common/upd_fallback_test.py
```python
import os
import pytest
from pathlib import Path
# pylint: disable=wrong-import-position,import-error
# Module Under Test
from stubber.update_fallback import update_fallback, fallback_sources, RELEASED
from stubber import config
def test_update_fallback(tmp_path):
# test requires an actual, filled source
# from actual source
# TODO: Make sure there is an actual source to copy from
stub_path = config.stub_path
# to tmp_path /....
count = update_fallback(stub_path, tmp_path / config.fallback_path)
# assert count >= 50
# limited expectations as there is no source
assert count >= 0
def test_update_fallback_2(tmp_path: Path):
# test requires an actual, filled source
# from actual source
# Make sure there is an actual source to copy from
stub_path = tmp_path
fallback_path = tmp_path / config.fallback_path
# create fake sources
fakes = 0
for (name, source) in fallback_sources(RELEASED):
if not "." in name:
...
file = stub_path / source / name / "__init__.py"
else:
file = stub_path / source / name.replace("*", "")
# create fake file(s)
if not file.parent.exists():
os.makedirs(file.parent)
with open(file, "x") as f:
f.write("# fake \n")
fakes += 1
# to tmp_path /....
count = update_fallback(stub_path, fallback_path, version=RELEASED)
assert count == fakes
count = update_fallback(stub_path, fallback_path)
assert count == fakes
count = update_fallback(stub_path, fallback_path, version="latest")
assert count > 0
```
#### File: tests/integration/minify_test.py
```python
import sys
from pathlib import Path
import subprocess
from types import SimpleNamespace
import pytest
from pytest_mock import MockerFixture
from mock import MagicMock
import stubber.minify as minify
@pytest.mark.parametrize("source", ["createstubs.py", "createstubs_mem.py", "createstubs_db.py"])
@pytest.mark.slow
def test_minification_py(tmp_path: Path, source: str):
"python script - test creation of minified version"
# load process.py in the same python environment
source_path = Path("./board") / source
result = minify.minify(source=source_path, target=tmp_path)
assert result == 0
# now test that log statements have been removed
with open(tmp_path / source) as f:
content = f.readlines()
for line in content:
assert line.find("._log") == -1, "all references to ._log have been removed"
@pytest.mark.parametrize("source", ["createstubs.py", "createstubs_mem.py", "createstubs_db.py"])
def test_minification_quick(tmp_path: Path, source: str, mocker: MockerFixture):
"testthe rest of the minification functions using mocks to reduce the time needed"
# load process.py in the same python environment
source_path = Path("./board") / source
m_minify = mocker.patch(
"stubber.minify.python_minifier.minify",
autospec=True,
return_value="#short",
)
# mock subprocess run
return_val = SimpleNamespace()
return_val.returncode = 0
m_spr = mocker.patch(
"stubber.minify.subprocess.run",
autospec=True,
return_value=return_val,
)
# -----------------------------------------
result = minify.minify(source=source_path, target=tmp_path, cross_compile=True)
assert result == 0
m_minify.assert_called_once()
m_spr.assert_called_once()
# -----------------------------------------
m_minify.reset_mock()
m_spr.reset_mock()
result = minify.minify(source=source_path, target=tmp_path, cross_compile=False, keep_report=False)
assert result == 0
m_minify.assert_called_once()
assert m_spr.call_count == 0
# -----------------------------------------
m_minify.reset_mock()
m_spr.reset_mock()
result = minify.minify(source=source_path, target=tmp_path, cross_compile=False, keep_report=False, diff=True)
assert result == 0
m_minify.assert_called_once()
assert m_spr.call_count == 0
# -----------------------------------------
m_minify.reset_mock()
m_spr.reset_mock()
result = minify.minify(source=source_path, target=tmp_path, cross_compile=True, keep_report=False, diff=True)
assert result == 0
m_minify.assert_called_once()
m_spr.assert_called_once()
``` |
{
"source": "josvos/timeline-service",
"score": 2
} |
#### File: timeline-service/timelineService/app.py
```python
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from builtins import str
from builtins import object
import traceback
import sys
import json
import os
from gevent.pywsgi import WSGIServer
from flask import Flask, Response, request, abort, jsonify, make_response
from werkzeug.exceptions import HTTPException
from . import timeline
app = Flask(__name__)
# app.config.from_object("flask_config")
#
# Disable CORS problems
#
def add_cors_headers(response):
response.headers['Access-Control-Allow-Origin'] = '*'
if request.method == 'OPTIONS':
response.headers['Access-Control-Allow-Methods'] = 'DELETE, GET, POST, PUT'
headers = request.headers.get('Access-Control-Request-Headers')
if headers:
response.headers['Access-Control-Allow-Headers'] = headers
return response
app.after_request(add_cors_headers)
API_ROOT = "/timeline/v1/context"
@app.route(API_ROOT, methods=["GET", "POST"])
def allContexts():
timelineServiceUrl = "http://example.com"
if request.method == "POST" or request.args:
if request.form:
args = request.form.to_dict(flat=True)
else:
args = request.args.to_dict(flat=True)
rv = timeline.Timeline.createTimeline(timelineServiceUrl=timelineServiceUrl, **args)
return Response(json.dumps(rv), mimetype="application/json")
else:
rv = timeline.Timeline.getAll()
return Response(json.dumps(rv), mimetype="application/json")
@app.route(API_ROOT + "/<string:contextId>/<string:verb>", methods=["GET"])
def getContextVerb(contextId, verb):
args = request.args.to_dict(flat=True)
tl = timeline.Timeline.get(contextId)
if not tl:
abort(make_response("No such context: %s" % contextId, 404))
method = getattr(tl, verb, None)
if not method:
abort(make_response("No such method: %s" % contextId, 404))
try:
rv = method(**args)
except HTTPException:
raise
except:
status = "500 Internal server error: %s" % ' '.join(traceback.format_exception_only(sys.exc_info()[0], sys.exc_info()[1]))
traceback.print_exc()
abort(make_response(status, 500))
return Response(json.dumps(rv), mimetype="application/json")
@app.route(API_ROOT + "/<string:contextId>/<string:verb>", methods=["PUT"])
def putContextVerb(contextId, verb):
# Argument passing is convoluted...
args = {}
if request.args:
args.update(request.args.to_dict(flat=True))
if request.form:
args.update(request.form.to_dict(flat=True))
if request.is_json:
jargs = request.get_json()
if type(jargs) == type("") or type(jargs) == type(u""):
# xxxjack Bug workaround, 21-Dec-2016
jargs = json.loads(jargs)
# Usually the json is an object/dict, sometimes not.
if type(jargs) == type({}):
args.update(jargs)
else:
args['postData'] = jargs
tl = timeline.Timeline.get(contextId)
if not tl:
abort(make_response("No such context: %s" % contextId, 404))
method = getattr(tl, verb, None)
if not method:
abort(make_response("No such method: %s" % contextId, 404))
try:
rv = method(**args)
except HTTPException:
raise
except:
status = "500 Internal server error: %s" % ' '.join(traceback.format_exception_only(sys.exc_info()[0], sys.exc_info()[1]))
traceback.print_exc()
abort(make_response(status, 500))
return Response(json.dumps(rv), mimetype="application/json")
@app.route(API_ROOT + "/<string:contextId>/<string:verb>", methods=["POST"])
def postContextVerb(contextId, verb):
return putContextVerb(contextId, verb)
@app.route(API_ROOT + "/<string:contextId>", methods=["DELETE"])
def deleteContext(contextId):
tl = timeline.Timeline.get(contextId)
if tl:
tl.delete()
return ''
class Server(object):
def __init__(self, port):
self.port = port if port else 8080
self.server = WSGIServer(("0.0.0.0", self.port), app)
def run(self):
print("timelineService: running on port %d" % self.port)
self.server.serve_forever()
_singleton = None
def app_singleton(port=None):
global _singleton
if _singleton == None:
_singleton = Server(port)
return _singleton
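# A hypothetical client sketch (the verb name, port, and the shape of the
# create response are assumptions, not part of this module):
#     import requests
#     base = "http://localhost:8080/timeline/v1/context"
#     ctx_id = requests.post(base).json()               # create a context
#     requests.put("%s/%s/someVerb" % (base, ctx_id), json={"arg": 1})
#     requests.delete("%s/%s" % (base, ctx_id))         # tear it down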
``` |
{
"source": "josvromans/python_shapes",
"score": 3
} |
#### File: shapes/core/image.py
```python
import os
from PIL import Image, ImageDraw
from datetime import datetime
from settings import (
BACKGROUND_COLOR, DATETIME_FORMAT, DEFAULT_IMAGE_SIZE,
IMAGE_DIRECTORY_NAME, IMAGE_EXTENSION, IMAGE_FORMAT, IMAGE_FORMAT_COLOR_MODE,
)
# if you get a DecompressionBombError, you can uncomment the line below, and no check on max pixels will be made.
# Only do this if you trust the image file (so when you created it yourself)
# This is a safety check, that prevents systems for opening very large (and possible corrupt) image files.
# Of course, when the file is very large, your computer might not be able to process it and the program will be killed.
# Image.MAX_IMAGE_PIXELS = None
def get_datetime_string():
"""
All file names should start with a date string, so they are unique and in
both alphabetical and chronological order.
"""
return datetime.now().strftime(DATETIME_FORMAT)
def new_image(size=DEFAULT_IMAGE_SIZE, color=BACKGROUND_COLOR):
image = Image.new(mode=IMAGE_FORMAT_COLOR_MODE, size=size, color=color)
draw = ImageDraw.Draw(image)
return image, draw
def save_image(image, file_name, resize_size=None):
"""
:param image: Pil.Image instance
:param file_name: description that will be included in the actual filename
:param resize_size: when you wish to resize the image, provide the size as a tuple of two integers, like (500, 500)
:return: the file_path of the created image
"""
# the part of the file name before the extension; make sure it will never be longer than 250 characters
file_name = '{}_{}'.format(get_datetime_string(), file_name)[:250]
file_name_with_extension = '{}.{}'.format(file_name, IMAGE_EXTENSION)
new_image_path = os.path.join(IMAGE_DIRECTORY_NAME, file_name_with_extension)
if resize_size is not None:
image = image.resize(size=resize_size, resample=Image.ANTIALIAS)  # note: resize() scales to exactly resize_size and does NOT preserve aspect ratio
image.save(fp=new_image_path, format=IMAGE_FORMAT)
print('Saved {}'.format(new_image_path))
return new_image_path
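# A minimal usage sketch (illustrative only; assumes an RGB-style color mode
# from settings):
#     image, draw = new_image(size=(400, 400))
#     draw.ellipse([50, 50, 350, 350], outline=(0, 0, 0))
#     save_image(image, file_name='circle', resize_size=(200, 200))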
``` |
{
"source": "Josvth/poliastro",
"score": 2
} |
#### File: tests_earth/tests_atmosphere/test_coesa76.py
```python
import pytest
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose
from poliastro.earth.atmosphere import COESA76
from poliastro.earth.atmosphere.coesa76 import p_coeff, rho_coeff
coesa76 = COESA76()
def test_outside_altitude_range_coesa76():
with pytest.raises(ValueError) as excinfo:
r0 = 6356.766 * u.km
coesa76._check_altitude(1001 * u.km, r0)
assert (
"ValueError: Geometric altitude must be in range [0.0 km, 1000.0 km]"
in excinfo.exconly()
)
def test_get_index_coesa76():
expected_i = 7
z = 86 * u.km
i = coesa76._get_index(z, coesa76.zb_levels)
assert i == expected_i
def test_coefficients_over_86km():
# Expected pressure coefficients
expected_p = [9.814674e-11, -1.654439e-07, 1.148115e-04, -0.05431334, -2.011365]
expected_rho = [1.140564e-10, -2.130756e-07, 1.570762e-04, -0.07029296, -12.89844]
assert coesa76._get_coefficients_avobe_86(350 * u.km, p_coeff) == expected_p
assert coesa76._get_coefficients_avobe_86(350 * u.km, rho_coeff) == expected_rho
# SOLUTIONS DIRECTLY TAKEN FROM COESA76 REPORT
coesa76_solutions = {
0.5 * u.km: [284.90 * u.K, 9.5461e2 * u.mbar, 1.1673 * u.kg / u.m ** 3],
1.0 * u.km: [281.651 * u.K, 8.9876e2 * u.mbar, 1.1117 * u.kg / u.m ** 3],
10 * u.km: [223.252 * u.K, 2.6499e2 * u.mbar, 4.1351e-1 * u.kg / u.m ** 3],
77 * u.km: [204.493 * u.K, 1.7286e-2 * u.mbar, 2.9448e-5 * u.kg / u.m ** 3],
86 * u.km: [186.87 * u.K, 3.7338e-3 * u.mbar, 6.958e-6 * u.kg / u.m ** 3],
92 * u.km: [186.96 * u.K, 1.2887e-3 * u.mbar, 2.393e-6 * u.kg / u.m ** 3],
230 * u.km: [915.78 * u.K, 3.9276e-7 * u.mbar, 1.029e-10 * u.kg / u.m ** 3],
1000 * u.km: [1000.0 * u.K, 7.5138e-11 * u.mbar, 3.561e-15 * u.kg / u.m ** 3],
}
@pytest.mark.parametrize("z", coesa76_solutions.keys())
def test_properties_coesa76(z):
# Get expected values from official data
expected_T = coesa76_solutions[z][0]
expected_p = coesa76_solutions[z][1]
expected_rho = coesa76_solutions[z][2]
T, p, rho = coesa76.properties(z)
assert_quantity_allclose(T, expected_T, rtol=1e-4)
assert_quantity_allclose(p, expected_p, rtol=1e-4)
assert_quantity_allclose(rho, expected_rho, rtol=1e-3)
# DATA DIRECTLY TAKEN FROM TABLE-III COESA76 REPORT
sound_speed_viscosity_conductivity = {
0.5
* u.km: [
338.37 * (u.m / u.s),
1.7737e-5 * (u.N * u.s / (u.m) ** 2),
2.5106e-2 * (u.J / u.m / u.s / u.K),
],
10
* u.km: [
299.53 * (u.m / u.s),
1.4577e-5 * (u.N * u.s / (u.m) ** 2),
2.0088e-2 * (u.J / u.m / u.s / u.K),
],
24
* u.km: [
297.72 * (u.m / u.s),
1.4430e-5 * (u.N * u.s / (u.m) ** 2),
1.9862e-2 * (u.J / u.m / u.s / u.K),
],
41
* u.km: [
318.94 * (u.m / u.s),
1.6151e-5 * (u.N * u.s / (u.m) ** 2),
2.2556e-2 * (u.J / u.m / u.s / u.K),
],
50
* u.km: [
329.80 * (u.m / u.s),
1.7037e-5 * (u.N * u.s / (u.m) ** 2),
2.3973e-2 * (u.J / u.m / u.s / u.K),
],
67
* u.km: [
302.57 * (u.m / u.s),
1.4823e-5 * (u.N * u.s / (u.m) ** 2),
2.0469e-2 * (u.J / u.m / u.s / u.K),
],
85
* u.km: [
275.52 * (u.m / u.s),
1.2647e-5 * (u.N * u.s / (u.m) ** 2),
1.7162e-2 * (u.J / u.m / u.s / u.K),
],
}
@pytest.mark.parametrize("z", sound_speed_viscosity_conductivity.keys())
def test_sound_speed_viscosity_conductivity(z):
expected_Cs = sound_speed_viscosity_conductivity[z][0]
expected_mu = sound_speed_viscosity_conductivity[z][1]
expected_k = sound_speed_viscosity_conductivity[z][2]
Cs = coesa76.sound_speed(z)
mu = coesa76.viscosity(z)
k = coesa76.thermal_conductivity(z)
assert_quantity_allclose(Cs, expected_Cs, rtol=1e-4)
assert_quantity_allclose(mu, expected_mu, rtol=1e-4)
assert_quantity_allclose(k, expected_k, rtol=1e-2)
def test_sound_speed_over_86km():
z = 87 * u.km
# test speed of sound over 86 km
with pytest.raises(ValueError) as excinfo:
coesa76.sound_speed(z)
assert (
"ValueError: Speed of sound in COESA76 has just been implemented up to 86km."
in excinfo.exconly()
)
def test_viscosity_over_86km():
z = 87 * u.km
# test viscosity over 86 km
with pytest.raises(ValueError) as excinfo:
coesa76.viscosity(z)
assert (
"ValueError: Dynamic Viscosity in COESA76 has just been implemented up to 86km."
in excinfo.exconly()
)
def test_conductivity_over_86km():
z = 87 * u.km
# test thermal conductivity over 86 km
with pytest.raises(ValueError) as excinfo:
coesa76.thermal_conductivity(z)
assert (
"ValueError: Thermal conductivity in COESA76 has just been implemented up to 86km."
in excinfo.exconly()
)
``` |
{
"source": "josw123/dart-scraper",
"score": 2
} |
#### File: app/api/download.py
```python
import re
import sys
import json
from app import socketio
from app.api.api_key import check_authorized
from app.api.corp_list import get_corp_list
from app.api.globals import transmit
DOWNLOAD = 'DOWNLOAD'
report_tp_regex = re.compile(r'(Annual|Semiannual|Quarterly)')
progress_regex = re.compile(r'(\d{1,3})%')
def transmit_progress(progress, corp_code, corp_name, total, index, report_tp='Annual'):
data = dict(report_tp=report_tp,
progress=progress,
corp_code=corp_code,
corp_name=corp_name,
total=total,
index=index)
transmit.data(DOWNLOAD, data)
class TqdmExtractor(object):
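"""A sys.stderr stand-in: forwards tqdm output to the real stderr unchanged,
while parsing the report type and progress percentage out of it and
re-emitting them to the websocket client via transmit_progress()."""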
def __init__(self, stderr):
super().__init__()
self.stderr = stderr
self.corp = None
self.total = None
self.index = None
def write(self, text):
self.stderr.write(text)
report_tp = report_tp_regex.search(text)
if report_tp is not None:
report_tp = report_tp.group(1)
progress = progress_regex.search(text)
if progress is not None:
progress = progress.group(1)
transmit_progress(progress, self.corp.corp_code, self.corp.corp_name, self.total, self.index, report_tp)
def flush(self):
self.stderr.flush()
def set_info(self, corp, total, index):
self.total = total
self.index = index
self.corp = corp
@socketio.on(DOWNLOAD)
def download_handler(data):
if isinstance(data, str):
data = json.loads(data)
if not check_authorized():
return
bgn_de = data.get('bgn_de')
end_de = data.get('end_de')
corps = data.get('corps')
path = data.get('path')
separate = data.get('separate', False)
report_tp = data.get('report_tp', 'annual')
report_tp = report_tp.lower()
if (bgn_de and path) is None or len(corps) == 0:
transmit.errors(DOWNLOAD, {'msg': 'Some parameters are missing'})
return
corp_list = get_corp_list()
# stderr progress extractor
stderr = sys.stderr
sys.stderr = TqdmExtractor(stderr)
transmit.start(DOWNLOAD)
total = len(corps)
# Start
for idx, corp_code in enumerate(corps):
corp = corp_list.find_by_corp_code(corp_code)
sys.stderr.set_info(corp, len(corps), index=idx)
try:
# Extracting START
transmit_progress(0, corp.corp_code, corp.corp_name, total, idx)
fs = corp.extract_fs(bgn_de=bgn_de, end_de=end_de, separate=separate, report_tp=report_tp)
filename = '{}_{}_{}.xlsx'.format(corp.corp_name, corp.corp_code, report_tp)
fs.save(path=path, filename=filename)
except Exception as e:
msg = '[{}]{} : {}'.format(corp.corp_code, corp.corp_name, str(e))
transmit.errors(DOWNLOAD, msg)
finally:
# Extracting Finish
transmit_progress(100, corp.corp_code, corp.corp_name, total, idx)
transmit.finish(DOWNLOAD)
# Reset stderr
sys.stderr = stderr
```
#### File: dart-scraper/app/utils.py
```python
import sys
import os
import json
import threading
def get_config_path():
""" Get Configuration Path
Returns
-------
tuple of str
app directory, configuration path
"""
if sys.platform == 'darwin':
app_dir = os.path.join(os.path.expanduser('~/Library/Application Support'), 'dart-scraper')
elif sys.platform == 'win32':
app_dir = os.path.join(os.getenv('appdata'), 'dart-scraper')
else:
app_dir = os.path.join(os.path.expanduser('~'), '.dart-scraper')
config_path = os.path.join(app_dir, 'dart-setting.json')
return app_dir, config_path
def save_config_file(data):
""" Save configuration file
Parameters
----------
data: dict
data to save
"""
config = read_config_file()
if config is None:
config = {}
new_data = {**config, **data}
app_dir, config_path = get_config_path()
if not os.path.exists(app_dir):
os.makedirs(app_dir)
with open(config_path, 'w') as config_file:
json.dump(new_data, config_file)
def read_config_file():
""" Read configuration file
Returns
-------
dict
configuration data
"""
_, config_path = get_config_path()
if not os.path.exists(config_path):
return None
with open(config_path, 'r') as config_file:
data = json.load(config_file)
return data
class ThreadSafeDict(dict):
def __init__(self, *p_arg, **n_arg):
dict.__init__(self, *p_arg, **n_arg)
self._lock = threading.Lock()
def __enter__(self):
self._lock.acquire()
return self
def __exit__(self, type, value, traceback):
self._lock.release()
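# Usage sketch: the lock is held for the duration of the `with` block.
#     shared = ThreadSafeDict()
#     with shared as d:
#         d["key"] = "value"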
``` |
{
"source": "joswlv/BentoML",
"score": 2
} |
#### File: BentoML/bentoml/gluon.py
```python
import os
import typing as t
from ._internal.models.base import MODEL_NAMESPACE, Model
from ._internal.types import MetadataType, PathType
from .exceptions import MissingDependencyException
try:
import mxnet # pylint: disable=unused-import
from mxnet import gluon
except ImportError:
raise MissingDependencyException("mxnet is required by GluonModel")
class GluonModel(Model):
"""
Model class for saving/loading :obj:`mxnet.gluon` models
Args:
model (`mxnet.gluon.Block`):
Every :obj:`mxnet.gluon` object is based on :obj:`mxnet.gluon.Block`
metadata (`Dict[str, Any]`, `optional`, default to `None`):
Class metadata
Raises:
MissingDependencyException:
:obj:`mxnet` is required by GluonModel
Example usage under :code:`train.py`::
TODO:
One then can define :code:`bento.py`::
TODO:
"""
def __init__(
self, model: "mxnet.gluon.Block", metadata: t.Optional[MetadataType] = None,
):
super(GluonModel, self).__init__(model, metadata=metadata)
@classmethod
def load(cls, path: PathType) -> "mxnet.gluon.Block":
json_path: str = os.path.join(path, f"{MODEL_NAMESPACE}-symbol.json")
params_path: str = os.path.join(path, f"{MODEL_NAMESPACE}-0000.params")
return gluon.nn.SymbolBlock.imports(json_path, ["data"], params_path)
def save(self, path: PathType) -> None:
self._model.export(os.path.join(path, MODEL_NAMESPACE))
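# A minimal usage sketch (illustrative only; the network and paths are
# assumptions). export() requires a hybridized Block that has been run once:
#     net = gluon.nn.Dense(1)
#     net.initialize()
#     net.hybridize()
#     net(mxnet.nd.zeros((1, 4)))  # build the symbolic graph
#     GluonModel(net).save("/tmp/gluon_model")
#     restored = GluonModel.load("/tmp/gluon_model")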
```
#### File: _internal/models/base.py
```python
import typing as t
from pathlib import Path
from ..types import MetadataType, PathType
from ..utils.ruamel_yaml import YAML
MT = t.TypeVar("MT", bound=t.Any)
H5_EXTENSION: str = ".h5"
HDF5_EXTENSION: str = ".hdf5"
JSON_EXTENSION: str = ".json"
PICKLE_EXTENSION: str = ".pkl"
PTH_EXTENSION: str = ".pth"
PT_EXTENSION: str = ".pt"
TXT_EXTENSION: str = ".txt"
YAML_EXTENSION: str = ".yaml"
YML_EXTENSION: str = ".yml"
MODEL_NAMESPACE: str = "bentoml_model"
class Model(object):
"""
:class:`Model` is the base abstraction
for describing the trained model serialization
and deserialization process.
Args:
model (`MT`):
Given model definition. Omit various type depending on given frameworks.
metadata (`Dict[str, Any]`, `optional`, default to `None`):
Class metadata
.. note::
Make sure to add ``# noqa # pylint: disable=arguments-differ`` to :meth:`load` when implementing
a new integration or custom artifact whose ``load`` implementation takes different parameters
.. code-block:: python
from bentoml._internal.artifacts import Model
class CustomModel(Model):
def __init__(self, model, metadata=None):...
@classmethod
def load(cls, path: str, args1, args2):... # noqa # pylint: disable=arguments-differ
Example usage for creating a custom ``Model``::
TODO:
"""
def __init__(self: "Model", model: MT, metadata: t.Optional[MetadataType] = None):
self._model = model
self._metadata = metadata
@property
def metadata(self: "Model") -> t.Optional[MetadataType]:
return self._metadata
@classmethod
def load(cls, path: PathType) -> t.Any:
"""
Load saved model into memory.
Args:
path (`Union[str, os.PathLike]`):
Given path to save artifacts metadata and objects.
This will be used as a class method, interchangeable with
:meth:`save` to load model during development pipeline.
"""
raise NotImplementedError()
def save(self: "Model", path: PathType) -> None:
"""
Perform save instance to given path.
Args:
path (`Union[str, os.PathLike]`, or :obj:`~bentoml._internal.types.PathType`):
Given path to save artifacts metadata and objects.
Usually this can be used with :meth:`~bentoml._internal.artifacts.Model.load` to load
model objects for development::
# train.py
model = MyPyTorchModel().train() # type: torch.nn.Module
...
from bentoml.pytorch import PyTorchModel
PyTorchModel(model).save(".")
pytorch_model = PyTorchModel.load(".") # type: torch.nn.Module
.. admonition:: current implementation
Current implementation initialize base :meth:`save()` and :meth:`load()` in
:code:`__getattribute__()` via wrapper. Since Python doesn't have support
for method overloading, this ensures that model metadata will always be saved
to given directory.
""" # noqa # pylint: enable=line-too-long
raise NotImplementedError()
def __getattribute__(self: "Model", item: str) -> t.Any:
if item == "save":
def wrapped_save(*args, **kw): # type: ignore
path: PathType = args[0] # save(self, path)
if self.metadata:
yaml = YAML()
yaml.dump(
self.metadata, Path(path, f"{MODEL_NAMESPACE}{YML_EXTENSION}"),
)
inherited = object.__getattribute__(self, item)
return inherited(*args, **kw)
return wrapped_save
elif item == "load":
def wrapped_load(*args, **kw): # type: ignore
assert_msg: str = "`load()` requires a positional `path` argument"
assert args or "path" in kw, assert_msg
inherited = object.__getattribute__(self, item)
return inherited(*args, **kw)
return wrapped_load
else:
return object.__getattribute__(self, item)
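# A minimal custom Model sketch (illustrative; JSON-on-disk serialization is
# an assumption, not part of the base class):
#     import json, os
#     class DictModel(Model):
#         @classmethod
#         def load(cls, path):
#             with open(os.path.join(path, "model.json")) as f:
#                 return json.load(f)
#         def save(self, path):
#             with open(os.path.join(path, "model.json"), "w") as f:
#                 json.dump(self._model, f)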
```
#### File: BentoML/bentoml/keras.py
```python
import os
import typing as t
import cloudpickle
from ._internal.models.base import (
H5_EXTENSION,
HDF5_EXTENSION,
JSON_EXTENSION,
MODEL_NAMESPACE,
PICKLE_EXTENSION,
Model,
)
from ._internal.types import MetadataType, PathType
from .exceptions import MissingDependencyException
# fmt: off
try:
import tensorflow as tf
from tensorflow import keras
except ImportError:
raise MissingDependencyException("tensorflow is required by KerasModel as backend runtime.") # noqa
# fmt: on
class KerasModel(Model):
"""
Model class for saving/loading :obj:`keras` models using Tensorflow backend.
Args:
model (`tf.keras.models.Model`):
Keras model instance and its subclasses.
store_as_json (`bool`, `optional`, default to `False`):
Whether to store Keras model as JSON and weights
custom_objects (`Dict[str, Any]`, `optional`, default to `None`):
Dictionary of Keras custom objects for model
metadata (`Dict[str, Any]`, `optional`, default to `None`):
Class metadata
Raises:
MissingDependencyException:
:obj:`tensorflow` is required by KerasModel
InvalidArgument:
model being packed must be instance of :class:`tf.keras.models.Model`
Example usage under :code:`train.py`::
TODO:
One then can define :code:`bento.py`::
TODO:
"""
_graph = tf.compat.v1.get_default_graph()
# NOTES: sess should be user facing for V1 compatibility
sess = tf.compat.v1.Session(graph=_graph)
def __init__(
self,
model: "keras.models.Model",
store_as_json: t.Optional[bool] = False,
custom_objects: t.Optional[t.Dict[str, t.Any]] = None,
metadata: t.Optional[MetadataType] = None,
):
super(KerasModel, self).__init__(model, metadata=metadata)
self._store_as_json: t.Optional[bool] = store_as_json
self._custom_objects: t.Optional[t.Dict[str, t.Any]] = custom_objects
@staticmethod
def __get_custom_obj_fpath(path: PathType) -> PathType:
return os.path.join(path, f"{MODEL_NAMESPACE}_custom_objects{PICKLE_EXTENSION}")
@staticmethod
def __get_model_saved_fpath(path: PathType) -> PathType:
return os.path.join(path, f"{MODEL_NAMESPACE}{H5_EXTENSION}")
@staticmethod
def __get_model_weight_fpath(path: PathType) -> PathType:
return os.path.join(path, f"{MODEL_NAMESPACE}_weights{HDF5_EXTENSION}")
@staticmethod
def __get_model_json_fpath(path: PathType) -> PathType:
return os.path.join(path, f"{MODEL_NAMESPACE}_json{JSON_EXTENSION}")
@classmethod
def load(cls, path: PathType) -> "keras.models.Model":
default_custom_objects = None
if os.path.isfile(cls.__get_custom_obj_fpath(path)):
with open(cls.__get_custom_obj_fpath(path), "rb") as dco_file:
default_custom_objects = cloudpickle.load(dco_file)
with cls.sess.as_default(): # pylint: disable=not-context-manager
if os.path.isfile(cls.__get_model_json_fpath(path)):
# load keras model via json and weights since json file are in path
with open(cls.__get_model_json_fpath(path), "r") as json_file:
model_json = json_file.read()
obj = keras.models.model_from_json(
model_json, custom_objects=default_custom_objects
)
obj.load_weights(cls.__get_model_weight_fpath(path))
else:
# otherwise, load keras model via standard load_model
obj = keras.models.load_model(
cls.__get_model_saved_fpath(path),
custom_objects=default_custom_objects,
)
if isinstance(obj, dict):
model = obj["model"]
else:
model = obj
return model
def save(self, path: PathType) -> None:
tf.compat.v1.keras.backend.get_session()
# save custom_objects for model
if self._custom_objects:
with open(self.__get_custom_obj_fpath(path), "wb") as custom_object_file:
cloudpickle.dump(self._custom_objects, custom_object_file)
if self._store_as_json:
# save keras model using json and weights if requested
with open(self.__get_model_json_fpath(path), "w") as json_file:
json_file.write(self._model.to_json())
self._model.save_weights(self.__get_model_weight_fpath(path))
else:
# otherwise, save standard keras model
self._model.save(self.__get_model_saved_fpath(path))
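# A minimal usage sketch (illustrative only):
#     model = keras.Sequential([keras.layers.Dense(1, input_shape=(4,))])
#     KerasModel(model).save("/tmp/keras_h5")                      # single .h5 file
#     KerasModel(model, store_as_json=True).save("/tmp/keras_js")  # json + weights
#     restored = KerasModel.load("/tmp/keras_h5")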
```
#### File: BentoML/bentoml/sklearn.py
```python
import os
import typing as t
from ._internal.models.base import MODEL_NAMESPACE, PICKLE_EXTENSION, Model
from ._internal.types import MetadataType, PathType
from .exceptions import MissingDependencyException
MT = t.TypeVar("MT")
try:
import joblib
except ImportError:
try:
from sklearn.externals import joblib
except ImportError:
raise MissingDependencyException(
"sklearn module is required to use SklearnModel"
)
class SklearnModel(Model):
"""
Model class for saving/loading :obj:`sklearn` models.
Args:
model (`Any`, that is omitted by `sklearn`):
Any model that is omitted by `sklearn`
metadata (`Dict[str, Any]`, `optional`, default to `None`):
Class metadata
Raises:
MissingDependencyException:
:obj:`sklearn` is required by SklearnModel
Example usage under :code:`train.py`::
TODO:
One then can define :code:`bento.py`::
TODO:
"""
def __init__(self, model: MT, metadata: t.Optional[MetadataType] = None):
super(SklearnModel, self).__init__(model, metadata=metadata)
@staticmethod
def __get_pickle_fpath(path: PathType) -> PathType:
return os.path.join(path, f"{MODEL_NAMESPACE}{PICKLE_EXTENSION}")
@classmethod
def load(cls, path: PathType) -> t.Any:
return joblib.load(cls.__get_pickle_fpath(path), mmap_mode="r")
def save(self, path: PathType) -> None:
joblib.dump(self._model, self.__get_pickle_fpath(path))
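# A minimal usage sketch (illustrative only):
#     from sklearn.linear_model import LinearRegression
#     reg = LinearRegression().fit([[0], [1], [2]], [0, 1, 2])
#     SklearnModel(reg).save("/tmp/sk_model")
#     restored = SklearnModel.load("/tmp/sk_model")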
```
#### File: BentoML/bentoml/tensorflow.py
```python
import logging
import os
import pathlib
import typing as t
from distutils.dir_util import copy_tree
from ._internal.models.base import Model
from ._internal.types import MetadataType, PathType
from ._internal.utils.tensorflow import (
cast_tensor_by_spec,
get_arg_names,
get_input_signatures,
get_restored_functions,
pretty_format_restored_model,
)
from .exceptions import MissingDependencyException
try:
import tensorflow as tf
from tensorflow.python.training.tracking.tracking import AutoTrackable
TF2 = tf.__version__.startswith("2")
except ImportError:
raise MissingDependencyException("tensorflow is required by TensorflowModel")
logger = logging.getLogger(__name__)
AUTOTRACKABLE_CALLABLE_WARNING: str = """\
Importing SavedModels from TensorFlow 1.x. `outputs = imported(inputs)`
will not be supported by BentoML due to the `tensorflow` API.\n
See https://www.tensorflow.org/api_docs/python/tf/saved_model/load for
more details.
"""
TF_FUNCTION_WARNING: str = """\
Due to TensorFlow's internal mechanism, only methods
wrapped under `@tf.function` decorator and the Keras default function
`__call__(inputs, training=False)` can be restored after a save & load.\n
You can test the restored model object via `TensorflowModel.load(path)`
"""
KERAS_MODEL_WARNING: str = """\
BentoML detected that {name} is being used to pack a Keras API
based model. In order to get optimal serving performance, we recommend
to wrap your keras model `call()` methods with `@tf.function` decorator.
"""
class _TensorflowFunctionWrapper:
def __init__(
self,
origin_func: t.Callable[..., t.Any],
arg_names: t.Optional[list] = None,
arg_specs: t.Optional[list] = None,
kwarg_specs: t.Optional[dict] = None,
) -> None:
self.origin_func = origin_func
self.arg_names = arg_names
self.arg_specs = arg_specs
self.kwarg_specs = {k: v for k, v in zip(arg_names or [], arg_specs or [])}
self.kwarg_specs.update(kwarg_specs or {})
def __call__(self, *args, **kwargs): # type: ignore
if self.arg_specs is None and self.kwarg_specs is None:
return self.origin_func(*args, **kwargs)
for k in kwargs:
if k not in self.kwarg_specs:
raise TypeError(f"Function got an unexpected keyword argument {k}")
arg_keys = {k for k, _ in zip(self.arg_names, args)}
_ambiguous_keys = arg_keys & set(kwargs)
if _ambiguous_keys:
raise TypeError(f"got two values for arguments '{_ambiguous_keys}'")
# INFO:
# how signatures with kwargs work:
# https://github.com/tensorflow/tensorflow/blob/v2.0.0/tensorflow/python/eager/function.py#L1519
transformed_args = tuple(
cast_tensor_by_spec(arg, spec) for arg, spec in zip(args, self.arg_specs)
)
transformed_kwargs = {
k: cast_tensor_by_spec(arg, self.kwarg_specs[k])
for k, arg in kwargs.items()
}
return self.origin_func(*transformed_args, **transformed_kwargs)
def __getattr__(self, k): # type: ignore
return getattr(self.origin_func, k)
@classmethod
def hook_loaded_model(cls, loaded_model) -> None: # type: ignore # noqa
funcs = get_restored_functions(loaded_model)
for k, func in funcs.items():
arg_names = get_arg_names(func)
sigs = get_input_signatures(func)
if not sigs:
continue
arg_specs, kwarg_specs = sigs[0]
setattr(
loaded_model,
k,
cls(
func,
arg_names=arg_names,
arg_specs=arg_specs,
kwarg_specs=kwarg_specs,
),
)
_TensorflowFunctionWrapper.__doc__ = """\
TODO:
"""
class TensorflowModel(Model):
"""
Artifact class for saving/loading :obj:`tensorflow` model
with :obj:`tensorflow.saved_model` format
Args:
model (`Union[tf.keras.Models, tf.Module, PathType, pathlib.PurePath]`):
Omit every tensorflow model instance of type :obj:`tf.keras.Models` or
:obj:`tf.Module`
metadata (`Dict[str, Any]`, `optional`, default to `None`):
Class metadata
Raises:
MissingDependencyException:
:obj:`tensorflow` is required by TensorflowModel
Example usage under :code:`train.py`::
TODO:
One then can define :code:`bento.py`::
TODO:
"""
def __init__(
self,
model: t.Union[tf.keras.Model, tf.Module, PathType, pathlib.PurePath],
metadata: t.Optional[MetadataType] = None,
):
super(TensorflowModel, self).__init__(model, metadata=metadata)
@staticmethod
def __load_tf_saved_model( # pylint: disable=unused-private-member
path: str,
) -> t.Union[AutoTrackable, t.Any]:
if TF2:
return tf.saved_model.load(path)
else:
loaded = tf.compat.v2.saved_model.load(path)
if isinstance(loaded, AutoTrackable) and not hasattr(loaded, "__call__"):
logger.warning(AUTOTRACKABLE_CALLABLE_WARNING)
return loaded
@classmethod
def load(cls, path: PathType): # type: ignore
# TODO: type hint returns TF Session or
# Keras model API
model = cls.__load_tf_saved_model(str(path))
_TensorflowFunctionWrapper.hook_loaded_model(model)
logger.warning(TF_FUNCTION_WARNING)
# pretty format loaded model
logger.info(pretty_format_restored_model(model))
if hasattr(model, "keras_api"):
logger.warning(KERAS_MODEL_WARNING.format(name=cls.__name__))
return model
def save( # pylint: disable=arguments-differ
self,
path: PathType,
signatures: t.Optional[t.Union[t.Callable[..., t.Any], dict]] = None,
options: t.Optional["tf.saved_model.SaveOptions"] = None,
) -> None: # noqa
"""
Save TensorFlow Trackable object `obj` from [SavedModel format] to path.
Args:
path (`Union[str, bytes, os.PathLike]`):
Path containing a trackable object to export.
signatures (`Union[Callable[..., Any], dict]`, `optional`, default to `None`):
`signatures` is one of three types:
- a `tf.function` with an input signature specified, which will use the default serving signature key
- a dictionary, which maps signature keys to either :obj:`tf.function` instances with input signatures or concrete functions. Keys of such a dictionary may be arbitrary strings, but will typically be from the :obj:`tf.saved_model.signature_constants` module.
- the result of `f.get_concrete_function` on a `@tf.function`-decorated function `f`, in which case `f` will be used to generate a signature for the SavedModel under the default serving signature key.
:code:`tf.function` examples::
>>> class Adder(tf.Module):
... @tf.function
... def add(self, x):
... return x + x
>>> model = Adder()
>>> tf.saved_model.save(
... model, '/tmp/adder',signatures=model.add.get_concrete_function(
... tf.TensorSpec([], tf.float32)))
options (`tf.saved_model.SaveOptions`, `optional`, default to `None`):
:obj:`tf.saved_model.SaveOptions` object that specifies options for saving.
.. note::
Refers to `Signatures explanation <https://www.tensorflow.org/api_docs/python/tf/saved_model/save>`_
from Tensorflow documentation for more information.
Raises:
ValueError: If `obj` is not trackable.
""" # noqa: E501 # pylint: enable=line-too-long
if not isinstance(self._model, (str, bytes, pathlib.PurePath, os.PathLike)):
if TF2:
tf.saved_model.save(
self._model, str(path), signatures=signatures, options=options
)
else:
if options:
logger.warning(
f"Parameter 'options: {str(options)}' is ignored when "
f"using tensorflow {tf.__version__}"
)
tf.saved_model.save(self._model, str(path), signatures=signatures)
else:
assert os.path.isdir(self._model)
copy_tree(str(self._model), str(path))
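# A minimal usage sketch (mirrors the Adder example in the docstring above):
#     class Adder(tf.Module):
#         @tf.function(input_signature=[tf.TensorSpec([], tf.float32)])
#         def add(self, x):
#             return x + x
#     TensorflowModel(Adder()).save("/tmp/tf_model")
#     restored = TensorflowModel.load("/tmp/tf_model")
#     restored.add(tf.constant(2.0))  # -> 4.0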
```
#### File: BentoML/bentoml/transformers.py
```python
import os
import pathlib
import typing as t
from importlib import import_module
from ._internal.models.base import Model
from ._internal.types import MetadataType, PathType
from .exceptions import InvalidArgument, MissingDependencyException, NotFound
try:
import transformers
except ImportError:
raise MissingDependencyException("transformers is required by TransformersModel")
TransformersInput = t.TypeVar(
"TransformersInput",
bound=t.Union[
str, os.PathLike, transformers.PreTrainedModel, transformers.PreTrainedTokenizer
],
)
class TransformersModel(Model):
"""
Model class for saving/loading :obj:`transformers` models.
Args:
model (`Union[str, os.PathLike, Dict[str, Union[transformers.PreTrainedModel, transformers.PreTrainedTokenizer]]`):
A dictionary `{'model':<model_obj>, 'tokenizer':<tokenizer_obj>}`
to set up the Transformers model
metadata (`Dict[str, Any]`, `optional`, default to `None`):
Class metadata
Raises:
MissingDependencyException:
:obj:`transformers` is required by TransformersModel
InvalidArgument:
:obj:`model` must be either a dictionary
or a path for saved transformers model or
a pre-trained model string provided by transformers
NotFound:
if the provided model name or model path is not found
Example usage under :code:`train.py`::
TODO:
One then can define :code:`bento.py`::
TODO:
""" # noqa # pylint: enable=line-too-long
_model_type: str = "AutoModelWithLMHead"
def __init__(
self, model: TransformersInput, metadata: t.Optional[MetadataType] = None,
):
super(TransformersModel, self).__init__(model, metadata=metadata)
@staticmethod
def __load_from_directory( # pylint: disable=unused-private-member
path: PathType, model_type: str, tokenizer_type: str
) -> t.Dict[str, t.Any]:
transformers_model = getattr(
import_module("transformers"), model_type
).from_pretrained(str(path))
tokenizer = getattr(
import_module("transformers"), tokenizer_type
).from_pretrained(str(path))
return {"model": transformers_model, "tokenizer": tokenizer}
@staticmethod
def __load_from_dict( # pylint: disable=unused-private-member
transformers_dict: t.Dict[str, t.Any]
) -> dict:
if not transformers_dict.get("model"):
raise InvalidArgument(
" 'model' key is not found in the dictionary."
" Expecting a dictionary of with keys 'model' and 'tokenizer'"
)
if not transformers_dict.get("tokenizer"):
raise InvalidArgument(
"'tokenizer' key is not found in the dictionary. "
"Expecting a dictionary of with keys 'model' and 'tokenizer'"
)
model_class = str(type(transformers_dict.get("model")).__module__)
tokenizer_class = str(type(transformers_dict.get("tokenizer")).__module__)
# if either model or tokenizer is not an object of transformers
if not model_class.startswith("transformers"):
raise InvalidArgument(
"Expecting a transformers model object but object passed is {}".format(
type(transformers_dict.get("model"))
)
)
if not tokenizer_class.startswith("transformers"):
raise InvalidArgument(
"Expecting a transformers model object but object passed is {}".format(
type(transformers_dict.get("tokenizer"))
)
)
return transformers_dict
@classmethod
def __load_from_string( # pylint: disable=unused-private-member
cls, model_name: str
) -> dict:
try:
transformers_model = getattr(
import_module("transformers"), cls._model_type
).from_pretrained(model_name)
tokenizer = transformers.AutoTokenizer.from_pretrained(model_name)
return {"model": transformers_model, "tokenizer": tokenizer}
except EnvironmentError:
raise NotFound(f"{model_name} is not available within transformers")
except AttributeError:
raise NotFound(f"transformers has no model type called {cls._model_type}")
# fmt: off
@classmethod
def load(cls, path: t.Union[PathType, dict]): # type: ignore
# fmt: on
if isinstance(path, (str, bytes, os.PathLike, pathlib.PurePath)):
str_path = str(path)
if os.path.isdir(str_path):
with open(os.path.join(path, "__model__type.txt"), "r") as f:
_model_type = f.read().strip()
with open(os.path.join(path, "tokenizer_type.txt"), "r") as f:
_tokenizer_type = f.read().strip()
loaded_model = cls.__load_from_directory(
path, _model_type, _tokenizer_type
)
else:
loaded_model = cls.__load_from_string(str(path))
elif isinstance(path, dict):
loaded_model = cls.__load_from_dict(path)
else:
err_msg: str = """\
Expected either model name or a dictionary only
containing `model` and `tokenizer` as keys, but
got {path} instead.
"""
raise InvalidArgument(err_msg.format(path=type(path)))
return loaded_model
def __save_model_type(self, path: PathType, tokenizer_type: str) -> None:
with open(os.path.join(path, "__model__type.txt"), "w") as f:
f.write(self._model_type)
with open(os.path.join(path, "tokenizer_type.txt"), "w") as f:
f.write(tokenizer_type)
def save(self, path: PathType) -> None:
self._model_type = self._model.get("model").__class__.__name__
tokenizer_type = self._model.get("tokenizer").__class__.__name__
self._model.get("model").save_pretrained(path)
self._model.get("tokenizer").save_pretrained(path)
self.__save_model_type(path, tokenizer_type)
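# A minimal usage sketch (illustrative; the "gpt2" checkpoint is an assumption):
#     tokenizer = transformers.AutoTokenizer.from_pretrained("gpt2")
#     model = transformers.AutoModelWithLMHead.from_pretrained("gpt2")
#     TransformersModel({"model": model, "tokenizer": tokenizer}).save("/tmp/tfs_model")
#     restored = TransformersModel.load("/tmp/tfs_model")  # {"model": ..., "tokenizer": ...}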
```
#### File: frameworks/tensorflow/test_v2_model_artifact.py
```python
import os
import numpy as np
import pytest
import tensorflow as tf
from bentoml.tensorflow import TensorflowModel
from tests._internal.frameworks.tensorflow_utils import (
KerasSequentialModel,
NativeModel,
NativeRaggedModel,
)
native_data = [[1, 2, 3, 4, 5]]
native_tensor = tf.constant(np.asfarray(native_data))
ragged_data = [[15], [7, 8], [1, 2, 3, 4, 5]]
ragged_tensor = tf.ragged.constant(ragged_data, dtype=tf.float64)
def predict__model(model, tensor):
return model(tensor)
@pytest.mark.parametrize(
"model_class, input_type, predict_fn",
[
(KerasSequentialModel(), native_tensor, predict__model),
(NativeModel(), native_tensor, predict__model),
(NativeRaggedModel(), ragged_tensor, predict__model),
],
)
def test_tensorflow_v2_save_load(model_class, input_type, predict_fn, tmpdir):
TensorflowModel(model_class).save(tmpdir)
assert os.path.exists(os.path.join(tmpdir, "saved_model.pb"))
tf2_loaded = TensorflowModel.load(tmpdir)
comparison = predict_fn(tf2_loaded, input_type) == predict_fn(
model_class, input_type
)
assert all(comparison)
```
#### File: integration/frameworks/test_h2o_model_artifact.py
```python
import json
import os
import typing as t
import h2o
import h2o.automl
import h2o.model
import pandas as pd
import pytest
from bentoml.h2o import H2OModel
test_data = {
"TemperatureCelcius": {"0": 21.6},
"ExhaustVacuumHg": {"0": 62.52},
"AmbientPressureMillibar": {"0": 1017.23},
"RelativeHumidity": {"0": 67.87},
}
def predict_dataframe(
model: "h2o.model.model_base.ModelBase", df: "pd.DataFrame"
) -> t.Optional[str]:
hf = h2o.H2OFrame(df)
pred = model.predict(hf)
return pred.as_data_frame().to_json(orient="records")
@pytest.fixture(scope="module")
def train_h2o_aml() -> h2o.automl.H2OAutoML:
h2o.init()
h2o.no_progress()
df = h2o.import_file(
"https://github.com/yubozhao/bentoml-h2o-data-for-testing/raw/master/"
"powerplant_output.csv"
)
splits = df.split_frame(ratios=[0.8], seed=1)
train = splits[0]
test = splits[1]
aml = h2o.automl.H2OAutoML(
max_runtime_secs=60, seed=1, project_name="powerplant_lb_frame"
)
aml.train(y="HourlyEnergyOutputMW", training_frame=train, leaderboard_frame=test)
return aml
def test_h2o_save_load(train_h2o_aml, tmpdir):
test_df: pd.DataFrame = pd.read_json(json.dumps(test_data))
H2OModel(train_h2o_aml.leader).save(tmpdir)
assert os.path.exists(os.path.join(tmpdir, os.listdir(tmpdir)[0]))
h2o_loaded: h2o.model.model_base.ModelBase = H2OModel.load(tmpdir)
# fmt: off
assert predict_dataframe(train_h2o_aml.leader, test_df) == predict_dataframe(h2o_loaded, test_df) # noqa
# fmt: on
```
#### File: integration/frameworks/test_pytorch_lightning_model_artifact.py
```python
import pandas as pd
import pytorch_lightning as pl
import torch
from bentoml.pytorch import PyTorchLightningModel
from tests._internal.helpers import assert_have_file_extension
test_df = pd.DataFrame([[5, 4, 3, 2]])
class FooModel(pl.LightningModule):
def forward(self, input):
return input.add(1)
def predict_df(model: pl.LightningModule, df: pd.DataFrame):
input_tensor = torch.from_numpy(df.to_numpy())
return model(input_tensor).numpy().tolist()
def test_pl_save_load(tmpdir):
model: pl.LightningModule = FooModel()
PyTorchLightningModel(model).save(tmpdir)
assert_have_file_extension(tmpdir, ".pt")
pl_loaded: pl.LightningModule = PyTorchLightningModel.load(tmpdir)
assert (
predict_df(model, test_df) == predict_df(pl_loaded, test_df) == [[6, 5, 4, 3]]
)
```
#### File: integration/frameworks/test_pytorch_model_artifact.py
```python
import numpy as np
import pandas as pd
import pytest
import torch
from torch import nn
from bentoml.pytorch import PyTorchModel
from tests._internal.frameworks.pytorch_utils import LinearModel, test_df
from tests._internal.helpers import assert_have_file_extension
def predict_df(model: nn.Module, df: pd.DataFrame):
input_data = df.to_numpy().astype(np.float32)
input_tensor = torch.from_numpy(input_data)
return model(input_tensor).unsqueeze(dim=0).item()
@pytest.mark.parametrize("test_type", ["", "tracedmodel", "scriptedmodel"])
def test_pytorch_save_load(test_type, tmpdir):
_model: nn.Module = LinearModel()
if "trace" in test_type:
tracing_inp = torch.ones(5)
model = torch.jit.trace(_model, tracing_inp)
elif "script" in test_type:
model = torch.jit.script(_model)
else:
model = _model
PyTorchModel(model).save(tmpdir)
assert_have_file_extension(tmpdir, ".pt")
pytorch_loaded: nn.Module = PyTorchModel.load(tmpdir)
assert predict_df(model, test_df) == predict_df(pytorch_loaded, test_df)
```
#### File: _internal/frameworks/sklearn_utils.py
```python
from collections import namedtuple
import pandas as pd
from sklearn.datasets import load_iris
from sklearn.neighbors import KNeighborsClassifier
test_data = {
"mean radius": 10.80,
"mean texture": 21.98,
"mean perimeter": 68.79,
"mean area": 359.9,
"mean smoothness": 0.08801,
"mean compactness": 0.05743,
"mean concavity": 0.03614,
"mean concave points": 0.2016,
"mean symmetry": 0.05977,
"mean fractal dimension": 0.3077,
"radius error": 1.621,
"texture error": 2.240,
"perimeter error": 20.20,
"area error": 20.02,
"smoothness error": 0.006543,
"compactness error": 0.02148,
"concavity error": 0.02991,
"concave points error": 0.01045,
"symmetry error": 0.01844,
"fractal dimension error": 0.002690,
"worst radius": 12.76,
"worst texture": 32.04,
"worst perimeter": 83.69,
"worst area": 489.5,
"worst smoothness": 0.1303,
"worst compactness": 0.1696,
"worst concavity": 0.1927,
"worst concave points": 0.07485,
"worst symmetry": 0.2965,
"worst fractal dimension": 0.07662,
}
test_df = pd.DataFrame([test_data])
ModelWithData = namedtuple("ModelWithData", ["model", "data"])
def sklearn_model_data(clf=KNeighborsClassifier, num_data=4) -> ModelWithData:
model = clf()
iris = load_iris()
X = iris.data[:, :num_data]
Y = iris.target
model.fit(X, Y)
return ModelWithData(model=model, data=X)
```
#### File: _internal/frameworks/tensorflow_utils.py
```python
import numpy as np
import tensorflow as tf
import tensorflow.keras as keras
def custom_activation(x):
return tf.nn.tanh(x) ** 2
class CustomLayer(keras.layers.Layer):
def __init__(self, units=32, **kwargs):
super(CustomLayer, self).__init__(**kwargs)
self.units = tf.Variable(units, name="units")
def call(self, inputs, training=False, **kwargs):
if training:
return inputs * self.units
else:
return inputs
def get_config(self):
config = super(CustomLayer, self).get_config()
config.update({"units": self.units.numpy()})
return config
def KerasSequentialModel() -> keras.models.Model:
net = keras.models.Sequential(
(
keras.layers.Dense(
units=1,
input_shape=(5,),
use_bias=False,
kernel_initializer=keras.initializers.Ones(),
),
)
)
opt = keras.optimizers.Adam(0.002, 0.5)
net.compile(optimizer=opt, loss="binary_crossentropy", metrics=["accuracy"])
return net
class NativeModel(tf.Module):
def __init__(self):
super().__init__()
self.weights = np.asfarray([[1.0], [1.0], [1.0], [1.0], [1.0]])
self.dense = lambda inputs: tf.matmul(inputs, self.weights)
@tf.function(
input_signature=[tf.TensorSpec(shape=None, dtype=tf.float64, name="inputs")]
)
def __call__(self, inputs):
return self.dense(inputs)
class NativeRaggedModel(NativeModel):
@tf.function(
input_signature=[
tf.RaggedTensorSpec(tf.TensorShape([None, None]), tf.float64, 1, tf.int64)
]
)
def __call__(self, inputs):
inputs = inputs.to_tensor(shape=[None, 5], default_value=0)
return self.dense(inputs)
```
#### File: tests/_internal/helpers.py
```python
import os
def assert_have_file_extension(dir: str, ext: str):
assert os.path.isdir(dir), f"{dir} is not a directory"
assert any(f.endswith(ext) for f in os.listdir(dir))
```
#### File: tests/unit/test_artifacts.py
```python
import os
import pytest
from bentoml._internal.models import Model, PickleModel
from tests._internal.helpers import assert_have_file_extension
_metadata = {"test": "Hello", "num": 0.234}
def create_mock_class(name):
class Foo:
n = 1
if name:
Foo.__name__ = name
return Foo
class FooModel(Model):
def __init__(self, model, metadata=None):
super().__init__(model, metadata)
if metadata is None:
self._metadata = _metadata
def save(self, path):
return os.path.join(path, "model.test")
@classmethod
def load(cls, path):
return "foo"
class InvalidModel(Model):
"""InvalidModel doesn't have save and load implemented"""
def __init__(self, model=None):
super().__init__(model)
@pytest.mark.parametrize(
"args, kwargs, metadata",
[
([create_mock_class("foo")], {"metadata": _metadata}, _metadata),
([create_mock_class("bar")], {}, None),
([b"\x00"], {}, None),
(["test"], {}, None),
([1], {}, None),
],
)
def test_base_artifact(args, kwargs, metadata):
ba = Model(*args, **kwargs)
pkl = PickleModel(*args, **kwargs)
    # Both artifacts must expose _model and _metadata in their instance dicts
    assert all(
        attr in d for d in [ba.__dict__, pkl.__dict__] for attr in ["_model", "_metadata"]
    )
assert ba.metadata == metadata
assert pkl.metadata == metadata
@pytest.mark.parametrize(
"model",
[
(create_mock_class("MockModel")),
(create_mock_class("test")),
(create_mock_class("1")),
],
)
def test_save_artifact(model, tmpdir):
foo = FooModel(model, metadata=_metadata)
foo.save(tmpdir)
assert_have_file_extension(tmpdir, ".yml")
@pytest.mark.parametrize(
"model", [(create_mock_class("MockModel")), (create_mock_class("test"))],
)
def test_pkl_artifact(model, tmpdir):
pkl = PickleModel(model, metadata=_metadata)
pkl.save(tmpdir)
assert model == PickleModel.load(tmpdir)
assert_have_file_extension(tmpdir, ".pkl")
assert_have_file_extension(tmpdir, ".yml")
@pytest.mark.parametrize(
"func, exc",
[
(InvalidModel().save, NotImplementedError),
(InvalidModel.load, NotImplementedError),
],
)
def test_invalid_impl(func, exc):
with pytest.raises(exc):
func("/tmp/test")
```
#### File: yatai/yatai/deployment_utils.py
```python
import logging
from .exceptions import YataiDeploymentException
from .proto.deployment_pb2 import Deployment, DeploymentSpec
from .utils.ruamel_yaml import YAML
logger = logging.getLogger(__name__)
SPEC_FIELDS_AVAILABLE_FOR_UPDATE = ["bento_name", "bento_version"]
SAGEMAKER_FIELDS_AVAILABLE_FOR_UPDATE = [
"api_name",
"instance_type",
"instance_count",
"num_of_gunicorn_workers_per_instance",
]
def deployment_dict_to_pb(deployment_dict):
deployment_pb = Deployment()
if deployment_dict.get("spec"):
spec_dict = deployment_dict.get("spec")
else:
raise YataiDeploymentException('"spec" is required field for deployment')
platform = spec_dict.get("operator")
if platform is not None:
# converting platform parameter to DeploymentOperator name in proto
# e.g. 'aws-lambda' to 'AWS_LAMBDA'
deployment_pb.spec.operator = DeploymentSpec.DeploymentOperator.Value(
platform.replace("-", "_").upper()
)
for field in ["name", "namespace"]:
if deployment_dict.get(field):
deployment_pb.__setattr__(field, deployment_dict.get(field))
if deployment_dict.get("labels") is not None:
deployment_pb.labels.update(deployment_dict.get("labels"))
if deployment_dict.get("annotations") is not None:
deployment_pb.annotations.update(deployment_dict.get("annotations"))
if spec_dict.get("bento_name"):
deployment_pb.spec.bento_name = spec_dict.get("bento_name")
if spec_dict.get("bento_version"):
deployment_pb.spec.bento_version = spec_dict.get("bento_version")
return deployment_pb
def deployment_yaml_string_to_pb(deployment_yaml_string):
yaml = YAML()
deployment_yaml = yaml.load(deployment_yaml_string)
return deployment_dict_to_pb(deployment_yaml)
``` |
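For reference, a sketch of the YAML shape `deployment_yaml_string_to_pb` expects; the field names are taken from the parser above, while the values and the import path are assumptions:
```python
from yatai.deployment_utils import deployment_yaml_string_to_pb  # assumed path

example_yaml = """
name: my-classifier-deployment
namespace: dev
labels:
  team: platform
spec:
  operator: aws-lambda        # becomes DeploymentSpec.DeploymentOperator.AWS_LAMBDA
  bento_name: IrisClassifier
  bento_version: 1.0.0
"""

deployment_pb = deployment_yaml_string_to_pb(example_yaml)
```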
{
"source": "joswr1ght/md5deep",
"score": 3
} |
#### File: joswr1ght/md5deep/md5deep.py
```python
import os, sys, hashlib
# Reproduce this output with slashes consistent for Windows systems
#ba2812a436909554688154be461d976c A\SEC575-Clown-Chat\nvram
# Optimized for low-memory systems, read whole file with blocksize=0
def md5sum(filename, blocksize=65536):
hash = hashlib.md5()
with open(filename, "rb") as f:
        # blocksize=0 means "hash the whole file in one read"; f.read(0) would
        # return immediately, so fall back to an unbounded read() in that case
        for block in iter(lambda: f.read(blocksize) if blocksize else f.read(), ""):
hash.update(block)
return hash.hexdigest()
def usage():
print "Usage: md5deep.py [OPTIONS] [FILES]"
print "-r - recursive mode, all subdirectories are traversed."
print "-X <file> - enables negative matching mode."
print "-f - speed up hash calculations, using more memory."
print "-0 - Uses a NULL character (/0) to terminate each line instead of a newline. Useful for processing filenames with strange characters."
def validate_hashes(hashfile, hashlist):
# Open file and build a new hashlist
hashlistrec = []
with open(hashfile, "r") as f:
for line in f:
filehash,filename = line.rstrip().split(" ")
            # Convert to platform convention directory separators
filename = normfname(filename)
# Add entry to hashlistrec
hashlistrec.append((filename, filehash))
for diff in list(set(hashlistrec) - set(hashlist)):
# Replicate "-n" md5deep functionality; print only the filename
# if the file is missing in the filename list; print the hash
# of the current file if it is different from the negative match
# file.
if (not os.path.isfile(diff[0])):
# File from negative match list is missing, just print filename
print winfname(diff[0])
else:
print diff[0] + " " + winfname(diff[1])
# Produce a Windows-style filename
def winfname(filename):
return filename.replace("/","\\")
# Normalize filename based on platform
def normfname(filename):
if os.name == 'nt': # Windows
return filename.replace("/", "\\")
else:
return filename.replace("\\","/")
if __name__ == '__main__':
opt_recursive = None
opt_negmatch = None
opt_fast = None
opt_null = None
opt_files = []
if len(sys.argv) == 1:
usage()
sys.exit(0)
args = sys.argv[1:]
it = iter(args)
for i in it:
if i == '-r':
opt_recursive = True
continue
elif i == '-0':
opt_null = True
continue
elif i == '-f':
opt_fast = True
elif i == '-X':
opt_negmatch = next(it)
if not os.path.isfile(opt_negmatch):
sys.stdout.write("Cannot open negative match file %s\n"%opt_negmatch)
sys.exit(-1)
continue
else:
opt_files.append(i)
if opt_fast:
md5blocklen=0
else:
# Default to optimize for low-memory systems
md5blocklen=65536
# Build a list of (hash,filename) for each file, regardless of specified
# options
hashlist = []
# Hash files in the current directory
for f in opt_files:
if os.path.isfile(f):
hashlist.append((f, md5sum(f, md5blocklen)))
    # Walk all subdirectories of the named targets (opt_files rather than raw
    # argv, which would also include option flags such as -r)
    if opt_recursive:
        for start in opt_files:
for (directory, _, files) in os.walk(start):
for f in files:
path = os.path.join(directory, f)
hashlist.append((path, md5sum(path, md5blocklen)))
# With the hashlist built, compare to the negative match list, or print
# the results.
if opt_negmatch:
validate_hashes(opt_negmatch, hashlist)
else:
# Just print out the list with Windows-syle filenames
for hash in hashlist:
if opt_null:
print "%s %s\0"%(hash[1],winfname(hash[0]))
else:
print "%s %s"%(hash[1],winfname(hash[0]))
``` |
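The chunked digest in `md5sum` above, including the blocksize=0 whole-file mode, reduces to this self-contained Python 3 sketch (the path at the bottom is a placeholder):
```python
import hashlib

def md5_file(path, blocksize=65536):
    h = hashlib.md5()
    with open(path, "rb") as f:
        # blocksize=0 hashes the whole file in a single unbounded read()
        reader = (lambda: f.read(blocksize)) if blocksize else f.read
        for block in iter(reader, b""):  # b"" sentinel: stop at end of file
            h.update(block)
    return h.hexdigest()

# md5_file("firmware.bin")  # placeholder path
```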
{
"source": "joswr1ght/pptxtoc",
"score": 3
} |
#### File: joswr1ght/pptxtoc/pptxtoc.py
```python
import argparse
import tempfile
import zipfile
import glob
import platform
import sys
import os
import re
from PIL import ImageFont
from xml.dom.minidom import parse
from shutil import rmtree
from pptx import Presentation
from pptx.util import Inches, Pt
from pptx.enum.text import PP_ALIGN, MSO_AUTO_SIZE
FONTSIZE=18
# This is the number of dots that fit using the given font in a 8.5" text box
MAXDOTS=109.0
# This is the maximum pixel width of a 8.5" text box
MAXPXWIDTHTEXT=570.0
MAXPXWIDTHBULLETS=580.0
MAXLINESPERSLIDE=16
def getnotes(pptxfile):
words = {}
tmpd = tempfile.mkdtemp()
zipfile.ZipFile(pptxfile).extractall(path=tmpd, pwd=None)
# Parse notes content
path = tmpd + '/ppt/notesSlides/'
for infile in glob.glob(os.path.join(path, '*.xml')):
#parse each XML notes file from the notes folder.
dom = parse(infile)
noteslist = dom.getElementsByTagName('a:t')
# The page number is part of the filename
page = int(re.sub(r'\D', "", infile.split("/")[-1]))
text = ''
for node in noteslist:
xmlTag = node.toxml()
xmlData = xmlTag.replace('<a:t>', '').replace('</a:t>', '')
text += xmlData
# Convert to ascii to simplify
text = text.encode('ascii', 'ignore')
words[page] = text
# Remove all the files created with unzip
rmtree(tmpd)
return words
def createtoc(args, toc):
# Create a new Presentation object, using the Style document as the template
# The style document should be empty; otherwise we'll add the new ToC to the
# end of the specified document.
try:
prs = Presentation(args.stylepptx)
except:
sys.stderr.write("Cannot read input style PowerPoint file \'%s\'. Possible malformed file.\n"%args.stylepptx)
return
# Create a blank slide in the object using the second master slide style by default
blank_slide_layout = prs.slide_layouts[args.stylemasterslide]
slide = prs.slides.add_slide(blank_slide_layout)
slide.shapes.title.text = "Table of Contents"
# Get font information
font = ImageFont.truetype(args.fontdir + args.font, FONTSIZE)
# The ToC entries and the page numbers are strings delimited by \n
titles=''
pages=''
linecount=0
for pagenum in sorted(toc):
tocpxlen = font.getsize(toc[pagenum])[0]
if tocpxlen > MAXPXWIDTHTEXT:
sys.stderr.write("Text for ToC entry on page %d (\"%s\") is too long, truncating.\n"%(pagenum, toc[pagenum]))
# Trim one character off at a time until it fits! Presumably, the author will want to go back
# and fix their original content for a smarter summarization of the ToC entry.
while tocpxlen > MAXPXWIDTHTEXT:
toc[pagenum] = toc[pagenum][:-1]
tocpxlen = font.getsize(toc[pagenum])[0]
titles += toc[pagenum] + "\n"
pages += str(pagenum) + "\n"
linecount+=1
# If we exceed MAXLINESPERSLIDE, create the slide and add a new empty slide for more content
if linecount == MAXLINESPERSLIDE:
print "New slide"
generateslide(titles, pages, slide, font, prs)
titles=''
pages=''
linecount=0
# Create a blank slide in the object using the second master slide style by default
blank_slide_layout = prs.slide_layouts[args.stylemasterslide]
slide = prs.slides.add_slide(blank_slide_layout)
slide.shapes.title.text = "Table of Contents"
if linecount != 0:
print "Last slide"
generateslide(titles, pages, slide, font, prs)
def generateslide(titles, pages, slide, font, prs):
# This is the size of a single dot in pixels
dotwidth=MAXPXWIDTHTEXT/MAXDOTS
# Build the left-hand ToC entries first
top=Inches(1.75)
left=Inches(.5)
width=Inches(8.5)
height=Inches(5)
txBox = slide.shapes.add_textbox(left, top, width, height)
tf = txBox.text_frame
tf.auto_size=None
tf.text = titles
p=tf.paragraphs[0]
p.font.name = 'Tahoma'
p.font.size=Pt(FONTSIZE)
txBox = slide.shapes.add_textbox(left, top, width, height)
tf = txBox.text_frame
# Iterate through each of the ToC entries, calculating the number of dots needed in the middle textbox
for title in titles.split('\n')[0:-1]:
tocpxlen = font.getsize(title)[0]
#print "DEBUG: %03d %s"%(tocpxlen, toc[page])
# The number of dots we use is the max width in pixels, minus the length of the ToC entry in pixels,
# divided by the pixel width of a single dot, rounded down.
tf.text+=("." * int(( (float(MAXPXWIDTHBULLETS - tocpxlen)) / dotwidth ))) + "\n"
tf.auto_size=None
p=tf.paragraphs[0]
p.alignment = PP_ALIGN.RIGHT
p.font.name = 'Tahoma'
p.font.size=Pt(FONTSIZE)
left=Inches(9)
width=Inches(.5)
txBox = slide.shapes.add_textbox(left, top, width, height)
tf = txBox.text_frame
tf.auto_size=None
tf.text = pages
p=tf.paragraphs[0]
p.alignment = PP_ALIGN.RIGHT
p.font.name = 'Tahoma'
p.font.size=Pt(FONTSIZE)
try:
prs.save(args.outputpptx)
except:
sys.stderr.write("Error saving output pptx file \'%s\'.\n"%args.outputpptx)
return
class HelpWithRawFormatter(argparse.RawDescriptionHelpFormatter, argparse.ArgumentDefaultsHelpFormatter):
pass
if __name__ == "__main__":
# Establish a default for the location of fonts to make it easier to specify the location
# of the font.
deffontdir = '' # Works OK for Windows
if platform.system() == 'Darwin':
deffontdir='/Library/Fonts/'
elif platform.system() == 'Linux':
# Linux stores fonts in sub-directories, so users must specify sub-dir and font name
# beneath this directory.
deffontdir='/usr/share/fonts/truetype/'
# Parse command-line arguments
parser = argparse.ArgumentParser(
description='Create a Table of Content from a PowerPoint file',
prog='pptxtoc.py',
formatter_class=HelpWithRawFormatter)
parser.add_argument('-o', action="store", dest="outputpptx", default='toc.pptx',
help="output pptx ToC slide")
parser.add_argument('-f', action="store", dest="font", default='Tahoma',
help="font filename")
parser.add_argument('-F', action="store", dest="fontdir", default=deffontdir,
help="font directory")
parser.add_argument('-s', action="store", dest="stylepptx", default="Style.pptx",
help="PowerPoint style document with no slides")
parser.add_argument('-S', action="store", dest="stylemasterslide", type=int, default=2,
help="slide number in master view to use for output ToC slide")
# parser.add_argument('-z', action="store", dest="fontsize", type=int, default=FONTSIZE,
# help="font size in points/pt")
parser.add_argument('pptxfile')
args = parser.parse_args()
# Add the filename extension to font, relieving the user of this burden
args.font += '.ttf'
# Make sure the files exist and are readable
if not (os.path.isfile(args.pptxfile) and os.access(args.pptxfile, os.R_OK)):
sys.stderr.write("Cannot read the PowerPoint file \'%s\'.\n"%args.pptxfile)
sys.exit(1)
if not (os.path.isfile(args.stylepptx) and os.access(args.stylepptx, os.R_OK)):
sys.stderr.write("Cannot read the PowerPoint style file \'%s\'.\n"%args.stylepptx)
sys.exit(1)
if not (os.path.isfile(args.fontdir + args.font) and os.access(args.fontdir + args.font, os.R_OK)):
sys.stderr.write("Cannot read the the font file \'%s\'.\n"%(args.fontdir + args.font))
sys.exit(1)
# Decrement the style master slide number for offset counting
if (args.stylemasterslide < 1):
sys.stderr.write("Invalid style document master slide number \'%d\'.\n"%args.stylemasterslide)
sys.exit(1)
args.stylemasterslide -= 1
# Retrieve all the notes from the pptx file in a page number-keyed dictionary
words = getnotes(args.pptxfile)
# Search for {{{whatever}}} and build a new dictionary of the page numbers and whatevers
toc = {}
for key in words:
m=re.search(r'{{{(.*)}}}',words[key])
if m is not None:
# key+1 reflects the page number from offset counting
toc[key+1] = m.groups(0)[0]
# Generate the output ToC slide using the identified page numbers and titles
createtoc(args, toc)
print "Finished."
sys.exit(0)
``` |
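The dot-leader arithmetic in `generateslide` can be checked in isolation. A sketch using the script's own constants; the font path and title string are assumptions, and it relies on the same `ImageFont.getsize` call the script uses (removed in Pillow 10+):
```python
from PIL import ImageFont

MAXDOTS = 109.0
MAXPXWIDTHTEXT = 570.0     # usable pixel width of the 8.5" title text box
MAXPXWIDTHBULLETS = 580.0  # usable pixel width of the dot-leader text box

font = ImageFont.truetype("/Library/Fonts/Tahoma.ttf", 18)  # macOS default dir
dot_width = MAXPXWIDTHTEXT / MAXDOTS         # pixel width of one '.' at 18 pt
title_px = font.getsize("Wireless Client Attacks")[0]
leader = "." * int((MAXPXWIDTHBULLETS - title_px) / dot_width)
```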
{
"source": "JosXa/bachelor-thesis-insurance",
"score": 2
} |
#### File: JosXa/bachelor-thesis-insurance/appglobals.py
```python
import os
import pathlib
from playhouse.db_url import connect
import settings
_db = None
ROOT_DIR = pathlib.Path(os.path.dirname(os.path.abspath(__file__)))
DATA_DIR = ROOT_DIR / 'model' / 'data'
def db():
global _db
if not _db:
_db = connect(settings.DATABASE_URL, autorollback=True)
return _db
# globals
db = db()
```
#### File: bachelor-thesis-insurance/clients/botapiclients.py
```python
from abc import ABCMeta, abstractmethod
from typing import List, TypeVar, Any
from model import Update
ChatAction = TypeVar('ChatAction')
class BotAPIClient(object, metaclass=ABCMeta):
"""
Base class for a Chatbot client.
In order to add another bot platform to the system, inherit from this class and implement all methods accordingly.
"""
@property
@abstractmethod
def client_name(self) -> str: pass
@abstractmethod
def initialize(self):
""" Called to set internal state and perform additional initialization """
pass
@abstractmethod
def start_listening(self):
""" Start receiving updates by webhook or long polling """
pass
@abstractmethod
def add_plaintext_handler(self, callback):
""" Adds a handler filtering only plain text updates """
pass
@abstractmethod
def download_voice(self, voice_id, filepath):
""" Downloads a voice message from the bot API """
pass
@abstractmethod
def add_voice_handler(self, callback):
""" Adds a handler filtering only voice memo updates """
pass
@abstractmethod
def add_media_handler(self, callback):
""" Adds a handler filtering media (photo, video, sticker, gif, etc.) updates """
pass
@abstractmethod
def set_start_handler(self, callback):
""" Adds the start handler, called when the bot is started by the user """
pass
@abstractmethod
def add_error_handler(self, callback):
"""
Adds an error handler, called when the internal update processing fails
with exception
"""
pass
@abstractmethod
def show_typing(self, user):
"""
Displays an "is typing" notification to the user
"""
pass
@abstractmethod
def unify_update(self, update: Any) -> Update:
"""
Creates the internal `Update` object our backend works with
from whatever type of update or event this particular bot API uses.
"""
pass
@abstractmethod
def perform_actions(self, actions: List[ChatAction]):
"""
Performs a sequence of `ChatActions` planned by the `DialogManager`
"""
pass
```
#### File: bachelor-thesis-insurance/core/planningagent.py
```python
from abc import ABCMeta, abstractmethod
from logic.responsecomposer import ResponseComposer
class IPlanningAgent(metaclass=ABCMeta):
""" Base class for a PlanningAgent """
@abstractmethod
def build_next_actions(self, context) -> ResponseComposer: pass
```
#### File: bachelor-thesis-insurance/core/routing.py
```python
import re
from abc import ABCMeta, abstractmethod
from typing import Callable
from core import MessageUnderstanding, States
from corpus import emojis
from corpus.emojis.emoji import is_emoji
from logic.intents import AFFIRMATION_INTENTS, MEDIA_INTENT, NEGATION_INTENTS
class BaseHandler(metaclass=ABCMeta):
"""
Base class for a handler to filter and route incoming utterances to their respective callbacks.
"""
def __init__(self, callback: Callable):
self.callback = callback
@abstractmethod
def matches(self, understanding: MessageUnderstanding) -> bool: pass
def __str__(self):
return self.callback.__name__
class RegexHandler(BaseHandler):
"""
Handler to filter the text of incoming utterances based on a regex `pattern`.
"""
def __init__(self, callback: Callable, pattern, flags=0):
self.pattern = re.compile(pattern, flags)
super(RegexHandler, self).__init__(callback)
def matches(self, understanding: MessageUnderstanding):
if not understanding.text:
return False
return self.pattern.match(understanding.text)
class IntentHandler(BaseHandler):
"""
Handler to filter incoming `MessageUnderstandings` based on their intents and/or parameters.
"""
def __init__(self, callback: Callable, intents=None, parameters=None):
"""
:param callback: Callback function
:param intents: List of prefixes that the intent must start with
:param parameters: List of exact parameters that must be contained in the message
"""
if not callable(callback):
raise ValueError("First argument `callback` must be callable.")
self._intents = [intents] if isinstance(intents, str) else intents
self._parameters = [parameters] if isinstance(parameters, str) else parameters
super(IntentHandler, self).__init__(callback)
def contains_intent(self, intent):
return intent in self._intents
def matches(self, understanding: MessageUnderstanding):
"""
Returns True if intents and parameters match the rules of this handler instance.
        If a **list of intents** is defined, the incoming intent must match **at least one**
        of them. The comparison is done via `intent.startswith(expected_intent)`.
If a **list of parameters** is defined, then **all** of the corresponding incoming parameters must be non-empty.
"""
if self._intents is not None:
if not any(understanding.intent.startswith(x) for x in self._intents):
return False
if self._parameters:
if not understanding.parameters:
return False
for p in self._parameters:
if p in understanding.parameters.keys():
# Check if value is non-empty
if not understanding.parameters[p]:
return False
else:
return False
return True
def __str__(self):
return f"{self.__class__.__name__}(" \
f"{self.callback.__name__}, " \
f"intents={str(self._intents)[:70]}, " \
f"parameters={self._parameters})"
def __repr__(self):
return f"{self._intents} / {self._parameters} --> {self.callback.__name__}"
class AffirmationHandler(BaseHandler):
"""
Handler to conveniently filter all affirmative intents ("yes", "correct", "smalltalk.agent.right", etc.)
"""
def __init__(self, callback):
self.intents = AFFIRMATION_INTENTS
super(AffirmationHandler, self).__init__(callback)
def matches(self, understanding: MessageUnderstanding):
if understanding.parameters:
for k, v in understanding.parameters.items():
if k in self.intents and v:
return True
if understanding.intent in self.intents:
return True
return False
class NegationHandler(BaseHandler):
"""
Handler to conveniently filter all negative intents ("no", "wrong", "smalltalk.dialog.wrong", etc.)
"""
def __init__(self, callback):
self.intents = NEGATION_INTENTS
super(NegationHandler, self).__init__(callback)
def matches(self, understanding: MessageUnderstanding):
if understanding.parameters:
for k, v in understanding.parameters.items():
if k in self.intents and v:
return True
if understanding.intent in self.intents:
return True
return False
class MediaHandler(BaseHandler):
"""
Handler to filter incoming media entities (Videos, Images, Stickers, etc.)
The `MessageUnderstanding` will have a special flag for an intent in that case, set by the `DialogManager`.
"""
def __init__(self, callback):
super(MediaHandler, self).__init__(callback)
def matches(self, understanding: MessageUnderstanding):
return understanding.intent == MEDIA_INTENT
class EmojiHandler(BaseHandler):
"""
Handler to filter incoming emojis based on their sentiment score.
"""
def __init__(self, callback, negative=False, neutral=False, positive=False, threshold=0.5):
"""
There are three distinct categories of sentiment for a particular emoji: `negative`, `neutral` and `positive`.
:param callback: Callback to execute on match
:param negative: Whether to match emojis with negative sentiment
:param neutral: Whether to match emojis with neutral sentiment
:param positive: Whether to match emojis with positive sentiment
:param threshold: Ratio between 0 and 1 that is applied to each category
"""
self.negative = negative
self.neutral = neutral
self.positive = positive
self.all_emotions = not any((negative, neutral, positive))
self.threshold = threshold
super(EmojiHandler, self).__init__(callback)
def matches(self, understanding: MessageUnderstanding):
        if not understanding.text:
            return False
        # Short-circuit if all emotions (=all emojis) should be matched
        if self.all_emotions:
return any(is_emoji(c) for c in understanding.text)
total = 0
neg = 0
neut = 0
pos = 0
for x in understanding.text:
emoji = emojis.sentiments.get(x)
if not emoji:
continue
total += 1
neg += emoji["negative"]
neut += emoji["neutral"]
pos += emoji["positive"]
if total == 0:
return False
return any((
self.negative and (neg / total) >= self.threshold,
self.neutral and (neut / total) >= self.threshold,
self.positive and (pos / total) >= self.threshold,
))
class Router(object):
"""
Holds handlers that the application defines.
- `stateless` handlers are matched independent of the current state of the dialog.
- `states` is a mapping of a particular `state` to its appropriate handlers.
- `fallbacks` are handlers that are matched if no matching handler could be found for the current state of the
dialog.
"""
def __init__(self, rules=None, warn_bypassed=True):
self.warn_bypassed = warn_bypassed
self.states = {}
self.fallbacks = []
self.stateless = []
if rules:
self.add_rules_dict(rules)
def add_rules_dict(self, rules_dict):
states = rules_dict.get('dialog_states', {})
fallbacks = rules_dict.get('fallbacks', []) + rules_dict.get(States.FALLBACK, [])
stateless = rules_dict.get('stateless', []) + rules_dict.get(States.STATELESS, [])
self.fallbacks.extend(self._flatten(fallbacks))
self.stateless.extend(self._flatten(stateless))
for state, handlers in states.items():
handler_list = self._flatten(handlers)
self.states.setdefault(state, []).extend(handler_list)
def iter_stateless_matches(self, understanding: MessageUnderstanding):
for rule in self.stateless:
# Non-breaking, yield all
if rule.matches(understanding):
yield rule
def find_matching_state_handler(self, state, understanding: MessageUnderstanding):
for rule in self.states.get(state, []):
# break after first occurence
if rule.matches(understanding):
return rule
return None
def get_fallback_handler(self, understanding: MessageUnderstanding):
for rule in self.fallbacks:
# break after first occurence
if rule.matches(understanding):
return rule
return None
@staticmethod
def _flatten(obj):
for i in obj:
if isinstance(i, (list, tuple)):
yield from Router._flatten(i)
else:
yield i
```
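A minimal sketch of wiring these handlers into a `Router`; the callbacks, intents, and state names are invented, and `AFFIRMATION_INTENTS` is assumed to contain "smalltalk.agent.right" as the `AffirmationHandler` docstring indicates:
```python
from core import MessageUnderstanding
from core.routing import AffirmationHandler, IntentHandler, RegexHandler, Router

def ask_name(composer, context): ...
def confirm(composer, context): ...
def debug_dump(composer, context): ...

router = Router(rules={
    "stateless": [RegexHandler(debug_dump, r"^/debug")],
    "dialog_states": {
        "asking_name": [IntentHandler(ask_name, intents=["user_gives_name"])],
    },
    "fallbacks": [AffirmationHandler(confirm)],
})

u = MessageUnderstanding(text="yes", intent="smalltalk.agent.right")
assert router.find_matching_state_handler("asking_name", u) is None  # no intent match
assert router.get_fallback_handler(u).callback is confirm            # fallback fires
```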
#### File: bachelor-thesis-insurance/core/understanding.py
```python
import datetime
from typing import Dict
class MessageUnderstanding:
"""
Data holding class for an incoming utterance, enriched with NLU information.
"""
def __init__(
self,
text: str,
intent: str,
parameters: Dict[str, str] = None,
contexts=None,
date: datetime.datetime = None,
score: float = None,
media_location: str = None):
self.text = text
self.intent = intent
self.parameters = parameters if parameters else None
        self.contexts = contexts if contexts else None
self.score = score
self.date = date if date else datetime.datetime.now()
self.media_location = media_location
def __str__(self):
params = {k: v for k, v in self.parameters.items() if v} if self.parameters else None
return f"Understanding('{self.intent}'" \
f"{', ' + str(params) if params else ''})" \
f"{', ' + str(self.contexts) if any(self.contexts) else ''}"
```
#### File: bachelor-thesis-insurance/corpus/media.py
```python
import os
from mwt import mwt
from appglobals import ROOT_DIR
media_path = os.path.join(ROOT_DIR, 'assets', 'files')
all_media = os.listdir(media_path)
phone_images_path = os.path.join(ROOT_DIR, 'corpus', 'phonedata', 'photo')
all_phone_images = os.listdir(phone_images_path)
@mwt(120)
def get_file_by_media_id(media_id):
"""
Searches for a media file or phone model image by its `media_id`
:param media_id: Filename without extension
:return: Absolute file path
"""
try:
file = next(x for x in all_media if os.path.splitext(x)[0].lower() == media_id.lower())
return os.path.join(media_path, file)
except StopIteration:
pass
try:
file = next(x for x in all_phone_images if os.path.splitext(x)[0].lower() == media_id.lower())
return os.path.join(phone_images_path, file)
except StopIteration:
return None
```
#### File: bachelor-thesis-insurance/logic/planning.py
```python
import datetime
import random
from collections import Counter
from typing import List, Union
from logzero import logger as log
from const import MONTHS
from core import Context
from core.dialogmanager import StopPropagation
from core.planningagent import IPlanningAgent
from core.routing import Router
from corpus.responsetemplates import ResponseTemplate, SelectiveTemplateLoader, TemplateRenderer, TemplateSelector
from logic.responsecomposer import ResponseComposer
from logic.rules.claimhandlers import excuse_did_not_understand
from logic.rules.smalltalkhandlers import no_rule_found
from model import UserAnswers
from util import timing
class PlanningAgent(IPlanningAgent):
"""
Concrete implementation of the IPlanningAgent interface.
This agent is responsible for matching and execution of the routes defined in the `application_router`,
which is a state machine.
In abstract terms, the planning agent builds up a series of `ChatActions` to be executed by a bot API
client.
"""
def __init__(self, router: Router):
self.router = router
@staticmethod
def _get_shared_parameters(context):
"""
Returns the rendering and condition evaluation environment for Jinja2 templates
"""
def chance(value: float) -> bool:
return random.random() < value
return dict(
user=context.user,
get_answer=lambda identifier: UserAnswers.get_answer(context.user, identifier),
has_answered=lambda identifier: UserAnswers.has_answered(context.user, identifier),
questionnaire_completion=context.questionnaire_completion_ratio,
user_recent=context.has_incoming_intent,
bot_recent=context.has_outgoing_intent,
question=context.current_question,
questionnaire=context.current_questionnaire,
overall_completion=context.overall_completion_ratio,
num_actions=lambda intent: len(context.filter_outgoing_utterances(
lambda ca: intent in ca.intents, 12)),
get=lambda key: context.get(key, None),
formal=context.user.formal_address,
informal=not context.user.formal_address,
chance=chance,
month=lambda n: MONTHS.get(n),
is_this_year=lambda y: y == datetime.datetime.now().year
)
@staticmethod
def _create_composer(context):
"""
Creates a ResponseComposer instance with shared parameters
"""
params = PlanningAgent._get_shared_parameters(context)
return ResponseComposer(
context.user,
SelectiveTemplateLoader(
params,
template_selector=LeastRecentlyUsedSelector(context)),
TemplateRenderer(params)
)
def build_next_actions(self, context: Context) -> Union[ResponseComposer, None]:
u = context.last_user_utterance
if u is None:
return
composer = self._create_composer(context)
text = f'"{u.text[:50]}"' if u.text else ''
log.info(f'Incoming message: {text}, {u}')
log.debug(f'Current dialog states: {context.dialog_states}')
# Execute every matching stateless handler first
for handler in self.router.iter_stateless_matches(u):
try:
if handler.callback(composer, context):
log.debug(f"Stateless handler triggered: {handler}")
except StopPropagation:
# Some handlers may stop the propagation of the update through the chain of state handlers
if composer.is_empty:
log.error("StopPropagation was raised but no chat actions were constructed.")
return
self._log_actions(composer)
return composer
next_state = None
# Dialog states are a priority queue
for state in context.dialog_states.iter_states():
# Find exactly one handler in any of the prioritized states
handler = self.router.find_matching_state_handler(state, u)
if handler is None:
continue
next_state = handler.callback(composer, context)
log.info(f"State handler triggered: {handler}")
handler_found = True
break
else:
# If no handler was found in any of the states, try the fallbacks
handler = self.router.get_fallback_handler(u)
if handler is not None:
next_state = handler.callback(composer, context)
log.info(f"Fallback handler triggered: {handler}")
handler_found = True
else:
log.warning(f"No matching rule found in dialog_states "
f"{list(context.dialog_states.iter_states())} with intent "
f"{u.intent} and parameters {u.parameters}.")
handler_found = False
if not handler_found:
if u.intent == 'fallback':
excuse_did_not_understand(composer, context)
log.debug(f'Incoming message was not understood: "{u.text}"')
log.debug("Not updating state lifetimes.")
return composer
else:
next_state = no_rule_found(composer, context)
if isinstance(next_state, ResponseComposer):
# Lambdas return the sentence composer, which we don't need (call by reference)
next_state = None
if next_state is not None:
# Handlers return a tuple with the next state, with an integer determining the lifetime of this state as
# the last tuple value.
# For example: `("asking", "do_something", 3)` <-- lifetime of 3 incoming utterances
context.dialog_states.put(next_state)
return composer
self._log_actions(composer)
return composer
@staticmethod
def _log_actions(composer):
text = ', then '.join(f'"{x.render()}"' for x in composer.collect_actions())
log.debug(f'Sending {text}')
class LeastRecentlyUsedSelector(TemplateSelector):
"""
Selects the template that was used the least when there are multiple
valid choices to select from.
Used the KV-store of a user's context to save the counter.
"""
def __init__(self, context: Context):
self.context = context
def select_template(self, candidates: List[ResponseTemplate]):
used_templates = self.context.setdefault('used_templates', Counter())
if len(candidates) == 1:
selection = candidates[0]
else:
            best_candidate = (float("inf"), None)  # sentinel: any real usage count wins
for c in candidates:
usages = used_templates.get(c.original_text, 0)
if usages == 0:
best_candidate = (0, c)
break
if usages < best_candidate[0]:
best_candidate = (usages, c)
selection = best_candidate[1]
used_templates.update([selection.original_text])
self.context['used_templates'] = used_templates
return selection
```
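The least-used pick in `LeastRecentlyUsedSelector` reduces to this standalone sketch; the strings stand in for `ResponseTemplate.original_text`:
```python
from collections import Counter

def pick_least_used(candidates, used):
    # Mirror of select_template: a never-used candidate wins immediately,
    # otherwise the candidate with the lowest usage count is chosen.
    best = (float("inf"), None)
    for c in candidates:
        n = used.get(c, 0)
        if n == 0:
            best = (0, c)
            break
        if n < best[0]:
            best = (n, c)
    used.update([best[1]])
    return best[1]

used = Counter({"How can I help?": 2})
assert pick_least_used(["How can I help?", "Anything else?"], used) == "Anything else?"
```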
#### File: bachelor-thesis-insurance/model/useranswers.py
```python
import datetime
from collections import OrderedDict
from typing import Set
from mwt import mwt
from peewee import *
from corpus.questions import get_question_by_id
from model import User
from model.basemodel import BaseModel
class UserAnswers(BaseModel):
NO_ANSWER = 'No data'
datetime = DateTimeField()
user = ForeignKeyField(User, related_name='answers')
question_id = CharField()
answer = TextField()
# cannot_answer = BooleanField(default=False)
# will_not_answer = BooleanField(default=False)
@staticmethod
def get_answered_question_ids(user: User) -> Set[int]:
return {ua.question_id for ua in UserAnswers.select(UserAnswers.question_id).where(
UserAnswers.user == user,
)}
@classmethod
def reset_answers(cls, user: User) -> int:
return UserAnswers.delete().where(UserAnswers.user == user).execute()
@staticmethod
def add_answer(user: User, question_id: str, answer: str):
return UserAnswers.create(
user=user,
question_id=question_id,
answer=answer,
datetime=datetime.datetime.now()
)
@staticmethod
@mwt(timeout=2)
def get_answer(user: User, question_id: str) -> str:
"""
Queries for the most recent answer to the given `question_id`.
        Returns `None` if no answer is stored; note the stored value may be
        the `NO_ANSWER` marker, which `has_answered` treats as "no data".
"""
try:
val = UserAnswers.select(
UserAnswers.answer
).where(
(UserAnswers.user == user) &
(UserAnswers.question_id == question_id)
).order_by(
-UserAnswers.datetime
).first()
if val:
return val.answer
return None
except UserAnswers.DoesNotExist:
return None
@staticmethod
@mwt(timeout=2)
def has_answered(user: User, question_id: str):
ans = UserAnswers.get_answer(user, question_id)
return ans is not None and ans != UserAnswers.NO_ANSWER
@staticmethod
def get_name_answer_dict(user: User):
results = OrderedDict()
for ua in UserAnswers.select().where(
UserAnswers.user == user
).order_by(+UserAnswers.datetime):
if ua.answer == UserAnswers.NO_ANSWER:
continue
q_id = ua.question_id
if q_id in ('first_name', 'last_name'):
continue
q = get_question_by_id(ua.question_id)
if not q.name:
# We ignore the imei photo
continue
results[q.name] = ua.answer
return results
```
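A hedged usage sketch of the answer API above; it assumes a connected peewee database and an existing `User` row (the lookup is a placeholder):
```python
from model import User
from model.useranswers import UserAnswers

user = User.get(User.id == 1)  # placeholder lookup
UserAnswers.add_answer(user, "phone_model", "iPhone 8")
assert UserAnswers.get_answer(user, "phone_model") == "iPhone 8"
assert UserAnswers.has_answered(user, "phone_model")

# The NO_ANSWER marker is stored like any row but counts as "no data":
UserAnswers.add_answer(user, "imei", UserAnswers.NO_ANSWER)
assert not UserAnswers.has_answered(user, "imei")
```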
#### File: bachelor-thesis-insurance/scripts/reasonablestrings.py
```python
from corpus import all_questions
from corpus.responsetemplates import all_response_templates
def main():
print("The following strings might be generated:\n")
print("=== Question surroundings ===")
question_surroundings = all_response_templates['question_surrounding']
for q in all_questions:
for s in question_surroundings:
text = s.text_template.render({'question': q})
print(text)
print()
print("=== Hint surroundings ===")
hint_surroundings = all_response_templates['hint_surrounding']
for q in all_questions:
for s in hint_surroundings:
text = s.text_template.render({'question': q})
print(text)
if __name__ == '__main__':
main()
```
#### File: tests/core/test_dialogstates.py
```python
import pytest
from core.dialogstates import DialogStates
INITIAL_STATE = "test0"
@pytest.fixture
def ds():
return DialogStates(INITIAL_STATE)
def test_basic_usage(ds: DialogStates):
def states():
return list(ds.iter_states())
assert states() == [INITIAL_STATE]
ds.put("test1")
assert states() == ["test1"]
assert states() == ["test1"]
ds.put(("test2A", "test2B"))
ds.update_step()
assert states() == [("test2A", "test2B")]
with pytest.raises(ValueError):
ds.put(("1", "2", "3", -1)) # negative lifetime
def test_lifetime(ds: DialogStates):
def states():
return list(ds.iter_states())
ds.put(("test0", 1))
assert states() == ["test0", INITIAL_STATE]
ds.update_step()
assert states() == ["test0", INITIAL_STATE]
ds.update_step()
assert states() == [INITIAL_STATE]
ds.put(("test1", 2))
for _ in range(3):
assert states() == ["test1", INITIAL_STATE]
ds.update_step()
assert states() == [INITIAL_STATE]
ds.put((("test1A", "test1B"), 4))
assert states() == [("test1A", "test1B"), INITIAL_STATE]
ds.update_step()
ds.put(("test2", 2))
for _ in range(3):
assert states() == ["test2", ("test1A", "test1B"), INITIAL_STATE]
ds.update_step()
assert states() == [("test1A", "test1B"), INITIAL_STATE]
ds.update_step()
assert states() == [INITIAL_STATE]
``` |
{
"source": "JosXa/GetAltsClient",
"score": 2
} |
#### File: GetAltsClient/examples/quickstart.py
```python
import getaltsclient
def main():
print(
f"getaltsclient version: {getaltsclient.__version__}"
)
if __name__ == "__main__":
main()
``` |
{
"source": "JosXa/messagemerger",
"score": 2
} |
#### File: JosXa/messagemerger/main.py
```python
import json
import os
import re
import sys
import time
import urllib.parse
from functools import wraps
from pathlib import Path
from uuid import uuid4
import telegram
from logzero import logger
from telegram import InlineKeyboardButton, InlineKeyboardMarkup, ParseMode
from telegram.ext import (Updater, CommandHandler, MessageHandler, Filters, CallbackQueryHandler)
from tinydb import TinyDB, Query
from decouple import config
data_dir = Path('~', 'messagemerger').expanduser()
data_dir.mkdir(parents=True, exist_ok=True)
db = TinyDB(data_dir / 'db.json')
user_db = Query()
LIST_OF_ADMINS = [691609650, 62056065]
def start(update, context):
text = "I am a bot to help you merge messages.\n"
text += "Forward a bunch of messages and send /done command when you're done."
update.message.reply_text(text)
def send_help(update, context):
update.message.reply_text("Use /start to get information on how to use me.")
def get_admin_ids(context, chat_id):
return [admin.user.id for admin in context.bot.get_chat_administrators(chat_id)]
def restricted(func):
@wraps(func)
def wrapped(update, context, *args, **kwargs):
user_id = update.effective_user.id
if user_id not in LIST_OF_ADMINS:
print("Unauthorized access denied for {}.".format(user_id))
return
return func(update, context, *args, **kwargs)
return wrapped
def store_forwarded_message(update, context):
user_id = update.message.from_user.id
try:
first_name = update.message.forward_from.first_name + ': '
except AttributeError:
first_name = "HiddenUser: "
text = first_name + update.message.text_markdown
scheme = [text]
context.user_data.setdefault(user_id, []).extend(scheme)
def split_messages(update, context):
user_id = update.message.from_user.id
try:
current_contents = context.user_data[user_id]
text = "\n".join(current_contents)
first_name = text.split(': ')[0]
text = text.replace(str(first_name) + ': ', '')
text = re.sub(r'\n+', '\n', text).strip()
messages = text.splitlines()
filtered_chars = ['$', '&', '+', ',', ':', ';', '=', '?', '@', '#', '|', '<', '>', '.', '^', '*', '(', ')', '%',
'!', '-', '_']
for part in messages:
if part in filtered_chars:
continue
else:
update.message.reply_text(part, parse_mode=ParseMode.MARKDOWN)
except IndexError:
pass
except KeyError:
update.message.reply_text("Forward a merged message.")
finally:
context.user_data.clear()
def done(update, context):
user_id = update.message.from_user.id
try:
data = context.user_data[user_id]
message_id = uuid4()
db.insert({'message_id': str(message_id), 'text': data})
text = "\n".join([i.split(': ', 1)[1] for i in data])
if len(text) <= 4096:
url_msg = text.replace('_', '__').replace('*', '**')
query = urllib.parse.quote(url_msg)
share_url = 'tg://msg_url?url=' + query
markup = InlineKeyboardMarkup([[InlineKeyboardButton("📬 Share", url=share_url)], [
InlineKeyboardButton("📢 Publish to channel", callback_data='{}'.format(message_id)),
InlineKeyboardButton("🗣 Show names", callback_data='{};show_dialogs'.format(message_id))]])
update.message.reply_text(text, reply_markup=markup, parse_mode=ParseMode.MARKDOWN)
else:
messages = [text[i: i + 4096] for i in range(0, len(text), 4096)]
for part in messages:
update.message.reply_text(part, parse_mode=ParseMode.MARKDOWN)
time.sleep(1)
except KeyError:
update.message.reply_text("Forward some messages.")
finally:
context.user_data.clear()
def post(update, context):
user_id = update.effective_user.id
context.bot.send_chat_action(chat_id=user_id, action=telegram.ChatAction.TYPING)
query = update.callback_query
query_data = query.data.split(';')
message_id = query_data[0]
search = db.get(user_db['message_id'] == message_id)
json_str = json.dumps(search)
resp = json.loads(json_str)
data = resp.get('text')
try:
if query_data[1] == "show_dialogs":
text = "\n".join(data)
url_msg = text.replace('_', '__').replace('*', '**')
share_url = 'tg://msg_url?url=' + urllib.parse.quote(url_msg)
markup = InlineKeyboardMarkup([[InlineKeyboardButton("📬 Share", url=share_url)], [
InlineKeyboardButton("📢 Publish to channel", callback_data='{}'.format(message_id)),
InlineKeyboardButton("🙈 Hide names", callback_data='{};hide_dialogs'.format(message_id))]])
query.edit_message_text(text=text, reply_markup=markup, parse_mode=ParseMode.MARKDOWN)
elif query_data[1] == "hide_dialogs":
text = "\n".join([i.split(': ', 1)[1] for i in data])
url_msg = text.replace('_', '__').replace('*', '**')
share_url = 'tg://msg_url?url=' + urllib.parse.quote(url_msg)
markup = InlineKeyboardMarkup([[InlineKeyboardButton("📬 Share", url=share_url)], [
InlineKeyboardButton("📢 Publish to channel", callback_data='{}'.format(message_id)),
InlineKeyboardButton("🗣 Show names", callback_data='{};show_dialogs'.format(message_id))]])
query.edit_message_text(text=text, reply_markup=markup, parse_mode=ParseMode.MARKDOWN)
else:
search = db.search(user_db['user_id'] == str(user_id))
json_str = json.dumps(search[0])
resp = json.loads(json_str)
channel_id = resp['channel_id']
text = "\n".join([i.split(': ', 1)[1] for i in data])
context.bot.send_message(chat_id=channel_id, text=text, parse_mode=ParseMode.MARKDOWN)
context.bot.answer_callback_query(query.id, text="The message has been posted on your channel.",
show_alert=False)
except TypeError:
context.bot.send_message(chat_id=user_id,
text="I am unable to retrieve and process this message, please forward this "
"again.")
except IndexError:
context.bot.send_message(chat_id=user_id, text="You haven't added any channel yet, send /add followed by your "
"channel's id which can be found using @ChannelIdBot.")
def add(update, context):
user_id = update.message.from_user.id
channel_id = ' '.join(context.args)
    if context.bot.id in get_admin_ids(context, channel_id):
db.insert({'user_id': str(user_id), 'channel_id': str(channel_id)})
context.bot.send_message(chat_id=channel_id, text="Your channel has been successfully added!")
context.bot.send_message(chat_id=user_id, text="Your channel has been successfully added!")
else:
context.bot.send_message(chat_id=user_id,
text="Please double-check if the bot is an administrator in your channel.")
@restricted
def backup(update, context):
    update.message.reply_document(document=open(data_dir / 'db.json', 'rb'))
@restricted
def backup_handler(update, context):
file = context.bot.get_file(update.message.document.file_id)
file_name = update.message.document.file_name
os.remove(file_name)
file.download(file_name)
update.message.reply_text(text="Alright! I have uploaded the backup.")
def error_callback(update, context):
logger.warning('Update "%s" caused error "%s"', update, context.error)
def main():
updater = Updater(config("BOT_TOKEN"), use_context=True)
dp = updater.dispatcher
dp.add_handler(CommandHandler("start", start))
dp.add_handler(CommandHandler("help", send_help))
dp.add_handler(CommandHandler("add", add))
dp.add_handler((CallbackQueryHandler(post)))
dp.add_handler(CommandHandler("done", done))
dp.add_handler(CommandHandler("split", split_messages))
dp.add_handler(MessageHandler(Filters.forwarded & Filters.text, store_forwarded_message))
dp.add_handler(CommandHandler("backup", backup))
dp.add_handler(MessageHandler(Filters.document, backup_handler))
dp.add_error_handler(error_callback)
updater.start_polling()
logger.info("Ready to rock..!")
updater.idle()
if __name__ == "__main__":
main()
``` |
{
"source": "JosXa/python-chain",
"score": 3
} |
#### File: acceptance/steps/step_chain.py
```python
import chain
from behave import given, when, then
from itertools import count
from unittest.mock import MagicMock
from chain.core.domains.state import State
@given("a random number of static chains")
def step_create_random_static_chains(context: dict) -> None:
"""Create a Random Number of Static Chains.
This step will generate a random number of static chains.
"""
nb_chains = context.fake.pyint()
context.chain = [chain(context.dummy_function) for _ in range(nb_chains)]
@given("an odd random number of static chains")
def step_create_odd_random_static_chains(context: dict) -> None:
"""Create an Odd Random Number of Static Chains.
This step will generate an odd random number of static chains.
"""
def dummy(context: State) -> None:
pass
nb_chains = context.fake.pyint(min=1, step=2)
context.chain = [chain(dummy) for _ in range(nb_chains)]
@given("a single static chain")
def step_create_single_random_static_chain(context: dict) -> None:
"""Create a Random Number of Static Chains.
This step will generate a random number of static chain.
"""
def dummy(context: State) -> None:
pass
context.chain = [chain(dummy)]
@given("a new chain with mocked function")
def step_create_mocked_chain(context: dict) -> None:
"""Create a Chain with Mocked Function.
This step will generate a new chain with mocked function and append it on the end
of the created chain.
"""
if "chain" not in context:
context.chain = list()
context.mocked_function = MagicMock(return_value=None)
context.chain.append(chain(context.mocked_function))
@given("add a return value to the mocked function")
def step_add_return_value(context: dict) -> None:
"""Add a Return Value to the Mocked Function.
This step will generate a new return value to the mocked function on the chain.
"""
context.expected_output = context.fake.pydict()
context.mocked_function.return_value = context.expected_output
@given("add an arg return value to the mocked function")
def step_add_return_value_as_args(context: dict) -> None:
"""Add a Return Value to the Mocked Function as Args.
This step will generate a new return value as args to be passed to the next function
on the chain.
"""
context.expected_args = context.fake.pytuple()
context.expected_kwargs = context.fake.pydict()
context.mocked_function.return_value = (
context.expected_args,
context.expected_kwargs,
)
@given("a new chain returning random autoincremented data")
def step_create_autoincrementing_chain(context: dict) -> None:
"""Create a Autoincrementing Chain.
This step will generate a new chain with a function that will always return an
autoincremented data.
"""
if "chain" not in context:
context.chain = list()
context.initial_state.count = count()
def autoincrement(context: State) -> tuple:
counted = next(context.count)
return (counted,), dict()
context.chain.append(chain(autoincrement))
@given("a decorated chain function with output")
def step_create_decorated_function_with_output(context: dict) -> None:
"""Create a New Decorated Chain Function With Output.
This step will generate a new decorated chain function.
"""
expected_output = context.fake.pydict()
@chain
def dummy(context: State, expected_output=expected_output) -> None:
return expected_output
if "chain" not in context:
context.chain = list()
context.expected_output = expected_output
context.chain.append(dummy)
@given("a decorated chain function without output")
def step_create_decorated_function_without_output(context: dict) -> None:
"""Create a New Decorated Chain Function Without Output.
This step will generate a new decorated chain function without adding an output.
"""
expected_output = context.fake.pydict()
@chain
def bar(context: State) -> None:
context.bar = "bar"
if "chain" not in context:
context.chain = list()
context.expected_output = expected_output
context.chain.append(bar)
@when("I reverse the chain")
def step_revese_chain(context: dict) -> None:
"""Reverse the Generated Chain.
This step will reverse the current chain.
"""
context.chain = context.chain[::-1]
@when("I add a counter on the current state")
def step_add_counter_to_state(context: dict) -> None:
"""Add Counter on Current State.
This step will add a counter on the current initial state.
"""
context.initial_state.count = count()
@then("the mocked function should have been called with correct data")
def step_check_args_chain(context: dict) -> None:
"""Check if We Are Passing Args.
This step will check if, during a chain, we are passing args between the chained
functions.
"""
calls = context.mocked_function.call_args_list
last_call = calls[-1]
args = last_call[0]
kwargs = last_call[1]
context.expected_kwargs.update({"context": kwargs["context"]})
assert args == context.expected_args
assert kwargs == context.expected_kwargs
assert kwargs["context"].get_state() == context.initial_state.get_state()
@then("the context should not persist data")
def step_check_reversed_chain(context: dict) -> None:
"""Check the Result of the Reversed Chain.
This step will check the result of the reversed chain to see if it has runned
ignoring the previous state.
"""
calls = context.mocked_function.call_args_list
last_call = calls[-1]
args = last_call[0]
kwargs = last_call[1]
assert args[0] == 0
```
#### File: domains/chain/test_chain.py
```python
from pytest import main
from sys import argv
from faker import Faker
from itertools import count
from unittest.mock import patch, MagicMock, PropertyMock
from chain.tests.helpers import DependencyMocker
from chain.core.domains.chain.chain import Chain
file_path = "chain.core.domains.chain.chain"
dependencies = DependencyMocker(file_path)
@patch.multiple(file_path, **dependencies.to_dict())
def test_split_output(fake: Faker, **kwargs) -> None:
"""Test the Split Result Method.
Ensures that our chain can split the previous result from the current context if
it is a tuple. If it is not, it should return an default arg tuple.
"""
# Given
chain = Chain(lambda: None)
args = fake.pytuple()
kwargs = fake.pydict()
# When
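    # `_Chain__split_output` is the name-mangled form of the private
    # `__split_output` method; calling it directly lets the test cover
    # the splitting behaviour in isolation.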
with_args = chain._Chain__split_output((args, kwargs))
without_args = chain._Chain__split_output(fake.pydict())
# Then
assert with_args == (args, kwargs)
assert without_args == (tuple(), dict())
@patch.multiple(file_path, **dependencies.to_dict())
def test_execute(fake: Faker, Context: MagicMock = None, **kwargs) -> None:
"""Test the Execution of a Chain.
Ensures that we can execute a chain given a specific context.
"""
# Given
context = Context()
prev_args, prev_kwargs = (tuple(), dict())
function = MagicMock(return_value=None)
context_merge = MagicMock()
chain_split_output = MagicMock(return_value=(prev_args, prev_kwargs))
chain = Chain(function)
prop_context_merge = PropertyMock(return_value=context_merge)
prop_context_output = PropertyMock(return_value=fake.word())
chain.initial_state = fake.word()
chain._Chain__split_output = chain_split_output
type(context).output = prop_context_output
type(context).merge_context = prop_context_merge
# When
result = chain.execute(context=context)
# Then
chain_split_output.assert_called_once_with(context.output)
context_merge.assert_called_once_with(chain.initial_state)
function.assert_called_once_with(*prev_args, **prev_kwargs, context=context.current)
assert result == context
@patch.multiple(file_path, **dependencies.to_dict())
def test_call(fake: Faker, **kwargs) -> None:
"""Test the Direct Call of a Chain.
Ensures that we can execute a chain function directly.
"""
# Given
expected = fake.pydict()
initial_state = fake.pydict()
kwargs = fake.pydict()
args = fake.pytuple()
function = MagicMock(return_value=expected)
chain = Chain(function)
chain.initial_state = initial_state
# When
result = chain(*args, **kwargs)
# Then
function.assert_called_once_with(context=initial_state, *args, **kwargs)
assert result == expected
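
# A minimal usage sketch (not from the repository) of the direct-call
# semantics exercised by test_call above; it assumes Chain invokes the
# wrapped function as function(context=initial_state, *args, **kwargs):
#
#     link = Chain(lambda name, context=None: f"hi {name} ({context})")
#     link.initial_state = {"run": 1}   # hypothetical state
#     link("world")                     # -> "hi world ({'run': 1})"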
if __name__ == "__main__":
args = [__file__] + [arg for arg in argv[1:]]
main(args)
``` |
{
"source": "JosXa/sticker-finder",
"score": 2
} |
#### File: migrations/versions/2018_11_30_6c854954ca33_fuck_languages_part_2.py
```python
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '6c854954ca33'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('change', 'language')
op.drop_column('sticker_set', 'language')
op.drop_column('tag', 'language')
op.drop_column('user', 'language')
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('language', sa.VARCHAR(), server_default=sa.text("'english'::character varying"), autoincrement=False, nullable=True))
op.add_column('tag', sa.Column('language', sa.VARCHAR(), server_default=sa.text("'english'::character varying"), autoincrement=False, nullable=True))
op.add_column('sticker_set', sa.Column('language', sa.VARCHAR(), server_default=sa.text("'english'::character varying"), autoincrement=False, nullable=True))
op.add_column('change', sa.Column('language', sa.VARCHAR(), server_default=sa.text("'english'::character varying"), autoincrement=False, nullable=True))
```
#### File: migrations/versions/2019_04_13_888b710775ea_move_change_tags_to_new_format.py
```python
from alembic import op
import os
import sys
from sqlalchemy import or_
from sqlalchemy.orm.session import Session
# Set system path, so alembic is capable of finding the stickerfinder module
parent_dir = os.path.abspath(os.path.join(os.getcwd(), "..", "stickerfinder"))
sys.path.append(parent_dir)
from stickerfinder.models import Change, Sticker, Tag # noqa
from stickerfinder.helper.tag import get_tags_from_text # noqa
# revision identifiers, used by Alembic.
revision = '888b710775ea'
down_revision = '<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
"""Actually change the change sets to the new format."""
session = Session(bind=op.get_bind())
changes = session.query(Change) \
.order_by(Change.created_at.desc()) \
.all()
for change in changes:
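        # Tags were previously stored as free text; diff the old and new
        # tag sets to derive the added/removed tag relations.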
old_tags = set(get_tags_from_text(change.old_tags))
new_tags = set(get_tags_from_text(change.new_tags))
added_tags = list(new_tags - old_tags)
removed_tags = list(old_tags - new_tags)
added_tags = session.query(Tag) \
.filter(Tag.name.in_(added_tags)) \
.all()
removed_tags = session.query(Tag) \
.filter(or_(
Tag.is_default_language.is_(change.is_default_language),
Tag.emoji
)) \
.filter(Tag.name.in_(removed_tags)) \
.all()
change.removed_tags = removed_tags
change.added_tags = added_tags
session.commit()
def downgrade():
pass
```
#### File: telegram/callback_handlers/sticker_set.py
```python
from stickerfinder.models import StickerSet
from stickerfinder.helper.telegram import call_tg_func
from stickerfinder.helper.callback import CallbackResult
from stickerfinder.helper.keyboard import get_tag_this_set_keyboard
def handle_deluxe_set_user_chat(session, bot, action, query, payload, user):
"""Make a set a deluxe set."""
sticker_set = session.query(StickerSet).get(payload)
if CallbackResult(action).name == 'ok':
sticker_set.deluxe = True
elif CallbackResult(action).name == 'ban':
sticker_set.deluxe = False
keyboard = get_tag_this_set_keyboard(sticker_set, user)
call_tg_func(query.message, 'edit_reply_markup', [], {'reply_markup': keyboard})
```
#### File: telegram/callback_handlers/tagging.py
```python
from stickerfinder.helper.telegram import call_tg_func
from stickerfinder.helper.keyboard import main_keyboard, get_fix_sticker_tags_keyboard
from stickerfinder.helper.tag import (
send_tagged_count_message,
handle_next,
send_tag_messages,
)
from stickerfinder.models import Sticker
from stickerfinder.helper.tag_mode import TagMode
def handle_tag_next(session, bot, user, query, chat, tg_chat):
"""Send the next sticker for tagging."""
current_sticker = chat.current_sticker
handle_next(session, bot, chat, tg_chat, user)
if chat.current_sticker is not None:
keyboard = get_fix_sticker_tags_keyboard(current_sticker.file_id)
call_tg_func(query.message, 'edit_reply_markup', [], {'reply_markup': keyboard})
def handle_cancel_tagging(session, bot, user, query, chat, tg_chat):
"""Cancel tagging for now."""
# Send a message to the user, which shows how many stickers he already tagged,
# if the user was just tagging some stickers.
# Otherwise just send the normal cancel success message.
if not send_tagged_count_message(session, bot, user, chat):
call_tg_func(query, 'answer', ['All active commands have been canceled'])
call_tg_func(tg_chat, 'send_message', ['All running commands are canceled'],
{'reply_markup': main_keyboard})
chat.cancel(bot)
def handle_fix_sticker_tags(session, payload, user, chat, tg_chat):
"""Handle the `Fix this stickers tags` button."""
sticker = session.query(Sticker).get(payload)
chat.current_sticker = sticker
if chat.tag_mode not in [TagMode.STICKER_SET, TagMode.RANDOM]:
chat.tag_mode = TagMode.SINGLE_STICKER
send_tag_messages(chat, tg_chat, user)
def handle_continue_tagging_set(session, bot, payload, user, chat, tg_chat):
"""Handle the `continue tagging` button to enter a previous tagging session at the same point."""
chat.cancel(bot)
chat.tag_mode = TagMode.STICKER_SET
sticker = session.query(Sticker).get(payload)
chat.current_sticker = sticker
send_tag_messages(chat, tg_chat, user)
```
#### File: telegram/commands/maintenance.py
```python
from sqlalchemy import distinct
from telegram.ext import run_async
from datetime import datetime, timedelta
from stickerfinder.helper.sticker_set import refresh_stickers
from stickerfinder.helper.keyboard import admin_keyboard
from stickerfinder.helper.session import session_wrapper
from stickerfinder.helper.telegram import call_tg_func
from stickerfinder.helper.maintenance import check_maintenance_chat, check_newsfeed_chat
from stickerfinder.helper.cleanup import full_cleanup
from stickerfinder.models import (
StickerSet,
Sticker,
sticker_tag,
Tag,
User,
InlineQuery,
)
@run_async
@session_wrapper(admin_only=True)
def stats(bot, update, session, chat, user):
"""Send a help text."""
# Users
one_month_old = datetime.now() - timedelta(days=30)
month_user_count = session.query(User) \
.join(User.inline_queries) \
.filter(InlineQuery.created_at > one_month_old) \
.group_by(User) \
.count()
one_week_old = datetime.now() - timedelta(days=7)
week_user_count = session.query(User) \
.join(User.inline_queries) \
.filter(InlineQuery.created_at > one_week_old) \
.group_by(User) \
.count()
total_user_count = session.query(User).join(User.inline_queries).group_by(User).count()
# Tags and emojis
total_tag_count = session.query(sticker_tag.c.sticker_file_id) \
.join(Tag, sticker_tag.c.tag_name == Tag.name) \
.filter(Tag.emoji.is_(False)) \
.count()
english_tag_count = session.query(Tag) \
.filter(Tag.is_default_language.is_(True)) \
.filter(Tag.emoji.is_(False)) \
.count()
international_tag_count = session.query(Tag) \
.filter(Tag.is_default_language.is_(False)) \
.filter(Tag.emoji.is_(False)) \
.count()
emoji_count = session.query(Tag).filter(Tag.emoji.is_(True)).count()
# Stickers and sticker/text sticker/tag ratio
sticker_count = session.query(Sticker).count()
tagged_sticker_count = session.query(distinct(sticker_tag.c.sticker_file_id)) \
.join(Tag, sticker_tag.c.tag_name == Tag.name) \
.filter(Tag.emoji.is_(False)) \
.count()
text_sticker_count = session.query(Sticker) \
.filter(Sticker.text.isnot(None)) \
.count()
# Sticker set stuff
sticker_set_count = session.query(StickerSet).count()
normal_set_count = session.query(StickerSet) \
.filter(StickerSet.nsfw.is_(False)) \
.filter(StickerSet.furry.is_(False)) \
.filter(StickerSet.banned.is_(False)) \
.filter(StickerSet.is_default_language.is_(True)) \
.count()
deluxe_set_count = session.query(StickerSet).filter(StickerSet.deluxe.is_(True)).count()
nsfw_set_count = session.query(StickerSet).filter(StickerSet.nsfw.is_(True)).count()
furry_set_count = session.query(StickerSet).filter(StickerSet.furry.is_(True)).count()
banned_set_count = session.query(StickerSet).filter(StickerSet.banned.is_(True)).count()
not_english_set_count = session.query(StickerSet).filter(StickerSet.is_default_language.is_(False)).count()
# Inline queries
total_queries_count = session.query(InlineQuery).count()
last_day_queries_count = session.query(InlineQuery)\
.filter(InlineQuery.created_at > datetime.now() - timedelta(days=1)) \
.count()
stats = f"""Users:
=> last week: {week_user_count}
=> last month: {month_user_count}
=> total: {total_user_count}
Tags:
=> total: {total_tag_count}
=> english: {english_tag_count}
=> international: {international_tag_count}
=> emojis: {emoji_count}
Stickers:
=> total: {sticker_count}
=> with tags: {tagged_sticker_count}
=> with text: {text_sticker_count}
Sticker sets:
=> total: {sticker_set_count}
=> normal: {normal_set_count}
=> deluxe: {deluxe_set_count}
=> nsfw: {nsfw_set_count}
=> furry: {furry_set_count}
=> banned: {banned_set_count}
=> international: {not_english_set_count}
Total queries : {total_queries_count}
=> last day: {last_day_queries_count}
"""
call_tg_func(update.message.chat, 'send_message', [stats], {'reply_markup': admin_keyboard})
@run_async
@session_wrapper(admin_only=True)
def refresh_sticker_sets(bot, update, session, chat, user):
"""Refresh all stickers."""
sticker_sets = session.query(StickerSet) \
.filter(StickerSet.deleted.is_(False)) \
.all()
progress = f'Found {len(sticker_sets)} sets.'
call_tg_func(update.message.chat, 'send_message', args=[progress])
count = 0
for sticker_set in sticker_sets:
refresh_stickers(session, sticker_set, bot)
count += 1
if count % 1000 == 0:
progress = f'Updated {count} sets ({len(sticker_sets) - count} remaining).'
call_tg_func(update.message.chat, 'send_message', args=[progress])
call_tg_func(update.message.chat, 'send_message',
['All sticker sets are refreshed.'], {'reply_markup': admin_keyboard})
@run_async
@session_wrapper(admin_only=True)
def refresh_ocr(bot, update, session, chat, user):
"""Refresh all stickers and rescan for text."""
sticker_sets = session.query(StickerSet).all()
call_tg_func(update.message.chat, 'send_message',
args=[f'Found {len(sticker_sets)} sticker sets.'])
count = 0
for sticker_set in sticker_sets:
refresh_stickers(session, sticker_set, bot, refresh_ocr=True)
count += 1
if count % 200 == 0:
progress = f'Updated {count} sets ({len(sticker_sets) - count} remaining).'
call_tg_func(update.message.chat, 'send_message', args=[progress])
call_tg_func(update.message.chat, 'send_message',
['All sticker sets are refreshed.'], {'reply_markup': admin_keyboard})
@run_async
@session_wrapper(admin_only=True)
def flag_chat(bot, update, session, chat, user):
"""Flag a chat as maintenance or ban chat."""
chat_type = update.message.text.split(' ', 1)[1].strip()
# Flag chat as maintenance channel
if chat_type == 'maintenance':
chat.is_maintenance = not chat.is_maintenance
return f"Chat is {'now' if chat.is_maintenance else 'no longer' } a maintenance chat."
# Flag chat as newsfeed channel
elif chat_type == 'newsfeed':
chat.is_newsfeed = not chat.is_newsfeed
return f"Chat is {'now' if chat.is_newsfeed else 'no longer' } a newsfeed chat."
return 'Unknown flag.'
@run_async
@session_wrapper(admin_only=True)
def start_tasks(bot, update, session, chat, user):
"""Start the handling of tasks."""
if not chat.is_maintenance and not chat.is_newsfeed:
call_tg_func(update.message.chat, 'send_message',
['The chat is neither a maintenance nor a newsfeed chat'],
{'reply_markup': admin_keyboard})
return
elif chat.current_task:
return 'There already is a task active for this chat.'
if chat.is_maintenance:
check_maintenance_chat(session, update.message.chat, chat)
if chat.is_newsfeed:
check_newsfeed_chat(bot, session, chat)
@run_async
@session_wrapper(admin_only=True)
def cleanup(bot, update, session, chat, user):
"""Triggering a one time conversion from text changes to tags."""
threshold = datetime.strptime('Jan 1 2000', '%b %d %Y')
full_cleanup(session, threshold, update=update)
call_tg_func(update.message.chat, 'send_message',
['Cleanup finished.'], {'reply_markup': admin_keyboard})
```
#### File: telegram/commands/usage.py
```python
from telegram.ext import run_async
from stickerfinder.helper.session import session_wrapper
from stickerfinder.models import StickerUsage, Sticker
@run_async
@session_wrapper(check_ban=True, private=True)
def forget_set(bot, update, session, chat, user):
"""Forget every usage of the set of the previously posted sticker."""
if chat.current_sticker is None:
return "You need to send me a sticker first."
usage_file_ids = session.query(StickerUsage.sticker_file_id) \
.join(Sticker) \
.filter(Sticker.sticker_set == chat.current_sticker.sticker_set) \
.filter(StickerUsage.user == user) \
.filter(StickerUsage.sticker_file_id == Sticker.file_id) \
.all()
usage_file_ids = [file_id[0] for file_id in usage_file_ids]
session.expire_all()
session.query(StickerUsage) \
.filter(StickerUsage.sticker_file_id.in_(usage_file_ids)) \
.filter(StickerUsage.user == user) \
.delete(synchronize_session=False)
return "I forgot all of your usages of this set's sticker."
```
#### File: telegram/inline_query/__init__.py
```python
from uuid import uuid4
from sqlalchemy.exc import IntegrityError
from telegram.ext import run_async
from telegram import InlineQueryResultCachedSticker
from stickerfinder.helper.session import hidden_session_wrapper
from stickerfinder.models import (
InlineQuery,
InlineQueryRequest,
)
from .context import Context
from .search import (
search_stickers,
search_sticker_sets,
)
@run_async
@hidden_session_wrapper()
def search(bot, update, session, user):
"""Handle inline queries for sticker search."""
# We don't want banned users
if user.banned:
results = [InlineQueryResultCachedSticker(
uuid4(),
sticker_file_id='CAADAQADOQIAAjnUfAmQSUibakhEFgI')]
update.inline_query.answer(results, cache_time=300, is_personal=True,
switch_pm_text="Maybe don't be a dick :)?",
switch_pm_parameter='inline')
return
offset_payload = update.inline_query.offset
# If the offset is 'done' there are no more stickers for this query.
if offset_payload == 'done':
update.inline_query.answer([], cache_time=0)
return
context = Context(update.inline_query.query, offset_payload, user)
# Create a new inline query or get the respective existing one, if we are working with an offset.
inline_query = InlineQuery.get_or_create(session, context.inline_query_id, context.query, user)
context.inline_query_id = inline_query.id
if context.mode == Context.STICKER_SET_MODE:
inline_query.mode = InlineQuery.SET_MODE
# Save this specific InlineQueryRequest
try:
saved_offset = offset_payload.split(':', 1)[1] if context.offset != 0 else 0
inline_query_request = InlineQueryRequest(inline_query, saved_offset)
session.add(inline_query_request)
session.commit()
except IntegrityError:
# This needs some explaining:
# Sometimes (probably due to slow sticker loading) the telegram clients fire queries with the same offset.
# To prevent this, we have an unique constraint on InlineQueryRequests.
# If this constraint is violated, we assume that the scenario above just happened and just don't answer.
# This prevents duplicate sticker suggestions due to slow internet connections.
session.rollback()
return
if context.mode == Context.STICKER_SET_MODE:
# Remove keyword tags to prevent wrong results
search_sticker_sets(session, update, context, inline_query_request)
else:
search_stickers(session, update, context, inline_query_request)
```
#### File: tests/testing/test_session.py
```python
from stickerfinder.models import User
from tests.factories import user_factory
def test_correct_session_handling(session, user):
"""User is created correctly."""
assert user.username == 'testuser'
second_user = user_factory(session, 5, 'testuser2')
first_user = session.query(User).get(2)
assert first_user is not None
second_user = session.query(User).get(5)
assert second_user is not None
session.delete(first_user)
session.commit()
    first_user = session.query(User).get(2)
assert first_user is None
``` |
{
"source": "JosXa/TgIntegration",
"score": 2
} |
#### File: examples/pytest/conftest.py
```python
import asyncio
import logging
from pathlib import Path
import pytest
from decouple import config
from pyrogram import Client
from tgintegration import BotController
examples_dir = Path(__file__).parent.parent
logger = logging.getLogger("tgintegration")
logger.setLevel(logging.DEBUG)
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("pyrogram").setLevel(logging.WARNING)
@pytest.yield_fixture(scope="session", autouse=True)
def event_loop(request):
"""Create an instance of the default event loop for the session."""
loop = asyncio.get_event_loop_policy().new_event_loop()
yield loop
loop.close()
@pytest.fixture(scope="session")
async def client() -> Client:
# noinspection PyCallingNonCallable
client = Client(
config("SESSION_STRING", default=None) or "tgintegration_examples",
workdir=examples_dir,
config_file=str(examples_dir / "config.ini"),
)
await client.start()
yield client
await client.stop()
@pytest.fixture(scope="module")
async def controller(client):
c = BotController(
client=client,
peer="@BotListBot",
max_wait=10.0,
wait_consecutive=0.8,
)
await c.initialize(start_client=False)
yield c
```
#### File: tests/integration/test_examples.py
```python
import pytest
pytestmark = pytest.mark.asyncio
# TODO: Bot is offline. Does anyone have a nice alternative to automate?
# async def test_dinopark_example(session_name):
# # Late import so that the autouse fixtures run first
# from examples.automation import dinoparkbot
#
# client = dinoparkbot.create_client(session_name)
# game = dinoparkbot.create_game_controller(client)
# await game.perform_full_run()
async def test_idletown_example(session_name):
# Late import so that the autouse fixtures run first
from examples.automation import idletown
idletown.MAX_RUNS = 1
client = idletown.create_client(session_name)
controller = idletown.create_game_controller(client)
await idletown.perform_full_run(controller)
async def test_readme_example(session_name):
# Late import so that the autouse fixtures run first
from examples.readme_example import readmeexample
client = readmeexample.create_client(session_name)
await readmeexample.run_example(client)
```
#### File: tests/unit/test_expectation.py
```python
from unittest.mock import Mock
import pytest
from pyrogram.types import Message
from tgintegration.expectation import Expectation
@pytest.mark.parametrize(
"min_n,max_n,num_msgs,is_sufficient,is_match",
[
# TODO: (0,0,0) ?
(1, 1, 0, False, False),
(1, 1, 1, True, True),
(1, 1, 2, True, False),
],
)
def test_expectation(
min_n: int, max_n: int, num_msgs: int, is_sufficient: bool, is_match: bool
):
obj = Expectation(min_messages=min_n, max_messages=max_n)
msgs = [Mock(Message)] * num_msgs
assert obj.is_sufficient(msgs) == is_sufficient
assert obj._is_match(msgs) == is_match
```
#### File: tgintegration/containers/reply_keyboard.py
```python
import re
from typing import List
from typing import Pattern
from typing import TYPE_CHECKING
from typing import Union
from pyrogram import filters as f
from pyrogram.filters import Filter
from pyrogram.types import KeyboardButton
from pyrogram.types import Message
from tgintegration.containers import NoButtonFound
if TYPE_CHECKING:
from tgintegration.botcontroller import BotController
from tgintegration.containers.responses import Response
class ReplyKeyboard:
"""
Represents a regular keyboard in the Telegram UI and allows to click buttons in the menu.
See Also:
[InlineKeyboard](tgintegration.InlineKeyboard)
"""
def __init__(
self,
controller: "BotController",
chat_id: Union[int, str],
message_id: int,
button_rows: List[List[KeyboardButton]],
):
self._controller: BotController = controller
self._message_id = message_id
self._peer_id = chat_id
self.rows = button_rows
def find_button(self, pattern: Pattern) -> KeyboardButton:
"""
Attempts to retrieve a clickable button anywhere in the underlying `rows` by matching the button captions with
the given `pattern`. If no button could be found, **this method raises** `NoButtonFound`.
Args:
pattern: The button caption to look for (by `re.match`).
Returns:
The `KeyboardButton` if found.
"""
compiled = re.compile(pattern)
for row in self.rows:
for button in row:
# TODO: Investigate why sometimes it's a button and other times a string
if compiled.match(button.text if hasattr(button, "text") else button):
return button
raise NoButtonFound(f"No clickable entity found for pattern r'{pattern}'")
async def _click_nowait(self, pattern, quote=False) -> Message:
button = self.find_button(pattern)
return await self._controller.client.send_message(
self._peer_id,
button.text,
reply_to_message_id=self._message_id if quote else None,
)
@property
def num_buttons(self) -> int:
"""
Returns the total number of buttons in all underlying rows.
"""
return sum(len(row) for row in self.rows)
async def click(
self, pattern: Pattern, filters: Filter = None, quote: bool = False
) -> "Response":
"""
Uses `find_button` with the given `pattern`, clicks the button if found, and waits for the bot to react. For
a `ReplyKeyboard`, this means that a message with the button's caption will be sent to the same chat.
        If no button could be found, `NoButtonFound` will be raised.
Args:
pattern: The button caption to look for (by `re.match`).
filters: Additional filters to be given to `collect`. Will be merged with a "same chat" filter and
`filters.text | filters.edited`.
quote: Whether to reply to the message containing the buttons.
Returns:
The bot's `Response`.
"""
button = self.find_button(pattern)
filters = (
filters & f.chat(self._peer_id) if filters else f.chat(self._peer_id)
) & (f.text | f.edited)
async with self._controller.collect(filters=filters) as res: # type: Response
await self._controller.client.send_message(
self._controller.peer,
button.text if hasattr(button, "text") else button,
reply_to_message_id=self._message_id if quote else None,
)
return res
```
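For reference, a minimal sketch of driving `click` (the bot, command, and button caption are assumptions; `collect` and `send_command` are used with their defaults as elsewhere in the library):
```python
# Hypothetical usage of ReplyKeyboard.click; assumes an initialized
# BotController `controller` whose peer answers /start with a keyboard.
async def demo(controller):
    async with controller.collect() as response:
        await controller.send_command("start")
    keyboard = response.reply_keyboard
    if keyboard is not None:
        # click() sends the matched caption as a message and collects
        # the bot's reaction into a new Response.
        followup = await keyboard.click(r"Help")
        print(followup.full_text)
```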
#### File: tgintegration/containers/responses.py
```python
from datetime import datetime
from typing import Any
from typing import List
from typing import Optional
from typing import Set
from typing import TYPE_CHECKING
from pyrogram.types import InlineKeyboardMarkup
from pyrogram.types import Message
from pyrogram.types import ReplyKeyboardMarkup
from tgintegration.containers import InlineKeyboard
from tgintegration.containers import ReplyKeyboard
from tgintegration.update_recorder import MessageRecorder
if TYPE_CHECKING:
from tgintegration.botcontroller import BotController
class Response:
def __init__(self, controller: "BotController", recorder: MessageRecorder):
self._controller = controller
self._recorder = recorder
self.started: Optional[float] = None
self.action_result: Any = None
# cached properties
self.__reply_keyboard: Optional[ReplyKeyboard] = None
self.__inline_keyboards: List[InlineKeyboard] = []
@property
def messages(self) -> List[Message]:
return self._recorder.messages
@property
def is_empty(self) -> bool:
return not self.messages
@property
def num_messages(self) -> int:
return len(self.messages)
@property
def full_text(self) -> str:
return "\n".join(x.text for x in self.messages if x.text) or ""
@property
def reply_keyboard(self) -> Optional[ReplyKeyboard]:
if self.__reply_keyboard:
return self.__reply_keyboard
if self.is_empty:
return None
        # Due to the way Telegram works,
        # only the *last* message with buttons in a response object matters
messages = reversed(self.messages)
for m in messages:
if isinstance(m.reply_markup, ReplyKeyboardMarkup):
last_kb_msg = m
break
else:
return None # No message with a keyboard found
reply_keyboard = ReplyKeyboard(
controller=self._controller,
chat_id=last_kb_msg.chat.id,
message_id=last_kb_msg.id,
button_rows=last_kb_msg.reply_markup.keyboard,
)
self.__reply_keyboard = reply_keyboard
return reply_keyboard
@property
def inline_keyboards(self) -> Optional[List[InlineKeyboard]]:
if self.__inline_keyboards:
return self.__inline_keyboards
if self.is_empty:
return None
inline_keyboards = [
InlineKeyboard(
controller=self._controller,
chat_id=message.chat.id,
message_id=message.id,
button_rows=message.reply_markup.inline_keyboard,
)
for message in self.messages
if isinstance(message.reply_markup, InlineKeyboardMarkup)
]
self.__inline_keyboards = inline_keyboards
return inline_keyboards
@property
def keyboard_buttons(self) -> Set[str]:
all_buttons = set()
for m in self.messages:
markup = m.reply_markup
if markup and hasattr(markup, "keyboard"):
for row in markup.keyboard:
for button in row:
                        # buttons may be KeyboardButton objects or plain strings
                        all_buttons.add(button.text if hasattr(button, "text") else button)
return all_buttons
@property
def last_message_datetime(self) -> Optional[datetime]:
return None if self.is_empty else self.messages[-1].date
@property
def last_message_timestamp(self) -> Optional[float]:
return None if self.is_empty else self.messages[-1].date.timestamp()
@property
def commands(self) -> Set[str]:
all_commands = set()
        for m in self.messages:
            # entities/caption_entities can be None when a message has none
            entity_commands = [x for x in m.entities or [] if x.type == "bot_command"]
            for e in entity_commands:
                all_commands.add(m.text[e.offset : e.offset + e.length])
            caption_entity_commands = [
                x for x in m.caption_entities or [] if x.type == "bot_command"
            ]
            for e in caption_entity_commands:
                all_commands.add(m.caption[e.offset : e.offset + e.length])
return all_commands
async def delete_all_messages(self, revoke: bool = True):
peer_id = self.messages[0].chat.id
await self._controller.client.delete_messages(
peer_id, [x.id for x in self.messages], revoke=revoke
)
def __eq__(self, other):
if not isinstance(other, Response):
return False
return (
self.full_text == other.full_text
and self.inline_keyboards == other.inline_keyboards
# TODO: self.keyboard == other.keyboard
)
def __getitem__(self, item):
return self.messages[item]
def __str__(self):
if self.is_empty:
return "Empty response"
return "\nthen\n".join(['"{}"'.format(m.text) for m in self.messages])
class InvalidResponseError(Exception):
"""
Raised when peer's response did not match the [expectation](tgintegration.expectation.Expectation).
"""
```
#### File: TgIntegration/tgintegration/expectation.py
```python
import logging
from dataclasses import dataclass
from typing import List
from typing import Union
from pyrogram.types import Message
from tgintegration.containers.responses import InvalidResponseError
from tgintegration.timeout_settings import TimeoutSettings
from tgintegration.utils.sentinel import NotSet
logger = logging.getLogger(__name__)
@dataclass
class Expectation:
"""
Defines the expected reaction of a peer.
"""
min_messages: Union[int, NotSet] = NotSet
"""
Minimum number of expected messages.
"""
max_messages: Union[int, NotSet] = NotSet
"""
Maximum number of expected messages.
"""
def is_sufficient(self, messages: List[Message]) -> bool:
n = len(messages)
if self.min_messages is NotSet:
return n >= 1
return n >= self.min_messages
def _is_match(self, messages: List[Message]) -> bool:
n = len(messages)
return (self.min_messages is NotSet or n >= self.min_messages) and (
self.max_messages is NotSet or n <= self.max_messages
)
def verify(self, messages: List[Message], timeouts: TimeoutSettings) -> None:
if self._is_match(messages):
return
n = len(messages)
        if self.min_messages is not NotSet and n < self.min_messages:
_raise_or_log(
timeouts,
"Expected {} messages but only received {} after waiting {} seconds.",
self.min_messages,
n,
timeouts.max_wait,
)
return
        if self.max_messages is not NotSet and n > self.max_messages:
_raise_or_log(
timeouts,
"Expected only {} messages but received {}.",
self.max_messages,
n,
)
return
def _raise_or_log(timeouts: TimeoutSettings, msg: str, *fmt) -> None:
if timeouts.raise_on_timeout:
if fmt:
raise InvalidResponseError(msg.format(*fmt))
else:
raise InvalidResponseError(msg)
logger.debug(msg, *fmt)
```
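A small illustrative check of the behaviour above (assuming `TimeoutSettings` is a dataclass with the fields referenced in this file; the values are made up):
```python
from tgintegration.containers.responses import InvalidResponseError
from tgintegration.expectation import Expectation
from tgintegration.timeout_settings import TimeoutSettings

exp = Expectation(min_messages=2, max_messages=3)
timeouts = TimeoutSettings(max_wait=5, raise_on_timeout=True)

try:
    # No messages arrived, so verify() raises because raise_on_timeout is set.
    exp.verify(messages=[], timeouts=timeouts)
except InvalidResponseError as err:
    print(err)
```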
#### File: tgintegration/utils/iter_utils.py
```python
from itertools import chain
def flatten(listOfLists):
"""Return an iterator flattening one level of nesting in a list of lists.
>>> list(flatten([[0, 1], [2, 3]]))
[0, 1, 2, 3]
See also :func:`collapse`, which can flatten multiple levels of nesting.
"""
return chain.from_iterable(listOfLists)
``` |
{
"source": "josygeorge/django-fullstack-milestone-main-project",
"score": 2
} |
#### File: django-fullstack-milestone-main-project/checkout/webhooks.py
```python
from django.conf import settings
from django.http import HttpResponse
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from checkout.webhook_handler import StripeWH_Handler
import stripe
@require_POST
@csrf_exempt
def webhook(request):
"""Listen for webhooks from Stripe"""
# Setup
wh_secret = settings.STRIPE_WH_SECRET
stripe.api_key = settings.STRIPE_SECRET_KEY
# Get the webhook data and verify its signature
payload = request.body
sig_header = request.META['HTTP_STRIPE_SIGNATURE']
event = None
try:
event = stripe.Webhook.construct_event(
payload, sig_header, wh_secret
)
except ValueError as e:
# Invalid payload
return HttpResponse(status=400)
except stripe.error.SignatureVerificationError as e:
# Invalid signature
return HttpResponse(status=400)
except Exception as e:
return HttpResponse(content=e, status=400)
print('Success')
return HttpResponse(status=200)
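
# Illustrative URL wiring for this view (not part of this file; the
# module name and route are assumptions):
#
#     # checkout/urls.py
#     from django.urls import path
#     from .webhooks import webhook
#
#     urlpatterns = [path('wh/', webhook, name='webhook')]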
``` |
{
"source": "josygeorge/django-milestone-project",
"score": 2
} |
#### File: django-milestone-project/bag/views.py
```python
from django.shortcuts import render, redirect, get_object_or_404
from django.core.exceptions import ObjectDoesNotExist
from productstore.models import Product
from .models import Bag, BagItem
from django.contrib.auth.decorators import login_required
# Create your views here.
def _bag_id(request):
    # The session key doubles as an anonymous cart identifier; create a
    # session first if one does not exist yet (create() returns None and
    # only sets session_key as a side effect).
    bag = request.session.session_key
    if not bag:
        request.session.create()
        bag = request.session.session_key
    return bag
def add_bag(request, product_id):
# fetching the Product by it's id
product = Product.objects.get(id=product_id)
try:
# fetching the Bag by it's id from the session variable
bag = Bag.objects.get(bag_id=_bag_id(request))
except Bag.DoesNotExist:
bag = Bag.objects.create(
bag_id=_bag_id(request))
bag.save()
# Bag item
try:
bag_item = BagItem.objects.get(product=product, bag=bag)
bag_item.quantity += 1
bag_item.save()
except BagItem.DoesNotExist:
bag_item = BagItem.objects.create(
product=product,
quantity=1,
bag=bag,
)
bag_item.save()
return redirect('bag')
# remove the item quantity
def remove_bag(request, product_id):
product = get_object_or_404(Product, id=product_id)
bag = Bag.objects.get(bag_id=_bag_id(request))
bag_item = BagItem.objects.get(product=product, bag=bag)
if bag_item.quantity > 1:
bag_item.quantity -= 1
bag_item.save()
else:
bag_item.delete()
return redirect('bag')
# remove the whole bag item
def remove_bag_item(request, product_id):
product = get_object_or_404(Product, id=product_id)
bag = Bag.objects.get(bag_id=_bag_id(request))
bag_item = BagItem.objects.get(product=product, bag=bag)
bag_item.delete()
return redirect('bag')
def bag(request, total=0, quantity=0, bag_items=None):
try:
tax = 0
grand_total = 0
if request.user.is_authenticated:
bag_items = BagItem.objects.filter(user=request.user, is_active=True)
else:
bag = Bag.objects.get(bag_id=_bag_id(request))
bag_items = BagItem.objects.filter(bag=bag, is_active=True)
for bag_item in bag_items:
total += (bag_item.product.price * bag_item.quantity)
quantity += bag_item.quantity
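        # flat 13% tax rate applied to the subtotal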
tax = (13 * total)/100
grand_total = total + tax
except ObjectDoesNotExist:
pass # just ignore
context = {
'total': total,
'quantity': quantity,
'bag_items': bag_items,
'tax': tax,
'grand_total': grand_total,
}
return render(request, 'products/bag.html', context)
@login_required(login_url='/accounts/login')
def checkout(request, total=0, quantity=0, bag_items=None):
try:
tax = 0
grand_total = 0
if request.user.is_authenticated:
bag_items = BagItem.objects.filter(user=request.user, is_active=True)
else:
bag = Bag.objects.get(bag_id=_bag_id(request))
bag_items = BagItem.objects.filter(bag=bag, is_active=True)
for bag_item in bag_items:
total += (bag_item.product.price * bag_item.quantity)
quantity += bag_item.quantity
tax = (13 * total)/100
grand_total = total + tax
except ObjectDoesNotExist:
pass # just ignore
context = {
'total': total,
'quantity': quantity,
'bag_items': bag_items,
'tax': tax,
'grand_total': grand_total,
}
return render(request, 'products/checkout.html', context)
``` |
{
"source": "josylad/Flask-Blog",
"score": 2
} |
#### File: josylad/Flask-Blog/run.py
```python
from app import create_app, db
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager, Server
from app.models import *
app = create_app('production')
# app = create_app('development')
manager = Manager(app)
migrate = Migrate(app,db)
manager.add_command('runserver', Server)
manager.add_command('db', MigrateCommand)
@manager.shell
def make_shell_context():
return dict(app = app, db = db, User= User)
if __name__ == '__main__':
manager.run()
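
# Typical invocations for the commands registered above (standard
# Flask-Script / Flask-Migrate command names):
#   python run.py db migrate
#   python run.py db upgrade
#   python run.py runserver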
``` |
{
"source": "josylad/Mini-CRM",
"score": 2
} |
#### File: Mini-CRM/crmapp/views.py
```python
import datetime as dt
from django.shortcuts import render, redirect
from .models import *
from .forms import RegisterForm
# Create your views here.
def index(request):
date = dt.date.today()
companies = Companies.get_allcompany()
return render(request, 'index.html', {"date": date, "companies":companies})
def employee(request):
employees = Employee.get_allemployee()
return render(request, 'employee.html', {"employees":employees})
def register(request):
if request.method == 'POST':
form = RegisterForm(request.POST)
if form.is_valid():
form.save()
            username = form.cleaned_data['username']
            receiver = form.cleaned_data['email']
return redirect('/')
else:
form = RegisterForm()
return render(request, 'registration/registration_form.html', {'form':form})
``` |
{
"source": "josylad/News-App",
"score": 4
} |
#### File: News-App/app/models.py
```python
class News:
'''
News class to define News Objects
'''
    def __init__(self, url, id, name, description, category):
        self.url = url
        self.id = id
        self.name = name
        self.description = description
        self.category = category
class NewsArticles:
'''
    News Articles class to define NewsArticles objects
'''
    def __init__(self, url, id, name, description, author, title, urlToImage, time):
        self.url = url
        self.id = id
        self.name = name
        self.description = description
        self.author = author
        self.title = title
        self.urlToImage = urlToImage
        self.time = time
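
# Illustrative instantiation (all values are made up):
#
#     source = News(url='https://example.com', id='bbc-news',
#                   name='BBC News', description='UK broadcaster',
#                   category='general')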
``` |
{
"source": "josylad/RoomScout",
"score": 3
} |
#### File: bills/tests/test_models.py
```python
from django.contrib.auth import get_user_model
from django.test import TestCase, Client
from bills.models import BillSet, Bill
from houses.models import House
class BillsModelTests(TestCase):
def setUp(self):
self.client = Client()
User = get_user_model()
self.user = User.objects.create_user(username='FredFlintstone', email='<EMAIL>', password='<PASSWORD>')
self.user2 = User.objects.create_user(username='JackyFlintstone', email='<EMAIL>', password='<PASSWORD>')
house = House.objects.create(user=self.user)
house.place_id = 'EiwyNTI5IFN0YWxsaW9uIERyLCBPc2hhd2EsIE9OIEwxSCA3SzQsIENhbmFkYSIxEi8KFAoSCY_JD3vDG9WJEe3JFhlBvwOKEOETKhQKEgnrS9FlwxvViRHYx20MM9m-8g'
house.lat = '43.95858010000001'
house.lon = '-78.91587470000002'
house.street_number = 2529
house.street_name = 'Stallion Drive'
house.city = 'Oshawa'
house.prov_state = 'ON'
house.postal_code = 'L1H 0M4'
house.country = 'Canada'
house.save()
self.house = house
def test_BillSet_creation(self):
print('Testing BillSet creation')
pre_count = BillSet.objects.count()
billset = BillSet()
billset.month = 10
billset.year = 2019
billset.house = self.house
billset.save()
post_count = BillSet.objects.count()
self.assertGreater(post_count, pre_count)
self.assertEqual(billset.get_month_name(), 'October')
self.assertEqual(billset.__str__(), 'October 2019')
def test_Bill_creation(self):
billset = BillSet()
billset.month = 10
billset.year = 2019
billset.house = self.house
billset.save()
pre_count = Bill.objects.count()
bill = Bill()
bill.set = billset
bill.user = self.user
bill.type = 'ELEC'
bill.date = '2019-11-04'
bill.amount = 199.99
bill.save()
post_count = Bill.objects.count()
self.assertGreater(post_count, pre_count)
```
#### File: dashboard/tests/test_views.py
```python
from django.contrib.auth import get_user_model
from django.test import TestCase, Client
from django.urls import reverse
class DashboardViewsTests(TestCase):
def setUp(self):
self.client = Client()
User = get_user_model()
self.user = User.objects.create_user(username='FredFlintstone', email='<EMAIL>', password='<PASSWORD>')
def test_dashboard_views_main_dashboard_get(self):
print('Testing dashboard.views.main_dashboard() GET')
self.client.force_login(self.user)
response = self.client.get(reverse('main_dashboard'), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'dashboard/main_dashboard.html')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
def test_dashboard_views_main_dashboard_get_not_logged_in(self):
print('Testing dashboard.views.main_dashboard() GET not logged in')
self.client.logout()
response = self.client.get(reverse('main_dashboard'), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
```
#### File: garbageday/tests/test_models.py
```python
from django.contrib.auth import get_user_model
from django.test import TestCase
from datetime import timedelta
from houses.models import House
from garbageday.models import GarbageDay
class GarbageDayModelTests(TestCase):
def setUp(self):
User = get_user_model()
user = User.objects.create_user(username='Fred_Flintstone', email='<EMAIL>', password='<PASSWORD>')
self.user = user
self.assertEqual(self.user.email, '<EMAIL>')
self.assertEqual(self.user.username, 'Fred_Flintstone')
self.assertTrue(self.user.is_active)
self.assertFalse(self.user.is_staff)
self.assertFalse(self.user.is_superuser)
house = House.objects.create(user=self.user)
house.place_id = 'EiwyNTI5IFN0YWxsaW9uIERyLCBPc2hhd2EsIE9OIEwxSCA3SzQsIENhbmFkYSIxEi8KFAoSCY_JD3vDG9WJEe3JFhlBvwOKEOETKhQKEgnrS9FlwxvViRHYx20MM9m-8g'
house.lat = '43.95858010000001'
house.lon = '-78.91587470000002'
house.street_number = 2529
house.street_name = 'Stallion Drive'
house.city = 'Oshawa'
house.prov_state = 'ON'
house.postal_code = 'L1H 0M4'
house.country = 'Canada'
house.save()
self.house = house
def test_create_garbage_day1(self):
print('Testing GarbageDay Creation Blank')
num_pre = GarbageDay.objects.count()
garbage_day = GarbageDay()
garbage_day.house = self.house
garbage_day.user = self.user
garbage_day.last_garbage_day = "2019-11-04"
garbage_day.next_garbage_day = "2019-11-04"
garbage_day.save()
self.assertEqual(garbage_day.house, self.house)
num_post = GarbageDay.objects.count()
self.assertEqual(num_post, num_pre + 1)
self.assertEqual(garbage_day.garbage_frequency, timedelta(weeks=0, days=0))
def test_create_garbage_day2(self):
print('Testing GarbageDay Creation Filled')
num_pre = GarbageDay.objects.count()
garbage_day = GarbageDay()
garbage_day.house = self.house
garbage_day.user = self.user
garbage_day.last_garbage_day = "2019-11-12"
garbage_day.next_garbage_day = "2019-11-26"
garbage_day.save()
self.assertEqual(garbage_day.house, self.house)
num_post = GarbageDay.objects.count()
self.assertEqual(num_post, num_pre + 1)
self.assertEqual(garbage_day.garbage_frequency, timedelta(weeks=2, days=0))
def test_garbage_day_house_relation(self):
print('Testing GarbageDay House Relation')
garbage_day = GarbageDay()
garbage_day.house = self.house
garbage_day.user = self.user
garbage_day.last_garbage_day = "2019-11-12"
garbage_day.next_garbage_day = "2019-11-26"
garbage_day.save()
self.assertIsNot(self.house.garbageday_set.count(), 0)
```
#### File: garbageday/tests/test_views.py
```python
import datetime
from django.test import TestCase
from django.contrib.auth import get_user_model
from django.urls import reverse
from houses.models import House
from garbageday.models import GarbageDay
class GarbageDayViewTests(TestCase):
def setUp(self):
User = get_user_model()
self.user = User.objects.create_user(username='Fred_Flintstone', email='<EMAIL>', password='<PASSWORD>')
self.user2 = User.objects.create_user(username='JackyFlintstone', email='<EMAIL>', password='<PASSWORD>')
self.assertEqual(self.user.email, '<EMAIL>')
self.assertEqual(self.user.username, 'Fred_Flintstone')
self.assertTrue(self.user.is_active)
self.assertFalse(self.user.is_staff)
self.assertFalse(self.user.is_superuser)
house = House.objects.create(user=self.user)
house.place_id = 'EiwyNTI5IFN0YWxsaW9uIERyLCBPc2hhd2EsIE9OIEwxSCA3SzQsIENhbmFkYSIxEi8KFAoSCY_JD3vDG9WJEe3JFhlBvwOKEOETKhQKEgnrS9FlwxvViRHYx20MM9m-8g'
house.lat = '43.95858010000001'
house.lon = '-78.91587470000002'
house.street_number = 2529
house.street_name = 'Stallion Drive'
house.city = 'Oshawa'
house.prov_state = 'ON'
house.postal_code = 'L1H 0M4'
house.country = 'Canada'
house.save()
self.house = house
def test_garbageday_views_garbageday_manage_get_not_existing(self):
print('Testing garbageday.views.garbageday_manage() GET not existing')
self.client.force_login(self.user)
count = GarbageDay.objects.filter(house=self.house).count()
self.assertEqual(count, 0)
response = self.client.get(reverse('garbageday_manage', kwargs={'house': self.house.id}), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'garbageday/garbageday_create.html')
self.assertContains(response, 'Setup Garbage Day for')
self.assertContains(response, self.house)
self.assertContains(response, 'Garbage Day')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
def test_garbageday_views_garbageday_manage_get_existing(self):
print('Testing garbageday.views.garbageday_manage() GET existing')
self.client.force_login(self.user)
self.garbage_day = GarbageDay()
self.garbage_day.house = self.house
self.garbage_day.user = self.user
self.garbage_day.last_garbage_day = "2019-11-12"
self.garbage_day.next_garbage_day = "2019-11-26"
self.garbage_day.save()
count = GarbageDay.objects.filter(house=self.house).count()
self.assertEqual(count, 1)
response = self.client.get(reverse('garbageday_manage', kwargs={'house': self.house.id}), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'garbageday/garbageday_edit.html')
self.assertContains(response, 'Edit Garbage Day for')
self.assertContains(response, self.house)
self.assertContains(response, 'Garbage Day')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
def test_garbageday_views_garbageday_manage_get_not_logged_in(self):
print('Testing garbageday.views.garbageday_manage() GET not logged in')
self.client.logout()
response = self.client.get(reverse('garbageday_manage', kwargs={'house': self.house.id}), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, 'Setup Garbage Day for')
self.assertNotContains(response, self.house)
self.assertNotContains(response, 'Garbage Day')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
def test_garbageday_views_garbageday_manage_get_wrong_user(self):
print('Testing garbageday.views.garbageday_manage() GET wrong user')
self.client.force_login(self.user2)
response = self.client.get(reverse('garbageday_manage', kwargs={'house': self.house.id}), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'main/404.html')
self.assertNotContains(response, 'Setup Garbage Day for')
self.assertNotContains(response, self.house)
self.assertNotContains(response, 'Garbage Day')
self.assertContains(response, '404')
self.assertNotContains(response, 'Login')
def test_garbageday_views_garbageday_create_get(self):
print('Testing garbageday.views.garbageday_create() GET')
self.client.force_login(self.user)
response = self.client.get(reverse('garbageday_create', kwargs={'house': self.house.id}), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'garbageday/garbageday_create.html')
self.assertContains(response, 'Setup Garbage Day for')
self.assertContains(response, self.house)
self.assertContains(response, 'Garbage Day')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
def test_garbageday_views_garbageday_create_get_not_logged_in(self):
print('Testing garbageday.views.garbageday_create() GET not logged in')
self.client.logout()
response = self.client.get(reverse('garbageday_create', kwargs={'house': self.house.id}), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, 'Setup Garbage Day for')
self.assertNotContains(response, self.house)
self.assertNotContains(response, 'Garbage Day')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
def test_garbageday_views_garbageday_create_get_wrong_user(self):
print('Testing garbageday.views.garbageday_create() GET wrong user')
self.client.force_login(self.user2)
response = self.client.get(reverse('garbageday_create', kwargs={'house': self.house.id}), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'main/404.html')
self.assertNotContains(response, 'Setup Garbage Day for')
self.assertNotContains(response, self.house)
self.assertNotContains(response, 'Garbage Day')
self.assertContains(response, '404')
self.assertNotContains(response, 'Login')
def test_garbageday_views_garbageday_create_post(self):
print('Testing garbageday.views.garbageday_create() POST')
self.client.force_login(self.user)
pre_count = GarbageDay.objects.count()
req_data = {'LastGarbageDay': '2019-11-12', 'NextGarbageDay': '2019-11-26'}
response = self.client.post(reverse('garbageday_create', kwargs={'house': self.house.id}), req_data, follow=True)
post_count = GarbageDay.objects.count()
self.assertGreater(post_count, pre_count)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'houses/house_detail.html')
self.assertNotContains(response, 'Setup Garbage Day for')
self.assertContains(response, self.house)
self.assertContains(response, 'Garbage Day')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
def test_garbageday_views_garbageday_create_post_existing(self):
print('Testing garbageday.views.garbageday_create() POST existing')
self.client.force_login(self.user)
self.garbage_day = GarbageDay()
self.garbage_day.house = self.house
self.garbage_day.user = self.user
self.garbage_day.last_garbage_day = "2019-11-12"
self.garbage_day.next_garbage_day = "2019-11-26"
self.garbage_day.save()
pre_count = GarbageDay.objects.count()
req_data = {'LastGarbageDay': '2019-11-12', 'NextGarbageDay': '2019-11-26'}
response = self.client.post(reverse('garbageday_create', kwargs={'house': self.house.id}), req_data, follow=True)
post_count = GarbageDay.objects.count()
self.assertEqual(post_count, pre_count)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'houses/house_detail.html')
self.assertNotContains(response, 'Setup Garbage Day for')
self.assertContains(response, self.house)
self.assertContains(response, 'Garbage Day')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
def test_garbageday_views_garbageday_create_post_not_logged_in(self):
print('Testing garbageday.views.garbageday_create() POST not logged in')
self.client.logout()
pre_count = GarbageDay.objects.count()
req_data = {'LastGarbageDay': '2019-11-12', 'NextGarbageDay': '2019-11-26'}
response = self.client.post(reverse('garbageday_create', kwargs={'house': self.house.id}), req_data, follow=True)
post_count = GarbageDay.objects.count()
self.assertEqual(post_count, pre_count)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, 'Setup Garbage Day for')
self.assertNotContains(response, self.house)
self.assertNotContains(response, 'Garbage Day')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
def test_garbageday_views_garbageday_create_post_wrong_user(self):
print('Testing garbageday.views.garbageday_create() POST wrong user')
self.client.force_login(self.user2)
pre_count = GarbageDay.objects.count()
req_data = {'LastGarbageDay': '2019-11-12', 'NextGarbageDay': '2019-11-26'}
response = self.client.post(reverse('garbageday_create', kwargs={'house': self.house.id}), req_data, follow=True)
post_count = GarbageDay.objects.count()
self.assertEqual(post_count, pre_count)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'main/404.html')
self.assertNotContains(response, 'Setup Garbage Day for')
self.assertNotContains(response, self.house)
self.assertNotContains(response, 'Garbage Day')
self.assertContains(response, '404')
self.assertNotContains(response, 'Login')
def test_garbageday_views_garbageday_edit_get(self):
print('Testing garbageday.views.garbageday_edit() GET')
self.client.force_login(self.user)
response = self.client.get(reverse('garbageday_edit', kwargs={'house': self.house.id}), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'garbageday/garbageday_edit.html')
self.assertContains(response, 'Edit Garbage Day for')
self.assertContains(response, self.house)
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
def test_garbageday_views_garbageday_edit_get_not_logged_in(self):
print('Testing garbageday.views.garbageday_edit() GET not logged in')
self.client.logout()
response = self.client.get(reverse('garbageday_edit', kwargs={'house': self.house.id}), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, 'Edit Garbage Day for')
self.assertNotContains(response, self.house)
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
def test_garbageday_views_garbageday_edit_get_wrong_user(self):
print('Testing garbageday.views.garbageday_edit() GET wrong user')
self.client.force_login(self.user2)
response = self.client.get(reverse('garbageday_edit', kwargs={'house': self.house.id}), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'main/404.html')
self.assertNotContains(response, 'Edit Garbage Day for')
self.assertNotContains(response, self.house)
self.assertContains(response, '404')
self.assertNotContains(response, 'Login')
def test_garbageday_views_garbageday_edit_post(self):
print('Testing garbageday.views.garbageday_edit() POST')
self.client.force_login(self.user)
self.garbage_day = GarbageDay()
self.garbage_day.house = self.house
self.garbage_day.user = self.user
self.garbage_day.last_garbage_day = "2019-11-04"
self.garbage_day.next_garbage_day = "2019-11-04"
self.garbage_day.save()
pre_count = GarbageDay.objects.count()
req_data = {'LastGarbageDay': '2019-11-12', 'NextGarbageDay': '2019-11-26'}
response = self.client.post(reverse('garbageday_edit', kwargs={'house': self.house.id}), req_data, follow=True)
post_count = GarbageDay.objects.count()
self.assertEqual(post_count, pre_count)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'houses/house_detail.html')
self.assertNotContains(response, 'Edit Garbage Day for')
self.assertContains(response, self.house)
self.assertContains(response, 'Garbage Day')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertEqual(self.house.garbageday_set.first().last_garbage_day, datetime.date(2019, 11, 12))
self.assertEqual(self.house.garbageday_set.first().next_garbage_day, datetime.date(2019, 11, 26))
def test_garbageday_views_garbageday_edit_post_not_logged_in(self):
print('Testing garbageday.views.garbageday_edit() POST not logged in')
self.client.logout()
self.garbage_day = GarbageDay()
self.garbage_day.house = self.house
self.garbage_day.user = self.user
self.garbage_day.last_garbage_day = "2019-11-04"
self.garbage_day.next_garbage_day = "2019-11-04"
self.garbage_day.save()
pre_count = GarbageDay.objects.count()
req_data = {'LastGarbageDay': '2019-11-12', 'NextGarbageDay': '2019-11-26'}
response = self.client.post(reverse('garbageday_edit', kwargs={'house': self.house.id}), req_data, follow=True)
post_count = GarbageDay.objects.count()
self.assertEqual(post_count, pre_count)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, 'Edit Garbage Day for')
self.assertNotContains(response, self.house)
self.assertNotContains(response, 'Garbage Day')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
self.assertEqual(self.house.garbageday_set.first().last_garbage_day, datetime.date(2019, 11, 4))
self.assertEqual(self.house.garbageday_set.first().next_garbage_day, datetime.date(2019, 11, 4))
def test_garbageday_views_garbageday_edit_post_wrong_user(self):
print('Testing garbageday.views.garbageday_edit() POST wrong user')
self.client.force_login(self.user2)
self.garbage_day = GarbageDay()
self.garbage_day.house = self.house
self.garbage_day.user = self.user
self.garbage_day.last_garbage_day = "2019-11-04"
self.garbage_day.next_garbage_day = "2019-11-04"
self.garbage_day.save()
pre_count = GarbageDay.objects.count()
req_data = {'LastGarbageDay': '2019-11-12', 'NextGarbageDay': '2019-11-26'}
response = self.client.post(reverse('garbageday_edit', kwargs={'house': self.house.id}), req_data, follow=True)
post_count = GarbageDay.objects.count()
self.assertEqual(post_count, pre_count)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'main/404.html')
self.assertNotContains(response, 'Edit Garbage Day for')
self.assertNotContains(response, self.house)
self.assertNotContains(response, 'Garbage Day')
self.assertContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertEqual(self.house.garbageday_set.first().last_garbage_day, datetime.date(2019, 11, 4))
self.assertEqual(self.house.garbageday_set.first().next_garbage_day, datetime.date(2019, 11, 4))
```
#### File: RoomScout/houses/sitemaps.py
```python
from django.contrib.sitemaps import Sitemap
from .models import House
class HouseSitemap(Sitemap):
priority = 0.5
changefreq = 'daily'
def items(self):
return House.objects.all()
```
#### File: payments/tests/test_models.py
```python
from django.test import TestCase
from payments.models import Donation
class PaymentsModelTests(TestCase):
def test_payments_models_Donation_creation(self):
print('Testing payment.models.Donation creation')
pre_count = Donation.objects.count()
donation = Donation.objects.create(amount=200.00, email='<EMAIL>')
post_count = Donation.objects.count()
self.assertGreater(post_count, pre_count)
def test_payments_models_Donation_creation1(self):
print('Testing payment.models.Donation creation 1')
pre_count = Donation.objects.count()
donation = Donation.objects.create(amount=200, email='<EMAIL>')
post_count = Donation.objects.count()
self.assertGreater(post_count, pre_count)
```
#### File: RoomScout/rooms/models.py
```python
from django.db import models
from django.templatetags.static import static
from django.urls import reverse
from accounts.models import User
from houses.models import House
from utils.datetime import time_diff_display
class Room(models.Model):
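    """A rentable room listed under a house, with pricing and amenity flags."""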
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
user = models.ForeignKey(User, on_delete=models.CASCADE)
house = models.ForeignKey(House, on_delete=models.CASCADE, related_name='house')
price = models.DecimalField(max_digits=19, decimal_places=2, default=0.00)
name = models.CharField(max_length=200, default='')
description = models.TextField(default='')
    is_available = models.BooleanField(verbose_name='Available', default=True, help_text='Room is available')
is_accessible = models.BooleanField(default=False, verbose_name="Accessible", help_text="House is accessible with ramp or elevator")
open_to_students = models.BooleanField(default=True, help_text="Students are free to inquire about rooms at this house")
pet_friendly = models.BooleanField(default=False, help_text="Pets are allowed")
utilities_included = models.BooleanField(default=False, help_text="All utilities are included in the price of rent")
parking = models.BooleanField(default=False, help_text="Parking spot is included or available")
furnished = models.BooleanField(default=False, help_text="Room is furnished with at least a bed, mattress, and dresser")
female_only = models.BooleanField(default=False, help_text="Only females are allowed to inquire")
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('room_detail', args=[str(self.pk)])
def get_time_difference_display(self):
return time_diff_display(self.updated_at)
def get_first_image(self):
room_images = self.roomimage_set
if room_images.count() > 0:
return room_images.first().image.url
house_images = self.house.houseimage_set
if house_images.count() > 0:
return house_images.first().image.url
return static('logos/logo.PNG')
class Inquiry(models.Model):
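    """A prospective renter's message about a room, with a move-in date and an open/dismissed status."""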
STATUS_CHOICES = [('O', 'Open'), ('D', 'Dismissed')]
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
user = models.ForeignKey(User, on_delete=models.CASCADE)
room = models.ForeignKey(Room, on_delete=models.CASCADE)
message = models.TextField(default='')
move_in_date = models.DateField(default='1997-11-04')
status = models.CharField(choices=STATUS_CHOICES, default='O', max_length=3)
class RoomLike(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
room = models.ForeignKey(Room, on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
```
#### File: utils/tests/test_datetime.py
```python
import datetime
from datetime import timedelta
from django.test import TestCase
from utils.datetime import *
class DateTimeTest(TestCase):
def test_time_diff_display(self):
print('Testing utils.datetime.time_diff_display()')
updated_0min_ago = datetime.datetime.now(datetime.timezone.utc) - timedelta(hours=0, minutes=0)
self.assertEqual(time_diff_display(updated_0min_ago), 'Less than 1 hour ago')
updated_1min_ago = datetime.datetime.now(datetime.timezone.utc) - timedelta(hours=0, minutes=1)
self.assertEqual(time_diff_display(updated_1min_ago), 'Less than 1 hour ago')
updated_59min_ago = datetime.datetime.now(datetime.timezone.utc) - timedelta(hours=0, minutes=59)
self.assertEqual(time_diff_display(updated_59min_ago), 'Less than 1 hour ago')
updated_1hr_ago = datetime.datetime.now(datetime.timezone.utc) - timedelta(hours=1, minutes=0)
self.assertEqual(time_diff_display(updated_1hr_ago), "1 hour ago")
updated_2hr_ago = datetime.datetime.now(datetime.timezone.utc) - timedelta(hours=2, minutes=0)
self.assertEqual(time_diff_display(updated_2hr_ago), "2 hours ago")
updated_1day_ago = datetime.datetime.now(datetime.timezone.utc) - timedelta(hours=24, minutes=0)
self.assertEqual(time_diff_display(updated_1day_ago), "1 day ago")
updated_10days_ago = datetime.datetime.now(datetime.timezone.utc) - timedelta(hours=240, minutes=0)
self.assertEqual(time_diff_display(updated_10days_ago), "10 days ago")
updated_30days_ago = datetime.datetime.now(datetime.timezone.utc) - timedelta(hours=720, minutes=0)
self.assertEqual(time_diff_display(updated_30days_ago), "30 days ago")
updated_90days_ago = datetime.datetime.now(datetime.timezone.utc) - timedelta(hours=2160, minutes=0)
self.assertEqual(time_diff_display(updated_90days_ago), "90 days ago")
``` |
{
"source": "josyulakrishna/detectron2_pruning",
"score": 2
} |
#### File: detectron2_pruning/prunning_train/custom_trainv2.py
```python
from detectron2.data.datasets import register_coco_instances
from detectron2.data import MetadataCatalog
from detectron2.data import DatasetCatalog
from detectron2.engine import DefaultTrainer
from detectron2.engine import DefaultPredictor
from detectron2.evaluation import COCOEvaluator
from detectron2.config import get_cfg
from detectron2 import model_zoo
import cv2
import os
from detectron2.utils.visualizer import ColorMode
from detectron2.utils.visualizer import Visualizer
import random
from detectron2.engine import DefaultPredictor
######## REGISTER DATASET #################
print("registering")
register_coco_instances("pruningTrain", {},"/home/josyula/Documents/DataAndModels/AugmentedData/v1/train/_annotations.coco.json", "/home/josyula/Documents/DataAndModels/AugmentedData/v1/train")
register_coco_instances("pruningVal", {}, "/home/josyula/Documents/DataAndModels/AugmentedData/v1/valid/_annotations.coco.json", "/home/josyula/Documents/DataAndModels/AugmentedData/v1/valid")
register_coco_instances("pruningTest", {}, "/home/josyula/Documents/DataAndModels/AugmentedData/v1/test/_annotations.coco.json", "/home/josyula/Documents/DataAndModels/AugmentedData/v1/test")
# dataset_dicts = DatasetCatalog.get("prunningTrain")
# for data_ in ["pruningTrain", "pruningVal", "pruningTest"]:
# dataset_dicts = DatasetCatalog.get(data_)
# pruning_meta_data = MetadataCatalog.get(data_).thing_classes
# print(pruning_meta_data)
###########################################
################## VISUALIZE ##############
# import random
# from detectron2.utils.visualizer import Visualizer
#
# pruning_meta_data = MetadataCatalog.get("pruningTrain")
# dataset_dicts = DatasetCatalog.get("pruningTrain")
# for d in random.sample(dataset_dicts, 3):
# img = cv2.imread(d["file_name"])
# visualizer = Visualizer(img[:, :, ::-1], metadata=pruning_meta_data, scale=0.5)
# vis = visualizer.draw_dataset_dict(d)
# cv2.imshow("1", vis.get_image()[:, :, ::-1])
# cv2.waitKey(0)
# cv2.destroyAllWindows()
###########################################
############ TRAINING #####################
#
# class CocoTrainer(DefaultTrainer):
#
# @classmethod
# def build_evaluator(cls, cfg, dataset_name, output_folder=None):
#
# if output_folder is None:
# os.makedirs("coco_eval", exist_ok=True)
# output_folder = "coco_eval"
#
# return COCOEvaluator(dataset_name, cfg, False, output_folder)
#
#
# # select from modelzoo here: https://github.com/facebookresearch/detectron2/blob/master/MODEL_ZOO.md#coco-object-detection-baselines
#
#
# cfg = get_cfg()
# cfg.merge_from_file(model_zoo.get_config_file("COCO-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_3x.yaml"))
# cfg.DATASETS.TRAIN = ("pruningTrain",)
# cfg.DATASETS.TEST = ("pruningVal",)
#
#
# cfg.DATALOADER.NUM_WORKERS = 4
# cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url("COCO-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_3x.yaml") # Let training initialize from model zoo
# cfg.SOLVER.IMS_PER_BATCH = 4
# cfg.SOLVER.BASE_LR = 0.001
#
#
# cfg.SOLVER.WARMUP_ITERS = 1000
# cfg.SOLVER.MAX_ITER = 5000 #adjust up if val mAP is still rising, adjust down if overfit
# cfg.SOLVER.STEPS = (1000, 1500)
# cfg.SOLVER.GAMMA = 0.05
#
#
# cfg.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE = 64
# # cfg.MODEL.ROI_HEADS.NUM_CLASSES = 6 #your number of classes + 1
#
# cfg.TEST.EVAL_PERIOD = 500
#
# os.makedirs(cfg.OUTPUT_DIR, exist_ok=True)
# trainer = CocoTrainer(cfg)
# trainer.resume_or_load(resume=False)
# trainer.train()
###########################################
######### PREDICTION #####################
# cfg = get_cfg()
# cfg.merge_from_file(model_zoo.get_config_file("COCO-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_3x.yaml"))
# # cfg.MODEL.WEIGHTS = os.path.join(cfg.OUTPUT_DIR, "model_final.pth")
# cfg.MODEL.WEIGHTS = os.path.join("/home/josyula/Programs/detectron2/output/model_final.pth")
# cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.5 # set the testing threshold for this model
# # cfg.DATASETS.TEST = ("pruningTest", )
# cfg.MODEL.ROI_HEADS.NUM_CLASSES = 5 #your number of classes + 1
# dataset_dicts = DatasetCatalog.get("pruningTest")
# pruning_meta_data = MetadataCatalog.get("pruningVal")
# predictor = DefaultPredictor(cfg)
# # random.sample(dataset_dicts, 3)
# i=0
# for d in dataset_dicts:
# im = cv2.imread(d["file_name"])
# print(d["file_name"])
# outputs = predictor(im)
# # print(outputs)
# v = Visualizer(im[:, :, ::-1],
# metadata=pruning_meta_data,
# scale=0.8,
# instance_mode=ColorMode.IMAGE_BW # remove the colors of unsegmented pixels
# )
# v = v.draw_instance_predictions(outputs["instances"].to("cpu"))
# # cv2.imwrite("/home/josyula/Documents/DataAndModels/labeled_data/test/"+str(i)+"pred.jpg", v.get_image()[:, :, ::-1])
# i+=1
# cv2.imshow("1", v.get_image()[:, :, ::-1])
# cv2.waitKey(0)
# cv2.destroyAllWindows()
##################
# pred_classes = output_dict['instances'].pred_classes.cpu().tolist()
# class_names = MetadataCatalog.get("mydataset").thing_classes
# pred_class_names = list(map(lambda x: class_names[x], pred_classes))
########
```
#### File: detectron2_pruning/utils_json_coco/renameJSON2.py
```python
import json
from pathlib import Path
from typing import List
import numpy as np
from PIL import Image
from sahi.utils.coco import Coco, CocoAnnotation, CocoCategory, CocoImage
from sahi.utils.file import list_files_recursively, load_json, save_json
from tqdm import tqdm
import os.path
import glob
import pdb
def get_coco_from_labelme_folder(
labelme_folder: str, coco_category_list: List = None
) -> None:
    """
    Args:
        labelme_folder: folder that contains labelme annotations and image files
        coco_category_list: start from a predefined coco category list
    """
    # get json list
_, abs_json_path_list = list_files_recursively(labelme_folder, contains=[".json"])
labelme_json_list = abs_json_path_list
category_ind = 0
for json_path in tqdm(labelme_json_list, "Converting labelme annotations to COCO format"):
# print(json_path)
data = load_json(json_path)
image_path = str(Path(labelme_folder) / data["imagePath"])
nameOrg = json_path.split("/")[-1].split(".")[0]+".tiff"
data["imagePath"] = nameOrg
data_json = json.dumps(data)
# pdb.set_trace()
with open(json_path, 'w') as outfile:
# json.dump(data_json, outfile)
outfile.write(data_json)
print("written Orig")
#
# flowPath = json_path.split("/")[:-1]
# nameFlow = json_path.split("/")[-1].split(".")[0]+"_flow.png"
# nameFlowJSON = json_path.split("/")[-1].split(".")[0]+"_flow.json"
# flowPath.append(nameFlowJSON)
# flowPath = '/'.join(flowPath)
#
# data["imagePath"] = nameFlow
# data_json = json.dumps(data)
#
# with open(flowPath, 'w') as outfile:
# # json.dump(data_json, outfile)
# outfile.write(data_json)
# print("written flow")
#
#
# maskPath = json_path.split("/")[:-1]
# namemask = json_path.split("/")[-1].split(".")[0]+"_mask.png"
# namemaskJSON = json_path.split("/")[-1].split(".")[0]+"_mask.json"
# maskPath.append(namemaskJSON)
# maskPath = '/'.join(maskPath)
#
# data["imagePath"] = namemask
# data_json = json.dumps(data)
#
# with open(maskPath, 'w') as outfile:
# # json.dump(data_json, outfile)
# outfile.write(data_json)
# print("written mask")
if __name__=="__main__":
# get_coco_from_labelme_folder("~/Documents/DataAndModels/labeled_data")
get_coco_from_labelme_folder("/home/josyula/Documents/DataAndModels/pruning_training/stacked/train/")
``` |
{
"source": "jotacor/tradunio",
"score": 3
} |
#### File: tradunio/Comunio/Comunio.py
```python
import db_tradunio as db
from tabulate import tabulate
class User:
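    """Comunio user whose squad and per-day account data are loaded from the database."""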
def __init__(self, idu):
self.id = idu
self.username = None
self.user_data = []
self.players = []
self.transactions = []
self.load_players(idu)
self.load_user_data(idu)
def __repr__(self):
return 'Teamvalue: %s € - Money: %s € - Max bid: %s € - Points: %s' % (
format(self.user_data[-1].teamvalue, ",d"),
format(self.user_data[-1].money, ",d"),
format(self.user_data[-1].maxbid, ",d"),
self.user_data[-1].points)
def load_players(self, idu):
players = db.simple_query(
'SELECT p.idp, p.name, p.position, c.name \
FROM players p, clubs c, owners o \
WHERE o.idp=p.idp AND p.idcl=c.idcl AND o.idu=%s' % idu)
for player in players:
idp, playername, position, clubname = player
ob_player = Player(idp, playername, position, clubname)
self.players.append(ob_player)
def load_user_data(self, idu):
user_data = db.simple_query(
'SELECT u.name, d.date, d.points, d.money, d.teamvalue, d.maxbid \
FROM users u, user_data d \
WHERE u.idu=d.idu AND u.idu=%s ORDER BY d.date ASC' % idu)
self.username = user_data[0][0]
for data in user_data:
name, date, points, money, teamvalue, maxbid = data
ob_user_data = UserData(date, points, money, int(teamvalue), maxbid)
self.user_data.append(ob_user_data)
def load_transactions(self, idu):
transactions = db.simple_query(
'SELECT date, type, price FROM transactions WHERE idu=%s ORDER BY date ASC' % idu)
for transaction in transactions:
date, trans, price = transaction
ob_transaction = Transaction(date, trans, price)
self.transactions.append(ob_transaction)
class Player:
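    """Comunio player with price and points history loaded from the database."""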
def __init__(self, idp, playername, position, clubname):
self.name = playername
self.clubname = clubname
self.position = position
self.prices = list()
self.points = list()
self.load(idp)
def __repr__(self):
headers = ['Name', 'Club', 'Value', 'Points', 'Position', 'Last date']
total_points = sum(p.points for p in self.points)
        # tabulate expects a list of rows; keep the column order in sync with `headers`
        table = [[self.name, self.clubname, self.prices[-1].price, total_points, self.position, self.prices[-1].date]]
        return tabulate(table, headers, tablefmt="rst", numalign="right", floatfmt=",.0f")
def load(self, idp):
prices = db.simple_query('SELECT date, price FROM prices WHERE idp=%s ORDER BY date ASC' % idp)
        for row in prices:
            date, price = row
ob_price = Price(date, price)
self.prices.append(ob_price)
points = db.simple_query('SELECT gameday, points FROM points WHERE idp=%s ORDER BY gameday ASC' % idp)
for point in points:
gameday, poin = point
ob_point = Points(gameday, poin)
self.points.append(ob_point)
class UserData:
def __init__(self, date, points, money, teamvalue, maxbid):
self.date = date
self.points = points
self.money = money
self.teamvalue = teamvalue
self.maxbid = maxbid
class Transaction:
def __init__(self, date, transaction, price):
self.date = date
self.type = transaction
self.price = price
class Price:
def __init__(self, date, price):
self.date = date
self.price = price
class Points:
def __init__(self, gameday, points):
self.gameday = gameday
self.points = points
def test():
user = User(15797714)
pass
if __name__ == '__main__':
test()
```
#### File: jotacor/tradunio/db_tradunio.py
```python
import MySQLdb
import ConfigParser
import warnings
db_name = 'db_tradunio'
Config = ConfigParser.ConfigParser()
if not Config.read('../config.conf'):
Config.read('config.conf')
db = MySQLdb.connect(
host=Config.get(db_name, 'host'),
user=Config.get(db_name, 'user'),
passwd=Config.get(db_name, 'passwd'),
port=Config.getint(db_name, 'port'),
db=Config.get(db_name, 'db'),
charset=Config.get(db_name, 'charset')
)
cursor = db.cursor()
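# Suppress MySQLdb warning messages raised while executing queries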
warnings.filterwarnings('ignore', category=MySQLdb.Warning)
def simple_query(sql):
"""
    Simple query against the database, i.e. without a commit
    @param sql: SQL statement to execute against the database
    @return: the result set of the given SQL query
"""
cursor.execute(sql)
return cursor.fetchall()
def close_connection():
"""
    Closes the database connection
"""
cursor.close()
db.close()
def commit():
"""
    Commits everything executed so far
"""
db.commit()
def nocommit_query(sql):
"""
    Query against the database without a commit. Use this for inserts, updates or truncates
    @param sql: SQL statement to execute against the database
    @return: executes the statement without committing and returns the last inserted row id
"""
cursor.execute(sql)
return cursor.lastrowid
def commit_query(sql):
"""
    Query against the database with a commit. Use this for inserts, updates or truncates
    @param sql: SQL statement to execute against the database
    @return: executes the statement and commits the change to the database
"""
cursor.execute(sql)
db.commit()
def rowcount(sql):
"""
    Query used to find out whether a given tuple exists or not
    @param sql: SQL statement to execute against the database
    @return: the number of matched rows: 0 if the tuple does not exist, 1 if it does
"""
cursor.execute(sql)
return cursor.rowcount
def many_commit_query(sql, values):
"""
    Inserts all the tuples passed as a parameter in a single batch and commits
    @param sql: SQL statement to execute against the database
    @param values: sequence of value tuples to insert
"""
cursor.executemany(sql, values)
db.commit()
def many_nocommit_query(sql, values):
"""
    Inserts all the tuples passed as a parameter in a single batch without committing
    @param sql: SQL statement to execute against the database
    @param values: sequence of value tuples to insert
"""
cursor.executemany(sql, values)
``` |
{
"source": "jotaen/klog-sublime",
"score": 3
} |
#### File: jotaen/klog-sublime/smart_completions.py
```python
import sublime
import sublime_plugin
import datetime
import re
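# Lines indented by 2-4 spaces or a tab hold klog time entries; unindented lines are where dates go.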
INDENT_PATTERN = re.compile(r'^( {2,4}|\t)')
NOT_INDENTED_PATTERN = re.compile(r'^[^\s]$')
def complete(trigger, value, details):
return sublime.CompletionItem(
trigger,
annotation=value,
completion=value,
completion_format=sublime.COMPLETION_FORMAT_SNIPPET,
kind=(sublime.KIND_ID_AMBIGUOUS, '⏱', 'klog'),
details=details,
)
def date_completions():
DATEFMT = '%Y-%m-%d'
today = datetime.datetime.now()
yesterday = today - datetime.timedelta(days=1)
tomorrow = today + datetime.timedelta(days=1)
return sublime.CompletionList([
complete('today', today.strftime(DATEFMT), 'Insert today’s date'),
complete('yesterday', yesterday.strftime(DATEFMT), 'Insert yesterday’s date'),
complete('tomorrow', tomorrow.strftime(DATEFMT), 'Insert tomorrow’s date'),
])
def time_completions():
TIMEFMT = '%H:%M'
now = datetime.datetime.now()
return sublime.CompletionList([
complete('now', now.strftime(TIMEFMT), 'Insert current time'),
complete('start', now.strftime(TIMEFMT) + ' - ?', 'Insert open range'),
])
class KlogEventListener(sublime_plugin.EventListener):
def on_query_completions(self, view, prefix, locations):
if not view.match_selector(locations[0], 'source.klog'):
return
cursor = view.line(view.sel()[0])
preceding_text = view.substr(cursor)
# If the cursor is not indented
if NOT_INDENTED_PATTERN.match(preceding_text):
return date_completions()
# If the cursor is on an indented line, offer times.
if INDENT_PATTERN.match(preceding_text):
return time_completions()
``` |
{
"source": "JotaGalera/FindAInformatic",
"score": 3
} |
#### File: JotaGalera/FindAInformatic/application.py
```python
from flask import Flask,request,jsonify
from src.mainProject import Informatic
import json
app = Flask(__name__)
inf = Informatic("Jota","qwerty",3)
@app.route('/')
def status():
dato={"status":"OK"}
return jsonify(dato)
@app.route('/status')
def status2():
dato={"status":"OK"}
return jsonify(dato)
@app.route('/ruta/<parametro>')
def ruta(parametro):
datos={ parametro : "Parametro reconocido"}
return jsonify(datos)
@app.route('/changeName/<newname>',methods=['POST'])
def changeName(newname):
cambio = inf.changeName(newname)
return "Nombre Cambiado: "+cambio
@app.route('/changeCv/<newcv>',methods=['POST'])
def changeCv(newcv):
cambio = inf.changeCv(newcv)
return "Cambio CV: "+cambio
@app.route('/changeAge/<newage>',methods=['POST'])
def changeAge(newage):
cambio = inf.changeAge(newage)
return "Cambio de la edad: "+cambio
@app.route('/showData')
def showAll():
return jsonify(inf.showData())
if __name__ == "__main__":
app.run(host='0.0.0.0')
```
#### File: FindAInformatic/src/mainProject.py
```python
from flask import Flask
import json
app = Flask(__name__)
class Informatic:
def __init__(self,name,cv,edad):
self.name = name
self.cv = cv
self.age = edad
def statusFun(self):
return True
def changeName(self,new_name):
if(self.name != new_name and len(new_name)>2):
self.name = new_name
return self.name
def changeCv(self,new_cv):
if(self.cv != new_cv):
self.cv = new_cv
return self.cv
def changeAge(self,new_age):
if(self.age != new_age):
self.age = new_age
return self.age
def showData(self):
if(len(self.name)>0 and len(self.cv)>0 and int(self.age) > 0):
dato = {'Nombre':self.name, 'CV':self.cv, 'Edad':self.age}
else:
dato = "Error en los datos del usuario."
return dato
if __name__ == "__main__":
app.run(debug = True, host='0.0.0.0')
``` |
{
"source": "JotaGo/rock-paper-scissor",
"score": 4
} |
#### File: JotaGo/rock-paper-scissor/rps.py
```python
import random
# Global variable
option = ['rock','paper','scissor']
# Function to let user enter the option
# 1 is Rock
# 2 is Paper
# 3 is Scissor
# If the user tries to enter a string, the program will say "Data entered not valid, please enter a valid option",
# and if the user enters a number higher than 3 or lower than 1, the program will print "Enter a number between 1 and 3";
# either way it returns to this function.
def data_input():
while True:
try:
player = int(input('''
[1] - Rock
[2] - Paper
[3] - Scissor
\n'''))
return player
except ValueError:
print('Data entered not valid, please enter a valid option')
# this function is to avoid repeating the same print
def result(win,cpu_option):
if win:
print('You win! The CPU choose', cpu_option)
else:
print('You lose! The CPU choose', cpu_option)
# Main function
if __name__ == "__main__":
game_on = True
cnt = [0,0]
player = 0
while game_on:
print('Choose one of the three options:')
        while player > 3 or player < 1:
            player = data_input()
            if player > 3 or player < 1:
print('Enter a number between 1 and 3')
if player <= 3 and player > 0:
player -= 1
player_option = option[player]
cpu_option = option[random.randint(0,2)]
if player_option == cpu_option:
print('Draw')
elif player_option == 'rock':
if cpu_option == 'scissor':
win = True
result(win,cpu_option)
elif cpu_option == 'paper':
win = False
result(win,cpu_option)
elif player_option == 'paper':
if cpu_option == 'rock':
win = True
result(win,cpu_option)
elif cpu_option == 'scissor':
win = False
result(win,cpu_option)
elif player_option == 'scissor':
if cpu_option == 'paper':
win = True
result(win,cpu_option)
elif cpu_option == 'rock':
win = False
result(win,cpu_option)
stay = input('Wanna continue? (y/n) \n')
if stay == 'y':
game_on = True
player = 0
elif stay == 'n':
game_on = False
else:
print('invalid input')
print('bye bye')
``` |
{
"source": "jotajotaramirez/pisensor",
"score": 3
} |
#### File: jotajotaramirez/pisensor/update.py
```python
import pymongo
from pymongo import MongoClient
from datetime import datetime
import time
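# GPIO pin of the DHT22 sensor, and the pause (in seconds) between consecutive sensor reads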
DHT22_PIN = 22
DEFAULT_READ_TIMEOUT = 0.5
# Read all eight MCP3008 analog channels and return them as a dict of new fields
def read_mcp():
import Adafruit_GPIO.SPI as SPI
import Adafruit_MCP3008
output = { }
# Software SPI configuration:
#CLK = 18
#MISO = 23
#MOSI = 24
#CS = 25
#mcp = Adafruit_MCP3008.MCP3008(clk=CLK, cs=CS, miso=MISO, mosi=MOSI)
# Hardware SPI configuration:
SPI_PORT = 0
SPI_DEVICE = 0
mcp = Adafruit_MCP3008.MCP3008(spi=SPI.SpiDev(SPI_PORT, SPI_DEVICE))
print('Reading MCP3008 values')
for i in range(8):
# The read_adc function will get the value of the specified channel (0-7).
output['mcp_%d' % i] = mcp.read_adc(i)
return output
def read_1wire_temperature():
from w1thermsensor import W1ThermSensor
sensor = W1ThermSensor()
return { 'temp1w': sensor.get_temperature() }
def read_dht22(pin):
import Adafruit_DHT
humidity, temperature = Adafruit_DHT.read_retry(Adafruit_DHT.DHT22, pin)
return { 'dht22_humidity': humidity, 'dht22_temperature': temperature }
def read_i2c_light():
import smbus
import time
# Start measurement at 4lx resolution. Time typically 16ms.
#CONTINUOUS_LOW_RES_MODE = 0x13
# Start measurement at 1lx resolution. Time typically 120ms
#CONTINUOUS_HIGH_RES_MODE_1 = 0x10
# Start measurement at 0.5lx resolution. Time typically 120ms
#CONTINUOUS_HIGH_RES_MODE_2 = 0x11
# Start measurement at 1lx resolution. Time typically 120ms
# Device is automatically set to Power Down after measurement.
#ONE_TIME_HIGH_RES_MODE_1 = 0x20
# Start measurement at 0.5lx resolution. Time typically 120ms
# Device is automatically set to Power Down after measurement.
ONE_TIME_HIGH_RES_MODE_2 = 0x21
# Start measurement at 1lx resolution. Time typically 120ms
# Device is automatically set to Power Down after measurement.
#ONE_TIME_LOW_RES_MODE = 0x23
#bus = smbus.SMBus(0) # Rev 1 Pi uses 0
bus = smbus.SMBus(1) # Rev 2 Pi uses 1
BH1750 = 0x23 # Default device I2C address
data = bus.read_i2c_block_data(BH1750, ONE_TIME_HIGH_RES_MODE_2)
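    # Combine the two raw bytes and divide by 1.2 to convert the reading to lux (per the BH1750 datasheet)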
return { 'light': ((data[1] + (256 * data[0])) / 1.2) }
client = MongoClient('mongodb://localhost:27017/')
db = client['sensor']
collection = db['data']
document = { "date": datetime.utcnow() }
# Read analog inputs from MCP3008
document.update(read_mcp())
time.sleep(DEFAULT_READ_TIMEOUT)
# Read 1-wire temperature sensor
document.update(read_1wire_temperature())
time.sleep(DEFAULT_READ_TIMEOUT)
# Read DHT22 humidity/temperature sensor
document.update(read_dht22(DHT22_PIN))
time.sleep(DEFAULT_READ_TIMEOUT)
# Read BH1750 i2c light sensor
document.update(read_i2c_light())
time.sleep(DEFAULT_READ_TIMEOUT)
# Save all data
collection.insert(document)
client.close()
``` |
{
"source": "jotajr/alfred2-loterias-workflow",
"score": 3
} |
#### File: alfred2-loterias-workflow/src/loterias.py
```python
import urllib2
import json
import io
import os
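# Each *_request() downloads the latest draw as JSON and caches it in a local file that the other helpers read.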
def mega_request():
response = urllib2.urlopen('http://developers.agenciaideias.com.br/loterias/megasena/json')
data = json.load(response)
mega_file = "mega.json"
try:
os.remove(mega_file)
except OSError:
pass
with io.open(mega_file, 'w', encoding='utf-8') as f:
f.write(unicode(json.dumps(data, ensure_ascii=False)))
def mega_concurso():
mega_request()
with open('mega.json') as data_file:
data = json.load(data_file)
result = "Concurso: " + data['concurso']['numero'] + " - Data: " + data['concurso']['data']
print(result)
def mega_num_sorteados():
with open('mega.json') as data_file:
data = json.load(data_file)
sorteados = ""
for item in data['concurso']['numeros_sorteados']:
sorteados = sorteados + str(item) + " "
print(sorteados)
def mega_ganhadores():
with open('mega.json') as data_file:
data = json.load(data_file)
result = "Ganhadores: " + data['concurso']['premiacao']['sena']['ganhadores']
print(result)
def mega_acumulada():
with open('mega.json') as data_file:
data = json.load(data_file)
result = "Acumulado: R$ " + data['concurso']['valor_acumulado']
print(result)
def quina_request():
response = urllib2.urlopen('http://developers.agenciaideias.com.br/loterias/quina/json')
data = json.load(response)
quina_file = "quina.json"
try:
os.remove(quina_file)
except OSError:
pass
with io.open(quina_file, 'w', encoding='utf-8') as f:
f.write(unicode(json.dumps(data, ensure_ascii=False)))
def quina_concurso():
quina_request()
with open('quina.json') as data_file:
data = json.load(data_file)
result = "Concurso: " + data['concurso']['numero'] + " - Data: " + data['concurso']['data']
print(result)
def quina_num_sorteados():
with open('quina.json') as data_file:
data = json.load(data_file)
sorteados = ""
for item in data['concurso']['numeros_sorteados']:
sorteados = sorteados + str(item) + " "
print(sorteados)
def quina_ganhadores():
with open('quina.json') as data_file:
data = json.load(data_file)
result = "Ganhadores: " + data['concurso']['premiacao']['quina']['ganhadores']
print(result)
def quina_acumulada():
    with open('quina.json') as data_file:
data = json.load(data_file)
result = "Acumulado: R$ " + data['concurso']['valor_acumulado']
print(result)
def federal_request():
response = urllib2.urlopen('http://developers.agenciaideias.com.br/loterias/loteriafederal/json')
data = json.load(response)
fed_file = "federal.json"
try:
os.remove(fed_file)
except OSError:
pass
with io.open(fed_file, 'w', encoding='utf-8') as f:
f.write(unicode(json.dumps(data, ensure_ascii=False)))
def federal_concurso():
federal_request()
with open('federal.json') as data_file:
data = json.load(data_file)
result = "Concurso: " + data['concurso']['numero'] + " - Data: " + data['concurso']['data']
print(result)
def federal_premios():
with open('federal.json') as data_file:
data = json.load(data_file)
result = " 1 - " + data['concurso']['premiacao']['premio_1']['bilhete'] + " - "
result += "2 - " + data['concurso']['premiacao']['premio_2']['bilhete'] + " - "
result += "3 - " + data['concurso']['premiacao']['premio_3']['bilhete'] + " - "
result += "4 - " + data['concurso']['premiacao']['premio_4']['bilhete'] + " - "
result += "5 - " + data['concurso']['premiacao']['premio_5']['bilhete']
print(result)
``` |
{
"source": "jotajunior/scrapers",
"score": 3
} |
#### File: bfhl/src/bfhl.py
```python
import requests
class BFHL:
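    """Minimal client for the api.bfhstats.com player-stats API (Battlefield Hardline)."""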
platform = None
default_output = 'json'
platforms = ['pc', 'xbox', 'ps3', 'xone', 'ps4']
outputs = ['json', 'jsonp', 'js', 'lines']
base_url = 'http://api.bfhstats.com/api'
def set_platform(self, platform):
self.platform = platform.replace(' ', '').lower()
def check_platform(self, platform):
if not platform or platform not in self.platforms:
raise Exception('BFHL: Invalid platform.')
def check_output(self, output):
if not output or output not in self.outputs:
raise Exception('BFHL: Invalid output.')
def __init__(self, platform='pc'):
self.set_platform(platform)
self.check_platform(self.platform)
def get_basic_parameters(self, platform=None, output=None):
platform = platform if platform else self.platform
output = output if output else 'json'
return {'plat': platform, 'output': output}
def get_player_by_name(self, name, output=None, platform=None):
if not output:
output = self.default_output
if not platform:
platform = self.platform
self.check_platform(platform)
self.check_output(output)
data = self.get_basic_parameters(platform, output)
data['name'] = name
url = self.base_url + '/playerInfo'
return requests.get(url, params=data).text
def user_exists(self, name, platform=None):
if not platform:
platform = self.platform
result = self.get_player_by_name(name, 'json', platform)
return result != '{"error":"notFound"}'
class BF4(BFHL):
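    """The same client pointed at api.bf4stats.com (Battlefield 4)."""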
platform = None
platforms = ['pc', 'xbox', 'ps3', 'xone', 'ps4']
outputs = ['json', 'jsonp', 'js', 'lines']
base_url = 'http://api.bf4stats.com/api'
def __init__(self, platform='pc'):
super(BF4, self).__init__(platform)
```
#### File: wow/src/wow.py
```python
import requests
import lxml.html
class Wow:
base_url = 'http://{0}.battle.net/wow/en'
# 0 => world, 1 => name
character_url = '/character/{0}/{1}/simple'
achievement_url = '/character/{0}/{1}/achievement'
statistic_url = '/character/{0}/{1}/statistic'
ACCEPTED_REGIONS = ['eu', 'us']
achievements = None
statistics = None
character_text = None
def __init__(self, region='us'):
region = region.lower()
if region not in self.ACCEPTED_REGIONS:
raise Exception('Invalid region.')
self.region = region
self.base_url = self.base_url.format(region)
def is_404(self, text):
return text.find('<h3>Character Not Available</h3>') != -1
def user_exists(self, name, world):
if self.achievement_text or self.character_text:
return True
url = self.base_url
url += self.character_url.format(world, name)
text = requests.get(url).text
if self.is_404(text):
return False
else:
self.character_text = text
return True
def _parse_achievement_string(self, achv):
achv = achv.replace('\t', '')\
.replace('\n', '')\
.replace('\xa0', '')\
.replace(' ', '')
achv = (achv.split('(')[0]).split('/')
achv[0] = int(achv[0])
achv[1] = int(achv[1])
return achv
def _get_total_achievement(self, page):
query = '//div[@class="bar-contents"]/strong/text()'
r = page.xpath(query)
if r:
return self._parse_achievement_string(r[0])
return False
def _get_strength_achievement(self, page):
query = '//div[@class="profile-progress bar-contents border-4"]/text()'
r = page.xpath(query)
if r:
return int(r[0])
return False
def _get_other_achievements(self, page):
query = '//div[@class="bar-contents"]/text()'
r = page.xpath(query)[2:]
keys = ['general', 'quests', 'exploration', 'pvp', 'dungeons'\
,'professions', 'reputation', 'scenarios', 'events'\
,'pet_battles', 'collections','garrisons', 'legacy']
result = {}
i = 0
for item in r:
result[keys[i]] = self._parse_achievement_string(item)
i += 1
return result
def _parse_achievements(self, text):
page = lxml.html.fromstring(text)
base = self._get_other_achievements(page)
total = self._get_total_achievement(page)
if total:
base['total'] = total
strength = self._get_strength_achievement(page)
if strength:
base['strength'] = strength
self.achievements = base
return base
def _get_statistics_keys(self, page):
query = '//li[@id="cat-summary"]/dl/dt/text()'
r = page.xpath(query)
return r
def _get_statistics_values(self, page):
query = '//li[@id="cat-summary" and @class="table"]/dl/dd/text()'
r = page.xpath(query)
return [int(i.replace('\t', '')\
.replace('\n', '')\
.replace(' ', '')\
.replace(',', '')) for i in r]
def _parse_statistics(self, text):
page = lxml.html.fromstring(text)
keys = self._get_statistics_keys(page)
values = self._get_statistics_values(page)
result = {}
for i in range(len(keys)):
result[keys[i]] = values[i]
return result
def get_user_achievements(self, name, world):
if self.achievements:
return self.achievements
url = self.base_url
url += self.achievement_url.format(world, name)
text = requests.get(url).text
# case user doesn't exist
if self.is_404(text):
return False
else:
self.achievement_text = text
self.achievements = self._parse_achievements(text)
return self.achievements
def get_user_statistics(self, name, world):
if self.statistics:
return self.statistics
url = self.base_url
url += self.statistic_url.format(world, name)
text = requests.get(url).text
if self.is_404(text):
return False
else:
self.statistic_text = text
self.statistics = self._parse_statistics(text)
return self.statistics
def get_user_info(self, name, world):
result = {}
result['stats'] = self.get_user_statistics(name, world)
if not result['stats']:
return False
result['achievements'] = self.get_user_achievements(name, world)
return result
``` |
{
"source": "jotalanusse/minety",
"score": 3
} |
#### File: archived/common/copy_file.py
```python
import shutil
# Copy a region file to the output folder
def copy_file(region_file, output_region_directory):
# print(f'Copying [{region_file}] to output directory')
try:
shutil.copy2(region_file, output_region_directory) # Copy the file
except:
print(f'Error while copying file [{region_file}] to output directory')
```
#### File: archived/common/get_worlds.py
```python
def get_worlds():
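    # Every world entry below is kept commented out; uncomment a block and adjust the paths before running a scan.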
return [
# ##############################################################################################################################
# ######################################################### [SEASON 2] #########################################################
# ##############################################################################################################################
# {
# 'name': 'BCC Season 2 - overworld', # Name of the world
# 'region_files_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/servers/season-2/original/Lelo_world_2.0/region/', # Directory of the world region files to be scanned
# 'scans': [
# {
# 'name': '0.05 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 0.05, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-2/overworld/seconds-005/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-005.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-2/overworld/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '5 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 5, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-2/overworld/seconds-5/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-5.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-2/overworld/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '30 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 30, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-2/overworld/seconds-30/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-30.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-2/overworld/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# }
# ]
# },
# {
# 'name': 'BCC Season 2 - nether', # Name of the world
# 'region_files_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/servers/season-2/original/Lelo_world_2.0/DIM-1/region/', # Directory of the world region files to be scanned
# 'scans': [
# {
# 'name': '0.05 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 0.05, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-2/the-nether/seconds-005/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-005.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-2/the-nether/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '5 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 5, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-2/the-nether/seconds-5/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-5.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-2/the-nether/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '30 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 30, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-2/the-nether/seconds-30/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-30.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-2/the-nether/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# }
# ]
# },
# ##############################################################################################################################
# ######################################################### [SEASON 3] #########################################################
# ##############################################################################################################################
# {
# 'name': 'BCC Season 3 - overworld', # Name of the world
# 'region_files_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/servers/season-3/original/BCC Server/region/', # Directory of the world region files to be scanned
# 'scans': [
# {
# 'name': '0.05 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 0.05, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/overworld/seconds-005/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-005.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/overworld/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '5 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 5, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/overworld/seconds-5/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-5.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/overworld/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '30 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 30, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/overworld/seconds-30/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-30.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/overworld/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# }
# ]
# },
# {
# 'name': 'BCC Season 3 - nether', # Name of the world
# 'region_files_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/servers/season-3/original/BCC Server/DIM-1/region/', # Directory of the world region files to be scanned
# 'scans': [
# {
# 'name': '0.05 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 0.05, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/the-nether/seconds-005/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-005.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/the-nether/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '5 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 5, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/the-nether/seconds-5/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-5.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/the-nether/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '30 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 30, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/the-nether/seconds-30/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-30.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/the-nether/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# }
# ]
# },
# {
# 'name': 'BCC Season 3 - end', # Name of the world
# 'region_files_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/servers/season-3/original/BCC Server/DIM1/region/', # Directory of the world region files to be scanned
# 'scans': [
# {
# 'name': '0.05 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 0.05, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/the-end/seconds-005/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-005.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/the-end/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '5 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 5, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/the-end/seconds-5/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-5.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/the-end/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '30 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 30, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/the-end/seconds-30/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-30.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-3/the-end/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# }
# ]
# },
# ##############################################################################################################################
# ######################################################### [SEASON 4] #########################################################
# ##############################################################################################################################
# {
# 'name': 'BCC Season 4 - overworld', # Name of the world
# 'region_files_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/servers/season-4/original/BCC Server/region/', # Directory of the world region files to be scanned
# 'scans': [
# {
# 'name': '0.05 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 0.05, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/overworld/seconds-005/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-005.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/overworld/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '5 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 5, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/overworld/seconds-5/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-5.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/overworld/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '20 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 20, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/overworld/seconds-20/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-20.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/overworld/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '25 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 25, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/overworld/seconds-25/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-25.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/overworld/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '30 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 30, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/overworld/seconds-30/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-30.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/overworld/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# }
# ]
# },
# {
# 'name': 'BCC Season 4 - nether', # Name of the world
# 'region_files_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/servers/season-4/original/BCC Server_nether/DIM-1/region', # Directory of the world region files to be scanned
# 'scans': [
# {
# 'name': '0.05 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 0.05, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/the-nether/seconds-005/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-005.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/the-nether/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '5 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 5, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/the-nether/seconds-5/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-5.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/the-nether/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '30 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 30, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/the-nether/seconds-30/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-30.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/the-nether/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# }
# ]
# },
# {
# 'name': 'BCC Season 4 - end', # Name of the world
# 'region_files_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/servers/season-4/original/BCC Server_the_end/DIM1/region', # Directory of the world region files to be scanned
# 'scans': [
# {
# 'name': '0.05 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 0.05, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/the-end/seconds-005/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-005.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/the-end/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '5 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 5, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/the-end/seconds-5/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-5.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/the-end/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '30 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 30, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/the-end/seconds-30/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-30.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-4/the-end/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# }
# ]
# },
# ##############################################################################################################################
# ######################################################### [SEASON 5] #########################################################
# ##############################################################################################################################
# {
# 'name': 'BCC Season 5 - overworld', # Name of the world
# 'region_files_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/servers/season-5/original/world/region/', # Directory of the world region files to be scanned
# 'scans': [
# {
# 'name': '0.05 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 0.05, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-5/overworld/seconds-005/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-005.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-5/overworld/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '5 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 5, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-5/overworld/seconds-5/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-5.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-5/overworld/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '30 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 30, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-5/overworld/seconds-30/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-30.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-5/overworld/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# }
# ]
# },
# {
# 'name': 'BCC Season 5 - nether', # Name of the world
# 'region_files_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/servers/season-5/original/world/DIM-1/region', # Directory of the world region files to be scanned
# 'scans': [
# {
# 'name': '0.05 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 0.05, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-5/the-nether/seconds-005/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-005.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-5/the-nether/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '5 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 5, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-5/the-nether/seconds-5/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-5.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-5/the-nether/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# },
# {
# 'name': '30 seconds scan', # Name of the scan
# 'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
# 'inhabited_ticks_threshold': 20 * 30, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
# 'region_output': {
# 'enabled': True, # When disabled, no files will be copied to the output
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-5/the-nether/seconds-30/', # Output directory for the processed region files
# },
# 'map': {
# 'enabled': True, # Is a map going to be graphed or not
# 'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
# 'file_name': 'heatmap-30.png', # Filename of the map file
# 'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-5/the-nether/heatmaps/', # Output directory for the generated heatmaps
# 'colors': {
# 'default': [0, 0, 0], # Default color for the map
# 'regions': {
# 'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
# 'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
# }
# },
# 'realtime_graph': {
# 'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
# 'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
# 'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
# }
# },
# }
# ]
# },
##############################################################################################################################
################################################### [SEASON 6 - EPISODE 1] ###################################################
##############################################################################################################################
{
'name': 'BCC Season 6 [Episode 1] - overworld', # Name of the world
'region_files_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/season 6/bcc/region/', # Directory of the world region files to be scanned
'scans': [
{
'name': '0.05 seconds scan', # Name of the scan
'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
'inhabited_ticks_threshold': 20 * 0.05, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
'region_output': {
'enabled': True, # When disabled, no files will be copied to the output
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/overworld/seconds-005/', # Output directory for the processed region files
},
'map': {
'enabled': True, # Is a map going to be graphed or not
'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
'file_name': 'heatmap-005.png', # Filename of the map file
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/overworld/heatmaps/', # Output directory for the generated heatmaps
'colors': {
'default': [0, 0, 0], # Default color for the map
'regions': {
'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
}
},
'realtime_graph': {
'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
}
},
},
{
'name': '5 seconds scan', # Name of the scan
'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
'inhabited_ticks_threshold': 20 * 5, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
'region_output': {
'enabled': True, # When disabled, no files will be copied to the output
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/overworld/seconds-5/', # Output directory for the processed region files
},
'map': {
'enabled': True, # Is a map going to be graphed or not
'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
'file_name': 'heatmap-5.png', # Filename of the map file
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/overworld/heatmaps/', # Output directory for the generated heatmaps
'colors': {
'default': [0, 0, 0], # Default color for the map
'regions': {
'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
}
},
'realtime_graph': {
'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
}
},
},
{
'name': '30 seconds scan', # Name of the scan
'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
'inhabited_ticks_threshold': 20 * 30, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
'region_output': {
'enabled': True, # When disabled, no files will be copied to the output
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/overworld/seconds-30/', # Output directory for the processed region files
},
'map': {
'enabled': True, # Is a map going to be graphed or not
'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
'file_name': 'heatmap-30.png', # Filename of the map file
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/overworld/heatmaps/', # Output directory for the generated heatmaps
'colors': {
'default': [0, 0, 0], # Default color for the map
'regions': {
'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
}
},
'realtime_graph': {
'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
}
},
}
]
},
{
'name': 'BCC Season 6 [Episode 1] - nether', # Name of the world
'region_files_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/season 6/bcc_nether/DIM-1/region', # Directory of the world region files to be scanned
'scans': [
{
'name': '0.05 seconds scan', # Name of the scan
'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
'inhabited_ticks_threshold': 20 * 0.05, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
'region_output': {
'enabled': True, # When disabled, no files will be copied to the output
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/the-nether/seconds-005/', # Output directory for the processed region files
},
'map': {
'enabled': True, # Is a map going to be graphed or not
'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
'file_name': 'heatmap-005.png', # Filename of the map file
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/the-nether/heatmaps/', # Output directory for the generated heatmaps
'colors': {
'default': [0, 0, 0], # Default color for the map
'regions': {
'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
}
},
'realtime_graph': {
'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
}
},
},
{
'name': '5 seconds scan', # Name of the scan
'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
'inhabited_ticks_threshold': 20 * 5, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
'region_output': {
'enabled': True, # When disabled, no files will be copied to the output
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/the-nether/seconds-5/', # Output directory for the processed region files
},
'map': {
'enabled': True, # Is a map going to be graphed or not
'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
'file_name': 'heatmap-5.png', # Filename of the map file
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/the-nether/heatmaps/', # Output directory for the generated heatmaps
'colors': {
'default': [0, 0, 0], # Default color for the map
'regions': {
'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
}
},
'realtime_graph': {
'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
}
},
},
{
'name': '30 seconds scan', # Name of the scan
'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
'inhabited_ticks_threshold': 20 * 30, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
'region_output': {
'enabled': True, # When disabled, no files will be copied to the output
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/the-nether/seconds-30/', # Output directory for the processed region files
},
'map': {
'enabled': True, # Is a map going to be graphed or not
'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
'file_name': 'heatmap-30.png', # Filename of the map file
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/the-nether/heatmaps/', # Output directory for the generated heatmaps
'colors': {
'default': [0, 0, 0], # Default color for the map
'regions': {
'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
}
},
'realtime_graph': {
'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
}
},
}
]
},
{
'name': 'BCC Season 6 [Episode 1] - end', # Name of the world
'region_files_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/season 6/bcc_the_end/DIM1/region', # Directory of the world region files to be scanned
'scans': [
{
'name': '0.05 seconds scan', # Name of the scan
'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
'inhabited_ticks_threshold': 20 * 0.05, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
'region_output': {
'enabled': True, # When disabled, no files will be copied to the output
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/the-end/seconds-005/', # Output directory for the processed region files
},
'map': {
'enabled': True, # Is a map going to be graphed or not
'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
'file_name': 'heatmap-005.png', # Filename of the map file
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/the-end/heatmaps/', # Output directory for the generated heatmaps
'colors': {
'default': [0, 0, 0], # Default color for the map
'regions': {
'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
}
},
'realtime_graph': {
'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
}
},
},
{
'name': '5 seconds scan', # Name of the scan
'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
'inhabited_ticks_threshold': 20 * 5, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
'region_output': {
'enabled': True, # When disabled, no files will be copied to the output
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/the-end/seconds-5/', # Output directory for the processed region files
},
'map': {
'enabled': True, # Is a map going to be graphed or not
'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
'file_name': 'heatmap-5.png', # Filename of the map file
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/the-end/heatmaps/', # Output directory for the generated heatmaps
'colors': {
'default': [0, 0, 0], # Default color for the map
'regions': {
'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
}
},
'realtime_graph': {
'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
}
},
},
{
'name': '30 seconds scan', # Name of the scan
'scan_all_chunks': True, # When enabled, the script will check all the chunks even if the region is already marked as important (the only good thing about this is you get to see an awesome world map)
'inhabited_ticks_threshold': 20 * 30, # How many ticks a chunk has to be inhabited for so that it is considered as an important chunk
'region_output': {
'enabled': True, # When disabled, no files will be copied to the output
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/the-end/seconds-30/', # Output directory for the processed region files
},
'map': {
'enabled': True, # Is a map going to be graphed or not
'scale_factor': 1.0, # How large the map image output will be (if value is 1.0 one chunk = one pixel)
'file_name': 'heatmap-30.png', # Filename of the map file
'output_directory': '/media/jotalanusse/windows-drive/Servers/Minecraft/backup/clean/season-6/the-end/heatmaps/', # Output directory for the generated heatmaps
'colors': {
'default': [0, 0, 0], # Default color for the map
'regions': {
'inhabited_colormap': 'jet', # Colormap used when a region is inhabited
'non_inhabited_colormap': 'PRGn' # Colormap used when a region is not inhabited
}
},
'realtime_graph': {
'enabled': False, # When enabled, the script will show a real time window showing the map being graphed once the region scanning process is finished
'max_size': 1000, # If you are using a small screen you can modify this value to be able to fit the whole map graph in your screen
'update_interval': 1 # How many chunk renders to skip before displaying the updated graph again
}
},
}
]
},
]
```
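A quick note on the `inhabited_ticks_threshold` values above: Minecraft advances 20 game ticks per second, so each threshold is simply a duration in seconds converted to ticks. A tiny illustration (the `TICKS_PER_SECOND` name is ours, not from the config):

```python
# Minecraft runs at 20 game ticks per second, so the scan thresholds above
# are just durations in seconds expressed as ticks.
TICKS_PER_SECOND = 20  # illustrative constant, not defined in the config file

assert TICKS_PER_SECOND * 0.05 == 1.0  # "0.05 seconds scan" -> 1 tick
assert TICKS_PER_SECOND * 5 == 100     # "5 seconds scan"   -> 100 ticks
assert TICKS_PER_SECOND * 30 == 600    # "30 seconds scan"  -> 600 ticks
```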
#### File: archived/common/process_world.py
```python
from multiprocessing import Pool
from .get_region_files import get_region_files
from .parse_region import parse_region
from .scan_region import scan_region
from .copy_file import copy_file
from .graph_map import graph_map
# Process a world and its region files
def process_world(world):
    print(f'Processing world [{world["name"]}]')

    region_files = get_region_files(world['region_files_directory'])  # Get all region files from the region files directory for this world

    regions_pool_instance = Pool()  # Start a new multiprocessing pool to parse regions
    regions_pool = regions_pool_instance.map_async(parse_region, region_files)  # Use multiprocessing to parse all the region files and return a list of regions
    regions = regions_pool.get()  # Wait for the pool to finish and retrieve the regions
    regions_pool_instance.close()  # Close the pool

    for config in world['scans']:  # For each configuration, process the regions
        print(f'Starting scan [{config["name"]}]')

        region_scans = []  # Store all the region scans
        for region in regions:  # Process each region
            region_scan = scan_region(region, config['inhabited_ticks_threshold'], config['scan_all_chunks'])  # Scan the region with the provided configuration

            if region_scan['is_inhabited']:  # Here we check if the region is inhabited or not
                if config['region_output']['enabled']:  # Check if the region output is enabled
                    copy_file(region_scan['region']['file'], config['region_output']['output_directory'])  # Copy the region to the output directory

            region_scans.append(region_scan)  # Add the region scan to the list (inhabited or not, so the map can color both)

        print(f'Scan [{config["name"]}] completed')

        if config['map']['enabled']:  # Only render the map if it is enabled in the configuration
            map_graph = graph_map(region_scans, config['map'])  # Graph the map and return the image
``` |
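For context, a minimal driver sketch showing how `process_world` might be invoked over a configuration list like the one above; the import path, the `WORLDS` name, and the `__main__` guard are assumptions for illustration, not part of the original repository:

```python
# Hypothetical entry point (assumed, not in the original repo): iterate over the
# configured worlds and process each one with process_world.
from common.process_world import process_world  # assumed package layout

WORLDS = [
    # ... world configuration dicts exactly like the ones shown above ...
]

if __name__ == '__main__':
    for world in WORLDS:
        process_world(world)  # scans regions, copies important files, renders heatmaps
```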
{
"source": "jotamaggids/flask_meli_exercise",
"score": 3
} |
#### File: flask_meli_exercise/api/app.py
```python
from flask import Flask, request, jsonify, render_template
from mutant import Mutant
from stats import Stats
import sys
import json
import os
app = Flask(__name__)
@app.route('/', methods=['GET'])
def home():
return """<h1>Distant Reading Archive</h1>
<p>A prototype API for distant reading of science fiction novels</p>
"""
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
@app.route("/stats/", methods=['GET'])
def stats_mutant():
stats_classes = Stats()
    # DNA verification stats, e.g. {"count_mutant_dna": 40, "count_human_dna": 100, "ratio": 0.4}
result = stats_classes.return_dna_list()
if result['result']:
dna_status_human = int(result['dna_status_human'])
dna_status_mutant = int(result['dna_status_mutant'])
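        # NOTE: assumes at least one human DNA record; if dna_status_human
        # were 0, the ratio computed below would raise ZeroDivisionError.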
ratio = round(float(dna_status_mutant/dna_status_human), 2)
dict_response = {"count_mutant_dna": dna_status_mutant, "count_human_dna": dna_status_human, "ratio": ratio}
response = app.response_class(
status=200,
mimetype='application/json',
response=json.dumps(dict_response)
)
return response
else:
response = app.response_class(
status=403,
mimetype='application/json',
response=json.dumps(False)
)
return response
@app.route("/mutant/", methods=['POST'])
def api_mutant():
try:
content = request.get_json()
print(content, file=sys.stderr)
adn = content.get('dna')
mutant_classes = Mutant(adn)
except Exception as e:
print(e)
result = False
        response = json.dumps({'error': result}), 400, {'Content-Type': 'application/json'}
return response
    # Validates that the DNA chain has the correct length and characters:
    # returns True when the format is correct and False when it is not
result = mutant_classes.validate_adn_chain()
if result:
        # Check whether this DNA chain is already saved in the database
result = mutant_classes.validate_exist_dna()
        # If it already exists, return its stored status; otherwise create it
if result['status'] == 0:
response = app.response_class(
status=403,
mimetype='application/json',
response=json.dumps(False)
)
return response
elif result['status'] == 1:
response = app.response_class(
status=200,
mimetype='application/json',
response=json.dumps(True)
)
return response
else:
result = mutant_classes.create_dna_chain()
if result:
mutant_classes.save_dna(dna_status=1)
response = app.response_class(
status=200,
mimetype='application/json',
response=json.dumps(True)
)
return response
else:
mutant_classes.save_dna(dna_status=0)
response = app.response_class(
status=403,
mimetype='application/json',
response=json.dumps(False)
)
return response
else:
        response = json.dumps({'error': result}), 403, {'Content-Type': 'application/json'}
return response
if __name__ == '__main__':
if os.environ.get('PORT') is not None:
app.run(debug=True, host='0.0.0.0', port=os.environ.get('PORT'))
else:
app.run(debug=True, host='0.0.0.0')
```
#### File: flask_meli_exercise/api/stats.py
```python
import mysql.connector
import sys
class Stats():
def return_dna_list(self):
config = {
'user': 'root',
'password': '<PASSWORD>',
'host': 'mysql',
'port': '3306',
'database': 'db_dna'
}
connection = mysql.connector.connect(**config)
cursor = connection.cursor(buffered=True, dictionary=True)
query = 'SELECT SUM(dna_status = 1) AS dna_status_mutant, SUM(dna_status = 0) AS dna_status_human ' \
'FROM db_dna.dna_data WHERE dna_status = 0 OR dna_status = 1'
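        # SUM(condition) counts matching rows: MySQL evaluates the boolean to
        # 0/1, so a single query returns both the mutant and human counts.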
        cursor.execute(query)  # execute() returns None in mysql.connector; rowcount is checked below
if cursor.rowcount == 1:
records = cursor.fetchall()
if records[0]['dna_status_human'] is None:
cursor.close()
connection.close()
return {'result': False, 'dna_status_human': 0, 'dna_status_mutant': 0}
else:
cursor.close()
connection.close()
return {'result': True, 'dna_status_human': records[0]['dna_status_human'], 'dna_status_mutant': records[0]['dna_status_mutant']}
else:
cursor.close()
connection.close()
return {'result': False, 'dna_status_human': 0, 'dna_status_mutant': 0}
``` |
{
"source": "jotamjr/ctfs",
"score": 2
} |
#### File: pwn/AAAA/exploit.py
```python
from pwn import *
import sys
script='''
break *0x00400607
break *0x00400631
break *0x0040064c
break *0x0040064d
'''
def hunt():
pause()
#r.sendline(cyclic(128,n=8))
buf = "\x41" * 88 + p64(0x004005f6)
r.sendline(buf)
r.interactive()
if __name__ == '__main__':
context.clear(arch='amd64')
context.terminal = ['tmux', 'splitw', '-v']
binary = "./buff"
log.info("For remote: %s remote" % sys.argv[0])
host, port = "pwn.chal.csaw.io 10101".split()
if len(sys.argv) > 1:
r = remote(host, int(port))
hunt()
else:
r = gdb.debug(binary,gdbscript=script, env={"LD_PRELOAD":""})
hunt()
``` |
{
"source": "jotapem/AlphaPose",
"score": 2
} |
#### File: jotapem/AlphaPose/meerkat_demo.py
```python
import os
import time
import torch
from torch.autograd import Variable
import torch.nn.functional as F
import torchvision.transforms as transforms
import torch.nn as nn
import torch.utils.data
import numpy as np
from pPose_nms import pose_nms, write_json
from SPPE.src.utils.img import im_to_torch
from yolo.darknet import Darknet
from yolo.preprocess import prep_frame
from yolo.util import dynamic_write_results
from dataloader import crop_from_dets
from dataloader import Mscoco
from SPPE.src.main_fast_inference import InferenNet, InferenNet_fast
from SPPE.src.utils.eval import getMultiPeakPrediction, getPrediction
from matching import candidate_reselect as matching
from fn import vis_frame_fast
import cv2
from opt import opt
def main():
''' arg parsing '''
args = vars(opt)
print(args)
if not os.path.exists(args['outputpath']):
os.mkdir(args['outputpath'])
videofile = args['video']
if not len(videofile):
raise IOError('Error: must contain --video')
inp_dim = int(args['inp_dim'])
confidence = args['confidence']
num_classes = args['num_classes']
nms_thresh = args['nms_thesh']
''' load input video stream '''
cap = cv2.VideoCapture(videofile)
# read_frames = 0
# while True:
# ret, frame = cap.read()
# if ret:
# cv2.imwrite('frame.jpg', frame)
# read_frames += 1
# else:
# break
# print("Read %d frames in total" % (read_frames,))
''' load detection model '''
det_model = Darknet("yolo/cfg/yolov3-spp.cfg")
det_model.load_weights("models/yolo/yolov3-spp.weights")
det_model.net_info['height'] = inp_dim
det_model.cuda()
det_model.eval()
batch_size = 1
''' load pose model '''
pose_dataset = Mscoco()
if args['fast_inference']:
pose_model = InferenNet_fast(4 * 1 + 1, pose_dataset)
else:
pose_model = InferenNet(4 * 1 + 1, pose_dataset)
pose_model.cuda()
pose_model.eval()
''' iterate over stream '''
frame_idx = 0
while True:
ret, frame = cap.read()
if not ret:
break
img, orig_img, dims = prep_frame(frame, inp_dim)
dims = torch.FloatTensor([dims]).repeat(1, 2)
with torch.no_grad():
''' human detection '''
img = img.cuda()
prediction = det_model(img, CUDA=True)
dets = dynamic_write_results(prediction, confidence, num_classes, nms=True, nms_conf=nms_thresh)
if isinstance(dets, int) or dets.shape[0] == 0:
continue
dets = dets.cpu()
dims = torch.index_select(dims, 0, dets[:, 0].long())
scaling_factor = torch.min(inp_dim / dims, 1)[0].view(-1, 1)
# coordinate transfer
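            # Boxes were predicted on the letterboxed inp_dim x inp_dim input;
            # subtract the padding offset and divide by the scale factor to map
            # them back to original-frame pixel coordinates.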
dets[:, [1, 3]] -= (inp_dim - scaling_factor * dims[:, 0].view(-1, 1)) / 2
dets[:, [2, 4]] -= (inp_dim - scaling_factor * dims[:, 1].view(-1, 1)) / 2
dets[:, 1:5] /= scaling_factor
for j in range(dets.shape[0]):
dets[j, [1, 3]] = torch.clamp(dets[j, [1, 3]], 0.0, dims[j, 0])
dets[j, [2, 4]] = torch.clamp(dets[j, [2, 4]], 0.0, dims[j, 1])
boxes = dets[:, 1:5]
scores = dets[:, 5:6]
#print(dets[:,0]) # that's the batch index
cv2.imwrite(os.path.join(args['outputpath'], 'frame_%d_input.jpg'%frame_idx), orig_img)
dets_img = orig_img.copy()
for box in boxes:
dets_img = cv2.rectangle(dets_img, tuple(box[:2]), tuple(box[2:]), (255, 255, 255))
cv2.imwrite(os.path.join(args['outputpath'], 'frame_%d_dets.jpg'%frame_idx), dets_img)
if isinstance(boxes, int) or boxes.shape[0] == 0:
continue
inps = torch.zeros(boxes.size(0), 3, opt.inputResH, opt.inputResW)
pt1 = torch.zeros(boxes.size(0), 2)
pt2 = torch.zeros(boxes.size(0), 2)
# self.Q.put((orig_img[k], im_name[k], boxes, scores[dets[:,0]==k], inps, pt1, pt2))
# end of DetectionLoader.update
# start of DetectionProcessor.update
# (orig_img, im_name, boxes, scores, inps, pt1, pt2) = self.detectionLoader.read()
if boxes is None or boxes.nelement() == 0:
continue
inp = im_to_torch(cv2.cvtColor(orig_img, cv2.COLOR_BGR2RGB))
inps, pt1, pt2 = crop_from_dets(inp, boxes, inps, pt1, pt2)
# self.Q.put((inps, orig_img, im_name, boxes, scores, pt1, pt2))
# end of DetectionProcessor.update
            # beginning of video_demo.py main loop (more specifically line 75)
''' pose estimation '''
hm = pose_model(inps.cuda())
hm = hm.cpu()#.data()
#print(frame_idx, hm)
# end of video_demo.py main loop
            # beginning of DataWriter.save (which is redirected to DataWriter.update)
if args['matching']:
preds = getMultiPeakPrediction(
hm, pt1.numpy(), pt2.numpy(), args['inputResH'], args['inputResW'], args['outputResH'], args['outputResW'])
result = matching(boxes, scores.numpy(), preds)
else:
preds_hm, preds_img, preds_scores = getPrediction(
hm, pt1, pt2, args['inputResH'], args['inputResW'], args['outputResH'], args['outputResW'])
result = pose_nms(
boxes, scores, preds_img, preds_scores)
print(len(result))
frame_with_joints = vis_frame_fast(orig_img, {'imgname': "%d" % frame_idx, 'result': result})
cv2.imwrite(os.path.join(args['outputpath'], 'frame_%d_joints.jpg'%frame_idx), frame_with_joints)
# TODO: find key points and see if they match `video_demo.py` JSON output (apparently they do not, check how JSON is written)
for r in result:
print(frame_idx, r['keypoints'])
frame_idx += 1
#exit(-1)
if __name__ == '__main__':
    main()
``` |
{
"source": "jotaporras/ts_mcfrl",
"score": 2
} |
#### File: src/agents/concrete_agents.py
```python
from __future__ import annotations
import logging
import numpy as np
# from network import physical_network
# from shipping_allocation.envs.network_flow_env import (
# ShippingFacilityEnvironment,
# )
# import experiments_seminar_2.agents.optimizer_agents
from agents import optimizer_agents, pytorch_agents
from agents.Agent import Agent
# Environment and agent
DEBUG = False
logger = logging.getLogger(__name__)
def get_agent(env, environment_config, hparams, agent_name: str):
logger.info("NN agents are using eps_start only.")
num_customers = environment_config["num_customers"]
num_dcs = environment_config["num_dcs"]
num_commodities = environment_config["num_commodities"]
epsilon = hparams["eps_start"]
if agent_name == "random":
return RandomAgent(env)
elif agent_name == "always_zero":
return AlwaysZeroAgent(env)
elif agent_name == "best_fit":
return BestFitAgent(env)
elif agent_name == "random_valid":
return RandomValid(env)
elif agent_name == "do_nothing":
return DoNothingAgent(env)
elif agent_name == "agent_highest":
return AgentHighest(env)
elif agent_name == "lookahead":
return optimizer_agents.LookaheadAgent(env)
elif agent_name == "tree_search":
return optimizer_agents.TreeSearchAgent(env)
elif agent_name == "nn_customer_onehot":
customer_dqn = pytorch_agents.CustomerOnehotDQN(num_customers, num_dcs)
return pytorch_agents.CustomerDQNAgent(env, customer_dqn, epsilon)
elif agent_name == "nn_warehouse_mask":
customer_dqn = pytorch_agents.MaskedMLPDQN(num_dcs)
return pytorch_agents.MaskedMLPDQNAgent(env, customer_dqn, epsilon)
elif agent_name == "nn_mask_plus_customer_onehot":
mask_cust_dqn = pytorch_agents.MaskedPlusOneHotDQN(num_customers, num_dcs)
return pytorch_agents.MaskedPlusOneHotDQNAgent(env, mask_cust_dqn, epsilon)
elif agent_name == "nn_full_mlp":
full_mlp = pytorch_agents.FullMLPDQN(num_customers, num_dcs, num_commodities)
return pytorch_agents.FullMLPDQNAgent(env, full_mlp, epsilon)
elif agent_name == "nn_debug_mlp_cheat":
cheat_mlp = pytorch_agents.DebugMaskedMLPCheatDQN(num_dcs)
return pytorch_agents.MaskedMLPWithCheatDQNAgent(env, cheat_mlp, epsilon)
elif agent_name == "nn_mask_plus_consumption":
mask_cons_mlp = pytorch_agents.MaskPlusConsumptionMLP(
num_customers, num_dcs, num_commodities
)
return pytorch_agents.MaskPlusConsumptionMLPAgent(env, mask_cons_mlp, epsilon)
elif agent_name == "gnn_physnet_aggdemand":
gcn_module = pytorch_agents.PhysnetAggDemandGCN(
num_commodities, num_dcs, num_customers
)
return pytorch_agents.PhysnetAggDemandGCNAgent(env, gcn_module, epsilon)
else:
raise NotImplementedError(f"Agent {agent_name} not implemented.")
class RandomAgent(Agent):
"""The world's simplest agent!"""
def train(self, experience):
pass # do nothing
class AlwaysZeroAgent(Agent):
"""The world's dumbest agent!"""
def get_action(self, state):
return 0
def train(self, experience):
pass # do nothing
class BestFitAgent(Agent):
"""The world's most conservative agent!"""
env: ShippingFacilityEnvironment
network: physical_network
def __init__(self, env):
super().__init__(env)
self.env = env
self.network = env.physical_network
def get_action(self, state):
# TODO: make this backwards compatible again.
# inventory = state["inventory"]
inventory = state.inventory
# order = state["open"][0]
order = state.open[0]
customer = order.customer
cid = customer.node_id - self.network.num_dcs
cust_dcs = np.argwhere(self.network.dcs_per_customer_array[cid, :] > 0)[:, 0]
allowed_dc_invs = inventory[cust_dcs, :]
demand = order.demand
remaining = np.sum(allowed_dc_invs - demand, axis=1)
chosen_dc_index = np.argmax(remaining)
chosen_dc_id = cust_dcs[chosen_dc_index]
if DEBUG:
print("Bestfit chose: ", chosen_dc_id)
print("Inventories: ", inventory)
print("Allowed DCs:", cust_dcs)
if self.network.dcs_per_customer_array[cid, chosen_dc_id] == 1:
print("Chose allowed DC:", cid, chosen_dc_index)
else:
print("Chose ILLEGAL OH NO DC:", cid, chosen_dc_index)
if np.argwhere(cust_dcs == chosen_dc_id).size == 0:
print(
"BESTFIT CHOSE ILLEGAL MOVEMENT. THIS SHOULD NOT HAPPEN. Illegal for customer ",
customer,
"DC",
chosen_dc_id,
)
else:
print("Bestfit chose the legal move", chosen_dc_id)
return chosen_dc_id # todo test this.
def train(self, experience):
pass # do nothing
class RandomValid(Agent):
"""The world's least screwup random agent!"""
env: ShippingFacilityEnvironment
network: physical_network
def __init__(self, env):
super().__init__(env)
self.env = env
self.network = env.physical_network
def get_action(self, state):
inventory = state.inventory
order = state.open[0]
customer = order.customer
cid = customer.node_id - self.network.num_dcs
cust_dcs = np.argwhere(self.network.dcs_per_customer_array[cid, :] > 0)[:, 0]
chosen_dc_id = np.random.choice(cust_dcs)
if DEBUG:
logging.debug(f"RandomValid chose: {chosen_dc_id}")
logging.debug(f"Inventories: {inventory}")
logging.debug(f"Allowed DCs: {cust_dcs}")
logging.debug(
f"Chose allowed DC {chosen_dc_id} for customer {cid}: {self.network.dcs_per_customer_array[cid, chosen_dc_id] == 1}"
)
return chosen_dc_id # todo test this.
def train(self, experience):
pass # do nothing
class DoNothingAgent(Agent):
"""The world's least screwup random agent!"""
env: ShippingFacilityEnvironment
network: physical_network
def __init__(self, env):
super().__init__(env)
self.env = env
self.network = env.environment_parameters.physical_network
def get_action(self, state):
order = state["open"][0]
dc = order.shipping_point
return dc.node_id
def train(self, experience):
pass # do nothing
class AgentHighest(Agent):
"""The world's debugging agent"""
env: ShippingFacilityEnvironment
network: physical_network
def __init__(self, env):
super().__init__(env)
self.env = env
self.network = env.environment_parameters.physical_network
def get_action(self, state):
order = state["open"][0]
customer = order.customer
        cid = customer.node_id - self.network.num_dcs  # customer index, consistent with the other agents
cust_dcs = np.argwhere(self.network.dcs_per_customer_array[cid, :] > 0)[:, 0]
return cust_dcs[-1] # choose the last
def train(self, experience):
pass # do nothing
```
#### File: src/agents/pytorch_agents.py
```python
from __future__ import annotations
import functools
import logging
import numpy as np
import torch
import torch.nn as nn
import torch_geometric
import torchmetrics
from envs import shipping_assignment_state
from envs.shipping_assignment_env import ShippingAssignmentEnvironment
from torch import Tensor
from torch_geometric.nn import GCNConv, max_pool, global_max_pool
from agents.Agent import Agent
# Taken from a legacy implementation in the environment, to avoid the import. It's the location on the "state_vector" of the customer ID.
from agents.optimizer_agents import LookaheadAgent
from dqn.noisy_linear import NoisyLinear
_CUSTOMER_METADATA_NEURON = -3
class PyTorchAgent(Agent):
"""
Base PyTorch Agent class for agents in Seminario II (~Sep 13).
It's expected that an agent with this impl passes a network module that generates
Q values the size of the environment action space.
The agent is also responsible for converting the state into an input tensor. The
default is "state_vector" in the state named tuple.
Should override: get_state_vector, get_action, train.
Args:
env: training environment
net: The PyTorch network module.
"""
logger = logging.getLogger(__name__)
def __init__(
self, env: ShippingAssignmentEnvironment, net, epsilon, device="cpu"
) -> None:
super().__init__(env)
self.env = env
self.net = net
self.device = device
self.epsilon = epsilon
self.dcs_per_customer_array = self.env.physical_network.dcs_per_customer_array
self.invalid_penalty = self.env.physical_network.big_m_cost
def get_state_vector(self, state):
"""Must be implemented by the concrete agent"""
pass
def mask_q_values(self, q_values, state):
# Todo maybe move to the action space.
customer_node_id = state.open[0].customer.node_id
customer_id = self.env.physical_network.get_customer_id(customer_node_id)
customer_valid_dcs = self.dcs_per_customer_array[customer_id, :]
# Penalty logic: 1-valid dcs gives you invalid. Multiply that to amplify the 1s to penalty.
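        # Worked example: customer_valid_dcs = [1, 0, 1] with invalid_penalty =
        # 10000 gives penalty_for_invalid = [0, -10000, 0], so an invalid
        # warehouse can never win the argmax over the masked Q values.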
penalty_for_invalid = -((1 - customer_valid_dcs) * self.invalid_penalty)
masked_q_values = q_values + penalty_for_invalid
return masked_q_values
def get_action(self, state) -> int:
"""
Using the given network, decide what action to carry out
using an epsilon-greedy policy
Returns:
action
"""
# TODO Sep 13: Before copying, this code was called in a method of this class called play step, decorated with no_grad.
# Should I still no grad my stuff when this is called? Possibly, when doing the env step in the runner.
if np.random.random() < self.epsilon:
action = self.env.action_space.sample(state.open[0].customer.node_id)
else:
state_vector = self.get_state_vector(state)
if isinstance(state_vector, np.ndarray):
state_vector = torch.tensor([state_vector])
            if self.device not in ["cpu"]:  # TODO THIS MAY BREAK WITH GNN
                state_vector = state_vector.cuda(self.device)  # was `state.cuda(...)`, which moved the wrong object
# Getting the action with the highest Q value.
q_values = self.net(
state_vector
) # TODO check that the star OP works with regular NN
masked_q_values = self.mask_q_values(q_values, state)
logging.debug("Network output Q values")
logging.debug(q_values)
logging.debug("Masked")
logging.debug(masked_q_values)
_, action = torch.max(masked_q_values, dim=1)
action = int(action.item())
logging.debug(f"Agent chose action {action}")
return action
def reset(self) -> None:
"""TODO as of sep 13 idk if I need this anymore (was copy pasted from old ptl agent), but probably was from OpenAI impl"""
pass
def train(self, experience):
pass
class CustomerOnehotDQN(nn.Module):
"""
Simple MLP network that uses the one hot encoding of customer IDs.
Args:
num_customers: observation size, which is the total number of customers
num_dcs: action space, which is the total number of DCs.
"""
def __init__(self, num_customers: int, num_dcs: int):
super(CustomerOnehotDQN, self).__init__()
self.num_customers = num_customers
self.num_dcs = num_dcs
# Shallow.
# self.net = nn.Sequential(
# nn.Linear(self.num_customers, hidden_size),
# nn.ReLU(),
# nn.Linear(hidden_size, self.num_dcs),
# )
# ultra deep
self.net = nn.Sequential(
nn.Linear(self.num_customers, 128),
nn.Dropout(p=0.1),
nn.ReLU(),
nn.Linear(128, 64),
nn.Dropout(p=0.1),
nn.ReLU(),
nn.Linear(64, 32),
nn.Dropout(p=0.1),
nn.ReLU(),
nn.Linear(32, 16),
nn.Dropout(p=0.1),
nn.ReLU(),
nn.Linear(16, 8),
nn.Dropout(p=0.1),
nn.ReLU(),
nn.Linear(8, self.num_dcs),
)
# Check shipping_assignment_environment for the metadata neuron definition (create_state_vector, as of Sep 18)
self.customer_metadata_neuron = _CUSTOMER_METADATA_NEURON
def forward(self, x):
"""Convert the traditional state vector into one hot encoding of the customer."""
with torch.no_grad():
xp = torchmetrics.utilities.data.to_onehot(
x[:, self.customer_metadata_neuron] - self.num_dcs,
num_classes=self.num_customers,
)
return self.net(xp.float())
class CustomerDQNAgent(PyTorchAgent):
def __init__(self, env, customer_dqn, epsilon, **kwargs):
super().__init__(env, customer_dqn, epsilon=epsilon, **kwargs)
def get_state_vector(self, state):
return state.state_vector.reshape(-1)
def train(self, experience):
"""This is not needed, it's handled by PTL"""
pass
class MaskedMLPDQN(nn.Module):
"""An MLP That takes as an input a one hot encoding mask of the valid warehouses.
The motivation is that the agent doesn't have to learn tnat information and can
instead focus on which is the best warehouse in terms of optimization cost.
"""
def __init__(self, num_dcs):
super().__init__()
self.num_dcs = num_dcs
self.net = nn.Sequential(
nn.Linear(self.num_dcs, 64),
nn.ReLU(),
nn.Linear(64, 32),
nn.ReLU(),
nn.Linear(32, 16),
nn.ReLU(),
nn.Linear(16, self.num_dcs),
)
def forward(self, x):
return self.net(x.float())
class MaskedMLPDQNAgent(PyTorchAgent):
"""An MLP whose input is the mask of which DCS are valid."""
def __init__(self, env, mlp_dqn, epsilon, **kwargs):
super().__init__(env, mlp_dqn, epsilon=epsilon, **kwargs)
def get_state_vector(self, state):
latest_open_order = state.open[0]
customer_id = state.physical_network.get_customer_id(
latest_open_order.customer.node_id
)
return state.physical_network.dcs_per_customer_array[customer_id, :]
def train(self, experience):
"""This is not needed, it's handled by PTL"""
pass
class DebugMaskedMLPCheatDQN(nn.Module):
"""Debug MLP with cheat input from looakehead actions."""
def __init__(self, num_dcs):
super().__init__()
self.num_dcs = num_dcs
# The input of this cheat network is two onehots of |DC|
# self.net = nn.Sequential(
# nn.Linear(self.num_dcs, 8),
# nn.ReLU(),
# nn.Linear(8, self.num_dcs),
# )
# self.net = nn.Sequential(
# nn.Linear(self.num_dcs, self.num_dcs * 64),
# #nn.ReLU(),
# nn.Tanh(),
# nn.Linear(self.num_dcs * 64, self.num_dcs),
# )
self.net = nn.Sequential(
nn.Linear(self.num_dcs, self.num_dcs),
)
def forward(self, x):
return self.net(x.float())
class MaskedMLPWithCheatDQNAgent(PyTorchAgent):
"""This agent is to debug that the NNs are actually learning, because the lookahead input
is a cheat code and it should use it to get the best action most times."""
logger = logging.getLogger(__name__)
def __init__(self, env, mlp_dqn, epsilon, **kwargs):
self.lookahead_agent = LookaheadAgent(env)
super().__init__(env, mlp_dqn, epsilon=epsilon, **kwargs)
def get_state_vector(self, state):
lookahead_action = self.lookahead_agent.get_action(
state
) # Todo: this calls too many lookaheads.
# TODO also consider using the cost vector directly.
lookahead_onehot = np.zeros(state.physical_network.num_dcs) * 0.0
lookahead_onehot[lookahead_action] = 1.0
latest_open_order = state.open[0]
customer_id = state.physical_network.get_customer_id(
latest_open_order.customer.node_id
)
mask_vector = state.physical_network.dcs_per_customer_array[customer_id, :]
# state_vector = np.hstack((mask_vector, lookahead_onehot))
state_vector = lookahead_onehot
self.logger.debug("MLP with Cheat state vector (lookahead onehotted)")
self.logger.debug(lookahead_onehot)
self.logger.debug(" and valid warehouses are: ")
self.logger.debug(mask_vector)
return lookahead_onehot
def train(self, experience):
"""This is not needed, it's handled by PTL"""
pass
class MaskedPlusOneHotDQN(nn.Module):
"""The combination of MaskedNLP and CustomerOneHot"""
def __init__(self, num_customers, num_dcs):
super().__init__()
self.num_dcs = num_dcs
self.num_customers = num_customers
self.net = nn.Sequential(
nn.Linear(self.num_dcs + self.num_customers, 64),
nn.ReLU(),
nn.Linear(64, 32),
nn.ReLU(),
nn.Linear(32, 16),
nn.ReLU(),
nn.Linear(16, self.num_dcs),
)
def forward(self, x):
return self.net(x.float())
class MaskedPlusOneHotDQNAgent(PyTorchAgent):
"""An MLP whose input is the mask of which DCS are valid, concatenated with customer onehot."""
def __init__(self, env, mlp_dqn, epsilon, **kwargs):
super().__init__(env, mlp_dqn, epsilon=epsilon, **kwargs)
def get_state_vector(self, state):
return (
state_to_mask_concat_onehot(state).detach().numpy()
) # TODO inefficient as hell.
def train(self, experience):
pass
class FullMLPDQN(nn.Module):
"""The combination of MaskedNLP and CustomerOneHot"""
def __init__(self, num_customers, num_dcs, num_commodities):
super().__init__()
self.num_dcs = num_dcs
self.num_customers = num_customers
self.num_commodities = num_commodities
# Calculating input size.
mask_size = num_dcs
onehot_size = num_customers
inventory_size = num_commodities * num_dcs
demand = num_commodities
backlog = num_commodities
inventory_after_current_order = num_dcs # A vector of size |W| that says if the current order fits in each dc.
input_size = (
mask_size
+ onehot_size
+ inventory_size
+ demand
+ backlog
+ inventory_after_current_order
)
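        # Example sizing: 3 DCs, 5 customers, 3 commodities ->
        # 3 + 5 + 3*3 + 3 + 3 + 3 = 26 input features.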
# Don't be fooled, the layer sizes were pretty arbitrary.
# self.net = nn.Sequential(
# nn.Linear(input_size, input_size * 4),
# nn.LayerNorm(
# input_size * 4
# ), # TODO dont understand why batchnorm1d dont work, probably some shape think. Look for the diff between these two.
# nn.ReLU(),
# nn.Linear(input_size * 4, input_size * 2),
# nn.LayerNorm(
# input_size * 2
# ), # TODO dont understand why batchnorm1d dont work, probably some shape think. Look for the diff between these two.
# nn.ReLU(),
# nn.Linear(input_size * 2, input_size),
# nn.LayerNorm(
# input_size
# ), # TODO dont understand why batchnorm1d dont work, probably some shape think. Look for the diff between these two.
# nn.ReLU(),
# nn.Linear(input_size, input_size // 2),
# nn.LayerNorm(
# input_size // 2
# ), # TODO dont understand why batchnorm1d dont work, probably some shape think. Look for the diff between these two.
# nn.ReLU(),
# nn.Linear(input_size // 2, self.num_dcs),
# )
# Small wide
# self.net = nn.Sequential(
# nn.Linear(input_size, 256), nn.Tanh(), nn.Linear(256, self.num_dcs)
# )
# Linear
# self.net = nn.Sequential(nn.Linear(input_size, self.num_dcs))
# Small wide noisy
# self.net = nn.Sequential(
# NoisyLinear(input_size, 256), nn.Tanh(), NoisyLinear(256, self.num_dcs)
# )
        # Plain linear head (active)
self.net = nn.Sequential(nn.Linear(input_size, self.num_dcs))
def forward(self, x):
normalized_in = nn.functional.normalize(x.float())
return self.net(normalized_in)
class FullMLPDQNAgent(PyTorchAgent):
def __init__(self, env, mlp_dqn, epsilon, **kwargs):
super().__init__(env, mlp_dqn, epsilon=epsilon, **kwargs)
def get_state_vector(self, state):
mask_and_onehot = state_to_mask_concat_onehot(state)
inventory_vector = state_to_inventory_vector(state)
latest_open_order = state.open[0]
order_demand_vector = torch.tensor(latest_open_order.demand)
# Get size of commodities form the only order we're guaranteed exists.
num_commodities = latest_open_order.demand.shape[0]
# fixed_demand = orders_to_demand_summary(state.fixed, num_commodities)
fixed_demand = shipping_assignment_state.state_to_fixed_demand(state)
fixed_demand_per_dc = (
shipping_assignment_state.state_to_demand_per_warehouse_commodity(state)
)
open_demand = orders_to_demand_summary(state.open[1:], num_commodities)
aggregate_demand_vector = torch.tensor(
fixed_demand + open_demand
) # todo probably unnecessary now.
inventory_minus_open_order = (
inventory_vector - fixed_demand_per_dc - order_demand_vector
)
with torch.no_grad():
full_vector = (
torch.cat(
[
mask_and_onehot,
inventory_vector,
order_demand_vector,
aggregate_demand_vector,
inventory_minus_open_order,
]
)
.detach()
.numpy()
)
return full_vector
def train(self, experience):
pass
class MaskPlusConsumptionMLP(nn.Module):
"""The combination of MaskedNLP and the consumption (inventory after fixed orders and current)"""
def __init__(self, num_customers, num_dcs, num_commodities):
super().__init__()
self.num_dcs = num_dcs
self.num_customers = num_customers
self.num_commodities = num_commodities
# Calculating input size.
mask_size = num_dcs
inventory_after_current_order = (
num_dcs * num_commodities
) # A vector of size |W| that says if the current order fits in each dc.
input_size = mask_size + inventory_after_current_order
# Small wide
# self.net = nn.Sequential(
# nn.Linear(input_size, 256), nn.Tanh(), nn.Linear(256, self.num_dcs)
# )
# Linear
# self.net = nn.Sequential(nn.Linear(input_size, self.num_dcs))
# Small wide noisy
# self.net = nn.Sequential(
# NoisyLinear(input_size, 256), nn.Tanh(), NoisyLinear(256, self.num_dcs)
# )
# Medium wide with 3 noisies (not as good as 2)
# self.net = nn.Sequential(
# NoisyLinear(input_size, 512),
# nn.Tanh(),
# NoisyLinear(512, 256),
# nn.Tanh(),
# NoisyLinear(256, 256),
# nn.Tanh(),
# nn.Linear(256, self.num_dcs),
# )
# Wide with 2 noisies, better than just 1 and also as good as 3.
self.net = nn.Sequential(
NoisyLinear(input_size, 512),
nn.Tanh(),
NoisyLinear(512, 256),
nn.Tanh(),
nn.Linear(256, self.num_dcs),
)
# Linear Noisy
# self.net = nn.Sequential(nn.Linear(input_size, self.num_dcs))
def forward(self, x):
normalized_in = nn.functional.normalize(x.float())
return self.net(normalized_in)
class MaskPlusConsumptionMLPAgent(PyTorchAgent):
def __init__(self, env, mlp_dqn, epsilon, **kwargs):
super().__init__(env, mlp_dqn, epsilon=epsilon, **kwargs)
def get_state_vector(self, state):
mask = torch.tensor(state_to_mask(state))
inventory_vector = state_to_inventory_vector(state)
fixed_demand_per_dc = torch.tensor(
shipping_assignment_state.state_to_demand_per_warehouse_commodity(state)
)
latest_open_order = state.open[0]
order_demand_vector = torch.tensor(latest_open_order.demand)
num_dcs = state.physical_network.num_dcs
num_commodities = state.physical_network.num_commodities
# This line was pretty much trial and error tensor ops:
# Expand requires a matrix that the "singleton dim" (dim of 1) matches in size, and if I do
# .reshape(-1,1) later I can't stack it as [a,b,c,a,b,c]. That's why the (1,-1 reshape).
# The result is a vector of size |W|*|K| which is the order demand repeated |W| times.
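        # Worked example: num_dcs = 2, demand [d0, d1, d2] -> reshaped to
        # (1, 3), expanded to (2, 3), flattened to [d0, d1, d2, d0, d1, d2].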
rep_order_demand = (
order_demand_vector.reshape(1, -1)
.expand(num_dcs, num_commodities)
.reshape(-1)
)
# the available feature is inventory - fixed at each DC - the current order's demand subtracted to all warehouses.
inventory_after_order = (
inventory_vector - fixed_demand_per_dc - rep_order_demand
)
with torch.no_grad():
mask = nn.functional.normalize(mask.float().reshape(1, -1)).flatten()
inventory_after_order = nn.functional.normalize(
inventory_after_order.float().reshape(1, -1)
).flatten()
full_vector = (
torch.cat(
[
mask,
inventory_after_order,
]
)
.detach()
.numpy()
)
return full_vector
def train(self, experience):
pass
# Saving what was in pytorch agents about GNN
class PhysnetAggDemandGCN(torch.nn.Module):
"""
A GCN where every feature vector is a vector of commodity demands.
For inventories, it's a positive vector of available units.
For demand, it's negative sum units of all orders in the horizon for that customer.
"""
def __init__(self, num_commodities, num_dcs, num_customers):
super().__init__()
self.conv1 = GCNConv(num_commodities, num_commodities * 8)
self.conv2 = GCNConv(num_commodities * 8, num_commodities * 4)
# GraphPool to get (batch,num_commodities)
self.mlp = nn.Linear(num_commodities * 4, num_dcs)
# def forward(self, x: Tensor, edge_index: Tensor) -> Tensor:
def forward(self, data: torch_geometric.data.Data) -> Tensor:
"""
Args: #Todo update docs
x: Node feature matrix of shape [num_nodes, in_channels]
edge_index: Graph connectivity matrix of shape [2, num_edges]
Returns: [batch, num_dcs]
"""
dx, edge_index, batch = (
data.x,
data.edge_index,
data.batch,
) # If not using batches, all nodes in Data should map to the same batch.
if (
batch is None
): # We're not doing batch inference so all nodes belong to the same graph
# TODO important: if I were to use torch.ones instead of zeros, pyg assumes there are two graphs and will leave one empty
batch = torch.zeros(dx.shape[0]).long()
x = dx.float()
x = self.conv1(x, edge_index).relu()
cx = self.conv2(x, edge_index).relu()
# Todo only tested in non batch. But seems legit, stacks all features vertically for each node.
px = global_max_pool(cx, batch)
mx = self.mlp(px)
return mx
class PhysnetAggDemandGCNAgent(PyTorchAgent):
def __init__(self, env, gcn_module, epsilon, **kwargs):
super().__init__(env, gcn_module, epsilon=epsilon, **kwargs)
def get_state_vector(self, state):
# mask_and_onehot = state_to_mask_concat_onehot(state)
# inventory_vector = state_to_inventory_vector(state)
# Node features are agg balance in horizon for each physical node
graph_data = shipping_assignment_state.state_to_agg_balance_in_horizon_gnn(
state
)
# We also need the adjacency matrix.
return graph_data
def train(self, experience):
pass
# Utility funcs used by many agents.
def customer_id_to_onehot(customer_id, num_customers) -> torch.Tensor:
# TODO might be more efficient to just use numpy. Also document shape
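    # Example: customer_id_to_onehot(2, 4) -> tensor([[0, 0, 1, 0]]) of shape (1, num_customers).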
with torch.no_grad():
return torchmetrics.utilities.data.to_onehot(
torch.tensor([customer_id]),
num_classes=num_customers,
)
def state_to_mask(state):
"""Gets the mask of valid DCs for the latest order"""
latest_open_order = state.open[0]
customer_id = state.physical_network.get_customer_id(
latest_open_order.customer.node_id
)
mask = state.physical_network.dcs_per_customer_array[customer_id, :]
return mask
def state_to_mask_concat_onehot(state) -> torch.Tensor:
"""Converts a state to a valid warehouse mask concat with customer onehot"""
latest_open_order = state.open[0]
customer_id = state.physical_network.get_customer_id(
latest_open_order.customer.node_id
)
num_customers = state.physical_network.num_customers
mask = state.physical_network.dcs_per_customer_array[customer_id, :]
onehot_vector = customer_id_to_onehot(customer_id, num_customers)
with torch.no_grad():
return torch.cat([onehot_vector.reshape(-1), torch.tensor(mask)])
def state_to_inventory_vector(state):
return torch.tensor(state.inventory.reshape(-1)).detach()
def orders_to_demand_summary(orders, num_commodities):
"""Summarizes order demand"""
demand_vectors = [o.demand for o in orders]
return functools.reduce(
lambda a, b: a + b, demand_vectors, np.zeros(num_commodities)
)
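    # Example: demands array([1, 2]) and array([3, 4]) reduce to array([4., 6.]).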
```
#### File: experiments/aug_8_dqn_debugging/two_customers_dqn_debug.py
```python
from experiment_utils import experiment_runner
def two_customers_dqn_debug_run():
num_dcs = 10
num_customers = 2
num_commodities = 4
orders_per_day = 2
dcs_per_customer = 3
demand_mean = 100
demand_var = 20
num_steps = 50
num_episodes = 1000
runner_dqn = experiment_runner.create_dqn_experiment_runner(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_steps,
)
runner_dqn.run_episodes(
num_steps,
num_episodes,
orders_per_day,
experiment_name="two_customers_dqn_debug",
)
# one ep version for debugging the code.
def two_customers_dqn_debug_sample():
num_dcs = 10
num_customers = 2
num_commodities = 4
orders_per_day = 2
dcs_per_customer = 3
demand_mean = 100
demand_var = 20
num_steps = 50
num_episodes = 1000
runner_dqn = experiment_runner.create_dqn_experiment_runner(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_steps,
)
runner_dqn.run_episodes(
num_steps,
num_episodes,
orders_per_day,
experiment_name="two_customers_dqn_debug_sample",
)
if __name__ == "__main__":
# two_customers_dqn_debug_run()
two_customers_dqn_debug_sample()
```
#### File: src/experiments_v2/greedy_agent_utils.py
```python
import logging
import random
from typing import Tuple
import numpy as np
import pytorch_lightning as pl
import torch
import wandb
from envs import network_flow_env_builder
from pytorch_lightning.loggers import WandbLogger
from shipping_allocation.envs.network_flow_env import (
EnvironmentParameters,
ShippingFacilityEnvironment,
)
from torch import Tensor
from torch.optim import Adam, Optimizer
from torch.utils.data import DataLoader
import agents
from agents import Agent
from dqn.dqn_common import ShippingFacilityEpisodesDataset
from experiment_utils import report_generator
from experiments_v2.ptl_callbacks import (
MyPrintingCallback,
WandbDataUploader,
ShippingFacilityEnvironmentStorageCallback,
)
# Num epochs == num EPs.
class GreedyAgentRLModel(pl.LightningModule):
"""
This runner is used for greedy agents or agents that
don't need to use the PTL functions for updating a neural network.
"""
environment_parameters: EnvironmentParameters
agent: Agent
DEBUG = False
def __init__(
self,
agent,
        env,  # TODO: type as ShippingAssignmentEnvironment
experiment_name="",
*args,
**kwargs,
):
super().__init__(*args, **kwargs)
self.agent = agent
self.env = env
self.physical_network = self.env.physical_network
self.experiment_name = experiment_name
# Running values
self.state = self.env.reset()
self.done = False
self.episode_counter = 0
# Metrics
self.episode_reward = 0.0
self.running_reward = 0.0
self.actions = []
self.episode_rewards = []
self.info = {}
self.episodes_info = []
# debug var for env reset
self.was_reset = True
def forward(self, *args, **kwargs):
pass # do nothing related to NNs.
def training_step(self, step_info: Tuple[int, int, int], num_batch):
"""
A step of simulation. Step_info is a tuple of three integers,
see ShippingFacilityEpisodesDataset for the specification
Args:
step_info: (step, num_order, ep_start)
num_batch:
Returns:
"""
step, order, ep_start = step_info
logging.debug("Getting into training step")
if ep_start:
logging.info(f"Starting episode {self.episode_counter}")
if not self.was_reset:
logging.error("ERROR!!! EXPECTED ENV TO BE RESET.")
else:
self.was_reset = False
action = self.agent.get_action(self.state)
# the agent observes the first state and chooses an action
# environment steps with the agent's action and returns new state and reward
next_state, reward, done, info = self.env.step(action)
# print(f"Got reward {reward} done {done}")
self.agent.train((self.state, action, next_state, reward, done))
self.state = next_state
self.episode_reward += reward
if done:
# update the info to store the reports
self.info = info
# Render the current state of the environment
self.env.render()
self.actions.append(action)
self.episode_rewards.append(reward)
shim = (
torch.ones(2, 2, requires_grad=True) - 1
).sum() # a dummy operation to trick ptl
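        # The shim is a differentiable zero: Lightning expects training_step to
        # return a loss tensor, but greedy agents have nothing to optimize, so
        # the gradient is identically zero and the optimizer step is a no-op.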
# result = pl.TrainResult(
# minimize=shim
# ) # use the train result just for logging purposes.
self.log("reward", reward)
self.log("episode_reward", self.episode_reward)
self.log("episodes", self.episode_counter)
return shim
def training_epoch_end(self, outputs):
"""
This is triggered when the greedy dataset reaches the end of an episode.
Args:
outputs:
Returns:
"""
logging.info(f"Finishing episode {self.episode_counter}")
# Finished one episode, store reports
logging.info("Finished episode, storing information")
self.episodes_info.append(self.info)
self._wandb_custom_metrics(self.info)
self.episode_counter += 1
self._reset_env_and_metrics()
# return outputs
def _reset_env_and_metrics(self):
logging.info(
f"=========== starting episode {self.episode_counter} loop ==========="
)
logging.debug("Initial environment: ")
self.env.render()
self.state = self.env.reset()
self.done = False
self.episode_reward = 0.0
self.actions = []
self.episode_rewards = []
self.info = {}
self.was_reset = True # Making sure PTL is doing its job.
def train_dataloader(self) -> DataLoader:
"""
This custom dataloader forces to run one step at a time (batching doesn't make sense here.)
it's just a fancy iterator.
"""
return DataLoader(
dataset=ShippingFacilityEpisodesDataset(
num_steps=self.env.num_steps,
orders_per_day=self.env.order_generator.orders_per_day,
),
batch_size=1,
shuffle=False,
)
def _wandb_custom_metrics(self, info):
wandb_metrics = report_generator.convert_info_into_metrics_summary_dict(info)
logging.info(
f"Episode {self.episode_counter} had {wandb_metrics['big_m_count']} BigMs"
)
logging.info("Finished episode with greedy runner, logging metrics to wandb:")
logging.info(wandb_metrics)
wandb.log(
wandb_metrics,
commit=False,
)
def configure_optimizers(self):
# return [
# Adam([torch.ones(2, 2, requires_grad=True)])
# ] # shouldn't use it at all.
return Adam([torch.ones(2, 2, requires_grad=True)])
def backward(self, trainer, loss: Tensor, optimizer: Optimizer) -> None:
return
def main():
config_dict = {
"env": {
"num_dcs": 3,
"num_customers": 5,
"num_commodities": 3,
"orders_per_day": 2,
"dcs_per_customer": 2,
"demand_mean": 500,
"demand_var": 150,
"num_steps": 10, # steps per episode
"big_m_factor": 10000, # how many times the customer cost is the big m.
},
"hps": {
"env": "shipping-v0", # openai env ID.
"episode_length": 30, # todo isn't this an env thing?
"max_episodes": 5, # to do is this num episodes, is it being used?
"batch_size": 30,
"sync_rate": 2, # Rate to sync the target and learning network.
},
"seed": 0,
"agent": "best_fit"
# "agent": "random_valid"
}
torch.manual_seed(config_dict["seed"])
np.random.seed(config_dict["seed"])
random.seed(config_dict["seed"]) # not sure if actually used
run = wandb.init( # todo debugging why wrong project and experiment
config=config_dict,
project="rl_warehouse_assignment",
name="best_fit_few_warehouses_debugreward",
)
config = wandb.config
environment_config = config.env
hparams = config.hps
experiment_name = f"gr_{config.agent}_few_warehouses_debugreward"
wandb_logger = WandbLogger(
project="rl_warehouse_assignment",
name=experiment_name,
tags=[
# "debug"
# "experiment"
"local_debug"
],
log_model=False,
)
wandb_logger.log_hyperparams(dict(config))
environment_parameters = network_flow_env_builder.build_network_flow_env_parameters(
environment_config, hparams["episode_length"], order_gen="biased"
)
env = ShippingFacilityEnvironment(environment_parameters)
agent = agents.get_agent(env, environment_config, hparams, config.agent)
model = GreedyAgentRLModel(agent, env, experiment_name=experiment_name)
trainer = pl.Trainer(
max_epochs=hparams["max_episodes"],
# early_stop_callback=False,
val_check_interval=100,
logger=wandb_logger,
# log_save_interval=1,
# row_log_interval=1, # the default of this may leave info behind.
callbacks=[
MyPrintingCallback(),
ShippingFacilityEnvironmentStorageCallback(
experiment_name,
base="data/results/",
experiment_uploader=WandbDataUploader(),
),
],
)
trainer.fit(model)
if __name__ == "__main__":
# logging.root.level = logging.INFO
logging.root.level = logging.DEBUG
main()
```
#### File: src/experiment_utils/experiment_runner.py
```python
from shipping_allocation.envs.network_flow_env import (
EnvironmentParameters,
ShippingFacilityEnvironment,
)
from envs.order_generators import ActualOrderGenerator
from envs.inventory_generators import DirichletInventoryGenerator
from agents.concrete_agents import (
RandomAgent,
Agent,
AlwaysZeroAgent,
BestFitAgent,
RandomValid,
DoNothingAgent,
AgentHighest,
)
from experiment_utils import report_generator
from network import physical_network
import os
import time
import numpy as np
import pandas as pd
DEBUG = True
class ExperimentRunner:
environment_parameters: EnvironmentParameters
agent: Agent
def __init__(
self,
order_generator,
inventory_generator,
agent,
env: ShippingFacilityEnvironment,
experiment_name="",
):
self.order_generator = order_generator
self.inventory_generator = inventory_generator
self.agent = agent
self.environment_parameters = env.environment_parameters
self.physical_network = self.environment_parameters.network
self.env = env
self.experiment_name = experiment_name
def run_episode(self, ep):
state = self.env.reset()
reward = 0
done = False
print("=========== starting episode loop ===========")
print("Initial environment: ")
self.env.render()
actions = []
episode_rewards = []
info = {}
# demands_per_k = np.zeros((num_commodities,num_steps))
        # inventory_at_t = np.zeros((num_commodities,num_steps)) # TODO: fill these in eventually
while not done:
# action = self.agent.train((obs,action,reward,obs, done))
action = self.agent.get_action(state)
# print(f"Agent is taking action: {action}")
# the agent observes the first state and chooses an action
# environment steps with the agent's action and returns new state and reward
# obs, reward, done, info = self.env.step(action)#old
next_state, reward, done, info = self.env.step(action)
# print(f"Got reward {reward} done {done}")
self.agent.train((state, action, next_state, reward, done))
state = next_state
# Render the current state of the environment
self.env.render()
actions.append(action)
episode_rewards.append(reward)
if done:
print("===========Environment says we are DONE ===========")
if self.experiment_name != "":
print("Writing costs to CSV")
report_generator.write_experiment_reports(
info, self.experiment_name + f"/ep_{ep}"
) # todo consider writing only once instead of each ep.
if DEBUG:
print("Episode done, rewards per step: ", episode_rewards)
print(
"Episode done, average reward per step: ",
sum(episode_rewards) / self.environment_parameters.num_steps,
)
print(
"Episode done, average reward per order: ",
sum(episode_rewards) / len(state["fixed"]),
)
return actions, episode_rewards, info
def run_episodes(self, num_steps, num_episodes, orders_per_day, experiment_name):
self.experiment_name = experiment_name # hotfix
total_rewards = []
average_rewards = []
total_actions = np.zeros(num_steps * orders_per_day)
elapsed = []
self.display_environment()
for i in range(num_episodes):
print("\n\nRunning episode: ", i)
start_time = time.process_time()
actions, episode_rewards, info = self.run_episode(i)
end_time = time.process_time()
total_rewards.append(sum(episode_rewards))
average_rewards.append(np.mean(episode_rewards))
elapsed.append(end_time - start_time)
total_actions += np.array(actions)
# Create datasets
rewards_df = pd.DataFrame(
data={
"experiment_name": [experiment_name] * num_episodes,
"episode": list(range(num_episodes)),
"total_reward": total_rewards,
"average_reward": average_rewards,
"elapsed": elapsed,
}
)
actions_df = pd.DataFrame(total_actions)
base = f"data/results/{experiment_name}"
if not os.path.exists("data"):
os.mkdir("data")
if not os.path.exists("data/results"):
os.mkdir("data/results")
if not os.path.exists(base):
os.mkdir(base)
rewards_df.to_csv(base + "/rewards.csv")
actions_df.to_csv(base + "/actions.csv")
print("done")
if DEBUG:
print("Experiment done, total rewards: ", total_rewards)
print("Sum total rewards: ", sum(total_rewards))
print(
"Total fixed orders",
)
print("Elapsed", elapsed)
print("Total elapsed", sum(elapsed))
def display_environment(self):
# Print things about env, parameters and network that might be useful for reference.
physical_network = self.env.environment_parameters.network
print("\n\n")
print("================================================================")
print("================================================================")
print("================================================================")
print("===== INITIALIZING RUN WITH CURRENT ENVIRONMENT PARAMS ======")
print("===== DCS Per Customer Array ======")
print(physical_network.dcs_per_customer_array)
def create_random_experiment_runner(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_steps,
):
    # Bind to a new local name: reassigning `physical_network` inside the
    # function would shadow the imported class and raise UnboundLocalError.
    network = physical_network(
        num_dcs,
        num_customers,
        dcs_per_customer,
        demand_mean,
        demand_var,
        num_commodities,
    )
    order_generator = ActualOrderGenerator(network, orders_per_day)
    generator = DirichletInventoryGenerator(network)
    environment_parameters = EnvironmentParameters(
        network, order_generator, generator, num_steps
    )
env = ShippingFacilityEnvironment(environment_parameters)
agent = RandomAgent(env)
return ExperimentRunner(order_generator, generator, agent, env)
def create_alwayszero_experiment_runner(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_steps,
):
    network = physical_network(  # local rename avoids shadowing the imported class
        num_dcs,
        num_customers,
        dcs_per_customer,
        demand_mean,
        demand_var,
        num_commodities,
    )
    order_generator = ActualOrderGenerator(network, orders_per_day)
    generator = DirichletInventoryGenerator(network)
    environment_parameters = EnvironmentParameters(
        network, order_generator, generator, num_steps
    )
env = ShippingFacilityEnvironment(environment_parameters)
agent = AlwaysZeroAgent(env)
return ExperimentRunner(order_generator, generator, agent, env)
class AlwaysFirstAgent(object):
"""The world's DUMBEST agent!"""
def act(self, observation, reward, done):
return 0
# def create_always_first_dc_agent(num_dcs,
# num_customers,
# dcs_per_customer,
# demand_mean,
# demand_var,
# num_commodities,
# orders_per_day
# ):
# physical_network = PhysicalNetwork(
# num_dcs,
# num_customers,
# dcs_per_customer,
# demand_mean,
# demand_var,
# num_commodities,
# )
# order_generator = ActualOrderGenerator(physical_network, orders_per_day)
# generator = NaiveInventoryGenerator()
# agent = AlwaysFirstAgent()
# return ExperimentRunner(order_generator,generator,agent,physical_network)
def create_dqn_experiment_runner(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_steps,
):
    network = physical_network(
        num_dcs,
        num_customers,
        dcs_per_customer,
        demand_mean,
        demand_var,
        num_commodities,
    )
    order_generator = ActualOrderGenerator(network, orders_per_day)
    generator = DirichletInventoryGenerator(network)
    environment_parameters = EnvironmentParameters(
        network, order_generator, generator, num_steps
    )
env = ShippingFacilityEnvironment(environment_parameters)
    agent = QNAgent(env)  # NOTE: QNAgent is not imported in this module; import it from its defining module before use
return ExperimentRunner(order_generator, generator, agent, env)
def create_bestfit_experiment_runner(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_steps,
):
    network = physical_network(
        num_dcs,
        num_customers,
        dcs_per_customer,
        demand_mean,
        demand_var,
        num_commodities,
    )
    order_generator = ActualOrderGenerator(network, orders_per_day)
    generator = DirichletInventoryGenerator(network)
    environment_parameters = EnvironmentParameters(
        network, order_generator, generator, num_steps
    )
env = ShippingFacilityEnvironment(environment_parameters)
agent = BestFitAgent(env)
return ExperimentRunner(
order_generator, generator, agent, env, experiment_name="bestfit_validation"
)
def create_randomvalid_experiment_runner(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_steps,
):
    network = physical_network(
        num_dcs,
        num_customers,
        dcs_per_customer,
        demand_mean,
        demand_var,
        num_commodities,
    )
    order_generator = ActualOrderGenerator(network, orders_per_day)
    generator = DirichletInventoryGenerator(network)
    environment_parameters = EnvironmentParameters(
        network, order_generator, generator, num_steps
    )
env = ShippingFacilityEnvironment(environment_parameters)
agent = RandomValid(env)
return ExperimentRunner(
order_generator, generator, agent, env, experiment_name="randomvalid_validation"
)
def create_donothing_experiment_runner(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_steps,
):
    network = physical_network(
        num_dcs,
        num_customers,
        dcs_per_customer,
        demand_mean,
        demand_var,
        num_commodities,
    )
    order_generator = ActualOrderGenerator(network, orders_per_day)
    generator = DirichletInventoryGenerator(network)
    environment_parameters = EnvironmentParameters(
        network, order_generator, generator, num_steps
    )
env = ShippingFacilityEnvironment(environment_parameters)
agent = DoNothingAgent(env)
return ExperimentRunner(
order_generator, generator, agent, env, experiment_name="randomvalid_validation"
)
def create_agent_66_experiment_runner(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_steps,
):
    network = physical_network(
        num_dcs,
        num_customers,
        dcs_per_customer,
        demand_mean,
        demand_var,
        num_commodities,
    )
    order_generator = ActualOrderGenerator(network, orders_per_day)
    generator = DirichletInventoryGenerator(network)
    environment_parameters = EnvironmentParameters(
        network, order_generator, generator, num_steps
    )
env = ShippingFacilityEnvironment(environment_parameters)
agent = AgentHighest(env)
return ExperimentRunner(
order_generator, generator, agent, env, experiment_name="randomvalid_validation"
)
def run_with_params(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_steps,
):
    network = physical_network(
        num_dcs,
        num_customers,
        dcs_per_customer,
        demand_mean,
        demand_var,
        num_commodities,
    )
    # order_generator = NaiveOrderGenerator(num_dcs, num_customers, orders_per_day)
    order_generator = ActualOrderGenerator(network, orders_per_day)
    generator = DirichletInventoryGenerator(network)
    environment_parameters = EnvironmentParameters(
        network, order_generator, generator, num_steps
    )
env = ShippingFacilityEnvironment(environment_parameters)
agent = RandomAgent(env)
obs = env.reset()
reward = 0
done = False
print("=========== starting episode loop ===========")
print("Initial environment: ")
env.render()
actions = []
episode_rewards = []
# demands_per_k = np.zeros((num_commodities,num_steps))
    # inventory_at_t = np.zeros((num_commodities,num_steps)) # TODO: fill these in eventually
while not done:
action = agent.act(obs, reward, done)
# print(f"Agent is taking action: {action}")
# the agent observes the first state and chooses an action
# environment steps with the agent's action and returns new state and reward
obs, reward, done, info = env.step(action)
# print(f"Got reward {reward} done {done}")
# Render the current state of the environment
env.render()
actions.append(action)
episode_rewards.append(reward)
if done:
print("===========Environment says we are DONE ===========")
return actions, episode_rewards
def run_episodes(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_steps,
num_episodes,
experiment_name,
):
total_rewards = []
average_rewards = []
total_actions = np.zeros(num_steps * orders_per_day)
elapsed = []
for i in range(num_episodes):
start_time = time.process_time()
actions, episode_rewards = run_with_params(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_steps,
)
end_time = time.process_time()
total_rewards.append(sum(episode_rewards))
average_rewards.append(np.mean(episode_rewards))
elapsed.append(end_time - start_time)
total_actions += np.array(actions)
# Create datasets
rewards_df = pd.DataFrame(
data={
"experiment_name": [experiment_name] * num_episodes,
"episode": list(range(num_episodes)),
"total_reward": total_rewards,
"average_reward": average_rewards,
"elapsed": elapsed,
}
)
actions_df = pd.DataFrame(total_actions)
base = f"data/results/{experiment_name}"
if not os.path.exists("data"):
os.mkdir("data")
if not os.path.exists("data/results"):
os.mkdir("data/results")
if not os.path.exists(base):
os.mkdir(base)
rewards_df.to_csv(base + "/rewards.csv")
actions_df.to_csv(base + "/actions.csv")
print("done")
```
#### File: src/experiment_utils/mcf_solver.py
```python
import time
from ortools.graph import pywrapgraph
def mcf_solve(mcf):
balances = [mcf.Supply(n) for n in range(mcf.NumNodes())]
# print(balances)
# print("balance sum should be 0 = ",sum(balances))
print("Running optimization")
start = time.process_time_ns()
    status = mcf.Solve()
    end = time.process_time_ns()
    elapsed_ms = (end - start) / 1000000
    print(f"elapsed {elapsed_ms}ms")
    print(f"elapsed {elapsed_ms / 1000:.1g}s")  # one significant figure; the round_to_1 helper was undefined
    if status == mcf.OPTIMAL:  # reuse the stored status instead of solving a second time
print("Minimum cost:", mcf.OptimalCost())
# print('')
# print(' Arc Flow / Capacity FlowCost ArcCost')
# for i in range(mcf.NumArcs()):
# cost = mcf.Flow(i) * mcf.UnitCost(i)
# print('%s %3s / %3s %3s\t\t\t%3s' % (
# arcs[i]['name'],
# # mcf.Tail(i),
# # mcf.Head(i),
# mcf.Flow(i),
# mcf.Capacity(i),
# # unscaled_double(cost)
# cost,
# mcf.UnitCost(i)
# )
# )
else:
print("There was an issue with the min cost flow input.")
# print(mcf)
# print(mcf.NumArcs())
# print(' Arc Flow / Capacity FlowCost ArcCost')
# for i in range(mcf.NumArcs()):
# cost = mcf.UnitCost(i)
# # print('%1s -> %1s %3s / %3s %3s\t\t\t%3s' % (
# mcf.Tail(i),
# mcf.Head(i),
# 0,
# mcf.Capacity(i),
# # unscaled_double(cost)
# cost,
# mcf.UnitCost(i)))
return elapsed_ms
```
#### File: shipping_allocation/envs/inventory_generators.py
```python
from abc import ABC
from typing import List
import numpy as np
from network import physical_network
from experiment_utils.Order import Order
class InventoryGenerator(ABC):
# Generates new inventory and distributes it somehow to keep the network balanced for the selected locations.
# Returns a numpy array of shape (num_dcs,num_commodities) representing how much extra inventory is going to appear.
def generate_new_inventory(
self, network: physical_network, open_orders: List[Order]
): # todo add type when it works.
pass
class NaiveInventoryGenerator(InventoryGenerator):
def generate_new_inventory(
self, network: physical_network, open_orders: List[Order]
):
# logging.info("==> inventory generator")
total_inventory = sum(
map(lambda o: o.demand, open_orders)
        )  # TODO: rename and handle many commodities.
even = total_inventory // network.num_dcs
dc_inv = np.array([even] * network.num_dcs).reshape(
network.num_dcs, -1
) # To keep the (dc,product) shape. #todo validate with multiple commodities
# logging.info("Demand", total_inventory)
# logging.info("Pre level dc_inv")
# logging.info(dc_inv)
# logging.info("Total new inv",np.sum(dc_inv))
imbalance = total_inventory - np.sum(dc_inv, axis=0)
# if total_inventory // network.num_dcs != total_inventory / network.num_dcs:
dc_inv[0, :] = dc_inv[0, :] + imbalance
# logging.info("Rebalanced dc inv",dc_inv)
# logging.info("Rebalanced sum",np.sum(dc_inv))
if (np.sum(dc_inv, axis=0) != total_inventory).any():
raise Exception("np.sum(dc_inv) != total_inventory")
return dc_inv
class DirichletInventoryGenerator(InventoryGenerator):
def __init__(self, network: physical_network):
num_dcs = network.num_dcs
num_commodities = network.num_commodities
self.alpha = np.random.permutation(
num_dcs / np.arange(1, num_dcs + 1)
) # trying to make it skewed.
self.inventory_generation_distribution = np.random.dirichlet(
self.alpha, num_commodities
) # (num_dc,num_k) of dc distribution of inventory.
def generate_new_inventory(
self, network: physical_network, open_orders: List[Order]
):
# logging.info("==> inventory generator")
total_inventory = sum(
map(lambda o: o.demand, open_orders)
        )  # TODO: rename and handle many commodities.
# even = total_inventory // network.num_dcs
inventory_distribution = self.inventory_generation_distribution
supply_per_dc = np.floor(
total_inventory.reshape(-1, 1) * inventory_distribution
)
imbalance = total_inventory - np.sum(supply_per_dc, axis=1)
supply_per_dc[:, 0] = supply_per_dc[:, 0] + imbalance
# logging.info("Demand", total_inventory)
# logging.info("Pre level dc_inv")
# logging.info(dc_inv)
# logging.info("Total new inv",np.sum(dc_inv))
# if total_inventory // network.num_dcs != total_inventory / network.num_dcs:
# logging.info("Rebalanced dc inv",dc_inv)
# logging.info("Rebalanced sum",np.sum(dc_inv))
if not np.isclose(np.sum(np.sum(supply_per_dc, axis=1) - total_inventory), 0.0):
raise RuntimeError("Demand was not correctly balanced")
return supply_per_dc.transpose()
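

# Editor's note (shape convention, inferred from the code above): with D DCs and
# K commodities, the Dirichlet draw has shape (K, D); after the flooring
# remainder is pushed onto the first DC, the transpose returns a (D, K) array
# whose per-commodity column sums equal the open-order demand.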
```
#### File: shipping_allocation/envs/shipping_assignment_state.py
```python
from collections import namedtuple
from torch_geometric.data import Data
from experiment_utils import Orders
import numpy as np
import torch
# Check the environment for state_vector impl. As of Sep 18, it's
# A concatenation of inventories, current order demand an some metadata neurons
# That indicate which customer is allocating the order.
ShippingAssignmentState = namedtuple(
"ShippingAssignmentState",
[
"current_t",
"physical_network",
"fixed",
"open",
"inventory",
"state_vector",
"big_m_counter_per_commodity",
"optimization_cost",
"big_m_units_per_commodity",
],
)
def state_to_fixed_demand(state):
"""Converts a state to the currently relevant fixed orders in horizon (because raw fixed is historical)"""
planning_horizon = state.physical_network.planning_horizon
current_t = state.current_t
end_t = current_t + planning_horizon - 1
fixed_demand = Orders.summarize_order_demand(
state.fixed, current_t, end_t, state.physical_network.num_commodities
)
return fixed_demand
def state_to_demand_per_warehouse_commodity(state):
"""TODO if works test if generalize to many commodites.
Converts the demand of fixed orders in horizon into a vector of how much demand there is on each warehouse.
"""
planning_horizon = state.physical_network.planning_horizon
current_t = state.current_t
end_t = current_t + planning_horizon - 1
# (num_dcs,num_warehouses)
demand_per_dc = Orders.summarize_order_demand_per_dc(
state.fixed,
current_t,
end_t,
state.physical_network.num_dcs,
state.physical_network.num_commodities,
)
return demand_per_dc.reshape(
1, -1
).flatten() # shape (num_warehouses*num_commodities)
# Graph based networks TODO decide if this should go somewhere else
def state_to_agg_balance_in_horizon_gnn(
state: ShippingAssignmentState,
) -> "torch_geometric.data.Data":
"""
Converts an environment state to node features and adjacency list
by using inventory and demand vectors as features. Adjacencies are valid DC->Customer.
Features are the agg demand for that node in horizon or inventory if warehouse. No arc features yet.
Returns:
torch_geometric.data with the node features and edge_indices
"""
inventory = state.inventory
pn = state.physical_network
latest_open_order = state.open[0]
latest_order_demand = latest_open_order.demand
planning_horizon = state.physical_network.planning_horizon
current_t = state.current_t
end_t = current_t + planning_horizon - 1
all_orders = state.fixed + state.open
# Create a node feature vector for the customers: 0 for all customers except for the latest open order.
demand_summary_per_customer = Orders.summarize_demand_per_customer_in_horizon(
all_orders,
start=current_t,
end=end_t,
num_customers=state.physical_network.num_customers,
num_commodities=state.physical_network.num_commodities,
physical_network=state.physical_network,
)
node_features = np.vstack((inventory, demand_summary_per_customer))
    edge_index = pn.physical_adjacency_matrix  # TODO: verify this adjacency layout is correct
graph_data = Data(
x=torch.tensor(node_features), edge_index=torch.tensor(edge_index)
)
return graph_data
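

# Editor's note: torch_geometric.data.Data expects edge_index as a LongTensor of
# shape [2, num_edges]; if physical_adjacency_matrix is stored as (num_edges, 2)
# it would need a .t().contiguous() first (an assumption — the stored layout is
# not shown in this file).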
```
#### File: shipping_allocation/envs/test_network_flow_env.py
```python
from envs.network_flow_env import (
EnvironmentParameters,
ShippingFacilityEnvironment,
RandomAgent,
)
from envs.order_generators import ActualOrderGenerator
from shipping_allocation import NaiveInventoryGenerator
from network import physical_network
def test_one_timestep():
num_dcs = 2
num_customers = 1
num_commodities = 3
orders_per_day = 1
dcs_per_customer = 1
demand_mean = 100
demand_var = 20
num_episodes = 1
    # NOTE: assigning to the imported class name `physical_network` would make
    # it a local variable and raise UnboundLocalError, so the instance gets a
    # distinct name here.
    network = physical_network(
        num_dcs,
        num_customers,
        dcs_per_customer,
        demand_mean,
        demand_var,
        num_commodities,
    )
    # order_generator = NaiveOrderGenerator(num_dcs, num_customers, orders_per_day)
    order_generator = ActualOrderGenerator(network, orders_per_day)
    generator = NaiveInventoryGenerator()
    environment_parameters = EnvironmentParameters(
        network, order_generator, generator, num_episodes
    )
env = ShippingFacilityEnvironment(environment_parameters)
agent = RandomAgent(env.action_space)
obs = env.reset()
reward = 0
done = False
print("=========== starting episode loop ===========")
print("Initial environment: ")
env.render()
while not done:
action = agent.get_action(obs, reward, done)
# print(f"Agent is taking action: {action}")
# the agent observes the first state and chooses an action
# environment steps with the agent's action and returns new state and reward
obs, reward, done, info = env.step(action)
# print(f"Got reward {reward} done {done}")
# Render the current state of the environment
env.render()
if done:
print("===========Environment says we are DONE ===========")
def run_with_params(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_episodes,
):
    # Same shadowing fix as above: the instance must not reuse the class name.
    network = physical_network(
        num_dcs,
        num_customers,
        dcs_per_customer,
        demand_mean,
        demand_var,
        num_commodities,
    )
    # order_generator = NaiveOrderGenerator(num_dcs, num_customers, orders_per_day)
    order_generator = ActualOrderGenerator(network, orders_per_day)
    generator = NaiveInventoryGenerator()
    environment_parameters = EnvironmentParameters(
        network, order_generator, generator, num_episodes
    )
env = ShippingFacilityEnvironment(environment_parameters)
agent = RandomAgent(env.action_space)
obs = env.reset()
reward = 0
done = False
print("=========== starting episode loop ===========")
print("Initial environment: ")
env.render()
while not done:
action = agent.get_action(obs, reward, done)
# print(f"Agent is taking action: {action}")
# the agent observes the first state and chooses an action
# environment steps with the agent's action and returns new state and reward
obs, reward, done, info = env.step(action)
# print(f"Got reward {reward} done {done}")
# Render the current state of the environment
env.render()
if done:
print("===========Environment says we are DONE ===========")
def test_sliding_ten():
num_dcs = 2
num_customers = 2
num_commodities = 2
orders_per_day = 2
dcs_per_customer = 1
demand_mean = 100
demand_var = 20
num_episodes = 10
run_with_params(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_episodes,
)
def test_sliding_ten_2():
num_dcs = 4
num_customers = 3
num_commodities = 2
orders_per_day = 2
dcs_per_customer = 4
demand_mean = 100
demand_var = 20
num_episodes = 10
run_with_params(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_episodes,
)
def test_sliding_ten_3():
num_dcs = 10
num_customers = 20
num_commodities = 2
orders_per_day = 2
dcs_per_customer = 4
demand_mean = 100
demand_var = 20
num_episodes = 100
run_with_params(
num_dcs,
num_customers,
dcs_per_customer,
demand_mean,
demand_var,
num_commodities,
orders_per_day,
num_episodes,
)
import time
if __name__ == "__main__":
total_start = time.process_time()
for i in range(100):
test_sliding_ten()
for i in range(100):
test_sliding_ten_2()
for i in range(100):
test_sliding_ten_3()
total_end = time.process_time()
print("Elapsed on all runs: ", total_end - total_start, "s")
# test_one_timestep()
```
#### File: src/tests/test_Orders.py
```python
from network.physical_network import Node, PhysicalNetwork
from experiment_utils import Orders
from experiment_utils.Order import Order
import numpy as np
def fixture():
dummy_customer = Node(4, 100, 0, 0, "dc")
dc_0 = Node(0, 100, 0, 0, "dc")
dc_1 = Node(1, 100, 0, 0, "dc")
fixed_orders = [
# A total 105,60 to DC 0,10,10 to DC 1, And one order outside of horizon.
Order(
np.array([50.0, 30.0, 30.0]),
dc_0,
dummy_customer,
0,
"someord",
),
Order(
np.array([55.0, 30.0, 30.0]),
dc_0,
dummy_customer,
1,
"someord",
),
Order(
np.array([10.0, 10.0, 10.0]),
dc_1,
dummy_customer,
1,
"someord",
),
Order(
np.array([10.0, 10.0, 10.0]),
dc_1,
dummy_customer,
4,
"someord",
),
]
return fixed_orders
def test_summarize_order_demand_per_dc():
# Given
# physical_network = PhysicalNetwork(3, 5, 2, 100, 50, num_commodities=3)
    # This inline order list duplicated fixture() exactly, so reuse it.
    fixed_orders = fixture()
# When
demand_per_dc = Orders.summarize_order_demand_per_dc(
fixed_orders, start=0, end=3, num_dcs=2, num_commodities=3
)
# Then
assert (
demand_per_dc
== np.array(
[
[105.0, 60.0, 60.0],
[10.0, 10.0, 10.0],
]
)
).all()
def test_summarize_demand_per_customer_in_horizon():
# Given
physical_network = PhysicalNetwork(
num_dcs=3,
num_customers=5,
dcs_per_customer=2,
demand_mean=100,
demand_var=50,
num_commodities=3,
)
c0 = Node(3, 0, 0, 0, "dc", name="c0")
c1 = Node(4, 0, 0, 0, "dc", name="c1")
c3 = Node(6, 0, 0, 0, "dc", name="c3")
dc_0 = Node(0, 0, 0, 0, "dc", name="dc0")
dc_1 = Node(1, 0, 0, 0, "dc", name="dc1")
fixed_orders = [
# A total 105,60 to DC 0,10,10 to DC 1, And one order outside of horizon.
Order(
np.array([50.0, 30.0, 30.0]),
dc_0,
c0,
0,
"someord",
),
Order(
np.array([55.0, 30.0, 30.0]),
dc_0,
c1,
1,
"someord",
),
Order(
np.array([10.0, 10.0, 10.0]),
dc_1,
c3,
1,
"someord",
),
Order(
np.array([10.0, 10.0, 10.0]),
dc_1,
c3,
4,
"someord",
),
Order(
np.array([666.0, 666.0, 666.0]),
dc_1,
c0,
5,
"someord",
),
]
# When
demand_summary = Orders.summarize_demand_per_customer_in_horizon(
fixed_orders,
start=0,
end=4,
num_customers=5,
num_commodities=3,
physical_network=physical_network,
)
# Then there should be demand on rows 0,1,3. Third row is the sum of two orders.
assert (
demand_summary
== np.array(
[
[-50.0, -30.0, -30.0],
[-55.0, -30.0, -30.0],
[-0.0, -0.0, -0.0],
[-20.0, -20.0, -20.0],
[-0.0, -0.0, -0.0],
]
)
).all()
``` |
{
"source": "jotaro-sama/PacEnv",
"score": 3
} |
#### File: jotaro-sama/PacEnv/pac_test.py
```python
import pac_env
def printMatrix(mat):
for row in mat:
for elem in row:
print(str(elem), end=" ")
print("")
env = pac_env.PacEnv()
(screen, score, power) = env.reset()
done = False
info = ""
print("Score: " + str(score))
printMatrix(screen)
for i in range(100):
action = env.action_space.sample()
print(action)
(screen, score, power), _, done, info = env.step(action)
printMatrix(screen)
print("Score: " + str(score) + ", Power :" + str(power))
print(str(info["self.position"]) + ", pow_timeout: " + str(info["self.power_timeout"]))
if done:
print("Game Over! \n" + "Score: " + str(score))
break
``` |
{
"source": "Jotasenpai/DigitalMediaStoreRESTfull",
"score": 2
} |
#### File: DigitalMediaStoreRESTfull/app/__init__.py
```python
import logging
import os
from flask import Flask
from flask_cors import CORS
from app.extensions import api
from app.extensions.database import db
from app.extensions.schema import ma
from app.views import albums, artists, hello, tracks
def create_app(config, **kwargs):
logging.basicConfig(level=logging.INFO)
app = Flask(__name__, **kwargs)
CORS(app, resources={r"/api/*": {"origins": "*"}})
app.config.from_object(config)
# app.url_map.strict_slashes = False
with app.app_context():
api.init_app(app)
db.init_app(app)
db.create_all()
ma.init_app(app)
api.register_blueprint(hello.blp)
api.register_blueprint(artists.blp)
api.register_blueprint(albums.blp)
api.register_blueprint(tracks.blp)
try:
os.makedirs(app.instance_path)
except OSError:
pass
return app
``` |
{
"source": "JotaSe/text2emotion-library",
"score": 2
} |
#### File: JotaSe/text2emotion-library/setup.py
```python
from setuptools import setup, find_packages
def readme():
with open('README.md', encoding="utf8") as f:
README = f.read()
return README
setup(
name="Text2Emotion",
version="0.0.1",
description="Detecting emotions behind the text, Text2Emotion package will help you to understand the emotions in textual meassages.",
long_description=readme(),
long_description_content_type="text/markdown",
url="https://github.com/aman2656/Text2Emotion",
author="Text2Emotion Team",
author_email="<EMAIL>",
license="MIT",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.1",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8"
],
packages=find_packages(),
install_requires=["nltk", "emoji>=0.6.0"],
include_package_data=True
)
``` |
{
"source": "jotathebest/aws_dynamodb_crud",
"score": 2
} |
#### File: jotathebest/aws_dynamodb_crud/app.py
```python
from chalice import Chalice, Response
from chalicelib.handlers.handler_group import HandlerGroup
from chalicelib.handlers.handler_user import HandlerUser
app = Chalice(app_name="porter")
@app.route("/")
def index():
return {"hello": "world"}
@app.route("/users/{user_id}", methods=["POST"])
def add_user(user_id: str) -> Response:
user_data = app.current_request.json_body
handler = HandlerUser()
return handler.create_user(user_id, user_data)
@app.route("/users/{user_id}", methods=["GET"])
def get_user(user_id: str):
handler = HandlerUser()
return handler.retrieve_user(user_id)
@app.route("/users/{user_id}", methods=["PUT"])
def update_user(user_id: str):  # renamed from get_user, which shadowed the GET handler above
user_data = app.current_request.json_body
handler = HandlerUser()
return handler.update_user(user_id, user_data)
@app.route("/users/{user_id}", methods=["DELETE"])
def delete_user(user_id: str):
handler = HandlerUser()
return handler.delete_user(user_id)
@app.route("/groups/{group_name}", methods=["GET"])
def get_users_from_group(group_name: str):
handler = HandlerGroup()
return handler.get_users_from_group(group_name)
@app.route("/groups/{group_name}", methods=["PUT"])
def update_users_from_group(group_name: str):
user_data = app.current_request.json_body
handler = HandlerGroup()
return handler.update_users_group(group_name, user_data)
@app.route("/groups/{group_name}", methods=["DELETE"])
def delete_users_from_group(group_name: str):
handler = HandlerGroup()
return handler.delete_users_from_group(group_name)
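

# --- Editor's sketch (hypothetical invocation, not part of the original) ---
# With `chalice local` serving on port 8000, the routes above map to, e.g.:
#   curl -X POST http://localhost:8000/users/u1 \
#        -H 'Content-Type: application/json' -d '{"name": "Ada"}'
#   curl http://localhost:8000/users/u1
#   curl -X DELETE http://localhost:8000/groups/admins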
``` |
{
"source": "jotathebest/pycon2021",
"score": 3
} |
#### File: pycon2021/Docker base/basic_selenium_auth.py
```python
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from pyvirtualdisplay import Display
import time
class BaseBrowserCheck(object):
def __init__(self):
self.browser = None
def _init_browser(self):
"""
Initializes the browser instance
"""
chrome_options = webdriver.ChromeOptions()
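        # pyvirtualdisplay starts a virtual X server (Xvfb) so Chrome can
        # render without a physical display, e.g. inside a container.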
display = Display(visible=0, size=(1386, 768))
display.start()
chrome_options.add_argument("--no-sandbox")
chrome_options.add_argument("--disable-dev-shm-usage")
chrome_options.add_experimental_option("prefs", {"download.prompt_for_download": False})
self.browser = webdriver.Chrome(options=chrome_options)
self.browser.set_window_size(1386, 768)
def setup_selenium(self):
self._init_browser()
def stop_selenium(self):
self.browser.quit()
class WebElementCheck(BaseBrowserCheck):
def __init__(self):
super(WebElementCheck, self).__init__()
def _search_element(self, selector_type, descriptor, timeout=60):
"""
        @selector_type is any of the selectors defined in selenium.webdriver.common.by
        @descriptor is the web element descriptor matching the selector type; for
        example, if you search by CSS the descriptor should be a CSS selector
"""
try:
WebDriverWait(self.browser, timeout).until(EC.presence_of_element_located((selector_type, descriptor)))
root_elements = self.browser.find_elements(selector_type, descriptor)
# Checks if the root div was loaded
return len(root_elements) > 0
except (TimeoutException):
return False
def _does_webelement_with_xpath_exist(self, xpath, timeout=60):
"""
Checks if a web element is loaded. Element must be referenced by its xpath
Returns True if the element is found
@xpath is the web element absolute path
        @timeout is the number of seconds to wait before throwing an exception
"""
return self._search_element(By.XPATH, xpath, timeout=timeout)
def _does_webelement_with_css_exist(self, css_selector, timeout=60):
"""
Checks if a web element is loaded. Element must be referenced by its xpath
Returns True if the element is found
@css_selector is the CSS selector of the element
        @timeout is the number of seconds to wait before throwing an exception
"""
return self._search_element(By.CSS_SELECTOR, css_selector, timeout=timeout)
def _does_element_with_name_exists(self, web_element_name):
try:
self.browser.find_element_by_name(web_element_name)
return True
except (NoSuchElementException):
return False
class UbiSignInCheck(WebElementCheck):
def __init__(
self,
username,
password,
sign_url="https://industrial.ubidots.com",
timeout=25,
):
"""
@components: Status page components to update, should be an ids list, [id, id, id]
"""
super(WebElementCheck, self).__init__()
self.sign_url = sign_url
self.username = username
self.password = password
self.timeout = timeout
def _sign_in(self):
"""
        @timeout is the number of seconds to wait before throwing an exception
        """
        # Check whether the user is already signed in
current_url = self.browser.current_url
if "app" in current_url or "dashboard" in current_url.lower():
print("The user is already logged")
return True
print("Loading SigIn form")
self.browser.get(self.sign_url)
# Waits until form div is loaded
is_sign_in_form_loaded = self._does_webelement_with_css_exist("form", timeout=self.timeout)
if not is_sign_in_form_loaded:
print(f"Could not load the form to make sign in at {self.sign_url}")
return False
time.sleep(1) # Gives an additional seconds
print("Filling form")
if not self._does_element_with_name_exists("identification") or not self._does_element_with_name_exists(
"password"
):
print("Could not find form expected fields to make login")
return False
user_box = self.browser.find_element_by_name("identification")
pass_box = self.browser.find_element_by_name("password")
user_box.send_keys(self.username)
pass_box.send_keys(self.password)
if not self._does_webelement_with_xpath_exist('//button[text()="Sign In"]', timeout=self.timeout):
print("Could not find button to make login")
return False
self.browser.find_element_by_xpath('//button[text()="Sign In"]').click()
# Should redirect to the dashboards tab
result = "dashboards" in self.browser.title.lower()
if not result:
print('Could not find "dashboards" label in browser tab')
return result
def _check(self):
"""
Returns True if sign in feature is Ok
"""
if self.browser is None:
self._init_browser()
print("Checking if the browser can make login")
# Test 1: Should sign in using the form, signed should be True
signed = self._sign_in()
print("Finished, [signed = {}]".format(signed))
return signed
def test(self):
login_service_up = self._check()
attempts = 0
while attempts < 2 and not login_service_up: # Attempts three times to make login
print("Checking web access, attempt: {}".format(attempts))
login_service_up = self._check()
attempts += 1
check_result = {
"result": "ok",
"details": "",
"create_incident": False,
}
if not login_service_up: # Could not login or load root div
details = "[Alert] Could not make login"
print(details)
check_result["result"] = "outage"
check_result["create_incident"] = True
print("Finished")
return check_result
print("Finished")
return check_result
def close_browser(self):
self.browser.close()
if __name__ == "__main__":
tester = UbiSignInCheck("", "")
print(tester.test())
time.sleep(10)
tester.close_browser()
``` |
{
"source": "jotautomation/iris",
"score": 3
} |
#### File: iris/listener/listener.py
```python
import time
import shutil
import logging
import json
import os
import asyncio
import tornado.web
import tornado.websocket
from listener.logs_web_socket import LogsWebSocketHandler
from listener.auth import authenticate, get_cookie_secret
RESP_CONTENT_TYPE = 'application/vnd.siren+json; charset=UTF-8'
# disable pylint warning for not overriding 'data_received' from RequestHandler
# pylint: disable=W0223
class IrisRequestHandler(tornado.web.RequestHandler):
"""Base class for REST API calls"""
def initialize(self, test_control, test_definitions, listener_args, **kwargs):
"""Initialize is called when tornado.web.Application is created"""
self.logger = logging.getLogger(self.__class__.__name__) # pylint: disable=W0201
# Disable tornado access logging by default
logging.getLogger('tornado.access').disabled = True
self.test_control = test_control # pylint: disable=W0201
self.test_definitions = test_definitions # pylint: disable=W0201
self.listener_args = listener_args # pylint: disable=W0201
self.set_header("Access-Control-Allow-Origin", "*")
self.set_header("Access-Control-Allow-Headers", "x-requested-with, Content-Type")
self.set_header('Access-Control-Allow-Methods', 'POST, GET, OPTIONS')
def json_args(self):
"""Get json args (fields) from request body"""
return tornado.escape.json_decode(self.request.body)
def text_plain(self):
"""Get plain text converted from byte string to str"""
return tornado.escape.native_str(self.request.body)
def siren_response(self, resp):
"""Respond with siren, i.e write response and set content type"""
if resp is not None:
self.write(resp)
self.set_content_type()
def set_content_type(self):
"""Set content type"""
self.set_header('Content-Type', RESP_CONTENT_TYPE)
def get(self, *args):
"""Handles get requests
First handles authentication etc, calls handle_get()
on child class and then writes response.
"""
# Tornado stores current user automatically to self.current_user
user = None # Authentication not implemented
host = self.request.headers.get('host')
self.siren_response(self.handle_get(host, user, *args))
def post(self, *args):
"""Handles post requests
First handles authentication etc, calls handle_post()
on child class and then writes response.
"""
self.logger.debug("Handling post")
# Tornado stores current user automatically to self.current_user
user = self.current_user
host = self.request.headers.get('host')
data = None
content_type = self.request.headers['Content-Type']
if 'json' in content_type:
data = self.json_args()
elif 'text/plain' in content_type:
data = self.text_plain()
self.siren_response(self.handle_post(data, host, user, *args))
def options(self):
"""Handle preflight request"""
self.set_status(204)
self.finish()
def get_current_user(self):
cur_user = self.get_secure_cookie("iris")
if cur_user:
return cur_user.decode()
self.clear_cookie("iris")
return "Operator"
class NoCacheStaticFileHandler(tornado.web.StaticFileHandler):
def set_extra_headers(self, path):
# Disable cache
self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
class ApiRootHandler(IrisRequestHandler):
"""Handles calls to "/api"""
def handle_get(self, host, user, *args): # pylint: disable=W0613
"""Handle /api get"""
def handle_post(self, json_args, host, user, *args): # pylint: disable=W0613
"""Handle /api post"""
class LoginHandler(IrisRequestHandler):
def handle_post(self, json_args, host, user, *args):
auth = authenticate(json_args['user'], json_args['password'])
if auth:
self.set_secure_cookie("iris", json_args['user'], expires_days=1)
else:
raise tornado.web.HTTPError(422, "Wrong username / password")
class TestRunnerHandler(IrisRequestHandler):
"""Handles starting of tests, returns status of tests etc."""
def handle_get(self, host, user, *args):
"""Returns running test handlers"""
print("GET test_control")
return "Stay tuned"
def handle_post(self, json_args, host, user, *args): # pylint: disable=W0613
"""Handles post to /api/test_control"""
for key, value in json_args.items():
if key == 'run':
if json_args['run']:
self.test_control['run'].set()
else:
self.test_control['run'].clear()
else:
if key in self.test_control:
self.test_control[key] = value
class HistorySearchItems(IrisRequestHandler):
"""Handles starting of tests, returns status of tests etc."""
def handle_get(self, host, user, *args):
"""Returns running test handlers"""
return self.listener_args['database'].get_search_bar_items()
class IrisEncoder(json.JSONEncoder):
'''Encode json properly'''
def default(self, obj):
try:
return json.JSONEncoder.default(self, obj)
except Exception as e:
return str(obj)
class SearchHistoryHandler(IrisRequestHandler):
"""Handles starting of tests, returns status of tests etc."""
def handle_post(self, json_args, host, user, *args): # pylint: disable=W0613
"""Handles post to /api/test_control"""
return json.dumps(self.listener_args['database'].search_db(json_args), cls=IrisEncoder)
class DutsHandler(IrisRequestHandler):
"""Handles starting of tests, returns status of tests etc."""
def handle_get(self, host, user, *args):
"""Returns running test handlers"""
return {'duts': self.test_definitions.DUTS}
class ProgressHandler(IrisRequestHandler):
"""Handles calls to /api/progress"""
def handle_get(self, host, user, *args):
"""Returns current progress as json"""
return json.dumps({'progress': self.test_control['progress']}, default=str)
class UiEntryHandler(tornado.web.StaticFileHandler):
"""Handles returning the UI from all paths except /api"""
def validate_absolute_path(self, root, absolute_path): # pylint: disable=R1710
"""Validate and return the absolute path.
Override validate_absolute_path of Tornado to return UI from all
paths. This is mostly copy-paste from:
https://www.tornadoweb.org/en/stable/_modules/tornado/web.html#StaticFileHandler.validate_absolute_path
"""
root = os.path.abspath(root)
if not root.endswith(os.path.sep):
# abspath always removes a trailing slash, except when
# root is '/'. This is an unusual case, but several projects
# have independently discovered this technique to disable
# Tornado's path validation and (hopefully) do their own,
# so we need to support it.
root += os.path.sep
# The trailing slash also needs to be temporarily added back
# the requested path so a request to root/ will match.
if not (absolute_path + os.path.sep).startswith(root):
raise tornado.web.HTTPError(403, "%s is not in root static directory", self.path)
if os.path.isdir(absolute_path) and self.default_filename is not None:
# need to look at the request.path here for when path is empty
# but there is some prefix to the path that was already
# trimmed by the routing
if not self.request.path.endswith("/"):
self.redirect(self.request.path + "/", permanent=True)
return
absolute_path = os.path.join(absolute_path, self.default_filename)
if not os.path.exists(absolute_path) or not os.path.isfile(absolute_path):
# This is changed by JOT: Return UI if file/path is not found
return os.path.join(root, self.default_filename)
return absolute_path
class MessageWebsocketHandler(tornado.websocket.WebSocketHandler):
"""
Note that Tornado uses asyncio. Since we are using threads on our backend
we need to use call_soon_threadsafe to get messages through.
"""
def initialize(self, message_handlers=None, return_message_handler=None, **kwargs):
"""Initialize is called when tornado.web.Application is created"""
if message_handlers is not None:
message_handlers.append(self.websocket_signal_handler) # pylint: disable=W0201
self.loop = asyncio.get_event_loop() # pylint: disable=W0201
self.return_message_handler = return_message_handler # pylint: disable=W0201
self.logger = logging.getLogger("MessageWebsocketHandler")
def websocket_signal_handler(self, message): # pylint: disable=W0613
"""Sends application state changes through websocket"""
def send_a_message(self, msg):
try:
self.write_message(msg)
except tornado.websocket.WebSocketClosedError:
pass
except Exception as e:
self.logger.exception("Failed to write to the websocket")
raise e
self.loop.call_soon_threadsafe(send_a_message, self, message)
def open(self, *args: str, **kwargs: str):
"""Called when websocket is opened"""
pass
def get_current_user(self):
cur_user = self.get_secure_cookie("iris")
if cur_user:
return cur_user.decode()
self.clear_cookie("iris")
return "Operator"
def on_close(self):
"""Called when websocket is closed"""
pass
def on_message(self, message):
"""Called when message comes from client through websocket"""
self.return_message_handler.put(message)
def check_origin(self, origin): # pylint: disable=R0201, W0613
"""Checks whether websocket connection from origin is allowed.
We will allow all connection which is actually potential safety risk. See:
https://www.tornadoweb.org/en/stable/websocket.html#tornado.websocket.WebSocketHandler.check_origin
"""
return True
class LogsHandler(IrisRequestHandler):
_filename = ''
def get(self, *args):
self._filename = 'logs_' + time.strftime("%Y%m%d-%H%M%S")
self.set_header('Content-Type', 'application/force-download')
self.set_header('Content-Disposition', 'attachment; filename=%s' % self._filename + '.zip')
shutil.make_archive(self._filename, 'zip', 'logs/')
with open(os.path.join(self._filename + '.zip'), "rb") as _f:
try:
while True:
_buffer = _f.read(4096)
if _buffer:
self.write(_buffer)
else:
_f.close()
self.finish()
return
        except Exception:
            raise tornado.web.HTTPError(404, "Log files not found")
def on_finish(self):
if os.path.exists(self._filename + '.zip'):
os.remove(self._filename + '.zip')
class MediaFileHandler(tornado.web.StaticFileHandler):
def initialize(self, listener_args, path, **kwargs):
"""Initialize is called when tornado.web.Application is created"""
self.logger = logging.getLogger(self.__class__.__name__) # pylint: disable=W0201
# Disable tornado access logging by default
logging.getLogger('tornado.access').disabled = True
self.listener_args = listener_args # pylint: disable=W0201
super().initialize(path=path)
def parse_url_path(self, url_path):
return self.listener_args['database'].get_media_file_path(url_path)
def create_listener(
port,
test_control,
message_handlers,
progress_handlers,
test_definitions,
return_message_handler,
listener_args,
):
"""Setup and create listener"""
import ui
from pathlib import Path
ui_path = Path(ui.__path__[0], 'build')
init = {
'test_control': test_control,
'test_definitions': test_definitions,
'listener_args': listener_args,
}
app = tornado.web.Application(
[
(
r'/api/websocket/messagequeue',
MessageWebsocketHandler,
{'message_handlers': message_handlers},
),
(
r'/api/websocket/progress',
MessageWebsocketHandler,
{'message_handlers': progress_handlers},
),
(
r'/api/websocket/dut_sn',
MessageWebsocketHandler,
{'return_message_handler': return_message_handler},
),
(r"/api", ApiRootHandler, init),
(r"/login", LoginHandler, init),
(r"/api/duts", DutsHandler, init),
(r"/api/history/search_bar_items", HistorySearchItems, init),
(r"/api/history/search", SearchHistoryHandler, init),
(r"/api/progress", ProgressHandler, init),
(
r"/api/latest_result/(.*)",
NoCacheStaticFileHandler,
{'path': 'results/', "default_filename": "latest_result.html"},
),
(r"/api/websocket/log", LogsWebSocketHandler),
(r"/api/testcontrol", TestRunnerHandler, init),
(r"/api/testcontrol/([0-9]+)", TestRunnerHandler, init),
(r"/logs", LogsHandler, init),
(
r"/api/download/(.*)",
tornado.web.StaticFileHandler,
{'path': listener_args['download_path']},
),
(
r"/api/media/(.*)",
MediaFileHandler,
{'listener_args': listener_args, 'path': os.getcwd()},
),
(r"/(.*\.(js|json|html|css))", tornado.web.StaticFileHandler, {'path': ui_path}),
(r"/(.*)", UiEntryHandler, {'path': ui_path, "default_filename": "index.html"}),
],
cookie_secret=get_cookie_secret(),
)
app.listen(port)
```
#### File: iris/test_runner/helpers.py
```python
import datetime
import sys
import os
import json
import importlib
import threading
from test_runner import exceptions
def import_by_name(name, error_message, logger):
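    """Import a module by name, searching the working directory and its
    test_definitions folder (reloading it unless it is the 'common' module);
    logs a warning and exits on failure."""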
sys.path.append(os.getcwd())
sys.path.append(os.path.join(os.getcwd(), 'test_definitions'))
try:
imp = importlib.import_module(name)
if 'common' not in name:
importlib.reload(imp)
# TODO: This hides also errors on nested imports
except ImportError as err:
logger.warning(err)
logger.warning(error_message)
sys.exit(-1)
return imp
def get_test_pool_definitions(logger):
"""Returns test definition pool"""
return import_by_name('test_case_pool', "test_case_pool missing?", logger)
def get_test_definitions(sequence_name, logger):
"""Returns test definitions"""
err = (
"Error loading "
+ sequence_name
+ ". Remember, you can create new definition template with --create argument."
)
return import_by_name("sequences." + sequence_name, err, logger)
``` |
{
"source": "jotautomation/super-simple-test-sequencer",
"score": 2
} |
#### File: super-simple-test-sequencer/test_runner/runner.py
```python
import datetime
import time
import json
import threading
import logging
from unittest.mock import MagicMock
from test_runner import progress_reporter
from test_runner import helpers
from test_runner import exceptions
def get_common_definitions():
"""Returns test definitions"""
return helpers.import_by_name(
'common',
"No test definitions defined? Create definition template with --create argument.",
logging.getLogger("common_definitions"),
)
def get_test_cases(logger):
test_cases = {}
for sequence in get_common_definitions().TEST_SEQUENCES:
test_definitions = helpers.get_test_definitions(sequence, logger)
test_cases[sequence] = [
{'name': t}
for t in test_definitions.TESTS
if t not in test_definitions.SKIP and '_pre' not in t and '_post' not in t
]
return test_cases
def get_test_control(logger):
"""Returns default test control dictionary"""
return {
'single_run': False,
'step': False,
'skip': None,
'loop_all': False,
'loop': None,
'retest_on_fail': 0,
'terminate': False,
'abort': False,
'report_off': False,
'run': threading.Event(),
'get_sn_from_ui': get_common_definitions().SN_FROM_UI,
'test_sequences': get_common_definitions().TEST_SEQUENCES,
'dry_run': False,
'test_cases': get_test_cases(logger),
}
def get_sn_from_ui(dut_sn_queue, logger):
"""Returns serial numbers from UI"""
sequence_name = None
test_cases = None
common_definitions = get_common_definitions()
duts_sn = {
test_position.name: {'sn': None} for test_position in common_definitions.TEST_POSITIONS
}
logger.info(
'Wait SNs from UI for test_positions: '
+ ", ".join([str(t) for t in common_definitions.TEST_POSITIONS])
)
while True:
msg = dut_sn_queue.get()
try:
msg = json.loads(msg)
for dut in msg:
if dut in duts_sn:
duts_sn[dut]['sn'] = msg[dut]
if 'sequence' in msg:
sequence_name = msg['sequence']
if 'testCases' in msg and msg['testCases']:
test_cases = [t['name'] for t in msg['testCases']]
except (AttributeError, json.decoder.JSONDecodeError):
pass
# Loop until all test_positions have received a serial number
for dut in duts_sn:
if not duts_sn[dut]['sn']:
break
else:
logger.info("All DUT serial numbers received from UI")
logger.info("Selected test %s", sequence_name)
break
return (duts_sn, sequence_name, {"name": "Not available"}, test_cases)
def run_test_runner(test_control, message_queue, progess_queue, dut_sn_queue, listener_args):
"""Starts the testing"""
logger = logging.getLogger('test_runner')
def send_message(message):
if message:
message_queue.put(message)
common_definitions = get_common_definitions()
progress = progress_reporter.ProgressReporter(test_control, progess_queue)
progress.set_progress(general_state="Boot")
for instrument in common_definitions.INSTRUMENTS.keys():
progress.set_instrument_status(instrument, 'Not initialized')
if test_control['dry_run']:
for instrument in common_definitions.INSTRUMENTS.keys():
common_definitions.INSTRUMENTS[instrument] = MagicMock()
progress.set_instrument_status(instrument, 'MagicMock')
elif 'mock' in test_control:
for instrument in common_definitions.INSTRUMENTS.keys():
if instrument in test_control['mock']:
common_definitions.INSTRUMENTS[instrument] = MagicMock()
progress.set_instrument_status(instrument, 'MagicMock')
elif 'inverse_mock' in test_control:
for instrument in common_definitions.INSTRUMENTS.keys():
if instrument not in test_control['inverse_mock']:
common_definitions.INSTRUMENTS[instrument] = MagicMock()
progress.set_instrument_status(instrument, 'MagicMock')
logger.info("Initializing instruments")
# Initialize all instruments
common_definitions.handle_instrument_status(progress, logger)
logger.info("All instruments initialized")
db_handler = common_definitions.INSTRUMENTS[common_definitions.DB_HANDLER_NAME]
listener_args['database'] = db_handler
# Execute boot_up defined for the test sequence
common_definitions.boot_up(common_definitions.INSTRUMENTS, logger)
test_positions = {}
fail_reason_history = ''
fail_reason_count = 0
pass_count = 0
for position in common_definitions.TEST_POSITIONS:
test_positions[position.name] = position
# Start the actual test loop
while not test_control['terminate']:
# Wait until you are allowed to run again i.e. pause
test_control['run'].wait()
logger.info("Start new test run")
try:
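            # Pre-test steps (cases suffixed '_pre') run in background threads so
            # they can overlap with the preceding case; post-test steps are
            # threaded the same way and joined before results are collected.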
background_pre_tasks = {}
background_post_tasks = []
test_control['abort'] = False
logger.info("Checking status of instruments")
progress.set_progress(
general_state="Checking status of instruments",
overall_result=None,
test_positions=test_positions,
)
logger.info("All instruments OK")
common_definitions.handle_instrument_status(progress, logger)
if db_handler:
db_handler.clean_db()
if isinstance(db_handler, MagicMock):
progress.set_progress(statistics={'statistics': 'mocked'})
else:
progress.set_progress(statistics=db_handler.get_statistics())
# Clear test positions
for position_name, position in test_positions.items():
position.prepare_for_new_test_run()
progress.set_progress(general_state="Prepare", test_positions=test_positions)
test_cases_override = None
# DUT sn may come from UI
if test_control['get_sn_from_ui']:
(
dut_sn_values,
sequence_name,
operator_info,
test_cases_override,
) = get_sn_from_ui(dut_sn_queue, logger)
sequence_name_from_identify = common_definitions.identify_DUTs(
dut_sn_values, common_definitions.INSTRUMENTS, logger
)
# If sequence was not selected, get it from identify_DUTs
if sequence_name is None:
sequence_name = sequence_name_from_identify
else:
# Or from identify_DUTs function
(dut_sn_values, sequence_name, operator_info,) = common_definitions.identify_DUTs(
None, common_definitions.INSTRUMENTS, logger
)
common_definitions.prepare_test(
common_definitions.INSTRUMENTS, logger, dut_sn_values, sequence_name
)
# Create dut instances
for test_position, dut_info in dut_sn_values.items():
if dut_info is None:
test_positions[test_position].dut = None
else:
test_positions[test_position].dut = common_definitions.parse_dut_info(
dut_info, test_position
)
results = {"operator": operator_info, "tester": common_definitions.get_tester_info()}
# Fetch test definitions i.e. import module
test_definitions = helpers.get_test_definitions(sequence_name, logger)
# Fetch test case pool too
test_pool = helpers.get_test_pool_definitions(logger)
# Remove skipped test_case_names from test list
test_case_names = [t for t in test_definitions.TESTS if t not in test_definitions.SKIP]
start_time_epoch = time.time()
start_time = datetime.datetime.now()
start_time_monotonic = time.monotonic()
test_run_id = str(start_time_epoch).replace('.', '_')
# Run all test cases
for test_case_name in test_case_names:
# Loop for testing
if test_control['abort']:
send_message("Test aborted")
logger.warning("Test aborted")
break
if test_cases_override and test_case_name not in test_cases_override:
continue
is_pre_test = False
if '_pre' in test_case_name:
is_pre_test = True
                    test_case_name = test_case_name.replace('_pre', '')
# Run test cases for each DUT in test position
for test_position_name, test_position_instance in test_positions.items():
                    # sn is left as None when no test cases should run for this
                    # position but it should still be shown on the UI
if not test_position_instance.dut or test_position_instance.stop_testing:
continue
# Fill DUT data
test_position_instance.step = test_case_name
test_position_instance.status = 'testing'
progress.set_progress(
general_state='testing',
test_positions=test_positions,
sequence_name=sequence_name,
)
def new_test_instance(the_case, the_position_instance):
if hasattr(test_definitions, the_case):
test_instance = getattr(test_definitions, the_case)(
test_definitions.LIMITS,
progress,
the_position_instance.dut,
test_definitions.PARAMETERS,
db_handler,
common_definitions,
)
elif hasattr(test_pool, the_case):
test_instance = getattr(test_pool, the_case)(
test_definitions.LIMITS,
progress,
the_position_instance.dut,
test_definitions.PARAMETERS,
db_handler,
common_definitions,
)
else:
raise exceptions.TestCaseNotFound(
"Cannot find specified test case: " + the_case
)
return test_instance
# Create test case instance
if test_case_name not in test_position_instance.test_case_instances:
test_position_instance.test_case_instances[
test_case_name
] = new_test_instance(test_case_name, test_position_instance)
test_instance = test_position_instance.test_case_instances[test_case_name]
test_instance.test_position = test_position_instance
test_instance.test_run_id = test_run_id
try:
if is_pre_test:
# Start pre task and store it to dictionary
if test_case_name not in background_pre_tasks:
background_pre_tasks[test_case_name] = {}
background_pre_tasks[test_case_name][
test_position_name
] = threading.Thread(target=test_instance.run_pre_test)
background_pre_tasks[test_case_name][test_position_name].start()
else:
# Wait for pre task
if (
test_case_name in background_pre_tasks
and test_position_name in background_pre_tasks[test_case_name]
):
background_pre_tasks[test_case_name][test_position_name].join()
else:
# Or if pre task is not run, run it now
test_instance.run_pre_test()
test_position_instance.test_status = "Testing"
progress.set_progress(
general_state='testing',
                            test_positions=test_positions,
sequence_name=sequence_name,
)
# Run the actual test case
test_instance.run_test()
progress.set_progress(
general_state='testing',
test_positions=test_positions,
sequence_name=sequence_name,
)
test_position_instance.test_status = "Idle"
# Start post task and store it to list
bg_task = threading.Thread(target=test_instance.run_post_test)
bg_task.start()
background_post_tasks.append(bg_task)
except Exception as err:
trace = []
trace_back = err.__traceback__
while trace_back is not None:
trace.append(
{
"filename": trace_back.tb_frame.f_code.co_filename,
"name": trace_back.tb_frame.f_code.co_name,
"line": trace_back.tb_lineno,
}
)
trace_back = trace_back.tb_next
err_dict = {
'type': type(err).__name__,
'message': str(err),
'trace': trace,
}
test_instance.handle_error(error=err_dict)
else:
# No error and no active tests
test_position_instance.status = 'idle'
test_position_instance.step = None
progress.set_progress(
general_state='testing',
test_positions=test_positions,
sequence_name=sequence_name,
)
for task in background_post_tasks:
task.join()
            for test_position_name, test_position_instance in test_positions.items():
                dut = test_position_instance.dut
                # Skip empty test positions before touching dut attributes
                if not dut:
                    continue
                results[dut.serial_number] = dut.test_cases
if dut.pass_fail_result == 'error':
errors = [
f"{case_name}: {case['error']}"
for case_name, case in dut.test_cases.items()
if case['result'] == 'error' and 'error' in case
]
send_message(f"{dut.serial_number}: ERROR: " + ', '.join(errors))
test_position_instance.test_status = 'error'
elif dut.pass_fail_result == 'pass':
send_message(f"{dut.serial_number}: PASSED")
test_position_instance.test_status = 'pass'
pass_count = pass_count + 1
else:
send_message(f"{dut.serial_number}: FAILED: {', '.join(dut.failed_steps)}")
test_position_instance.test_status = 'fail'
if fail_reason_history == dut.failed_steps:
fail_reason_count = fail_reason_count + 1
else:
fail_reason_count = 0
fail_reason_history = dut.failed_steps
pass_count = 0
if fail_reason_count > 4 and pass_count < 5:
send_message(f"WARNING: 5 or more consecutive fails on {fail_reason_history}")
progress.set_progress(
general_state='finalize',
test_positions=test_positions,
overall_result=dut.pass_fail_result,
sequence_name=sequence_name,
)
common_definitions.finalize_test(
dut.pass_fail_result, test_positions, common_definitions.INSTRUMENTS, logger
)
results["start_time"] = start_time
results["start_time_epoch"] = start_time_epoch
results["end_time"] = datetime.datetime.now()
results["test_run_id"] = test_run_id
results["duration_s"] = round(time.monotonic() - start_time_monotonic, 2)
except exceptions.IrisError as e:
# TODO: write error to report
logger.exception("Error on testsequence")
continue
except Exception as exp:
# TODO: write error to report
logger.exception("Error on testsequence")
continue
else:
pass
finally:
pass
# Don't create report if aborted
if test_control['abort']:
common_definitions.test_aborted(common_definitions.INSTRUMENTS, logger)
continue
progress.set_progress(
general_state="Create test report",
test_positions=test_positions,
overall_result=dut.pass_fail_result,
sequence_name=sequence_name,
)
try:
if not test_control['report_off']:
common_definitions.create_report(
json.dumps(results, indent=4, default=str),
results,
test_positions,
test_definitions.PARAMETERS,
db_handler,
common_definitions,
progress,
)
except Exception as e:
progress.set_progress(general_state="Error")
send_message("Error while generating a test report")
send_message(str(e))
if test_control['single_run']:
test_control['terminate'] = True
progress.set_progress(general_state="Shutdown")
common_definitions.shutdown(common_definitions.INSTRUMENTS, logger)
``` |
{
"source": "jotavaladouro/motorwat_dashboard",
"score": 3
} |
#### File: motorwat_dashboard/big_query/loader.py
```python
from string import Template
import argparse
import datetime
import numpy as np
import big_query
import window
from time import sleep
from datetime import date, timedelta
import mysql
from typing import Optional
INPUT_DIR_LOCAL = "./data/"
class DeltaTemplate(Template):
delimiter = "%"
def strfdelta(tdelta, fmt):
"""
:param tdelta:datetime timedelta
:param fmt: Format
:return: String from tdelte in fmt format
"""
d = {"D": tdelta.days}
hours, rem = divmod(tdelta.seconds, 3600)
minutes, seconds = divmod(rem, 60)
d["H"] = '{:02d}'.format(hours)
d["M"] = '{:02d}'.format(minutes)
d["S"] = '{:02d}'.format(seconds)
t = DeltaTemplate(fmt)
return t.substitute(**d)
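

# e.g. strfdelta(datetime.timedelta(seconds=3725), '%H:%M:%S') -> '01:02:05'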
# Get a string from a datetime.timedelta
def lambda_timedelta(x): return strfdelta(x, '%H:%M:%S')
# Get a string from a datetime.date
def lambda_date2string(x): return x.strftime('%Y-%m-%d') if type(x) == datetime.date else x
# Replace None with 0000-00-00
def lambda_data_none(x):
if x is None:
return "0000-00-00"
else:
return x
def process_dataframe(dataframe_result):
    """
    Process times and dates in a dataframe
    :param dataframe_result: dataframe
    :return: dataframe processed
    """
    if dataframe_result is not None:
        # T_Hora_C: transit time
        dataframe_result[4] = dataframe_result[4]. \
            apply(lambda_timedelta)
        # D_Obu_Data: OBU entry date
        dataframe_result[12] = dataframe_result[12]. \
            apply(lambda_data_none)
        # T_Obu_Time: OBU entry time
        dataframe_result[13] = dataframe_result[13]. \
            apply(lambda_timedelta)
        # D_Data_C: transit date
        dataframe_result[3] = dataframe_result[3]. \
            apply(lambda_date2string)
        # D_Obu_Data: OBU entry date
        dataframe_result[12] = dataframe_result[12]. \
            apply(lambda_date2string)
    return dataframe_result
def print_lines(dataframe_load):
"""
Print rows from a dataframe
:param dataframe_load:
:return:
"""
csv = dataframe_load.to_csv(
header=None,
index=False).strip('\n').split('\n')
for line in csv:
print(line)
def parse_parameter():
"""
:return: args_return:
day
only_print
online
until_yestarday
"""
parser = argparse.ArgumentParser()
parser.add_argument("day", help="day string")
parser.add_argument('--only_print', help='Print data, not send',
action='store_true', default=False)
parser.add_argument('--online', help='Online send',
action='store_true', default=False)
    parser.add_argument('--until_yesterday', help="load from date until yesterday",
action='store_true', default=False)
args_return = parser.parse_args()
return args_return
def load_df(df, window_remain=5):
"""
    Load a dataframe into the raw table and the in-memory sliding time windows.
    The last window_remain windows stay open for future incoming data;
    the remaining closed windows are loaded to BigQuery.
    :param df: dataframe
    :param window_remain: number of trailing time windows to keep in memory
    :return:
"""
df["Travel_Time_Second"] = 0
# Prepare data and load to rwa table
df = big_query.add_raw_columns_names(df)
df = big_query.add_travel_time(df)
big_query.write_df_raw(df)
# Add data to window time slider
window.window_add_dataframe(df)
# Get windows to load to big query
df_aggr_total = window.window_get_window_ready(window_remain)
if df_aggr_total is not None:
df_aggr = window.window_get_window_ready(window_remain)
while df_aggr is not None:
df_aggr_total = df_aggr_total.append(df_aggr)
df_aggr = window.window_get_window_ready(window_remain)
big_query.write_df_aggr(df_aggr_total)
def load_day(date_load, online, lmysql, lbigquery):
"""
Get day data from mysql and load to bigquery.
Before this delete data in big query from that day
:param date_load: day
:param online: if True, never finish, load data, sleep and load new data
:param lmysql: Mysql conection
:param lbigquery: BigQuery connection
:return:
"""
print("Load day, delete " + str(date_load) + " online " + str(online))
big_query.delete_day(date_load, lbigquery)
print("Load day " + str(date_load) + " online " + str(online))
end = False
last_index = 0 # Last index read from mysql database
while not end:
# Get data from mysql
data = mysql.get_data(lmysql, date_load, last_index)
        data = process_dataframe(data)
if data is not None:
            # The last column is the database index; keep it for subsequent
            # SQL queries and drop it from the dataframe.
print("Loading " + str(data.shape))
last_index = data[15].max()
data = data.drop(columns=15)
if args.only_print:
print_lines(data)
else:
if args.online:
                # If online, keep the last five windows open for future incoming data
load_df(data, window_remain=5)
else:
                # If not online, no more data will arrive, so keep nothing open.
load_df(data, window_remain=0)
if not args.online:
end = True
else:
sleep(60)
def get_days(init_date, until_yesterday):
"""
    If until_yesterday, return a list of days from init_date
    up to and including yesterday;
    otherwise return a list containing only init_date.
    :param init_date: first day in the list
    :param until_yesterday:
    :return: list of day strings
"""
if until_yesterday:
start = datetime.datetime.strptime(init_date, "%Y-%m-%d").date()
yesterday = date.today() - timedelta(1)
lst_days = [(start + datetime.timedelta(days=x)).
strftime("%Y-%m-%d") for x in range(0, (yesterday - start).days + 1)]
else:
lst_days = [init_date]
return lst_days
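

# e.g. get_days('2021-05-01', False) -> ['2021-05-01']; with
# until_yesterday=True the list runs day by day up to yesterday's date.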
if __name__ == "__main__":
np.seterr(all='raise')
args = parse_parameter()
# Get mysql and big_query client connections
mysql_conn = mysql.connect_mysql()
client = big_query.get_client_bigquery()
print(args)
# Init window
window.window_init()
try:
for day in get_days(args.day, args.until_yesterday):
load_day(day, args.online, mysql_conn, client)
except KeyboardInterrupt:
pass
mysql.close_mysql(mysql_conn)
``` |
{
"source": "jotavev/usp-computer-science-python",
"score": 4
} |
#### File: usp-computer-science-python/week5/funcFatorial.py
```python
def fatorial(n):
    # Computes n! iteratively; by convention fatorial(0) == 1.
    resultado = 1
    while n > 1:
        resultado = resultado * n
        n -= 1
    return resultado
```
#### File: usp-computer-science-python/week5/funcMaiorPrimo.py
```python
def ehprimo(numero):
    # Trial division up to sqrt(numero). The original hard-coded divisor list
    # misclassified squares of larger primes, e.g. 169 = 13 * 13.
    if numero < 2:
        return False
    divisor = 2
    while divisor * divisor <= numero:
        if numero % divisor == 0:
            return False
        divisor += 1
    return True
def maior_primo(entrada):
    subindo = 0
    guardadinho = 0
    while subindo < entrada:
        subindo += 1
        if ehprimo(subindo):
            guardadinho = subindo
    return guardadinho
```
#### File: week5/opcionais/maximo.py
```python
def maximo(a, b, c):
    # Uses >= so ties are handled; the original strict comparisons
    # returned None when two or three values were equal.
    if a >= b and a >= c:
        return a
    if b >= c:
        return b
    return c
```
#### File: week7/opcionais/n_primo.py
```python
def primo(numero):
    # Trial division up to sqrt(numero); fixes the original hard-coded
    # divisor list, which misclassified e.g. 169 = 13 * 13 as prime.
    if numero < 2:
        return False
    divisor = 2
    while divisor * divisor <= numero:
        if numero % divisor == 0:
            return False
        divisor += 1
    return True
def n_primos(n):
subindo = 0
guardadinho = 0
while subindo < n:
subindo += 1
        if primo(subindo):
guardadinho += 1
return guardadinho
``` |
{
"source": "jotaylor/awesimsoss",
"score": 2
} |
#### File: awesimsoss/awesimsoss/awesim.py
```python
import datetime
from functools import partial, wraps
from multiprocessing.pool import ThreadPool
from multiprocessing import cpu_count
import os
from pkg_resources import resource_filename
import time
import warnings
import astropy.units as q
import astropy.constants as ac
from astropy.io import fits
from astropy.modeling.models import BlackBody1D
from astropy.modeling.blackbody import FLAM
from astropy.coordinates import SkyCoord
from astroquery.simbad import Simbad
import batman
from bokeh.plotting import figure, show
from bokeh.models import HoverTool, LogColorMapper, LogTicker, LinearColorMapper, ColorBar, Span
from bokeh.layouts import column
import numpy as np
try:
from jwst.datamodels import RampModel
except ImportError:
print("Could not import `jwst` package. Functionality limited.")
from . import generate_darks as gd
from . import make_trace as mt
from . import utils
warnings.simplefilter('ignore')
def check_psf_files():
"""Function to run on import to verify that the PSF files have been precomputed"""
if not os.path.isfile(resource_filename('awesimsoss', 'files/SOSS_CLEAR_PSF_order1_1.npy')):
print("Looks like you haven't generated the SOSS PSFs yet, which are required to produce simulations.")
print("This takes about 10 minutes but you will only need to do it this one time.")
compute = input("Would you like to do it now? [y] ")
        if compute.strip() == '' or compute.lower() in ['y', 'yes']:
mt.nuke_psfs()
def run_required(func):
"""A wrapper to check that the simulation has been run before a method can be executed"""
@wraps(func)
def _run_required(*args, **kwargs):
"""Check that the 'tso' attribute is not None"""
if args[0].tso is None:
print("No simulation found! Please run the 'simulate' method first.")
else:
return func(*args, **kwargs)
return _run_required
check_psf_files()
class TSO(object):
"""
Generate NIRISS SOSS time series observations
"""
def __init__(self, ngrps, nints, star=None, planet=None, tmodel=None, snr=700,
filter='CLEAR', subarray='SUBSTRIP256', orders=[1, 2], t0=0, nresets=0,
target='New Target', title=None, verbose=True):
"""
Initialize the TSO object and do all pre-calculations
Parameters
----------
ngrps: int
The number of groups per integration
nints: int
The number of integrations for the exposure
star: sequence
The wavelength and flux of the star
planet: sequence
The wavelength and transmission of the planet
snr: float
The signal-to-noise
filter: str
The name of the filter to use, ['CLEAR', 'F277W']
subarray: str
The name of the subarray to use, ['SUBSTRIP256', 'SUBSTRIP96', 'FULL']
orders: int, list
The orders to simulate, [1], [1, 2], [1, 2, 3]
t0: float
The start time of the exposure [days]
nresets: int
The number of resets before each integration
target: str (optional)
The name of the target
        title: str (optional)
A title for the simulation
verbose: bool
Print status updates throughout calculation
Example
-------
# Imports
import numpy as np
from awesimsoss import TSO, STAR_DATA
import astropy.units as q
from pkg_resources import resource_filename
star = np.genfromtxt(resource_filename('awesimsoss', 'files/scaled_spectrum.txt'), unpack=True)
star1D = [star[0]*q.um, (star[1]*q.W/q.m**2/q.um).to(q.erg/q.s/q.cm**2/q.AA)]
# Initialize simulation
tso = TSO(ngrps=3, nints=10, star=star1D)
"""
# Metadata
self.verbose = verbose
self.target = target
self.title = title or '{} Simulation'.format(self.target)
# Set static values
self.gain = 1.61
self._star = None
# Set instance attributes for the exposure
self.t0 = t0
self.ngrps = ngrps
self.nints = nints
self.nresets = nresets
self.nframes = (self.nresets+self.ngrps)*self.nints
self.obs_date = datetime.datetime.now().strftime('%x')
self.obs_time = datetime.datetime.now().strftime('%X')
self.orders = orders
self.filter = filter
self.header = ''
self.snr = snr
self.model_grid = None
self.subarray = subarray
# Set instance attributes for the target
self.star = star
self.tmodel = tmodel
self.ld_coeffs = np.zeros((3, 2048, 2))
self.ld_profile = 'quadratic'
self.planet = planet
# Reset data based on subarray and observation settings
self._reset_data()
self._reset_time()
@run_required
def add_noise(self, zodi_scale=1., offset=500):
"""
Generate ramp and background noise
Parameters
----------
zodi_scale: float
The scale factor of the zodiacal background
offset: int
The dark current offset
"""
print('Adding noise to TSO...')
start = time.time()
# Get the separated orders
orders = np.asarray([getattr(self, 'tso_order{}_ideal'.format(i)) for i in self.orders])
# Load the reference files
pca0_file = resource_filename('awesimsoss', 'files/niriss_pca0.fits')
nonlinearity = fits.getdata(resource_filename('awesimsoss', 'files/forward_coefficients_dms.fits'))
pedestal = fits.getdata(resource_filename('awesimsoss', 'files/pedestaldms.fits'))
photon_yield = fits.getdata(resource_filename('awesimsoss', 'files/photonyieldfullframe.fits'))
zodi = fits.getdata(resource_filename('awesimsoss', 'files/background_detectorfield_normalized.fits'))
darksignal = fits.getdata(resource_filename('awesimsoss', 'files/signaldms.fits'))*self.gain
# Slice of FULL frame reference files
slc = slice(1792, 1888) if self.subarray == 'SUBSTRIP96' else slice(1792, 2048) if self.subarray == 'SUBSTRIP256' else slice(0, 2048)
# Trim FULL frame reference files
pedestal = pedestal[slc, :]
nonlinearity = nonlinearity[:, slc, :]
zodi = zodi[slc, :]
darksignal = darksignal[slc, :]
photon_yield = photon_yield[:, slc, :]
# Generate the photon yield factor values
pyf = gd.make_photon_yield(photon_yield, np.mean(orders, axis=1))
# Remove negatives from the dark ramp
darksignal[np.where(darksignal < 0.)] = 0.
# Make the exposure
RAMP = gd.make_exposure(1, self.ngrps, darksignal, self.gain, pca0_file=pca0_file, offset=offset)
# Iterate over integrations
for n in range(self.nints):
# Add in the SOSS signal
ramp = gd.add_signal(self.tso_ideal[self.ngrps*n:self.ngrps*n+self.ngrps], RAMP.copy(), pyf, self.frame_time, self.gain, zodi, zodi_scale, photon_yield=False)
# Apply the non-linearity function
ramp = gd.non_linearity(ramp, nonlinearity, offset=offset)
# Add the pedestal to each frame in the integration
ramp = gd.add_pedestal(ramp, pedestal, offset=offset)
# Update the TSO with one containing noise
self.tso[self.ngrps*n:self.ngrps*n+self.ngrps] = ramp
# Memory cleanup
del RAMP, ramp, pyf, photon_yield, darksignal, zodi, nonlinearity, pedestal, orders
print('Noise model finished:', round(time.time()-start, 3), 's')
@run_required
def add_refpix(self, counts=0):
"""Add reference pixels to detector edges
Parameters
----------
counts: int
The number of counts or the reference pixels
"""
# Left, right (all subarrays)
self.tso[:, :, :, :4] = counts
self.tso[:, :, :, -4:] = counts
# Top (excluding SUBSTRIP96)
if self.subarray != 'SUBSTRIP96':
self.tso[:, :, -4:, :] = counts
# Bottom (Only FULL frame)
if self.subarray == 'FULL':
self.tso[:, :, :4, :] = counts
@run_required
def export(self, outfile, all_data=False):
"""
Export the simulated data to a JWST pipeline ingestible FITS file
Parameters
----------
outfile: str
The path of the output file
"""
# Make a RampModel
data = self.tso
mod = RampModel(data=data, groupdq=np.zeros_like(data), pixeldq=np.zeros((self.nrows, self.ncols)), err=np.zeros_like(data))
pix = utils.subarray(self.subarray)
# Set meta data values for header keywords
mod.meta.telescope = 'JWST'
mod.meta.instrument.name = 'NIRISS'
mod.meta.instrument.detector = 'NIS'
mod.meta.instrument.filter = self.filter
mod.meta.instrument.pupil = 'GR700XD'
mod.meta.exposure.type = 'NIS_SOSS'
mod.meta.exposure.nints = self.nints
mod.meta.exposure.ngroups = self.ngrps
mod.meta.exposure.nframes = self.nframes
mod.meta.exposure.readpatt = 'NISRAPID'
mod.meta.exposure.groupgap = 0
mod.meta.exposure.frame_time = self.frame_time
mod.meta.exposure.group_time = self.group_time
mod.meta.exposure.duration = self.time[-1]-self.time[0]
mod.meta.subarray.name = self.subarray
mod.meta.subarray.xsize = data.shape[3]
mod.meta.subarray.ysize = data.shape[2]
mod.meta.subarray.xstart = pix.get('xloc', 1)
mod.meta.subarray.ystart = pix.get('yloc', 1)
mod.meta.subarray.fastaxis = -2
mod.meta.subarray.slowaxis = -1
mod.meta.observation.date = self.obs_date
mod.meta.observation.time = self.obs_time
mod.meta.target.ra = self.ra
mod.meta.target.dec = self.dec
mod.meta.target.source_type = 'POINT'
# Save the file
mod.save(outfile, overwrite=True)
# Save input data
with fits.open(outfile) as hdul:
# Save input star data
hdul.append(fits.ImageHDU(data=np.array([i.value for i in self.star], dtype=np.float64), name='STAR'))
hdul['STAR'].header.set('FUNITS', str(self.star[1].unit))
hdul['STAR'].header.set('WUNITS', str(self.star[0].unit))
# Save input planet data
if self.planet is not None:
hdul.append(fits.ImageHDU(data=np.asarray(self.planet, dtype=np.float64), name='PLANET'))
for param, val in self.tmodel.__dict__.items():
if isinstance(val, (float, int, str)):
hdul['PLANET'].header.set(param.upper()[:8], val)
elif isinstance(val, np.ndarray) and len(val) == 1:
hdul['PLANET'].header.set(param.upper(), val[0])
elif isinstance(val, type(None)):
hdul['PLANET'].header.set(param.upper(), '')
elif param == 'u':
for n, v in enumerate(val):
hdul['PLANET'].header.set('U{}'.format(n+1), v)
else:
print(param, val, type(val))
# Write to file
hdul.writeto(outfile, overwrite=True)
print('File saved as', outfile)
@property
def filter(self):
"""Getter for the filter"""
return self._filter
@filter.setter
def filter(self, filt):
"""Setter for the filter
Properties
----------
filt: str
The name of the filter to use,
['CLEAR', 'F277W']
"""
# Valid filters
filts = ['CLEAR', 'F277W']
# Check the value
if not isinstance(filt, str) or filt.upper() not in filts:
raise ValueError("'{}' not a supported filter. Try {}".format(filt, filts))
# Set it
filt = filt.upper()
self._filter = filt
# If F277W, set orders to 1 to speed up calculation
if filt == 'F277W':
self.orders = [1]
# Get absolute calibration reference file
calfile = resource_filename('awesimsoss', 'files/niriss_ref_photom.fits')
caldata = fits.getdata(calfile)
self.photom = caldata[(caldata['pupil'] == 'GR700XD') & (caldata['filter'] == filt)]
# Update the results
self._reset_data()
# Reset relative response function
self._reset_psfs()
@property
def info(self):
"""Summary table for the observation settings"""
# Pull out relevant attributes
track = ['_ncols', '_nrows', '_nints', '_ngrps', '_nresets', '_subarray', '_filter', '_t0', '_orders', 'ld_profile', '_target', 'title', 'ra', 'dec']
settings = {key.strip('_'): val for key, val in self.__dict__.items() if key in track}
return settings
@property
def ld_coeffs(self):
"""Get the limb darkening coefficients"""
return self._ld_coeffs
@ld_coeffs.setter
def ld_coeffs(self, coeffs):
"""Set the limb darkening coefficients
Parameters
----------
coeffs: str, sequence
The limb darkening coefficients or 'update'
"""
# Default message
msg = "Limb darkening coefficients must be an array of 3 dimensions"
# Update the coeffs based on the transit model parameters
if coeffs == 'update':
# Check the transit model
if self.tmodel is None:
msg = "Please set a transit model with the 'tmodel' attribute to update the limb darkening coefficients"
# Check the model grid
elif self.model_grid is None:
msg = "Please set a stellar intensity model grid with the 'model_grid' attribute to update the limb darkening coefficients"
# Generate the coefficients
else:
coeffs = [mt.generate_SOSS_ldcs(self.avg_wave[order-1], self.tmodel.limb_dark, [getattr(self.tmodel, p) for p in ['teff', 'logg', 'feh']], model_grid=self.model_grid) for order in self.orders]
# Check the coefficient type
if not isinstance(coeffs, np.ndarray) or not coeffs.ndim == 3:
if self.verbose:
print(msg)
else:
self._ld_coeffs = coeffs
@property
def ncols(self):
"""Getter for the number of columns"""
return self._ncols
@ncols.setter
def ncols(self, err):
"""Error when trying to change the number of columns
"""
raise TypeError("The number of columns is fixed by setting the 'subarray' attribute.")
@property
def ngrps(self):
"""Getter for the number of groups"""
return self._ngrps
@ngrps.setter
def ngrps(self, ngrp_val):
"""Setter for the number of groups
Properties
----------
ngrp_val: int
The number of groups
"""
# Check the value
if not isinstance(ngrp_val, int):
raise TypeError("The number of groups must be an integer")
# Set it
self._ngrps = ngrp_val
# Update the results
self._reset_data()
self._reset_time()
@property
def nints(self):
"""Getter for the number of integrations"""
return self._nints
@nints.setter
def nints(self, nint_val):
"""Setter for the number of integrations
Properties
----------
nint_val: int
The number of integrations
"""
# Check the value
if not isinstance(nint_val, int):
raise TypeError("The number of integrations must be an integer")
# Set it
self._nints = nint_val
# Update the results
self._reset_data()
self._reset_time()
@property
def nresets(self):
"""Getter for the number of resets"""
return self._nresets
@nresets.setter
def nresets(self, nreset_val):
"""Setter for the number of resets
Properties
----------
nreset_val: int
The number of resets
"""
# Check the value
if not isinstance(nreset_val, int):
raise TypeError("The number of resets must be an integer")
# Set it
self._nresets = nreset_val
# Update the time (data shape doesn't change)
self._reset_time()
@property
def nrows(self):
"""Getter for the number of rows"""
return self._nrows
@nrows.setter
def nrows(self, err):
"""Error when trying to change the number of rows
"""
raise TypeError("The number of rows is fixed by setting the 'subarray' attribute.")
@property
def orders(self):
"""Getter for the orders"""
return self._orders
@orders.setter
def orders(self, ords):
"""Setter for the orders
Properties
----------
ords: list
The orders to simulate, [1, 2, 3]
"""
# Valid order lists
orderlist = [[1], [1, 2], [1, 2, 3]]
# Check the value
# Set single order to list
if isinstance(ords, int):
ords = [ords]
if not all([o in [1, 2, 3] for o in ords]):
raise ValueError("'{}' is not a valid list of orders. Try {}".format(ords, orderlist))
# Set it
self._orders = ords
# Update the results
self._reset_data()
@property
def planet(self):
"""Getter for the stellar data"""
return self._planet
@planet.setter
def planet(self, spectrum):
"""Setter for the planetary data
Parameters
----------
spectrum: sequence
The [W, F] or [W, F, E] of the planet to simulate
"""
# Check if the planet has been set
if spectrum is None:
self._planet = None
else:
# Check planet is a sequence of length 2 or 3
if not isinstance(spectrum, (list, tuple)) or not len(spectrum) in [2, 3]:
raise ValueError(type(spectrum), ': Planet input must be a sequence of [W, F] or [W, F, E]')
# Check the units
if not spectrum[0].unit.is_equivalent(q.um):
raise ValueError(spectrum[0].unit, ': Wavelength must be in units of distance')
            # Check the transmission spectrum is between 0 and 1
            if not np.all((spectrum[1] >= 0) & (spectrum[1] <= 1)):
raise ValueError('{} - {}: Transmission must be between 0 and 1'.format(min(spectrum[1]), max(spectrum[1])))
# Check the wavelength range
spec_min = np.nanmin(spectrum[0][spectrum[0] > 0.])
spec_max = np.nanmax(spectrum[0][spectrum[0] > 0.])
sim_min = np.nanmin(self.wave[self.wave > 0.])*q.um
sim_max = np.nanmax(self.wave[self.wave > 0.])*q.um
if spec_min > sim_min or spec_max < sim_max:
print("Wavelength range of input spectrum ({} - {} um) does not cover the {} - {} um range needed for a complete simulation. Interpolation will be used at the edges.".format(spec_min, spec_max, sim_min, sim_max))
# Good to go
self._planet = spectrum
@run_required
def plot(self, ptype='data', idx=0, scale='linear', order=None, noise=True,
traces=False, saturation=0.8, draw=True):
"""
Plot a TSO frame
Parameters
----------
ptype: str
The type of plot, ['data', 'snr', 'saturation']
idx: int
The frame index to plot
scale: str
Plot scale, ['linear', 'log']
order: sequence
The order to isolate
noise: bool
Plot with the noise model
traces: bool
Plot the traces used to generate the frame
saturation: float
The fraction of full well defined as saturation
draw: bool
Render the figure instead of returning it
"""
# Check plot type
ptypes = ['data', 'snr', 'saturation']
if ptype.lower() not in ptypes:
raise ValueError("'ptype' must be {}".format(ptypes))
if order in [1, 2]:
tso = getattr(self, 'tso_order{}_ideal'.format(order))
else:
if noise:
tso = self.tso
else:
tso = self.tso_ideal
# Get data, snr, and saturation for plotting
vmax = int(np.nanmax(tso[tso < np.inf]))
dat = np.array(tso.reshape(self.dims3)[idx].data)
snr = np.sqrt(dat.data)
fullWell = 65536.0
sat = dat > saturation * fullWell
sat = sat.astype(int)
# Make the figure
height = 180 if self.subarray == 'SUBSTRIP96' else 800 if self.subarray == 'FULL' else 225
tooltips = [("(x,y)", "($x{int}, $y{int})"), ("ADU/s", "@data"), ("SNR", "@snr"), ('Saturation', '@saturation')]
fig = figure(x_range=(0, dat.shape[1]), y_range=(0, dat.shape[0]),
tooltips=tooltips, width=int(dat.shape[1]/2), height=height,
title='{}: Frame {}'.format(self.target, idx),
toolbar_location='above', toolbar_sticky=True)
# Plot the frame
if scale == 'log':
dat[dat < 1.] = 1.
source = dict(data=[dat], snr=[snr], saturation=[sat])
color_mapper = LogColorMapper(palette="Viridis256", low=dat.min(), high=dat.max())
fig.image(source=source, image=ptype, x=0, y=0, dw=dat.shape[1],
dh=dat.shape[0], color_mapper=color_mapper)
color_bar = ColorBar(color_mapper=color_mapper, ticker=LogTicker(),
orientation="horizontal", label_standoff=12,
border_line_color=None, location=(0, 0))
else:
source = dict(data=[dat], snr=[snr], saturation=[sat])
color_mapper = LinearColorMapper(palette="Viridis256", low=dat.min(), high=dat.max())
fig.image(source=source, image=ptype, x=0, y=0, dw=dat.shape[1],
dh=dat.shape[0], palette='Viridis256')
color_bar = ColorBar(color_mapper=color_mapper,
orientation="horizontal", label_standoff=12,
border_line_color=None, location=(0, 0))
# Add color bar
if ptype != 'saturation':
fig.add_layout(color_bar, 'below')
# Plot the polynomial too
if traces:
X = np.linspace(0, 2048, 2048)
# Order 1
Y = np.polyval(self.coeffs[0], X)
fig.line(X, Y, color='red')
# Order 2
Y = np.polyval(self.coeffs[1], X)
fig.line(X, Y, color='red')
if draw:
show(fig)
else:
return fig
@run_required
def plot_slice(self, col, idx=0, order=None, noise=False, draw=True, **kwargs):
"""
Plot a column of a frame to see the PSF in the cross dispersion direction
Parameters
----------
col: int, sequence
The column index(es) to plot
idx: int
The frame index to plot
order: sequence
The order to isolate
noise: bool
Plot with the noise model
draw: bool
Render the figure instead of returning it
"""
if order in [1, 2]:
tso = getattr(self, 'tso_order{}_ideal'.format(order))
else:
if noise:
tso = self.tso
else:
tso = self.tso_ideal
# Transpose data
flux = tso.reshape(self.dims3)[idx].T
# Turn one column into a list
if isinstance(col, int):
col = [col]
# Get the data
dfig = self.plot(ptype='data', idx=idx, order=order, draw=False, noise=noise, **kwargs)
# Make the figure
fig = figure(width=1024, height=500)
fig.xaxis.axis_label = 'Row'
fig.yaxis.axis_label = 'Count Rate [ADU/s]'
fig.legend.click_policy = 'mute'
for c in col:
color = next(utils.COLORS)
fig.line(np.arange(flux[c, :].size), flux[c, :], color=color, legend='Column {}'.format(c))
vline = Span(location=c, dimension='height', line_color=color, line_width=3)
dfig.add_layout(vline)
if draw:
show(column(fig, dfig))
else:
return column(fig, dfig)
@run_required
def plot_ramp(self, draw=True):
"""
Plot the total flux on each frame to display the ramp
Parameters
----------
draw: bool
Render the figure instead of returning it
"""
fig = figure()
x = range(self.dims3[0])
y = np.sum(self.tso.reshape(self.dims3), axis=(-1, -2))
fig.circle(x, y, size=12)
fig.xaxis.axis_label = 'Group'
fig.yaxis.axis_label = 'Count Rate [ADU/s]'
if draw:
show(fig)
else:
return fig
@run_required
def plot_lightcurve(self, column, time_unit='s', resolution_mult=20, draw=True):
"""
Plot a lightcurve for each column index given
Parameters
----------
column: int, float, sequence
The integer column index(es) or float wavelength(s) in microns
to plot as a light curve
time_unit: string
The string indicator for the units that the self.time array is in
['s', 'min', 'h', 'd' (default)]
resolution_mult: int
The number of theoretical points to plot for each data point
draw: bool
Render the figure instead of returning it
"""
# Check time_units
if time_unit not in ['s', 'min', 'h', 'd']:
raise ValueError("time_unit must be 's', 'min', 'h' or 'd']")
# Get the scaled flux in each column for the last group in
# each integration
flux_cols = np.nansum(self.tso_ideal.reshape(self.dims3)[self.ngrps-1::self.ngrps], axis=1)
flux_cols = flux_cols/np.nanmax(flux_cols, axis=1)[:, None]
# Make it into an array
if isinstance(column, (int, float)):
column = [column]
# Make the figure
lc = figure()
for kcol, col in enumerate(column):
color = next(utils.COLORS)
# If it is an index
if isinstance(col, int):
lightcurve = flux_cols[:, col]
label = 'Column {}'.format(col)
# Or assumed to be a wavelength in microns
elif isinstance(col, float):
waves = np.mean(self.wave[0], axis=0)
lightcurve = [np.interp(col, waves, flux_col) for flux_col in flux_cols]
label = '{} um'.format(col)
else:
print('Please enter an index, astropy quantity, or array thereof.')
return
# Plot the theoretical light curve
if str(type(self.tmodel)) == "<class 'batman.transitmodel.TransitModel'>":
# Make time axis and convert to desired units
time = np.linspace(min(self.time), max(self.time), self.ngrps*self.nints*resolution_mult)
time = time*q.d.to(time_unit)
tmodel = batman.TransitModel(self.tmodel, time)
tmodel.rp = self.rp[col]
theory = tmodel.light_curve(tmodel)
theory *= max(lightcurve)/max(theory)
lc.line(time, theory, legend=label+' model', color=color, alpha=0.1)
            # Convert the data times to the requested unit
            data_time = self.time[self.ngrps-1::self.ngrps].copy()
            data_time *= q.d.to(time_unit)
# Plot the lightcurve
lc.circle(data_time, lightcurve, legend=label, color=color)
lc.xaxis.axis_label = 'Time [{}]'.format(time_unit)
lc.yaxis.axis_label = 'Transit Depth'
if draw:
show(lc)
else:
return lc
@run_required
def plot_spectrum(self, frame=0, order=None, noise=False, scale='log', draw=True):
"""
Parameters
----------
frame: int
The frame number to plot
order: sequence
The order to isolate
noise: bool
Plot with the noise model
scale: str
Plot scale, ['linear', 'log']
draw: bool
Render the figure instead of returning it
"""
if order in [1, 2]:
tso = getattr(self, 'tso_order{}_ideal'.format(order))
else:
if noise:
tso = self.tso
else:
tso = self.tso_ideal
# Get extracted spectrum (Column sum for now)
wave = np.mean(self.wave[0], axis=0)
flux_out = np.sum(tso.reshape(self.dims3)[frame].data, axis=0)
response = 1./self.order1_response
# Convert response in [mJy/ADU/s] to [Flam/ADU/s] then invert so
        # that we can convert the flux at each wavelength into [ADU/s]
        flux_out *= response/self.time[np.mod(frame, self.ngrps)]
# Trim wacky extracted edges
flux_out[0] = flux_out[-1] = np.nan
# Plot it along with input spectrum
flux_in = np.interp(wave, self.star[0], self.star[1])
# Make the spectrum plot
spec = figure(x_axis_type=scale, y_axis_type=scale, width=1024, height=500)
spec.step(wave, flux_out, mode='center', legend='Extracted', color='red')
spec.step(wave, flux_in, mode='center', legend='Injected', alpha=0.5)
spec.yaxis.axis_label = 'Flux Density [{}]'.format(self.star[1].unit)
# Get the residuals
res = figure(x_axis_type=scale, x_range=spec.x_range, width=1024, height=150)
res.step(wave, flux_out-flux_in, mode='center')
res.xaxis.axis_label = 'Wavelength [{}]'.format(self.star[0].unit)
res.yaxis.axis_label = 'Residuals'
if draw:
show(column(spec, res))
else:
return column(spec, res)
def _reset_data(self):
"""Reset the results to all zeros"""
# Check that all the appropriate values have been initialized
if all([i in self.info for i in ['nints', 'ngrps', 'nrows', 'ncols']]):
# Update the dimensions
self.dims = (self.nints, self.ngrps, self.nrows, self.ncols)
self.dims3 = (self.nints*self.ngrps, self.nrows, self.ncols)
# Reset the results
for arr in ['tso', 'tso_ideal']+['tso_order{}_ideal'.format(n) for n in self.orders]:
setattr(self, arr, None)
def _reset_time(self):
"""Reset the time axis based on the observation settings"""
# Check that all the appropriate values have been initialized
if all([i in self.info for i in ['subarray', 'nints', 'ngrps', 't0', 'nresets']]):
# Get frame time based on the subarray
self.frame_time = self.subarray_specs.get('tfrm')
self.group_time = self.subarray_specs.get('tgrp')
# Generate the time axis, removing reset frames
time_axis = []
t = self.t0+self.frame_time
for _ in range(self.nints):
times = t+np.arange(self.nresets+self.ngrps)*self.frame_time
t = times[-1]+self.frame_time
time_axis.append(times[self.nresets:])
self.time = np.concatenate(time_axis)
self.inttime = np.tile(self.time[:self.ngrps], self.nints)
def _reset_psfs(self):
"""Scale the psf for each detector column to the flux from the 1D spectrum"""
# Check that all the appropriate values have been initialized
if all([i in self.info for i in ['filter', 'subarray']]) and self.star is not None:
for order in self.orders:
# Get the wavelength map
wave = self.avg_wave[order-1]
# Get relative spectral response for the order (from
# /grp/crds/jwst/references/jwst/jwst_niriss_photom_0028.fits)
throughput = self.photom[self.photom['order'] == order]
ph_wave = throughput.wavelength[throughput.wavelength > 0][1:-2]
ph_resp = throughput.relresponse[throughput.wavelength > 0][1:-2]
response = np.interp(wave, ph_wave, ph_resp)
# Convert response in [mJy/ADU/s] to [Flam/ADU/s] then invert so
                # that we can convert the flux at each wavelength into [ADU/s]
response = self.frame_time/(response*q.mJy*ac.c/(wave*q.um)**2).to(self.star[1].unit).value
flux = np.interp(self.avg_wave[order-1], self.star[0], self.star[1], left=0, right=0)*response
cube = mt.SOSS_psf_cube(filt=self.filter, order=order, subarray=self.subarray)*flux[:, None, None]
setattr(self, 'order{}_response'.format(order), response)
setattr(self, 'order{}_psfs'.format(order), cube)
def simulate(self, ld_coeffs=None, noise=True, model_grid=None, n_jobs=-1, **kwargs):
"""
Generate the simulated 4D ramp data given the initialized TSO object
Parameters
----------
ld_coeffs: array-like (optional)
A 3D array that assigns limb darkening coefficients to each pixel, i.e. wavelength
ld_profile: str (optional)
The limb darkening profile to use
noise: bool
Add noise model
model_grid: ExoCTK.modelgrid.ModelGrid (optional)
The model atmosphere grid to calculate LDCs
n_jobs: int
The number of cores to use in multiprocessing
Example
-------
# Run simulation of star only
tso.simulate()
# Simulate star with transiting exoplanet by including transmission spectrum and orbital params
import batman
import astropy.constants as ac
planet1D = np.genfromtxt(resource_filename('awesimsoss', '/files/WASP107b_pandexo_input_spectrum.dat'), unpack=True)
params = batman.TransitParams()
params.t0 = 0. # time of inferior conjunction
params.per = 5.7214742 # orbital period (days)
params.a = 0.0558*q.AU.to(ac.R_sun)*0.66 # semi-major axis (in units of stellar radii)
params.inc = 89.8 # orbital inclination (in degrees)
params.ecc = 0. # eccentricity
params.w = 90. # longitude of periastron (in degrees)
params.limb_dark = 'quadratic' # limb darkening profile to use
params.u = [0.1, 0.1] # limb darkening coefficients
tmodel = batman.TransitModel(params, tso.time)
tmodel.teff = 3500 # effective temperature of the host star
tmodel.logg = 5 # log surface gravity of the host star
tmodel.feh = 0 # metallicity of the host star
tso.simulate(planet=planet1D, tmodel=tmodel)
"""
# Check that there is star data
if self.star is None:
print("No star to simulate! Please set the self.star attribute!")
return
# Check kwargs for updated attrs
for key, val in kwargs.items():
setattr(self, key, val)
if self.verbose:
begin = time.time()
# Set the number of cores for multiprocessing
max_cores = cpu_count()
if n_jobs == -1 or n_jobs > max_cores:
n_jobs = max_cores
# Clear previous results
self._reset_data()
# Generate simulation for each order
for order in self.orders:
# Get the wavelength map
wave = self.avg_wave[order-1]
# Get the psf cube and filter response function
psfs = getattr(self, 'order{}_psfs'.format(order))
# Get limb darkening coeffs and make into a list
ld_coeffs = self.ld_coeffs[order-1]
ld_coeffs = list(map(list, ld_coeffs))
# Set the radius at the given wavelength from the transmission
# spectrum (Rp/R*)**2... or an array of ones
if self.planet is not None:
tdepth = np.interp(wave, self.planet[0], self.planet[1])
else:
tdepth = np.ones_like(wave)
self.rp = np.sqrt(tdepth)
# Run multiprocessing to generate lightcurves
if self.verbose:
print('Calculating order {} light curves...'.format(order))
start = time.time()
# Generate the lightcurves at each wavelength
pool = ThreadPool(n_jobs)
func = partial(mt.psf_lightcurve, time=self.time, tmodel=self.tmodel)
data = list(zip(psfs, ld_coeffs, self.rp))
lightcurves = np.asarray(pool.starmap(func, data), dtype=np.float64)
pool.close()
pool.join()
del pool
# Reshape to make frames
lightcurves = lightcurves.swapaxes(0, 1)
# Multiply by the integration time to convert to [ADU]
lightcurves *= self.inttime[:, None, None, None]
# Generate TSO frames
if self.verbose:
print('Lightcurves finished:', round(time.time()-start, 3), 's')
print('Constructing order {} traces...'.format(order))
start = time.time()
# Make the 2048*N lightcurves into N frames
pool = ThreadPool(n_jobs)
frames = np.asarray(pool.map(mt.make_frame, lightcurves))
pool.close()
pool.join()
del pool
if self.verbose:
# print('Total flux after warp:', np.nansum(all_frames[0]))
print('Order {} traces finished:'.format(order), round(time.time()-start, 3), 's')
# Add it to the individual order
setattr(self, 'tso_order{}_ideal'.format(order), frames)
# Clear memory
del frames, lightcurves, psfs, wave
# Add to the master TSO
self.tso_ideal = np.sum([getattr(self, 'tso_order{}_ideal'.format(order)) for order in self.orders], axis=0)
self.tso = self.tso_ideal.copy()
# Trim SUBSTRIP256 array if SUBSTRIP96
if self.subarray == 'SUBSTRIP96':
for arr in ['tso', 'tso_ideal']+['tso_order{}_ideal'.format(n) for n in self.orders]:
setattr(self, arr, getattr(self, arr)[:, :self.nrows, :])
# Expand SUBSTRIP256 array if FULL frame
if self.subarray == 'FULL':
for arr in ['tso', 'tso_ideal']+['tso_order{}_ideal'.format(n) for n in self.orders]:
full = np.zeros(self.dims3)
full[:, -256:, :] = getattr(self, arr)
setattr(self, arr, full)
del full
# Make ramps and add noise to the observations using <NAME>'s
# dark ramp simulator
if noise:
self.add_noise()
# Reshape into (nints, ngrps, y, x)
for arr in ['tso', 'tso_ideal']+['tso_order{}_ideal'.format(n) for n in self.orders]:
data = getattr(self, arr).reshape(self.dims)
setattr(self, arr, data)
del data
# Simulate reference pixels
self.add_refpix()
if self.verbose:
print('\nTotal time:', round(time.time()-begin, 3), 's')
@property
def star(self):
"""Getter for the stellar data"""
return self._star
@star.setter
def star(self, spectrum):
"""Setter for the stellar data
Parameters
----------
spectrum: sequence
The [W, F] or [W, F, E] of the star to simulate
"""
# Check if the star has been set
if spectrum is None:
if self.verbose:
print("No star to simulate! Please set the self.star attribute!")
self._star = None
else:
# Check star is a sequence of length 2 or 3
if not isinstance(spectrum, (list, tuple)) or not len(spectrum) in [2, 3]:
raise ValueError(type(spectrum), ': Star input must be a sequence of [W, F] or [W, F, E]')
# Check star has units
if not all([isinstance(i, q.quantity.Quantity) for i in spectrum]):
types = ', '.join([str(type(i)) for i in spectrum])
raise ValueError('[{}]: Spectrum must be in astropy units'.format(types))
# Check the units
if not spectrum[0].unit.is_equivalent(q.um):
raise ValueError(spectrum[0].unit, ': Wavelength must be in units of distance')
if not all([i.unit.is_equivalent(q.erg/q.s/q.cm**2/q.AA) for i in spectrum[1:]]):
raise ValueError(spectrum[1].unit, ': Flux density must be in units of F_lambda')
# Check the wavelength range
spec_min = np.nanmin(spectrum[0][spectrum[0] > 0.])
spec_max = np.nanmax(spectrum[0][spectrum[0] > 0.])
sim_min = np.nanmin(self.wave[self.wave > 0.])*q.um
sim_max = np.nanmax(self.wave[self.wave > 0.])*q.um
if spec_min > sim_min or spec_max < sim_max:
print("Wavelength range of input spectrum ({} - {} um) does not cover the {} - {} um range needed for a complete simulation. Interpolation will be used at the edges.".format(spec_min, spec_max, sim_min, sim_max))
# Good to go
self._star = spectrum
# Reset the psfs
self._reset_psfs()
@property
def subarray(self):
"""Getter for the subarray"""
return self._subarray
@subarray.setter
def subarray(self, subarr):
"""Setter for the subarray
Properties
----------
subarr: str
The name of the subarray to use,
['SUBSTRIP256', 'SUBSTRIP96', 'FULL']
"""
subs = ['SUBSTRIP256', 'SUBSTRIP96', 'FULL']
# Check the value
if subarr not in subs:
raise ValueError("'{}' not a supported subarray. Try {}".format(subarr, subs))
# Set the subarray
self._subarray = subarr
self.subarray_specs = utils.subarray(subarr)
# Set the dependent quantities
self._ncols = 2048
self._nrows = self.subarray_specs.get('y')
self.wave = utils.wave_solutions(subarr)
self.avg_wave = np.mean(self.wave, axis=1)
self.coeffs = mt.trace_polynomials(subarray=subarr)
# Reset the data and time arrays
self._reset_data()
self._reset_time()
# Reset the psfs
self._reset_psfs()
@property
def t0(self):
"""Getter for transit midpoint"""
return self._t0
@t0.setter
def t0(self, tmid):
"""Setter for transit midpoint
Properties
----------
tmid: str
The transit midpoint
"""
# Check the value
if not isinstance(tmid, (float, int)):
raise ValueError("'{}' not a supported transit midpoint. Try a float or integer value.".format(tmid))
# Set the transit midpoint
self._t0 = tmid
# Reset the data and time arrays
self._reset_data()
self._reset_time()
@property
def target(self):
"""Getter for target name"""
return self._target
@target.setter
def target(self, name):
"""Setter for target name and coordinates
Properties
----------
tmid: str
The transit midpoint
"""
# Check the name
if not isinstance(name, str):
raise TypeError("Target name must be a string.")
# Set the subarray
self._target = name
self.ra = 1.23456
self.dec = 2.34567
# Query Simbad for target RA and Dec
if self.target != 'New Target':
try:
rec = Simbad.query_object(self.target)
coords = SkyCoord(ra=rec[0]['RA'], dec=rec[0]['DEC'], unit=(q.hour, q.degree), frame='icrs')
self.ra = coords.ra.degree
self.dec = coords.dec.degree
if self.verbose:
print("Coordinates {} {} for '{}' found in Simbad!".format(self.ra, self.dec, self.target))
except TypeError:
if self.verbose:
print("Could not resolve target '{}' in Simbad. Using ra={}, dec={}.".format(self.target, self.ra, self.dec))
print("Set coordinates manually by updating 'ra' and 'dec' attributes.")
@property
def tmodel(self):
"""Getter for the transit model"""
return self._tmodel
@tmodel.setter
def tmodel(self, model, time_unit='days'):
"""Setter for the transit model
Parameters
----------
model: batman.transitmodel.TransitModel
The transit model
time_unit: string
The units of model.t, ['seconds', 'minutes', 'hours', 'days']
"""
# Check if the transit model has been set
if model is None:
self._tmodel = None
else:
# Check transit model type
mod_type = str(type(model))
if not mod_type == "<class 'batman.transitmodel.TransitModel'>":
raise TypeError("{}: Transit model must be of type batman.transitmodel.TransitModel".format(mod_type))
# Check time units
time_units = {'seconds': 86400., 'minutes': 1440., 'hours': 24., 'days': 1.}
if time_unit not in time_units:
raise ValueError("{}: time_unit must be {}".format(time_unit, time_units.keys()))
# Check if the stellar params have changed
plist = ['teff', 'logg', 'feh', 'limb_dark']
old_params = [getattr(self.tmodel, p, None) for p in plist]
new_params = [getattr(model, p) for p in plist]
# Update the LD profile
self.ld_profile = model.limb_dark
# Convert seconds to days in order to match the Period and T0 parameters
model.t /= time_units[time_unit]
# Update the transit model
self._tmodel = model
# Update ld_coeffs if necessary
if new_params != old_params:
self.ld_coeffs = 'update'
class TestTSO(TSO):
"""Generate a test object for quick access"""
def __init__(self, ngrps=2, nints=2, filter='CLEAR', subarray='SUBSTRIP256', run=True, add_planet=False, **kwargs):
"""Get the test data and load the object
Parameters
----------
ngrps: int
The number of groups per integration
nints: int
The number of integrations for the exposure
filter: str
The name of the filter to use, ['CLEAR', 'F277W']
subarray: str
The name of the subarray to use, ['SUBSTRIP256', 'SUBSTRIP96', 'FULL']
run: bool
Run the simulation after initialization
add_planet: bool
Add a transiting exoplanet
"""
# Initialize base class
super().__init__(ngrps=ngrps, nints=nints, star=utils.STAR_DATA, subarray=subarray, filter=filter, **kwargs)
# Add planet
if add_planet:
self.planet = utils.PLANET_DATA
self.tmodel = utils.transit_params(self.time)
# Run the simulation
if run:
self.simulate()
class BlackbodyTSO(TSO):
"""Generate a test object with a blackbody spectrum"""
def __init__(self, ngrps=2, nints=2, teff=1800, filter='CLEAR', subarray='SUBSTRIP256', run=True, add_planet=False, **kwargs):
"""Get the test data and load the object
        Parameters
        ----------
ngrps: int
The number of groups per integration
nints: int
The number of integrations for the exposure
teff: int
The effective temperature of the test source
filter: str
The name of the filter to use, ['CLEAR', 'F277W']
subarray: str
The name of the subarray to use, ['SUBSTRIP256', 'SUBSTRIP96', 'FULL']
run: bool
Run the simulation after initialization
add_planet: bool
Add a transiting exoplanet
"""
# Generate a blackbody at the given temperature
bb = BlackBody1D(temperature=teff*q.K)
wav = np.linspace(0.5, 2.9, 1000) * q.um
flux = bb(wav).to(FLAM, q.spectral_density(wav))*1E-8
# Initialize base class
super().__init__(ngrps=ngrps, nints=nints, star=[wav, flux], subarray=subarray, filter=filter, **kwargs)
# Add planet
if add_planet:
self.planet = utils.PLANET_DATA
self.tmodel = utils.transit_params(self.time)
# Run the simulation
if run:
self.simulate()
```
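For orientation, a minimal end-to-end run of the classes above might look like the following sketch; `BlackbodyTSO` wires up its own star spectrum, so it is the quickest way to produce a small simulated cube (the output filename is illustrative, and `export` needs the optional `jwst` package):
```python
from awesimsoss import BlackbodyTSO

# Two integrations of two groups each for an 1800 K blackbody on SUBSTRIP256;
# run=True (the default) calls simulate() during construction
tso = BlackbodyTSO(ngrps=2, nints=2, teff=1800)
print(tso.tso.shape)  # (2, 2, 256, 2048) = (nints, ngrps, rows, cols)
tso.export('blackbody_uncal.fits')
```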
#### File: awesimsoss/awesimsoss/generate_darks.py
```python
import os
import numpy as np
import astropy.io.fits as fits
from . import noise_simulation as ng
def add_dark_current(ramp, seed, gain, darksignal):
"""
Adds dark current to the input signal
Parameters
----------
ramp: sequence
The array of ramp images
seed: int
The seed for the dark signal
gain: float
The detector gain
darksignal: sequence
A 2D map of the dark signal to project onto the ramp
Returns
-------
np.ndarray
The dark signal ramp
"""
# Get the random seed and array shape
np.random.seed(seed)
dims = ramp.shape
# Add the dark signal to the ramp
total = darksignal*0.
for n in range(dims[0]):
signal = np.random.poisson(darksignal)/gain
total = total+signal
ramp[n,:,:] = ramp[n,:,:]+total
return ramp
def make_exposure(nints, ngrps, darksignal, gain, pca0_file, noise_seed=None,
dark_seed=None, offset=500):
"""
Make a simulated exposure with no source signal
Parameters
----------
nints: int
The number of integrations
ngrps: int
The number of groups per integration
darksignal: sequence
A dark frame
gain: float
The gain on the detector
pca0_file: str
The path to the PCA-zero file
noise_seed: int
The seed for the generated noise
dark_seed: int
The seed for the generated dark
offset: int
The pedestal offset
Returns
-------
np.ndarray
A simulated ramp of darks
"""
if nints < 1 or ngrps < 1:
return None
if not noise_seed:
noise_seed = 7+int(np.random.uniform()*4000000000.)
if not dark_seed:
dark_seed = 5+int(np.random.uniform()*4000000000.)
np.random.seed(dark_seed)
# Make empty data array
nrows, ncols = darksignal.shape
simulated_data = np.zeros([nints*ngrps,nrows,ncols], dtype=np.float32)
# Define some constants
pedestal = 18.30
c_pink = 9.6
u_pink = 3.2
acn = 2.0
bias_amp = 0.
#bias_amp = 5358.87
#bias_offset = 20944.06
pca0_amp = 0.
rd_noise = 12.95
dark_current = 0.0
dc_seed = dark_seed
bias_offset = offset*gain
    # Define the HXRGNoise instance to make a SUBSTRIP256 array
#(in detector coordinates)
noisecube = ng.HXRGNoise(naxis1=nrows, naxis2=ncols, naxis3=ngrps,
pca0_file=pca0_file, x0=0, y0=0, det_size=2048,
verbose=False)
# iterate over integrations
for loop in range(nints):
seed1 = noise_seed+24*int(loop)
ramp = noisecube.mknoise(c_pink=c_pink, u_pink=u_pink,
bias_amp=bias_amp, bias_offset=bias_offset,
acn=acn, pca0_amp=pca0_amp, rd_noise=rd_noise,
pedestal=pedestal, dark_current=dark_current,
dc_seed=dc_seed, noise_seed=seed1, gain=gain)
if len(ramp.shape)==2:
ramp = ramp[np.newaxis,:,:]
ramp = np.transpose(ramp,(0,2,1))
ramp = ramp[::,::-1,::-1]
ramp = add_dark_current(ramp, dc_seed, gain, darksignal)
simulated_data[loop*ngrps:(loop+1)*ngrps,:,:] = np.copy(ramp)
ramp = 0
return simulated_data
def make_photon_yield(photon_yield, orders):
"""
Generates a map of the photon yield for each order.
The shape of both arrays should be [order, nrows, ncols]
Parameters
----------
    photon_yield: sequence
        The calculated photon yield at each pixel for each order
orders: sequence
An array of the median image of each order
Returns
-------
np.ndarray
The array containing the photon yield map for each order
"""
# Get the shape and create empty arrays
dims = orders.shape
sum1 = np.zeros((dims[1], dims[2]), dtype=np.float32)
sum2 = np.zeros((dims[1], dims[2]), dtype=np.float32)
# Add the photon yield for each order
for n in range(dims[0]):
sum1 = sum1+photon_yield[n, :, :]*orders[n, :, :]
sum2 = sum2+orders[n, :, :]
# Take the ratio of the photon yield to the signal
pyimage = sum1/sum2
pyimage[np.where(sum2 == 0.)] = 1.
return pyimage
def add_signal(signals, cube, pyimage, frametime, gain, zodi, zodi_scale,
photon_yield=False):
"""
Add the science signal to the generated noise
Parameters
----------
signals: sequence
The science frames
cube: sequence
The generated dark ramp
pyimage: sequence
The photon yield per order
frametime: float
The number of seconds per frame
gain: float
The detector gain
zodi: sequence
The zodiacal background image
zodi_scale: float
The scale factor for the zodi background
"""
# Get the data dimensions
dims1 = cube.shape
dims2 = signals.shape
if dims1 != dims2:
raise ValueError(dims1, "not equal to", dims2)
# Make a new ramp
newcube = cube.copy()*0.
# The background is assumed to be in electrons/second/pixel, not ADU/s/pixel.
background = zodi*zodi_scale*frametime
# Iterate over each group
for n in range(dims1[0]):
framesignal = signals[n,:,:]*gain*frametime
# Add photon yield
if photon_yield:
newvalues = np.random.poisson(framesignal)
target = pyimage-1.
for k in range(dims1[1]):
for l in range(dims1[2]):
if target[k,l] > 0.:
                        nvals = int(newvalues[k,l])
                        values = np.random.poisson(target[k,l], size=nvals)
newvalues[k,l] = newvalues[k,l]+np.sum(values)
newvalues = newvalues+np.random.poisson(background)
# Or don't
else:
vals = np.abs(framesignal*pyimage+background)
newvalues = np.random.poisson(vals)
# First ramp image
if n==0:
newcube[n,:,:] = newvalues
else:
newcube[n,:,:] = newcube[n-1,:,:]+newvalues
newcube = cube+newcube/gain
return newcube
def non_linearity(cube, nonlinearity, offset=0):
"""
Add nonlinearity to the ramp
Parameters
----------
cube: sequence
The ramp with no non-linearity
nonlinearity: sequence
The non-linearity image to add to the ramp
offset: int
The non-linearity offset
Returns
-------
np.ndarray
The ramp with the added non-linearity
"""
# Get the dimensions of the input data
dims1 = nonlinearity.shape
dims2 = cube.shape
    if (dims1[1] != dims2[1]) | (dims1[2] != dims2[2]):
        raise ValueError("Non-linearity and ramp arrays have mismatched row/column dimensions")
# Make a new array for the ramp+non-linearity
newcube = cube-offset
for k in range(dims2[0]):
frame = np.squeeze(np.copy(newcube[k,:,:]))
sum1 = frame*0.
for n in range(dims1[0]-1,-1,-1):
sum1 = sum1+nonlinearity[n,:,:]*np.power(frame,n+1)
sum1 = frame*(1.+sum1)
newcube[k,:,:] = sum1
newcube = newcube+offset
return newcube
def add_pedestal(cube, pedestal, offset=500):
"""
Add a pedestal to the ramp
Parameters
----------
cube: sequence
The ramp with no pedestal
pedestal: sequence
The pedestal image to add to the ramp
offset: int
The pedestal offset
Returns
-------
np.ndarray
The ramp with the added pedestal
"""
# Add the offset to the pedestal
ped1 = pedestal+(offset-500.)
# Make a new array for the ramp+pedestal
dims = cube.shape
newcube = np.zeros_like(cube,dtype=np.float32)
# Iterate over each integration
for n in range(dims[0]):
newcube[n,:,:] = cube[n,:,:]+ped1
newcube = newcube.astype(np.uint16)
return newcube
```
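The functions above chain into the ramp pipeline that `TSO.add_noise` drives; the sketch below shows the order of operations with placeholder arrays standing in for the real reference files (the 5.49 s frame time approximates SUBSTRIP256, and `make_exposure`/`non_linearity` are omitted because they need the PCA-zero file and the non-linearity coefficients):
```python
import numpy as np
from awesimsoss import generate_darks as gd

gain = 1.61
zodi = np.ones((256, 2048), dtype=np.float32)       # zodiacal background map
pyimage = np.ones((256, 2048), dtype=np.float32)    # photon yield image
signal = np.ones((2, 256, 2048), dtype=np.float32)  # 2-group science signal

ramp = np.zeros_like(signal)                        # stands in for make_exposure output
ramp = gd.add_signal(signal, ramp, pyimage, 5.49, gain, zodi, zodi_scale=1.0)
ramp = gd.add_pedestal(ramp, np.zeros((256, 2048), dtype=np.float32), offset=500)
```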
#### File: awesimsoss/awesimsoss/setup_package.py
```python
from distutils.extension import Extension
def get_package_data():
return {'awesimsoss': ['files/*', 'img/*']}
```
#### File: awesimsoss/tests/test_awesim.py
```python
from copy import copy
import unittest
from pkg_resources import resource_filename
import numpy as np
import astropy.units as q
import astropy.constants as ac
import batman
from awesimsoss import TSO, BlackbodyTSO, TestTSO, STAR_DATA, PLANET_DATA
class test_BlackbodyTSO(unittest.TestCase):
"""A test of the BlackbodyTSO class"""
def setUp(self):
pass
def test_run_no_planet(self):
"""A test of the BlackbodyTSO class with no planet"""
tso = BlackbodyTSO()
def test_run_with_planet(self):
"""A test of the BlackbodyTSO class with a planet"""
tso = BlackbodyTSO(add_planet=True)
class test_TestTSO(unittest.TestCase):
"""A test of the TestTSO class"""
def setUp(self):
pass
def test_run_no_planet(self):
"""A test of the TestTSO class with no planet"""
tso = TestTSO()
def test_run_with_planet(self):
"""A test of the TestTSO class with a planet"""
tso = TestTSO(add_planet=True)
class test_TSO(unittest.TestCase):
"""Tests for the TSO class"""
def setUp(self):
"""Setup for the tests"""
# Get data
self.star = STAR_DATA
self.planet = PLANET_DATA
def test_export(self):
"""Test the export method"""
# Make the TSO object and save
test_tso = TSO(ngrps=2, nints=2, star=self.star, subarray='SUBSTRIP256')
test_tso.simulate()
try:
test_tso.export('outfile.fits')
except NameError:
pass
def test_init(self):
"""Test that the TSO class is generated properly"""
# Initialize the FULL frame with two groups and two integrations
# and the CLEAR filter
tso2048clear = TSO(ngrps=2, nints=2, star=self.star, subarray='FULL')
self.assertEqual(tso2048clear.ngrps, 2)
self.assertEqual(tso2048clear.nints, 2)
self.assertEqual(tso2048clear.nframes, 4)
self.assertEqual(tso2048clear.dims, (2, 2, 2048, 2048))
self.assertEqual(tso2048clear.subarray, 'FULL')
self.assertEqual(tso2048clear.filter, 'CLEAR')
# Initialize the 256 subarray with two groups and two integrations
# and the CLEAR filter
tso256clear = TSO(ngrps=2, nints=2, star=self.star, subarray='SUBSTRIP256')
self.assertEqual(tso256clear.ngrps, 2)
self.assertEqual(tso256clear.nints, 2)
self.assertEqual(tso256clear.nframes, 4)
self.assertEqual(tso256clear.dims, (2, 2, 256, 2048))
self.assertEqual(tso256clear.subarray, 'SUBSTRIP256')
self.assertEqual(tso256clear.filter, 'CLEAR')
# Initialize the 96 subarray with two groups and two integrations
# and the CLEAR filter
tso96clear = TSO(ngrps=2, nints=2, star=self.star, subarray='SUBSTRIP96')
self.assertEqual(tso96clear.ngrps, 2)
self.assertEqual(tso96clear.nints, 2)
self.assertEqual(tso96clear.nframes, 4)
self.assertEqual(tso96clear.dims, (2, 2, 96, 2048))
self.assertEqual(tso96clear.subarray, 'SUBSTRIP96')
self.assertEqual(tso96clear.filter, 'CLEAR')
# Initialize the FULL frame with two groups and two integrations
# and the F277W filter
tso2048f277w = TSO(ngrps=2, nints=2, star=self.star, subarray='FULL', filter='F277W')
self.assertEqual(tso2048f277w.ngrps, 2)
self.assertEqual(tso2048f277w.nints, 2)
self.assertEqual(tso2048f277w.nframes, 4)
self.assertEqual(tso2048f277w.dims, (2, 2, 2048, 2048))
self.assertEqual(tso2048f277w.subarray, 'FULL')
self.assertEqual(tso2048f277w.filter, 'F277W')
# Initialize the 256 subarray with two groups and two integrations
# and the F277W filter
tso256f277w = TSO(ngrps=2, nints=2, star=self.star, subarray='SUBSTRIP256', filter='F277W')
self.assertEqual(tso256f277w.ngrps, 2)
self.assertEqual(tso256f277w.nints, 2)
self.assertEqual(tso256f277w.nframes, 4)
self.assertEqual(tso256f277w.dims, (2, 2, 256, 2048))
self.assertEqual(tso256f277w.subarray, 'SUBSTRIP256')
self.assertEqual(tso256f277w.filter, 'F277W')
# Initialize the 96 subarray with two groups and two integrations
# and the F277W filter
tso96f277w = TSO(ngrps=2, nints=2, star=self.star, subarray='SUBSTRIP96', filter='F277W')
self.assertEqual(tso96f277w.ngrps, 2)
self.assertEqual(tso96f277w.nints, 2)
self.assertEqual(tso96f277w.nframes, 4)
self.assertEqual(tso96f277w.dims, (2, 2, 96, 2048))
self.assertEqual(tso96f277w.subarray, 'SUBSTRIP96')
self.assertEqual(tso96f277w.filter, 'F277W')
def test_run_no_planet(self):
"""A test of simulate() with no planet"""
# Make the TSO object
tso = TSO(ngrps=2, nints=2, star=self.star)
tso.simulate()
tso.subarray = 'SUBSTRIP96'
tso.simulate()
tso.subarray = 'FULL'
tso.simulate()
def test_run_with_planet(self):
"""A test of simulate() with a planet"""
# Make the TSO object
tso = TSO(ngrps=2, nints=2, star=self.star)
# Make orbital params
params = batman.TransitParams()
params.t0 = 0.
params.per = 5.7214742
params.a = 0.0558*q.AU.to(ac.R_sun)*0.66
params.inc = 89.8
params.ecc = 0.
params.w = 90.
params.limb_dark = 'quadratic'
params.u = [0.1, 0.1]
params.rp = 0.
tmodel = batman.TransitModel(params, tso.time)
tmodel.teff = 3500
tmodel.logg = 5
tmodel.feh = 0
# Run the simulation
tso.simulate(planet=self.planet, tmodel=tmodel)
tso.subarray = 'SUBSTRIP96'
tso.simulate(planet=self.planet, tmodel=tmodel)
tso.subarray = 'FULL'
tso.simulate(planet=self.planet, tmodel=tmodel)
def test_lookup(self):
"""Test that coordinates are looked up if given a name"""
# Make the TSO object
targ = TSO(ngrps=2, nints=2, star=self.star, target='trappist-1')
no_targ = TSO(ngrps=2, nints=2, star=self.star)
# Check target name
self.assertNotEqual(targ.target, no_targ.target)
# Check coordinates
self.assertNotEqual(targ.ra, no_targ.ra)
self.assertNotEqual(targ.dec, no_targ.dec)
def test_star(self):
"""Test that errors are thrown for bas star input"""
# Test that non wavelength units fail
bad_wave_star = copy(self.star)
bad_wave_star[0] *= q.Jy
kwargs = {'nints': 2, 'ngrps': 2, 'star': bad_wave_star}
self.assertRaises(ValueError, TSO, **kwargs)
# Test that non flux density units fail
bad_flux_star = copy(self.star)
bad_flux_star[1] *= q.K
kwargs = {'nints': 2, 'ngrps': 2, 'star': bad_flux_star}
self.assertRaises(ValueError, TSO, **kwargs)
# Test that no units fail
bad_unit_star = copy(self.star)
bad_unit_star[0] = bad_unit_star[0].value
kwargs = {'nints': 2, 'ngrps': 2, 'star': bad_unit_star}
self.assertRaises(ValueError, TSO, **kwargs)
        # Test bad spectrum shape
bad_size_star = [self.star[0]]
kwargs = {'nints': 2, 'ngrps': 2, 'star': bad_size_star}
self.assertRaises(ValueError, TSO, **kwargs)
def test_bad_attrs(self):
"""Test that invalid attributes throw an error"""
# Make the TSO object
tso = TSO(ngrps=2, nints=2, star=self.star)
        # Bad filter
self.assertRaises(ValueError, setattr, tso, 'filter', 'foo')
# Bad ncols
self.assertRaises(TypeError, setattr, tso, 'ncols', 3)
# Bad nrows
self.assertRaises(TypeError, setattr, tso, 'nrows', 3)
# Bad nints
self.assertRaises(TypeError, setattr, tso, 'nints', 'three')
# Bad ngrps
self.assertRaises(TypeError, setattr, tso, 'ngrps', 'three')
# Bad nresets
self.assertRaises(TypeError, setattr, tso, 'nresets', 'three')
# Bad orders
tso.orders = 1
self.assertRaises(ValueError, setattr, tso, 'orders', 'three')
# Bad subarray
self.assertRaises(ValueError, setattr, tso, 'subarray', 'three')
# Bad t0
self.assertRaises(ValueError, setattr, tso, 't0', 'three')
# Bad target
self.assertRaises(TypeError, setattr, tso, 'target', 3)
def test_ldcs(self):
"""Test the limb darkening coefficients"""
# Create instance
tso = TSO(ngrps=2, nints=2, star=self.star)
# Set manually
ldcs = tso.ld_coeffs
tso.ld_coeffs = np.ones((3, 2048, 2))
# Bad LDCs (Removed TypeError in favor of print statement)
# self.assertRaises(TypeError, setattr, tso, 'ld_coeffs', 'foo')
def test_plot(self):
"""Test plot method"""
# Make the TSO object
tso = TSO(ngrps=2, nints=2, star=self.star)
# Test plot with no data
plt = tso.plot(draw=False)
# Run simulation
tso.simulate()
# Test bad ptype
kwargs = {'ptype': 'foo', 'draw': False}
self.assertRaises(ValueError, tso.plot, **kwargs)
# Standard plot with traces
plt = tso.plot(traces=True)
# Standard plot with one order
plt = tso.plot(order=1, draw=False)
# No noise plot
plt = tso.plot(noise=False, draw=False)
# Log plot
plt = tso.plot(scale='log', draw=False)
def test_plot_slice(self):
"""Test plot_slice method"""
# Make the TSO object
tso = TSO(ngrps=2, nints=2, star=self.star)
tso.simulate()
# Standard plot with traces
plt = tso.plot_slice(500, traces=True)
# Standard plot with one order
plt = tso.plot_slice(500, order=1, draw=False)
# Plot with noise
plt = tso.plot_slice(500, noise=True, draw=False)
# Log plot
plt = tso.plot_slice(500, scale='log', draw=False)
# List of slices
plt = tso.plot_slice([500, 1000], draw=False)
def test_plot_ramp(self):
"""Test plot_ramp method"""
# Make the TSO object
tso = TSO(ngrps=2, nints=2, star=self.star)
tso.simulate()
# Standard plot
plt = tso.plot_ramp(draw=False)
tso.plot_ramp()
def test_plot_lightcurve(self):
"""Test plot_lightcurve method"""
# Make the TSO object
tso = TSO(ngrps=2, nints=2, star=self.star)
tso.simulate()
# Test bad units
kwargs = {'column': 500, 'time_unit': 'foo', 'draw': False}
self.assertRaises(ValueError, tso.plot_lightcurve, **kwargs)
# Standard plot
plt = tso.plot_lightcurve(500)
# Wavelength
plt = tso.plot_lightcurve(1.6, draw=False)
# Neither
plt = tso.plot_lightcurve('foo', draw=False)
# List of lightcurves
plt = tso.plot_lightcurve([500, 1000], draw=False)
def test_plot_spectrum(self):
"""Test plot_spectrum method"""
# Make the TSO object
tso = TSO(ngrps=2, nints=2, star=self.star)
tso.simulate()
# Standard plot
plt = tso.plot_spectrum()
# Standard plot with one order
plt = tso.plot_spectrum(order=1, draw=False)
# Log plot
plt = tso.plot_spectrum(scale='log', draw=False)
        # Plot with noise
plt = tso.plot_spectrum(noise=True, draw=False)
# Specific order
plt = tso.plot_spectrum(order=1, draw=False)
``` |
{
"source": "jotaylor/SPAMM",
"score": 3
} |
#### File: spamm/components/ComponentBase.py
```python
from __future__ import print_function
from abc import ABC, abstractmethod
import numpy as np
import sys
from utils.parse_pars import parse_pars
PARS = parse_pars()
#-----------------------------------------------------------------------------#
# Compatible with python 2 & 3.
class Component(ABC):
'''
Description of Component class here.
This class is abstract; use (or create) subclasses of this class.
Functionality that is common to all subclasses should be implemented here.
'''
def __init__(self):
self.z = None # redshift
self.reddening_law = None
#self.model_parameters = list()
#self.model_parameter_names = list()
# wavelength grid defined by the data
# interpolate to this if necessary (i.e. no analytical component)
self.data_wavelength_grid = None
self.interpolated_flux = None # based on data, defined in initialize()
def parameter_index(self, parameter_name):
''' '''
for idx, pname in enumerate(self.model_parameter_names):
if parameter_name == pname:
return idx
return None
@property
def is_analytic(self):
# define this property in the subclass
print("Please define the 'is_analytic' property for the class '{0}'.".format(__class__.__name__))
sys.exit(1)
@property
def parameter_count(self):
''' Returns the number of parameters of this component. '''
if self.z:
            return len(self.model_parameter_names) + 1
else:
return len(self.model_parameter_names)
@abstractmethod
def initial_values(self, spectrum=None):
        ''' Return type must be a list (not an np.array). '''
pass
@abstractmethod
def ln_priors(self, params):
'''
Return a list of the ln of all of the priors.
@param params
'''
def native_wavelength_grid(self):
'''
Returns the wavelength grid native to this component.
This needs to be overridden by subclasses.
'''
if self.is_analytic:
pass # implement in subclass
else:
assert True, "The method 'native_wavelength_grid' must be defined for {0}.".format(self.__class__.__name__)
@abstractmethod
def flux(self, wavelengths=None, parameters=None):
pass
def grid_spacing(self):
''' Return the spacing of the wavelength grid in Ångstroms. Does not support variable grid spacing. '''
if self.is_analytic:
# analytic components don't have grid spacing
return None
        else:
            grid = self.native_wavelength_grid()
            return grid[1] - grid[0]
def initialize(self, data_spectrum=None):
''' '''
# Check that the component wavelength grid is not more coarse than the data wavelength grid
if self.is_analytic:
pass
else:
assert True, "The 'initialize' method of the component '{0}' must be defined.".format(self.__class__.__name__)
#self.data_wavelength_grid = np.array(data_spectrum.spectral_axis)
#data_delta_wavelength = data_spectrum.spectral_axis[1] - data_spectrum.spectral_axis[0]
#comp_delta_wavelength = self.native_wavelength_grid[1] - native_wavelength_grid[0]
# TODO - what if component grid is not uniform? currently require that it be.
#if comp_delta_wavelength > data_delta_wavelength:
@property
def fast_interp(self):
'''Determines if fast interpolation should be used instead of rebin_spec'''
if PARS["rebin_spec"] is False:
return True
else:
return False
```
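To make the abstract interface concrete, a minimal analytic subclass might look like the sketch below (a flat continuum with one normalization parameter; the class name and prior bounds are illustrative, not part of SPAMM):
```python
import numpy as np
from spamm.components.ComponentBase import Component

class FlatContinuum(Component):
    '''Illustrative component: constant flux with a single free parameter.'''
    def __init__(self):
        super(FlatContinuum, self).__init__()
        self.model_parameter_names = ["norm"]
    @property
    def is_analytic(self):
        return True
    def initial_values(self, spectrum=None):
        # Sample the (assumed) flat prior on [0, 1]
        return [np.random.uniform(low=0., high=1.)]
    def ln_priors(self, params):
        norm = params[self.parameter_index("norm")]
        return [0. if 0. <= norm <= 1. else -np.inf]
    def flux(self, wavelengths=None, parameters=None):
        # Constant flux across the given wavelength grid
        return np.full(len(wavelengths), parameters[0])
```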
#### File: spamm/components/ComponentTemplate.py
```python
import sys
import numpy as np
from .ComponentBase import Component
# [replace] "TemplateComponent" with the name of the new component
class TemplateComponent(Component):
'''
Describe your component here.
This component has n parameters:
parameter1 : a short description of parameter 1
parameter2 : a short description of parameter 2
...
parametern : a short description of parameter n
'''
def __init__(self):
# [replace] fill in the same name you gave above
super(TemplateComponent, self).__init__()
# [replace] give the parameters names (spaces are ok), one line for each parameter
self.model_parameter_names = list() # this may need to be defined as a method
self.model_parameter_names.append("parameter n")
self._norm_wavelength = None
# [replace] define variables for min/max values for each parameter range
self.min_parameter1 = None
self.max_parameter1 = None
# etc.
@property
def is_analytic(self):
return True # [replace] choose True or False as appropriate.
# This must be defined if the component is NOT analytical.
def native_wavelength_grid(self):
''' Returns the wavelength grid native to this component. '''
def initial_values(self, spectrum=None):
'''
Needs to sample from prior distribution.
'''
# call super() implementation
super(TemplateComponent, self).initialize()
# [replace] calculate/define minimum and maximum values for each parameter.
self.min_parameter1 = ...
self.max_parameter1 = ...
# [replace] this is an example of a random flat distribution
# See for other options: http://docs.scipy.org/doc/numpy/reference/routines.random.html
parameter1_init = np.random.uniform(low=self.min_parameter1,
high=self.max_parameter1)
self.min_parameter2 = ...
self.max_parameter2 = ...
# [replace] this is an example of a random lognormal distribution
parameter2_init = np.random.lognormal(mean=..., sigma=..., size=...)
# [replace] return a list of all parameter_init values
# NOTE: Order is important! Place them in the same order they were defined
# in __init__ above.
return [parameter1_init, parameter2_init]
def initialize(self, data_spectrum=None):
'''
Perform any initializations where the data is optional.
'''
if data_spectrum is None:
raise Exception("The data spectrum must be specified to initialize" +
"{0}.".format(self.__class__.__name__))
self.normalization_wavelength(data_spectrum_wavelength=data_spectrum.wavelengths)
def ln_priors(self, params):
'''
Return a list of the ln of all of the priors.
@param params
'''
# need to return parameters as a list in the correct order
ln_priors = list()
# [replace] Put code here that calculates the ln of the priors
# given the value of the parameters.
# [replace] Get the current value of the parameters. Use the names
# as defined in __init__() above.
parameter1 = params[self.parameter_index("parameter 1")]
parametern = params[self.parameter_index("parameter 2")]
# [replace] append each parameter, in the correct order, to the "ln_priors" list
return ln_priors
def flux(self, wavelengths=None, parameters=None):
'''
Returns the flux for this component for a given wavelength grid
and parameters. Will use the initial parameters if none are specified.
'''
assert len(parameters) == len(self.model_parameter_names), ("The wrong number " +
"of indices were provided: {0}".format(parameters))
# calculate flux of the component
# [replace] fill in the flux calculation
flux = ...
return flux
```
#### File: spamm/components/NuclearContinuumComponent.py
```python
import sys
import numpy as np
from astropy.modeling.powerlaws import PowerLaw1D,BrokenPowerLaw1D
from .ComponentBase import Component
from utils.runningmeanfast import runningMeanFast
from utils.parse_pars import parse_pars
#-----------------------------------------------------------------------------#
class NuclearContinuumComponent(Component):
"""
AGN Continuum Component
\f$ F_{\lambda,{\rm PL}}=F_{\rm PL,0} \
\left(\frac{\lambda}{\lambda_0}\right)^{\alpha} \f$
This component has two parameters:
normalization : \f$ F_{\rm PL,0} \f$
slope : \f$ \alpha \f$
Attributes:
broken_powerlaw (Bool): True if a broken power law should be used.
model_parameter_names (list): List of model parameter names,
e.g. slope1, wave_break
name (str): Name of component, i.e. "Nuclear"
norm_min ():
norm_max ():
slope_min ():
slope_max ():
wave_break_min ():
wave_break_max ():
"""
def __init__(self, pars=None, broken=None):
super().__init__()
self.name = "Nuclear"
if pars is None:
self.inputpars = parse_pars()["nuclear_continuum"]
else:
self.inputpars = pars
if broken is None:
self.broken_pl = self.inputpars["broken_pl"]
else:
self.broken_pl = broken
self.model_parameter_names = list()
if not self.broken_pl:
self.model_parameter_names.append("norm_PL")
self.model_parameter_names.append("slope1")
else:
self.model_parameter_names.append("wave_break")
self.model_parameter_names.append("norm_PL")
self.model_parameter_names.append("slope1")
self.model_parameter_names.append("slope2")
self.norm_min = self.inputpars["pl_norm_min"]
self.norm_max = self.inputpars["pl_norm_max"]
self.slope_min = self.inputpars["pl_slope_min"]
self.slope_max = self.inputpars["pl_slope_max"]
self.wave_break_min = self.inputpars["pl_wave_break_min"]
self.wave_break_max = self.inputpars["pl_wave_break_max"]
#-----------------------------------------------------------------------------#
#TODO could this be moved to Component.py?
@property
def is_analytic(self):
"""
Method that stores whether component is analytic or not
Returns:
Bool (Bool): True if component is analytic.
"""
return True
#-----------------------------------------------------------------------------#
def initial_values(self, spectrum):
"""
Needs to sample from prior distribution.
Return type must be a list (not an np.array).
Called by emcee.
Args:
spectrum (Spectrum object): ?
Returns:
pl_init (list): Initial values for the power-law parameters.
"""
pl_init = []
if self.norm_max == "max_flux":
self.norm_max = max(runningMeanFast(spectrum.flux, self.inputpars["boxcar_width"]))
elif self.norm_max == "fnw":
fnw = spectrum.norm_wavelength_flux
self.norm_max = fnw
if self.broken_pl:
size = 2
if self.wave_break_min == "min_wl":
self.wave_break_min = min(spectrum.spectral_axis)
if self.wave_break_max == "max_wl":
self.wave_break_max = max(spectrum.spectral_axis)
wave_break_init = np.random.uniform(low=self.wave_break_min,
high=self.wave_break_max)
pl_init.append(wave_break_init)
else:
size = 1
norm_init = np.random.uniform(self.norm_min, high=self.norm_max)
pl_init.append(norm_init)
slope_init = np.random.uniform(low=self.slope_min, high=self.slope_max, size=size)
# pl_init should be a list of scalars
for slope in slope_init:
pl_init.append(slope)
return pl_init
#TODO need to modify emcee initial_values call
#-----------------------------------------------------------------------------#
def ln_priors(self, params):
"""
Return a list of the ln of all of the priors.
# norm: Uniform linear prior between 0 and the maximum of the
# spectral flux after computing running median.
# slope: Uniform linear prior in range [-3,3]??
Args:
params (): ?
Returns:
ln_priors (list): ln of all the priors.
"""
# Need to return parameters as a list in the correct order.
ln_priors = []
if self.broken_pl:
wave_break = params[self.parameter_index("wave_break")]
if self.wave_break_min < wave_break < self.wave_break_max:
ln_priors.append(0.)
else:
ln_priors.append(-np.inf) # Arbitrarily small number
norm = params[self.parameter_index("norm_PL")]
if self.norm_min < norm < self.norm_max:
ln_priors.append(0.)
else:
ln_priors.append(-np.inf) # Arbitrarily small number
slope1 = params[self.parameter_index("slope1")]
if self.slope_min < slope1 < self.slope_max:
ln_priors.append(0.)
else:
ln_priors.append(-np.inf) # Arbitrarily small number
# TODO - suppress "RuntimeWarning: divide by zero encountered in log" warning.
if self.broken_pl:
slope2 = params[self.parameter_index("slope2")]
if self.slope_min < slope2 < self.slope_max:
ln_priors.append(0.)
else:
ln_priors.append(-np.inf) # Arbitrarily small number
return ln_priors
#-----------------------------------------------------------------------------#
def flux(self, spectrum, parameters=None):
"""
Compute the flux for this component for a given wavelength grid
and parameters. Use the initial parameters if none are specified.
Args:
spectrum (Spectrum object): ?
parameters (): ?
Return:
flux (): Flux of the component.
"""
assert len(parameters) == len(self.model_parameter_names), \
"The wrong number of indices were provided: {0}".format(parameters)
norm = parameters[self.parameter_index("norm_PL")]
slope1 = parameters[self.parameter_index("slope1")]
if not self.broken_pl:
PL = PowerLaw1D(norm, spectrum.norm_wavelength, slope1)
else:
x_break = parameters[self.parameter_index("wave_break")]
slope2 = parameters[self.parameter_index("slope2")]
PL = BrokenPowerLaw1D(norm, x_break, slope1, slope2)
flux = PL(spectrum.spectral_axis)
return flux
```
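For reference, a minimal standalone sketch of how this component's power law evaluates on a wavelength grid, using astropy's `PowerLaw1D` as above; the grid and parameter values here are made-up illustrations, not values from the SPAMM code.
```python
import numpy as np
from astropy.modeling.powerlaws import PowerLaw1D

# Hypothetical wavelength grid and parameters, for illustration only.
wavelengths = np.linspace(4000.0, 7000.0, 5)   # Angstroms
norm, norm_wavelength, slope = 1.0, 5500.0, 1.5

# astropy evaluates f(x) = amplitude * (x / x_0) ** (-alpha)
pl = PowerLaw1D(norm, norm_wavelength, slope)
print(pl(wavelengths))
```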
#### File: SPAMM/utils/add_in_quadrature.py
```python
import numpy as np
def add_in_quadrature(data_in):
"""
Add arrays in quadrature.
Args:
data_in (list, array, or tuple): Holds the arrays of individual
values to be added in quadrature.
Returns:
sum_quad (array): The sum of the input arrays added in quadrature.
"""
sqsum = 0.
for data in data_in:
data_arr = np.array(data)
sqsum += data_arr**2
sum_quad = np.sqrt(sqsum)
return sum_quad
```
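A quick usage sketch; the import path and error arrays below are assumptions for illustration:
```python
import numpy as np
from add_in_quadrature import add_in_quadrature  # assumed import path

stat_err = np.array([0.3, 0.4])
sys_err = np.array([0.4, 0.3])
# sqrt(0.3**2 + 0.4**2) == 0.5 for each element
print(add_in_quadrature([stat_err, sys_err]))  # -> [0.5 0.5]
```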
#### File: SPAMM/utils/gaussian_kernel.py
```python
import numpy as np
def gaussian_kernel(x, mu, sig):
'''
Construct a gaussian function.
Args:
x (array-like): Data array
mu (float): Mean of the distribution
sig (float): Standard deviation
Returns:
Gaussian function.
'''
return np.exp(-0.5*((x-mu)/sig)**2)
```
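Note the kernel is unnormalized: it peaks at 1 rather than integrating to 1. A minimal sketch, with made-up values:
```python
import numpy as np
from gaussian_kernel import gaussian_kernel  # assumed import path

x = np.array([-1.0, 0.0, 1.0])
print(gaussian_kernel(x, mu=0.0, sig=1.0))  # -> [0.6065... 1. 0.6065...]
```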
#### File: SPAMM/utils/rebin_spec.py
```python
def rebin_spec(wave, specin, wavnew):
"""
Rebin spectra to bins used in wavnew.
Ref: http://www.astrobetter.com/blog/2013/08/12/python-tip-re-sampling-spectra-with-pysynphot/
"""
from pysynphot import observation
from pysynphot import spectrum as pysynphot_spec
import numpy as np
spec = pysynphot_spec.ArraySourceSpectrum(wave=wave, flux=specin)
f = np.ones(len(wave))
filt = pysynphot_spec.ArraySpectralElement(wave, f, waveunits='angstrom')
obs = observation.Observation(spec, filt, binset=wavnew, force='taper')
return obs.binflux
```
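A hedged usage sketch, assuming pysynphot is installed; the grids are invented for illustration:
```python
import numpy as np
from rebin_spec import rebin_spec  # assumed import path

wave = np.linspace(4000.0, 7000.0, 300)
flux = np.ones_like(wave)
wavnew = np.linspace(4100.0, 6900.0, 100)  # coarser target grid
flux_rebinned = rebin_spec(wave, flux, wavnew)
print(flux_rebinned.shape)  # (100,)
```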
#### File: SPAMM/utils/runningmeanfast.py
```python
import numpy as np
def runningMeanFast(x, N):
'''
Calculate the running mean of an array given a window.
Ref: http://stackoverflow.com/questions/13728392/moving-average-or-running-mean
Args:
x (array-like): Data array
N (int): Window width
Returns:
An array of averages over each window.
'''
return np.convolve(x, np.ones((N,))/N)[(N-1):]
``` |
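One thing the docstring leaves implicit: `np.convolve` in full mode pads the right edge, so the final N-1 entries average over a partial window. A small sketch:
```python
import numpy as np
from runningmeanfast import runningMeanFast  # assumed import path

x = np.array([1.0, 2.0, 3.0, 4.0])
# Window of 2: the last value (2.0) averages over a partial window.
print(runningMeanFast(x, 2))  # -> [1.5 2.5 3.5 2. ]
```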
{
"source": "jotbe/linux-developer-vm-with-ansible",
"score": 2
} |
#### File: linux-developer-vm-with-ansible/spec/test_testinfra.py
```python
def test_testinfra_is_installed_at_version_6_3_0_(host):
cmd = host.run("pip3 show --disable-pip-version-check pytest-testinfra")
assert cmd.rc == 0
assert "Name: pytest-testinfra\nVersion: 6.3.0" in cmd.stdout
def test_pytest_spec_is_installed_at_version_3_2_0_(host):
cmd = host.run("pip3 show --disable-pip-version-check pytest-spec")
assert cmd.rc == 0
assert "Name: pytest-spec\nVersion: 3.2.0" in cmd.stdout
def test_pytest_html_formatter_is_installed_at_version_3_1_1_(host):
cmd = host.run("pip3 show --disable-pip-version-check pytest-html")
assert cmd.rc == 0
assert "Name: pytest-html\nVersion: 3.1.1" in cmd.stdout
``` |
{
"source": "jotelha/dtoolcore",
"score": 4
} |
#### File: dtoolcore/dtoolcore/filehasher.py
```python
import hashlib
class FileHasher(object):
"""Class for associating hash functions with names."""
def __init__(self, hash_func):
self.func = hash_func
self.name = hash_func.__name__
def __call__(self, filename):
return self.func(filename)
def _hash_the_file(hasher, filename):
"""Helper function for creating hash functions.
See implementation of :func:`dtoolcore.filehasher.shasum`
for more usage details.
"""
BUF_SIZE = 65536
with open(filename, 'rb') as f:
buf = f.read(BUF_SIZE)
while len(buf) > 0:
hasher.update(buf)
buf = f.read(BUF_SIZE)
return hasher
def hashsum_hexdigest(hasher, filename):
"""Helper function for creating hash functions.
See implementation of :func:`dtoolcore.filehasher.shasum`
for more usage details.
"""
hasher = _hash_the_file(hasher, filename)
return hasher.hexdigest()
def hashsum_digest(hasher, filename):
"""Helper function for creating hash functions.
See implementation of :func:`dtoolcore.filehasher.shasum`
for more usage details.
"""
hasher = _hash_the_file(hasher, filename)
return hasher.digest()
def sha1sum_hexdigest(filename):
"""Return hex digest of SHA-1 hash of file.
:param filename: path to file
:returns: shasum of file
"""
hasher = hashlib.sha1()
return hashsum_hexdigest(hasher, filename)
def sha256sum_hexdigest(filename):
"""Return hex digest of SHA-256 hash of file.
:param filename: path to file
:returns: shasum of file
"""
hasher = hashlib.sha256()
return hashsum_hexdigest(hasher, filename)
def md5sum_hexdigest(filename):
"""Return hex digest of MD5sum of file.
:param filename: path to file
:returns: shasum of file
"""
hasher = hashlib.md5()
return hashsum_hexdigest(hasher, filename)
def md5sum_digest(filename):
"""Return digest of MD5sum of file.
:param filename: path to file
:returns: shasum of file
"""
hasher = hashlib.md5()
return hashsum_digest(hasher, filename)
```
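A brief usage sketch of the `FileHasher` wrapper; the file path is hypothetical:
```python
from dtoolcore.filehasher import FileHasher, md5sum_hexdigest

hasher = FileHasher(md5sum_hexdigest)
print(hasher.name)  # -> "md5sum_hexdigest"
# hasher("/tmp/data.bin") would return the hex MD5 digest of that file.
```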
#### File: dtoolcore/tests/test_timestamp.py
```python
from datetime import datetime
def test_timestamp_returns_float():
import dtoolcore.utils
start_of_time = datetime(1970, 1, 1)
assert type(dtoolcore.utils.timestamp(start_of_time)) is float
def test_start_of_time_is_0():
import dtoolcore.utils
start_of_time = datetime(1970, 1, 1)
assert dtoolcore.utils.timestamp(start_of_time) == 0.0
def test_millenium_is_946684800():
import dtoolcore.utils
start_of_time = datetime(2000, 1, 1)
assert dtoolcore.utils.timestamp(start_of_time) == 946684800.
def test_subsecond_precision():
import dtoolcore.utils
time_as_float = 946684800.513
into_new_millenium = datetime.fromtimestamp(time_as_float)
tolerance = 0.000001
actual = dtoolcore.utils.timestamp(into_new_millenium)
assert actual < time_as_float + tolerance
assert actual > time_as_float - tolerance
``` |
{
"source": "jotelha/dtool-ecs",
"score": 2
} |
#### File: dtool-ecs/tests/test_annotations.py
```python
import os
from . import tmp_uuid_and_uri # NOQA
from . import TEST_SAMPLE_DATA
def test_annotations(tmp_uuid_and_uri): # NOQA
uuid, dest_uri = tmp_uuid_and_uri
from dtoolcore import ProtoDataSet, generate_admin_metadata
from dtoolcore import DataSet
name = "my_dataset"
admin_metadata = generate_admin_metadata(name)
admin_metadata["uuid"] = uuid
sample_data_path = os.path.join(TEST_SAMPLE_DATA)
local_file_path = os.path.join(sample_data_path, 'tiny.png')
# Create a minimal dataset
proto_dataset = ProtoDataSet(
uri=dest_uri,
admin_metadata=admin_metadata,
config_path=None)
proto_dataset.create()
proto_dataset.put_item(local_file_path, 'tiny.png')
proto_dataset.freeze()
# Read in a dataset
dataset = DataSet.from_uri(dest_uri)
assert dataset.list_annotation_names() == []
dataset.put_annotation("project", "demo")
assert dataset.get_annotation("project") == "demo"
assert dataset.list_annotation_names() == ["project"]
```
#### File: dtool-ecs/tests/test_multiple_namespaces.py
```python
import os
from . import TEST_SAMPLE_DATA
from . import tmp_uuid_and_uri # NOQA
from . import tmp_uuid_and_uri_from_second_namespace # NOQA
def test_basic_workflow_on_first_namespace(tmp_uuid_and_uri): # NOQA
uuid, dest_uri = tmp_uuid_and_uri
from dtoolcore import ProtoDataSet, generate_admin_metadata
from dtoolcore import DataSet
from dtoolcore.utils import generate_identifier
name = "my_dataset"
admin_metadata = generate_admin_metadata(name)
admin_metadata["uuid"] = uuid
sample_data_path = os.path.join(TEST_SAMPLE_DATA)
local_file_path = os.path.join(sample_data_path, 'tiny.png')
# Create a minimal dataset
proto_dataset = ProtoDataSet(
uri=dest_uri,
admin_metadata=admin_metadata,
config_path=None)
proto_dataset.create()
proto_dataset.put_item(local_file_path, 'tiny.png')
proto_dataset.freeze()
# Read in a dataset
dataset = DataSet.from_uri(dest_uri)
expected_identifier = generate_identifier('tiny.png')
assert expected_identifier in dataset.identifiers
assert len(dataset.identifiers) == 1
def test_basic_workflow_on_second_namespace(tmp_uuid_and_uri_from_second_namespace): # NOQA
uuid, dest_uri = tmp_uuid_and_uri_from_second_namespace
from dtoolcore import ProtoDataSet, generate_admin_metadata
from dtoolcore import DataSet
from dtoolcore.utils import generate_identifier
name = "my_dataset"
admin_metadata = generate_admin_metadata(name)
admin_metadata["uuid"] = uuid
sample_data_path = os.path.join(TEST_SAMPLE_DATA)
local_file_path = os.path.join(sample_data_path, 'tiny.png')
# Create a minimal dataset
proto_dataset = ProtoDataSet(
uri=dest_uri,
admin_metadata=admin_metadata,
config_path=None)
proto_dataset.create()
proto_dataset.put_item(local_file_path, 'tiny.png')
proto_dataset.freeze()
# Read in a dataset
dataset = DataSet.from_uri(dest_uri)
expected_identifier = generate_identifier('tiny.png')
assert expected_identifier in dataset.identifiers
assert len(dataset.identifiers) == 1
```
#### File: dtool-ecs/tests/test_update_readme.py
```python
import time
from . import tmp_uuid_and_uri # NOQA
def test_update_readme(tmp_uuid_and_uri): # NOQA
uuid, dest_uri = tmp_uuid_and_uri
from dtoolcore import ProtoDataSet, generate_admin_metadata
from dtoolcore import DataSet
name = "my_dataset"
admin_metadata = generate_admin_metadata(name)
admin_metadata["uuid"] = uuid
# Create a minimal dataset
proto_dataset = ProtoDataSet(
uri=dest_uri,
admin_metadata=admin_metadata,
config_path=None)
proto_dataset.create()
proto_dataset.put_readme("First")
proto_dataset.put_readme("Hello world")
proto_dataset.freeze()
# Read in a dataset
dataset = DataSet.from_uri(dest_uri)
assert len(dataset._storage_broker._list_historical_readme_keys()) == 0
dataset.put_readme("Updated")
assert len(dataset._storage_broker._list_historical_readme_keys()) == 1
key = dataset._storage_broker._list_historical_readme_keys()[0]
content = dataset._storage_broker.get_text(key)
assert content == 'Hello world'
time.sleep(0.1)
dataset.put_readme('Updated again')
assert dataset.get_readme_content() == 'Updated again'
``` |
{
"source": "jotelha/dtool-s3",
"score": 2
} |
#### File: dtool-s3/tests/test_prefix_for_storage_location_functional.py
```python
from . import tmp_env_var, S3_TEST_BASE_URI, _remove_dataset
def _prefix_contains_something(storage_broker, prefix):
bucket = storage_broker.s3resource.Bucket(storage_broker.bucket)
prefix_objects = list(
bucket.objects.filter(Prefix=prefix).all()
)
return len(prefix_objects) > 0
def test_prefix_functional(): # NOQA
from dtoolcore import DataSetCreator
from dtoolcore import DataSet, iter_datasets_in_base_uri
# Create a minimal dataset without a prefix
with tmp_env_var("DTOOL_S3_DATASET_PREFIX", ""):
with DataSetCreator("no-prefix", S3_TEST_BASE_URI) as ds_creator:
ds_creator.put_annotation("prefix", "no")
no_prefix_uri = ds_creator.uri
dataset_no_prefix = DataSet.from_uri(no_prefix_uri)
# Basic test that retrieval works.
assert dataset_no_prefix.get_annotation("prefix") == "no"
# Basic test that prefix is correct.
structure_key = dataset_no_prefix._storage_broker.get_structure_key()
assert structure_key.startswith(dataset_no_prefix.uuid)
# Create a minimal dataset
prefix = "u/olssont/"
with tmp_env_var("DTOOL_S3_DATASET_PREFIX", prefix):
with DataSetCreator("no-prefix", S3_TEST_BASE_URI) as ds_creator:
ds_creator.put_annotation("prefix", "yes")
prefix_uri = ds_creator.uri
dataset_with_prefix = DataSet.from_uri(prefix_uri)
# Basic test that retrieval works.
assert dataset_with_prefix.get_annotation("prefix") == "yes"
# Basic test that prefix is correct.
structure_key = dataset_with_prefix._storage_broker.get_structure_key()
assert structure_key.startswith(prefix)
# Basic tests that everything can be picked up.
dataset_uris = list(
ds.uri for ds in
iter_datasets_in_base_uri(S3_TEST_BASE_URI)
)
assert dataset_no_prefix.uri in dataset_uris
assert dataset_with_prefix.uri in dataset_uris
_remove_dataset(dataset_no_prefix.uri)
_remove_dataset(dataset_with_prefix.uri)
```
#### File: dtool-s3/tests/test_put_item_logic.py
```python
import pytest
from . import tmp_dir_fixture # NOQA
try:
from unittest.mock import MagicMock
except ImportError:
from mock import MagicMock
def test_get_object_failure():
"""
Mock scenario where the get fails.
"""
from botocore.exceptions import WaiterError
from dtool_s3.storagebroker import _object_exists
mock_s3resource = MagicMock()
obj = MagicMock()
obj.wait_until_exists = MagicMock(side_effect=WaiterError(
'ObjectExists', 'Max attempts exceeded', {}))
mock_s3resource.Object = MagicMock(return_value=obj)
value = _object_exists(
mock_s3resource,
"dummy_bucket",
"dummy_dest_path"
)
assert value is False
def test_get_object_success():
"""
Mock scenario where the get succeeds.
"""
from dtool_s3.storagebroker import _object_exists
mock_s3resource = MagicMock()
obj = MagicMock()
obj.wait_until_exists = MagicMock()
mock_s3resource.Object = MagicMock(return_value=obj)
value = _object_exists(
mock_s3resource,
"dummy_bucket",
"dummy_dest_path"
)
obj.wait_until_exists.assert_called_once()
assert value is True
def test_upload_file_simulating_successful_upload():
"""
Mock scenario where upload simply succeeds.
"""
from dtool_s3.storagebroker import _upload_file # NOQA
s3client = MagicMock()
s3client.upload_file = MagicMock(return_value=True)
value = _upload_file(
s3client,
"dummy_fpath",
"dummy_bucket",
"dummy_dest_path",
"dummy_extra_args"
)
assert value is True
def test_upload_file_simulating_nosuchupload_failure(tmp_dir_fixture): # NOQA
"""
Mock scenario where upload fails with a NoSuchUpload exception.
"""
from dtool_s3.storagebroker import _upload_file # NOQA
import boto3
error_response = {'Error': {'Code': 'NoSuchUpload',
'Message': 'The specified multipart upload ' +
'does not exist. The upload ID might be ' +
'invalid, or the multipart upload might ' +
'have been aborted or completed.'}}
s3client = boto3.client("s3")
s3client.upload_file = MagicMock(
side_effect=s3client.exceptions.NoSuchUpload(
error_response,
"AbortMultipartUpload")
)
value = _upload_file(
s3client,
"dummy_fpath",
"dummy_bucket",
"dummy_dest_path",
"dummy_extra_args",
)
assert value is False
def test_upload_file_simulating_endpointconnectionerror(tmp_dir_fixture): # NOQA
"""
Mock scenario where upload fails with a EndpointConnectionError exception.
"""
from dtool_s3.storagebroker import _upload_file # NOQA
import boto3
from botocore.exceptions import EndpointConnectionError
s3client = boto3.client("s3")
s3client.upload_file = MagicMock(
side_effect=EndpointConnectionError(
endpoint_url="dummy_bucket/dest_path")
)
value = _upload_file(
s3client,
"dummy_fpath",
"dummy_bucket",
"dummy_dest_path",
"dummy_extra_args",
)
assert value is False
def test_upload_file_simulating_S3UploadFailedError(tmp_dir_fixture): # NOQA
"""
Mock scenario where upload fails with a S3UploadFailedError exception.
"""
from dtool_s3.storagebroker import _upload_file # NOQA
import boto3
from boto3.exceptions import S3UploadFailedError
s3client = boto3.client("s3")
s3client.upload_file = MagicMock(
side_effect=S3UploadFailedError()
)
value = _upload_file(
s3client,
"dummy_fpath",
"dummy_bucket",
"dummy_dest_path",
"dummy_extra_args",
)
assert value is False
def test_put_item_with_retry():
from dtool_s3.storagebroker import _put_item_with_retry # NOQA
def test_put_item_with_retry_immediate_success():
"""
Mock scenario where while doing a put, the upload succeeds without needing
to retry.
"""
import dtool_s3.storagebroker
dtool_s3.storagebroker._upload_file = MagicMock(return_value=True)
dtool_s3.storagebroker._object_exists = MagicMock()
dtool_s3.storagebroker._put_item_with_retry(
"dummy_s3client",
"dummy_s3resource",
"dummy_fpath",
"dummy_bucket",
"dummy_dest_path",
{}
)
dtool_s3.storagebroker._upload_file.assert_called()
dtool_s3.storagebroker._object_exists.assert_not_called()
def test_put_item_with_retry_simulating_upload_error_item_uploaded():
"""
Mock scenario where while doing a put, the upload fails with an ambiguous
failure, however item has been successfully created in the bucket.
"""
import dtool_s3.storagebroker
dtool_s3.storagebroker._upload_file = MagicMock(return_value=False)
dtool_s3.storagebroker._object_exists = MagicMock(return_value=True)
dtool_s3.storagebroker._put_item_with_retry(
"dummy_s3client",
"dummy_s3resource",
"dummy_fpath",
"dummy_bucket",
"dummy_dest_path",
{}
)
dtool_s3.storagebroker._upload_file.assert_called_once()
dtool_s3.storagebroker._object_exists.assert_called_once()
def test_put_item_with_retry_simulating_upload_error_item_doesnt_exist():
"""
Mock scenario where while doing a put, the upload fails, the object hasn't
been created on the target, so the retry routine is engaged.
"""
import dtool_s3.storagebroker
max_retry_time = 10
dtool_s3.storagebroker._upload_file = MagicMock(return_value=False)
dtool_s3.storagebroker._object_exists = MagicMock(return_value=None)
dtool_s3.storagebroker._put_item_with_retry = MagicMock(
side_effect=dtool_s3.storagebroker._put_item_with_retry)
with pytest.raises(dtool_s3.storagebroker.S3StorageBrokerPutItemError):
dtool_s3.storagebroker._put_item_with_retry(
s3client="dummy_s3client",
s3resource="dummy_s3resource",
fpath="dummy_fpath",
bucket="dummy_bucket",
dest_path="dummy_dest_path",
extra_args={},
max_retry_time=max_retry_time
)
assert dtool_s3.storagebroker._put_item_with_retry.call_count > 1
my_args = dtool_s3.storagebroker._put_item_with_retry.call_args
args, kwargs = my_args
assert kwargs['retry_time_spent'] >= max_retry_time
``` |
{
"source": "jotelha/dtool-sync",
"score": 2
} |
#### File: dtool-sync/dtool_sync/compare.py
```python
import logging
import json
import math
import dtoolcore
from dtool_cli.cli import CONFIG_PATH
def _make_marker(d):
"""Mark everything for comparison."""
if isinstance(d, list):
return [_make_marker(e) for e in d]
elif isinstance(d, dict):
return {k: _make_marker(v) for k, v in d.items()}
else:
return True
def _equal(source, target):
"""Treats slightly differing floats as equal."""
# type-dependent treatment:
# the lookup server yields floats with lower accuracy then the direct storage broker,
# i.e. comparison will fail at '1646312878.401044' == '1646312878.401'
if isinstance(source, float) and isinstance(target, float):
return math.isclose(source, target, rel_tol=1e-9, abs_tol=0.0)
else:
return source == target
def _compare(source, target, marker):
"""Compare source and target partially, as marked by marker."""
logger = logging.getLogger(__name__)
if isinstance(marker, dict):
for k, v in marker.items():
if k not in source:
logger.info("{} not in source '{}'.".format(k, source))
return False
if k not in target:
logger.info("{} not in target '{}'.".format(k, source))
return False
logger.debug("Descending into sub-tree '{}' of '{}'.".format(
source[k], source))
# descend
if not _compare(source[k], target[k], v):
return False # one failed comparison suffices
elif isinstance(marker, list): # source, target and marker must have same length
logger.debug("Branching into element wise sub-trees of '{}'.".format(
source))
for s, t, m in zip(source, target, marker):
if not _compare(s, t, m):
return False # one failed comparison suffices
else: # arrived at leaf, comparison desired?
if marker is not False: # yes
logger.debug("Comparing '{}' == '{}' -> {}.".format(
source, target, _equal(source, target)))
return _equal(source, target)
# comparison either not desired or successful for all elements
return True
def _compare_nested(source, target, marker=None):
"""Compare source and target partially, as marked by marker. If marker is None, then compare everything."""
if not marker:
marker = _make_marker(source)
return _compare(source, target, marker)
def _forward_compare(source, target, marker=None):
"""One-way-compare two dicts of nested dict and categorize into 'equal', 'differing' and 'missing'."""
missing = dict()
differing = dict()
equal = dict()
for k, sd in source.items():
if k in target:
td = target[k]
is_equal = _compare_nested(sd, td, marker)
if is_equal:
equal[k] = (sd, td)
else:
differing[k] = (sd, td)
else:
missing[k] = sd
return equal, differing, missing
def _ds_list_to_dict(l):
"""Convert list of dataset metadata entries to dict with UUIDs as keys."""
return {e['uuid']: e for e in l}
def _direct_list(base_uri, config_path=CONFIG_PATH):
"""Directly list all datasets at base_uri via suitable storage broker."""
base_uri = dtoolcore.utils.sanitise_uri(base_uri)
StorageBroker = dtoolcore._get_storage_broker(base_uri, config_path)
info = []
for uri in StorageBroker.list_dataset_uris(base_uri, config_path):
admin_metadata = dtoolcore._admin_metadata_from_uri(uri, config_path)
info.append(admin_metadata)
by_name = sorted(info, key=lambda d: d['name'])
return by_name
def compare_dataset_lists(source, target, marker=None):
"""One-way compare source and target dataset metadata lists by fields set True within marker."""
s = _ds_list_to_dict(source)
t = _ds_list_to_dict(target)
equal, differing, missing = _forward_compare(s, t, marker)
return list(equal.values()), list(differing.values()), list(missing.values())
```
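To illustrate the marker convention and the float tolerance in `_equal`, a small sketch with invented metadata (note the module treats these as private helpers):
```python
from dtool_sync.compare import _compare_nested

source = {"uuid": "1234", "name": "ds", "created_at": 1646312878.401044}
target = {"uuid": "1234", "name": "ds", "created_at": 1646312878.401}

# Full comparison passes because the timestamps agree within rel_tol=1e-9.
print(_compare_nested(source, target))  # True
# A marker of parallel structure restricts comparison to selected fields.
marker = {"uuid": True, "name": True, "created_at": False}
print(_compare_nested(source, target, marker))  # True
```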
#### File: jotelha/dtool-sync/setup.py
```python
from setuptools import setup
from setuptools_scm import get_version
version = get_version(root='.', relative_to=__file__)
def local_scheme(version):
"""Skip the local version (eg. +xyz of 0.6.1.dev4+gdf99fe2)
to be able to upload to Test PyPI"""
return ""
url = "https://github.com/IMTEK-Simulation/dtool-sync"
readme = open('README.rst').read()
setup(
name="dtool-sync",
packages=["dtool_sync"],
version=version,
description="One-way synchronization utility fo data management command line tool dtool.",
long_description=readme,
include_package_data=True,
author="<NAME>",
author_email="<EMAIL>",
use_scm_version={"local_scheme": local_scheme},
url=url,
setup_requires=['setuptools_scm'],
install_requires=[
"click",
"dtoolcore",
"dtool-cli",
"humanfriendly",
],
entry_points={
'dtool.cli': ['sync=dtool_sync.cli:sync', 'compare=dtool_sync.cli:compare'],
},
download_url="{}/tarball/{}".format(url, version),
license="MIT"
)
```
#### File: dtool-sync/tests/test_dtool_sync_cli.py
```python
import json
from click.testing import CliRunner
from . import compare_nested, comparison_marker_from_obj, compare_marked_nested
def test_dtool_diff_q(lhs_repository_fixture, rhs_repository_fixture, expected_output_diff_q):
from dtool_sync.cli import diff
runner = CliRunner()
result = runner.invoke(diff, ['-q', lhs_repository_fixture, rhs_repository_fixture])
assert result.exit_code == 0
assert '\n'.join(result.stdout.splitlines()[2:]) == '\n'.join(expected_output_diff_q.splitlines()[2:])
def test_dtool_compare_all_j(comparable_repositories_fixture, expected_output_compare_all_j):
from dtool_sync.cli import compare_all
lhs_uri, rhs_uri = comparable_repositories_fixture
runner = CliRunner()
result = runner.invoke(compare_all, ['-j', lhs_uri, rhs_uri])
assert result.exit_code == 0
out = json.loads(result.stdout)
expected = json.loads(expected_output_compare_all_j)
assert compare_nested(out, expected)
def test_dtool_compare_all_qj(comparable_repositories_fixture, expected_output_compare_all_qj):
from dtool_sync.cli import compare_all
lhs_uri, rhs_uri = comparable_repositories_fixture
runner = CliRunner()
result = runner.invoke(compare_all, ['-q', '-j', lhs_uri, rhs_uri])
assert result.exit_code == 0
out = json.loads(result.stdout)
expected = json.loads(expected_output_compare_all_qj)
assert compare_nested(out, expected)
def test_dtool_compare_all_jr(comparable_repositories_fixture, expected_output_compare_all_jr):
from dtool_sync.cli import compare_all
lhs_uri, rhs_uri = comparable_repositories_fixture
runner = CliRunner()
result = runner.invoke(compare_all, ['-j', '-r', lhs_uri, rhs_uri])
assert result.exit_code == 0
out = json.loads(result.stdout)
expected = json.loads(expected_output_compare_all_jr)
assert compare_nested(out, expected)
def test_dtool_compare_all_qu(comparable_repositories_fixture, expected_output_compare_all_qu):
from dtool_sync.cli import compare_all
lhs_uri, rhs_uri = comparable_repositories_fixture
runner = CliRunner()
result = runner.invoke(compare_all, ['-q', '-u', lhs_uri, rhs_uri])
assert result.exit_code == 0
assert result.stdout == expected_output_compare_all_qu
def test_dtool_sync_all(comparable_repositories_fixture, expected_output_compare_all_jr, expected_output_post_sync_all_compare_all_jr):
from dtool_sync.cli import sync_all, compare_all
lhs_uri, rhs_uri = comparable_repositories_fixture
runner = CliRunner()
# first, content of base URIs differs
result = runner.invoke(compare_all, ['-j', '-r', lhs_uri, rhs_uri])
assert result.exit_code == 0
out = json.loads(result.stdout)
expected = json.loads(expected_output_compare_all_jr)
assert compare_nested(out, expected)
# next, we sync
result = runner.invoke(sync_all, [lhs_uri, rhs_uri])
assert result.exit_code == 0
# eventually, content must be equal
result = runner.invoke(compare_all, ['-j', '-r', lhs_uri, rhs_uri])
assert result.exit_code == 0
out = json.loads(result.stdout)
expected = json.loads(expected_output_post_sync_all_compare_all_jr)
assert compare_nested(out, expected)
``` |
{
"source": "jotelha/fireworks",
"score": 2
} |
#### File: fireworks/utilities/dict_mods.py
```python
from __future__ import unicode_literals
"""
This module allows you to modify a dict (a spec) using another dict (an instruction).
The main method of interest is apply_dictmod().
This code is based heavily on the Ansible class of custodian <https://pypi.python.org/pypi/custodian>,
but simplifies it considerably for the limited use cases required by FireWorks.
"""
import copy
import json
import logging
import re
from monty.design_patterns import singleton
__author__ = "<NAME>"
__credits__ = "<NAME>"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__date__ = "Jun 1, 2012"
def _log_nested_dict(log_func, dct):
for l in json.dumps(dct, indent=2, default=str).splitlines():
log_func(l)
def dict_select(base_dct, selector_dct):
"""Select subset of nested base_dct by nested hierarchy marked by selector_dct.
Args:
base_dct: dict or list or anything
selector_dct: dict or list or bool
Returns:
dct: same type as base_dct; for nested dicts or lists, only the
nested fields marked True in the parallel-structured selector_dct
"""
logger = logging.getLogger(__name__)
if isinstance(selector_dct, dict):
dct = {}
for k, v in selector_dct.items():
if k not in base_dct:
logger.warning("{} not in base_dct '{}'.".format(k, base_dct))
elif v is not False:
logger.debug("Descending into sub-tree '{}' of '{}'.".format(
base_dct[k], base_dct))
# descend
dct[k] = dict_select(base_dct[k], v)
else:
logger.debug("Deselected sub-tree '{}' of '{}'.".format(
base_dct[k], base_dct))
elif isinstance(selector_dct, list): # base_dct and selector_dct must have same length
logger.debug("Branching into element wise sub-trees of '{}'.".format(base_dct))
dct = [dict_select(base, selector) for base, selector in zip(base_dct, selector_dct) if selector is not False]
else: # arrived at leaf, selected
logger.debug("Selected value '{}'".format(base_dct))
dct = base_dct
return dct
# from https://gist.github.com/angstwad/bf22d1822c38a92ec0a9
def dict_inject(base_dct, injection_dct, add_keys=True):
""" Recursively inject inject_dict into base_dict. Recurses down into dicts nested
to an arbitrary depth, updating keys.
Will not alter base_dct or injection_dct, but return a deep copy without references to any of the former.
The optional argument ``add_keys``, determines whether keys which are
present in ``injection_dct`` but not ``base_dict`` should be included in the
new dict.
Args:
base_dct (dict): inject injection_dct into base_dct
injection_dct (dict):
add_keys (bool): whether to add new keys
Returns:
dct: constructed merge dict
"""
logger = logging.getLogger(__name__)
logger.debug("Inject 'injection_dct'...")
_log_nested_dict(logger.debug, injection_dct)
logger.debug("... into 'base_dct'...")
_log_nested_dict(logger.debug, base_dct)
if isinstance(injection_dct, dict) and isinstance(base_dct, dict):
logger.debug("Treating 'base_dct' and 'injection_dct' as parallel dicts...")
dct = copy.deepcopy(base_dct)
# injection_dct = injection_dct.copy()
for k, v in injection_dct.items():
if k in base_dct and isinstance(base_dct[k], dict) and isinstance(v, dict):
logger.debug("Descending into key '{}' for further injection.".format(k))
dct[k] = dict_inject(base_dct[k], v, add_keys=add_keys)
else: # inject
if k in base_dct:
logger.debug("Replacing dict item '{}: {}' with injection '{}'.".format(k, dct[k], injection_dct[k]))
else:
logger.debug("Inserting injection '{}' at key '{}'.".format(injection_dct[k], k))
dct[k] = copy.deepcopy(v)
elif isinstance(injection_dct, list) and isinstance(base_dct, list) and (len(injection_dct) == len(base_dct)):
logger.debug("Treating 'base_dct' and 'injection_dct' as parallel lists...")
# in this case base_dct and injecion_dct must have same length
dct = []
for base, injection in zip(base_dct, injection_dct):
if isinstance(base, dict) and isinstance(injection, dict):
logger.debug("Descending into list item '{}' and injection '{}' for further injection.".format(
base, injection))
dct.append(dict_inject(base, injection, add_keys=add_keys))
else:
logger.debug("Replacing list item '{}' with injection '{}'.".format(base, injection))
dct.append(copy.deepcopy(injection))
else: # arrived at leaf, inject
logger.debug("Treating 'base_dct' and 'injection_dct' as values.")
logger.debug("Replacing '{}' with injection '{}'.".format(base_dct, injection_dct))
dct = copy.deepcopy(injection_dct)
return dct
def get_nested_dict(input_dict, key):
current = input_dict
toks = key.split("->")
n = len(toks)
for i, tok in enumerate(toks):
if tok not in current and i < n - 1:
current[tok] = {}
elif i == n - 1:
return current, toks[-1]
current = current[tok]
def get_nested_dict_value(input_dict, key):
"""Uses '.' or '->'-splittable string as key to access nested dict."""
if key in input_dict:
val = input_dict[key]
else:
key = key.replace("->", ".") # make sure no -> left
split_key = key.split('.', 1)
if len(split_key) == 2:
key_prefix, key_suffix = split_key[0], split_key[1]
else: # not enough values to unpack
raise KeyError("'{:s}' not in {}".format(key, input_dict))
val = get_nested_dict_value(input_dict[key_prefix], key_suffix)
return val
def set_nested_dict_value(input_dict, key, val):
"""Uses '.' or '->'-splittable string as key and returns modified dict."""
if not isinstance(input_dict, dict):
# dangerous, just replace with dict
input_dict = {}
key = key.replace("->", ".") # make sure no -> left
split_key = key.split('.', 1)
if len(split_key) == 2:
key_prefix, key_suffix = split_key[0], split_key[1]
if key_prefix not in input_dict:
input_dict[key_prefix] = {}
input_dict[key_prefix] = set_nested_dict_value(
input_dict[key_prefix], key_suffix, val)
else: # not enough values to unpack
input_dict[key] = val
return input_dict
def arrow_to_dot(input_dict):
"""
Converts arrows ('->') in dict keys to dots '.' recursively.
Allows for storing MongoDB neseted document queries in MongoDB.
Args:
input_dict (dict)
Returns:
dict
"""
if not isinstance(input_dict, dict):
return input_dict
else:
return {k.replace("->", "."): arrow_to_dot(v) for k, v in input_dict.items()}
@singleton
class DictMods(object):
"""
Class to implement the supported mongo-like modifications on a dict.
Supported keywords include the following Mongo-based keywords, with the
usual meanings (refer to Mongo documentation for information):
_inc
_set
_unset
_push
_push_all
_add_to_set (but _each is not supported)
_pop
_pull
_pull_all
_rename
However, note that "_set" does not support modification of nested dicts
using the mongo {"a.b":1} notation. This is because mongo does not allow
keys with "." to be inserted. Instead, nested dict modification is
supported using a special "->" keyword, e.g. {"a->b": 1}
"""
def __init__(self):
self.supported_actions = {}
for i in dir(self):
if (not re.match(r'__\w+__', i)) and callable(getattr(self, i)):
self.supported_actions["_" + i] = getattr(self, i)
@staticmethod
def set(input_dict, settings):
for k, v in settings.items():
(d, key) = get_nested_dict(input_dict, k)
d[key] = v
@staticmethod
def unset(input_dict, settings):
for k in settings.keys():
(d, key) = get_nested_dict(input_dict, k)
del d[key]
@staticmethod
def push(input_dict, settings):
for k, v in settings.items():
(d, key) = get_nested_dict(input_dict, k)
if key in d:
d[key].append(v)
else:
d[key] = [v]
@staticmethod
def push_all(input_dict, settings):
for k, v in settings.items():
(d, key) = get_nested_dict(input_dict, k)
if key in d:
d[key].extend(v)
else:
d[key] = v
@staticmethod
def inc(input_dict, settings):
for k, v in settings.items():
(d, key) = get_nested_dict(input_dict, k)
if key in d:
d[key] += v
else:
d[key] = v
@staticmethod
def rename(input_dict, settings):
for k, v in settings.items():
if k in input_dict:
input_dict[v] = input_dict[k]
del input_dict[k]
@staticmethod
def add_to_set(input_dict, settings):
for k, v in settings.items():
(d, key) = get_nested_dict(input_dict, k)
if key in d and (not isinstance(d[key], (list, tuple))):
raise ValueError("Keyword {} does not refer to an array."
.format(k))
if key in d and v not in d[key]:
d[key].append(v)
elif key not in d:
d[key] = v
@staticmethod
def pull(input_dict, settings):
for k, v in settings.items():
(d, key) = get_nested_dict(input_dict, k)
if key in d and (not isinstance(d[key], (list, tuple))):
raise ValueError("Keyword {} does not refer to an array."
.format(k))
if key in d:
d[key] = [i for i in d[key] if i != v]
@staticmethod
def pull_all(input_dict, settings):
for k, v in settings.items():
if k in input_dict and (not isinstance(input_dict[k], (list, tuple))):
raise ValueError("Keyword {} does not refer to an array."
.format(k))
for i in v:
DictMods.pull(input_dict, {k: i})
@staticmethod
def pop(input_dict, settings):
for k, v in settings.items():
(d, key) = get_nested_dict(input_dict, k)
if key in d and (not isinstance(d[key], (list, tuple))):
raise ValueError("Keyword {} does not refer to an array."
.format(k))
if v == 1:
d[key].pop()
elif v == -1:
d[key].pop(0)
def apply_mod(modification, obj):
"""
Note that modify makes actual in-place modifications. It does not
return a copy.
Args:
modification:
Modification must be {action_keyword : settings}, where action_keyword is a
supported DictMod
obj:
A dict to be modified
"""
for action, settings in modification.items():
if action in DictMods().supported_actions:
DictMods().supported_actions[action].__call__(obj, settings)
else:
raise ValueError("{} is not a supported action!".format(action))
``` |
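A short sketch of `apply_mod` with the "->" nested-key convention described in the `DictMods` docstring; the spec dict is made up:
```python
from fireworks.utilities.dict_mods import apply_mod

spec = {"a": {"b": 1}, "tags": ["x"]}
apply_mod({"_set": {"a->b": 2}}, spec)    # nested set via "->"
apply_mod({"_push": {"tags": "y"}}, spec)
print(spec)  # -> {'a': {'b': 2}, 'tags': ['x', 'y']}
```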
{
"source": "jotelha/fwrlm",
"score": 2
} |
#### File: fwrlm/fwrlm/fwrlm.py
```python
import os
import subprocess
import time
from .base import FireWorksRocketLauncherManager
class DummyManager(FireWorksRocketLauncherManager):
"""Testing purpose daemon."""
@property
def pidfile_name(self):
return super().pidfile_name(prefix='.dummy.')
@property
def outfile_name(self):
return os.path.join(self.logdir_loc,"dummy_{:s}.out"
.format(self.timestamp))
@property
def errfile_name(self):
return os.path.join(self.logdir_loc,"dummy_{:s}.err"
.format(self.timestamp))
def spawn(self):
"""Simple system shell dummy while loop for testing purposes"""
args = ['while [ True ]; do printf "."; sleep 5; done']
self.logger.debug("Evoking '{cmd:s}'".format(cmd=' '.join(args)))
p = subprocess.Popen(args, cwd=self.launchpad_loc, shell=True)
outs, errs = p.communicate()
self.logger.debug("Subprocess exited with return code = {}"
.format(p.returncode))
class SSHTunnelManager(FireWorksRocketLauncherManager):
"""Permanent SSH tunnel via paramiko daemon."""
@property
def pidfile_name(self):
return super().pidfile_name(prefix=".ssh_tunnel.{local_port:d}:@{remote_host:s}:{remote_port:d}"
":{jump_user:s}@{jump_host:}.".format(
local_port = self.local_port,
remote_host = self.remote_host,
remote_port = self.remote_port,
jump_user = self.jump_user,
jump_host = self.jump_host))
@property
def outfile_name(self):
return os.path.join(self.logdir_loc, "ssh_tunnel_{:s}.out"
.format(self.timestamp))
@property
def errfile_name(self):
return os.path.join(self.logdir_loc, "ssh_tunnel_{:s}.err"
.format(self.timestamp))
def spawn(self):
"""SSH forward based on FWRLM_config.yaml settings."""
from .utils.ssh_forward import forward
forward(
remote_host = self.remote_host,
remote_port = self.remote_port,
local_port = self.local_port,
ssh_host = self.jump_host,
ssh_user = self.jump_user,
ssh_keyfile = self.ssh_key,
ssh_port = self.ssh_port,
port_file = None)
class RLaunchManager(FireWorksRocketLauncherManager):
"""FireWorks rlaunch daemon."""
@property
def pidfile_name(self):
return super().pidfile_name(prefix='.rlaunch.')
@property
def outfile_name(self):
return os.path.join(self.logdir_loc, "rlaunch_{:s}.out"
.format(self.timestamp))
@property
def errfile_name(self):
return os.path.join(self.logdir_loc, "rlaunch_{:s}.err"
.format(self.timestamp))
def spawn(self):
"""Spawn rlaunch."""
args = [
'rlaunch',
'-l', self.fw_auth_file_path,
'-w', self.rlaunch_fworker_file,
'--loglvl', self.loglevel, 'rapidfire',
'--nlaunches', 'infinite',
'--sleep', self.rlaunch_interval,
]
args = [a if isinstance(a, str) else str(a) for a in args]
self.logger.info("Evoking '{cmd:s}'".format(cmd=' '.join(args)))
p = subprocess.Popen(args, cwd=self.launchpad_loc)
outs, errs = p.communicate()
self.logger.info("Subprocess exited with return code = {}"
.format(p.returncode))
class QLaunchManager(FireWorksRocketLauncherManager):
"""FireWorks qlaunch daemon."""
@property
def pidfile_name(self):
return super().pidfile_name(prefix='.qlaunch.')
@property
def outfile_name(self):
return os.path.join(self.logdir_loc,"qlaunch_{:s}.out"
.format(self.timestamp))
@property
def errfile_name(self):
return os.path.join(self.logdir_loc,"qlaunch_{:s}.err"
.format(self.timestamp))
def spawn(self):
"""Spawn qlaunch."""
args = [
'qlaunch', '-r',
'-l', self.fw_auth_file_path,
'-w', self.qlaunch_fworker_file,
'-q', self.qadapter_file,
'--loglvl', self.loglevel, 'rapidfire',
'--nlaunches', 'infinite',
'--sleep', self.qlaunch_interval,
]
args = [a if isinstance(a, str) else str(a) for a in args]
self.logger.info("Evoking '{cmd:s}'".format(cmd=' '.join(args)))
p = subprocess.Popen(args, cwd=self.launchpad_loc)
outs, errs = p.communicate()
self.logger.info("Subprocess exited with return code = {}"
.format(p.returncode))
class MLaunchManager(FireWorksRocketLauncherManager):
"""FireWorks rlaunch multi daemon."""
@property
def pidfile_name(self):
return super().pidfile_name(prefix='.mlaunch.')
@property
def outfile_name(self):
return os.path.join(self.logdir_loc, "mlaunch_{:s}.out"
.format(self.timestamp))
@property
def errfile_name(self):
return os.path.join(self.logdir_loc, "mlaunch_{:s}.err"
.format(self.timestamp))
def spawn(self):
"""Spawn raunch multi."""
args = [
'rlaunch',
'-l', self.fw_auth_file_path,
'-w', self.rlaunch_fworker_file,
'--loglvl', self.loglevel, 'multi', self.rlaunch_multi_nprocesses,
'--nlaunches', 'infinite',
'--sleep', self.rlaunch_interval,
]
args = [a if isinstance(a, str) else str(a) for a in args]
self.logger.info("Evoking '{cmd:s}'".format(cmd=' '.join(args)))
p = subprocess.Popen(args, cwd=self.launchpad_loc)
outs, errs = p.communicate()
self.logger.info("Subprocess exited with return code = {}"
.format(p.returncode))
class LPadRecoverOfflineManager(FireWorksRocketLauncherManager):
"""FireWorks recover offline loop daemon."""
@property
def pidfile_name(self):
return super().pidfile_name(prefix='.lpad_recover_offline.')
@property
def outfile_name(self):
return os.path.join(self.logdir_loc, "lpad_recover_offline_{:s}.out"
.format(self.timestamp))
@property
def errfile_name(self):
return os.path.join(self.logdir_loc, "lpad_recover_offline_{:s}.err"
.format(self.timestamp))
def spawn(self):
"""Spawn recover offline loop."""
args = [
'lpad',
'-l', self.fw_auth_file_path,
'--loglvl', self.loglevel,
'recover_offline',
'-w', self.qlaunch_fworker_file,
]
args = [a if isinstance(a, str) else str(a) for a in args]
self.logger.info("Evoking '{cmd:s}' repeatedly in a loop"
.format(cmd=' '.join(args)))
while True:
p = subprocess.Popen(args, cwd=self.launchpad_loc)
outs, errs = p.communicate()
self.logger.info("Subprocess exited with return code = {}"
.format(p.returncode))
time.sleep(self.lpad_recover_offline_interval)
class LPadWebGuiManager(FireWorksRocketLauncherManager):
"""FireWorks web gui daemon."""
@property
def pidfile_name(self):
return super().pidfile_name(prefix='.lpad_webgui.', suffix=':{port:}'.format(port=self.webgui_port))
@property
def outfile_name(self):
return os.path.join(self.logdir_loc,"webgui_{:s}.out"
.format(self.timestamp))
@property
def errfile_name(self):
return os.path.join(self.logdir_loc,"webgui_{:s}.err"
.format(self.timestamp))
def spawn(self):
"""Spawn webgui."""
args = [
'lpad', 'webgui',
'--server_mode', '--nworkers', 1,
'--webgui_username', self.webgui_username,
'--webgui_password', self.webgui_password,
]
args = [a if isinstance(a, str) else str(a) for a in args]
self.logger.info("Evoking '{cmd:s}'".format(cmd=' '.join(args)))
p = subprocess.Popen(args, cwd=self.launchpad_loc)
outs, errs = p.communicate()
self.logger.info("Subprocess exited with return code = {}"
.format(p.returncode))
```
#### File: jotelha/fwrlm/setup.py
```python
import os
from setuptools import setup, find_packages
from setuptools_scm import get_version
__author__ = "<NAME>"
__copyright__ = "Copyright 2020, IMTEK Simulation, University of Freiburg"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__date__ = "Mar 18, 2020"
module_dir = os.path.dirname(os.path.abspath(__file__))
readme = open(os.path.join(module_dir, 'README.md')).read()
version = get_version(root='.', relative_to=__file__)
def local_scheme(version):
"""Skip the local version (eg. +xyz of 0.6.1.dev4+gdf99fe2)
to be able to upload to Test PyPI"""
return ""
url = 'https://github.com/jotelha/fwrlm'
if __name__ == "__main__":
setup(
author='<NAME>',
author_email='<EMAIL>',
name='fwrlm',
description='FireWorks RocketLauncher Manager',
long_description=readme,
long_description_content_type="text/markdown",
url=url,
use_scm_version={
"root": '.',
"relative_to": __file__,
"write_to": os.path.join("fwrlm", "version.py"),
"local_scheme": local_scheme},
packages=find_packages(),
include_package_data=True,
python_requires='>=3.6.5',
zip_safe=False,
install_requires=[
'ansible >= 2.9.1', # TODO: the dependency on ansible is only due to 4 simple jinja filters for the render utility, that should be removed at some point
'fireworks>=1.9.5',
'jinja2>=2.10',
'jinja2-time>=0.2.0',
'monty>=4.0.2',
'paramiko>=2.4.2',
'python-daemon>=2.2.4',
'pid>=3.0.0',
'psutil>=5.6.1',
'tabulate>=0.8.2',
],
setup_requires=['setuptools_scm'],
tests_require=['pytest'],
entry_points={
'console_scripts': [
'fwrlm = fwrlm.cli.fwrlm_run:main',
'render = fwrlm.cli.render_run:main',
]
},
download_url="{}/tarball/{}".format(url, version),
license='MIT',
)
``` |
{
"source": "joth76/mobile-chrome-apps",
"score": 2
} |
#### File: chrome-cordova/gcmServer/server.py
```python
import sys, json, random, string
import xmpp
################################################################################
SERVER = 'gcm.googleapis.com'
PORT = 5235
unacked_messages_quota = 1000
send_queue = []
client = None
################################################################################
# Return a random alphanumerical id
def random_id():
chars = string.ascii_letters + string.digits
rid = ''.join(random.choice(chars) for i in range(8))
return rid
################################################################################
def sendMessage(to, data):
send_queue.append({
'to': to,
'message_id': random_id(),
'data': data
})
################################################################################
def send(json_dict):
template = "<message><gcm xmlns='google:mobile:data'>{1}</gcm></message>"
content = template.format(client.Bind.bound[0], json.dumps(json_dict))
client.send(xmpp.protocol.Message(node = content))
################################################################################
def flush_queued_messages():
global unacked_messages_quota
while len(send_queue) and unacked_messages_quota > 0:
send(send_queue.pop(0))
unacked_messages_quota -= 1
################################################################################
def message_callback(session, message):
global unacked_messages_quota
gcm = message.getTags('gcm')
if not gcm:
return
msg = json.loads(gcm[0].getData())
# If this just an ACK/NACK message from the gcm server, don't actually handle the payload
if msg.has_key('message_type') and (msg['message_type'] == 'ack' or msg['message_type'] == 'nack'):
# TODO: Do we need to do something special for nack?
unacked_messages_quota += 1
return
# Okay, this is a message from a client. First things first, we have to send ACK to gcm server that we got it
send({
'to': msg['from'],
'message_type': 'ack',
'message_id': msg['message_id']
})
handleMessageInApplicationSpecificManner(msg)
################################################################################
def handleMessageInApplicationSpecificManner(msg):
payload = msg['data']
# payload['type'] is not a requirement, its just a convention I chose to use
def handlePingMessage(msg, payload):
# Reply with same message
sendMessage(msg['from'], { 'type': 'pong', 'message': payload['message'] })
handlers = {
'ping': handlePingMessage
}
if not payload.has_key('type') or not handlers.has_key(payload['type']):
print "WARN: Do not know how to handle this message:"
print json.dumps(payload, indent=2)
return
handler = handlers[payload['type']]
handler(msg, payload)
################################################################################
def readUsernameAndPasswordFromFile(path):
import json
json_data = open(path)
ret = json.load(json_data)
json_data.close()
return ret
################################################################################
def main():
global client
client = xmpp.Client('gcm.googleapis.com', debug=['socket'])
client.connect(server=(SERVER, PORT), secure=1, use_srv=False)
# TODO: support command line args for auth info / path to file
authData = readUsernameAndPasswordFromFile('gcm_auth_info.json')
auth = client.auth(authData['username'], authData['password'])
if not auth:
print 'Authentication failed!'
sys.exit(1)
client.RegisterHandler('message', message_callback)
while True:
client.Process(1)
flush_queued_messages()
################################################################################
if __name__ == '__main__':
main()
``` |
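For clarity, a sketch of the (hypothetical) client payload the 'ping' handler above expects, in the same Python 2 style as the module:
```python
# Shape of an upstream message after JSON decoding; values are invented.
example_msg = {
    'from': 'REGISTRATION_ID',
    'message_id': 'abc12345',
    'data': {'type': 'ping', 'message': 'hello'},
}
# handleMessageInApplicationSpecificManner(example_msg) would queue a
# {'type': 'pong', 'message': 'hello'} reply back to 'REGISTRATION_ID'.
```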
{
"source": "jo-tham/geosample",
"score": 3
} |
#### File: geosample/geosample/cli.py
```python
import geojson
import click
import json
from .geosample import sample_geojson
@click.command()
@click.argument('geojson-file', type=click.File())
@click.option('-n', type=int, default=100, help='Total number of samples')
@click.option('-o', type=click.Path(exists=False),
default='sample.geojson', help='Output path')
@click.option('-s', '--seed', type=int, default=1111,
help='Random seed')
def main(geojson_file, n, o, seed):
"""Generate sample of locations in geojson polygons"""
gj = json.loads(geojson_file.read())
points = sample_geojson(gj, n, seed=seed)
result = geojson.Feature(geometry=points, properties={})
with open(o, 'w') as of:
of.write(json.dumps(result))
``` |
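A hedged sketch of exercising the command in-process with click's test runner; "area.geojson" is a hypothetical input file:
```python
from click.testing import CliRunner
from geosample.cli import main

runner = CliRunner()
result = runner.invoke(
    main, ["area.geojson", "-n", "50", "-o", "sample.geojson", "--seed", "42"]
)
print(result.exit_code)  # 0 on success
```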
{
"source": "jo-tham/sisosig",
"score": 2
} |
#### File: sisosig/sisosig/cli.py
```python
import os
import json
import click
from pymongo import MongoClient
from pymongo.errors import ConnectionFailure
from .sisosig import DarkskyClient
@click.group()
def cli():
pass
@cli.command()
@click.option('-l', '--locations', type=(float, float), multiple=True,
help='Coordinates of location for which to get data')
@click.option('--locations-file', type=click.File(),
help='Geojson file with coordinates; overrides "-l"')
@click.option('-h', '--db-host', type=str, default='localhost',
help='MongoDB host')
@click.option('-p', '--db-port', type=int, default=27017,
help='MongoDB port')
@click.option('-d', '--db-name', type=str, default='sisosig',
help='MongoDB database name')
@click.option('-c', '--collection-name', type=str, default='forecasts',
help='MongoDB collection name')
@click.option('-s', '--save-to-db', is_flag=True,
help='Persist results to the database collection')
@click.option('-t', '--threads', type=int, default=10,
help='Maximum number of threads to use for api calls')
@click.option('-k', '--api-key', type=str,
default=lambda: os.environ.get('DARKSKY_API_KEY'),
help='darksky.net API key')
@click.option('-T', '--time', type=str,
default='',
help='Unix time for data - e.g. date --date="2 days ago" +%s')
def get(locations, locations_file,
db_name, collection_name,
db_host, db_port, save_to_db,
threads, api_key, time):
"""Get forecasts or observations"""
api_client = DarkskyClient(key=api_key, threads=threads)
db_client = MongoClient(host=db_host, port=db_port)
collection = db_client[db_name][collection_name]
try:
db_client.admin.command('ismaster')
except ConnectionFailure:
click.echo("{}:{} not available".format(db_host, db_port))
raise
if locations_file:
locations = json.loads(
locations_file.read()
)['geometry']['coordinates']
# geojson is lon,lat instead of lat,lon
locations = [i[::-1] for i in locations]
result = api_client.get_locations(locations, time)
if save_to_db:
click.echo("Saving to database")
collection.insert_many(result)
else:
click.echo(result)
if __name__ == "__main__":
cli()
``` |
{
"source": "jothepro/djinni-library-template",
"score": 2
} |
#### File: jothepro/djinni-library-template/conanfile.py
```python
from conans import ConanFile, CMake, tools
required_conan_version = ">=1.36"
def get_version():
"""tries to determine the library version based on the git tag, and write it to the VERSION file.
If no tag can be found, the version is loaded from the VERSION file."""
version = ""
try:
version = tools.Git().run("describe --tags")[1:]
tools.save("VERSION", version)
except:
version = tools.load("VERSION")
return version
class MyLibraryConan(ConanFile):
name = "my_djinni_library"
version = get_version()
description = """A basic Djinni C++ library project template using CMake and Conan."""
settings = "os", "compiler", "build_type", "arch"
license = "AGPL-3.0-or-later"
generators = "cmake_find_package", "cmake_paths"
exports = "VERSION"
exports_sources = "lib/src/*", "lib/include/*", "lib/CMakeLists.txt", "lib/*.djinni", \
"lib/platform/*/CMakeLists.txt", "lib/platform/*/src/*", "lib/platform/*/include/*", "test/*", \
"cmake/*", "VERSION", "LICENSE", "CMakeLists.txt"
author = "jothepro"
requires = (
)
build_requires = (
"catch2/2.13.4",
"djinni-generator/1.2.0"
)
def build(self):
generator = None
if tools.is_apple_os(self.settings.os):
generator = "Xcode"
elif self.settings.os == "Windows":
generator = "Visual Studio 16 2019"
cmake = CMake(self, generator=generator)
if self.settings.os == "Android":
cmake.definitions["ANDROID_PLATFORM"] = self.settings.os.api_level
if not tools.get_env("CONAN_RUN_TESTS", True):
cmake.definitions["BUILD_TESTING"] = "OFF"
cmake.configure()
cmake.build()
if tools.get_env("CONAN_RUN_TESTS", True):
cmake.test()
cmake.install()
def package_info(self):
self.cpp_info.libs = tools.collect_libs(self)
``` |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.