Dataset schema (each record below lists these fields in this order; ⌀ marks a nullable field):

| Column | Type | Range / values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 5 to 2.06M |
| ext | string | 10 distinct values |
| lang | string | 1 distinct value |
| max_stars_repo_path | string | length 3 to 248 |
| max_stars_repo_name | string | length 5 to 125 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 | 1 to 191k ⌀ |
| max_stars_repo_stars_event_min_datetime | string | length 24 ⌀ |
| max_stars_repo_stars_event_max_datetime | string | length 24 ⌀ |
| max_issues_repo_path | string | length 3 to 248 |
| max_issues_repo_name | string | length 5 to 125 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 | 1 to 67k ⌀ |
| max_issues_repo_issues_event_min_datetime | string | length 24 ⌀ |
| max_issues_repo_issues_event_max_datetime | string | length 24 ⌀ |
| max_forks_repo_path | string | length 3 to 248 |
| max_forks_repo_name | string | length 5 to 125 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k ⌀ |
| max_forks_repo_forks_event_min_datetime | string | length 24 ⌀ |
| max_forks_repo_forks_event_max_datetime | string | length 24 ⌀ |
| content | string | length 5 to 2.06M |
| avg_line_length | float64 | 1 to 1.02M |
| max_line_length | int64 | 3 to 1.03M |
| alphanum_fraction | float64 | 0 to 1 |
| count_classes | int64 | 0 to 1.6M |
| score_classes | float64 | 0 to 1 |
| count_generators | int64 | 0 to 651k |
| score_generators | float64 | 0 to 1 |
| count_decorators | int64 | 0 to 990k |
| score_decorators | float64 | 0 to 1 |
| count_async_functions | int64 | 0 to 235k |
| score_async_functions | float64 | 0 to 1 |
| count_documentation | int64 | 0 to 1.04M |
| score_documentation | float64 | 0 to 1 |

1c2b4a1c07a03c84645790de2fd147b0a49af942 | 779 | py | Python | Python Files/Dataset_Formating/Audio_splicing.py | brennanMosher/Music-Genre-Recognition-using-a-Machine-Learning-Appraoch | 7834fe5d709e894322ad76ef118067febaa78bce | ["MIT"] | 1 | 2021-04-13T16:22:27.000Z | 2021-04-13T16:22:27.000Z | Python Files/Dataset_Formating/Audio_splicing.py | brennanMosher/Music-Genre-Recognition-using-a-Machine-Learning-Appraoch | 7834fe5d709e894322ad76ef118067febaa78bce | ["MIT"] | null | null | null | Python Files/Dataset_Formating/Audio_splicing.py | brennanMosher/Music-Genre-Recognition-using-a-Machine-Learning-Appraoch | 7834fe5d709e894322ad76ef118067febaa78bce | ["MIT"] | null | null | null |

from pydub import AudioSegment
import os
import math
from pathlib import Path
'''
Splice wav files into multiple segments.
'''
LENGTH = 3 # Set splice length in seconds

def splice(audioPath, outputPath):
    # try:
    #     os.mkdir('Spliced Spectrogram training') # Need to figure out where to put this
    # except OSError:
    #     print("Creation of the directory failed")
    audio = AudioSegment.from_wav(audioPath)
    count = math.ceil(audio.duration_seconds/LENGTH) # Do we want the last part of audio?
    t1 = 0
    t2 = LENGTH*1000
    for i in range(count):
        newAudio = audio[t1:t2]
        newPath = outputPath+Path(audioPath).stem+'_splice'+str(i)+'.wav'
        newAudio.export(newPath, format="wav")
        t1 = t2
        t2 = t2 + LENGTH*1000
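
# Example call (hypothetical paths): splice('audio/blues_001.wav', 'spliced/')
# writes spliced/blues_001_splice0.wav, blues_001_splice1.wav, ... as 3-second segments.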
| 25.129032 | 89 | 0.65982 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 289 | 0.370988 |

1c2b5a500905db564cebad53847b80d4840a37d9 | 3,947 | py | Python | manpages.py | mba811/dash-manpages-zh | 94f7345f48084c2fa22ae00996920d1309458649 | ["Apache-2.0"] | 1 | 2020-04-09T10:51:01.000Z | 2020-04-09T10:51:01.000Z | manpages.py | mba811/dash-manpages-zh | 94f7345f48084c2fa22ae00996920d1309458649 | ["Apache-2.0"] | null | null | null | manpages.py | mba811/dash-manpages-zh | 94f7345f48084c2fa22ae00996920d1309458649 | ["Apache-2.0"] | 1 | 2020-09-16T03:04:18.000Z | 2020-09-16T03:04:18.000Z |

#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@author: Wu Liang
@contact:
@date: 2014/06/23
"""
import os
import sqlite3
import urllib2
import shutil
import tarfile
import hashlib
import codecs
from mako.template import Template
from pyquery import PyQuery
currentPath = os.path.join(os.path.dirname(os.path.realpath(__file__)))
name = "manpages"
baseName = "manpages-zh"
output = baseName + ".docset"
appName = "dash-" + baseName
tarFileName = baseName + ".tgz"
feedName = baseName + ".xml"
version = "1.5.0"
docsetPath = os.path.join(currentPath, output, "Contents", "Resources", "Documents")
# Step 2: Copy the HTML Documentation
fin = codecs.open(os.path.join(docsetPath, "index.html"), "r", "utf-8")
content = fin.read()
fin.close()
jQuery = PyQuery(content)
jQuery.find("body").empty()
fileNames = []
itemTemplate = Template("<a href='html/${fileName}'>${name}</a><br />\n")
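# For a page "ls.html" (hypothetical), itemTemplate renders to: <a href='html/ls.html'>ls</a><br />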
for fileName in os.listdir(os.path.join(docsetPath, "html")):
    fileNames.append({
        "name": fileName.split(".")[0],
        "fileName": fileName
    })
    jQuery.find("body").append(itemTemplate.render(name = fileName.split(".")[0], fileName = fileName))
fin = codecs.open(os.path.join(docsetPath, "index.html"), "w", "utf-8")
newContent = jQuery.html()
fin.write(newContent)
fin.close()
# Step 3: create the Info.plist file
infoTemplate = Template('''<?xml version="1.0" encoding="UTF-8"?>
<plist version="1.0">
<dict>
<key>CFBundleIdentifier</key>
<string>${name}</string>
<key>CFBundleName</key>
<string>${name}</string>
<key>DocSetPlatformFamily</key>
<string>${name}</string>
<key>dashIndexFilePath</key>
<string>index.html</string>
<key>dashIndexFilePath</key>
<string>index.html</string>
<key>isDashDocset</key><true/>
<key>isJavaScriptEnabled</key><true/>
</dict>
</plist>''')
infoPlistFile = os.path.join(currentPath, output, "Contents", "Info.plist")
fin = open(infoPlistFile, "w")
fin.write(infoTemplate.render(name = name))
fin.close()
# Step 4: Create the SQLite Index
dbFile = os.path.join(currentPath, output, "Contents", "Resources", "docSet.dsidx")
if os.path.exists(dbFile):
    os.remove(dbFile)
db = sqlite3.connect(dbFile)
cursor = db.cursor()
try:
    cursor.execute("DROP TABLE searchIndex;")
except Exception:
    pass
cursor.execute('CREATE TABLE searchIndex(id INTEGER PRIMARY KEY, name TEXT, type TEXT, path TEXT);')
cursor.execute('CREATE UNIQUE INDEX anchor ON searchIndex (name, type, path);')
insertTemplate = Template("INSERT OR IGNORE INTO searchIndex(name, type, path) VALUES ('${name}', '${type}', '${path}');")
# Step 5: Populate the SQLite Index
for result in fileNames:
    sql = insertTemplate.render(name = result["name"], type = "Builtin", path = "html/" + result["fileName"])
    print sql
    cursor.execute(sql)
db.commit()
db.close()
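
# For example, a page named "ls.html" (hypothetical) yields:
#   INSERT OR IGNORE INTO searchIndex(name, type, path) VALUES ('ls', 'Builtin', 'html/ls.html');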
# Step 6: copy icon
shutil.copyfile(os.path.join(currentPath, "icon.png"),
                os.path.join(currentPath, output, "icon.png"))
shutil.copyfile(os.path.join(currentPath, "icon@2x.png"),
                os.path.join(currentPath, output, "icon@2x.png"))
# Step 7: package the docset into a tarball
if not os.path.exists(os.path.join(currentPath, "dist")):
    os.makedirs(os.path.join(currentPath, "dist"))
tarFile = tarfile.open(os.path.join(currentPath, "dist", tarFileName), "w:gz")
for root, dirNames, fileNames in os.walk(output):
    for fileName in fileNames:
        fullPath = os.path.join(root, fileName)
        tarFile.add(fullPath)
tarFile.close()
# Step 8: update the feed URL
feedTemplate = Template('''<entry>
<version>${version}</version>
<sha1>${sha1Value}</sha1>
<url>https://raw.githubusercontent.com/magicsky/${appName}/master/dist/${tarFileName}</url>
</entry>''')
fout = open(os.path.join(currentPath, "dist", tarFileName), "rb")
sha1Value = hashlib.sha1(fout.read()).hexdigest()
fout.close()
fin = open(os.path.join(currentPath, feedName), "w")
fin.write(feedTemplate.render(sha1Value = sha1Value, appName = appName, tarFileName = tarFileName, version = version))
fin.close()
| 30.835938 | 122 | 0.698759 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,578 | 0.398989 |

1c2c1ecff02208f628aa2e65eae53abaf0c94bd6 | 1,527 | py | Python | docs/conf.py | alexweav/nisystemlink-clients-python | f19a30907a7fef536043ecbddc5a755e5fedf846 | ["MIT"] | null | null | null | docs/conf.py | alexweav/nisystemlink-clients-python | f19a30907a7fef536043ecbddc5a755e5fedf846 | ["MIT"] | null | null | null | docs/conf.py | alexweav/nisystemlink-clients-python | f19a30907a7fef536043ecbddc5a755e5fedf846 | ["MIT"] | null | null | null |

import os
import sys
sys.path.insert(0, os.path.abspath(".."))
# --------------------------------------------------------------------------------------
project = "nisystemlink"
copyright = "2020, National Instruments"
author = "National Instruments"
# The short X.Y version
version = "0.1"
# The full version, including alpha/beta/rc tags
release = "0.1.3"
# --------------------------------------------------------------------------------------
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.napoleon",
    "sphinx.ext.viewcode",
    "sphinx_autodoc_typehints",
    "docs.cleanup",
]
master_doc = "index"
html_theme = "sphinx_rtd_theme"
html_extra_path = [
    "../LICENSE",
]
nitpicky = True
nitpick_ignore = [
    ("py:class", "datetime.datetime"),
    ("py:class", "datetime.timedelta"),
    ("py:class", "pathlib.Path"),
    ("py:data", "typing.Any"),
    ("py:data", "typing.Awaitable"),
    ("py:data", "typing.Dict"),
    ("py:data", "typing.Iterable"),
    ("py:data", "typing.List"),
    ("py:data", "typing.Optional"),
    ("py:data", "typing.Sequence"),
    ("py:data", "typing.Tuple"),
    ("py:data", "typing.Union"),
]
autodoc_default_options = {
    "inherited-members": True,
    "special-members": "__init__",
    "no-private-members": True,
}
# Don't let napoleon force methods to be included in the docs; use autodoc flags and our
# own docs.cleanup module for that.
napoleon_include_init_with_doc = False
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = False
| 26.789474 | 88 | 0.587426 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 954 | 0.624754 |

1c2c9eed7b32e658c90b6a2885b2e30dd90f1dbc | 2,702 | py | Python | multinet/api/views/common.py | multinet-app/multinet-api | a658d787f0fb9ba415ed85a1e37c29953486287f | ["Apache-2.0"] | null | null | null | multinet/api/views/common.py | multinet-app/multinet-api | a658d787f0fb9ba415ed85a1e37c29953486287f | ["Apache-2.0"] | 91 | 2021-03-15T19:00:15.000Z | 2022-03-11T00:04:05.000Z | multinet/api/views/common.py | multinet-app/multinet-api | a658d787f0fb9ba415ed85a1e37c29953486287f | ["Apache-2.0"] | 1 | 2022-02-05T15:53:04.000Z | 2022-02-05T15:53:04.000Z |

from typing import Dict, List
from arango.cursor import Cursor
from django.http.response import Http404
from django.shortcuts import get_object_or_404
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.request import Request
from rest_framework_extensions.mixins import NestedViewSetMixin
from multinet.api.models import Workspace, WorkspaceRole
from multinet.api.utils.arango import ArangoQuery

class MultinetPagination(LimitOffsetPagination):
    default_limit = 100


class ArangoPagination(LimitOffsetPagination):
    """Override the LimitOffsetPagination class to allow for use with arango cursors."""

    def _set_pre_query_params(self, request):
        self.limit = self.get_limit(request)
        if self.limit is None:
            return None
        self.offset = self.get_offset(request)
        self.request = request

    def _set_post_query_params(self):
        if self.count > self.limit and self.template is not None:
            self.display_page_controls = True

    def paginate_queryset(self, query: ArangoQuery, request: Request) -> List[Dict]:
        self._set_pre_query_params(request)
        paginated_query = query.paginate(self.limit, self.offset)
        cur: Cursor = paginated_query.execute(full_count=True)
        self.count = cur.statistics()['fullCount']
        self._set_post_query_params()
        return list(cur)
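
# Rough usage sketch (hypothetical view code): a DRF view would do
#   paginator = ArangoPagination()
#   rows = paginator.paginate_queryset(aql_query, request)
# limit/offset come from the request's query parameters, the query is presumably narrowed with
# an AQL LIMIT clause by ArangoQuery.paginate(), and the total row count is read from the
# cursor's fullCount statistic so the standard DRF paginated response can be built.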

class WorkspaceChildMixin(NestedViewSetMixin):
    def get_queryset(self):
        """
        Get the queryset for workspace child endpoints.

        Check that the requesting user has appropriate permissions for the associated workspace.
        """
        child_objects = super().get_queryset()

        # prevent warning for schema generation incompatibility
        if getattr(self, 'swagger_fake_view', False):
            return child_objects.none()

        parent_query_dict = self.get_parents_query_dict()
        workspace = get_object_or_404(
            Workspace.objects.select_related('owner'), name=parent_query_dict['workspace__name']
        )

        # No user or user permission required for public workspaces
        if workspace.public:
            return child_objects

        # Private workspace
        request_user = self.request.user
        if not request_user.is_authenticated:  # anonymous user
            raise Http404

        workspace_role = WorkspaceRole.objects.filter(
            workspace=workspace, user=request_user
        ).first()

        # If the user is at least a reader or the owner, grant access
        if workspace_role is not None or workspace.owner == request_user:
            return child_objects

        # Read access denied
        raise Http404
| 33.358025 | 96 | 0.703923 | 2,266 | 0.838638 | 0 | 0 | 0 | 0 | 0 | 0 | 535 | 0.198001 |

1c2cf799737827ae82cb008c68687ac40ab5260f | 2,613 | py | Python | scripts/tests/generate_host_files.py | NDevTK/cel | e97226416b6e12245564bfc1c3631d610d62f052 | ["BSD-3-Clause"] | null | null | null | scripts/tests/generate_host_files.py | NDevTK/cel | e97226416b6e12245564bfc1c3631d610d62f052 | ["BSD-3-Clause"] | null | null | null | scripts/tests/generate_host_files.py | NDevTK/cel | e97226416b6e12245564bfc1c3631d610d62f052 | ["BSD-3-Clause"] | null | null | null |

# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import logging
import os
import sys

def ParseArgs():
    parser = argparse.ArgumentParser(
        description='Host file generator for CELab E2E tests')

    all_tokens = ['project_id', 'storage_bucket', 'storage_prefix']
    template_help = 'The full path to the *.host.textpb template file to use. '
    template_help += 'Must contain the following tokens: %s' % all_tokens
    parser.add_argument(
        '--template', metavar='<host_file>', required=True, help=template_help)
    parser.add_argument(
        '--projects',
        metavar='<projectA;projectB;...>',
        dest="projects",
        required=True,
        help='The values to replace "<project_id>" with.')
    parser.add_argument(
        '--storage_bucket',
        metavar='<token>',
        dest="storage_bucket",
        required=True,
        help='The value to replace "<storage_bucket>" with.')
    parser.add_argument(
        '--storage_prefix',
        metavar='<token>',
        dest="storage_prefix",
        required=True,
        help='The value to replace "<storage_prefix>" with.')
    parser.add_argument(
        '--destination_dir',
        metavar='<path>',
        dest='destination',
        required=True,
        action='store',
        help='Where to collect extra logs on test failures')

    return parser.parse_args()
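
# Example invocation (hypothetical values):
#   python generate_host_files.py --template ./celab.host.textpb \
#       --projects "proj-a;proj-b" --storage_bucket my-bucket \
#       --storage_prefix logs --destination_dir ./out
# would emit ./out/proj-a.host.textpb and ./out/proj-b.host.textpb.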

def ConfigureLogging(args):
    logfmt = '%(asctime)s %(filename)s:%(lineno)s: [%(levelname)s] %(message)s'
    datefmt = '%Y/%m/%d %H:%M:%S'
    logging.basicConfig(level=logging.INFO, format=logfmt, datefmt=datefmt)

if __name__ == '__main__':
    args = ParseArgs()
    ConfigureLogging(args)

    logging.info("Arguments: %s" % args)

    if not os.path.exists(args.template):
        raise ValueError('Template host file not found: %s' % args.template)

    if not os.path.exists(args.destination):
        raise ValueError('Destination directory not found: %s' % args.destination)

    # Generate all the host files based off the arguments passed.
    with open(args.template, 'r') as f:
        template = f.read()

    for project_id in args.projects.split(';'):
        filename = "%s.host.textpb" % project_id
        destination = os.path.join(args.destination, filename)

        with open(destination, 'w') as f:
            logging.info("Generating %s" % destination)
            content = template.replace("<project_id>", project_id)
            content = content.replace("<storage_bucket>", args.storage_bucket)
            content = content.replace("<storage_prefix>", args.storage_prefix)
            f.write(content)

    sys.exit(0)
| 31.107143 | 78 | 0.677 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,063 | 0.406812 |

1c2db146a81095258082a5e01445b3cddf1eab20 | 8,037 | py | Python | users/models.py | moshthepitt/probsc | 9b8cab206bb1c41238e36bd77f5e0573df4d8e2d | ["MIT"] | null | null | null | users/models.py | moshthepitt/probsc | 9b8cab206bb1c41238e36bd77f5e0573df4d8e2d | ["MIT"] | null | null | null | users/models.py | moshthepitt/probsc | 9b8cab206bb1c41238e36bd77f5e0573df4d8e2d | ["MIT"] | null | null | null |

from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from django.urls import reverse
from django_extensions.db.models import TimeStampedModel
from mptt.models import MPTTModel, TreeForeignKey
from .managers import UserProfileManager, DepartmentManager, PositionManager
User = settings.AUTH_USER_MODEL

class Department(MPTTModel, TimeStampedModel):
    """
    Departments in an organisation
    """
    name = models.CharField(_("Name"), max_length=255)
    description = models.TextField(_("Description"), blank=True, default="")
    parent = TreeForeignKey('self', verbose_name=_("Parent"), null=True,
                            blank=True, related_name='children', db_index=True,
                            on_delete=models.PROTECT,
                            help_text=_("The parent department"))
    customer = models.ForeignKey(
        'customers.Customer', verbose_name=_("Customer"),
        on_delete=models.PROTECT)
    manager = models.ForeignKey(
        User, verbose_name=_("Manager"), on_delete=models.PROTECT,
        blank=True, null=True)
    active = models.BooleanField(_("Active"), default=True)
    objects = DepartmentManager()

    class Meta:
        verbose_name = _("Department")
        verbose_name_plural = _("Departments")
        ordering = ['name']

    def get_absolute_url(self):
        return "#"

    def get_edit_url(self):
        return reverse('users:departments_edit', args=[self.pk])

    def get_delete_url(self):
        return reverse('users:departments_delete', args=[self.pk])

    def get_list_url(self):
        return reverse('users:departments_list')

    def __str__(self):
        return self.name

class Position(MPTTModel, TimeStampedModel):
    """
    Job positions in an organisation
    """
    name = models.CharField(_("Name"), max_length=255)
    description = models.TextField(_("Description"), blank=True, default="")
    department = models.ForeignKey(
        Department, verbose_name=_("Department"), on_delete=models.PROTECT)
    parent = TreeForeignKey('self', verbose_name=_("Reports To"), null=True,
                            blank=True, related_name='children', db_index=True,
                            on_delete=models.PROTECT,
                            help_text=_("The parent Job Position"))
    supervisor = models.ForeignKey(
        User, verbose_name=_("Supervisor"), on_delete=models.PROTECT,
        blank=True, null=True)
    customer = models.ForeignKey(
        'customers.Customer', verbose_name=_("Customer"),
        on_delete=models.PROTECT)
    active = models.BooleanField(_("Active"), default=True)
    objects = PositionManager()

    class Meta:
        verbose_name = _("Job Positions")
        verbose_name_plural = _("Job Positions")
        ordering = ['name']

    def get_absolute_url(self):
        return "#"

    def get_edit_url(self):
        return reverse('users:positions_edit', args=[self.pk])

    def get_delete_url(self):
        return reverse('users:positions_delete', args=[self.pk])

    def get_list_url(self):
        return reverse('users:positions_list')

    def __str__(self):
        return "{} - {}".format(self.department.name, self.name)

@python_2_unicode_compatible
class UserProfile(models.Model):
    """
    Model used to store more information on users
    """
    ADMIN = '1'
    MEMBER = '2'
    EDITOR = '3'
    MEMBER_ROLE_CHOICES = (
        (ADMIN, _('Admin')),
        (EDITOR, _('Editor')),
        (MEMBER, _('Member')),
    )
    created_on = models.DateTimeField(_("Created on"), auto_now_add=True)
    updated_on = models.DateTimeField(_("Updated on"), auto_now=True)
    user = models.OneToOneField(User, verbose_name=_("User"))
    position = models.ForeignKey(Position, verbose_name=_(
        "job Position"), on_delete=models.SET_NULL, blank=True, null=True,
        default=None)
    customer = models.ForeignKey('customers.Customer', verbose_name=_(
        "Customer"), on_delete=models.SET_NULL, blank=True, null=True,
        default=None)
    role = models.CharField(
        _("Role"), max_length=1, choices=MEMBER_ROLE_CHOICES, blank=False,
        default=MEMBER)
    active = models.BooleanField(
        _("Active"), default=True, help_text="Is the staff member actively "
        "employed?")
    objects = UserProfileManager()

    class Meta:
        verbose_name = _("Staff Member")
        verbose_name_plural = _("Staff Members")
        ordering = ['user__first_name', 'user__last_name', 'user__email']

    def get_name(self):
        if self.user.get_full_name():
            return self.user.get_full_name()
        if self.user.email:
            return self.user.email
        return self.user.username

    def get_initials(self):
        if self.user.first_name and self.user.last_name:
            return "{}{}".format(self.user.first_name[0],
                                 self.user.last_name[0])
        if self.user.first_name:
            return self.user.first_name[0]
        if self.user.last_name:
            return self.user.last_name[0]
        return self.user.email[0]

    def is_admin(self):
        return self.role == self.ADMIN

    def is_editor(self):
        return self.role == self.EDITOR

    def can_edit(self):
        return self.role == self.EDITOR or self.role == self.ADMIN

    def get_subordinates(self):
        """
        Returns a queryset of UserProfile objects which report to this
        userprofile
        """
        if self.position:
            queryset = UserProfile.objects.active().exclude(
                id=self.id).filter(
                    models.Q(position__supervisor=self.user) |
                    models.Q(position__department__manager=self.user) |
                    models.Q(position__parent=self.position))
        else:
            queryset = UserProfile.objects.active().exclude(
                id=self.id).filter(
                    models.Q(position__supervisor=self.user) |
                    models.Q(position__department__manager=self.user))
        # get job positions of subs
        subordinate_positions = Position.objects.filter(
            userprofile__in=queryset)
        # get any position that may report to these positions
        # list of position ids of Positions that report to
        # subordinate_positions
        reporting_jp_ids = []
        for sub_p in subordinate_positions:
            reporting_jps = sub_p.get_descendants(include_self=False)
            if reporting_jps is not None:
                reporting_jp_ids = reporting_jp_ids + list(
                    reporting_jps.values_list('id', flat=True))
        reporting_jp_ids = list(set(reporting_jp_ids))
        # get user profiles with positions that report to subordinate_positions
        reporting_profiles = UserProfile.objects.active().filter(
            position__id__in=reporting_jp_ids)
        queryset = queryset.union(reporting_profiles)
        # unions result in weird filtering so we create a new queryset
        queryset_ids = list(set([x.id for x in queryset]))
        if queryset_ids:
            queryset = UserProfile.objects.filter(id__in=queryset_ids)
        else:
            queryset = UserProfile.objects.none()
        return queryset
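
    # Illustrative example (hypothetical data): if Alice supervises the "Sales Rep" position and
    # manages the "Sales" department, alice.userprofile.get_subordinates() returns the active
    # profiles holding positions she supervises, positions in her department, positions reporting
    # to her own position, and any positions descended from those.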
    def has_subordinates(self):
        return self.get_subordinates().exists()

    def get_department(self):
        if self.position is not None:
            return self.position.department.name
        return None

    def get_absolute_url(self):
        return "#"

    def get_edit_url(self):
        return reverse('users:userprofiles_edit', args=[self.pk])

    def get_delete_url(self):
        return "#"

    def get_list_url(self):
        return reverse('users:userprofiles_list')

    def __str__(self):
        return _("{user}").format(user=self.get_name())
| 33.911392 | 79 | 0.630459 | 7,568 | 0.941645 | 0 | 0 | 4,722 | 0.587533 | 0 | 0 | 1,335 | 0.166107 |

1c2e98b8bfffd32e002ee05aa4877b21658d72a4 | 59,466 | py | Python | azure-devops/azure/devops/released/build/build_client.py | imafidon2020/azure-devops-python-api | ea9075f0c54dbc10115a23a8b7ad34feacbbdc14 | ["MIT"] | 248 | 2019-05-10T14:20:24.000Z | 2022-03-29T12:17:27.000Z | azure-devops/azure/devops/released/build/build_client.py | AzureMentor/azure-devops-python-api | 3838e91d662dba1f77b43ad560ca23c1cb7e84e8 | ["MIT"] | 147 | 2019-05-08T14:20:49.000Z | 2022-03-28T19:36:21.000Z | azure-devops/azure/devops/released/build/build_client.py | AzureMentor/azure-devops-python-api | 3838e91d662dba1f77b43ad560ca23c1cb7e84e8 | ["MIT"] | 121 | 2019-05-08T06:24:39.000Z | 2022-03-01T12:58:02.000Z |

# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from ...v5_1.build import models
class BuildClient(Client):
"""Build
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(BuildClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = '965220d5-5bb9-42cf-8d67-9b146df2a5a4'
def create_artifact(self, artifact, project, build_id):
"""CreateArtifact.
Associates an artifact with a build.
:param :class:`<BuildArtifact> <azure.devops.v5_1.build.models.BuildArtifact>` artifact: The artifact.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:rtype: :class:`<BuildArtifact> <azure.devops.v5_1.build.models.BuildArtifact>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
content = self._serialize.body(artifact, 'BuildArtifact')
response = self._send(http_method='POST',
location_id='1db06c96-014e-44e1-ac91-90b2d4b3e984',
version='5.1',
route_values=route_values,
content=content)
return self._deserialize('BuildArtifact', response)
def get_artifact(self, project, build_id, artifact_name):
"""GetArtifact.
Gets a specific artifact for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param str artifact_name: The name of the artifact.
:rtype: :class:`<BuildArtifact> <azure.devops.v5_1.build.models.BuildArtifact>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if artifact_name is not None:
query_parameters['artifactName'] = self._serialize.query('artifact_name', artifact_name, 'str')
response = self._send(http_method='GET',
location_id='1db06c96-014e-44e1-ac91-90b2d4b3e984',
version='5.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('BuildArtifact', response)
def get_artifact_content_zip(self, project, build_id, artifact_name, **kwargs):
"""GetArtifactContentZip.
Gets a specific artifact for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param str artifact_name: The name of the artifact.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if artifact_name is not None:
query_parameters['artifactName'] = self._serialize.query('artifact_name', artifact_name, 'str')
response = self._send(http_method='GET',
location_id='1db06c96-014e-44e1-ac91-90b2d4b3e984',
version='5.1',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='application/zip')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_artifacts(self, project, build_id):
"""GetArtifacts.
Gets all artifacts for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:rtype: [BuildArtifact]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
response = self._send(http_method='GET',
location_id='1db06c96-014e-44e1-ac91-90b2d4b3e984',
version='5.1',
route_values=route_values)
return self._deserialize('[BuildArtifact]', self._unwrap_collection(response))
def get_file(self, project, build_id, artifact_name, file_id, file_name, **kwargs):
"""GetFile.
Gets a file from the build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param str artifact_name: The name of the artifact.
:param str file_id: The primary key for the file.
:param str file_name: The name that the file will be set to.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if artifact_name is not None:
query_parameters['artifactName'] = self._serialize.query('artifact_name', artifact_name, 'str')
if file_id is not None:
query_parameters['fileId'] = self._serialize.query('file_id', file_id, 'str')
if file_name is not None:
query_parameters['fileName'] = self._serialize.query('file_name', file_name, 'str')
response = self._send(http_method='GET',
location_id='1db06c96-014e-44e1-ac91-90b2d4b3e984',
version='5.1',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='application/octet-stream')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def delete_build(self, project, build_id):
"""DeleteBuild.
Deletes a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
self._send(http_method='DELETE',
location_id='0cd358e1-9217-4d94-8269-1c1ee6f93dcf',
version='5.1',
route_values=route_values)
def get_build(self, project, build_id, property_filters=None):
"""GetBuild.
Gets a build
:param str project: Project ID or project name
:param int build_id:
:param str property_filters:
:rtype: :class:`<Build> <azure.devops.v5_1.build.models.Build>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if property_filters is not None:
query_parameters['propertyFilters'] = self._serialize.query('property_filters', property_filters, 'str')
response = self._send(http_method='GET',
location_id='0cd358e1-9217-4d94-8269-1c1ee6f93dcf',
version='5.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('Build', response)
def get_builds(self, project, definitions=None, queues=None, build_number=None, min_time=None, max_time=None, requested_for=None, reason_filter=None, status_filter=None, result_filter=None, tag_filters=None, properties=None, top=None, continuation_token=None, max_builds_per_definition=None, deleted_filter=None, query_order=None, branch_name=None, build_ids=None, repository_id=None, repository_type=None):
"""GetBuilds.
Gets a list of builds.
:param str project: Project ID or project name
:param [int] definitions: A comma-delimited list of definition IDs. If specified, filters to builds for these definitions.
:param [int] queues: A comma-delimited list of queue IDs. If specified, filters to builds that ran against these queues.
:param str build_number: If specified, filters to builds that match this build number. Append * to do a prefix search.
:param datetime min_time: If specified, filters to builds that finished/started/queued after this date based on the queryOrder specified.
:param datetime max_time: If specified, filters to builds that finished/started/queued before this date based on the queryOrder specified.
:param str requested_for: If specified, filters to builds requested for the specified user.
:param str reason_filter: If specified, filters to builds that match this reason.
:param str status_filter: If specified, filters to builds that match this status.
:param str result_filter: If specified, filters to builds that match this result.
:param [str] tag_filters: A comma-delimited list of tags. If specified, filters to builds that have the specified tags.
:param [str] properties: A comma-delimited list of properties to retrieve.
:param int top: The maximum number of builds to return.
:param str continuation_token: A continuation token, returned by a previous call to this method, that can be used to return the next set of builds.
:param int max_builds_per_definition: The maximum number of builds to return per definition.
:param str deleted_filter: Indicates whether to exclude, include, or only return deleted builds.
:param str query_order: The order in which builds should be returned.
:param str branch_name: If specified, filters to builds that built branches that built this branch.
:param [int] build_ids: A comma-delimited list that specifies the IDs of builds to retrieve.
:param str repository_id: If specified, filters to builds that built from this repository.
:param str repository_type: If specified, filters to builds that built from repositories of this type.
:rtype: :class:`<GetBuildsResponseValue>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if definitions is not None:
definitions = ",".join(map(str, definitions))
query_parameters['definitions'] = self._serialize.query('definitions', definitions, 'str')
if queues is not None:
queues = ",".join(map(str, queues))
query_parameters['queues'] = self._serialize.query('queues', queues, 'str')
if build_number is not None:
query_parameters['buildNumber'] = self._serialize.query('build_number', build_number, 'str')
if min_time is not None:
query_parameters['minTime'] = self._serialize.query('min_time', min_time, 'iso-8601')
if max_time is not None:
query_parameters['maxTime'] = self._serialize.query('max_time', max_time, 'iso-8601')
if requested_for is not None:
query_parameters['requestedFor'] = self._serialize.query('requested_for', requested_for, 'str')
if reason_filter is not None:
query_parameters['reasonFilter'] = self._serialize.query('reason_filter', reason_filter, 'str')
if status_filter is not None:
query_parameters['statusFilter'] = self._serialize.query('status_filter', status_filter, 'str')
if result_filter is not None:
query_parameters['resultFilter'] = self._serialize.query('result_filter', result_filter, 'str')
if tag_filters is not None:
tag_filters = ",".join(tag_filters)
query_parameters['tagFilters'] = self._serialize.query('tag_filters', tag_filters, 'str')
if properties is not None:
properties = ",".join(properties)
query_parameters['properties'] = self._serialize.query('properties', properties, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if max_builds_per_definition is not None:
query_parameters['maxBuildsPerDefinition'] = self._serialize.query('max_builds_per_definition', max_builds_per_definition, 'int')
if deleted_filter is not None:
query_parameters['deletedFilter'] = self._serialize.query('deleted_filter', deleted_filter, 'str')
if query_order is not None:
query_parameters['queryOrder'] = self._serialize.query('query_order', query_order, 'str')
if branch_name is not None:
query_parameters['branchName'] = self._serialize.query('branch_name', branch_name, 'str')
if build_ids is not None:
build_ids = ",".join(map(str, build_ids))
query_parameters['buildIds'] = self._serialize.query('build_ids', build_ids, 'str')
if repository_id is not None:
query_parameters['repositoryId'] = self._serialize.query('repository_id', repository_id, 'str')
if repository_type is not None:
query_parameters['repositoryType'] = self._serialize.query('repository_type', repository_type, 'str')
response = self._send(http_method='GET',
location_id='0cd358e1-9217-4d94-8269-1c1ee6f93dcf',
version='5.1',
route_values=route_values,
query_parameters=query_parameters)
response_value = self._deserialize('[Build]', self._unwrap_collection(response))
continuation_token = self._get_continuation_token(response)
return self.GetBuildsResponseValue(response_value, continuation_token)
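
# Rough paging sketch (hypothetical caller code):
#   page = build_client.get_builds(project='MyProject', top=100)
#   while page.value:
#       handle(page.value)
#       if not page.continuation_token:
#           break
#       page = build_client.get_builds(project='MyProject', top=100,
#                                      continuation_token=page.continuation_token)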
class GetBuildsResponseValue(object):
def __init__(self, value, continuation_token):
"""
Response for the get_builds method
:param value:
:type value: :class:`<[Build]> <azure.devops.v5_1.build.models.[Build]>`
:param continuation_token: The continuation token to be used to get the next page of results.
:type continuation_token: str
"""
self.value = value
self.continuation_token = continuation_token
def queue_build(self, build, project, ignore_warnings=None, check_in_ticket=None, source_build_id=None):
"""QueueBuild.
Queues a build
:param :class:`<Build> <azure.devops.v5_1.build.models.Build>` build:
:param str project: Project ID or project name
:param bool ignore_warnings:
:param str check_in_ticket:
:param int source_build_id:
:rtype: :class:`<Build> <azure.devops.v5_1.build.models.Build>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if ignore_warnings is not None:
query_parameters['ignoreWarnings'] = self._serialize.query('ignore_warnings', ignore_warnings, 'bool')
if check_in_ticket is not None:
query_parameters['checkInTicket'] = self._serialize.query('check_in_ticket', check_in_ticket, 'str')
if source_build_id is not None:
query_parameters['sourceBuildId'] = self._serialize.query('source_build_id', source_build_id, 'int')
content = self._serialize.body(build, 'Build')
response = self._send(http_method='POST',
location_id='0cd358e1-9217-4d94-8269-1c1ee6f93dcf',
version='5.1',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('Build', response)
def update_build(self, build, project, build_id, retry=None):
"""UpdateBuild.
Updates a build.
:param :class:`<Build> <azure.devops.v5_1.build.models.Build>` build: The build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param bool retry:
:rtype: :class:`<Build> <azure.devops.v5_1.build.models.Build>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if retry is not None:
query_parameters['retry'] = self._serialize.query('retry', retry, 'bool')
content = self._serialize.body(build, 'Build')
response = self._send(http_method='PATCH',
location_id='0cd358e1-9217-4d94-8269-1c1ee6f93dcf',
version='5.1',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('Build', response)
def update_builds(self, builds, project):
"""UpdateBuilds.
Updates multiple builds.
:param [Build] builds: The builds to update.
:param str project: Project ID or project name
:rtype: [Build]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(builds, '[Build]')
response = self._send(http_method='PATCH',
location_id='0cd358e1-9217-4d94-8269-1c1ee6f93dcf',
version='5.1',
route_values=route_values,
content=content)
return self._deserialize('[Build]', self._unwrap_collection(response))
def get_build_changes(self, project, build_id, continuation_token=None, top=None, include_source_change=None):
"""GetBuildChanges.
Gets the changes associated with a build
:param str project: Project ID or project name
:param int build_id:
:param str continuation_token:
:param int top: The maximum number of changes to return
:param bool include_source_change:
:rtype: :class:`<GetBuildChangesResponseValue>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if include_source_change is not None:
query_parameters['includeSourceChange'] = self._serialize.query('include_source_change', include_source_change, 'bool')
response = self._send(http_method='GET',
location_id='54572c7b-bbd3-45d4-80dc-28be08941620',
version='5.1',
route_values=route_values,
query_parameters=query_parameters)
response_value = self._deserialize('[Change]', self._unwrap_collection(response))
continuation_token = self._get_continuation_token(response)
return self.GetBuildChangesResponseValue(response_value, continuation_token)
class GetBuildChangesResponseValue(object):
def __init__(self, value, continuation_token):
"""
Response for the get_build_changes method
:param value:
:type value: :class:`<[Change]> <azure.devops.v5_1.build.models.[Change]>`
:param continuation_token: The continuation token to be used to get the next page of results.
:type continuation_token: str
"""
self.value = value
self.continuation_token = continuation_token
def get_build_controller(self, controller_id):
"""GetBuildController.
Gets a controller
:param int controller_id:
:rtype: :class:`<BuildController> <azure.devops.v5_1.build.models.BuildController>`
"""
route_values = {}
if controller_id is not None:
route_values['controllerId'] = self._serialize.url('controller_id', controller_id, 'int')
response = self._send(http_method='GET',
location_id='fcac1932-2ee1-437f-9b6f-7f696be858f6',
version='5.1',
route_values=route_values)
return self._deserialize('BuildController', response)
def get_build_controllers(self, name=None):
"""GetBuildControllers.
Gets controller, optionally filtered by name
:param str name:
:rtype: [BuildController]
"""
query_parameters = {}
if name is not None:
query_parameters['name'] = self._serialize.query('name', name, 'str')
response = self._send(http_method='GET',
location_id='fcac1932-2ee1-437f-9b6f-7f696be858f6',
version='5.1',
query_parameters=query_parameters)
return self._deserialize('[BuildController]', self._unwrap_collection(response))
def create_definition(self, definition, project, definition_to_clone_id=None, definition_to_clone_revision=None):
"""CreateDefinition.
Creates a new definition.
:param :class:`<BuildDefinition> <azure.devops.v5_1.build.models.BuildDefinition>` definition: The definition.
:param str project: Project ID or project name
:param int definition_to_clone_id:
:param int definition_to_clone_revision:
:rtype: :class:`<BuildDefinition> <azure.devops.v5_1.build.models.BuildDefinition>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if definition_to_clone_id is not None:
query_parameters['definitionToCloneId'] = self._serialize.query('definition_to_clone_id', definition_to_clone_id, 'int')
if definition_to_clone_revision is not None:
query_parameters['definitionToCloneRevision'] = self._serialize.query('definition_to_clone_revision', definition_to_clone_revision, 'int')
content = self._serialize.body(definition, 'BuildDefinition')
response = self._send(http_method='POST',
location_id='dbeaf647-6167-421a-bda9-c9327b25e2e6',
version='5.1',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('BuildDefinition', response)
def delete_definition(self, project, definition_id):
"""DeleteDefinition.
Deletes a definition and all associated builds.
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
self._send(http_method='DELETE',
location_id='dbeaf647-6167-421a-bda9-c9327b25e2e6',
version='5.1',
route_values=route_values)
def get_definition(self, project, definition_id, revision=None, min_metrics_time=None, property_filters=None, include_latest_builds=None):
"""GetDefinition.
Gets a definition, optionally at a specific revision.
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
:param int revision: The revision number to retrieve. If this is not specified, the latest version will be returned.
:param datetime min_metrics_time: If specified, indicates the date from which metrics should be included.
:param [str] property_filters: A comma-delimited list of properties to include in the results.
:param bool include_latest_builds:
:rtype: :class:`<BuildDefinition> <azure.devops.v5_1.build.models.BuildDefinition>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
query_parameters = {}
if revision is not None:
query_parameters['revision'] = self._serialize.query('revision', revision, 'int')
if min_metrics_time is not None:
query_parameters['minMetricsTime'] = self._serialize.query('min_metrics_time', min_metrics_time, 'iso-8601')
if property_filters is not None:
property_filters = ",".join(property_filters)
query_parameters['propertyFilters'] = self._serialize.query('property_filters', property_filters, 'str')
if include_latest_builds is not None:
query_parameters['includeLatestBuilds'] = self._serialize.query('include_latest_builds', include_latest_builds, 'bool')
response = self._send(http_method='GET',
location_id='dbeaf647-6167-421a-bda9-c9327b25e2e6',
version='5.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('BuildDefinition', response)
def get_definitions(self, project, name=None, repository_id=None, repository_type=None, query_order=None, top=None, continuation_token=None, min_metrics_time=None, definition_ids=None, path=None, built_after=None, not_built_after=None, include_all_properties=None, include_latest_builds=None, task_id_filter=None, process_type=None, yaml_filename=None):
"""GetDefinitions.
Gets a list of definitions.
:param str project: Project ID or project name
:param str name: If specified, filters to definitions whose names match this pattern.
:param str repository_id: A repository ID. If specified, filters to definitions that use this repository.
:param str repository_type: If specified, filters to definitions that have a repository of this type.
:param str query_order: Indicates the order in which definitions should be returned.
:param int top: The maximum number of definitions to return.
:param str continuation_token: A continuation token, returned by a previous call to this method, that can be used to return the next set of definitions.
:param datetime min_metrics_time: If specified, indicates the date from which metrics should be included.
:param [int] definition_ids: A comma-delimited list that specifies the IDs of definitions to retrieve.
:param str path: If specified, filters to definitions under this folder.
:param datetime built_after: If specified, filters to definitions that have builds after this date.
:param datetime not_built_after: If specified, filters to definitions that do not have builds after this date.
:param bool include_all_properties: Indicates whether the full definitions should be returned. By default, shallow representations of the definitions are returned.
:param bool include_latest_builds: Indicates whether to return the latest and latest completed builds for this definition.
:param str task_id_filter: If specified, filters to definitions that use the specified task.
:param int process_type: If specified, filters to definitions with the given process type.
:param str yaml_filename: If specified, filters to YAML definitions that match the given filename.
:rtype: :class:`<GetDefinitionsResponseValue>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if name is not None:
query_parameters['name'] = self._serialize.query('name', name, 'str')
if repository_id is not None:
query_parameters['repositoryId'] = self._serialize.query('repository_id', repository_id, 'str')
if repository_type is not None:
query_parameters['repositoryType'] = self._serialize.query('repository_type', repository_type, 'str')
if query_order is not None:
query_parameters['queryOrder'] = self._serialize.query('query_order', query_order, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if min_metrics_time is not None:
query_parameters['minMetricsTime'] = self._serialize.query('min_metrics_time', min_metrics_time, 'iso-8601')
if definition_ids is not None:
definition_ids = ",".join(map(str, definition_ids))
query_parameters['definitionIds'] = self._serialize.query('definition_ids', definition_ids, 'str')
if path is not None:
query_parameters['path'] = self._serialize.query('path', path, 'str')
if built_after is not None:
query_parameters['builtAfter'] = self._serialize.query('built_after', built_after, 'iso-8601')
if not_built_after is not None:
query_parameters['notBuiltAfter'] = self._serialize.query('not_built_after', not_built_after, 'iso-8601')
if include_all_properties is not None:
query_parameters['includeAllProperties'] = self._serialize.query('include_all_properties', include_all_properties, 'bool')
if include_latest_builds is not None:
query_parameters['includeLatestBuilds'] = self._serialize.query('include_latest_builds', include_latest_builds, 'bool')
if task_id_filter is not None:
query_parameters['taskIdFilter'] = self._serialize.query('task_id_filter', task_id_filter, 'str')
if process_type is not None:
query_parameters['processType'] = self._serialize.query('process_type', process_type, 'int')
if yaml_filename is not None:
query_parameters['yamlFilename'] = self._serialize.query('yaml_filename', yaml_filename, 'str')
response = self._send(http_method='GET',
location_id='dbeaf647-6167-421a-bda9-c9327b25e2e6',
version='5.1',
route_values=route_values,
query_parameters=query_parameters)
response_value = self._deserialize('[BuildDefinitionReference]', self._unwrap_collection(response))
continuation_token = self._get_continuation_token(response)
return self.GetDefinitionsResponseValue(response_value, continuation_token)
class GetDefinitionsResponseValue(object):
def __init__(self, value, continuation_token):
"""
Response for the get_definitions method
:param value:
:type value: :class:`<[BuildDefinitionReference]> <azure.devops.v5_1.build.models.[BuildDefinitionReference]>`
:param continuation_token: The continuation token to be used to get the next page of results.
:type continuation_token: str
"""
self.value = value
self.continuation_token = continuation_token
def restore_definition(self, project, definition_id, deleted):
"""RestoreDefinition.
Restores a deleted definition
:param str project: Project ID or project name
:param int definition_id: The identifier of the definition to restore.
:param bool deleted: When false, restores a deleted definition.
:rtype: :class:`<BuildDefinition> <azure.devops.v5_1.build.models.BuildDefinition>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
query_parameters = {}
if deleted is not None:
query_parameters['deleted'] = self._serialize.query('deleted', deleted, 'bool')
response = self._send(http_method='PATCH',
location_id='dbeaf647-6167-421a-bda9-c9327b25e2e6',
version='5.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('BuildDefinition', response)
def update_definition(self, definition, project, definition_id, secrets_source_definition_id=None, secrets_source_definition_revision=None):
"""UpdateDefinition.
Updates an existing definition.
:param :class:`<BuildDefinition> <azure.devops.v5_1.build.models.BuildDefinition>` definition: The new version of the definition.
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
:param int secrets_source_definition_id:
:param int secrets_source_definition_revision:
:rtype: :class:`<BuildDefinition> <azure.devops.v5_1.build.models.BuildDefinition>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
query_parameters = {}
if secrets_source_definition_id is not None:
query_parameters['secretsSourceDefinitionId'] = self._serialize.query('secrets_source_definition_id', secrets_source_definition_id, 'int')
if secrets_source_definition_revision is not None:
query_parameters['secretsSourceDefinitionRevision'] = self._serialize.query('secrets_source_definition_revision', secrets_source_definition_revision, 'int')
content = self._serialize.body(definition, 'BuildDefinition')
response = self._send(http_method='PUT',
location_id='dbeaf647-6167-421a-bda9-c9327b25e2e6',
version='5.1',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('BuildDefinition', response)
def get_build_log(self, project, build_id, log_id, start_line=None, end_line=None, **kwargs):
"""GetBuildLog.
Gets an individual log file for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param int log_id: The ID of the log file.
:param long start_line: The start line.
:param long end_line: The end line.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
if log_id is not None:
route_values['logId'] = self._serialize.url('log_id', log_id, 'int')
query_parameters = {}
if start_line is not None:
query_parameters['startLine'] = self._serialize.query('start_line', start_line, 'long')
if end_line is not None:
query_parameters['endLine'] = self._serialize.query('end_line', end_line, 'long')
response = self._send(http_method='GET',
location_id='35a80daf-7f30-45fc-86e8-6b813d9c90df',
version='5.1',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='text/plain')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_build_log_lines(self, project, build_id, log_id, start_line=None, end_line=None):
"""GetBuildLogLines.
Gets an individual log file for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param int log_id: The ID of the log file.
:param long start_line: The start line.
:param long end_line: The end line.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
if log_id is not None:
route_values['logId'] = self._serialize.url('log_id', log_id, 'int')
query_parameters = {}
if start_line is not None:
query_parameters['startLine'] = self._serialize.query('start_line', start_line, 'long')
if end_line is not None:
query_parameters['endLine'] = self._serialize.query('end_line', end_line, 'long')
response = self._send(http_method='GET',
location_id='35a80daf-7f30-45fc-86e8-6b813d9c90df',
version='5.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[str]', self._unwrap_collection(response))
def get_build_logs(self, project, build_id):
"""GetBuildLogs.
Gets the logs for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:rtype: [BuildLog]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
response = self._send(http_method='GET',
location_id='35a80daf-7f30-45fc-86e8-6b813d9c90df',
version='5.1',
route_values=route_values)
return self._deserialize('[BuildLog]', self._unwrap_collection(response))
def get_build_logs_zip(self, project, build_id, **kwargs):
"""GetBuildLogsZip.
Gets the logs for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
response = self._send(http_method='GET',
location_id='35a80daf-7f30-45fc-86e8-6b813d9c90df',
version='5.1',
route_values=route_values,
accept_media_type='application/zip')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_build_log_zip(self, project, build_id, log_id, start_line=None, end_line=None, **kwargs):
"""GetBuildLogZip.
Gets an individual log file for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param int log_id: The ID of the log file.
:param long start_line: The start line.
:param long end_line: The end line.
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
if log_id is not None:
route_values['logId'] = self._serialize.url('log_id', log_id, 'int')
query_parameters = {}
if start_line is not None:
query_parameters['startLine'] = self._serialize.query('start_line', start_line, 'long')
if end_line is not None:
query_parameters['endLine'] = self._serialize.query('end_line', end_line, 'long')
response = self._send(http_method='GET',
location_id='35a80daf-7f30-45fc-86e8-6b813d9c90df',
version='5.1',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='application/zip')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_build_option_definitions(self, project=None):
"""GetBuildOptionDefinitions.
Gets all build definition options supported by the system.
:param str project: Project ID or project name
:rtype: [BuildOptionDefinition]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='591cb5a4-2d46-4f3a-a697-5cd42b6bd332',
version='5.1',
route_values=route_values)
return self._deserialize('[BuildOptionDefinition]', self._unwrap_collection(response))
def get_definition_revisions(self, project, definition_id):
"""GetDefinitionRevisions.
Gets all revisions of a definition.
:param str project: Project ID or project name
:param int definition_id: The ID of the definition.
:rtype: [BuildDefinitionRevision]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if definition_id is not None:
route_values['definitionId'] = self._serialize.url('definition_id', definition_id, 'int')
response = self._send(http_method='GET',
location_id='7c116775-52e5-453e-8c5d-914d9762d8c4',
version='5.1',
route_values=route_values)
return self._deserialize('[BuildDefinitionRevision]', self._unwrap_collection(response))
def get_build_settings(self, project=None):
"""GetBuildSettings.
Gets the build settings.
:param str project: Project ID or project name
:rtype: :class:`<BuildSettings> <azure.devops.v5_1.build.models.BuildSettings>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='aa8c1c9c-ef8b-474a-b8c4-785c7b191d0d',
version='5.1',
route_values=route_values)
return self._deserialize('BuildSettings', response)
def update_build_settings(self, settings, project=None):
"""UpdateBuildSettings.
Updates the build settings.
:param :class:`<BuildSettings> <azure.devops.v5_1.build.models.BuildSettings>` settings: The new settings.
:param str project: Project ID or project name
:rtype: :class:`<BuildSettings> <azure.devops.v5_1.build.models.BuildSettings>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(settings, 'BuildSettings')
response = self._send(http_method='PATCH',
location_id='aa8c1c9c-ef8b-474a-b8c4-785c7b191d0d',
version='5.1',
route_values=route_values,
content=content)
return self._deserialize('BuildSettings', response)
def add_build_tag(self, project, build_id, tag):
"""AddBuildTag.
Adds a tag to a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param str tag: The tag to add.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
if tag is not None:
route_values['tag'] = self._serialize.url('tag', tag, 'str')
response = self._send(http_method='PUT',
location_id='6e6114b2-8161-44c8-8f6c-c5505782427f',
version='5.1',
route_values=route_values)
return self._deserialize('[str]', self._unwrap_collection(response))
def add_build_tags(self, tags, project, build_id):
"""AddBuildTags.
Adds tags to a build.
:param [str] tags: The tags to add.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
content = self._serialize.body(tags, '[str]')
response = self._send(http_method='POST',
location_id='6e6114b2-8161-44c8-8f6c-c5505782427f',
version='5.1',
route_values=route_values,
content=content)
return self._deserialize('[str]', self._unwrap_collection(response))
def delete_build_tag(self, project, build_id, tag):
"""DeleteBuildTag.
Removes a tag from a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param str tag: The tag to remove.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
if tag is not None:
route_values['tag'] = self._serialize.url('tag', tag, 'str')
response = self._send(http_method='DELETE',
location_id='6e6114b2-8161-44c8-8f6c-c5505782427f',
version='5.1',
route_values=route_values)
return self._deserialize('[str]', self._unwrap_collection(response))
def get_build_tags(self, project, build_id):
"""GetBuildTags.
Gets the tags for a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
response = self._send(http_method='GET',
location_id='6e6114b2-8161-44c8-8f6c-c5505782427f',
version='5.1',
route_values=route_values)
return self._deserialize('[str]', self._unwrap_collection(response))
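    # Example (sketch): a tag round trip built from the methods above.
    # `build_client`, the project name and the build ID are placeholders.
    #
    #   build_client.add_build_tag("MyProject", build_id=42, tag="nightly")
    #   tags = build_client.get_build_tags("MyProject", build_id=42)  # ['nightly', ...]
    #   build_client.delete_build_tag("MyProject", build_id=42, tag="nightly")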
def get_tags(self, project):
"""GetTags.
Gets a list of all build and definition tags in the project.
:param str project: Project ID or project name
:rtype: [str]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='d84ac5c6-edc7-43d5-adc9-1b34be5dea09',
version='5.1',
route_values=route_values)
return self._deserialize('[str]', self._unwrap_collection(response))
def delete_template(self, project, template_id):
"""DeleteTemplate.
Deletes a build definition template.
:param str project: Project ID or project name
:param str template_id: The ID of the template.
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if template_id is not None:
route_values['templateId'] = self._serialize.url('template_id', template_id, 'str')
self._send(http_method='DELETE',
location_id='e884571e-7f92-4d6a-9274-3f5649900835',
version='5.1',
route_values=route_values)
def get_template(self, project, template_id):
"""GetTemplate.
Gets a specific build definition template.
:param str project: Project ID or project name
:param str template_id: The ID of the requested template.
:rtype: :class:`<BuildDefinitionTemplate> <azure.devops.v5_1.build.models.BuildDefinitionTemplate>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if template_id is not None:
route_values['templateId'] = self._serialize.url('template_id', template_id, 'str')
response = self._send(http_method='GET',
location_id='e884571e-7f92-4d6a-9274-3f5649900835',
version='5.1',
route_values=route_values)
return self._deserialize('BuildDefinitionTemplate', response)
def get_templates(self, project):
"""GetTemplates.
Gets all definition templates.
:param str project: Project ID or project name
:rtype: [BuildDefinitionTemplate]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
response = self._send(http_method='GET',
location_id='e884571e-7f92-4d6a-9274-3f5649900835',
version='5.1',
route_values=route_values)
return self._deserialize('[BuildDefinitionTemplate]', self._unwrap_collection(response))
def save_template(self, template, project, template_id):
"""SaveTemplate.
Updates an existing build definition template.
:param :class:`<BuildDefinitionTemplate> <azure.devops.v5_1.build.models.BuildDefinitionTemplate>` template: The new version of the template.
:param str project: Project ID or project name
:param str template_id: The ID of the template.
:rtype: :class:`<BuildDefinitionTemplate> <azure.devops.v5_1.build.models.BuildDefinitionTemplate>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if template_id is not None:
route_values['templateId'] = self._serialize.url('template_id', template_id, 'str')
content = self._serialize.body(template, 'BuildDefinitionTemplate')
response = self._send(http_method='PUT',
location_id='e884571e-7f92-4d6a-9274-3f5649900835',
version='5.1',
route_values=route_values,
content=content)
return self._deserialize('BuildDefinitionTemplate', response)
def get_build_timeline(self, project, build_id, timeline_id=None, change_id=None, plan_id=None):
"""GetBuildTimeline.
Gets details for a build
:param str project: Project ID or project name
:param int build_id:
:param str timeline_id:
:param int change_id:
:param str plan_id:
:rtype: :class:`<Timeline> <azure.devops.v5_1.build.models.Timeline>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
if timeline_id is not None:
route_values['timelineId'] = self._serialize.url('timeline_id', timeline_id, 'str')
query_parameters = {}
if change_id is not None:
query_parameters['changeId'] = self._serialize.query('change_id', change_id, 'int')
if plan_id is not None:
query_parameters['planId'] = self._serialize.query('plan_id', plan_id, 'str')
response = self._send(http_method='GET',
location_id='8baac422-4c6e-4de5-8532-db96d92acffa',
version='5.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('Timeline', response)
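    # Example (sketch): walking the timeline of a finished build. The Timeline
    # model exposes a `records` collection; the record attribute names shown
    # here are illustrative and should be checked against the v5.1 models.
    #
    #   timeline = build_client.get_build_timeline("MyProject", build_id=42)
    #   for record in timeline.records:
    #       print(record.name, record.result, record.start_time)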
def get_build_work_items_refs(self, project, build_id, top=None):
"""GetBuildWorkItemsRefs.
Gets the work items associated with a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param int top: The maximum number of work items to return.
:rtype: [ResourceRef]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
response = self._send(http_method='GET',
location_id='5a21f5d2-5642-47e4-a0bd-1356e6731bee',
version='5.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[ResourceRef]', self._unwrap_collection(response))
def get_build_work_items_refs_from_commits(self, commit_ids, project, build_id, top=None):
"""GetBuildWorkItemsRefsFromCommits.
Gets the work items associated with a build, filtered to specific commits.
:param [str] commit_ids: A comma-delimited list of commit IDs.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param int top: The maximum number of work items to return, or the number of commits to consider if no commit IDs are specified.
:rtype: [ResourceRef]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int')
query_parameters = {}
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
content = self._serialize.body(commit_ids, '[str]')
response = self._send(http_method='POST',
location_id='5a21f5d2-5642-47e4-a0bd-1356e6731bee',
version='5.1',
route_values=route_values,
query_parameters=query_parameters,
content=content)
return self._deserialize('[ResourceRef]', self._unwrap_collection(response))
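    # Example (sketch): how a client like this one is usually obtained through
    # the public `azure-devops` package. The organization URL and personal
    # access token are placeholders.
    #
    #   from azure.devops.connection import Connection
    #   from msrest.authentication import BasicAuthentication
    #
    #   connection = Connection(
    #       base_url="https://dev.azure.com/my-org",
    #       creds=BasicAuthentication("", "<personal-access-token>"))
    #   build_client = connection.clients.get_build_client()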
# === tests/test_train.py (repo: hugobb/sgda, license: MIT) ===
import unittest
from gamesopt.train import train, TrainConfig
class TestOptimizer(unittest.TestCase):
def test_sgda(self):
config = TrainConfig(num_iter=2)
train(config) | 27.142857 | 45 | 0.736842 | 127 | 0.668421 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
1c2ee79c50e5332807a24a1c5c70089c0090c76c | 91 | py | Python | loadCSVdata.py | christostsekouronas/academyposttestanalysis | 913a0c13ad0482927a323b2fb3a97a8e2ca26517 | [
"MIT"
]
| null | null | null | loadCSVdata.py | christostsekouronas/academyposttestanalysis | 913a0c13ad0482927a323b2fb3a97a8e2ca26517 | [
"MIT"
]
| null | null | null | loadCSVdata.py | christostsekouronas/academyposttestanalysis | 913a0c13ad0482927a323b2fb3a97a8e2ca26517 | [
"MIT"
]
| null | null | null | import pandas as pd
def loadTest(filepath):
df = pd.read_csv(filepath)
return df | 13 | 30 | 0.692308 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
1c2f9886c30209c8f8c18348757a2729fc8d5b30 | 1,832 | py | Python | sdk/applicationinsights/azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/v2015_05_01/models/_application_insights_management_client_enums.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
]
| 8 | 2021-01-13T23:44:08.000Z | 2021-03-17T10:13:36.000Z | sdk/applicationinsights/azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/v2015_05_01/models/_application_insights_management_client_enums.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
]
| 226 | 2019-07-24T07:57:21.000Z | 2019-10-15T01:07:24.000Z | sdk/applicationinsights/azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/v2015_05_01/models/_application_insights_management_client_enums.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
]
| 2 | 2020-05-21T22:51:22.000Z | 2020-05-26T20:53:01.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from enum import Enum
class ApplicationType(str, Enum):
web = "web"
other = "other"
class FlowType(str, Enum):
bluefield = "Bluefield"
class RequestSource(str, Enum):
rest = "rest"
class PurgeState(str, Enum):
pending = "pending"
completed = "completed"
class FavoriteType(str, Enum):
shared = "shared"
user = "user"
class WebTestKind(str, Enum):
ping = "ping"
multistep = "multistep"
class ItemScope(str, Enum):
shared = "shared"
user = "user"
class ItemType(str, Enum):
query = "query"
function = "function"
folder = "folder"
recent = "recent"
class SharedTypeKind(str, Enum):
user = "user"
shared = "shared"
class FavoriteSourceType(str, Enum):
retention = "retention"
notebook = "notebook"
sessions = "sessions"
events = "events"
userflows = "userflows"
funnel = "funnel"
impact = "impact"
segmentation = "segmentation"
class ItemScopePath(str, Enum):
analytics_items = "analyticsItems"
myanalytics_items = "myanalyticsItems"
class ItemTypeParameter(str, Enum):
none = "none"
query = "query"
function = "function"
folder = "folder"
recent = "recent"
class CategoryType(str, Enum):
workbook = "workbook"
tsg = "TSG"
performance = "performance"
retention = "retention"
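# Example (sketch): because each class above subclasses both `str` and `Enum`,
# a member compares equal to its literal value and serializes as that string:
#
#   ApplicationType.web == "web"          # True
#   ItemType("query") is ItemType.query   # True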
# === setup.py (repo: dhruvdcoder/allennlp-wandb, license: MIT) ===
from setuptools import setup, find_packages
install_requires = [
"allennlp>=0.9.0",
"wandb==0.8.15",
]
setup(
name='allennlp_wandb',
version='0.0.1',
description='Utilities to use allennlp with wandb',
packages=find_packages(
exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
package_data={'allennlp_wandb': ['py.typed']},
install_requires=install_requires,
zip_safe=False)
# === setup.py (repo: mark-mishyn/django-axes, license: MIT) ===
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name="django-axes",
description="Keep track of failed login attempts in Django-powered sites.",
long_description="\n".join(
[
open("README.rst", encoding="utf-8").read(),
open("CHANGES.rst", encoding="utf-8").read(),
]
),
keywords="authentication django pci security",
author=", ".join(
[
"Josh VanderLinden",
"Philip Neustrom",
"Michael Blume",
"Alex Clark",
"Camilo Nova",
"Aleksi Hakli",
]
),
author_email="[email protected]",
maintainer="Jazzband",
maintainer_email="[email protected]",
url="https://github.com/jazzband/django-axes",
project_urls={
"Documentation": "https://django-axes.readthedocs.io/",
"Source": "https://github.com/jazzband/django-axes",
"Tracker": "https://github.com/jazzband/django-axes/issues",
},
license="MIT",
package_dir={"axes": "axes"},
use_scm_version=True,
setup_requires=["setuptools_scm"],
python_requires="~=3.6",
install_requires=["django>=1.11", "django-appconf>=1.0.3", "django-ipware>=2.0.2"],
include_package_data=True,
packages=find_packages(),
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Environment :: Plugins",
"Framework :: Django",
"Framework :: Django :: 1.11",
"Framework :: Django :: 2.2",
"Framework :: Django :: 3.0",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Internet :: Log Analysis",
"Topic :: Security",
"Topic :: System :: Logging",
],
zip_safe=False,
)
# === achievements/admin.py (repo: peterkrauz/rpg-achievements-django, license: Apache-2.0) ===
from django.contrib import admin
from achievements import models
admin.site.register(models.Achievement)
# === src/modules/loss.py (repo: ab3llini/BlindLess, license: MIT) ===
from torch.nn import CrossEntropyLoss
class GPT2Loss(CrossEntropyLoss):
def __init__(self, pad_token_id):
super(GPT2Loss, self).__init__(ignore_index=pad_token_id)
def forward(self, output, labels):
"""
Loss function for gpt2
:param output:
:param labels:
:return:
"""
# Flatten the tensors (shift-align)
# Remove last token from output
output = output[..., :-1, :].contiguous().view(-1, output.size(-1))
# Remove the first token from labels e do not care for question
labels = (labels[..., 1:].contiguous()).view(-1)
# Compute the actual loss
return super(GPT2Loss, self).forward(output, labels)
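# Example (sketch): the shapes involved in the shift-alignment above, assuming
# a batch of 2 sequences of length 16 and a vocabulary of 50257 tokens (all
# names and sizes are illustrative):
#
#   loss_fn = GPT2Loss(pad_token_id=tokenizer.pad_token_id)
#   logits = model(input_ids)[0]        # (2, 16, 50257)
#   loss = loss_fn(logits, input_ids)   # position t is scored against token t + 1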
class VisualGPT2Loss(GPT2Loss):
def __init__(self, pad_token_id, extract=None):
super(VisualGPT2Loss, self).__init__(pad_token_id=pad_token_id)
if extract is not None:
assert type(extract) == int, 'Extract value MUST be integer'
self.extract = extract
def forward(self, output, labels):
if self.extract is not None:
output = output[self.extract]
# Compute the actual loss
return super(VisualGPT2Loss, self).forward(output, labels[0])
class BERTLoss(CrossEntropyLoss):
def __init__(self, pad_token_id):
super(BERTLoss, self).__init__(ignore_index=pad_token_id)
def forward(self, output, labels):
"""
        Loss function for BERT
:param output:
:param labels:
:return:
"""
# Flatten the tensors (shift-align)
# Remove last token from output
output = output[..., :-1, :].contiguous().view(-1, output.size(-1))
# Remove the first token from labels e do not care for question
labels = (labels[..., 1:].contiguous()).view(-1)
# Compute the actual loss
return super(BERTLoss, self).forward(output, labels)
# === analyze_tls.py (repo: khushhallchandra/CN-project, license: MIT) ===
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
def main(filename):
data = pd.read_csv(filename, header=None)
means = data.mean(axis = 0)
stds = data.std(axis = 0)
return means[0], means[1], stds[0], stds[1]
if __name__ == '__main__':
files_http1 = ['./results/benchmark_size/http1_txt1.csv', './results/benchmark_size/http1_txt2.csv', './results/benchmark_size/http1_txt3.csv', './results/benchmark_size/http1_txt4.csv', './results/benchmark_size/http1_txt5.csv']
files_http1_tls = ['./results/benchmark_size/http1_tls_txt1.csv', './results/benchmark_size/http1_tls_txt2.csv', './results/benchmark_size/http1_tls_txt3.csv', './results/benchmark_size/http1_tls_txt4.csv', './results/benchmark_size/http1_tls_txt5.csv']
files_http2 = ['./results/benchmark_size/http2_txt1.csv', './results/benchmark_size/http2_txt2.csv', './results/benchmark_size/http2_txt3.csv', './results/benchmark_size/http2_txt4.csv', './results/benchmark_size/http2_txt5.csv']
files_http2_tls = ['./results/benchmark_size/http2_tls_txt1.csv', './results/benchmark_size/http2_tls_txt2.csv', './results/benchmark_size/http2_tls_txt3.csv', './results/benchmark_size/http2_tls_txt4.csv', './results/benchmark_size/http2_tls_txt5.csv']
time_tot_http2, time_contentTransfer_http2 = [], []
std_tot_http2, std_contentTransfer_http2 = [], []
time_tot_http1, time_contentTransfer_http1 = [], []
std_tot_http1, std_contentTransfer_http1 = [], []
time_tot_http2_tls, time_contentTransfer_http2_tls = [], []
std_tot_http2_tls, std_contentTransfer_http2_tls = [], []
time_tot_http1_tls, time_contentTransfer_http1_tls = [], []
std_tot_http1_tls, std_contentTransfer_http1_tls = [], []
for f in files_http2:
t1, t2, std1, std2 = main(f)
time_contentTransfer_http2.append(t1)
time_tot_http2.append(t2)
std_contentTransfer_http2.append(2*std1)
std_tot_http2.append(2*std2)
for f in files_http1:
t1, t2, std1, std2 = main(f)
time_contentTransfer_http1.append(t1)
time_tot_http1.append(t2)
std_contentTransfer_http1.append(2*std1)
std_tot_http1.append(2*std2)
for f in files_http2_tls:
t1, t2, std1, std2 = main(f)
time_contentTransfer_http2_tls.append(t1)
time_tot_http2_tls.append(t2)
std_contentTransfer_http2_tls.append(2*std1)
std_tot_http2_tls.append(2*std2)
for f in files_http1_tls:
t1, t2, std1, std2 = main(f)
time_contentTransfer_http1_tls.append(t1)
time_tot_http1_tls.append(t2)
std_contentTransfer_http1_tls.append(2*std1)
std_tot_http1_tls.append(2*std2)
x = [100, 1000, 10000, 100000, 1000000]
time_tot_http2, time_contentTransfer_http2 = np.array(time_tot_http2), np.array(time_contentTransfer_http2)
std_tot_http2, std_contentTransfer_http2 = np.array(std_tot_http2), np.array(std_contentTransfer_http2)
time_tot_http1, time_contentTransfer_http1 = np.array(time_tot_http1), np.array(time_contentTransfer_http1)
std_tot_http1, std_contentTransfer_http1 = np.array(std_tot_http1), np.array(std_contentTransfer_http1)
time_tot_http2_tls, time_contentTransfer_http2_tls = np.array(time_tot_http2_tls), np.array(time_contentTransfer_http2_tls)
std_tot_http2_tls, std_contentTransfer_http2_tls = np.array(std_tot_http2_tls), np.array(std_contentTransfer_http2_tls)
time_tot_http1_tls, time_contentTransfer_http1_tls = np.array(time_tot_http1_tls), np.array(time_contentTransfer_http1_tls)
std_tot_http1_tls, std_contentTransfer_http1_tls = np.array(std_tot_http1_tls), np.array(std_contentTransfer_http1_tls)
fig, ax = plt.subplots()
ax.grid()
ax.plot(x, time_contentTransfer_http1, 'o-', color='r', label="HTTP1")
ax.plot(x, time_contentTransfer_http1_tls, 'o-', color='g', label="HTTP1_with_tls")
ax.plot(x, time_contentTransfer_http2, 'o-', color='b', label="SPDY")
ax.plot(x, time_contentTransfer_http2_tls, 'o-', color='k', label="SPDY_with_tls")
ax.fill_between(x, time_contentTransfer_http1 - std_contentTransfer_http1, time_contentTransfer_http1 + std_contentTransfer_http1, color='gray', alpha=0.3)
ax.fill_between(x, time_contentTransfer_http2 - std_contentTransfer_http2, time_contentTransfer_http2 + std_contentTransfer_http2, color='gray', alpha=0.3)
ax.fill_between(x, time_contentTransfer_http1_tls - std_contentTransfer_http1_tls, time_contentTransfer_http1_tls + std_contentTransfer_http1_tls, color='gray', alpha=0.3)
ax.fill_between(x, time_contentTransfer_http2_tls - std_contentTransfer_http2_tls, time_contentTransfer_http2_tls + std_contentTransfer_http2_tls, color='gray', alpha=0.3)
# ax.errorbar(x, time_contentTransfer_http2, yerr=std_contentTransfer_http2, fmt='-', color='r', label="HTTP2")
# ax.errorbar(x, time_contentTransfer_quic, yerr=std_contentTransfer_quic, fmt='-', color='b', label="QUIC")
ax.set_xlabel('Size of data (Length)')
ax.set_ylabel('Time (in ms)')
ax.legend()
ax.set_xscale('log')
ax.set_title('Comparison of Time Taken for Data Transfer with TLS ON/OFF')
fig.savefig('results/plots/time_contentTransfer_tls.png', dpi=fig.dpi)
fig, ax = plt.subplots()
ax.grid()
ax.plot(x, time_tot_http1, 'o-', color='r', label="HTTP1")
ax.plot(x, time_tot_http1_tls, 'o-', color='g', label="HTTP1_with_tls")
ax.plot(x, time_tot_http2, 'o-', color='b', label="SPDY")
ax.plot(x, time_tot_http2_tls, 'o-', color='k', label="SPDY_with_tls")
ax.fill_between(x, time_tot_http1 - std_tot_http1, time_tot_http1 + std_tot_http1, color='gray', alpha=0.3)
ax.fill_between(x, time_tot_http2 - std_tot_http2, time_tot_http2 + std_tot_http2, color='gray', alpha=0.3)
ax.fill_between(x, time_tot_http1_tls - std_tot_http1_tls, time_tot_http1_tls + std_tot_http1_tls, color='gray', alpha=0.3)
ax.fill_between(x, time_tot_http2_tls - std_tot_http2_tls, time_tot_http2_tls + std_tot_http2_tls, color='gray', alpha=0.3)
# ax.errorbar(x, time_tot_http2, yerr=std_tot_http2, fmt='-', color='r', label="HTTP2")
# ax.errorbar(x, time_tot_quic, yerr=std_tot_quic, fmt='-', color='b', label="QUIC")
ax.set_xlabel('Size of data (Length)')
ax.set_ylabel('Time (in ms)')
ax.legend()
ax.set_xscale('log')
ax.set_title('Comparison of Total Time with TLS ON/OFF')
    fig.savefig('results/plots/total_time_tls.png', dpi=fig.dpi)
# === validation/utils/m1.py (repo: PedrV/stfX, license: MIT) ===
import unittest
import os
from matplotlib import pyplot as plt
from shapely import geometry, affinity
X_COORDINATE = 0
Y_COORDINATE = 1
def extract_x_y(polygon: list) -> (list, list):
"""Extract the x and y coordinates as two separate lists"""
x_list = []
y_list = []
for vertex in polygon:
x_list.append(vertex[X_COORDINATE])
y_list.append(vertex[Y_COORDINATE])
return (x_list, y_list)
def save_fig(dir: str):
"""Save the current plt figure in the given directory under the name: m1.png"""
plt.savefig(dir + '/m1.png')
plt.clf()
def plot_polygons(hull: list, min_hull: list, perceived_poly: list, real_poly: list, dir: str = None):
"""Plot the given two polygons, in a single figure, with different colors"""
h1_x, h1_y = extract_x_y(hull)
h2_x, h2_y = extract_x_y(min_hull)
p1_x, p1_y = extract_x_y(perceived_poly)
p2_x, p2_y = extract_x_y(real_poly)
# Figure settings
fig = plt.figure()
# fig.suptitle('Convex hull area (red) VS real representation area (blue)')
plt.xlabel('x')
plt.ylabel('y')
# Plotting hulls
plt.fill(h1_x, h1_y, color="#FF000020")
plt.fill(h2_x, h2_y, color="#0000FF20")
# Plotting polygons lines
plt.plot(p1_x, p1_y, color="#FF000060") # Red perceived poly
plt.plot(p2_x, p2_y, color="#0000FF60") # Blue real poly
# Plotting polygons points
for p in perceived_poly:
plt.plot(p[X_COORDINATE], p[Y_COORDINATE], 'o', color="#FF0000A0")
for p in real_poly:
plt.plot(p[X_COORDINATE], p[Y_COORDINATE], 'x', color="#0000FFA0")
# plt.show()
if dir is not None:
save_fig(dir)
def surveyor_formula(polygon: list) -> float:
"""Find the area of the given polygon using the surveyor formula"""
# Check if first and last points of polygon are equal
parsed_poly = polygon[0:-1]\
if polygon[0] == polygon[len(polygon)-1]\
else polygon
area = 0
for i in range(-1, len(parsed_poly)-1):
area += parsed_poly[i][X_COORDINATE] * parsed_poly[i+1][Y_COORDINATE] -\
parsed_poly[i][Y_COORDINATE] * parsed_poly[i+1][X_COORDINATE]
return abs(area / 2)
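# Worked example (sketch): for the unit square [(0, 0), (1, 0), (1, 1), (0, 1)]
# the loop above accumulates 0 + 0 + 1 + 1 = 2, so the function returns
# abs(2 / 2) = 1.0, the expected area:
#
#   surveyor_formula([(0, 0), (1, 0), (1, 1), (0, 1)])  # 1.0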
def polygon_to_vertices_list(polygon: geometry.Polygon) -> list:
"""Extract the polygon vertices as a list"""
return list(polygon.exterior.coords)
def apply_transformations(initial_representation: list, events: list) -> float:
"""Apply the transformations in the events list to the initial representation"""
scale = 1
rot_angle = 0
trans_vector = [0, 0]
for item in events:
for event in item["events"]:
if event["type"] == "TRANSLATION":
trans_vector[X_COORDINATE] += event["trigger"]["transformation"][X_COORDINATE]
trans_vector[Y_COORDINATE] += event["trigger"]["transformation"][Y_COORDINATE]
elif event["type"] == "ROTATION":
rot_angle += event["trigger"]["transformation"]
elif event["type"] == "UNIFORM_SCALE":
scale *= event["trigger"]["transformation"]
# Apply multiplication
polygon = geometry.Polygon(initial_representation)
s_polygon = affinity.scale(polygon,
xfact=scale,
yfact=scale,
origin=(0, 0))
r_s_polygon = affinity.rotate(s_polygon,
rot_angle,
origin=(0, 0))
t_r_s_polygon = affinity.translate(r_s_polygon,
xoff=trans_vector[0],
yoff=trans_vector[1])
return polygon_to_vertices_list(t_r_s_polygon)
def apply_m1(real_representation: list, perceived_representation: list, dir: str = None) -> float:
"""Apply the metric M1 and obtain its result, between 0 and 1"""
joint_point_set = real_representation + perceived_representation
# Getting necessary hulls
real_convex_hull = geometry.MultiPoint(real_representation).convex_hull
perceived_hull = geometry.MultiPoint(perceived_representation).convex_hull
convex_hull = geometry.MultiPoint(joint_point_set).convex_hull
# Getting vertices of hulls
real_vertices = polygon_to_vertices_list(real_convex_hull)
perceived_vertices = polygon_to_vertices_list(perceived_hull)
joint_vertices = polygon_to_vertices_list(convex_hull)
# Getting the min area
real_area = surveyor_formula(real_vertices)
perceived_area = surveyor_formula(perceived_vertices)
if real_area <= perceived_area:
min_area = real_area
min_vertices = real_vertices
else:
min_area = perceived_area
min_vertices = perceived_vertices
plot_polygons(hull=joint_vertices,
min_hull=min_vertices,
perceived_poly=perceived_representation,
real_poly=real_representation,
dir=dir)
return min_area / surveyor_formula(joint_vertices)
class TestM1(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestM1, self).__init__(*args, **kwargs)
self.representation = [
[1, 1],
[1, -1],
[-1, -1],
[-1, 1],
[1, 1]
]
self.transformations = [{
"events": [
{"type": "TRANSLATION", "trigger": {"transformation": [5, 5]}},
{"type": "ROTATION", "trigger": {"transformation": 180}},
{"type": "UNIFORM_SCALE", "trigger": {"transformation": 1.25}}
]
}, {
"events": [
{"type": "TRANSLATION", "trigger": {"transformation": [5, 0]}},
{"type": "ROTATION", "trigger": {"transformation": -90}},
{"type": "UNIFORM_SCALE", "trigger": {"transformation": 1.6}}
]
}]
self.min_scale = [{
"events": [
{"type": "UNIFORM_SCALE", "trigger": {"transformation": 0.5}}
]
}]
def test_area(self):
square = [
[1, 1],
[1, -1],
[-1, -1],
[-1, 1]
]
self.assertEqual(surveyor_formula(square), 4)
self.assertEqual(surveyor_formula(self.representation), 4)
def test_transformations(self):
self.assertEqual(apply_transformations(self.representation, self.transformations), [
(8.0, 7.0),
(12.0, 7.0),
(12.0, 3.0),
(8.0, 3.0),
(8.0, 7.0),
])
def test_M1(self):
self.assertEqual(apply_m1(self.representation, self.representation), 1)
self.assertTrue(apply_m1(self.representation,
apply_transformations(self.representation, self.transformations))
< 0.1)
self.assertEqual(apply_m1([
(8.0, 7.0),
(12.0, 7.0),
(12.0, 3.0),
(8.0, 3.0),
(8.0, 7.0)],
apply_transformations(self.representation, self.transformations)),
1)
def test_mean_perceived(self):
self.assertEqual(apply_m1(self.representation,
apply_transformations(self.representation, self.min_scale)),
0.25)
if __name__ == '__main__':
unittest.main()
# === movefiles.py (repo: linhailan/JPG-PNG-to-MNIST-NN-Format, license: Apache-2.0) ===
import os
from PIL import Image
from array import *
from random import shuffle
import shutil
def move_file(src_path, dst_path, file):
print("from : ",src_path)
print("to : ",dst_path)
try:
# cmd = 'chmod -R +x ' + src_path
# os.popen(cmd)
f_src = os.path.join(src_path, file)
if not os.path.exists(dst_path):
os.mkdir(dst_path)
f_dst = os.path.join(dst_path, file)
shutil.move(f_src, f_dst)
except Exception as e:
print("move file ERROR: ",e)
# Load from and save to
def loadfile(Names):
FileList = []
for dirname in os.listdir(Names[0][0]):
path = os.path.join(Names[0][0], dirname)
print(path)
i = 0
for filename in os.listdir(path):
if i >= 50:
break
if filename.endswith(".jpg"):
print(i,":",filename)
src_path = os.path.join(Names[0][0],dirname)
dst_path = os.path.join(Names[1][0],dirname)
move_file(src_path,dst_path,filename)
i += 1
Names = [['./training-images','train'], ['./test-images','test']]
for name in Names:
FileList = []
for dirname in os.listdir(name[0]):
path = os.path.join(name[0],dirname)
print(path,":",len(os.listdir(path)))
# === spark_work.py (repo: nszceta/spark-python-celery-demo, license: MIT) ===
import sys
from pyspark import SparkContext
import json
print('spark got python path -> ' + str(sys.executable))
logfile = sys.argv[1]
sc = SparkContext()
logdata = sc.textFile(logfile).cache()
a_count = logdata.filter(lambda s: 'a' in s).count()
b_count = logdata.filter(lambda s: 'b' in s).count()
print(json.dumps({'a': a_count, 'b': b_count}))
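# Example (sketch): the script reads the log file path from its only argument,
# so it would typically be launched through spark-submit, for instance:
#
#   spark-submit spark_work.py /path/to/logfile.txt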
# === nodes/List/GetTaskRenderListIndex.py (repo: atticus-lv/RenderNode, license: Apache-2.0) ===
import bpy
from bpy.props import *
from ...nodes.BASE.node_base import RenderNodeBase
class RenderNodeGetListIndex(RenderNodeBase):
"""A simple input node"""
bl_idname = 'RenderNodeGetListIndex'
bl_label = 'Get List Index'
def init(self, context):
self.create_output('RenderNodeSocketInt', "index", 'Index')
def process(self,context,id,path):
node = self.id_data.nodes.get(bpy.context.window_manager.rsn_active_list)
if not node or node.bl_idname != 'RenderNodeTaskRenderListNode': return
self.outputs[0].set_value(node.active_index)
def register():
bpy.utils.register_class(RenderNodeGetListIndex)
def unregister():
bpy.utils.unregister_class(RenderNodeGetListIndex)
# === src/recognizeDigit.py (repo: RsTaK/Sudoku, license: MIT) ===
from keras.models import load_model
import cv2
import pickle
import keras.backend as K
import numpy as np
from src.model_path import MODEL_PATH
'''def predict(self, cell):
model = load_model('./model/Model.h5')
f = K.function([model.layers[0].input, K.learning_phase()],[model.layers[-1].output])
rescaled_cell = self.rescale(cell)
result = []
for _ in range(10):
result.append(f([rescaled_cell, 1]))
result = np.array(result)
prediction = result.mean(axis=0)
uncertainty = result.var(axis=0)
if uncertainty.argmax() > 3:
new_prediction = 0
print(prediction.argmax(),uncertainty.argmax(),new_prediction)
else:
print(prediction.argmax(),uncertainty.argmax())'''
class recognizeDigit:
def __init__(self, cell):
self._prediction = self.predict(cell)
def predict(self, cell):
model = load_model(MODEL_PATH)
rescaled_cell = self.rescale(cell)
pred = model.predict(rescaled_cell)
return pred.argmax()
def rescale(self, cell):
resized_cell = cv2.resize(cell, (28, 28))
return resized_cell.reshape(1, resized_cell.shape[0], resized_cell.shape[1], 1)
@property
def prediction(self):
        return self._prediction
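# Example (sketch): classifying one cell image cropped from a Sudoku grid.
# The file name is a placeholder; the cell is resized to 28x28 inside rescale().
#
#   cell_img = cv2.imread("cell.png", cv2.IMREAD_GRAYSCALE)
#   digit = recognizeDigit(cell_img).prediction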
# === plots.py (repo: olihawkins/penguin-models, license: BSD-3-Clause) ===
# -*- coding: utf-8 -*-
"""A module for plotting penguins data for modelling with scikit-learn."""
# Imports ---------------------------------------------------------------------
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
# Constants -------------------------------------------------------------------
SPECIES_COLORS = {
'Adelie': '#4daf4a',
'Gentoo': '#ffb000',
'Chinstrap': '#0084f7'
}
X_AXIS = [30, 60]
Y_AXIS = [12, 22]
# Set style -------------------------------------------------------------------
# Load the style from a file
plt.style.use('./style/eda.mplstyle')
# Alternatively, load the style from the library in ~/.matplotlib/stylelib
# plt.style.use(['eda'])
# Functions -------------------------------------------------------------------
def get_contour_data(model, pipeline, n_points=1000):
"""Create the data used to show the boundary of the decision function."""
x0s = np.linspace(X_AXIS[0], X_AXIS[1], n_points)
x1s = np.linspace(Y_AXIS[0], Y_AXIS[1], n_points)
x0, x1 = np.meshgrid(x0s, x1s)
X = np.c_[x0.ravel(), x1.ravel()]
df_X = pd.DataFrame(X, columns=['bill_length_mm', 'bill_depth_mm'])
X = pipeline.transform(df_X)
y_pred = model.predict(X).reshape(x0.shape)
y_decision = model.decision_function(X).reshape(x0.shape)
return x0, x1, y_pred, y_decision
def get_target_colors(target):
"""Create a dictionary of colors to use in binary classification plots."""
return {
target : '#984ea3',
'Other': '#ff7f00'
}
# Plots -----------------------------------------------------------------------
def plot_example():
plt.style.reload_library()
plt.style.use(['eda'])
fig, ax = plt.subplots()
ax.set_title('Some random words of the title')
ax.scatter(np.random.normal(0,1,10), np.random.normal(0,1,10))
fig.savefig('plots/test.svg', format='svg')
fig.savefig('plots/test.png', format='png')
plt.close()
def plot_target_by_features(df):
"""Plot the different target species."""
fig, ax = plt.subplots()
ax.set_title(
label='Palmer penguins by species and bill characteristics',
loc='center')
ax.get_xaxis().set_major_formatter(
mpl.ticker.FormatStrFormatter('%.0f'))
ax.set_xlim(X_AXIS[0], X_AXIS[1])
ax.set_xlabel('Bill length (mm)')
ax.get_yaxis().set_major_formatter(
mpl.ticker.FormatStrFormatter('%.0f'))
ax.set_ylim(Y_AXIS[0], Y_AXIS[1])
ax.set_ylabel('Bill depth (mm)')
grouped = df.groupby('species')
for key, group in grouped:
ax.scatter(
group['bill_length_mm'],
group['bill_depth_mm'],
c=SPECIES_COLORS[key],
s=40,
label=key,
alpha=0.55)
ax.legend(loc='lower left', handletextpad=0.2)
fig.savefig('plots/target-by-features.png', format='png')
plt.close()
def plot_model(df, model, pipeline, f_score, target, title, filename):
"""Plot the results of a binary classification model."""
fig, ax = plt.subplots()
ax.set_title(title, loc='center')
ax.get_xaxis().set_major_formatter(
mpl.ticker.FormatStrFormatter('%.0f'))
ax.set_xlim(X_AXIS[0], X_AXIS[1])
ax.set_xlabel('Bill length (mm)')
ax.get_yaxis().set_major_formatter(
mpl.ticker.FormatStrFormatter('%.0f'))
ax.set_ylim(Y_AXIS[0], Y_AXIS[1])
ax.set_ylabel('Bill depth (mm)')
# Plot the boundary of the decision function
x0, x1, y_pred, y_decision = get_contour_data(model, pipeline)
ax.contourf(x0, x1, y_pred, cmap=plt.cm.PuOr, alpha=0.2)
# This plots the decision score, if needed
# ax.contourf(x0, x1, y_decision, cmap=plt.cm.PuOr, alpha=0.1)
df = df.copy()
df['species'] = df['target'].apply(lambda t: target if t == 1 else 'Other')
colors = get_target_colors(target)
grouped = df.groupby('species')
for key, group in grouped:
ax.scatter(
group['bill_length_mm'],
group['bill_depth_mm'],
c=colors[key],
s=40,
label=key,
alpha=0.55)
ax.legend(loc='lower left', handletextpad=0.2)
bbox_style = {
'boxstyle': 'round',
'facecolor': '#ffffff',
'edgecolor': '#d4d4d4',
'alpha': 0.8
}
ax.text(53, 12.415, '$F_1$ score: {0}'.format(f_score), bbox=bbox_style)
fig.savefig('plots/{0}.png'.format(filename), format='png')
    plt.close()
# === dojo/db_migrations/0147_rename_sslyze_parser.py (repo: dant24/django-DefectDojo, license: BSD-3-Clause) ===
from django.db import migrations
def rename_sslyze_parser(apps, schema_editor):
Test_Type_model = apps.get_model('dojo', 'Test_Type')
try:
test_type_sslyze = Test_Type_model.objects.get(name='SSLyze 3 Scan (JSON)')
test_type_sslyze.name = 'SSLyze Scan (JSON)'
test_type_sslyze.save()
except Test_Type_model.DoesNotExist:
# This happens when a new instance of DD is initialized
pass
class Migration(migrations.Migration):
dependencies = [
('dojo', '0146_lead_optional'),
]
operations = [
migrations.RunPython(rename_sslyze_parser),
]
# === server/forestgame/game/test_world.py (repo: Nick-Pearson/forestgame, license: 0BSD) ===
import unittest
from forestgame.game.world import World
class WorldTest(unittest.TestCase):
def test_world_inits_to_empty_data(self):
world = World(None, "1", "0", 0, 0, [], [])
self.assertEqual(0, world.get_size_x())
self.assertEqual(0, world.get_size_y())
self.assertEqual([], world.get_tile_data())
def test_world_with_tiles_inits__with_tiles_to_empty_data(self):
world = World(None, "1", "0", 3, 3, [(1, 1, 0)], [])
expected_tile_data = [
[1, 1, 1],
[1, 0, 1],
[1, 1, 1],
]
self.assertEqual(expected_tile_data, world.get_tile_data())
self.assertEqual(3, world.get_size_x())
self.assertEqual(3, world.get_size_y())
def test_set_size_from_zero_initialsies_from_forest(self):
world = World(None, "1", "0", 0, 0, [], [])
world.set_size(3, 3)
expected_tile_data = [
[1, 1, 1],
[1, 1, 1],
[1, 1, 1],
]
self.assertEqual(expected_tile_data, world.get_tile_data())
self.assertEqual(3, world.get_size_x())
self.assertEqual(3, world.get_size_y())
def test_set_size_with_larger_x_y_pads_with_forest(self):
world = World(None, "1", "0", 0, 0, [], [])
world.set_size(2, 2)
world.set_size(3, 3)
expected_tile_data = [
[1, 1, 1],
[1, 1, 1],
[1, 1, 1],
]
self.assertEqual(expected_tile_data, world.get_tile_data())
self.assertEqual(3, world.get_size_x())
self.assertEqual(3, world.get_size_y())
def test_set_size_with_larger_x_pads_with_forest(self):
world = World(None, "1", "0", 0, 0, [], [])
world.set_size(2, 3)
world.set_size(3, 3)
expected_tile_data = [
[1, 1, 1],
[1, 1, 1],
[1, 1, 1],
]
self.assertEqual(expected_tile_data, world.get_tile_data())
self.assertEqual(3, world.get_size_x())
self.assertEqual(3, world.get_size_y())
def test_set_size_with_larger_y_pads_with_forest(self):
world = World(None, "1", "0", 0, 0, [], [])
world.set_size(3, 2)
world.set_size(3, 3)
expected_tile_data = [
[1, 1, 1],
[1, 1, 1],
[1, 1, 1],
]
self.assertEqual(expected_tile_data, world.get_tile_data())
self.assertEqual(3, world.get_size_x())
self.assertEqual(3, world.get_size_y())
def test_set_size_with_smaller_x_y_removes_data(self):
world = World(None, "1", "0", 0, 0, [], [])
world.set_size(3, 3)
world.set_size(2, 2)
expected_tile_data = [
[1, 1],
[1, 1],
]
self.assertEqual(expected_tile_data, world.get_tile_data())
self.assertEqual(2, world.get_size_x())
self.assertEqual(2, world.get_size_y())
def test_set_size_with_smaller_x_removes_data(self):
world = World(None, "1", "0", 0, 0, [], [])
world.set_size(3, 3)
world.set_size(2, 3)
expected_tile_data = [
[1, 1],
[1, 1],
[1, 1],
]
self.assertEqual(expected_tile_data, world.get_tile_data())
self.assertEqual(2, world.get_size_x())
self.assertEqual(3, world.get_size_y())
def test_set_size_with_smaller_y_removes_data(self):
world = World(None, "1", "0", 0, 0, [], [])
world.set_size(3, 3)
world.set_size(3, 2)
expected_tile_data = [
[1, 1, 1],
[1, 1, 1],
]
self.assertEqual(expected_tile_data, world.get_tile_data())
self.assertEqual(3, world.get_size_x())
self.assertEqual(2, world.get_size_y())
def test_set_size_with_same_x_y_does_nothing(self):
world = World(None, "1", "0", 0, 0, [], [])
world.set_size(3, 3)
world.set_size(3, 3)
expected_tile_data = [
[1, 1, 1],
[1, 1, 1],
[1, 1, 1],
]
self.assertEqual(expected_tile_data, world.get_tile_data())
self.assertEqual(3, world.get_size_x())
self.assertEqual(3, world.get_size_y())
# set tile range checks
def test_set_tile_changes_tile_data(self):
world = World(None, "1", "0", 0, 0, [], [])
world.set_size(5, 5)
world.set_tile_at(2, 3, 0)
self.assertEqual(0, world.get_tile_at(2, 3))
expected_tile_data = [
[1, 1, 1, 1, 1],
[1, 1, 1, 1, 1],
[1, 1, 1, 1, 1],
[1, 1, 0, 1, 1],
[1, 1, 1, 1, 1]
]
self.assertEqual(expected_tile_data, world.get_tile_data())
# === invconv/xlsx.py (repo: TechPowerAwaits/ax-toolkit, license: 0BSD) ===
# Copyright 2021 Richard Johnston <[email protected]>
# SPDX-license-identifier: 0BSD
import string
from loguru import logger
try:
import cell_pos
from exceptions import InvconvMissingHeaders
import ftype
import msg_handler
except ModuleNotFoundError:
import invconv.cell_pos as cell_pos
from invconv.exceptions import InvconvMissingHeaders
import invconv.ftype as ftype
import invconv.msg_handler as msg_handler
used = True
try:
from openpyxl import load_workbook
except ModuleNotFoundError:
used = False
# load_workbook is used repeatedly with similar settings
# every time.
WB_SETTINGS = {
"read_only": True,
"keep_vba": False,
"data_only": True,
"keep_links": False,
}
class XlsxDataTuple(ftype.BasicFtypeDataClass):
def __init__(self, filename, wsname, headers):
self.filename = filename
self.wsname = wsname
self.headers = headers
self.cur_row = None
self.cur_col = None
super().__init__(
filename=self.filename, sectionname=self.wsname, headers=self.headers
)
# Set relevant values and gets the number of operations
# to be performed based on the dimensions.
def set_oper_num(self, min_row, max_row, max_col):
self.min_row = min_row
self.min_col = 1
self.max_row = max_row
self.max_col = max_col
delta_col = self.max_col - self.min_col + 1
delta_row = self.max_row - self.min_row + 1
self.num_oper = delta_col * delta_row
return self.num_oper
def load_workbook(self):
return load_workbook(self.filename, **WB_SETTINGS)
def parser(self):
if self.cur_row is None:
self.cur_row = self.min_row
if self.cur_col is None:
self.cur_col = self.min_col
if self.cur_col > self.max_col:
self.cur_col = self.min_col
self.cur_row += 1
if self.cur_row > self.max_row:
self.cur_row = None
self.cur_col = None
return None
col_letter = cell_pos.get_col_letter(self.cur_col)
row_str = str(self.cur_row)
wb = self.load_workbook()
ws = wb[self.wsname]
cell_val = ws[col_letter + row_str].value
return_str = str(cell_val)
if cell_val is None:
return_str = ""
if return_str == "#REF!":
logger.warning(
string.Template(
'Unknown reference found at $cell_pos in $id. Defaulting to "unknown".'
).substitute(
cell_pos=col_letter + row_str,
id=msg_handler.get_id((self.filename, self.wsname), "WS"),
)
)
return_str = "unknown"
self.cur_col += 1
wb.close()
return return_str
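    # Example (sketch): parser() yields one cell value per call, left to right
    # and then top to bottom, and returns None once the range set through
    # set_oper_num() is exhausted. The file name, header list and handle()
    # below are placeholders.
    #
    #   data = XlsxDataTuple("inventory.xlsx", "Sheet1", headers)
    #   data.set_oper_num(min_row=2, max_row=10, max_col=len(headers))
    #   cell = data.parser()
    #   while cell is not None:
    #       handle(cell)
    #       cell = data.parser()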
# Will store a file, worksheet tuple-like class
# with additional data accessible.
xlsx_data_list = ftype.FtypeDataList()
# Contains just a list of file, worksheet tuples.
xlsx_tuple_list = []
# xlsx files always start counting at 1.
INVALID_ROW = 0
def start(input_files):
# Gets the name of worksheets and
# adds it to xlsx_tuple_list.
get_worksheets(input_files)
# Sometimes, openpyxl can't get
# the proper dimensions of a worksheet,
# so it handles that. It also deals with
# headers in the worksheets and removes
# blank cells from the size of the sheet.
set_data()
# Check if some file worksheet pairs don't
# have a valid header.
if not xlsx_data_list:
raise InvconvMissingHeaders
# Can't directly check for membership of
# items from xlsx_tuple_list in xlsx_data_list,
# for they are different types.
for file_section in xlsx_tuple_list:
found_file_section = False
for data_file_section in xlsx_data_list:
# The first element in if statement
# has to be XlsxDataTuple, as it
# contains a __eq__() function
# that should work in this case.
if data_file_section == file_section:
found_file_section = True
break
if not found_file_section:
logger.error(
f"{msg_handler.get_id(file_section, 'ws')} contains no valid headers."
)
msg_handler.does_continue()
return xlsx_data_list
def get_worksheets(input_files):
for input_file in input_files:
wb = load_workbook(input_file, **WB_SETTINGS)
sheetname_list = wb.sheetnames
for sheetname in sheetname_list:
xlsx_tuple_list.append((input_file, sheetname))
wb.close()
def set_data():
for filename, wsname in xlsx_tuple_list:
wb = load_workbook(filename, **WB_SETTINGS)
ws = wb[wsname]
# max_col and max_row can be None.
cur_max_col = ws.max_column
cur_max_row = ws.max_row
# Close workbook right away so
# it won't remain open in case script
# gets closed or crashes.
wb.close()
max_col = get_max_col(filename, wsname, cur_max_col)
max_row = get_max_row(filename, wsname, cur_max_row)
# Get the row where a header was found.
header_row = get_header_row(filename, wsname, max_row)
# check_header_row() ensures that a non-blank row
# is after header row. If not, it might not
# actually be a header row.
if (
header_row == INVALID_ROW
or header_row == max_row
or not check_header_row(filename, wsname, max_col, header_row)
):
continue
# The first row after the header_row.
min_row = header_row + 1
header_list = get_header_list(filename, wsname, max_col, header_row)
if max_col > len(header_list):
            logger.info(
                string.Template(
                    "Reducing max column length of $id from $cur_col to $new_col due to None in $cell_pos."
                ).substitute(
                    id=msg_handler.get_id((filename, wsname), "WS"),
                    cur_col=max_col,
                    new_col=len(header_list),
                    cell_pos=cell_pos.get_col_letter(len(header_list) + 1) + str(header_row),
                )
            )
max_col = len(header_list)
DataTuple = XlsxDataTuple(filename, wsname, header_list)
DataTuple.set_oper_num(min_row, max_row, max_col)
xlsx_data_list.append(DataTuple)
def get_max_col(filename, wsname, max_col):
xlsx_id = msg_handler.get_id((filename, wsname), "WS")
while (not isinstance(max_col, int)) or (max_col <= INVALID_ROW):
logger.error(f"Max col for {xlsx_id} is {str(max_col)}.")
msg_handler.does_continue()
try:
logger.info("User providing number of columns (starting at 1).")
max_col = int(
input("Please provide the number of columns (starting at 1) > ")
)
except (ValueError, TypeError):
logger.log("FAILURE", "Input could not be converted to int.")
max_col = None
if (isinstance(max_col, int)) and (max_col <= 0):
logger.log("FAILURE", "Input is less than one.")
return max_col
def get_max_row(filename, wsname, max_row):
xlsx_id = msg_handler.get_id((filename, wsname))
while (not isinstance(max_row, int)) or (max_row <= 0):
logger.error(f"Max row for {xlsx_id} is {str(max_row)}.")
msg_handler.does_continue()
try:
logger.info("User providing number of rows (starting at 1).")
max_row = int(input("Please provide the number of rows (starting at 1) > "))
except (ValueError, TypeError):
logger.log("FAILURE", "Input could not be converted to int.")
max_row = None
if (isinstance(max_row, int)) and (max_row <= 0):
logger.log("FAILURE", "Input is less than one.")
return max_row
def get_header_row(filename, wsname, max_row):
wb = load_workbook(filename, **WB_SETTINGS)
ws = wb[wsname]
# header_row starts at 1,
# so a value of 0 indicates
# it wasn't found.
header_row = INVALID_ROW
for row in cell_pos.row_iter(max_row):
row_str = str(row)
# A row with just a title would not fill up the entire max_column.
# As a result, there would be None at either the first or second
# position.
cell1 = ws["A" + row_str].value
cell2 = ws["B" + row_str].value
if cell1 is not None and cell2 is not None:
header_row = row
break
wb.close()
return header_row
def check_header_row(filename, wsname, max_col, header_row):
wb = load_workbook(filename, **WB_SETTINGS)
ws = wb[wsname]
# Check the row after the header row
# for content.
post_header_row = header_row + 1
row_str = str(post_header_row)
# List of items in row.
row_list = []
for col in cell_pos.col_iter(max_col):
col_letter = cell_pos.get_col_letter(col)
row_list.append(str(ws[col_letter + row_str].value))
wb.close()
# Ensure the row is not blank.
if row_list.count("None") != len(row_list):
return True
return False
def get_header_list(filename, wsname, max_col, header_row):
wb = load_workbook(filename, **WB_SETTINGS)
ws = wb[wsname]
header_list = []
row_str = str(header_row)
for col in cell_pos.col_iter(max_col):
col_letter = cell_pos.get_col_letter(col)
header_item = ws[col_letter + row_str].value
# Assuming the header doesn't have blank
# items between entries. Only at the end.
if header_item is None:
logger.warning(
f"Blank header {col_letter+row_str} in {msg_handler.get_id((filename, wsname), 'WS')} will be ignored."
)
break
header_list.append(header_item)
wb.close()
return header_list
if used:
ftype.add("xlsx", start)
| 33.410959 | 119 | 0.625974 | 2,107 | 0.21597 | 0 | 0 | 0 | 0 | 0 | 0 | 2,586 | 0.265068 |
1c369e5832adc50f438c555f56dfcb9a9431f342 | 5,501 | py | Python | solvers/generation_solver/img_interface.py | Anthony102899/Lego-ImageGenerator | 52b19c8bb20f77a3394675e7c037c943a50c1e15 | [
"Unlicense"
]
| 1 | 2022-03-20T10:23:38.000Z | 2022-03-20T10:23:38.000Z | solvers/generation_solver/img_interface.py | Anthony102899/Lego-ImageGenerator | 52b19c8bb20f77a3394675e7c037c943a50c1e15 | [
"Unlicense"
]
| null | null | null | solvers/generation_solver/img_interface.py | Anthony102899/Lego-ImageGenerator | 52b19c8bb20f77a3394675e7c037c943a50c1e15 | [
"Unlicense"
]
| null | null | null | import os
from tkinter import *
import tkinter.filedialog as tkfd
from PIL import Image
import numpy as np
import solvers.generation_solver.image_seperation as IS
def layer_interface(img_num):
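    # Opens one small Tk window per image so the user can pick the image file
    # and type in its layer number; returns the collected names and layers.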
layer_names = []
layer_nums = []
for k in range(img_num):
master = Toplevel()
master.title(f"Image number {k+1}")
master.geometry("+300+200")
# input image and layer
img_label = Label(master, text="Image").grid(row=0)
layer_label = Label(master, text="Layer").grid(row=1)
entry_img = Entry(master, width=30)
entry_layer = Entry(master, width=30)
entry_img.grid(row=0, column=1)
entry_layer.grid(row=1, column=1)
if k == img_num - 1:
Button(master, text='Done', command=master.quit).grid(row=2, column=2, sticky=W, pady=4)
else:
Button(master, text='Next', command=master.quit).grid(row=2, column=2, sticky=W, pady=4)
img_path = "inputs/images/"
img_path = os.path.join(os.path.dirname(__file__), img_path)
path = tkfd.askopenfilename(initialdir = img_path, title = "Select file", filetypes = (("png files","*.png"),("all files","*.*")))
entry_img.insert('0', os.path.basename(path))
image = Image.open(path)
img = PhotoImage(file=path)
width, height = img.width(), img.height()
if width > 250:
scale_w = int(round(width / 250, 0))
scale_h = int(round(height / 250, 0))
img = img.subsample(scale_w, scale_h)
if width < 250:
scale_w = int(round(250 / width, 0))
scale_h = int(round(250 / height, 0))
img = img.zoom(scale_w, scale_h)
Label(master, image=img).grid(row=2, column=1)
mainloop()
img_name = entry_img.get()
img_layer = entry_layer.get()
layer_names.append(img_name)
layer_nums.append(img_layer)
return layer_names, layer_nums
def show_interface():
root = Tk()
root.geometry("+300+300")
Label(root, text="Graph", font=("", 14, "bold", "underline"), fg='#696969').grid(row=0, sticky='w')
entry_graph = Entry(root, width=15)
entry_graph.grid(row=0, column=1)
graph_path = "connectivity/"
graph_path = os.path.join(os.path.dirname(__file__), graph_path)
path = tkfd.askopenfilename(initialdir = graph_path, title = "Select file", filetypes = (("pkl files", "*.pkl"), ("all files","*.*")))
entry_graph.insert('0', os.path.basename(path))
# input No. image and button
Label(root, text="Input image", font=("", 14, "bold", "underline"), fg='#696969').grid(row=1, sticky='w')
entry_file = Entry(root, width=15)
entry_file.grid(row=1, column=1)
entry_path = "inputs/images/"
entry_path = os.path.join(os.path.dirname(__file__), entry_path)
input_path = tkfd.askopenfilename(initialdir=entry_path, title="Select input image", filetypes=(("png files", "*.png"), ("jpg files", "*.jpg")))
entry_file.insert('0', os.path.basename(input_path))
Button(root, text='Next', command=root.quit).grid(row=1, column=2, sticky='e', pady=4)
# input background color
Label(root, text="").grid(row=2, column=1)
Label(root, text="Background color", font=("", 14, "bold", "underline"), fg='#696969').grid(row=3, sticky='w')
Label(root, text="R", fg='#4f4f4f').grid(row=4, column=0)
Label(root, text="G", fg='#4f4f4f').grid(row=4, column=1)
Label(root, text="B", fg='#4f4f4f').grid(row=4, column=2)
entry_r = Entry(root, width=15)
entry_g = Entry(root, width=15)
entry_b = Entry(root, width=15)
entry_r.grid(row=5, column=0)
entry_g.grid(row=5, column=1)
entry_b.grid(row=5, column=2)
# input rotation and scaling
Label(root, text="").grid(row=6, column=1)
Label(root, text="Rotation degree", font=("", 14, "bold", "underline"), fg='#696969').grid(row=7, sticky='w')
entry_degree = Entry(root, width=15, textvariable=StringVar(root, value='0'))
entry_degree.grid(row=7, column=1)
Label(root, text="Scale", font=("", 14, "bold", "underline"), fg='#696969').grid(row=7, column=2)
entry_scale = Entry(root, width=15, textvariable=StringVar(root, value='1'))
entry_scale.grid(row=7, column=3)
# input translation
Label(root, text="").grid(row=8, column=1)
Label(root, text="x translation", font=("", 14, "bold", "underline"), fg='#696969').grid(row=9, sticky='w')
entry_x = Entry(root, width=15, textvariable=StringVar(root, value='0'))
entry_x.grid(row=9, column=1)
Label(root, text="y translation", font=("", 14, "bold", "underline"), fg='#696969').grid(row=9, column=2)
entry_y = Entry(root, width=15, textvariable=StringVar(root, value='0'))
entry_y.grid(row=9, column=3)
Label(root, text="").grid(row=9, column=1)
mainloop()
img_path = input_path
print(img_path)
img_num = IS.seperate_color(img_path, "./cache/")
r, g, b = entry_r.get(), entry_g.get(), entry_b.get()
if len(r) == 0:
r = 0
if len(g) == 0:
g = 0
if len(b) == 0:
b = 0
if r == 0 and g == 0 and b == 0:
rgb = []
else:
rgb = np.array((int(r), int(g), int(b)))
layer_names, layer_nums = layer_interface(img_num)
return entry_graph.get(), img_num, layer_names, layer_nums, rgb, int(entry_degree.get()), float(entry_scale.get()), int(entry_x.get()), int(entry_y.get())
if __name__ == '__main__':
print(show_interface()) | 42.315385 | 158 | 0.616797 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 783 | 0.142338 |
1c3759df5a38cc9eec92e29506b100742f627706 | 953 | py | Python | Constellations/get_brightest_stars.py | PatD123/Polar-Constellation | 86f54ae2028a4f351b9f1a056aa3166f49541679 | [
"MIT"
]
| null | null | null | Constellations/get_brightest_stars.py | PatD123/Polar-Constellation | 86f54ae2028a4f351b9f1a056aa3166f49541679 | [
"MIT"
]
| null | null | null | Constellations/get_brightest_stars.py | PatD123/Polar-Constellation | 86f54ae2028a4f351b9f1a056aa3166f49541679 | [
"MIT"
]
| null | null | null | from bs4 import BeautifulSoup as soup
from urllib.request import urlopen as uReq
import re, json
# Getting the page
URL = "https://www.astronomytrek.com/star-constellations-brightest-stars/"
uClient = uReq(url=URL)
page_html = uClient.read()
page_soup = soup(page_html, "html.parser")
# Opening a file to write in
stars_file = open("brightest_stars.txt", 'w')
# Returns the index of the space before a "(" in a star name, so the parenthesised part can be trimmed off.
def find_space(star):
for i in range(0, len(star)):
if star[i] == " " and star[i + 1] == "(":
return i
brightest_uncleaned = page_soup.find_all("tr")
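# Each <tr> holds one constellation; the star name is read from column 5 when
# present (otherwise column 4) and trimmed at the " (" before being written out.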
for html in brightest_uncleaned:
col_4 = html.contents[4].contents[0]
col_5 = html.contents[5].string
if col_5 is not None:
idx = find_space(col_5)
col_5 = col_5[0:idx]
if col_5 == "Brightest Star": continue
stars_file.write(col_5 + "\n")
else:
idx = find_space(col_4)
col_4 = col_4[0:idx]
stars_file.write(col_4 + "\n")
stars_file.close() | 27.228571 | 74 | 0.651626 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 186 | 0.195173 |
1c38a65740967a1e49c94a99e84549d3470de0b7 | 493 | py | Python | TwoPointers/Leetcode11.py | Rylie-W/LeetRecord | 623c4efe88b3af54b8a65f6ec23db850b8c6f46f | [
"Apache-2.0"
]
| null | null | null | TwoPointers/Leetcode11.py | Rylie-W/LeetRecord | 623c4efe88b3af54b8a65f6ec23db850b8c6f46f | [
"Apache-2.0"
]
| null | null | null | TwoPointers/Leetcode11.py | Rylie-W/LeetRecord | 623c4efe88b3af54b8a65f6ec23db850b8c6f46f | [
"Apache-2.0"
]
| null | null | null | class Solution:
def maxArea(self, height) -> int:
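        # Two-pointer technique: start with the widest container and repeatedly
        # move the pointer at the shorter line inward, since the shorter side
        # limits the area and a narrower container can only do better with a
        # taller line. Example: height = [1,8,6,2,5,4,8,3,7] -> 49.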
left=0
right=len(height)-1
res=min(height[left],height[right])*(right-left)
while right>left:
res=max(res,(right-left)*min(height[right],height[left]))
if height[left]<height[right]:
left+=1
else: right-=1
return res
if __name__ == '__main__':
sol=Solution()
# height = [1, 1]
height=[1,3,2,5,25,24,5]
print(sol.maxArea(height))
| 25.947368 | 69 | 0.543611 | 363 | 0.736308 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 0.054767 |
1c38c6e2555cdc9fef807ccf4fe2adf10311bc9a | 13,688 | py | Python | tensorflow_text/python/ops/bert_tokenizer_test.py | hashim361/text | 141ed3ae72078a5da431831ce718c8d09fbf4f92 | [
"Apache-2.0"
]
| 1 | 2020-10-10T14:10:07.000Z | 2020-10-10T14:10:07.000Z | tensorflow_text/python/ops/bert_tokenizer_test.py | pranayjoshi/text | 5a12211ac370f989ca359d232d3081a889e859dd | [
"Apache-2.0"
]
| null | null | null | tensorflow_text/python/ops/bert_tokenizer_test.py | pranayjoshi/text | 5a12211ac370f989ca359d232d3081a889e859dd | [
"Apache-2.0"
]
| null | null | null | # coding=utf-8
# Copyright 2020 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# encoding=utf-8
r"""Tests for BertTokenizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import lookup_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.ops.ragged import ragged_map_ops
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.platform import test
from tensorflow_text.python.ops import bert_tokenizer
def _utf8(x):
return x.encode('utf-8')
# TODO(thuang513): It appears there isn't a Ragged version of substr; consider
# checking this into core TF.
def _ragged_substr(text_input, begin, end):
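  # Slice each input string at the (begin, end) offset pairs of its tokens,
  # broadcasting the string across its ragged rows; used below to check that
  # tokenize_with_offsets() offsets map back to the expected wordpieces.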
text_input_flat = None
if ragged_tensor.is_ragged(text_input):
text_input_flat = text_input.flat_values
else:
text_input_flat = text_input
def _ragged_tile(x):
input_text, indices = x
multiple = math_ops.reduce_sum(indices.row_lengths())
return array_ops.tile([input_text], [multiple])
broadcasted_text = ragged_map_ops.map_fn(
_ragged_tile,
(text_input_flat, begin),
dtype=ragged_tensor.RaggedTensorType(dtype=dtypes.string, ragged_rank=1),
infer_shape=False,
)
size = math_ops.sub(
array_ops.squeeze(end.flat_values), array_ops.squeeze(begin.flat_values))
new_tokens = string_ops.substr_v2(broadcasted_text,
array_ops.squeeze(begin.flat_values), size)
return begin.with_flat_values(new_tokens.flat_values)
_VOCAB = [
b'[unused1]',
b'[unused23]',
b"'",
b'##%',
b'##af',
b'##book',
b'##c',
b'##fr',
b'##hey',
b'##is',
b'##o',
b'##ost',
b'##s',
b'##tri',
b'##y',
b'$',
b'%',
b'&',
b'(',
b')',
b'*',
b'-',
b'.',
b'20',
b':',
b'?',
b'[CLS]',
b'[SEP]',
_utf8(u'國'),
_utf8(u'暐'),
_utf8(u'瀚'),
_utf8(u'韓'),
_utf8(u'食'),
_utf8(u'黃'),
_utf8(u'🤔'),
_utf8(u'🤣'),
b'^',
b'a',
b'ago',
b'among',
b'an',
b'and',
b'are',
b'aren',
b'awesome',
b'between',
b'candy',
b'china',
b'companies',
b'company',
b'crushed',
b'dug',
b'earnings',
b'engaged',
b'even',
b'few',
b'forecast',
b'getting',
b'had',
b'han',
b'has',
b'hers',
b'high',
b'hit',
b'hs',
b'hurting',
b'in',
b'indie',
b'is',
b'isn',
b'ka',
b'ku',
b'major',
b'maker',
b'moth',
b'nearly',
b'new',
b'now',
b'president',
b'record',
b'regulators',
b'reported',
b'rift',
b'rust',
b'sales',
b'shares',
b'slightly',
b'sprint',
b'states',
b'stock',
b't',
b'taste',
b'tension',
b'that',
b'the',
b'this',
b'today',
b'told',
b'topped',
b'trade',
b'trump',
b'united',
b'up',
b'weeks',
b'what',
b'why',
b'with',
b'year',
b'yo',
b'yu',
_utf8(u'\u7231'),
_utf8(u'\u4e0a'),
_utf8(u'\u4e00'),
_utf8(u'\u4e2a'),
_utf8(u'\u4e0d'),
_utf8(u'\u56de'),
_utf8(u'\u5bb6'),
_utf8(u'\u7684'),
_utf8(u'\u4eba'),
]
def _create_table(vocab, num_oov=1):
init = lookup_ops.KeyValueTensorInitializer(
vocab,
math_ops.range(
array_ops.size(vocab, out_type=dtypes.int64), dtype=dtypes.int64),
key_dtype=dtypes.string,
value_dtype=dtypes.int64)
return lookup_ops.StaticVocabularyTableV1(
init, num_oov, lookup_key_dtype=dtypes.string)
class BertTokenizerTest(test_util.TensorFlowTestCase, parameterized.TestCase):
def test_bert_tokenizer_outputs(self):
text_inputs = constant_op.constant([_utf8('Test')])
vocab = _VOCAB
table = _create_table(vocab, 2)
self.evaluate(table.initializer)
tokenizer = bert_tokenizer.BertTokenizer(
table,
token_out_type=dtypes.int32)
results = tokenizer.tokenize(text_inputs)
self.assertAllEqual(results.dtype, dtypes.int32)
@parameterized.parameters([
dict(
text_inputs=[
_utf8(u'taste the rustisc indiefrost'),
_utf8(u'Han Kuo-yu (韓國食)🤔'),
_utf8(u'Añade la información del formulario y tus preguntas'),
],
expected_tokens=[[b'taste', b'the', b'rustisc', b'indiefrost'],
[
b'Han', b'Kuo', b'-', b'yu', b'(',
b'\xe9\x9f\x93', b'\xe5\x9c\x8b',
b'\xe9\xa3\x9f', b')', b'\xf0\x9f\xa4\x94'
],
[
b'A\xc3\xb1ade', b'la', b'informaci\xc3\xb3n',
b'del', b'formulario', b'y', b'tus', b'preguntas'
]],
),
dict(
text_inputs=[
_utf8(u'UNwant\u00E9d,running'),
_utf8(u'Añade la información del formulario y tus preguntas'),
],
expected_tokens=[[b'unwanted', b',', b'running'],
[
b'anade', b'la', b'informacion', b'del',
b'formulario', b'y', b'tus', b'preguntas'
]],
lower_case=True,
),
dict(
text_inputs=[
_utf8(u'Añade la información del formulario y tus preguntas')
],
expected_tokens=[[
b'An\xcc\x83ade', b'la', b'informacio\xcc\x81n', b'del',
b'formulario', b'y', b'tus', b'preguntas'
]],
normalization_form='NFD',
),
# Test CJK are tokenized by unicode characters
dict(
text_inputs=[
_utf8(u'香港では4日'),
_utf8(u'영어독해 자만심 왜 문제일까'),
_utf8(u'據港媒《東網》報導')
],
expected_tokens=[
[_utf8(u'香'),
_utf8(u'港'),
_utf8(u'では4'),
_utf8(u'日')],
[
_utf8(u'영어독해'),
_utf8(u'자만심'),
_utf8(u'왜'),
_utf8(u'문제일까'),
],
[
_utf8(u'據'),
_utf8(u'港'),
_utf8(u'媒'),
_utf8(u'《'),
_utf8(u'東'),
_utf8(u'網'),
_utf8(u'》'),
_utf8(u'報'),
_utf8(u'導')
],
],
normalization_form=None,
),
# Test Katakana followed by Hiragana.
dict(
text_inputs=[_utf8(u'のテキストとして')],
expected_tokens=[
[_utf8(u'のテキストとして')],
],
normalization_form=None,
),
])
@test_util.run_in_graph_and_eager_modes
def test_basic_tokenize(self,
text_inputs,
expected_tokens,
lower_case=False,
normalization_form='NFC'):
text_inputs = ragged_factory_ops.constant(text_inputs)
tokenizer = bert_tokenizer.BasicTokenizer(
lower_case=lower_case, normalization_form=normalization_form)
tokens = tokenizer.tokenize(text_inputs)
self.assertAllEqual(tokens, expected_tokens)
@parameterized.parameters([
dict(
text_inputs=[
b'taste the rustisc indiefrost',
_utf8(u'Han Kuo-yu (韓國食)🤔'),
_utf8(u'dugtrio had an awesome 🤣 dugbook'),
b'yo^what$is*up?',
b'mothaf*&%ka',
],
expected=[[[b'taste'], [b'the'], [b'rust', b'##is', b'##c'],
[b'indie', b'##fr', b'##ost']],
[[b'han'], [b'ku', b'##o'], [b'-'], [b'yu'], [b'('],
[_utf8(u'韓')], [_utf8(u'國')], [_utf8(u'食')], [b')'],
[_utf8(u'🤔')]],
[[b'dug', b'##tri', b'##o'], [b'had'], [b'an'],
[b'awesome'], [_utf8(u'🤣')], [b'dug', b'##book']],
[[b'yo'], [b'^'], [b'what'], [b'$'], [b'is'], [b'*'],
[b'up'], [b'?']],
[[b'moth', b'##af'], [b'*'], [b'&'], [b'%'], [b'ka']]],
expected_extracted=[[[b'taste'], [b'the'], [b'rust', b'is', b'c'],
[b'indie', b'fr', b'ost']],
[[b'Han'], [b'Ku', b'o'], [b'-'], [b'yu'], [b'('],
[_utf8(u'韓')], [_utf8(u'國')], [_utf8(u'食')],
[b')'], [_utf8(u'🤔')]],
[[b'dug', b'tri', b'o'], [b'had'], [b'an'],
[b'awesome'], [_utf8(u'🤣')], [b'dug', b'book']],
[[b'yo'], [b'^'], [b'what'], [b'$'], [b'is'],
[b'*'], [b'up'], [b'?']],
[[b'moth', b'af'], [b'*'], [b'&'], [b'%'],
[b'ka']]],
lower_case=True,
),
# Test when we are expecting multiple OOV vocab ids and tf.string just
# maps out [UNK] token.
dict(
text_inputs=[
b'mothaf*&%ka cantfindme whodis',
],
expected=[[[b'moth', b'##af'], [b'*'], [b'&'], [b'%'], [b'ka'],
[b'[UNK]'], [b'[UNK]']]],
expected_extracted=[[[b'moth', b'af'], [b'*'], [b'&'], [b'%'],
[b'ka'], [b'cantfindme'], [b'whodis']]],
lower_case=True,
num_oov=2,
),
dict(
text_inputs=[
b'candy',
],
expected=[[[b'candy']]],
lower_case=True,
num_oov=2,
),
dict(
text_inputs=[
_utf8(u'爱上一个不回家的人'),
],
expected=[[[_utf8(u'爱')], [_utf8(u'上')], [_utf8(u'一')], [_utf8(u'个')],
[_utf8(u'不')], [_utf8(u'回')], [_utf8(u'家')], [_utf8(u'的')],
[_utf8(u'人')]]],
lower_case=True,
num_oov=2,
),
# Test 'preserve_unused_token' option
dict(
text_inputs=[
b'taste the rustisc indiefrost [unused1]',
_utf8(u'爱上一个不回家的人[unused23]'),
],
expected=[[[b'taste'], [b'the'], [b'rust', b'##is', b'##c'],
[b'indie', b'##fr', b'##ost'], [b'[unused1]']],
[[_utf8(u'爱')], [_utf8(u'上')], [_utf8(u'一')], [_utf8(u'个')],
[_utf8(u'不')], [_utf8(u'回')], [_utf8(u'家')], [_utf8(u'的')],
[_utf8(u'人')], [b'[unused23]']]],
preserve_unused_token=True,
),
])
@test_util.run_in_graph_and_eager_modes
def test_bert_tokenizer(self,
text_inputs,
expected,
vocab=None,
expected_extracted=None,
lower_case=True,
num_oov=1,
preserve_unused_token=False):
text_inputs = constant_op.constant(text_inputs)
if not vocab:
vocab = _VOCAB
table = _create_table(vocab, num_oov)
self.evaluate(table.initializer)
tokenizer = bert_tokenizer.BertTokenizer(
table,
token_out_type=dtypes.string,
lower_case=lower_case,
preserve_unused_token=preserve_unused_token)
results = tokenizer.tokenize(text_inputs)
self.assertAllEqual(results, expected)
# Verify that the int ids are the same.
expected_rt = ragged_factory_ops.constant(expected)
expected_int = table.lookup(expected_rt.flat_values)
expected_int_rt = ragged_tensor.RaggedTensor.from_nested_row_splits(
expected_int, expected_rt.nested_row_splits)
int_tokenizer = bert_tokenizer.BertTokenizer(
vocab_lookup_table=table,
token_out_type=dtypes.int64,
lower_case=lower_case,
preserve_unused_token=preserve_unused_token)
results_int = int_tokenizer.tokenize(text_inputs)
self.assertAllEqual(results_int, expected_int_rt)
# Verify that the offsets can extract the expected tokens
_, begin, end = tokenizer.tokenize_with_offsets(text_inputs)
extracted_wordpieces = _ragged_substr(text_inputs, begin, end)
if expected_extracted:
self.assertAllEqual(extracted_wordpieces, expected_extracted)
else:
# The extracted won't have any wordpieces with '##' prefix. Strip them
# out.
stripped_prefix_flat = string_ops.regex_replace(expected_rt.flat_values,
'##', '')
stripped_prefix = expected_rt.with_flat_values(stripped_prefix_flat)
self.assertAllEqual(extracted_wordpieces, stripped_prefix)
if __name__ == '__main__':
test.main()
| 31.179954 | 80 | 0.512566 | 9,434 | 0.675353 | 0 | 0 | 8,961 | 0.641492 | 0 | 0 | 4,012 | 0.287207 |
1c3b16c69b0c5704668f2afab4edc623fff685bf | 5,324 | py | Python | tests/index_test.py | DubeySandeep/pending-review-notification | 353fa74d98eeb6c8386818273a2fe02af39d6b9d | [
"Apache-2.0"
]
| null | null | null | tests/index_test.py | DubeySandeep/pending-review-notification | 353fa74d98eeb6c8386818273a2fe02af39d6b9d | [
"Apache-2.0"
]
| null | null | null | tests/index_test.py | DubeySandeep/pending-review-notification | 353fa74d98eeb6c8386818273a2fe02af39d6b9d | [
"Apache-2.0"
]
| 1 | 2021-10-20T16:24:04.000Z | 2021-10-20T16:24:04.000Z | """Unit test for the index.py file."""
import unittest
from datetime import datetime, timedelta, timezone
import json
from unittest.mock import patch, mock_open
import requests_mock
from src import index
from src import github_services
class ModuleIntegerationTest(unittest.TestCase):
"""Integeration test for the send notification feature."""
def setUp(self):
self.orgName = 'orgName'
self.repoName = 'repo'
self.pull_response = [{
'html_url': 'https://githuburl.pull/123',
'number': 123,
'title': 'PR title 1',
'user': {
'login': 'authorName',
},
'assignees': [{
'login': 'reviewerName1',
}, {
'login': 'reviewerName2',
}]
}, {
'html_url': 'https://githuburl.pull/234',
'number': 234,
'title': 'PR title 2',
'user': {
'login': 'authorName',
},
'assignees': [{
'login': 'reviewerName1',
}, {
'login': 'reviewerName2',
}]
}]
def get_past_time(hours=0):
return (
datetime.now(timezone.utc) - timedelta(hours=hours)).strftime(
"%Y-%m-%dT%H:%M:%SZ")
self.timeline1 = [{
'event': 'created'
}, {
'event': 'assigned',
'assignee': {
'login': 'reviewerName1'
},
'created_at': get_past_time(hours=22)
},{
'event': 'assigned',
'assignee': {
'login': 'reviewerName2'
},
'created_at': get_past_time(hours=56)
}]
self.timeline2 = [{
'event': 'created'
}, {
'event': 'assigned',
'assignee': {
'login': 'reviewerName1'
},
'created_at': get_past_time(hours=23)
}, {
'event': 'assigned',
'assignee': {
'login': 'reviewerName2'
},
'created_at': get_past_time(hours=19)
}]
self.test_template = "{{ username }}\n{{ pr_list }}"
def mock_all_get_requests(self, mock_request):
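        # Register mocked GitHub API responses: one page of pull requests plus
        # an empty second page, and a two-page issue timeline for each PR.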
param_page_1='?page=1&per_page=100'
param_page_2='?page=2&per_page=100'
mock_request.get(
github_services.PULL_REQUESTS_URL_TEMPLATE.format(
self.orgName, self.repoName) + param_page_1,
text=json.dumps(self.pull_response))
mock_request.get(
github_services.PULL_REQUESTS_URL_TEMPLATE.format(
self.orgName, self.repoName) + param_page_2,
text=json.dumps([]))
mock_request.get(
github_services.ISSUE_TIMELINE_URL_TEMPLATE.format(
self.orgName, self.repoName, 123) + param_page_1,
text=json.dumps(self.timeline1))
mock_request.get(
github_services.ISSUE_TIMELINE_URL_TEMPLATE.format(
self.orgName, self.repoName, 123) + param_page_2,
text=json.dumps([]))
mock_request.get(
github_services.ISSUE_TIMELINE_URL_TEMPLATE.format(
self.orgName, self.repoName, 234) + param_page_1,
text=json.dumps(self.timeline2))
mock_request.get(
github_services.ISSUE_TIMELINE_URL_TEMPLATE.format(
self.orgName, self.repoName, 234) + param_page_2,
text=json.dumps([]))
def mock_post_discussion_request(self, mock_request):
request = mock_request.post(
github_services.CREATE_DISCUSSION_URL_TEMPLATE.format(
self.orgName, 'teamName'),
text=json.dumps({}))
return request
def test_executing_main_function_sends_notification(self):
with requests_mock.Mocker() as mock_request:
self.mock_all_get_requests(mock_request)
request = self.mock_post_discussion_request(mock_request)
file_data = mock_open(read_data=self.test_template)
with patch("builtins.open", file_data):
index.main([
'--team', 'teamName',
'--repo', 'orgName/repo',
'--max-wait-hours', '20',
'--token', 'githubTokenForApiRequest'
])
self.assertTrue(request.called)
self.assertEqual(request.call_count, 2)
expected_messages = [
{
'title': '[@reviewerName1] Pending review on PRs',
'body': '@reviewerName1\n- [#123](https://githuburl.pull/123) '
'[Waiting from the last 22 hours]\n'
'- [#234](https://githuburl.pull/234) '
'[Waiting from the last 23 hours]'
},
{
'title': '[@reviewerName2] Pending review on PRs',
'body': '@reviewerName2\n- [#123](https://githuburl.pull/123) '
'[Waiting from the last 2 days, 8 hours]'
},
]
self.assertEqual(
request.request_history[0].json(), expected_messages[0])
self.assertEqual(
request.request_history[1].json(), expected_messages[1])
| 35.493333 | 79 | 0.523666 | 5,083 | 0.954733 | 0 | 0 | 0 | 0 | 0 | 0 | 1,260 | 0.236664 |
1c3c2ebbf2a88dc388bb0314813d8b32b385e4b0 | 3,133 | py | Python | rqalpha/data/instrument_mixin.py | mysky528/rqalpha | ecd550fc30aee96f9995e8152e2c48f5512f8b11 | [
"Apache-2.0"
]
| 3 | 2017-07-11T15:37:24.000Z | 2021-11-22T14:21:13.000Z | rqalpha/data/instrument_mixin.py | mysky528/rqalpha | ecd550fc30aee96f9995e8152e2c48f5512f8b11 | [
"Apache-2.0"
]
| null | null | null | rqalpha/data/instrument_mixin.py | mysky528/rqalpha | ecd550fc30aee96f9995e8152e2c48f5512f8b11 | [
"Apache-2.0"
]
| 2 | 2019-04-26T07:51:08.000Z | 2020-12-01T20:59:04.000Z | # -*- coding: utf-8 -*-
#
# Copyright 2017 Ricequant, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
class InstrumentMixin(object):
def __init__(self, instruments):
self._instruments = {i.order_book_id: i for i in instruments}
self._sym_id_map = {i.symbol: k for k, i in six.iteritems(self._instruments)
                            # Filter out the index entries: CSI300, SSE50, CSI500, SSE180
if not i.order_book_id.endswith('INDX')}
try:
# FIXME
            # CSI 300 and CSI 500 always use the Shanghai (XSHG) order book ids
for o in ['000300.XSHG', '000905.XSHG']:
self._sym_id_map[self._instruments[o].symbol] = o
            # Both the "SSE 180" and "SSE 180 Index" symbols point to 000010.XSHG
self._sym_id_map[self._instruments['SSE180.INDX'].symbol] = '000010.XSHG'
except KeyError:
pass
def sector(self, code):
return [v.order_book_id for v in self._instruments.values()
if v.type == 'CS' and v.sector_code == code]
def industry(self, code):
return [v.order_book_id for v in self._instruments.values()
if v.type == 'CS' and v.industry_code == code]
def concept(self, *concepts):
return [v.order_book_id for v in self._instruments.values()
if v.type == 'CS' and any(c in v.concept_names.split('|') for c in concepts)]
def all_instruments(self, types, dt=None):
return [i for i in self._instruments.values()
if ((dt is None or i.listed_date.date() <= dt.date() <= i.de_listed_date.date()) and
(types is None or i.type in types))]
def _instrument(self, sym_or_id):
try:
return self._instruments[sym_or_id]
except KeyError:
try:
sym_or_id = self._sym_id_map[sym_or_id]
return self._instruments[sym_or_id]
except KeyError:
return None
def instruments(self, sym_or_ids):
if isinstance(sym_or_ids, six.string_types):
return self._instrument(sym_or_ids)
return [i for i in [self._instrument(sid) for sid in sym_or_ids] if i is not None]
def get_future_contracts(self, underlying, date):
date = date.replace(hour=0, minute=0, second=0)
futures = [v for o, v in six.iteritems(self._instruments)
if v.type == 'Future' and v.underlying_symbol == underlying and
not o.endswith('88') and not o.endswith('99')]
if not futures:
return []
return sorted(i.order_book_id for i in futures if i.listed_date <= date <= i.de_listed_date)
| 40.166667 | 100 | 0.620172 | 2,566 | 0.805651 | 0 | 0 | 0 | 0 | 0 | 0 | 834 | 0.261852 |
1c3ca96a8752bea73f340ee28894ea1bdab8af22 | 215 | py | Python | Python 3/19.prac_no2.py | ByeonUi-Hyeok/practice | 6f55ddcb662e2bf8e0c3fb4c4af0beb77a1c7d2d | [
"MIT"
]
| 1 | 2021-06-11T08:55:03.000Z | 2021-06-11T08:55:03.000Z | Python 3/19.prac_no2.py | ByeonUi-Hyeok/practice | 6f55ddcb662e2bf8e0c3fb4c4af0beb77a1c7d2d | [
"MIT"
]
| null | null | null | Python 3/19.prac_no2.py | ByeonUi-Hyeok/practice | 6f55ddcb662e2bf8e0c3fb4c4af0beb77a1c7d2d | [
"MIT"
]
| null | null | null | import funcvote as vote
votes = input("투표내용 >>>")
# print(votes)
# print(type(votes))
result = vote.str2int(votes)
print(vote.countvotes(result))
result = vote.countvotes(result)
vote.printvote(result)
# Voting draft | 14.333333 | 32 | 0.716279 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 67 | 0.290043 |
1c3d09dc17bc58a64b3b41021ca264b66d8e9b31 | 427 | py | Python | tutorials/30-days-of-code/30-operators.py | PingHuskar/hackerrank | 1bfdbc63de5d0f94cd9e6ae250476b4a267662f2 | [
"Unlicense"
]
| 41 | 2018-05-11T07:54:34.000Z | 2022-03-29T19:02:32.000Z | tutorials/30-days-of-code/30-operators.py | PingHuskar/hackerrank | 1bfdbc63de5d0f94cd9e6ae250476b4a267662f2 | [
"Unlicense"
]
| 2 | 2021-09-13T10:03:26.000Z | 2021-10-04T10:21:05.000Z | tutorials/30-days-of-code/30-operators.py | PingHuskar/hackerrank | 1bfdbc63de5d0f94cd9e6ae250476b4a267662f2 | [
"Unlicense"
]
| 21 | 2019-01-23T19:06:59.000Z | 2021-12-23T16:03:47.000Z | # Day 2: Operators
# Start using arithmetic operators.
#
# https://www.hackerrank.com/challenges/30-operators/problem
#
#!/bin/python3
import sys
if __name__ == "__main__":
meal_cost = float(input().strip())
tip_percent = int(input().strip())
tax_percent = int(input().strip())
cost = meal_cost * (1 + tip_percent / 100 + tax_percent / 100)
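    # Example: a 12.00 meal with a 20% tip and 8% tax gives 12.00 * 1.28 = 15.36,
    # which prints as "The total meal cost is 15 dollars." after rounding.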
print("The total meal cost is {:.0f} dollars.".format(cost))
| 22.473684 | 66 | 0.665105 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 179 | 0.419204 |
1c3e669806f961c690e3e607d0c5ebaae5ffefbe | 2,503 | py | Python | articles/views.py | qwghlm/CommentIsMee | 2c11be1376ec693df28123727c3d86b38404fd71 | [
"MIT"
]
| null | null | null | articles/views.py | qwghlm/CommentIsMee | 2c11be1376ec693df28123727c3d86b38404fd71 | [
"MIT"
]
| null | null | null | articles/views.py | qwghlm/CommentIsMee | 2c11be1376ec693df28123727c3d86b38404fd71 | [
"MIT"
]
| null | null | null | from django.http import HttpResponse
from django.template import RequestContext, loader
from django.shortcuts import render, get_object_or_404, redirect
from django.core.urlresolvers import reverse
from django.core.cache import cache
from articles.models import CIFArticle
from .forms import CIFArticleForm
def index(request):
"""
Handle requests to the homepage
"""
article = None
# If a user has submitted a URL...
if request.POST:
form = CIFArticleForm(request.POST)
if (form.is_valid()):
try:
article = form.save(commit=False)
existing_articles = CIFArticle.objects.filter(url=article.url).count()
if existing_articles:
article = CIFArticle.objects.get(url=article.url)
else:
article.measure_ego()
article.save()
            except ValueError as e:
article = None
form._errors["url"] = form.error_class([str(e)])
# If no URL submitted, just set up a blank form
else:
form = CIFArticleForm()
# If an article is found or created due to a user submission, redirect there
if article:
return redirect(reverse("articles:detail", args=(article.id,)))
# Else show the homepage & rendered form
else:
top_articles = cache.get('cim:top_articles')
if top_articles is None:
top_articles = CIFArticle.objects.filter(is_cif=1).order_by('-score')[:10]
cache.set('cim:top_articles', top_articles, 60)
latest_articles = cache.get('cim:latest_articles')
if latest_articles is None:
latest_articles = CIFArticle.objects.filter(is_cif=1).order_by('-id')[:5]
cache.set('cim:latest_articles', latest_articles, 30)
return render(request, 'articles/index.html', {
'form' : form ,
'top_articles' : top_articles,
'latest_articles' : latest_articles
})
def detail(request, article_id):
"""
Handle detail view for an article
"""
# Quite simple, set up article and form
form = CIFArticleForm()
article_key = 'cim:article:%s' % article_id
article = cache.get(article_key)
if article is None:
article = get_object_or_404(CIFArticle, id=article_id)
cache.set(article_key, article, 300)
return render(request, 'articles/detail.html', {
'article' : article,
'form' : form })
| 32.934211 | 86 | 0.62485 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 556 | 0.222133 |
1c3f21c6980082d2b5b98180066cf9ba8b94eb50 | 156 | py | Python | utils/runtime_mode.py | omiderfanmanesh/dengue-infections-prediction | 6b4e4aa4af6f6e2cc581fd7828634bbfdc446340 | [
"Apache-2.0"
]
| null | null | null | utils/runtime_mode.py | omiderfanmanesh/dengue-infections-prediction | 6b4e4aa4af6f6e2cc581fd7828634bbfdc446340 | [
"Apache-2.0"
]
| null | null | null | utils/runtime_mode.py | omiderfanmanesh/dengue-infections-prediction | 6b4e4aa4af6f6e2cc581fd7828634bbfdc446340 | [
"Apache-2.0"
]
| 1 | 2021-06-05T10:05:44.000Z | 2021-06-05T10:05:44.000Z | # Copyright (c) 2021, Omid Erfanmanesh, All rights reserved.
class RuntimeMode:
TRAIN = 0
TUNING = 1
CROSS_VAL = 2
FEATURE_IMPORTANCE = 3
| 19.5 | 61 | 0.666667 | 92 | 0.589744 | 0 | 0 | 0 | 0 | 0 | 0 | 61 | 0.391026 |
1c41c0dd3400c46c01883be0652a07078deef3cb | 2,616 | py | Python | pydoc_fork/__main__.py | matthewdeanmartin/pydoc_fork | 174475b15be966f3751d5563b4db0beecc3ab1f9 | [
"MIT"
]
| null | null | null | pydoc_fork/__main__.py | matthewdeanmartin/pydoc_fork | 174475b15be966f3751d5563b4db0beecc3ab1f9 | [
"MIT"
]
| 1 | 2022-01-17T16:28:45.000Z | 2022-01-17T16:28:45.000Z | pydoc_fork/__main__.py | matthewdeanmartin/pydoc_fork | 174475b15be966f3751d5563b4db0beecc3ab1f9 | [
"MIT"
]
| null | null | null | # noinspection PyPep8
"""pydoc_fork
A fork of pydoc that is optimized for generating html documentation in a CI context
Usage:
pydoc_fork <package>... [options]
pydoc_fork (-h | --help)
pydoc_fork --version
Options:
-h --help Show this screen.
-v --version Show version.
--quiet No printing or logging.
--verbose Crank up the logging.
--config <config> pyproject.toml or other toml config.
--document_internals respect underscore or __all__ private
--prefer_docs_python_org link to python.org or generate own stdlib docs
-o --output <folder> where to write files
"""
# TODO: implement this
# pydoc_fork dot_notation <importable>... [--output=<folder>] [--document_internals]
# pydoc_fork source_path <path>... [--output=<folder>] [--document_internals]
import logging
import sys
import docopt
from pydoc_fork import commands, settings
from pydoc_fork.settings import load_config
LOGGER = logging.getLogger(__name__)
LOGGERS = []
__version__ = "3.0.0"
def main() -> int:
"""Get the args object from command parameters"""
arguments = docopt.docopt(__doc__, version=f"pydoc_fork {__version__}")
config_path = arguments.get("<config>")
if config_path:
load_config(config_path)
LOGGER.debug(f"Invoking with docopts: {str(arguments)}")
output_folder = arguments["--output"]
# TODO: add lists of packages
package = arguments["<package>"] or []
# quiet = bool(arguments.get("--quiet", False))
if arguments.get("--document_internals"):
settings.DOCUMENT_INTERNALS = arguments["--document_internals"]
if arguments.get("--prefer_docs_python_org"):
settings.PREFER_DOCS_PYTHON_ORG = arguments["--prefer_docs_python_org"]
if arguments.get("--verbose"):
# root logger, all modules
for root in ("pydoc_fork", "__main__"):
logger = logging.getLogger(root)
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
log_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
formatter = logging.Formatter(log_format)
handler.setFormatter(formatter)
logger.addHandler(handler)
LOGGERS.append(logger)
commands.process_path_or_dot_name(
package,
output_folder=output_folder,
)
# # TODO
# print("Don't recognize that command.")
# return -1
return 0
if __name__ == "__main__":
sys.exit(main())
| 31.518072 | 86 | 0.64526 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,400 | 0.535168 |
1c41d05846e91ffb115828352ba38c0ccc9074be | 444 | py | Python | backend/src/libs/strings.py | codeglitchz/attendance-system | c82a8d75375069b15e0b827608209bfacb67cde7 | [
"MIT"
]
| 37 | 2019-12-15T17:39:38.000Z | 2022-03-13T08:16:09.000Z | backend/src/libs/strings.py | codeglitchz/attendance-system | c82a8d75375069b15e0b827608209bfacb67cde7 | [
"MIT"
]
| 16 | 2020-05-05T14:17:26.000Z | 2022-03-02T09:09:38.000Z | backend/src/libs/strings.py | codeglitchz/attendance-system | c82a8d75375069b15e0b827608209bfacb67cde7 | [
"MIT"
]
| 18 | 2019-12-15T17:39:43.000Z | 2022-01-22T10:42:41.000Z | """
libs.strings
By default, uses `en-gb.json` file inside the `strings` top-level folder.
If language changes, set `libs.strings.default_locale` and run `libs.strings.refresh()`.
"""
import json
default_locale = "en-us"
cached_strings = {}
def refresh():
global cached_strings
with open(f"strings/{default_locale}.json") as f:
cached_strings = json.load(f)
def gettext(name):
return cached_strings[name]
refresh()
| 17.76 | 88 | 0.702703 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 224 | 0.504505 |
1c420085b055ce7cdac960f6e45563c43bc3b205 | 5,881 | py | Python | nemo_cmd/deflate.py | SalishSeaCast/NEMO-Cmd | a1fb05c4430e152a7dae57296bce364f73752129 | [
"Apache-2.0"
]
| 1 | 2020-03-26T16:42:26.000Z | 2020-03-26T16:42:26.000Z | nemo_cmd/deflate.py | SalishSeaCast/NEMO-Cmd | a1fb05c4430e152a7dae57296bce364f73752129 | [
"Apache-2.0"
]
| 10 | 2020-03-23T21:19:25.000Z | 2021-11-01T22:12:17.000Z | nemo_cmd/deflate.py | SalishSeaCast/NEMO-Cmd | a1fb05c4430e152a7dae57296bce364f73752129 | [
"Apache-2.0"
]
| null | null | null | # Copyright 2013-2021 The Salish Sea MEOPAR Contributors
# and The University of British Columbia
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""NEMO-Cmd command plug-in for deflate sub-command.
Deflate variables in netCDF files using Lempel-Ziv compression.
"""
import logging
import math
import multiprocessing
from pathlib import Path
import shlex
import subprocess
import time
import attr
import cliff.command
logger = logging.getLogger(__name__)
class Deflate(cliff.command.Command):
"""Deflate variables in netCDF files using Lempel-Ziv compression."""
def get_parser(self, prog_name):
parser = super(Deflate, self).get_parser(prog_name)
parser.description = """
Deflate variables in netCDF files using Lempel-Ziv compression.
Converts files to netCDF-4 format.
The deflated file replaces the original file.
This command is effectively the same as running
ncks -4 -L -O FILEPATH FILEPATH
for each FILEPATH.
"""
parser.add_argument(
"filepaths",
nargs="+",
type=Path,
metavar="FILEPATH",
help="Path/name of file to be deflated.",
)
parser.add_argument(
"-j",
"--jobs",
type=int,
default=math.floor(multiprocessing.cpu_count() / 2),
help=(
"Maximum number of concurrent deflation processes allowed. "
"Defaults to 1/2 the number of cores detected."
),
)
return parser
def take_action(self, parsed_args):
"""Execute the :command:`nemo deflate` sub-command.
Deflate variables in netCDF files using Lempel-Ziv compression.
Converts files to netCDF-4 format.
The deflated file replaces the original file.
This command is effectively the same as
:command:`ncks -4 -L -O filename filename`.
"""
deflate(parsed_args.filepaths, parsed_args.jobs)
@attr.s
class DeflateJob(object):
"""netCDF file deflation job."""
#: Path/name of the netCDF file to deflate.
filepath = attr.ib()
#: Lempel-Ziv compression level to use.
dfl_lvl = attr.ib(default=4)
#: Deflation job subprocess object.
process = attr.ib(default=None)
#: Deflation job process PID.
pid = attr.ib(default=None)
#: Deflation job process return code.
returncode = attr.ib(default=None)
def start(self):
"""Start the deflation job in a subprocess.
Cache the subprocess object and its process id as job attributes.
"""
cmd = "nccopy -s -4 -d{0.dfl_lvl} {0.filepath} {0.filepath}.nccopy.tmp".format(
self
)
self.process = subprocess.Popen(
shlex.split(cmd),
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True,
)
self.pid = self.process.pid
logger.debug("deflating {0.filepath} in process {0.pid}".format(self))
@property
def done(self):
"""Return a boolean indicating whether or not the job has finished.
Cache the subprocess return code as a job attribute.
"""
finished = False
self.returncode = self.process.poll()
if self.returncode is not None:
if self.returncode == 0:
Path("{0.filepath}.nccopy.tmp".format(self)).rename(self.filepath)
finished = True
logger.debug(
"deflating {0.filepath} finished "
"with return code {0.returncode}".format(self)
)
return finished
def deflate(filepaths, max_concurrent_jobs):
"""Deflate variables in each of the netCDF files in filepaths using
Lempel-Ziv compression.
Converts files to netCDF-4 format.
The deflated file replaces the original file.
:param sequence filepaths: Paths/names of files to be deflated.
:param int max_concurrent_jobs: Maximum number of concurrent deflation
processes allowed.
"""
logger.info(
"Deflating in up to {} concurrent sub-processes".format(
int(max_concurrent_jobs)
)
)
jobs = [DeflateJob(fp) for fp in filepaths if fp.exists()]
jobs_in_progress = _launch_initial_jobs(jobs, max_concurrent_jobs)
while jobs or jobs_in_progress:
time.sleep(1)
_poll_and_launch(jobs, jobs_in_progress)
def _launch_initial_jobs(jobs, max_concurrent_jobs):
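    # Start up to max_concurrent_jobs deflation subprocesses and return the
    # running jobs in a dict keyed by process id.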
jobs_in_progress = {}
for process in range(int(max_concurrent_jobs)):
try:
job = jobs.pop(0)
except IndexError:
break
else:
job.start()
jobs_in_progress[job.pid] = job
return jobs_in_progress
def _poll_and_launch(jobs, jobs_in_progress):
for running_job in jobs_in_progress.copy().values():
if running_job.done:
result, _ = running_job.process.communicate()
logger.error(result) if result else logger.info(
"netCDF4 deflated {.filepath}".format(running_job)
)
jobs_in_progress.pop(running_job.pid)
try:
job = jobs.pop(0)
except IndexError:
continue
else:
job.start()
jobs_in_progress[job.pid] = job
| 32.672222 | 87 | 0.631185 | 3,210 | 0.545826 | 0 | 0 | 1,649 | 0.280394 | 0 | 0 | 2,783 | 0.473219 |
1c4260852b0f621da5efbc981c92c14d38f9bbe8 | 1,469 | py | Python | src/UQpy/distributions/collection/__init__.py | SURGroup/UncertaintyQuantification | a94c8db47d07134ea2b3b0a3ca53ca818532c3e6 | [
"MIT"
]
| null | null | null | src/UQpy/distributions/collection/__init__.py | SURGroup/UncertaintyQuantification | a94c8db47d07134ea2b3b0a3ca53ca818532c3e6 | [
"MIT"
]
| null | null | null | src/UQpy/distributions/collection/__init__.py | SURGroup/UncertaintyQuantification | a94c8db47d07134ea2b3b0a3ca53ca818532c3e6 | [
"MIT"
]
| null | null | null | """distributions module."""
from UQpy.distributions.collection.Beta import Beta
from UQpy.distributions.collection.Binomial import Binomial
from UQpy.distributions.collection.Cauchy import Cauchy
from UQpy.distributions.collection.ChiSquare import ChiSquare
from UQpy.distributions.collection.Exponential import Exponential
from UQpy.distributions.collection.Gamma import Gamma
from UQpy.distributions.collection.GeneralizedExtreme import GeneralizedExtreme
from UQpy.distributions.collection.InverseGaussian import InverseGauss
from UQpy.distributions.collection.Laplace import Laplace
from UQpy.distributions.collection.Levy import Levy
from UQpy.distributions.collection.Logistic import Logistic
from UQpy.distributions.collection.Lognormal import Lognormal
from UQpy.distributions.collection.Maxwell import Maxwell
from UQpy.distributions.collection.Multinomial import Multinomial
from UQpy.distributions.collection.MultivariateNormal import MultivariateNormal
from UQpy.distributions.collection.Normal import Normal
from UQpy.distributions.collection.Pareto import Pareto
from UQpy.distributions.collection.Poisson import Poisson
from UQpy.distributions.collection.Rayleigh import Rayleigh
from UQpy.distributions.collection.TruncatedNormal import TruncatedNormal
from UQpy.distributions.collection.Uniform import Uniform
from UQpy.distributions.collection.JointIndependent import JointIndependent
from UQpy.distributions.collection.JointCopula import JointCopula
| 58.76 | 79 | 0.884275 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 0.01838 |
1c4262cdeb92ebd6c335d957cdc8fd8bfca03129 | 190 | py | Python | Learning Python/Exercise Files/Ch2/helloworld_my.py | RomanShevtsiv/linkedin-learning | d7ec85953b7e88905f87928ede067d32344b984f | [
"MIT"
]
| null | null | null | Learning Python/Exercise Files/Ch2/helloworld_my.py | RomanShevtsiv/linkedin-learning | d7ec85953b7e88905f87928ede067d32344b984f | [
"MIT"
]
| null | null | null | Learning Python/Exercise Files/Ch2/helloworld_my.py | RomanShevtsiv/linkedin-learning | d7ec85953b7e88905f87928ede067d32344b984f | [
"MIT"
]
| null | null | null | #
# Example file for HelloWorld
#
def main():
print("Hello World")
name = input("What is your name? ")
print("Nice to meet you,", name)
if __name__ == "__main__":
main()
| 13.571429 | 39 | 0.594737 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 94 | 0.494737 |
1c42d191e50517487ce29edd00a0d3e85b40a9be | 15,309 | py | Python | RocketSimulation.py | pietrotrope/SolarSystemSimulation | 905eec31eb73e1203ee23a32846954b30bbc5925 | [
"MIT"
]
| null | null | null | RocketSimulation.py | pietrotrope/SolarSystemSimulation | 905eec31eb73e1203ee23a32846954b30bbc5925 | [
"MIT"
]
| null | null | null | RocketSimulation.py | pietrotrope/SolarSystemSimulation | 905eec31eb73e1203ee23a32846954b30bbc5925 | [
"MIT"
]
| null | null | null | import sys
import csv
import json
import math
import pygame
import numpy as np
from pygame.locals import *
import pandas as pd
from data import *
from agent import agentsList, Agent
global screenSize
screenSize = [1920, 1080]
def load_parameters(path):
package = []
file = open(path, 'r')
j = json.load(file)
for subgroup in j.values():
package.append([cast(x) for x in subgroup.values()])
env_variables = package.pop(4)
file.close()
return (package, env_variables)
def cast(x):
try:
return float(x)
except Exception:
return str(x)
class Environment:
def __init__(self, vars):
# Environmental Constants
self.elev, self.t, self.g, self.M_air, self.R, self.gamma, self.P_zero = vars # noqa
self.g_zero = self.g
self.Re = 6356766
# Layer base altitudes
self.hb = [0, 11000, 20000, 32000, 47000, 51000, 71000]
# Layer base pressures
self.Pb = [101325, 22632.1, 5474.89,
868.019, 110.906, 66.9389, 3.95642]
# Layer base temperatures
self.Tb = [288.15, 216.65, 216.65, 228.65, 270.65, 270.65, 214.65]
# Layer lapse rates
self.Lm = [-0.0065, 0.0, 0.001, 0.0028, 0.0, -0.0028, -0.002]
def get_geopotential_altitude(self, z: float) -> float:
return self.Re*z / (self.Re+z)
def atmo_heterosphere_equ(self, z: float, a, b, c, d, e):
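        # Exponential of a quartic polynomial fit in altitude (km); used by
        # get_pressure()/get_density() for the upper-atmosphere layers (b >= 7).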
z_km = z/1000
return math.exp(a * z_km**4 + b * z_km**3 + c * z_km**2 + d * z_km + e) # noqa
def get_gravity(self, z: float) -> float:
return self.g_zero * (self.Re / (self.Re + z))**2
def get_temp(self, z: float, h: float) -> float:
if h <= 84852:
for i in range(len(self.hb)-1):
if self.hb[i] <= h <= self.hb[i+1]:
return (self.Tb[i] + self.Lm[i]*(h-self.hb[i]), i)
return (self.Tb[i+1] + self.Lm[i+1]*(h-self.hb[i+1]), i+1)
elif 86000 < z <= 91000:
return (186.87, 7)
elif 91000 < z <= 110000:
if 91000 < z <= 100000:
layer = 8
elif 100000 < z <= 110000:
layer = 9
return (
263.1905 - 76.3232 * math.sqrt(1 - ((z - 91000) / -19942.9)**2), # noqa
layer
)
elif 110000 < z <= 120000:
return (240 + 0.012 * (z - 110000), 10)
elif 120000 < z <= 1000000:
if 120000 < z <= 150000:
layer = 11
elif 150000 < z <= 200000:
layer = 12
elif 200000 < z <= 300000:
layer = 13
elif 300000 < z <= 500000:
layer = 14
elif 500000 < z <= 750000:
layer = 15
elif 750000 < z <= 1000000:
layer = 16
xi = (z - 120000) * (6356766 + 120000) / (6356766 + z)
return (1000 - 640 * math.exp(-0.00001875 * xi), layer)
def get_pressure(self, z: float, h: float, T: float, b: int) -> float:
if b <= 6:
if self.Lm[b] != 0:
return self.Pb[b] * (self.Tb[b]/T)**(self.g_zero*self.M_air/(self.R*self.Lm[b])) # noqa
else:
return self.Pb[b] * math.exp(-self.g_zero * self.M_air * (h-self.hb[b]) / (self.R*self.Tb[b])) # noqa
elif b == 7:
return self.atmo_heterosphere_equ(
z, 0.000000, 2.159582e-6, -4.836957e-4, -0.1425192, 13.47530)
elif b == 8:
return self.atmo_heterosphere_equ(
z, 0.000000, 3.304895e-5, -0.009062730, 0.6516698, -11.03037)
elif b == 9:
return self.atmo_heterosphere_equ(
z, 0.000000, 6.693926e-5, -0.01945388, 1.719080, -47.75030)
elif b == 10:
return self.atmo_heterosphere_equ(
z, 0.000000, -6.539316e-5, 0.02485568, -3.223620, 135.9355)
elif b == 11:
return self.atmo_heterosphere_equ(
z, 2.283506e-7, -1.343221e-4, 0.02999016, -3.055446, 113.5764)
elif b == 12:
return self.atmo_heterosphere_equ(
z, 1.209434e-8, -9.692458e-6, 0.003002041, -0.4523015, 19.19151)
elif b == 13:
return self.atmo_heterosphere_equ(
z, 8.113942e-10, -9.822568e-7, 4.687616e-4, -0.1231710, 3.067409)
elif b == 14:
return self.atmo_heterosphere_equ(
z, 9.814674e-11, -1.654439e-7, 1.148115e-4, -0.05431334, -2.011365)
elif b == 15:
return self.atmo_heterosphere_equ(
z, -7.835161e-11, 1.964589e-7, -1.657213e-4, 0.04305869, -14.77132)
elif b == 16:
return self.atmo_heterosphere_equ(
z, 2.813255e-11, -1.120689e-7, 1.695568e-4, -0.1188941, 14.56718)
def get_density(self, z: float, P: float, T: float, b) -> float:
if b <= 6:
return (P * self.M_air)/(self.R * T)
elif b == 7:
return self.atmo_heterosphere_equ(
z, 0.000000, -3.322622E-06, 9.111460E-04, -0.2609971, 5.944694)
elif b == 8:
return self.atmo_heterosphere_equ(
z, 0.000000, 2.873405e-05, -0.008492037, 0.6541179, -23.62010)
elif b == 9:
return self.atmo_heterosphere_equ(
z, -1.240774e-05, 0.005162063, -0.8048342, 55.55996, -1443.338)
elif b == 10:
return self.atmo_heterosphere_equ(
z, 0.00000, -8.854164e-05, 0.03373254, -4.390837, 176.5294)
elif b == 11:
return self.atmo_heterosphere_equ(
z, 3.661771e-07, -2.154344e-04, 0.04809214, -4.884744, 172.3597)
elif b == 12:
return self.atmo_heterosphere_equ(
z, 1.906032e-08, -1.527799E-05, 0.004724294, -0.6992340, 20.50921)
elif b == 13:
return self.atmo_heterosphere_equ(
z, 1.199282e-09, -1.451051e-06, 6.910474e-04, -0.1736220, -5.321644)
elif b == 14:
return self.atmo_heterosphere_equ(
z, 1.140564e-10, -2.130756e-07, 1.570762e-04, -0.07029296, -12.89844)
elif b == 15:
return self.atmo_heterosphere_equ(
z, 8.105631e-12, -2.358417e-09, -2.635110e-06, -0.01562608, -20.02246)
elif b == 16:
return self.atmo_heterosphere_equ(
z, -3.701195e-12, -8.608611e-09, 5.118829e-05, -0.06600998, -6.137674)
def get_c(self, T: float) -> float:
return math.sqrt((self.gamma * self.R * T) / self.M_air)
def get_status(self, z: float):
h = round(self.get_geopotential_altitude(z), 0)
self.g = self.get_gravity(z)
self.T, b = self.get_temp(z, h)
self.P = self.get_pressure(z, h, self.T, b)
self.Rho = self.get_density(z, self.P, self.T, b)
self.c = self.get_c(self.T)
class System:
def __init__(self, params, env, burn_time: float):
package = params
print(package)
# Environment
self.env = env
# Burn time
self.num_steps = int(burn_time // self.env.t)
self.burn_time = self.num_steps * self.env.t
# Engine specs
self.etype = package[0][0]
package[0].pop(0)
if self.etype == "Liquid":
self.isp, self.thrust = package[0]
elif self.etype == "Solid":
self.isp, self.avg_thrust, path = package[0] # noqa
with(open(path)) as f:
csv_reader = csv.reader(f)
self.thrust_curve = {}
for row in csv_reader:
self.thrust_curve.update({
float(row[0]): float(row[1])
})
f.close()
# Fuel Specs
if self.etype == "Liquid":
self.OFratio, self.Reserve = package[1]
elif self.etype == "Solid":
self.OFratio = 0
self.Reserve = package[1][0]
# Flow Rate
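        # Propellant mass flow rate from the specific-impulse definition
        # w = thrust / (g0 * Isp), split into fuel and oxidizer by the O/F ratio.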
if self.etype == "Liquid":
self.w = (self.thrust/self.env.g_zero)/self.isp
elif self.etype == "Solid":
self.w = (self.avg_thrust/self.env.g_zero)/self.isp
self.dF = self.w * (1 / (self.OFratio + 1))
self.dOx = (self.w - self.dF)
# Fuel & Oxidizer
self.F = (self.dF * self.burn_time)/(1 - self.Reserve/100)
self.Ox = (self.dOx * self.burn_time)/(1 - self.Reserve/100)
# Mass
self.dry_mass = package[2][0]
# Aerodynamics
self.Cd, self.cross_section = package[3]
# Output
self.csvout = package[4][0]
self.field_names = ["t", "thrust", "drag", "m", "v", "mach", "a", "altitude",
"asl", "twr", "max_v", "max_mach", "max_acc", "min_acc", "max_g", "min_g"]
with open(self.csvout, "w", newline="") as f:
csv_writer = csv.writer(f)
csv_writer.writerow(self.field_names)
f.close()
# Flight
def launch(self):
"""Runs a simulation within the given parameters."""
# Variables setup
self.t = 0
self.altitude = 0
self.asl = self.altitude + self.env.elev
self.calc_mass()
self.env.get_status(self.asl)
self.calc_thrust()
self.calc_twr()
self.drag = 0
self.v = 0
self.max_v = 0
self.mach = 0
self.max_mach = 0
self.max_acc = 0
self.max_g = 0
self.min_acc = 0
self.min_g = 0
self.a = 0
self.j = 0
self.s = 0
# Used by matplotlib
self.data = [[], [], [], [], [], [], [], [], [], [], []]
# Accelaration phase
for i in range(self.num_steps):
# Output management
self.add_data()
# Environment-related
self.update_env()
# Thrust-related
self.calc_thrust()
# Accelaration/derivative-related
self.calc_acc()
self.calc_additional_derivatives()
# Position-related
self.set_altitude()
# Velocity-related
self.calc_velocity()
# Force-related
self.calc_drag()
self.calc_twr()
# Mass-related
self.calc_propellant()
self.calc_mass()
# Time-related
self.t += self.env.t
if self.a > self.max_acc:
self.max_acc = self.a
self.max_g = self.max_acc/self.env.g
if self.v > self.max_v:
self.max_v = self.v
self.max_mach = self.mach
self.thrust = 0
# Deceleration phase
while self.v > 0:
# Output management
self.add_data()
# Environment-related
self.update_env()
# Accelaration/derivative-related
self.calc_acc()
self.calc_additional_derivatives()
# Position-related
self.set_altitude()
# Velocity-related
self.calc_velocity()
# Force-related
self.calc_drag()
self.calc_twr()
# Mass-related
self.calc_mass()
# Time-related
self.t += self.env.t
if self.a < self.min_acc:
self.min_acc = self.a
self.min_g = self.min_acc/self.env.g
self.output("max_v", "max_mach", "max_acc",
"min_acc", "max_g", "min_g")
def suicide_burn(self):
"""Run a suicide burn simulation, will affct ascent simulation."""
self.Vt = math.sqrt((2 * self.m * self.env.g) / (self.env.Rho * self.cross_section * self.Cd)) # noqa
# Mass
def calc_mass(self):
self.propellant_mass = (self.Ox + self.F)
self.m = self.propellant_mass + self.dry_mass
def calc_propellant(self):
if self.etype == "Liquid":
self.w = (self.thrust/self.env.g_zero)/self.isp
elif self.etype == "Solid":
self.w = (self.avg_thrust/self.env.g_zero)/self.isp
self.dF = self.w * (1/(self.OFratio+1))
self.dOx = (self.w - self.dF)
self.Ox -= self.dOx * self.env.t
self.F -= self.dF * self.env.t
# Position
def set_altitude(self):
self.altitude += self.v * self.env.t + (self.a * self.env.t**2)/2 # noqa
self.asl = self.altitude + self.env.elev
# Derivatives of position
def calc_velocity(self):
self.v += self.a * self.env.t
self.mach = self.v/self.env.c
def calc_acc(self):
self.a = (self.thrust - (self.m * self.env.g + self.drag)) / self.m
def calc_additional_derivatives(self):
self.j = (self.a - self.data[4][-1]) / self.env.t
self.s = (self.j - self.data[5][-1]) / self.env.t
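        # Note (added): jerk and snap are backward finite differences of the last
        # recorded acceleration (data[4]) and jerk (data[5]) samples over one time step.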
# Forces
def calc_thrust(self):
if self.etype == "Liquid":
pass
elif self.etype == "Solid":
self.thrust = self.thrust_curve[round(self.t, 3)]
def calc_drag(self):
self.drag = 0.5 * (self.env.Rho * self.v**2 * self.Cd * self.cross_section) # noqa
def calc_twr(self):
self.twr = self.thrust / (self.m * self.env.g)
# Environment
def update_env(self):
self.env.get_status(self.asl)
    # Output
def output(self, *args):
values = []
for field in self.field_names:
value = str(round(eval(field, self.__dict__), 5))
values.append(value)
with open(self.csvout, "a", newline="") as f:
csv_writer = csv.writer(f)
csv_writer.writerow(values)
f.close()
def add_data(self):
self.data[0].append(self.t)
self.data[1].append(self.altitude)
self.data[2].append(self.v)
self.data[3].append(self.env.c)
self.data[4].append(self.a)
self.data[5].append(self.j)
self.data[6].append(self.s)
self.data[7].append(self.drag)
self.output("t", "thrust", "drag", "m", "v",
"mach", "a", "altitude", "asl", "twr")
def run_simulation(burn_time):
params = load_parameters("RocketSimulationData/info.json")
env = Environment(params[1])
s = System(params[0], env, burn_time)
s.launch()
def renderAgents(screen, res, ratio):
screen.fill((0, 0, 0))
pygame.draw.rect(screen, (0, 0, 255), (0, 1080-108, 1920, 108))
pos = screenSize[1]-158 - res["altitude"]*ratio
# print("altitude: "+str(res["altitude"])+", pos: "+str(pos))
pygame.draw.rect(screen, (255, 255, 255), (940, pos, 20, 50))
pygame.display.update()
def simulateRocket(screen):
run_simulation(150)
df = pd.read_csv('RocketSimulationData/Flight.csv')
result = df.to_dict("index")
ratio = screenSize[1]/1000000
interestingPoint = None
for res in result:
# print("time: "+str(result[res]["t"])+" Altitude: "+str(result[res]["altitude"]))
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
renderAgents(screen, result[res], ratio)
if result[res]["altitude"] < 800000:
interestingPoint = result[res]
pygame.display.update()
return interestingPoint
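# Minimal usage sketch (an assumption, not from the original file): the functions above
# expect pygame to be initialised and a global `screenSize` tuple to exist, e.g.
#
#     pygame.init()
#     screenSize = (1920, 1080)
#     screen = pygame.display.set_mode(screenSize)
#     apogee_point = simulateRocket(screen)
#     print(apogee_point)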
| 33.720264 | 118 | 0.528317 | 13,512 | 0.882618 | 0 | 0 | 0 | 0 | 0 | 0 | 1,390 | 0.090796 |
1c43093fa85de4f6e1de23a0ecc3b43530f42260 | 126 | py | Python | sourcecode/GAN/FID/__init__.py | toufeeqahamedns/GeneratingHumanFaces | 93048bf5f6ae99424f918b0d0fea46d21abee0cb | [
"MIT"
]
| null | null | null | sourcecode/GAN/FID/__init__.py | toufeeqahamedns/GeneratingHumanFaces | 93048bf5f6ae99424f918b0d0fea46d21abee0cb | [
"MIT"
]
| null | null | null | sourcecode/GAN/FID/__init__.py | toufeeqahamedns/GeneratingHumanFaces | 93048bf5f6ae99424f918b0d0fea46d21abee0cb | [
"MIT"
]
| null | null | null | """ Package has implementation for the FID score calculation
"""
from GAN.FID import fid_score
from GAN.FID import inception
| 21 | 60 | 0.785714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 64 | 0.507937 |
1c4353fef35e15660683e78f01919ecd4744808d | 574 | py | Python | flask_monitoringdashboard/test/core/profiler/util/test_stringhash.py | timgates42/Flask-MonitoringDashboard | 0404b05b9a8f1917796e0f314a77a53a754a0b15 | [
"MIT"
]
| 3 | 2020-07-17T05:37:41.000Z | 2021-09-06T19:46:15.000Z | flask_monitoringdashboard/test/core/profiler/util/test_stringhash.py | timgates42/Flask-MonitoringDashboard | 0404b05b9a8f1917796e0f314a77a53a754a0b15 | [
"MIT"
]
| null | null | null | flask_monitoringdashboard/test/core/profiler/util/test_stringhash.py | timgates42/Flask-MonitoringDashboard | 0404b05b9a8f1917796e0f314a77a53a754a0b15 | [
"MIT"
]
| 1 | 2020-11-21T01:25:51.000Z | 2020-11-21T01:25:51.000Z | import unittest
from flask_monitoringdashboard.core.profiler.util.stringHash import StringHash
class TestStringHash(unittest.TestCase):
def test_stringhash(self):
string_hash = StringHash()
self.assertEqual(string_hash.hash('abc'), 0)
self.assertEqual(string_hash.hash('def'), 1)
self.assertEqual(string_hash.hash('abc'), 0)
def test_unhash(self):
string_hash = StringHash()
self.assertEqual(string_hash.unhash(string_hash.hash('abc')), 'abc')
self.assertRaises(ValueError, string_hash.unhash, 'unknown')
| 31.888889 | 78 | 0.709059 | 475 | 0.827526 | 0 | 0 | 0 | 0 | 0 | 0 | 34 | 0.059233 |
1c45545157e97f9c4e1cc68b6cafb654b5d57282 | 439 | py | Python | news/views.py | valch85/newssite | ef612a7bde4ff1d6e1e35f5cc4ec9407f031270e | [
"Apache-2.0"
]
| null | null | null | news/views.py | valch85/newssite | ef612a7bde4ff1d6e1e35f5cc4ec9407f031270e | [
"Apache-2.0"
]
| 2 | 2020-02-12T00:16:37.000Z | 2020-06-05T20:42:49.000Z | news/views.py | valch85/newssite | ef612a7bde4ff1d6e1e35f5cc4ec9407f031270e | [
"Apache-2.0"
]
| null | null | null | from django.shortcuts import render, get_object_or_404
from .models import News
# Create your views here.
def index(request):
latest_news_list = News.objects.order_by('-pub_date')[:10]
context = {'latest_news_list': latest_news_list}
return render(request, 'news/index.html', context)
def detail(request, news_id):
new = get_object_or_404(News, pk=news_id)
return render(request, 'news/detail.html', {'new': new})
| 27.4375 | 62 | 0.728929 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 94 | 0.214123 |
1c4559619debbfab81b5667b6115f6d8185615c5 | 1,229 | py | Python | benchmark/generate_libs/jamplus.py | chadaustin/ibb | ea1e25cc53a1ad7c302a12d95fc704c443924dff | [
"MIT"
]
| 4 | 2015-04-09T17:24:58.000Z | 2019-07-02T12:05:56.000Z | benchmark/generate_libs/jamplus.py | chadaustin/ibb | ea1e25cc53a1ad7c302a12d95fc704c443924dff | [
"MIT"
]
| null | null | null | benchmark/generate_libs/jamplus.py | chadaustin/ibb | ea1e25cc53a1ad7c302a12d95fc704c443924dff | [
"MIT"
]
| 1 | 2019-11-08T15:38:29.000Z | 2019-11-08T15:38:29.000Z | #!/usr/bin/python
import os.path
import cppcodebase
import random
def CreateLibJamfile(lib_number, classes):
os.chdir(cppcodebase.lib_name(lib_number))
handle = file("Jamfile.jam", "w")
handle.write ("SubDir TOP lib_" + str(lib_number) + " ;\n\n")
handle.write ("SubDirHdrs $(INCLUDES) ;\n\n")
handle.write ("Library lib_" + str(lib_number) + " :\n")
for i in xrange(classes):
handle.write(' class_' + str(i) + '.cpp\n')
handle.write (' ;\n')
os.chdir('..')
def CreateFullJamfile(libs):
handle = file("Jamfile.jam", "w")
handle.write ("SubDir TOP ;\n\n")
for i in xrange(libs):
handle.write('SubInclude TOP ' + cppcodebase.lib_name(i) + ' ;\n')
handle.write('\nWorkspace GeneratedLibs :\n')
for i in xrange(libs):
handle.write('\t\t' + cppcodebase.lib_name(i) + '\n')
handle.write(';\n')
handle = file("Jamrules.jam", "w")
handle.write ('INCLUDES = $(TOP) ;\n')
def CreateCodebase(libs, classes, internal_includes, external_includes):
cppcodebase.SetDir('jamplus')
cppcodebase.CreateSetOfLibraries(libs, classes, internal_includes, external_includes, CreateLibJamfile)
CreateFullJamfile(libs)
os.chdir('..')
| 29.261905 | 107 | 0.643613 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 297 | 0.24166 |
1c45bee0b72f7290f98a152d2fd4047f74e16502 | 8,482 | py | Python | inbm/dispatcher-agent/dispatcher/fota/fota.py | intel/intel-inb-manageability | cdb17765120857fd41cacb838d6ee6e34e1f5047 | [
"Apache-2.0"
]
| 5 | 2021-12-13T21:19:31.000Z | 2022-01-18T18:29:43.000Z | inbm/dispatcher-agent/dispatcher/fota/fota.py | intel/intel-inb-manageability | cdb17765120857fd41cacb838d6ee6e34e1f5047 | [
"Apache-2.0"
]
| 45 | 2021-12-30T17:21:09.000Z | 2022-03-29T22:47:32.000Z | inbm/dispatcher-agent/dispatcher/fota/fota.py | intel/intel-inb-manageability | cdb17765120857fd41cacb838d6ee6e34e1f5047 | [
"Apache-2.0"
]
| 4 | 2022-01-26T17:42:54.000Z | 2022-03-30T04:48:04.000Z | """
FOTA update tool which is called from the dispatcher during installation
Copyright (C) 2017-2022 Intel Corporation
SPDX-License-Identifier: Apache-2.0
"""
import logging
import os
import platform
from threading import Timer
from typing import Any, Optional, Mapping
from future.moves.urllib.parse import urlparse
from inbm_common_lib.exceptions import UrlSecurityException
from inbm_common_lib.utility import canonicalize_uri
from inbm_common_lib.constants import REMOTE_SOURCE
from .constants import *
from .fota_error import FotaError
from .manifest import parse_tool_options, parse_guid, parse_hold_reboot_flag
from .os_factory import OsFactory, OsType
from ..common import dispatcher_state
from ..common.result_constants import *
from ..constants import UMASK_OTA
from ..dispatcher_callbacks import DispatcherCallbacks
from ..dispatcher_exception import DispatcherException
from ..downloader import download
from ..packagemanager.local_repo import DirectoryRepo
logger = logging.getLogger(__name__)
class FOTA:
"""AKA FOTA Tool
An instance of this class will be called from the
dispatcher if the requested type of update is FOTA
"""
def __init__(self,
parsed_manifest: Mapping[str, Optional[Any]],
repo_type: str,
dispatcher_callbacks: DispatcherCallbacks) -> None:
"""Base class constructor for variable assignment, to send telemetry info and create a new
directory if no repo is present
@param parsed_manifest: Parsed parameters from manifest
@param repo_type: OTA source location -> local or remote
@param dispatcher_callbacks: DispatcherCallbacks instance
"""
logger.debug(f"parsed_manifest: {parsed_manifest}")
self._ota_element = parsed_manifest.get('resource')
logger.debug(f"ota_element: {self._ota_element}")
self._dispatcher_callbacks = dispatcher_callbacks
self._uri: Optional[str] = parsed_manifest['uri']
self._repo_type = repo_type
repo_path: Optional[str]
"""If repo_type=local, then use path and not URI"""
if self._repo_type == REMOTE_SOURCE:
if not self._uri:
raise FotaError("missing URI.")
else:
self._pkg_filename = os.path.basename(urlparse(self._uri).path)
repo_path = None
else:
if self._ota_element is None or 'path' not in self._ota_element:
raise FotaError('attempting to use local repo for FOTA but no path specified')
self._pkg_filename = os.path.basename(self._ota_element['path'])
path = self._ota_element.get('path', None)
logger.debug(f"path: {path}")
if path is None:
repo_path = None
else:
repo_path = os.path.dirname(path)
logger.debug(f"repo_path: {repo_path}")
self.__signature = parsed_manifest['signature']
self._hash_algorithm = parsed_manifest['hash_algorithm']
self._username = parsed_manifest['username']
self._password = parsed_manifest['password']
if self._dispatcher_callbacks is None:
raise FotaError("dispatcher_callbacks not specified in FOTA constructor")
self._dispatcher_callbacks.broker_core.telemetry("Firmware Update Tool launched")
if repo_path:
logger.debug("Using manifest specified repo path")
self._repo = DirectoryRepo(repo_path)
else:
logger.debug("Using default repo path")
self._repo = DirectoryRepo(CACHE)
def install(self) -> Result:
"""checks current platform versions and then issues download
and install. Performs clean() in failure conditions
@return: (Result) containing status code and message
"""
logger.debug("")
return_message: Result = Result()
hold_reboot = False
try:
factory = OsFactory.get_factory(
self._verify_os_supported(), self._ota_element, self._dispatcher_callbacks)
bios_vendor, platform_product = factory.create_upgrade_checker().check()
if self._repo_type.lower() == REMOTE_SOURCE:
# need to perform this check here because some FOTA commands don't have a URI -- see constructor
# (instead they have a path)
if self._uri is None:
raise FotaError(
"internal error: _uri uninitialized in Fota.install with download requested in manifest")
uri = canonicalize_uri(self._uri)
download(dispatcher_callbacks=self._dispatcher_callbacks,
uri=uri,
repo=self._repo,
umask=UMASK_OTA,
username=self._username,
password=self._password)
else:
logger.debug("Skipping FOTA upgradable check for local repo")
if self._ota_element is None:
raise FotaError("missing ota_element")
tool_options = parse_tool_options(self._ota_element)
logger.debug(f"tool_options: {tool_options}")
guid = parse_guid(self._ota_element)
logger.debug(f"guid: {guid}")
hold_reboot = parse_hold_reboot_flag(self._ota_element)
logger.debug(f"holdReboot: {hold_reboot}; pkg_filename: {self._pkg_filename}")
factory.create_installer(self._repo, FOTA_CONF_PATH, FOTA_CONF_SCHEMA_LOC).\
install(guid=guid,
tool_options=tool_options,
pkg_filename=self._pkg_filename,
signature=self.__signature,
hash_algorithm=self._hash_algorithm,
bios_vendor=bios_vendor,
platform_product=platform_product)
def trigger_reboot() -> None:
"""This method triggers a reboot."""
factory.create_rebooter().reboot()
if not hold_reboot:
logger.debug("")
state = {'restart_reason': "fota"}
dispatcher_state.write_dispatcher_state_to_state_file(state)
time_to_trigger_reboot = Timer(0.1, trigger_reboot)
time_to_trigger_reboot.start()
return_message = COMMAND_SUCCESS
else:
status = 'Reboot on hold after Firmware update...'
state = {'restart_reason': "pota"}
dispatcher_state.write_dispatcher_state_to_state_file(state)
logger.debug(status)
self._dispatcher_callbacks.broker_core.telemetry(status)
except (DispatcherException, FotaError, UrlSecurityException, ValueError, FileNotFoundError) as e:
error = 'Firmware Update Aborted: ' + str(e)
logger.error(error)
self._dispatcher_callbacks.broker_core.telemetry(error)
return_message = INSTALL_FAILURE
self._repo.delete(self._pkg_filename)
# In POTA, mender file needs to be deleted also.
if hold_reboot:
self._repo.delete_all()
finally:
if return_message == COMMAND_SUCCESS:
status = 'Firmware update in process...'
else:
status = 'Firmware Update Aborted'
dispatcher_state.clear_dispatcher_state()
logger.debug('Firmware update status: ' + status)
self._dispatcher_callbacks.broker_core.telemetry(status)
return return_message
@staticmethod
def _verify_os_supported():
"""checks if the current OS is supported.
@return True if OS is supported; otherwise, false.
@raise ValueError Unsupported OS
"""
logger.debug("")
os_type = platform.system()
logger.debug(f"os_type: {os_type}")
if os_type in OsType.__members__:
return os_type
else:
logger.error("Unsupported OS type.")
raise ValueError('Unsupported OS type.')
def check(self) -> None:
"""validate the manifest before FOTA"""
logger.debug("")
factory = OsFactory.get_factory(
self._verify_os_supported(), self._ota_element, self._dispatcher_callbacks)
factory.create_upgrade_checker().check()
| 42.838384 | 113 | 0.630512 | 7,455 | 0.87892 | 0 | 0 | 498 | 0.058713 | 0 | 0 | 2,235 | 0.263499 |
1c46065a2d7cec80d32a5396991fd1b74b074e66 | 8,727 | py | Python | syncflux.py | nagylzs/syncflux | c070267065cad817708d0680e17bfe5f8942310f | [
"Apache-2.0"
]
| null | null | null | syncflux.py | nagylzs/syncflux | c070267065cad817708d0680e17bfe5f8942310f | [
"Apache-2.0"
]
| null | null | null | syncflux.py | nagylzs/syncflux | c070267065cad817708d0680e17bfe5f8942310f | [
"Apache-2.0"
]
| null | null | null | import copy
import datetime
import sys
import os
import time
import argparse
import traceback
import pytz
import syncthing
from influxdb import InfluxDBClient
import yaml
from yaml2dataclass import Schema, SchemaPath
from typing import Optional, Dict, Type, List
from dataclasses import dataclass, asdict, field
@dataclass
class SyncthingConfiguration(Schema):
name: str
api_key: str
host: str = 'localhost'
port: int = field(default=8384)
timeout: float = field(default=10.0)
is_https: bool = field(default=False)
ssl_cert_file: Optional[str] = field(default=None)
tags: Optional[List[str]] = field(default_factory=lambda: [])
def get_client_params(self):
result = asdict(self)
if "name" in result:
del result["name"]
if "tags" in result:
del result["tags"]
return result
@dataclass
class InfluxDbConfiguration(Schema):
host: str
port: int # Common ports: 443
ssl: bool
verify_ssl: bool
database: str
username: str
password: str
def get_client_params(self):
result = asdict(self)
if "tags" in result:
del result["tags"]
return result
@dataclass
class MeasurementConfiguration(Schema):
devices: str
folders: str
@dataclass
class AppConfiguration(Schema):
syncthings: Dict[str, SyncthingConfiguration]
influxes: Dict[str, InfluxDbConfiguration]
measurements: MeasurementConfiguration
@classmethod
def _load_dict(cls, props_dict, dest_cls: Type[Schema], add_name: bool = False):
result = {}
for name, value in props_dict.items():
arguments = {}
arguments.update(value)
if add_name:
arguments["name"] = name
result[name] = dest_cls.scm_load_from_dict(arguments)
return result
@classmethod
def scm_convert(cls, values: dict, path: SchemaPath):
values["syncthings"] = cls._load_dict(values["syncthings"], SyncthingConfiguration, True)
values["influxes"] = cls._load_dict(values["influxes"], InfluxDbConfiguration)
return values
def load_app_config(stream) -> AppConfiguration:
"""Load application configuration from a stream."""
obj = yaml.safe_load(stream)
return AppConfiguration.scm_load_from_dict(obj)
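# Illustrative YAML shape for the file parsed above (all names are placeholders; the
# fields mirror SyncthingConfiguration, InfluxDbConfiguration and MeasurementConfiguration):
#
#     syncthings:
#       home:
#         api_key: "xxxx"
#         host: localhost
#         port: 8384
#     influxes:
#       main:
#         host: influx.example.com
#         port: 443
#         ssl: true
#         verify_ssl: true
#         database: syncthing
#         username: monitor
#         password: "xxxx"
#     measurements:
#       devices: syncthing_devices
#       folders: syncthing_folders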
def error(message: str):
sys.stderr.write("\nerror: " + message + "\n")
sys.stderr.flush()
raise SystemExit(-1)
def info(*values):
if not args.silent:
print(*values)
def main():
# Collect data
points = []
for sync in config.syncthings.values():
info(" Connect syncthing %s" % sync.name)
proto_tags = {"cfg_name": sync.name}
if sync.tags:
proto_tags.update(sync.tags)
conn_args = sync.get_client_params()
q_started = time.time()
conn = syncthing.Syncthing(**conn_args)
now = datetime.datetime.now(tz=pytz.UTC)
sync_cfg = conn.system.config()
# My own device id
my_device = sync_cfg["defaults"]["folder"]["devices"][0]
my_id = my_device["deviceID"]
proto_tags["my_id"] = my_id
# Collect device stats
device_stats = conn.stats.device()
# List all remote devices
remote_devices = []
for device in sync_cfg["devices"]:
device_id = device["deviceID"]
if device_id == my_id:
proto_tags["my_name"] = device["name"]
else:
stats = device_stats[device_id]
last_seen = syncthing.parse_datetime(stats["lastSeen"])
last_seen_since = now - last_seen
remote_devices.append({
"tags": {
"id": device["deviceID"], # Device ID
"name": device["name"], # Device Name
},
"fields": {
"last_seen_since_sec": last_seen_since.total_seconds(), # Number of seconds last seen
}
})
# Folders
folders = []
for folder in sync_cfg["folders"]:
# Get completion for my own device
completion = conn.database.completion(my_id, folder["id"])
folders.append({
"tags": {"id": folder["id"], "label": folder["label"], "path": folder["path"]},
"fields": {"completion": completion},
})
q_elapsed = time.time() - q_started
proto_fields = {"q_elapsed": q_elapsed}
# Create data points for devices
for device in remote_devices:
tags = copy.copy(proto_tags)
tags.update(device["tags"])
fields = copy.copy(proto_fields)
fields.update(device["fields"])
point = dict(measurement=config.measurements.devices, tags=tags, fields=fields)
points.append(point)
# Create points for folders
for folder in folders:
tags = copy.copy(proto_tags)
tags.update(folder["tags"])
fields = copy.copy(proto_fields)
fields.update(folder["fields"])
point = dict(measurement=config.measurements.folders, tags=tags, fields=fields)
points.append(point)
if not points:
return
for influx_name, influx in config.influxes.items():
info(" Sending %d point(s) to influxdb %s" % (len(points), influx_name))
try:
influx = config.influxes[influx_name]
client = InfluxDBClient(**asdict(influx))
client.write_points(points)
except:
if args.halt_on_send_error:
raise
else:
traceback.print_exc(file=sys.stderr)
parser = argparse.ArgumentParser(description='Monitor your Syncthing instances with influxdb.')
parser.add_argument('-c', "--config", dest="config", default=None,
help="Configuration file for application. Default is syncflux.yml. "
"See syncflux_example.yml for an example.")
parser.add_argument("--config-dir", dest="config_dir", default=None,
help="Configuration directory. All config files with .yml extension will be processed one by one.")
parser.add_argument('-n', "--count", dest="count", default=1, type=int,
help="Number of test runs. Default is one. Use -1 to run indefinitely.")
parser.add_argument('-w', "--wait", dest="wait", default=60, type=float,
help="Number of seconds between test runs.")
parser.add_argument("-s", "--silent", dest='silent', action="store_true", default=False,
help="Supress all messages except errors.")
parser.add_argument("-v", "--verbose", dest='verbose', action="store_true", default=False,
help="Be verbose."
)
parser.add_argument("--halt-on-send-error", dest="halt_on_send_error", default=False, action="store_true",
help="Halt when cannot send data to influxdb. The default is to ignore the error.")
args = parser.parse_args()
if args.silent and args.verbose:
parser.error("Cannot use --silent and --verbose at the same time.")
if (args.config is not None) and (args.config_dir is not None):
    parser.error("You must give either --config or --config-dir (exactly one of them)")
if args.config is None and args.config_dir is None:
    args.config = "syncflux.yml"
if args.count == 0:
parser.error("Test run count cannot be zero.")
if args.wait <= 0:
parser.error("Wait time must be positive.")
if args.config:
config_files = [args.config]
else:
config_files = []
for file_name in sorted(os.listdir(args.config_dir)):
ext = os.path.splitext(file_name)[1]
if ext.lower() == ".yml":
fpath = os.path.join(args.config_dir, file_name)
config_files.append(fpath)
index = 0
while args.count < 0 or index < args.count:
if args.count != 1:
info("Pass #%d started" % (index + 1))
started = time.time()
for config_file in config_files:
if not os.path.isfile(config_file):
parser.error("Cannot open %s" % config_file)
config = load_app_config(open(config_file, "r"))
main()
elapsed = time.time() - started
index += 1
last_one = (args.count > 0) and (index == args.count)
if not last_one:
remaining = args.wait - elapsed
if remaining > 0:
if not args.silent:
info("Pass #%d elapsed %.2f sec, waiting %.2f sec for next." % (index, elapsed, remaining))
            time.sleep(remaining)
else:
info("Pass #%d elapsed %.2f sec" % (index, elapsed))
info("")
| 33.694981 | 119 | 0.605936 | 1,776 | 0.203506 | 0 | 0 | 1,820 | 0.208548 | 0 | 0 | 1,765 | 0.202246 |
1c48374373ae16db6dbcfd16316661e717dab9fc | 5,230 | py | Python | tests/input/pdf/test_pdf.py | asweeney86/preview-generator | 354cbac1c131ebbb81cd9cfd9b4bc0c184d10103 | [
"MIT"
]
| null | null | null | tests/input/pdf/test_pdf.py | asweeney86/preview-generator | 354cbac1c131ebbb81cd9cfd9b4bc0c184d10103 | [
"MIT"
]
| null | null | null | tests/input/pdf/test_pdf.py | asweeney86/preview-generator | 354cbac1c131ebbb81cd9cfd9b4bc0c184d10103 | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
import os
import re
import shutil
import typing
from PIL import Image
from PyPDF2 import PdfFileReader
import PyPDF2.utils
import pytest
from preview_generator.exception import UnavailablePreviewType
from preview_generator.manager import PreviewManager
from tests import test_utils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
CACHE_DIR = "/tmp/preview-generator-tests/cache"
PDF_FILE_PATH = os.path.join(CURRENT_DIR, "the_pdf.pdf")
PDF_FILE_PATH__ENCRYPTED = os.path.join(CURRENT_DIR, "the_pdf.encrypted.pdf")
PDF_FILE_PATH__A4 = os.path.join(CURRENT_DIR, "qpdfconvert.pdf")
def setup_function(function: typing.Callable) -> None:
shutil.rmtree(CACHE_DIR, ignore_errors=True)
def test_to_jpeg() -> None:
manager = PreviewManager(cache_folder_path=CACHE_DIR, create_folder=True)
assert manager.has_jpeg_preview(file_path=PDF_FILE_PATH) is True
path_to_file = manager.get_jpeg_preview(
file_path=PDF_FILE_PATH, height=512, width=321, force=True
)
assert os.path.exists(path_to_file) is True
assert os.path.getsize(path_to_file) > 0
assert re.match(test_utils.CACHE_FILE_PATH_PATTERN__JPEG, path_to_file)
with Image.open(path_to_file) as jpeg:
assert jpeg.height in range(453, 455)
assert jpeg.width == 321
def test_to_jpeg__encrypted_pdf() -> None:
    with pytest.raises(PyPDF2.utils.PdfReadError):  # ensure file is encrypted
pdf = PdfFileReader(PDF_FILE_PATH__ENCRYPTED)
pdf.getPage(0)
manager = PreviewManager(cache_folder_path=CACHE_DIR, create_folder=True)
assert manager.has_jpeg_preview(file_path=PDF_FILE_PATH) is True
path_to_file = manager.get_jpeg_preview(
file_path=PDF_FILE_PATH__ENCRYPTED, height=512, width=321, force=True
)
assert os.path.exists(path_to_file) is True
assert os.path.getsize(path_to_file) > 0
assert re.match(test_utils.CACHE_FILE_PATH_PATTERN__JPEG, path_to_file)
with Image.open(path_to_file) as jpeg:
assert jpeg.height in range(453, 455)
assert jpeg.width == 321
def test_to_jpeg_no_size() -> None:
manager = PreviewManager(cache_folder_path=CACHE_DIR, create_folder=True)
assert manager.has_jpeg_preview(file_path=PDF_FILE_PATH) is True
path_to_file = manager.get_jpeg_preview(file_path=PDF_FILE_PATH, force=True)
assert os.path.exists(path_to_file) is True
assert os.path.getsize(path_to_file) > 0
assert re.match(test_utils.CACHE_FILE_PATH_PATTERN__JPEG, path_to_file)
with Image.open(path_to_file) as jpeg:
assert jpeg.height == 256
assert jpeg.width in range(180, 182)
def test_to_text() -> None:
manager = PreviewManager(cache_folder_path=CACHE_DIR, create_folder=True)
assert manager.has_text_preview(file_path=PDF_FILE_PATH) is False
with pytest.raises(UnavailablePreviewType):
manager.get_text_preview(file_path=PDF_FILE_PATH, force=True)
def test_to_json() -> None:
manager = PreviewManager(cache_folder_path=CACHE_DIR, create_folder=True)
assert manager.has_json_preview(file_path=PDF_FILE_PATH) is True
manager.get_json_preview(file_path=PDF_FILE_PATH, force=True)
# TODO - G.M - 2018-11-06 - To be completed
def test_to_pdf() -> None:
manager = PreviewManager(cache_folder_path=CACHE_DIR, create_folder=True)
assert manager.has_pdf_preview(file_path=PDF_FILE_PATH) is True
manager.get_pdf_preview(file_path=PDF_FILE_PATH, force=True)
# TODO - G.M - 2018-11-06 - To be completed
def test_to_pdf_one_page() -> None:
manager = PreviewManager(cache_folder_path=CACHE_DIR, create_folder=True)
assert manager.has_pdf_preview(file_path=PDF_FILE_PATH) is True
path_0 = manager.get_pdf_preview(file_path=PDF_FILE_PATH, page=0, force=True)
assert os.path.exists(path_0) is True
assert os.path.getsize(path_0) > 1000 # verify if the size of the pdf refer to a normal content
assert re.match(test_utils.CACHE_FILE_PATH_PATTERN_WITH_PAGE__PDF, path_0)
pdf = PdfFileReader(open(path_0, "rb"))
assert pdf.getNumPages() == 1
path_1 = manager.get_pdf_preview(file_path=PDF_FILE_PATH, page=1, force=True)
assert os.path.exists(path_1) is True
assert os.path.getsize(path_1) > 1000 # verify if the size of the pdf refer to a normal content
assert re.match(test_utils.CACHE_FILE_PATH_PATTERN_WITH_PAGE__PDF, path_1)
pdf = PdfFileReader(open(path_1, "rb"))
assert pdf.getNumPages() == 1
def test_algorithm4() -> None:
manager = PreviewManager(cache_folder_path=CACHE_DIR, create_folder=True)
assert manager.has_jpeg_preview(file_path=PDF_FILE_PATH__A4) is True
path_to_file = manager.get_jpeg_preview(file_path=PDF_FILE_PATH__A4, force=True)
with Image.open(path_to_file) as jpeg:
assert jpeg.height == 256
assert jpeg.width in range(180, 182)
def test_get_nb_page() -> None:
manager = PreviewManager(cache_folder_path=CACHE_DIR, create_folder=True)
nb_page = manager.get_page_nb(file_path=PDF_FILE_PATH)
assert nb_page == 2
nb_page = manager.get_page_nb(file_path=PDF_FILE_PATH__ENCRYPTED)
assert nb_page == 2
nb_page = manager.get_page_nb(file_path=PDF_FILE_PATH__A4)
assert nb_page == 2
| 39.621212 | 100 | 0.759656 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 348 | 0.066526 |
1c4921cfeca9e8e27f2d0b623dc27dabba9abc92 | 10,495 | py | Python | ipt/ipt_filter_contour_by_size.py | tpmp-inra/ipapi | b0f6be8960a20dbf95ef9df96efdd22bd6e031c5 | [
"MIT"
]
| 1 | 2020-06-30T06:53:36.000Z | 2020-06-30T06:53:36.000Z | ipt/ipt_filter_contour_by_size.py | tpmp-inra/ipapi | b0f6be8960a20dbf95ef9df96efdd22bd6e031c5 | [
"MIT"
]
| null | null | null | ipt/ipt_filter_contour_by_size.py | tpmp-inra/ipapi | b0f6be8960a20dbf95ef9df96efdd22bd6e031c5 | [
"MIT"
]
| null | null | null | from ipso_phen.ipapi.base.ipt_abstract import IptBase
from ipso_phen.ipapi.tools import regions
import numpy as np
import cv2
import logging
logger = logging.getLogger(__name__)
from ipso_phen.ipapi.base import ip_common as ipc
class IptFilterContourBySize(IptBase):
def build_params(self):
self.add_enabled_checkbox()
self.add_spin_box(
name="min_threshold",
desc="Lower bound limit",
default_value=0,
minimum=0,
maximum=100000000,
hint="Only contours bigger than lower limit bound will be kept",
)
self.add_spin_box(
name="max_threshold",
desc="Upper bound limit",
default_value=100000000,
minimum=0,
maximum=100000000,
hint="Only contours smaller than lower limit bound will be kept",
)
self.add_roi_selector()
def process_wrapper(self, **kwargs):
"""
Filter contour by size:
        Keep or discard contours according to their size
Real time: False
Keyword Arguments (in parentheses, argument name):
* Activate tool (enabled): Toggle whether or not tool is active
* Lower bound limit (min_threshold): Only contours bigger than lower limit bound will be kept
* Upper bound limit (max_threshold): Only contours smaller than lower limit bound will be kept
* Name of ROI to be used (roi_names): Operation will only be applied inside of ROI
* ROI selection mode (roi_selection_mode):
"""
wrapper = self.init_wrapper(**kwargs)
if wrapper is None:
return False
res = False
try:
if self.get_value_of("enabled") == 1:
mask = self.get_mask()
if mask is None:
logger.error(f"FAIL {self.name}: mask must be initialized")
return
lt, ut = self.get_value_of("min_threshold"), self.get_value_of(
"max_threshold"
)
# Get source contours
contours = [
c
for c in ipc.get_contours(
mask=mask,
retrieve_mode=cv2.RETR_LIST,
method=cv2.CHAIN_APPROX_SIMPLE,
)
if cv2.contourArea(c, True) < 0
]
contours.sort(key=lambda x: cv2.contourArea(x), reverse=True)
colors = ipc.build_color_steps(step_count=len(contours))
dbg_img = np.dstack(
(np.zeros_like(mask), np.zeros_like(mask), np.zeros_like(mask))
)
for clr, cnt in zip(colors, contours):
cv2.drawContours(dbg_img, [cnt], 0, clr, -1)
dbg_img = np.dstack(
(
cv2.bitwise_and(dbg_img[:, :, 0], mask),
cv2.bitwise_and(dbg_img[:, :, 1], mask),
cv2.bitwise_and(dbg_img[:, :, 2], mask),
)
)
wrapper.store_image(
image=dbg_img,
text="all_contours",
)
fnt = (cv2.FONT_HERSHEY_SIMPLEX, 0.6)
for cnt in contours:
area_ = cv2.contourArea(cnt)
x, y, w, h = cv2.boundingRect(cnt)
x += w // 2 - 10
y += h // 2
if area_ > 0:
cv2.putText(
dbg_img,
f"{area_}",
(x, y),
fnt[0],
fnt[1],
(255, 255, 255),
2,
)
wrapper.store_image(
image=dbg_img,
text="all_contours_with_sizes",
)
dbg_img = np.dstack(
(np.zeros_like(mask), np.zeros_like(mask), np.zeros_like(mask))
)
out_mask = np.zeros_like(mask)
                # Colour all contours by size category (red: too small, blue: too large, white: kept)
size_cnts = np.dstack(
(np.zeros_like(mask), np.zeros_like(mask), np.zeros_like(mask))
)
for cnt in contours:
area_ = cv2.contourArea(cnt)
if area_ < lt:
cv2.drawContours(size_cnts, [cnt], 0, ipc.C_RED, -1)
elif area_ > ut:
cv2.drawContours(size_cnts, [cnt], 0, ipc.C_BLUE, -1)
else:
cv2.drawContours(size_cnts, [cnt], 0, ipc.C_WHITE, -1)
wrapper.store_image(image=size_cnts, text="cnts_by_size")
                # Same colouring, drawing the largest contours first
size_cnts = np.dstack(
(np.zeros_like(mask), np.zeros_like(mask), np.zeros_like(mask))
)
for cnt in sorted(
contours, key=lambda x: cv2.contourArea(x), reverse=True
):
area_ = cv2.contourArea(cnt)
if area_ < lt:
cv2.drawContours(size_cnts, [cnt], 0, ipc.C_RED, -1)
elif area_ > ut:
cv2.drawContours(size_cnts, [cnt], 0, ipc.C_BLUE, -1)
else:
cv2.drawContours(size_cnts, [cnt], 0, ipc.C_WHITE, -1)
wrapper.store_image(image=size_cnts, text="cnts_by_size_reversed")
for cnt in contours:
area_ = cv2.contourArea(cnt)
if not (lt < area_ < ut):
cv2.drawContours(dbg_img, [cnt], 0, ipc.C_RED, -1)
# Discarded contours borders
for cnt in contours:
area_ = cv2.contourArea(cnt)
if not (lt < area_ < ut):
cv2.drawContours(dbg_img, [cnt], 0, ipc.C_MAROON, 4)
# Kept contours
for cnt in contours:
area_ = cv2.contourArea(cnt)
if lt < area_ < ut:
cv2.drawContours(out_mask, [cnt], 0, 255, -1)
cv2.drawContours(dbg_img, [cnt], 0, ipc.C_GREEN, -1)
else:
cv2.drawContours(out_mask, [cnt], 0, 0, -1)
cv2.drawContours(dbg_img, [cnt], 0, ipc.C_RED, -1)
dbg_img = np.dstack(
(
cv2.bitwise_and(dbg_img[:, :, 0], mask),
cv2.bitwise_and(dbg_img[:, :, 1], mask),
cv2.bitwise_and(dbg_img[:, :, 2], mask),
)
)
# Discarded sizes
for cnt in contours:
area_ = cv2.contourArea(cnt)
if not (lt < area_ < ut):
x, y, w, h = cv2.boundingRect(cnt)
x += w // 2 - 10
y += h // 2
cv2.putText(
dbg_img,
f"{area_}",
(x, y),
fnt[0],
fnt[1],
ipc.C_RED,
thickness=2,
)
# Kept sizes
for cnt in contours:
area_ = cv2.contourArea(cnt)
if lt < area_ < ut:
x, y, w, h = cv2.boundingRect(cnt)
x += w // 2 - 10
y += h // 2
cv2.putText(
dbg_img,
f"{area_}",
(x, y),
fnt[0],
fnt[1],
ipc.C_LIME,
thickness=2,
)
out_mask = cv2.bitwise_and(
out_mask,
mask,
)
# Apply ROIs if needed
rois = self.get_ipt_roi(
wrapper=wrapper,
roi_names=self.get_value_of("roi_names").replace(" ", "").split(","),
selection_mode=self.get_value_of("roi_selection_mode"),
)
if rois:
untouched_mask = regions.delete_rois(rois=rois, image=self.get_mask())
self.result = cv2.bitwise_or(
untouched_mask, regions.keep_rois(rois=rois, image=out_mask)
)
self.demo_image = cv2.bitwise_or(
dbg_img,
np.dstack((untouched_mask, untouched_mask, untouched_mask)),
)
else:
self.result = out_mask
self.demo_image = dbg_img
wrapper.store_image(image=self.result, text="filtered_contours")
wrapper.store_image(image=self.demo_image, text="tagged_contours")
res = True
else:
wrapper.store_image(wrapper.current_image, "current_image")
res = True
except Exception as e:
res = False
logger.exception(f"Filter contour by size FAILED, exception: {repr(e)}")
else:
pass
finally:
return res
@property
def name(self):
return "Filter contour by size"
@property
def package(self):
return "TPMP"
@property
def real_time(self):
return False
@property
def result_name(self):
return "mask"
@property
def output_kind(self):
return "mask"
@property
def use_case(self):
return [ipc.ToolFamily.MASK_CLEANUP]
@property
def description(self):
return """'Keep or descard contours according to their size"""
| 38.443223 | 107 | 0.429252 | 10,248 | 0.976465 | 0 | 0 | 493 | 0.046975 | 0 | 0 | 1,419 | 0.135207 |
1c49c9837d339902372100015afa8dd09aa825df | 718 | py | Python | tests/main.py | deeso/json-search-replace | d1dd75cfaecb65bf8fcbad0c80a0bd839eccaa8d | [
"Apache-2.0"
]
| 1 | 2019-02-08T14:42:45.000Z | 2019-02-08T14:42:45.000Z | tests/main.py | deeso/manipin-json | d1dd75cfaecb65bf8fcbad0c80a0bd839eccaa8d | [
"Apache-2.0"
]
| null | null | null | tests/main.py | deeso/manipin-json | d1dd75cfaecb65bf8fcbad0c80a0bd839eccaa8d | [
"Apache-2.0"
]
| null | null | null | from wrapper_tests.upsert_test import *
from wrapper_tests.upsertvaluedict_test import *
import os
import logging
import sys
import argparse
import signal
logging.getLogger().setLevel(logging.DEBUG)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('[%(asctime)s - %(name)s] %(message)s')
ch.setFormatter(formatter)
logging.getLogger().addHandler(ch)
parser = argparse.ArgumentParser(
description='Unit testing for fiery snap.')
parser.add_argument('-config', type=str, default=None,
help='toml config for keys and such, see key.toml')
if __name__ == '__main__':
unittest.main()
os.kill(os.getpid(), signal.SIGKILL)
| 26.592593 | 71 | 0.721448 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 132 | 0.183844 |
1c4b4d3e7fde53ff67c2f5b9ffd3aee5b505137c | 598 | py | Python | Sending_email/email.py | Satyam-Bhalla/Python-Scripts | 39c46a362acd63cc5d1b9ab57ecb7250eaff35f7 | [
"MIT"
]
| 8 | 2018-09-25T16:30:12.000Z | 2022-03-25T05:13:43.000Z | Sending_email/email.py | Satyam-Bhalla/Python-Scripts | 39c46a362acd63cc5d1b9ab57ecb7250eaff35f7 | [
"MIT"
]
| 1 | 2021-03-31T18:43:43.000Z | 2021-03-31T18:43:43.000Z | Sending_email/email.py | Satyam-Bhalla/Python-Scripts | 39c46a362acd63cc5d1b9ab57ecb7250eaff35f7 | [
"MIT"
]
| 6 | 2018-01-29T19:00:42.000Z | 2022-03-25T05:13:47.000Z | import smtplib
gmail_user = 'your email'
gmail_password = 'your password'
sent_from = gmail_user
to = ['receiver email']  # Create a list for all the receivers
subject = 'OMG Super Important Message'
body = 'Hey, what\'s up?\n- You'
email_text = """\
From: %s
To: %s
Subject: %s

%s
""" % (sent_from, ", ".join(to), subject, body)
try:
server = smtplib.SMTP_SSL('smtp.gmail.com', 465)
server.ehlo()
server.login(gmail_user, gmail_password)
server.sendmail(sent_from, to, email_text)
server.close()
print('Email sent!')
except Exception as e:
print(e)
| 21.357143 | 61 | 0.653846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 213 | 0.356187 |
1c4b532b4b156dd08dc3bcca54d167230e8c8b2a | 4,532 | py | Python | FlaskDaemon/load_test.py | caffeinate/test-pylot | 3380208ea0e7ee5fed4299f22ab592a3d3232b3a | [
"MIT"
]
| null | null | null | FlaskDaemon/load_test.py | caffeinate/test-pylot | 3380208ea0e7ee5fed4299f22ab592a3d3232b3a | [
"MIT"
]
| 1 | 2021-10-31T17:46:54.000Z | 2021-10-31T17:46:54.000Z | FlaskDaemon/load_test.py | caffeinate/test-pylot | 3380208ea0e7ee5fed4299f22ab592a3d3232b3a | [
"MIT"
]
| 1 | 2020-07-20T04:10:40.000Z | 2020-07-20T04:10:40.000Z | '''
Created on 11 Sep 2015
@author: si
'''
import json
import random
import time
from threading import Thread
# import urllib
import urllib2
from Queue import Queue
import logging
logger = logging.getLogger(__name__)
API_URL = "http://127.0.0.1:5000/"
class LoadTest(object):
"""
Create a single process with one thread per test user.
"""
def __init__(self, test_users_count, requests_per_user):
"""
@param test_users_count: int
@param requests_per_user: int
"""
self.thread_table = []
self.test_users_count = test_users_count
self.requests_per_user = requests_per_user
self.stats = { 'return_codes' : {},
'requests_made' : 0,
'total_seconds_waiting' : 0.0
}
self.stats_q = Queue(0)
def go(self):
start_time = time.time()
msg = "%s test users with %s requests each..." % \
(self.test_users_count, self.requests_per_user)
self.logger(msg)
for i in range(self.test_users_count):
p = TestUser(i, self.requests_per_user, self.stats_q)
p.start()
self.thread_table.append(p)
end_time = time.time()
self.logger("time taken to create threads : %s" % (end_time-start_time,))
start_time = time.time()
# wait for threads to complete
while True:
alive_count = len(self.thread_table)
# could time.sleep(0.5) or just wait for all threads to finish
for p in self.thread_table:
if not p.is_alive():
alive_count -= 1
else:
p.join()
if alive_count == 0:
break
#print "alive:%s" % alive_count
end_time = time.time()
time_taken = end_time-start_time
self.logger("finished. Time taken : %s" % time_taken)
while not self.stats_q.empty():
user_stats = self.stats_q.get()
for http_status, count in user_stats['return_codes'].iteritems():
if http_status not in self.stats['return_codes']:
self.stats['return_codes'][http_status] = 0
self.stats['return_codes'][http_status] += count
self.stats['requests_made'] += user_stats['requests_made']
self.stats['total_seconds_waiting'] += user_stats['total_seconds_waiting']
print self.stats
# time_taken is real time not CPU
req_per_sec = float(self.stats['requests_made'])/time_taken
print "Requests per second: %s" % req_per_sec
def logger(self, msg):
logger.info(msg)
print msg
class TestUser(Thread):
"""
    Act like a user. A bit oversimplified at the moment.
"""
def __init__(self, user_id, requests_count, stats_queue):
super(TestUser, self).__init__()
self.remaining_request = requests_count
self.base_url = API_URL
self.stats_queue = stats_queue
self.user_id = user_id
def logger(self, msg):
logger.info(msg)
#print msg
def run(self):
"""
        Collects per-user stats and puts them on the stats queue for the main process.
"""
stats = { 'return_codes' : {},
'requests_made': self.remaining_request,
'total_seconds_waiting' : 0.0, # waiting for requests
}
while self.remaining_request > 0:
# sleep for average of half a second
time.sleep(random.random())
start_time = time.time()
# for POST
#raw = {}
#d = json.dumps(raw)
#h = {'Content-type': 'application/json'}
#req = urllib2.Request(self.base_url, data=d, headers=h)
# for GET
req = urllib2.Request(self.base_url)
f = urllib2.urlopen(req)
end_time = time.time()
d = end_time-start_time
stats['total_seconds_waiting'] += d
http_status = f.getcode()
if http_status not in stats['return_codes']:
stats['return_codes'][http_status] = 0
stats['return_codes'][http_status] += 1
self.remaining_request -= 1
self.logger("Thread %s finished: %s" % (self.user_id, stats))
self.stats_queue.put(stats, False)
if __name__ == '__main__':
l = LoadTest(10,30)
l.go()
| 29.23871 | 86 | 0.559797 | 4,205 | 0.927846 | 0 | 0 | 0 | 0 | 0 | 0 | 1,240 | 0.27361 |
1c4b641cf08d14aaba12ee7b055b0523dd40710b | 407 | py | Python | urls.py | jeylani99/Real-Estate | 5ccb4bf23c73b4acb77427faa202a15216ef58c3 | [
"Apache-2.0"
]
| null | null | null | urls.py | jeylani99/Real-Estate | 5ccb4bf23c73b4acb77427faa202a15216ef58c3 | [
"Apache-2.0"
]
| null | null | null | urls.py | jeylani99/Real-Estate | 5ccb4bf23c73b4acb77427faa202a15216ef58c3 | [
"Apache-2.0"
]
| null | null | null | from django.contrib import admin
from django.conf.urls import include,url
from . import views
urlpatterns = [
url(r'^$', views.IndexView.as_view(),name='index'),
#homeapp_detail_view_url
url(r'^(?P<pk>[0-9]+)/$',views.LocationView.as_view(),name='property'),
#homeapp/detailview/moredetailview
url(r'^([0-9]+)/(?P<pk>[0-9]+)/$',views.PropertyView.as_view(),name='propertyview'),
]
| 29.071429 | 88 | 0.668305 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 143 | 0.351351 |
1c4bdbc2c162e12eac3d923f38fe6b53d36966ae | 541 | py | Python | main.py | ngh3053/auto_spacing_with_tensorflow | 0569b734c087d13cdf6cbb8e79dd8c579d7e66e4 | [
"MIT"
]
| null | null | null | main.py | ngh3053/auto_spacing_with_tensorflow | 0569b734c087d13cdf6cbb8e79dd8c579d7e66e4 | [
"MIT"
]
| null | null | null | main.py | ngh3053/auto_spacing_with_tensorflow | 0569b734c087d13cdf6cbb8e79dd8c579d7e66e4 | [
"MIT"
]
| null | null | null | from utils import *
from model import Model2
if __name__ == '__main__':
train_data = DataLoader('../data/trainX.txt', '../data/trainY.txt')
test_data = DataLoader('../data/testX.txt', '../data/testY.txt')
train_data.set_batch(100)
test_data.set_batch(100)
char_dic = CharDic([train_data])
model = Model2(train_data=train_data,
test_data=test_data,
char_dic=char_dic,
model_name='bilstm_crf_n3_e300_h2002')
model.train()
model.test() | 28.473684 | 72 | 0.608133 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 114 | 0.210721 |
1c4c3d7288804166b00482d9413cd64068adedd3 | 3,475 | py | Python | src/sardana/taurus/qt/qtgui/extra_macroexecutor/macrodescriptionviewer.py | marc2332/sardana | 48dc9191baaa63f6c714d8c025e8f3f96548ad26 | [
"CC-BY-3.0"
]
| 43 | 2016-11-25T15:21:23.000Z | 2021-08-20T06:09:40.000Z | src/sardana/taurus/qt/qtgui/extra_macroexecutor/macrodescriptionviewer.py | marc2332/sardana | 48dc9191baaa63f6c714d8c025e8f3f96548ad26 | [
"CC-BY-3.0"
]
| 1,263 | 2016-11-25T15:58:37.000Z | 2021-11-02T22:23:47.000Z | src/sardana/taurus/qt/qtgui/extra_macroexecutor/macrodescriptionviewer.py | marc2332/sardana | 48dc9191baaa63f6c714d8c025e8f3f96548ad26 | [
"CC-BY-3.0"
]
| 58 | 2016-11-21T11:33:55.000Z | 2021-09-01T06:21:21.000Z | #!/usr/bin/env python
##############################################################################
##
# This file is part of Sardana
##
# http://www.sardana-controls.org/
##
# Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
# Sardana is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
##
# Sardana is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
##
# You should have received a copy of the GNU Lesser General Public License
# along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
"""
macrodescriptionviewer.py:
"""
import taurus.core
from taurus.external.qt import Qt
from taurus.qt.qtgui.base import TaurusBaseWidget
class TaurusMacroDescriptionViewer(Qt.QTextEdit, TaurusBaseWidget):
__pyqtSignals__ = ("modelChanged(const QString &)",)
def __init__(self, parent=None, designMode=False):
name = "TaurusMacroDescriptionView"
self.call__init__wo_kw(Qt.QTextEdit, parent)
self.call__init__(TaurusBaseWidget, name)
self.setReadOnly(True)
self.setFont(Qt.QFont("Courier", 9))
def defineStyle(self):
""" Defines the initial style for the widget """
self.updateStyle()
def getModelClass(self):
return taurus.core.taurusdevice.TaurusDevice
def updateStyle(self):
self.update()
def onMacroNameChanged(self, macroName):
"""Can be connected to an event emitted after macro name was changed.
As an argument receives macroName and ask BaseMacroServer object
about already prepared and stored in MacroInfoObj object macro description"""
macroServer = self.getModelObj()
if macroServer is None or macroName is None or macroName == "":
self.setText("")
return
self.setText(str(macroServer.getMacroInfoObj(macroName).doc))
def getFormatedToolTip(self, cache=True):
"""This method was overridden to get rid of the default tooltip of TaurusWidget"""
return ""
model = Qt.pyqtProperty("QString",
TaurusBaseWidget.getModel,
TaurusBaseWidget.setModel,
TaurusBaseWidget.resetModel)
useParentModel = Qt.pyqtProperty("bool",
TaurusBaseWidget.getUseParentModel,
TaurusBaseWidget.setUseParentModel,
TaurusBaseWidget.resetUseParentModel)
def test():
import sys
from sardana.taurus.core.tango.sardana.macroserver import registerExtensions
registerExtensions()
app = Qt.QApplication(sys.argv)
taurusMacroDescriptionView = TaurusMacroDescriptionViewer(designMode=1)
if len(sys.argv) != 2:
taurusMacroDescriptionView.setModel("macroserver/zreszela/1")
else:
taurusMacroDescriptionView.setModel(sys.argv[1])
taurusMacroDescriptionView.onMacroChanged("mv")
taurusMacroDescriptionView.show()
sys.exit(app.exec_())
if __name__ == "__main__":
test()
| 35.10101 | 90 | 0.649784 | 1,800 | 0.517986 | 0 | 0 | 0 | 0 | 0 | 0 | 1,466 | 0.421871 |
1c4d007b31f3f642fe520a7abaa4b88348fd22fe | 179 | py | Python | torch/metrics/accuracy_score.py | LilDataScientist/PyTorch-From-Scratch | ae3c0bffc5a36a9a7c123b98f52bdaa32fbedef6 | [
"MIT"
]
| null | null | null | torch/metrics/accuracy_score.py | LilDataScientist/PyTorch-From-Scratch | ae3c0bffc5a36a9a7c123b98f52bdaa32fbedef6 | [
"MIT"
]
| null | null | null | torch/metrics/accuracy_score.py | LilDataScientist/PyTorch-From-Scratch | ae3c0bffc5a36a9a7c123b98f52bdaa32fbedef6 | [
"MIT"
]
| null | null | null | import numpy as np
def accuracy_score(y_true, y_pred):
a = np.argmax(y_true, axis=1)
b = np.argmax(y_pred, axis=1)
return np.count_nonzero(a == b) / y_true.shape[0]
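# Quick illustrative check (not part of the original module): with one-hot rows,
# argmax recovers the class index per sample.
#
#     y_true = np.array([[1, 0], [0, 1], [0, 1]])
#     y_pred = np.array([[1, 0], [1, 0], [0, 1]])
#     accuracy_score(y_true, y_pred)  # 2 of 3 rows match -> 0.666...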
| 22.375 | 53 | 0.664804 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
1c4e17f4910c6d5e94aabd5e46b41369a206e931 | 462 | py | Python | asaas/financial_transactions.py | marlonjsilva/asaas_sdk_python | 871a199e8156d9baa9f78972232feee38b0608bb | [
"MIT"
]
| null | null | null | asaas/financial_transactions.py | marlonjsilva/asaas_sdk_python | 871a199e8156d9baa9f78972232feee38b0608bb | [
"MIT"
]
| 4 | 2022-02-16T13:53:36.000Z | 2022-02-16T14:10:40.000Z | asaas/financial_transactions.py | marlonjsilva/asaas_sdk_python | 871a199e8156d9baa9f78972232feee38b0608bb | [
"MIT"
]
| null | null | null | from asaas.typing import SyncAsync
from typing import Any, Optional, Dict
class FinancialTransactions:
def __init__(self, parent: Any) -> None:
self.parent = parent
def list(
self, query: Optional[Dict[Any, Any]] = None, **kwars: Any
) -> SyncAsync[Any]:
return self.parent.request(
path="/financialTransactions",
method="GET",
query=query,
            auth=kwargs.get("auth"),
)
| 25.666667 | 66 | 0.588745 | 385 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 35 | 0.075758 |
1c4fabe61f50bb8ab5d328236ac8daab3e74249e | 17,672 | py | Python | datahub/core/serializers.py | uktrade/data-hub-api | c698cba533ff002293b821d01916f6334549f778 | [
"MIT"
]
| 6 | 2019-12-02T16:11:24.000Z | 2022-03-18T10:02:02.000Z | datahub/core/serializers.py | uktrade/data-hub-api | c698cba533ff002293b821d01916f6334549f778 | [
"MIT"
]
| 1,696 | 2019-10-31T14:08:37.000Z | 2022-03-29T12:35:57.000Z | datahub/core/serializers.py | uktrade/data-hub-api | c698cba533ff002293b821d01916f6334549f778 | [
"MIT"
]
| 9 | 2019-11-22T12:42:03.000Z | 2021-09-03T14:25:05.000Z | from functools import partial
from uuid import UUID
from dateutil.parser import parse as dateutil_parse
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from rest_framework.fields import ReadOnlyField, UUIDField
from datahub.core.constants import Country as CountryEnum
from datahub.core.validate_utils import DataCombiner
from datahub.core.validators import InRule, OperatorRule, RulesBasedValidator, ValidationRule
from datahub.metadata.models import AdministrativeArea, Country
MAX_LENGTH = settings.CHAR_FIELD_MAX_LENGTH
class ConstantModelSerializer(serializers.Serializer):
"""Constant models serializer."""
id = serializers.ReadOnlyField()
name = serializers.ReadOnlyField()
disabled_on = serializers.ReadOnlyField()
class PermittedFieldsModelSerializer(serializers.ModelSerializer):
"""Lets you get permitted fields only.
Needs 'permissions' attribute on Meta class in following format:
permissions = {
'app_name.permission': 'field'
}
    If the user doesn't have the required permission, the corresponding field is filtered out.
Note: The current implementation does not allow access to the field if request.user is None.
"""
def get_fields(self):
"""Gets filtered dictionary of fields based on permissions."""
assert hasattr(self.Meta, 'permissions'), (
'Class {serializer_class} missing "Meta.permissions" attribute'.format(
serializer_class=self.__class__.__name__,
)
)
fields = super().get_fields()
request = self.context.get('request', None)
if request:
permissions = self.Meta.permissions
for permission, field in permissions.items():
if not request.user or not request.user.has_perm(permission):
del fields[field]
return fields
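# Illustrative sketch (hypothetical model and permission names, not part of this module):
# hide a sensitive field from users lacking a custom permission.
#
#     class CompanySerializer(PermittedFieldsModelSerializer):
#         class Meta:
#             model = Company
#             fields = ['id', 'name', 'archived_reason']
#             permissions = {
#                 'company.view_company_archived_reason': 'archived_reason',
#             }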
class NestedRelatedField(serializers.RelatedField):
"""DRF serialiser field for foreign keys and many-to-many fields.
Serialises as a dict with 'id' plus other specified keys.
"""
default_error_messages = {
'required': 'This field is required.',
'missing_pk': 'pk not provided.',
'does_not_exist': 'Invalid pk "{pk_value}" - object does not exist.',
        'incorrect_type': 'Incorrect type. Expected object, received {data_type}.',
}
def __init__(self, model, extra_fields=('name',), **kwargs):
"""Initialises the related field.
:param model: Model of the related field.
:param extra_fields: List of extra fields to include in the representation.
Can contain field names as strings or as tuples of
(field name, DRF field).
E.g. ['field1', ('field2', CharField())]
:param kwargs: Keyword arguments to pass to
RelatedField.__init__()
"""
super().__init__(**kwargs)
model_class = (apps.get_model(model) if isinstance(model, str) else
model)
self.pk_field = UUIDField()
self._fields = [
field if isinstance(field, tuple) else (field, ReadOnlyField())
for field in extra_fields
]
self._model = model_class
def get_queryset(self):
"""Returns the queryset corresponding to the model."""
return self._model.objects.all()
def to_internal_value(self, data):
"""Converts a user-provided value to a model instance."""
try:
if isinstance(data, (str, UUID)):
id_repr = data
else:
id_repr = data['id']
data = self.pk_field.to_internal_value(id_repr)
return self.get_queryset().get(pk=data)
except ObjectDoesNotExist:
self.fail('does_not_exist', pk_value=data)
except KeyError:
self.fail('missing_pk')
except (TypeError, ValueError):
self.fail('incorrect_type', data_type=type(data).__name__)
def to_representation(self, value):
"""Converts a model instance to a dict representation."""
if not value:
return value
extra = {
field_name: field.to_representation(getattr(value, field_name))
for field_name, field in self._fields
}
return {
**extra,
'id': self.pk_field.to_representation(value.pk),
}
def get_choices(self, cutoff=None):
"""Returns choices for DRF UI.
Standard implementation uses a dict, but that doesn't work as our
representation isn't hashable.
"""
queryset = self.get_queryset()
if queryset is None:
return ()
if cutoff is not None:
queryset = queryset[:cutoff]
return _Choices(
(
self.pk_field.to_representation(item.pk),
self.display_value(item),
)
for item in queryset
)
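# Illustrative usage (an assumption, not taken from this module): declaring
#
#     country = NestedRelatedField(Country, extra_fields=('name',))
#
# serialises the relation as {"id": "<uuid>", "name": "<country name>"} and, on input,
# accepts either a bare id (str/UUID) or a dict containing an "id" key, as handled by
# to_internal_value() above.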
RelaxedDateField = partial(serializers.DateField, input_formats=('iso-8601', '%Y/%m/%d'))
class RelaxedDateTimeField(serializers.Field):
"""
Relaxed DateTime field.
    The front end uses a free-text field for date filters, so we need to
    accept dates/datetimes in several different formats.
DRF DateTimeField doesn't offer that flexibility.
"""
default_error_messages = {
'invalid': 'Date is in incorrect format.',
}
def to_internal_value(self, data):
"""Parses data into datetime."""
try:
data = dateutil_parse(data)
except ValueError:
self.fail('invalid', value=data)
return data
def to_representation(self, value):
"""Formats the datetime using a normal DateTimeField."""
repr_field = serializers.DateTimeField()
return repr_field.to_representation(value)
class RelaxedURLField(serializers.URLField):
"""URLField subclass that prepends http:// to input and output when a scheme is not present."""
def to_internal_value(self, data):
"""Converts a user-provided value to an internal value."""
return super().to_internal_value(self._fix_missing_url_scheme(data))
def to_representation(self, value):
"""Converts a stored value to the external representation."""
return super().to_representation(self._fix_missing_url_scheme(value))
@staticmethod
def _fix_missing_url_scheme(value):
if value and '://' not in value:
return f'http://{value}'
return value
class _Choices:
"""Wrapper for choices to make them compatible with DRF."""
def __init__(self, choices):
self._choices = choices
def items(self):
"""Returns the choices."""
return self._choices
class AddressSerializer(serializers.ModelSerializer):
"""
ModelSerializer that can be used to simulate nested address objects.
E.g.
Model:
class MultiAddressModel(models.Model):
primary_address_1 = models.CharField(max_length=MAX_LENGTH)
primary_address_2 = models.CharField(max_length=MAX_LENGTH, blank=True)
primary_address_town = models.CharField(max_length=MAX_LENGTH)
primary_address_county = models.CharField(max_length=MAX_LENGTH, blank=True)
primary_address_country = models.ForeignKey(
Country, on_delete=models.PROTECT, related_name='+',
)
primary_address_postcode = models.CharField(max_length=MAX_LENGTH, blank=True)
secondary_address_1 = models.CharField(max_length=MAX_LENGTH, blank=True)
secondary_address_2 = models.CharField(max_length=MAX_LENGTH, blank=True, null=True)
secondary_address_town = models.CharField(max_length=MAX_LENGTH, blank=True)
secondary_address_county = models.CharField(max_length=MAX_LENGTH, blank=True)
secondary_address_country = models.ForeignKey(
Country, null=True, on_delete=models.SET_NULL, related_name='+',
)
secondary_address_postcode = models.CharField(max_length=MAX_LENGTH, blank=True)
Serializer:
class MultiAddressModelSerializer(serializers.ModelSerializer):
primary_address = AddressSerializer(
source_model=MultiAddressModel,
address_source_prefix='primary_address',
)
secondary_address = AddressSerializer(
source_model=MultiAddressModel,
address_source_prefix='secondary_address',
required=False,
allow_null=True,
)
class Meta:
model = MultiAddressModel
fields = ['primary_address', 'secondary_address']
Will produce the following API response:
{
'primary_address': {
'line_1': '2',
'line_2': '',
'town': 'London',
'county': '',
'postcode': '',
'country': {
'id': '80756b9a-5d95-e211-a939-e4115bead28a',
'name': 'United Kingdom',
},
},
'secondary_address': {
'line_1': '1',
'line_2': '',
'town': 'Muckamore',
'county': '',
'postcode': '',
'country': {
'id': '736a9ab2-5d95-e211-a939-e4115bead28a',
'name': 'Ireland',
},
},
},
Please note:
1. None values for CharFields will be converted to ''
2. If all address field values are blank the nested object in the response will return None
        E.g. Given the following fields' values:
secondary_address_1=''
secondary_address_2=''
secondary_address_town=''
secondary_address_county=''
secondary_address_postcode=''
secondary_address_country_id=None
The equivalent API response body will be:
'secondary_address': None
The same applies for changing the data.
3. If AddressSerializer has required=False, the validation is triggered only if at least
one of the fields is passed in.
"""
line_1 = serializers.CharField(
max_length=MAX_LENGTH,
allow_blank=True,
required=False,
default='',
source='{source_prefix}_1',
)
line_2 = serializers.CharField(
max_length=MAX_LENGTH,
allow_blank=True,
required=False,
default='',
source='{source_prefix}_2',
)
town = serializers.CharField(
max_length=MAX_LENGTH,
allow_blank=True,
required=False,
default='',
source='{source_prefix}_town',
)
county = serializers.CharField(
max_length=MAX_LENGTH,
allow_blank=True,
required=False,
default='',
source='{source_prefix}_county',
)
postcode = serializers.CharField(
max_length=MAX_LENGTH,
allow_blank=True,
required=False,
default='',
source='{source_prefix}_postcode',
)
area = NestedRelatedField(
AdministrativeArea,
allow_null=True,
required=False,
source='{source_prefix}_area',
)
country = NestedRelatedField(
Country,
allow_null=True,
required=False,
source='{source_prefix}_country',
)
REQUIRED_FIELDS = (
'line_1',
'town',
'country',
)
def __init__(
self, source_model, *args,
address_source_prefix='address', area_can_be_required=False,
postcode_can_be_required=False, **kwargs,
):
"""
Initialises the serializer.
It populates all necessary parts (e.g. Meta model, source, fields' source).
"""
# Define a custom Meta so that the Meta model can be specified as an argument
class MultiAddressMeta(self.Meta):
model = source_model
self.Meta = MultiAddressMeta
kwargs.setdefault('source', '*')
super().__init__(*args, **kwargs)
# populate fields' source
for field in self.fields.values():
field.source = field.source.format(source_prefix=address_source_prefix)
field.source_attrs = field.source.split('.')
self.area_can_be_required = area_can_be_required
self.postcode_can_be_required = postcode_can_be_required
self.address_source_prefix = address_source_prefix
def add_area_validator(self, validators):
"""
Mark area as required for US and Canadian companies.
"""
validators.append(
RulesBasedValidator(
ValidationRule(
'required',
OperatorRule(f'{self.address_source_prefix}_area', bool),
when=InRule(
f'{self.address_source_prefix}_country',
(
CountryEnum.united_states.value.id,
CountryEnum.canada.value.id,
),
),
),
),
)
def add_postcode_validator(self, validators):
"""
Mark postcode as required for US and Canadian companies.
"""
validators.append(
RulesBasedValidator(
ValidationRule(
'required',
OperatorRule(f'{self.address_source_prefix}_postcode', bool),
when=InRule(
f'{self.address_source_prefix}_country',
(
CountryEnum.united_states.value.id,
CountryEnum.canada.value.id,
),
),
),
),
)
def get_validators(self):
"""
        Append ValidationRules for area/postcode depending on the feature flag/context.
        Area/postcode are only marked as required if the country is US/Canada, the serializer
        is called from a context where requiring them is safe, and the corresponding feature
        flag is enabled. Currently the only such context is CompanySerializer.
"""
validators = super().get_validators()
if self.area_can_be_required:
self.add_area_validator(validators)
if self.postcode_can_be_required:
self.add_postcode_validator(validators)
return validators
def run_validation(self, data=serializers.empty):
"""
Converts None to dict with default values so that those values can be used to
reset the fields on the model.
"""
if data or not self.allow_null:
normalised_data = data
else:
normalised_data = {
field_name: None if (field.default == serializers.empty) else field.default
for field_name, field in self.fields.items()
}
return super().run_validation(data=normalised_data)
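    # Hedged sketch of run_validation() above (not part of the original code): with
    # allow_null=True and data=None, normalised_data becomes a dict of field defaults,
    # e.g. {'line_1': '', 'line_2': '', ..., 'country': None}, so saving the parent
    # serializer resets every address field instead of leaving them untouched.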
def to_representation(self, value):
"""
It returns None if none of the address values is set.
E.g.
{
'address': None
}
instead of
{
'address': {
'line_1': '',
'line_2': '',
'town': '',
'county': '',
'postcode': '',
'country': None
}
}
"""
address_dict = super().to_representation(value)
if not any(address_dict.values()):
return None
# for each address field, replace None with default if possible
for field_name, value in address_dict.items():
field_default = self.fields[field_name].default
if value is None and field_default is not serializers.empty:
address_dict[field_name] = field_default
return address_dict
def should_validate(self, data_combiner):
"""
Returns true if the data should be validated.
"""
if self.required:
return True
return any(
data_combiner.get_value(field.source)
for field in self.fields.values()
)
def validate(self, attrs):
"""
Validates the data if necessary.
This is needed because some addresses only need to be validated
if they are passed in.
"""
validated_data = super().validate(attrs)
data_combiner = DataCombiner(self.parent.instance, validated_data)
if self.should_validate(data_combiner):
errors = {}
for field_name in self.REQUIRED_FIELDS:
field = self.fields[field_name]
value = data_combiner.get_value(field.source)
if not value:
errors[field_name] = self.error_messages['required']
if errors:
raise ValidationError(errors)
return validated_data
class Meta:
"""Meta options."""
model = None
fields = (
'line_1',
'line_2',
'town',
'county',
'postcode',
'area',
'country',
)
| 33.093633 | 99 | 0.587992 | 16,871 | 0.954674 | 0 | 0 | 152 | 0.008601 | 0 | 0 | 8,006 | 0.453033 |
1c50b9af34c1306cdbc9fec048d28309381c28e4 | 4,763 | py | Python | samples/s07-rigid-objects/main.py | nomadsinteractive/ark | 52f84c6dbd5ca6bdd07d450b3911be1ffd995922 | ["Apache-2.0"] | 5 | 2018-03-28T09:14:55.000Z | 2018-04-02T11:54:33.000Z | samples/s07-rigid-objects/main.py | nomadsinteractive/ark | 52f84c6dbd5ca6bdd07d450b3911be1ffd995922 | ["Apache-2.0"] | null | null | null | samples/s07-rigid-objects/main.py | nomadsinteractive/ark | 52f84c6dbd5ca6bdd07d450b3911be1ffd995922 | ["Apache-2.0"] | null | null | null |
import math
import random
from ark import dear_imgui, ApplicationFacade, Arena, Event, Integer, Collider, RenderObject, Size, Camera, Vec3, Numeric
class Application:
def __init__(self, application: ApplicationFacade):
self._down_x = 0
self._down_y = 0
self._application = application
self._light_position = Vec3(100, 500, 0)
self._resource_loader = self._application.create_resource_loader('main.xml')
self._arena = self._resource_loader.load(Arena, 'main', c=self._application.camera, lp=self._light_position)
self._application.arena = self._arena
self._arena.add_event_listener(self.on_event)
self._imgui = self._arena.resource_loader.refs.imgui
self._world_box2d = self._resource_loader.load(Collider, 'b2World')
self._world_bullet = self._resource_loader.load(Collider, 'btWorld')
self._l001 = self._resource_loader.layers.l001
self._l003 = self._resource_loader.layers.l003
self._shape_id = Integer(0)
self._collider_id = Integer(0)
self._body_size = Numeric(50)
self._body_ttl = Numeric(5)
self._rigid_body_ground_b2 = self._world_box2d.create_body(Collider.BODY_TYPE_STATIC, Collider.BODY_SHAPE_BOX, (4.8, 0), Size(6.0, 1.0))
self._rigid_body_ground_bt = self._world_bullet.create_body(Collider.BODY_TYPE_STATIC, Collider.BODY_SHAPE_BOX, (480, 0, 0), Size(600, 100, 600))
self._shapes = [Collider.BODY_SHAPE_BALL, Collider.BODY_SHAPE_BOX, 3]
@property
def imgui(self):
return self._imgui
def on_event(self, event):
action = event.action
if action == Event.ACTION_DOWN:
(self._down_x, self._down_y) = event.xy
elif action == Event.ACTION_UP:
if abs(event.x - self._down_x) + abs(event.y - self._down_y) < 10:
self.on_click(event)
return True
return False
def on_click(self, event: Event):
shape_id = self._shape_id.val
collider_id = self._collider_id.val
render_object = [self.make_object_box2d, self.make_object_bullet][collider_id](shape_id, event)
self._defer_dispose(render_object)
def make_object_box2d(self, shape_id: int, event: Event) -> RenderObject:
xy = (event.x / 100, event.y / 100)
s = self._body_size / 100
shape = self._shapes[shape_id]
rigid_body = self._world_box2d.create_body(Collider.BODY_TYPE_DYNAMIC, shape, xy, Size(s, s))
render_object = RenderObject(random.randrange(1, 100), None, Size(self._body_size, self._body_size), None)
rigid_body.bind(render_object)
self._l003.add_render_object(render_object)
return render_object
def make_object_bullet(self, shape_id: int, event: Event) -> RenderObject:
xy = event.xy
shape = self._shapes[shape_id]
s = self._body_size.val
s1 = s / [2, 100, 50][shape_id]
rigid_body = self._world_bullet.create_body(Collider.BODY_TYPE_DYNAMIC, shape, xy, Size(s, s, s))
render_object = RenderObject(self._shape_id.val + 1, None, Size(s1, s1, s1))
rigid_body.bind(render_object)
self._l001.add_render_object(render_object)
return render_object
def create_toolbox(self):
builder = dear_imgui.RendererBuilder(self._imgui)
builder.begin('RigidBodies')
builder.text('Which collider engine shall we use?')
builder.radio_button('Box2D', self._collider_id, 0)
builder.same_line()
builder.radio_button('Bullet3', self._collider_id, 1)
builder.separator()
builder.text('Click somewhere to create a RigidBody typed below:')
builder.radio_button('Ball', self._shape_id, 0)
builder.same_line()
builder.radio_button('Box', self._shape_id, 1)
builder.same_line()
builder.radio_button('Duck', self._shape_id, 2)
builder.slider_float('RigidBody size', self._body_size, 10, 100, '%.1f')
builder.slider_float('RigidBody TTL', self._body_ttl, 5, 50, '%.1f')
builder.slider_float3('Light Position', self._light_position, 0, 1000, '%.1f')
builder.end()
self._imgui.add_renderer(builder.build())
@staticmethod
def _make_camera() -> Camera:
e = 500
camera = Camera()
camera.perspective(math.radians(45), 16 / 9, 0.1, 2000)
camera.look_at(Vec3(0, 0, e), Vec3(0, 0, e - 100), Vec3(0, 1, 0))
return camera
def _defer_dispose(self, render_object: RenderObject):
self._application.post(lambda: render_object.dispose(), self._body_ttl.val)
def main(app: Application):
app.create_toolbox()
if __name__ == '__main__':
main(Application(_application))
| 43.3 | 153 | 0.669116 | 4,491 | 0.942893 | 0 | 0 | 306 | 0.064245 | 0 | 0 | 244 | 0.051228 |
1c51a22587be89037e69f604118ecdbeda84cab5 | 11,693 | py | Python | jamf/models/computer_extension_attribute.py | jensenbox/python-jamf | 85213085b1064a00375a7aa7df5e33c19f5178eb | ["RSA-MD"] | 1 | 2021-04-20T15:28:57.000Z | 2021-04-20T15:28:57.000Z | jamf/models/computer_extension_attribute.py | jensenbox/python-jamf | 85213085b1064a00375a7aa7df5e33c19f5178eb | ["RSA-MD"] | null | null | null | jamf/models/computer_extension_attribute.py | jensenbox/python-jamf | 85213085b1064a00375a7aa7df5e33c19f5178eb | ["RSA-MD"] | null | null | null |
# coding: utf-8
"""
Jamf Pro API
## Overview This is a sample Jamf Pro server which allows for usage without any authentication. The Jamf Pro environment which supports the Try it Out functionality does not run the current beta version of Jamf Pro, thus any newly added endpoints will result in an error and should be used soley for documentation purposes. # noqa: E501
The version of the OpenAPI document: 10.25.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from jamf.configuration import Configuration
class ComputerExtensionAttribute(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'definition_id': 'str',
'name': 'str',
'description': 'str',
'enabled': 'bool',
'multi_value': 'bool',
'values': 'list[str]',
'data_type': 'str',
'options': 'list[str]',
'input_type': 'str'
}
attribute_map = {
'definition_id': 'definitionId',
'name': 'name',
'description': 'description',
'enabled': 'enabled',
'multi_value': 'multiValue',
'values': 'values',
'data_type': 'dataType',
'options': 'options',
'input_type': 'inputType'
}
def __init__(self, definition_id=None, name=None, description=None, enabled=None, multi_value=None, values=None, data_type=None, options=None, input_type=None, local_vars_configuration=None): # noqa: E501
"""ComputerExtensionAttribute - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._definition_id = None
self._name = None
self._description = None
self._enabled = None
self._multi_value = None
self._values = None
self._data_type = None
self._options = None
self._input_type = None
self.discriminator = None
if definition_id is not None:
self.definition_id = definition_id
if name is not None:
self.name = name
self.description = description
if enabled is not None:
self.enabled = enabled
if multi_value is not None:
self.multi_value = multi_value
self.values = values
self.data_type = data_type
self.options = options
self.input_type = input_type
@property
def definition_id(self):
"""Gets the definition_id of this ComputerExtensionAttribute. # noqa: E501
An identifier of extension attribute definition. # noqa: E501
:return: The definition_id of this ComputerExtensionAttribute. # noqa: E501
:rtype: str
"""
return self._definition_id
@definition_id.setter
def definition_id(self, definition_id):
"""Sets the definition_id of this ComputerExtensionAttribute.
An identifier of extension attribute definition. # noqa: E501
:param definition_id: The definition_id of this ComputerExtensionAttribute. # noqa: E501
:type definition_id: str
"""
self._definition_id = definition_id
@property
def name(self):
"""Gets the name of this ComputerExtensionAttribute. # noqa: E501
A human-readable name by which attribute can be referred to. # noqa: E501
:return: The name of this ComputerExtensionAttribute. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this ComputerExtensionAttribute.
A human-readable name by which attribute can be referred to. # noqa: E501
:param name: The name of this ComputerExtensionAttribute. # noqa: E501
:type name: str
"""
self._name = name
@property
def description(self):
"""Gets the description of this ComputerExtensionAttribute. # noqa: E501
An additional explanation of exact attribute meaning, possible values, etc. # noqa: E501
:return: The description of this ComputerExtensionAttribute. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this ComputerExtensionAttribute.
An additional explanation of exact attribute meaning, possible values, etc. # noqa: E501
:param description: The description of this ComputerExtensionAttribute. # noqa: E501
:type description: str
"""
self._description = description
@property
def enabled(self):
"""Gets the enabled of this ComputerExtensionAttribute. # noqa: E501
:return: The enabled of this ComputerExtensionAttribute. # noqa: E501
:rtype: bool
"""
return self._enabled
@enabled.setter
def enabled(self, enabled):
"""Sets the enabled of this ComputerExtensionAttribute.
:param enabled: The enabled of this ComputerExtensionAttribute. # noqa: E501
:type enabled: bool
"""
self._enabled = enabled
@property
def multi_value(self):
"""Gets the multi_value of this ComputerExtensionAttribute. # noqa: E501
:return: The multi_value of this ComputerExtensionAttribute. # noqa: E501
:rtype: bool
"""
return self._multi_value
@multi_value.setter
def multi_value(self, multi_value):
"""Sets the multi_value of this ComputerExtensionAttribute.
:param multi_value: The multi_value of this ComputerExtensionAttribute. # noqa: E501
:type multi_value: bool
"""
self._multi_value = multi_value
@property
def values(self):
"""Gets the values of this ComputerExtensionAttribute. # noqa: E501
A value of extension attribute, in some rare cases there may be multiple values present, hence the array. # noqa: E501
:return: The values of this ComputerExtensionAttribute. # noqa: E501
:rtype: list[str]
"""
return self._values
@values.setter
def values(self, values):
"""Sets the values of this ComputerExtensionAttribute.
A value of extension attribute, in some rare cases there may be multiple values present, hence the array. # noqa: E501
:param values: The values of this ComputerExtensionAttribute. # noqa: E501
:type values: list[str]
"""
self._values = values
@property
def data_type(self):
"""Gets the data_type of this ComputerExtensionAttribute. # noqa: E501
A data type of extension attribute. # noqa: E501
:return: The data_type of this ComputerExtensionAttribute. # noqa: E501
:rtype: str
"""
return self._data_type
@data_type.setter
def data_type(self, data_type):
"""Sets the data_type of this ComputerExtensionAttribute.
A data type of extension attribute. # noqa: E501
:param data_type: The data_type of this ComputerExtensionAttribute. # noqa: E501
:type data_type: str
"""
allowed_values = [None,"STRING", "INTEGER", "DATE_TIME"] # noqa: E501
if self.local_vars_configuration.client_side_validation and data_type not in allowed_values: # noqa: E501
raise ValueError(
"Invalid value for `data_type` ({0}), must be one of {1}" # noqa: E501
.format(data_type, allowed_values)
)
self._data_type = data_type
@property
def options(self):
"""Gets the options of this ComputerExtensionAttribute. # noqa: E501
A closed list of possible values (applies to `popup` input type). # noqa: E501
:return: The options of this ComputerExtensionAttribute. # noqa: E501
:rtype: list[str]
"""
return self._options
@options.setter
def options(self, options):
"""Sets the options of this ComputerExtensionAttribute.
A closed list of possible values (applies to `popup` input type). # noqa: E501
:param options: The options of this ComputerExtensionAttribute. # noqa: E501
:type options: list[str]
"""
self._options = options
@property
def input_type(self):
"""Gets the input_type of this ComputerExtensionAttribute. # noqa: E501
        The input method. `text` is most common and means simply free text, `popup` is a closed list of values from which one or many can be selected and `script` value is calculated and can never be set directly.  # noqa: E501
:return: The input_type of this ComputerExtensionAttribute. # noqa: E501
:rtype: str
"""
return self._input_type
@input_type.setter
def input_type(self, input_type):
"""Sets the input_type of this ComputerExtensionAttribute.
        The input method. `text` is most common and means simply free text, `popup` is a closed list of values from which one or many can be selected and `script` value is calculated and can never be set directly.  # noqa: E501
:param input_type: The input_type of this ComputerExtensionAttribute. # noqa: E501
:type input_type: str
"""
allowed_values = [None,"TEXT", "POPUP", "SCRIPT", "LDAP"] # noqa: E501
if self.local_vars_configuration.client_side_validation and input_type not in allowed_values: # noqa: E501
raise ValueError(
"Invalid value for `input_type` ({0}), must be one of {1}" # noqa: E501
.format(input_type, allowed_values)
)
self._input_type = input_type
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ComputerExtensionAttribute):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ComputerExtensionAttribute):
return True
return self.to_dict() != other.to_dict()
| 33.408571 | 342 | 0.625673 | 11,107 | 0.949885 | 0 | 0 | 7,200 | 0.615753 | 0 | 0 | 6,724 | 0.575045 |
1c5289b76fb10d8b256a4000027a462353b8a389 | 1,342 | py | Python | SSOKeyGen/ssokeygendialog.py | chrcoe/sso-keygen | c149f6202fbecb38874c75bf82e0d4857d1249f9 | ["MIT"] | null | null | null | SSOKeyGen/ssokeygendialog.py | chrcoe/sso-keygen | c149f6202fbecb38874c75bf82e0d4857d1249f9 | ["MIT"] | null | null | null | SSOKeyGen/ssokeygendialog.py | chrcoe/sso-keygen | c149f6202fbecb38874c75bf82e0d4857d1249f9 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ssokeygendialog.ui'
#
# Created: Sun Feb 1 12:33:36 2015
# by: PyQt5 UI code generator 5.4
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(400, 300)
self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
self.buttonBox.setGeometry(QtCore.QRect(30, 240, 341, 32))
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.testLabel = QtWidgets.QLabel(Dialog)
self.testLabel.setGeometry(QtCore.QRect(50, 40, 181, 31))
self.testLabel.setObjectName("testLabel")
self.retranslateUi(Dialog)
self.buttonBox.accepted.connect(Dialog.accept)
self.buttonBox.rejected.connect(Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "Dialog Test"))
self.testLabel.setText(_translate("Dialog", "TextLabel"))
| 38.342857 | 106 | 0.708644 | 1,062 | 0.791356 | 0 | 0 | 0 | 0 | 0 | 0 | 295 | 0.219821 |
1c5370b938a0a9b556d9850b79dfef4883c667c0 | 4,138 | py | Python | util/n_download_util.py | TwrFyr/n-hen.py | 8d20639ee78cc34e4333fb247574ff10af81556c | ["MIT"] | null | null | null | util/n_download_util.py | TwrFyr/n-hen.py | 8d20639ee78cc34e4333fb247574ff10af81556c | ["MIT"] | 22 | 2020-12-04T15:16:36.000Z | 2021-04-29T12:20:04.000Z | util/n_download_util.py | TwrFyr/n-henpy | 8d20639ee78cc34e4333fb247574ff10af81556c | ["MIT"] | null | null | null |
import urllib.request
import os
from typing import List
from util.n_util import NUser
from util.n_util import get_n_entry
import time
import threading
from util.array_util import slice_array
delay: float = 2.5
class ProgressWrapper:
"""The progress wrapper keeps track of the progress of a operation by wrapping a current number and a total number.
It also wraps an optional function, which uses the current values and has to have the form 'func(current, total)'."""
def __init__(self, start, total, update):
self.current = start
self.total = total
self.update_callback = update
def update(self):
if self.update_callback is not None:
self.update_callback(self.current, self.total)
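# Hedged usage sketch for ProgressWrapper (not part of the original module); the
# callback name is made up:
#   def report(current, total):
#       print(f'{current}/{total} files downloaded')
#   progress = ProgressWrapper(0, 10, report)
#   progress.current += 1
#   progress.update()  # -> '1/10 files downloaded'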
def download_images(lock, file_url_list: List[str], path: str, progress=None):
for file_url in file_url_list:
filename = os.path.join(path, file_url.split('/')[-1])
print('writing {} to {}'.format(file_url, filename))
urllib.request.urlretrieve(file_url, filename)
if progress is not None:
with lock:
progress.current += 1
progress.update()
def save_files_to_dir(file_url_list: List[str], path: str, update=None, thread_count: int = 1) -> None:
"""Saves all files represented by a list of url resources to the folder specified.
    The files are named after the last component of each URL.
    Increasing ``thread_count`` downloads the images using multiple threads."""
# pretend to be normal user
# opener=urllib.request.build_opener()
# opener.addheaders=[('User-Agent','Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1941.0 Safari/537.36')]
# urllib.request.install_opener(opener)
progress = ProgressWrapper(0, len(file_url_list), update)
progress.update()
if thread_count < 1 or thread_count > 16:
print(f'invalid thread count: {thread_count} not in [1, 16]')
return
else:
lock = threading.Lock()
threads = []
for i in range(thread_count):
slices = slice_array(file_url_list, thread_count)
t = threading.Thread(target=download_images, kwargs=dict(lock=lock, file_url_list=slices[i], path=path,
progress=progress),
daemon=True)
threads.append(t)
t.start()
for t in threads:
t.join()
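# Hedged example call (not part of the original module); the URL and folder are made up:
#   save_files_to_dir(['https://example.com/img/001.jpg'], '/tmp/out', thread_count=4)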
def download_all_favorites(n_user: NUser, base_dir: str, update_entry=None, update_page=None, thread_count=1) -> None:
"""Downloads all entries favorited by `n_user` using the number of `thread_count` threads."""
print('downloading {}\'s {} favorites...'.format(n_user.username, n_user.fav_count))
current_entry = 1
total_entries = n_user.fav_count
for min_entry in n_user.favorite_list:
if update_entry is not None:
update_entry(current_entry=min_entry, current=current_entry, total=total_entries)
# get entry data
print('downloading entry with id {}'.format(min_entry.n_id))
entry = get_n_entry(min_entry.n_id)
if entry is None:
print('no connection possible, skipping...')
current_entry += 1
continue
# check directory is valid
if not os.path.exists(base_dir):
print('base directory does not exist, aborting...')
break
save_dir = os.path.join(base_dir, entry.digits)
if os.path.exists(save_dir):
print('entry already exists, skipping...')
current_entry += 1
continue
else:
os.mkdir(save_dir)
# download images
save_files_to_dir(entry.image_url_list, save_dir, update=update_page, thread_count=thread_count)
print('waiting for {} seconds...'.format(delay))
time.sleep(delay)
current_entry += 1
if update_entry is not None:
update_entry(current_entry=None, current=current_entry, total=total_entries)
print('download finished')
| 38.672897 | 150 | 0.646931 | 532 | 0.128565 | 0 | 0 | 0 | 0 | 0 | 0 | 1,186 | 0.286612 |
1c542217eb772ffd5114bee20efa5d974df6a3d5 | 2,907 | py | Python | stable-baselines/tests/test_deterministic.py | princeton-vl/PackIt | 9894d252c5238d582cba7c3d19540f89d47e4166 | ["BSD-3-Clause"] | 49 | 2020-07-24T18:17:12.000Z | 2022-01-04T15:30:52.000Z | stable-baselines/tests/test_deterministic.py | princeton-vl/PackIt | 9894d252c5238d582cba7c3d19540f89d47e4166 | ["BSD-3-Clause"] | 14 | 2020-07-21T20:21:08.000Z | 2022-03-12T00:42:18.000Z | stable-baselines/tests/test_deterministic.py | princeton-vl/PackIt | 9894d252c5238d582cba7c3d19540f89d47e4166 | ["BSD-3-Clause"] | 5 | 2020-07-27T12:35:00.000Z | 2021-07-19T03:04:21.000Z |
import pytest
from stable_baselines import A2C, ACER, ACKTR, DeepQ, DDPG, PPO1, PPO2, TRPO
from stable_baselines.ddpg import AdaptiveParamNoiseSpec
from stable_baselines.common.identity_env import IdentityEnv, IdentityEnvBox
from stable_baselines.common.vec_env import DummyVecEnv
PARAM_NOISE_DDPG = AdaptiveParamNoiseSpec(initial_stddev=float(0.2), desired_action_stddev=float(0.2))
# Hyperparameters for learning identity for each RL model
LEARN_FUNC_DICT = {
'a2c': lambda e: A2C(policy="MlpPolicy", env=e).learn(total_timesteps=1000),
'acer': lambda e: ACER(policy="MlpPolicy", env=e).learn(total_timesteps=1000),
'acktr': lambda e: ACKTR(policy="MlpPolicy", env=e).learn(total_timesteps=1000),
'deepq': lambda e: DeepQ(policy="MlpPolicy", env=e).learn(total_timesteps=1000),
'ddpg': lambda e: DDPG(policy="MlpPolicy", env=e, param_noise=PARAM_NOISE_DDPG).learn(total_timesteps=1000),
'ppo1': lambda e: PPO1(policy="MlpPolicy", env=e).learn(total_timesteps=1000),
'ppo2': lambda e: PPO2(policy="MlpPolicy", env=e).learn(total_timesteps=1000),
'trpo': lambda e: TRPO(policy="MlpPolicy", env=e).learn(total_timesteps=1000),
}
@pytest.mark.slow
@pytest.mark.parametrize("model_name", ['a2c', 'acer', 'acktr', 'deepq', 'ppo1', 'ppo2', 'trpo'])
def test_identity(model_name):
"""
Test if the algorithm (with a given policy)
can learn an identity transformation (i.e. return observation as an action)
:param model_name: (str) Name of the RL model
"""
env = DummyVecEnv([lambda: IdentityEnv(10)])
model = LEARN_FUNC_DICT[model_name](env)
n_trials = 1000
obs = env.reset()
action_shape = model.predict(obs, deterministic=False)[0].shape
action, _ = model.predict(obs, deterministic=True)
assert action.shape == action_shape
for _ in range(n_trials):
new_action = model.predict(obs, deterministic=True)[0]
assert action == model.predict(obs, deterministic=True)[0]
assert new_action.shape == action_shape
# Free memory
del model, env
@pytest.mark.slow
@pytest.mark.parametrize("model_name", ['a2c', 'ddpg', 'ppo1', 'ppo2', 'trpo'])
def test_identity_continuous(model_name):
"""
Test if the algorithm (with a given policy)
can learn an identity transformation (i.e. return observation as an action)
:param model_name: (str) Name of the RL model
"""
env = DummyVecEnv([lambda: IdentityEnvBox(eps=0.5)])
model = LEARN_FUNC_DICT[model_name](env)
n_trials = 1000
obs = env.reset()
action_shape = model.predict(obs, deterministic=False)[0].shape
action, _ = model.predict(obs, deterministic=True)
assert action.shape == action_shape
for _ in range(n_trials):
new_action = model.predict(obs, deterministic=True)[0]
assert action == model.predict(obs, deterministic=True)[0]
assert new_action.shape == action_shape
| 40.943662 | 112 | 0.711042 | 0 | 0 | 0 | 0 | 1,738 | 0.597867 | 0 | 0 | 683 | 0.23495 |
1c547eed055111ebe6fcfe3bbff16bf6a9eb3360 | 1,129 | py | Python | tests/models/tensorflow/convert_to_tensorflow_serving.py | filipecosta90/dlbench | 11dd2fb58050c38a4baa429b207aaecad9097ce3 | ["MIT"] | 14 | 2019-09-14T16:37:39.000Z | 2022-03-19T08:28:50.000Z | tests/models/tensorflow/convert_to_tensorflow_serving.py | filipecosta90/dlbench | 11dd2fb58050c38a4baa429b207aaecad9097ce3 | ["MIT"] | 40 | 2019-11-14T16:07:08.000Z | 2022-03-29T21:47:15.000Z | tests/models/tensorflow/convert_to_tensorflow_serving.py | filipecosta90/dlbench | 11dd2fb58050c38a4baa429b207aaecad9097ce3 | ["MIT"] | 2 | 2021-01-07T01:50:53.000Z | 2021-02-24T22:22:23.000Z |
import tensorflow as tf
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import tag_constants
export_dir = './reference/00000002'
graph_pb = './creditcardfraud.pb'
builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
with tf.gfile.GFile(graph_pb, "rb") as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
sigs = {}
with tf.Session(graph=tf.Graph()) as sess:
# name="" is important to ensure we don't get spurious prefixing
tf.import_graph_def(graph_def, name="")
g = tf.get_default_graph()
inp1 = g.get_tensor_by_name("transaction:0")
inp2 = g.get_tensor_by_name("reference:0")
out = g.get_tensor_by_name("output:0")
sigs[signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = \
tf.saved_model.signature_def_utils.predict_signature_def(
{"transaction": inp1, "reference": inp2}, {"output": out})
builder.add_meta_graph_and_variables(sess,
[tag_constants.SERVING],
signature_def_map=sigs)
builder.save()
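# Rough follow-up sketch (not part of the original script): the SavedModel written to
# ./reference/00000002 can be reloaded for a quick sanity check, e.g.:
#   with tf.Session(graph=tf.Graph()) as check_sess:
#       tf.saved_model.loader.load(check_sess, [tag_constants.SERVING], export_dir)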
| 34.212121 | 70 | 0.689105 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 184 | 0.162976 |
1c54af7d2bc1fc02891b6239b955a52d082c20b2 | 936 | py | Python | pycuda/characterize.py | grlee77/pycuda | cfb787ac73a523fe4b32eff31ecffac485388bbf | ["Apache-2.0"] | null | null | null | pycuda/characterize.py | grlee77/pycuda | cfb787ac73a523fe4b32eff31ecffac485388bbf | ["Apache-2.0"] | null | null | null | pycuda/characterize.py | grlee77/pycuda | cfb787ac73a523fe4b32eff31ecffac485388bbf | ["Apache-2.0"] | 1 | 2020-08-31T08:52:24.000Z | 2020-08-31T08:52:24.000Z |
from __future__ import division
from __future__ import absolute_import
from pycuda.tools import context_dependent_memoize
import numpy as np
def platform_bits():
return tuple.__itemsize__ * 8
def has_stack():
from pycuda.driver import Context
return Context.get_device().compute_capability() >= (2, 0)
def has_double_support():
from pycuda.driver import Context
return Context.get_device().compute_capability() >= (1, 3)
@context_dependent_memoize
def sizeof(type_name, preamble=""):
from pycuda.compiler import SourceModule
mod = SourceModule("""
%s
extern "C"
__global__ void write_size(size_t *output)
{
*output = sizeof(%s);
}
""" % (preamble, type_name), no_extern_c=True)
import pycuda.gpuarray as gpuarray
output = gpuarray.empty((), dtype=np.uintp)
mod.get_function("write_size")(output, block=(1, 1, 1), grid=(1, 1))
return int(output.get())
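# Hedged usage sketch (not part of the original module); requires an active CUDA
# context, e.g. created via `import pycuda.autoinit`; the struct name is made up:
#   sizeof("float")                                          # -> 4
#   sizeof("my_pair", "struct my_pair { double a; int b; };")  # size including padding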
| 24 | 72 | 0.700855 | 0 | 0 | 0 | 0 | 485 | 0.518162 | 0 | 0 | 134 | 0.143162 |
1c5536cbf34d028ddd3a2b10367f5360508e1251 | 1,666 | py | Python | bopt/transforms.py | georgedeath/bomean | 0dad35e0d584cf7c46c9a8cb0445f225875cfa86 | ["MIT"] | 2 | 2020-05-19T15:48:37.000Z | 2021-08-16T10:41:49.000Z | bopt/transforms.py | georgedeath/bomean | 0dad35e0d584cf7c46c9a8cb0445f225875cfa86 | ["MIT"] | null | null | null | bopt/transforms.py | georgedeath/bomean | 0dad35e0d584cf7c46c9a8cb0445f225875cfa86 | ["MIT"] | null | null | null |
import torch
from scipy.stats import median_absolute_deviation
class Transform_Base(object):
"""
Base class for transformations based on some data.
"""
def __init__(self, Ytr):
self.Ytr = Ytr
# Transform the mean
def scale_mean(self, mu):
return mu
# Reverse the transformation to the mean
def unscale_mean(self, mu):
return mu
# Reverse the transformation to the variance
def unscale_var(self, var):
return var
class Transform_Standardize(Transform_Base):
"""
Standardize the data
"""
def __init__(self, Ytr):
super().__init__(Ytr)
self.Ytr_mean = Ytr.mean()
self.Ytr_std = Ytr.std()
self.Ytr_var = Ytr.var()
def scale_mean(self, mu):
return (mu - self.Ytr_mean) / self.Ytr_std
def unscale_mean(self, mu):
return mu * self.Ytr_std + self.Ytr_mean
def unscale_var(self, var):
return var * self.Ytr_var
class Transform_StandardizeRobustly(Transform_Base):
"""
Robustly standardize the data by estimating its scale
"""
def __init__(self, Ytr):
super().__init__(Ytr)
self.Ytr_median = Ytr.median()
Ytr_numpy = Ytr.numpy().ravel()
self.Ytr_scale = torch.tensor(median_absolute_deviation(Ytr_numpy))
self.Ytr_scaleSQR = self.Ytr_scale**2
def scale_mean(self, mu):
return (mu - self.Ytr_median) / self.Ytr_scale
def unscale_mean(self, mu):
return mu * self.Ytr_scale + self.Ytr_median
def unscale_var(self, var):
return var * self.Ytr_scaleSQR
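# Hedged usage sketch (not part of the original module):
#   Ytr = torch.tensor([[1.0], [2.0], [3.0]])
#   tr = Transform_Standardize(Ytr)
#   z = tr.scale_mean(torch.tensor(2.5))   # (2.5 - Ytr.mean()) / Ytr.std()
#   tr.unscale_mean(z)                     # -> 2.5 again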
| 25.630769 | 76 | 0.617047 | 1,583 | 0.95018 | 0 | 0 | 0 | 0 | 0 | 0 | 284 | 0.170468 |
1c556489b0d99f41db32e59ce5f01f383067703c | 2,797 | py | Python | pygs/graphserver/compiler/dedupe.py | abyrd/graphserver | 42edcad2618635310c57fa6ab4a13974025248ba | ["BSD-3-Clause-Clear"] | 2 | 2015-02-25T21:46:02.000Z | 2019-04-27T20:22:33.000Z | pygs/graphserver/compiler/dedupe.py | ninowalker/graphserver | dc08070bc6e295986633cf510ca46a2f8d451b92 | ["BSD-3-Clause-Clear"] | null | null | null | pygs/graphserver/compiler/dedupe.py | ninowalker/graphserver | dc08070bc6e295986633cf510ca46a2f8d451b92 | ["BSD-3-Clause-Clear"] | null | null | null |
# eliminate duplicate service periods from a GTFS database
from graphserver.ext.gtfs.gtfsdb import GTFSDatabase
import sys
from optparse import OptionParser
def main():
usage = """usage: python dedupe.py <graphdb_filename>"""
parser = OptionParser(usage=usage)
(options, args) = parser.parse_args()
if len(args) != 1:
parser.print_help()
exit(-1)
graphdb_filename = args[0]
gtfsdb = GTFSDatabase( graphdb_filename )
query = """
SELECT count(*), monday, tuesday, wednesday, thursday, friday, saturday, sunday, start_date, end_date
FROM calendar
GROUP BY monday, tuesday, wednesday, thursday, friday, saturday, sunday, start_date, end_date"""
duped_periods = gtfsdb.execute( query )
equivilants = []
for count, m,t,w,th,f,s,su,start_date,end_date in duped_periods:
# no need to check for dupes if there's only one
if count==1:
continue
#print count, m, t, w, th, f, s, su, start_date, end_date
# get service_ids for this dow/start_date/end_date combination
service_ids = [x[0] for x in list( gtfsdb.execute( "SELECT service_id FROM calendar where monday=? and tuesday=? and wednesday=? and thursday=? and friday=? and saturday=? and sunday=? and start_date=? and end_date=?", (m,t,w,th,f,s,su,start_date,end_date) ) ) ]
# group by service periods with the same set of exceptions
exception_set_grouper = {}
for service_id in service_ids:
exception_set = list(gtfsdb.execute( "SELECT date, exception_type FROM calendar_dates WHERE service_id=?", (service_id,) ) )
exception_set.sort()
exception_set = tuple(exception_set)
exception_set_grouper[exception_set] = exception_set_grouper.get(exception_set,[])
exception_set_grouper[exception_set].append( service_id )
# extend list of equivilants
for i, exception_set_group in enumerate( exception_set_grouper.values() ):
equivilants.append( ("%d%d%d%d%d%d%d-%s-%s-%d"%(m,t,w,th,f,s,su,start_date,end_date,i), exception_set_group) )
for new_name, old_names in equivilants:
for old_name in old_names:
print old_name, new_name
c = gtfsdb.conn.cursor()
c.execute( "UPDATE calendar SET service_id=? WHERE service_id=?", (new_name, old_name) )
c.execute( "UPDATE calendar_dates SET service_id=? WHERE service_id=?", (new_name, old_name) )
c.execute( "UPDATE trips SET service_id=? WHERE service_id=?", (new_name, old_name) )
gtfsdb.conn.commit()
c.close()
if __name__=='__main__':
    main()
| 39.957143 | 271 | 0.631748 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,019 | 0.364319 |
1c5786ec0bae08a5ef1c18dbc1ab79a0a17bfc34 | 105 | py | Python | 10/01/03/2.py | pylangstudy/201707 | c1cc72667f1e0b6e8eef4ee85067d7fa4ca500b6 | ["CC0-1.0"] | null | null | null | 10/01/03/2.py | pylangstudy/201707 | c1cc72667f1e0b6e8eef4ee85067d7fa4ca500b6 | ["CC0-1.0"] | 46 | 2017-06-30T22:19:07.000Z | 2017-07-31T22:51:31.000Z | 10/01/03/2.py | pylangstudy/201707 | c1cc72667f1e0b6e8eef4ee85067d7fa4ca500b6 | ["CC0-1.0"] | null | null | null |
class MyClass:
def __repr__(self): return self.__class__.__name__ + '()'
print(MyClass().__repr__())
| 26.25 | 61 | 0.704762 | 76 | 0.72381 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.038095 |
1c57a86a468018b2042fa4b09d8dfca249bb7498 | 9,562 | py | Python | tests/tasks/core/test_core.py | andykawabata/prefect | a11061c19847beeea26616ccaf4b404ad939676b | ["ECL-2.0", "Apache-2.0"] | 2 | 2020-09-28T16:24:02.000Z | 2020-10-08T17:08:19.000Z | tests/tasks/core/test_core.py | andykawabata/prefect | a11061c19847beeea26616ccaf4b404ad939676b | ["ECL-2.0", "Apache-2.0"] | 5 | 2021-06-28T20:52:27.000Z | 2022-02-27T13:04:42.000Z | tests/tasks/core/test_core.py | yalaudah/prefect | 2f7f92c39a4575119c3268b0415841c6aca5df60 | ["Apache-2.0"] | 1 | 2020-05-04T13:22:11.000Z | 2020-05-04T13:22:11.000Z |
import pytest
from prefect.core import Edge, Flow, Parameter, Task
from prefect.tasks.core import collections
from prefect.tasks.core.constants import Constant
from prefect.tasks.core.function import FunctionTask
class IdentityTask(Task):
def run(self, x):
return x
class TestConstant:
def test_constant_task_returns_its_value(self):
x = Constant("x")
assert x.run() == "x"
y = Constant(100)
assert y.run() == 100
def test_automatic_create_constant_task(self):
with Flow(name="test") as flow:
t = Task()
t.set_dependencies(upstream_tasks=[4])
assert len(flow.tasks) == 2
assert any(isinstance(t, Constant) for t in flow.tasks)
class TestFunctionTask:
def test_function_task_requires_callable(self):
with pytest.raises(TypeError):
FunctionTask(fn=1)
def test_function_task_takes_name_from_callable(self):
def my_fn():
pass
f = FunctionTask(fn=my_fn)
assert f.name == "my_fn"
def test_function_task_takes_name_from_arg_if_provided(self):
def my_fn():
pass
f = FunctionTask(fn=my_fn, name="test")
assert f.name == "test"
def test_function_task_docstring(self):
def my_fn():
"""An example docstring."""
pass
# Original docstring available on class
assert "FunctionTask" in FunctionTask.__doc__
# Wrapped function is docstring on instance
f = FunctionTask(fn=my_fn)
assert f.__doc__ == my_fn.__doc__
# Except when no docstring on wrapped function
f = FunctionTask(fn=lambda x: x + 1)
assert "FunctionTask" in f.__doc__
def test_function_task_sets__wrapped__(self):
def my_fn():
"""An example function"""
pass
t = FunctionTask(fn=my_fn)
assert t.__wrapped__ == my_fn
assert not hasattr(FunctionTask, "__wrapped__")
class TestCollections:
def test_list_returns_a_list(self):
l = collections.List()
with Flow(name="test") as f:
l.bind(1, 2)
assert f.run().result[l].result == [1, 2]
def test_list_binds_varargs(self):
t1 = Task()
t2 = Task()
l = collections.List()
with Flow(name="test") as f:
l.bind(t1, t2)
assert set([t1, t2, l]) == f.tasks
assert Edge(t1, l, key="arg_1") in f.edges
assert Edge(t2, l, key="arg_2") in f.edges
def test_tuple_returns_a_tuple(self):
l = collections.Tuple()
with Flow(name="test") as f:
l.bind(1, 2)
assert f.run().result[l].result == (1, 2)
def test_tuple_binds_varargs(self):
t1 = Task()
t2 = Task()
l = collections.Tuple()
with Flow(name="test") as f:
l.bind(t1, t2)
assert set([t1, t2, l]) == f.tasks
assert Edge(t1, l, key="arg_1") in f.edges
assert Edge(t2, l, key="arg_2") in f.edges
def test_set_returns_a_set(self):
l = collections.Set()
with Flow(name="test") as f:
l.bind(1, 2)
assert f.run().result[l].result == set([1, 2])
def test_set_binds_varargs(self):
t1 = Task()
t2 = Task()
l = collections.Set()
with Flow(name="test") as f:
l.bind(t1, t2)
assert set([t1, t2, l]) == f.tasks
assert Edge(t1, l, key="arg_1") in f.edges
assert Edge(t2, l, key="arg_2") in f.edges
def test_dict_returns_a_dict(self):
l = collections.Dict()
with Flow(name="test") as f:
l.bind(keys=["a", "b"], values=[1, 2])
assert f.run().result[l].result == dict(a=1, b=2)
def test_dict_handles_non_string_keys(self):
l = collections.Dict()
with Flow(name="test") as f:
l.bind(keys=[None, 55], values=[1, 2])
assert f.run().result[l].result == {None: 1, 55: 2}
def test_dict_raises_for_differing_length_key_value_pairs(self):
l = collections.Dict()
with Flow(name="test") as f:
l.bind(keys=["a"], values=[1, 2])
state = f.run()
assert state.result[l].is_failed()
assert isinstance(state.result[l].result, ValueError)
def test_list_automatically_applied_to_callargs(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
with Flow(name="test") as f:
identity.bind(x=[x, y])
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 4
assert sum(isinstance(t, collections.List) for t in f.tasks) == 1
assert state.result[identity].result == [1, 2]
def test_list_automatically_applied_to_callargs_imperative(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
f = Flow(name="test")
f.add_task(identity)
identity.bind(x=[x, y], flow=f)
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 4
assert sum(isinstance(t, collections.List) for t in f.tasks) == 1
assert state.result[identity].result == [1, 2]
def test_tuple_automatically_applied_to_callargs(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
with Flow(name="test") as f:
identity.bind(x=(x, y))
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 4
assert sum(isinstance(t, collections.Tuple) for t in f.tasks) == 1
assert state.result[identity].result == (1, 2)
def test_tuple_automatically_applied_to_callargs_imperative(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
f = Flow(name="test")
f.add_task(identity)
identity.bind(x=(x, y), flow=f)
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 4
assert sum(isinstance(t, collections.Tuple) for t in f.tasks) == 1
assert state.result[identity].result == (1, 2)
def test_set_automatically_applied_to_callargs(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
with Flow(name="test") as f:
identity.bind(x=set([x, y]))
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 4
assert sum(isinstance(t, collections.Set) for t in f.tasks) == 1
assert state.result[identity].result == set([1, 2])
def test_set_automatically_applied_to_callargs_imperative(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
f = Flow(name="test")
f.add_task(identity)
identity.bind(x=set([x, y]), flow=f)
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 4
assert sum(isinstance(t, collections.Set) for t in f.tasks) == 1
assert state.result[identity].result == set([1, 2])
def test_dict_automatically_applied_to_callargs(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
with Flow(name="test") as f:
identity.bind(x=dict(a=x, b=y))
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 5 # 2 params, identity, Dict, List of dict values
assert sum(isinstance(t, collections.Dict) for t in f.tasks) == 1
assert state.result[identity].result == dict(a=1, b=2)
def test_dict_automatically_applied_to_callargs_imperative(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
f = Flow(name="test")
f.add_task(identity)
identity.bind(x=dict(a=x, b=y), flow=f)
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 5 # 2 params, identity, Dict, List of dict values
assert sum(isinstance(t, collections.Dict) for t in f.tasks) == 1
assert state.result[identity].result == dict(a=1, b=2)
def test_nested_collection_automatically_applied_to_callargs(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
with Flow(name="test") as f:
identity.bind(x=dict(a=[x, dict(y=y)], b=(y, set([x]))))
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 10
assert state.result[identity].result == dict(a=[1, dict(y=2)], b=(2, set([1])))
def test_nested_collection_automatically_applied_to_callargs_imperative(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
f = Flow(name="test")
f.add_task(identity)
identity.bind(x=dict(a=[x, dict(y=y)], b=(y, set([x]))), flow=f)
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 10
assert state.result[identity].result == dict(a=[1, dict(y=2)], b=(2, set([1])))
def test_list_maintains_sort_order_for_more_than_10_items(self):
# https://github.com/PrefectHQ/prefect/issues/2451
l = collections.List()
with Flow(name="test") as f:
l.bind(*list(range(15)))
assert f.run().result[l].result == list(range(15))
def test_tuple_maintains_sort_order_for_more_than_10_items(self):
# https://github.com/PrefectHQ/prefect/issues/2451
t = collections.Tuple()
with Flow(name="test") as f:
t.bind(*list(range(15)))
assert f.run().result[t].result == tuple(range(15))
| 33.787986 | 87 | 0.587743 | 9,336 | 0.976365 | 0 | 0 | 0 | 0 | 0 | 0 | 683 | 0.071429 |
1c57c91d84e8ef886ecab5c688f26666500663aa | 536 | py | Python | Tree/node.py | philipwerner/python_data_structures | 554c38376b732f65c5c168d0e1bd30bea3d1ab6b | ["MIT"] | null | null | null | Tree/node.py | philipwerner/python_data_structures | 554c38376b732f65c5c168d0e1bd30bea3d1ab6b | ["MIT"] | null | null | null | Tree/node.py | philipwerner/python_data_structures | 554c38376b732f65c5c168d0e1bd30bea3d1ab6b | ["MIT"] | null | null | null |
"""Node class module for Binary Tree."""
class Node(object):
"""The Node class."""
def __init__(self, value):
"""Initialization of node object."""
self.value = value
self.left = None
self.right = None
def __str__(self):
"""Return a string representation of the node object."""
return f'{self.value}'
def __repr__(self):
"""Return a representation of the node object."""
return f'<Node | Value: {self.value} | Left: {self.left} | Right: {self.right}>'
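# Hedged usage sketch (not part of the original module):
#   root = Node(5)
#   str(root)    # -> '5'
#   repr(root)   # -> '<Node | Value: 5 | Left: None | Right: None>'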
| 26.8 | 88 | 0.585821 | 492 | 0.91791 | 0 | 0 | 0 | 0 | 0 | 0 | 290 | 0.541045 |
1c5842430ac7ddf81b0dae7e72f5e8595722304e | 26,713 | py | Python | qutip/operators.py | pschindler/qutip | dc399135b77a01077898e13bb7d30d60db9b6e67 | ["BSD-3-Clause"] | 1 | 2018-05-31T17:38:03.000Z | 2018-05-31T17:38:03.000Z | qutip/operators.py | pschindler/qutip | dc399135b77a01077898e13bb7d30d60db9b6e67 | ["BSD-3-Clause"] | 3 | 2021-08-23T19:00:52.000Z | 2021-08-24T21:38:04.000Z | qutip/operators.py | pschindler/qutip | dc399135b77a01077898e13bb7d30d60db9b6e67 | ["BSD-3-Clause"] | 2 | 2017-08-11T11:14:52.000Z | 2022-03-13T21:37:47.000Z |
# This file is part of QuTiP: Quantum Toolbox in Python.
#
# Copyright (c) 2011 and later, Paul D. Nation and Robert J. Johansson.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the QuTiP: Quantum Toolbox in Python nor the names
# of its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
"""
This module contains functions for generating Qobj representation of a variety
of commonly occurring quantum operators.
"""
__all__ = ['jmat', 'spin_Jx', 'spin_Jy', 'spin_Jz', 'spin_Jm', 'spin_Jp',
'spin_J_set', 'sigmap', 'sigmam', 'sigmax', 'sigmay', 'sigmaz',
'destroy', 'create', 'qeye', 'identity', 'position', 'momentum',
'num', 'squeeze', 'squeezing', 'displace', 'commutator',
'qutrit_ops', 'qdiags', 'phase', 'qzero', 'enr_destroy',
'enr_identity', 'charge', 'tunneling']
import numbers
import numpy as np
import scipy
import scipy.sparse as sp
from qutip.qobj import Qobj
from qutip.fastsparse import fast_csr_matrix, fast_identity
from qutip.dimensions import flatten
#
# Spin operators
#
def jmat(j, *args):
"""Higher-order spin operators:
Parameters
----------
j : float
Spin of operator
args : str
Which operator to return 'x','y','z','+','-'.
If no args given, then output is ['x','y','z']
Returns
-------
jmat : qobj / ndarray
``qobj`` for requested spin operator(s).
Examples
--------
>>> jmat(1) # doctest: +SKIP
[ Quantum object: dims = [[3], [3]], \
shape = [3, 3], type = oper, isHerm = True
Qobj data =
[[ 0. 0.70710678 0. ]
[ 0.70710678 0. 0.70710678]
[ 0. 0.70710678 0. ]]
Quantum object: dims = [[3], [3]], \
shape = [3, 3], type = oper, isHerm = True
Qobj data =
[[ 0.+0.j 0.-0.70710678j 0.+0.j ]
[ 0.+0.70710678j 0.+0.j 0.-0.70710678j]
[ 0.+0.j 0.+0.70710678j 0.+0.j ]]
Quantum object: dims = [[3], [3]], \
shape = [3, 3], type = oper, isHerm = True
Qobj data =
[[ 1. 0. 0.]
[ 0. 0. 0.]
[ 0. 0. -1.]]]
Notes
-----
If no 'args' input, then returns array of ['x','y','z'] operators.
"""
if (np.fix(2 * j) != 2 * j) or (j < 0):
raise TypeError('j must be a non-negative integer or half-integer')
if not args:
return jmat(j, 'x'), jmat(j, 'y'), jmat(j, 'z')
if args[0] == '+':
A = _jplus(j)
elif args[0] == '-':
A = _jplus(j).getH()
elif args[0] == 'x':
A = 0.5 * (_jplus(j) + _jplus(j).getH())
elif args[0] == 'y':
A = -0.5 * 1j * (_jplus(j) - _jplus(j).getH())
elif args[0] == 'z':
A = _jz(j)
else:
raise TypeError('Invalid type')
return Qobj(A)
def _jplus(j):
"""
Internal functions for generating the data representing the J-plus
operator.
"""
m = np.arange(j, -j - 1, -1, dtype=complex)
data = (np.sqrt(j * (j + 1.0) - (m + 1.0) * m))[1:]
N = m.shape[0]
ind = np.arange(1, N, dtype=np.int32)
ptr = np.array(list(range(N-1))+[N-1]*2, dtype=np.int32)
ptr[-1] = N-1
return fast_csr_matrix((data,ind,ptr), shape=(N,N))
def _jz(j):
"""
Internal functions for generating the data representing the J-z operator.
"""
N = int(2*j+1)
data = np.array([j-k for k in range(N) if (j-k)!=0], dtype=complex)
# Even shaped matrix
if (N % 2 == 0):
ind = np.arange(N, dtype=np.int32)
ptr = np.arange(N+1,dtype=np.int32)
ptr[-1] = N
# Odd shaped matrix
else:
j = int(j)
ind = np.array(list(range(j))+list(range(j+1,N)), dtype=np.int32)
ptr = np.array(list(range(j+1))+list(range(j,N)), dtype=np.int32)
ptr[-1] = N-1
return fast_csr_matrix((data,ind,ptr), shape=(N,N))
#
# Spin j operators:
#
def spin_Jx(j):
"""Spin-j x operator
Parameters
----------
j : float
Spin of operator
Returns
-------
op : Qobj
``qobj`` representation of the operator.
"""
return jmat(j, 'x')
def spin_Jy(j):
"""Spin-j y operator
Parameters
----------
j : float
Spin of operator
Returns
-------
op : Qobj
``qobj`` representation of the operator.
"""
return jmat(j, 'y')
def spin_Jz(j):
"""Spin-j z operator
Parameters
----------
j : float
Spin of operator
Returns
-------
op : Qobj
``qobj`` representation of the operator.
"""
return jmat(j, 'z')
def spin_Jm(j):
"""Spin-j annihilation operator
Parameters
----------
j : float
Spin of operator
Returns
-------
op : Qobj
``qobj`` representation of the operator.
"""
return jmat(j, '-')
def spin_Jp(j):
"""Spin-j creation operator
Parameters
----------
j : float
Spin of operator
Returns
-------
op : Qobj
``qobj`` representation of the operator.
"""
return jmat(j, '+')
def spin_J_set(j):
"""Set of spin-j operators (x, y, z)
Parameters
----------
j : float
Spin of operators
Returns
-------
list : list of Qobj
list of ``qobj`` representating of the spin operator.
"""
return jmat(j)
#
# Pauli spin 1/2 operators:
#
def sigmap():
"""Creation operator for Pauli spins.
Examples
--------
>>> sigmap() # doctest: +SKIP
Quantum object: dims = [[2], [2]], \
shape = [2, 2], type = oper, isHerm = False
Qobj data =
[[ 0. 1.]
[ 0. 0.]]
"""
return jmat(1 / 2., '+')
def sigmam():
"""Annihilation operator for Pauli spins.
Examples
--------
>>> sigmam() # doctest: +SKIP
Quantum object: dims = [[2], [2]], \
shape = [2, 2], type = oper, isHerm = False
Qobj data =
[[ 0. 0.]
[ 1. 0.]]
"""
return jmat(1 / 2., '-')
def sigmax():
"""Pauli spin 1/2 sigma-x operator
Examples
--------
>>> sigmax() # doctest: +SKIP
Quantum object: dims = [[2], [2]], \
shape = [2, 2], type = oper, isHerm = False
Qobj data =
[[ 0. 1.]
[ 1. 0.]]
"""
return 2.0 * jmat(1.0 / 2, 'x')
def sigmay():
"""Pauli spin 1/2 sigma-y operator.
Examples
--------
>>> sigmay() # doctest: +SKIP
Quantum object: dims = [[2], [2]], \
shape = [2, 2], type = oper, isHerm = True
Qobj data =
[[ 0.+0.j 0.-1.j]
[ 0.+1.j 0.+0.j]]
"""
return 2.0 * jmat(1.0 / 2, 'y')
def sigmaz():
"""Pauli spin 1/2 sigma-z operator.
Examples
--------
>>> sigmaz() # doctest: +SKIP
Quantum object: dims = [[2], [2]], \
shape = [2, 2], type = oper, isHerm = True
Qobj data =
[[ 1. 0.]
[ 0. -1.]]
"""
return 2.0 * jmat(1.0 / 2, 'z')
#
# DESTROY returns annihilation operator for N dimensional Hilbert space
# out = destroy(N), N is integer value & N>0
#
def destroy(N, offset=0):
'''Destruction (lowering) operator.
Parameters
----------
N : int
Dimension of Hilbert space.
offset : int (default 0)
The lowest number state that is included in the finite number state
representation of the operator.
Returns
-------
oper : qobj
Qobj for lowering operator.
Examples
--------
>>> destroy(4) # doctest: +SKIP
Quantum object: dims = [[4], [4]], \
shape = [4, 4], type = oper, isHerm = False
Qobj data =
[[ 0.00000000+0.j 1.00000000+0.j 0.00000000+0.j 0.00000000+0.j]
[ 0.00000000+0.j 0.00000000+0.j 1.41421356+0.j 0.00000000+0.j]
[ 0.00000000+0.j 0.00000000+0.j 0.00000000+0.j 1.73205081+0.j]
[ 0.00000000+0.j 0.00000000+0.j 0.00000000+0.j 0.00000000+0.j]]
'''
if not isinstance(N, (int, np.integer)): # raise error if N not integer
raise ValueError("Hilbert space dimension must be integer value")
data = np.sqrt(np.arange(offset+1, N+offset, dtype=complex))
ind = np.arange(1,N, dtype=np.int32)
ptr = np.arange(N+1, dtype=np.int32)
ptr[-1] = N-1
return Qobj(fast_csr_matrix((data,ind,ptr),shape=(N,N)), isherm=False)
#
# create returns creation operator for N dimensional Hilbert space
# out = create(N), N is integer value & N>0
#
def create(N, offset=0):
'''Creation (raising) operator.
Parameters
----------
N : int
Dimension of Hilbert space.
Returns
-------
oper : qobj
Qobj for raising operator.
offset : int (default 0)
The lowest number state that is included in the finite number state
representation of the operator.
Examples
--------
>>> create(4) # doctest: +SKIP
Quantum object: dims = [[4], [4]], \
shape = [4, 4], type = oper, isHerm = False
Qobj data =
[[ 0.00000000+0.j 0.00000000+0.j 0.00000000+0.j 0.00000000+0.j]
[ 1.00000000+0.j 0.00000000+0.j 0.00000000+0.j 0.00000000+0.j]
[ 0.00000000+0.j 1.41421356+0.j 0.00000000+0.j 0.00000000+0.j]
[ 0.00000000+0.j 0.00000000+0.j 1.73205081+0.j 0.00000000+0.j]]
'''
if not isinstance(N, (int, np.integer)): # raise error if N not integer
raise ValueError("Hilbert space dimension must be integer value")
qo = destroy(N, offset=offset) # create operator using destroy function
return qo.dag()
def _implicit_tensor_dimensions(dimensions):
"""
Total flattened size and operator dimensions for operator creation routines
that automatically perform tensor products.
Parameters
----------
dimensions : (int) or (list of int) or (list of list of int)
First dimension of an operator which can create an implicit tensor
product. If the type is `int`, it is promoted first to `[dimensions]`.
From there, it should be one of the two-elements `dims` parameter of a
`qutip.Qobj` representing an `oper` or `super`, with possible tensor
products.
Returns
-------
size : int
Dimension of backing matrix required to represent operator.
dimensions : list
Dimension list in the form required by ``Qobj`` creation.
"""
if not isinstance(dimensions, list):
dimensions = [dimensions]
flat = flatten(dimensions)
if not all(isinstance(x, numbers.Integral) and x >= 0 for x in flat):
raise ValueError("All dimensions must be integers >= 0")
return np.prod(flat), [dimensions, dimensions]
def qzero(dimensions):
"""
Zero operator.
Parameters
----------
dimensions : (int) or (list of int) or (list of list of int)
Dimension of Hilbert space. If provided as a list of ints, then the
dimension is the product over this list, but the ``dims`` property of
the new Qobj are set to this list. This can produce either `oper` or
`super` depending on the passed `dimensions`.
Returns
-------
qzero : qobj
Zero operator Qobj.
"""
size, dimensions = _implicit_tensor_dimensions(dimensions)
# A sparse matrix with no data is equal to a zero matrix.
return Qobj(fast_csr_matrix(shape=(size, size), dtype=complex),
dims=dimensions, isherm=True)
#
# QEYE returns identity operator for a Hilbert space with dimensions dims.
# a = qeye(N), N is integer or list of integers & all elements >= 0
#
def qeye(dimensions):
"""
Identity operator.
Parameters
----------
dimensions : (int) or (list of int) or (list of list of int)
Dimension of Hilbert space. If provided as a list of ints, then the
dimension is the product over this list, but the ``dims`` property of
the new Qobj are set to this list. This can produce either `oper` or
`super` depending on the passed `dimensions`.
Returns
-------
oper : qobj
Identity operator Qobj.
Examples
--------
>>> qeye(3) # doctest: +SKIP
Quantum object: dims = [[3], [3]], shape = (3, 3), type = oper, \
isherm = True
Qobj data =
[[ 1. 0. 0.]
[ 0. 1. 0.]
[ 0. 0. 1.]]
>>> qeye([2,2]) # doctest: +SKIP
Quantum object: dims = [[2, 2], [2, 2]], shape = (4, 4), type = oper, \
isherm = True
Qobj data =
[[1. 0. 0. 0.]
[0. 1. 0. 0.]
[0. 0. 1. 0.]
[0. 0. 0. 1.]]
"""
size, dimensions = _implicit_tensor_dimensions(dimensions)
return Qobj(fast_identity(size),
dims=dimensions, isherm=True, isunitary=True)
def identity(dims):
"""Identity operator. Alternative name to :func:`qeye`.
Parameters
----------
dimensions : (int) or (list of int) or (list of list of int)
Dimension of Hilbert space. If provided as a list of ints, then the
dimension is the product over this list, but the ``dims`` property of
the new Qobj are set to this list. This can produce either `oper` or
`super` depending on the passed `dimensions`.
Returns
-------
oper : qobj
Identity operator Qobj.
"""
return qeye(dims)
def position(N, offset=0):
"""
Position operator x=1/sqrt(2)*(a+a.dag())
Parameters
----------
N : int
Number of Fock states in Hilbert space.
offset : int (default 0)
The lowest number state that is included in the finite number state
representation of the operator.
Returns
-------
oper : qobj
Position operator as Qobj.
"""
a = destroy(N, offset=offset)
return 1.0 / np.sqrt(2.0) * (a + a.dag())
def momentum(N, offset=0):
"""
Momentum operator p=-1j/sqrt(2)*(a-a.dag())
Parameters
----------
N : int
Number of Fock states in Hilbert space.
offset : int (default 0)
The lowest number state that is included in the finite number state
representation of the operator.
Returns
-------
oper : qobj
Momentum operator as Qobj.
"""
a = destroy(N, offset=offset)
return -1j / np.sqrt(2.0) * (a - a.dag())
def num(N, offset=0):
"""Quantum object for number operator.
Parameters
----------
N : int
The dimension of the Hilbert space.
offset : int (default 0)
The lowest number state that is included in the finite number state
representation of the operator.
Returns
-------
oper: qobj
Qobj for number operator.
Examples
--------
>>> num(4) # doctest: +SKIP
Quantum object: dims = [[4], [4]], \
shape = [4, 4], type = oper, isHerm = True
Qobj data =
[[0 0 0 0]
[0 1 0 0]
[0 0 2 0]
[0 0 0 3]]
"""
if offset == 0:
data = np.arange(1,N, dtype=complex)
ind = np.arange(1,N, dtype=np.int32)
ptr = np.array([0]+list(range(0,N)), dtype=np.int32)
ptr[-1] = N-1
else:
data = np.arange(offset, offset + N, dtype=complex)
ind = np.arange(N, dtype=np.int32)
ptr = np.arange(N+1,dtype=np.int32)
ptr[-1] = N
return Qobj(fast_csr_matrix((data,ind,ptr), shape=(N,N)), isherm=True)
def squeeze(N, z, offset=0):
"""Single-mode Squeezing operator.
Parameters
----------
N : int
Dimension of hilbert space.
z : float/complex
Squeezing parameter.
offset : int (default 0)
The lowest number state that is included in the finite number state
representation of the operator.
Returns
-------
oper : :class:`qutip.qobj.Qobj`
Squeezing operator.
Examples
--------
>>> squeeze(4, 0.25) # doctest: +SKIP
Quantum object: dims = [[4], [4]], \
shape = [4, 4], type = oper, isHerm = False
Qobj data =
[[ 0.98441565+0.j 0.00000000+0.j 0.17585742+0.j 0.00000000+0.j]
[ 0.00000000+0.j 0.95349007+0.j 0.00000000+0.j 0.30142443+0.j]
[-0.17585742+0.j 0.00000000+0.j 0.98441565+0.j 0.00000000+0.j]
[ 0.00000000+0.j -0.30142443+0.j 0.00000000+0.j 0.95349007+0.j]]
"""
a = destroy(N, offset=offset)
op = (1 / 2.0) * np.conj(z) * (a ** 2) - (1 / 2.0) * z * (a.dag()) ** 2
return op.expm()
def squeezing(a1, a2, z):
"""Generalized squeezing operator.
.. math::
S(z) = \\exp\\left(\\frac{1}{2}\\left(z^*a_1a_2
- za_1^\\dagger a_2^\\dagger\\right)\\right)
Parameters
----------
a1 : :class:`qutip.qobj.Qobj`
Operator 1.
a2 : :class:`qutip.qobj.Qobj`
Operator 2.
z : float/complex
Squeezing parameter.
Returns
-------
oper : :class:`qutip.qobj.Qobj`
Squeezing operator.
"""
b = 0.5 * (np.conj(z) * (a1 * a2) - z * (a1.dag() * a2.dag()))
return b.expm()
def displace(N, alpha, offset=0):
"""Single-mode displacement operator.
Parameters
----------
N : int
Dimension of Hilbert space.
alpha : float/complex
Displacement amplitude.
offset : int (default 0)
The lowest number state that is included in the finite number state
representation of the operator.
Returns
-------
oper : qobj
Displacement operator.
Examples
---------
>>> displace(4,0.25) # doctest: +SKIP
Quantum object: dims = [[4], [4]], \
shape = [4, 4], type = oper, isHerm = False
Qobj data =
[[ 0.96923323+0.j -0.24230859+0.j 0.04282883+0.j -0.00626025+0.j]
[ 0.24230859+0.j 0.90866411+0.j -0.33183303+0.j 0.07418172+0.j]
[ 0.04282883+0.j 0.33183303+0.j 0.84809499+0.j -0.41083747+0.j]
[ 0.00626025+0.j 0.07418172+0.j 0.41083747+0.j 0.90866411+0.j]]
"""
a = destroy(N, offset=offset)
D = (alpha * a.dag() - np.conj(alpha) * a).expm()
return D
def commutator(A, B, kind="normal"):
"""
Return the commutator of kind `kind` (normal, anti) of the
two operators A and B.
"""
if kind == 'normal':
return A * B - B * A
elif kind == 'anti':
return A * B + B * A
else:
raise TypeError("Unknown commutator kind '%s'" % kind)
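# Minimal usage sketch for commutator(): the Pauli matrices defined above
# satisfy [sigma_x, sigma_y] = 2*i*sigma_z.
# >>> commutator(sigmax(), sigmay()) == 2j * sigmaz()  # doctest: +SKIP
# True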
def qutrit_ops():
"""
Operators for a three level system (qutrit).
Returns
-------
opers: array
`array` of qutrit operators.
"""
from qutip.states import qutrit_basis
one, two, three = qutrit_basis()
sig11 = one * one.dag()
sig22 = two * two.dag()
sig33 = three * three.dag()
sig12 = one * two.dag()
sig23 = two * three.dag()
sig31 = three * one.dag()
return np.array([sig11, sig22, sig33, sig12, sig23, sig31],
dtype=object)
def qdiags(diagonals, offsets, dims=None, shape=None):
"""
Constructs an operator from an array of diagonals.
Parameters
----------
diagonals : sequence of array_like
Array of elements to place along the selected diagonals.
offsets : sequence of ints
Sequence for diagonals to be set:
- k=0 main diagonal
- k>0 kth upper diagonal
- k<0 kth lower diagonal
dims : list, optional
Dimensions for operator
shape : list, tuple, optional
Shape of operator. If omitted, a square operator large enough
to contain the diagonals is generated.
See Also
--------
scipy.sparse.diags : for usage information.
Notes
-----
This function requires SciPy 0.11+.
Examples
--------
>>> qdiags(sqrt(range(1, 4)), 1) # doctest: +SKIP
Quantum object: dims = [[4], [4]], \
shape = [4, 4], type = oper, isherm = False
Qobj data =
[[ 0. 1. 0. 0. ]
[ 0. 0. 1.41421356 0. ]
[ 0. 0. 0. 1.73205081]
[ 0. 0. 0. 0. ]]
"""
data = sp.diags(diagonals, offsets, shape, format='csr', dtype=complex)
if not dims:
dims = [[], []]
if not shape:
shape = []
return Qobj(data, dims, list(shape))
def phase(N, phi0=0):
"""
Single-mode Pegg-Barnett phase operator.
Parameters
----------
N : int
Number of basis states in Hilbert space.
phi0 : float
Reference phase.
Returns
-------
oper : qobj
Phase operator with respect to reference phase.
Notes
-----
The Pegg-Barnett phase operator is Hermitian on a truncated Hilbert space.
"""
phim = phi0 + (2.0 * np.pi * np.arange(N)) / N # discrete phase angles
n = np.arange(N).reshape((N, 1))
states = np.array([np.sqrt(kk) / np.sqrt(N) * np.exp(1.0j * n * kk)
for kk in phim])
ops = np.array([np.outer(st, st.conj()) for st in states])
return Qobj(np.sum(ops, axis=0))
def enr_destroy(dims, excitations):
"""
    Generate annihilation operators for modes in an excitation-number-restricted
    state space. For example, consider a system consisting of 4 modes, each
    with 5 states. The total Hilbert space size is 5**4 = 625. If we are
only interested in states that contain up to 2 excitations, we only need
to include states such as
(0, 0, 0, 0)
(0, 0, 0, 1)
(0, 0, 0, 2)
(0, 0, 1, 0)
(0, 0, 1, 1)
(0, 0, 2, 0)
...
This function creates annihilation operators for the 4 modes that act
within this state space:
a1, a2, a3, a4 = enr_destroy([5, 5, 5, 5], excitations=2)
    From this point onwards, the annihilation operators a1, ..., a4 can be
    used to set up a Hamiltonian, collapse operators and expectation-value
operators, etc., following the usual pattern.
Parameters
----------
dims : list
A list of the dimensions of each subsystem of a composite quantum
system.
excitations : integer
The maximum number of excitations that are to be included in the
state space.
Returns
-------
a_ops : list of qobj
A list of annihilation operators for each mode in the composite
quantum system described by dims.
"""
from qutip.states import enr_state_dictionaries
nstates, state2idx, idx2state = enr_state_dictionaries(dims, excitations)
    a_ops = [sp.lil_matrix((nstates, nstates), dtype=complex)
for _ in range(len(dims))]
for n1, state1 in idx2state.items():
for n2, state2 in idx2state.items():
for idx, a in enumerate(a_ops):
s1 = [s for idx2, s in enumerate(state1) if idx != idx2]
s2 = [s for idx2, s in enumerate(state2) if idx != idx2]
if (state1[idx] == state2[idx] - 1) and (s1 == s2):
a_ops[idx][n1, n2] = np.sqrt(state2[idx])
return [Qobj(a, dims=[dims, dims]) for a in a_ops]
def enr_identity(dims, excitations):
"""
Generate the identity operator for the excitation-number restricted
    state space defined by the `dims` and `excitations` arguments. See the
docstring for enr_fock for a more detailed description of these arguments.
Parameters
----------
dims : list
A list of the dimensions of each subsystem of a composite quantum
system.
excitations : integer
The maximum number of excitations that are to be included in the
state space.
Returns
-------
op : Qobj
        A Qobj instance that represents the identity operator in the
        excitation-number-restricted state space defined by `dims` and
        `excitations`.
"""
from qutip.states import enr_state_dictionaries
nstates, _, _ = enr_state_dictionaries(dims, excitations)
    data = sp.eye(nstates, nstates, dtype=complex)
return Qobj(data, dims=[dims, dims])
def charge(Nmax, Nmin=None, frac=1):
"""
Generate the diagonal charge operator over charge states
from Nmin to Nmax.
Parameters
----------
Nmax : int
Maximum charge state to consider.
Nmin : int (default = -Nmax)
Lowest charge state to consider.
frac : float (default = 1)
Specify fractional charge if needed.
Returns
-------
C : Qobj
Charge operator over [Nmin,Nmax].
Notes
-----
.. versionadded:: 3.2
"""
if Nmin is None:
Nmin = -Nmax
diag = np.arange(Nmin, Nmax+1, dtype=float)
if frac != 1:
diag *= frac
C = sp.diags(diag, 0, format='csr', dtype=complex)
return Qobj(C, isherm=True)
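# Usage sketch: charge(1) is the 3x3 diagonal operator with eigenvalues
# (-1, 0, 1), charge(2, 0) covers (0, 1, 2), and `frac` rescales the diagonal.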
def tunneling(N, m=1):
"""
Tunneling operator with elements of the form
:math:`\\sum |N><N+m| + |N+m><N|`.
Parameters
----------
N : int
Number of basis states in Hilbert space.
m : int (default = 1)
Number of excitations in tunneling event.
Returns
-------
T : Qobj
Tunneling operator.
Notes
-----
.. versionadded:: 3.2
"""
diags = [np.ones(N-m,dtype=int),np.ones(N-m,dtype=int)]
T = sp.diags(diags,[m,-m],format='csr', dtype=complex)
return Qobj(T, isherm=True)
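# Usage sketch: tunneling(N, 1) is the tridiagonal hopping operator
# sum_n |n><n+1| + |n+1><n|; a larger m places the pair of unit diagonals
# m steps away from the main diagonal.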
# Break circular dependencies by a trailing import.
# Note that we use a relative import here to deal with that
# qutip.tensor is the *function* tensor, not the module.
from qutip.tensor import tensor
| 26.370188 | 79 | 0.573129 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 19,474 | 0.729008 |
1c59215728acaff76dbcdca05ce20bf9c254f9f4 | 1,627 | py | Python | tests/test_deepsv.py | lsantuari/deepsv | debaa1442d1d97b8220be70e12321cf047d3e6a0 | [
"Apache-2.0"
]
| null | null | null | tests/test_deepsv.py | lsantuari/deepsv | debaa1442d1d97b8220be70e12321cf047d3e6a0 | [
"Apache-2.0"
]
| null | null | null | tests/test_deepsv.py | lsantuari/deepsv | debaa1442d1d97b8220be70e12321cf047d3e6a0 | [
"Apache-2.0"
]
| null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
from deepsv import deepsv
from unittest.mock import patch
"""Tests for the deepsv module.
"""
def test_something():
assert True
def test_adding_numbers():
assert deepsv.add_numbers(1, 1) == 2
assert deepsv.add_numbers(1, 2) != 2
def test_with_error():
with pytest.raises(ValueError):
# Do something that raises a ValueError
raise ValueError
# Fixture example
@pytest.fixture
def an_object():
return {}
def test_deepsv(an_object):
assert an_object == {}
def side_effect_function(mock):
print('This part of the code runs when patched')
return 'Some text that I want to test with'
def test_word_count_of_book_base():
book = 'https://www.gutenberg.org/files/59560/59560-0.txt'
wc = deepsv.word_count(book)
assert wc == 30577
@patch('deepsv.deepsv.download_text', side_effect=side_effect_function)
def test_word_count_of_book(mock):
# book = 'https://www.gutenberg.org/files/59560/59560-0.txt'
wc = deepsv.word_count(mock.text)
assert wc == 8
def test_count_single_base():
sequence = 'TTAGGACCA'
assert deepsv.count_single_base('A', sequence) == 3
assert deepsv.count_single_base('C', sequence) == 2
assert deepsv.count_single_base('G', sequence) == 2
assert deepsv.count_single_base('T', sequence) == 2
def side_effect_get_sequence():
return 'GTACGTCAG'
@patch('deepsv.deepsv.get_sequence', return_value='GTACGTCAG')
def test_count_bases(sequence):
seq_dict = {'A': 2, 'C': 2, 'G': 3, 'T': 2}
assert deepsv.count_bases(sequence) == seq_dict
| 22.287671 | 71 | 0.695144 | 0 | 0 | 0 | 0 | 469 | 0.288261 | 0 | 0 | 437 | 0.268593 |
1c59d4c3c8cb7118c29dce871107ae825dc23c99 | 8,959 | py | Python | tcex/bin/dep.py | phuerta-tc/tcex | 4a4e800e1a6114c1fde663f8c3ab7a1d58045c79 | [
"Apache-2.0"
]
| null | null | null | tcex/bin/dep.py | phuerta-tc/tcex | 4a4e800e1a6114c1fde663f8c3ab7a1d58045c79 | [
"Apache-2.0"
]
| null | null | null | tcex/bin/dep.py | phuerta-tc/tcex | 4a4e800e1a6114c1fde663f8c3ab7a1d58045c79 | [
"Apache-2.0"
]
| null | null | null | #!/usr/bin/env python
"""TcEx Dependencies Command"""
# standard library
import os
import platform
import shutil
import subprocess # nosec
import sys
from distutils.version import StrictVersion # pylint: disable=no-name-in-module
from pathlib import Path
from typing import List
from urllib.parse import quote
# third-party
import typer
# first-party
from tcex.app_config.models.tcex_json_model import LibVersionModel
from tcex.bin.bin_abc import BinABC
class Dep(BinABC):
"""Install dependencies for App."""
def __init__(
self,
branch: str,
no_cache_dir: bool,
proxy_host: str,
proxy_port: int,
proxy_user: str,
proxy_pass: str,
) -> None:
"""Initialize Class properties."""
super().__init__()
self.branch = branch
self.no_cache_dir = no_cache_dir
self.proxy_host = proxy_host
self.proxy_port = proxy_port
self.proxy_user = proxy_user
self.proxy_pass = proxy_pass
# properties
self.latest_version = None
self.lib_directory = (
f'lib_{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}'
)
self.proxy_enabled = False
self.proxy_env = {}
self.requirements_fqfn = Path('requirements.txt')
self.static_lib_dir = 'lib_latest'
# update tcex.json
self.tj.update.multiple()
def _build_command(self, python_executable: Path, lib_dir: Path) -> str:
"""Build the pip command for installing dependencies.
Args:
python_executable: The fully qualified path of the Python executable.
lib_dir: The fully qualified path of the lib directory.
Returns:
list: The Python pip command with all required args.
"""
exe_command = [
str(python_executable),
'-m',
'pip',
'install',
'-r',
str(self.requirements_fqfn),
'--ignore-installed',
'--quiet',
'--target',
lib_dir.name,
]
if self.no_cache_dir:
exe_command.append('--no-cache-dir')
if self.proxy_enabled:
# trust the pypi hosts to avoid ssl errors
trusted_hosts = ['pypi.org', 'pypi.python.org', 'files.pythonhosted.org']
for host in trusted_hosts:
exe_command.append('--trusted-host')
exe_command.append(host)
return exe_command
def _create_lib_latest(self) -> None:
"""Create the lib_latest symlink for App Builder."""
if platform.system() == 'Windows':
shutil.copytree(f'lib_{self.latest_version}', self.static_lib_dir)
else:
if os.path.islink(self.static_lib_dir):
os.unlink(self.static_lib_dir)
elif os.path.isfile(self.static_lib_dir):
                os.remove(self.static_lib_dir)
os.symlink(f'lib_{self.latest_version}', self.static_lib_dir)
@staticmethod
def _remove_previous(fqpn: Path) -> None:
"""Remove previous lib directory recursively."""
if os.access(fqpn, os.W_OK):
shutil.rmtree(fqpn)
def configure_proxy(self) -> None:
"""Configure proxy settings using environment variables."""
if os.getenv('HTTP_PROXY') or os.getenv('HTTPS_PROXY'):
# don't change proxy settings if the OS already has them configured.
return
if self.proxy_host is not None and self.proxy_port is not None:
# proxy url without auth
proxy_url = f'{self.proxy_host}:{self.proxy_port}'
if self.proxy_user is not None and self.proxy_pass is not None:
proxy_user = quote(self.proxy_user, safe='~')
proxy_pass = quote(self.proxy_pass, safe='~')
# proxy url with auth
proxy_url = f'{proxy_user}:{proxy_pass}@{proxy_url}'
# update proxy properties
self.proxy_enabled = True
self.proxy_env = {
'HTTP_PROXY': f'http://{proxy_url}',
'HTTPS_PROXY': f'http://{proxy_url}',
}
# display proxy setting
self.print_setting('Using Proxy Server', f'{self.proxy_host}:{self.proxy_port}')
def create_temp_requirements(self) -> None:
"""Create a temporary requirements.txt.
        This allows testing against a git branch instead of pulling from pypi.
"""
# Replace tcex version with develop branch of tcex
with self.requirements_fqfn.open() as fh:
current_requirements = fh.read().strip().split('\n')
self.requirements_fqfn = Path(f'temp-{self.requirements_fqfn}')
with self.requirements_fqfn.open(mode='w') as fh:
requirements = []
for line in current_requirements:
if not line:
continue
if line.startswith('tcex'):
line = (
'git+https://github.com/ThreatConnect-Inc/tcex.git@'
f'{self.branch}#egg=tcex'
)
requirements.append(line)
fh.write('\n'.join(requirements))
# display branch setting
self.print_setting('Using Branch', self.branch)
def install_deps(self) -> None:
"""Install Required Libraries using pip."""
# check for requirements.txt
if not self.requirements_fqfn.is_file():
self.handle_error('A requirements.txt file is required to install modules.')
# install all requested lib directories
for lib_version in self.lib_versions:
# remove lib directory from previous runs
self._remove_previous(lib_version.lib_dir)
if (
not lib_version.python_executable.is_file()
and not lib_version.python_executable.is_symlink()
):
# display error
typer.secho(
f'The Python executable ({lib_version.python_executable}) could not be found. '
'Skipping building lib directory for this Python version.',
fg=typer.colors.YELLOW,
)
continue
# display lib dir setting
self.print_setting('Lib Dir', f'{lib_version.lib_dir.name}')
# build the sub process command
exe_command = self._build_command(lib_version.python_executable, lib_version.lib_dir)
# display command setting
self.print_setting('Running', f'''{' '.join(exe_command)}''', fg_color='GREEN')
# recommended -> https://pip.pypa.io/en/latest/user_guide/#using-pip-from-your-program
p = subprocess.Popen( # pylint: disable=consider-using-with
exe_command,
shell=False, # nosec
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=self.proxy_env,
)
_, err = p.communicate() # pylint: disable=unused-variable
if p.returncode != 0:
# display error
err = err.decode('utf-8')
failure_display = typer.style(
f'Failure: {err}', fg=typer.colors.WHITE, bg=typer.colors.RED
)
typer.echo(f'{failure_display}')
sys.exit(1)
# TODO: [low] can this be updated to use version from model?
# version comparison
try:
python_version = lib_version.lib_dir.name.split('_', 1)[1]
except IndexError:
python_version = None
self.handle_error('Could not determine version from lib string.')
            # TODO: [low] investigate using semantic_version package
# track the latest Python version
if self.latest_version is None or StrictVersion(python_version) > StrictVersion(
self.latest_version
):
self.latest_version = python_version
if self.branch != 'master':
# remove temp requirements.txt file
self.requirements_fqfn.unlink()
# create lib_latest directory
self._create_lib_latest()
@property
def lib_versions(self) -> List[LibVersionModel]:
"""Return the lib_version data required to build lib directories."""
if self.tj.model.lib_versions:
self.print_setting('Python Version', 'using version(s) defined in tcex.json')
# return the python versions defined in the tcex.json file
return self.tj.model.lib_versions
# return the current python version
return [
LibVersionModel(**{'python_executable': sys.executable, 'lib_dir': self.lib_directory})
]
| 36.125 | 99 | 0.58444 | 8,498 | 0.948543 | 0 | 0 | 743 | 0.082933 | 0 | 0 | 3,089 | 0.344793 |
1c5a7f175c98d892dc83db59726cb2f27a8bed94 | 2,198 | py | Python | parser/fase2/team20/execution/executeSentence2.py | LopDlMa/tytus | 0b43ee1c7300cb11ddbe593e08239321b71dc443 | [
"MIT"
]
| null | null | null | parser/fase2/team20/execution/executeSentence2.py | LopDlMa/tytus | 0b43ee1c7300cb11ddbe593e08239321b71dc443 | [
"MIT"
]
| null | null | null | parser/fase2/team20/execution/executeSentence2.py | LopDlMa/tytus | 0b43ee1c7300cb11ddbe593e08239321b71dc443 | [
"MIT"
]
| null | null | null | from .AST.sentence import *
from .AST.expression import *
from .AST.error import *
import sys
sys.path.append("../")
from console import *
def executeSentence2(self, sentence):
if isinstance(sentence, CreateDatabase):
h=0
elif isinstance(sentence, ShowDatabases):
h=0
elif isinstance(sentence, DropDatabase):
h=0
elif isinstance(sentence,Use):
h=0
elif isinstance(sentence,CreateTable):
h=0
elif isinstance(sentence, CreateType):
h=0
elif isinstance(sentence, InsertAll):
h=0
elif isinstance(sentence, Insert):
h=0
elif isinstance(sentence, Delete):
archivo = open("C3D.py", 'a')
archivo.write("\n")
archivo.write("ICreateDatabase("+sentence.name+","+sentence.ifNotExistsFlag+","+sentence.OrReplace+","+sentence.OwnerMode+")")
archivo.close()
elif isinstance(sentence,Select):
print(sentence.columns)
#print(sentence.columns[0].function)
#print(sentence.columns[0].expression)
print(sentence.tables)
print(sentence.options)
elif isinstance(sentence,DropTable):
h=0
elif isinstance(sentence,AlterDatabaseRename):
archivo = open("C3D.py", 'a')
archivo.write("\n")
archivo.write("ICreateDatabase("+sentence.name+","+sentence.ifNotExistsFlag+","+sentence.OrReplace+","+sentence.OwnerMode+")")
archivo.close()
elif isinstance(sentence,Update):
h=0
elif isinstance(sentence,AlterTableDropConstraint):
archivo = open("C3D.py", 'a')
archivo.write("\n")
archivo.write("ICreateDatabase("+sentence.name+","+sentence.ifNotExistsFlag+","+sentence.OrReplace+","+sentence.OwnerMode+")")
archivo.close()
elif isinstance(sentence,AlterTableAlterColumnType):
h=0
elif isinstance(sentence, AlterTableAddColumn):
h=0
elif isinstance(sentence, AlterTableDropColumn):
archivo = open("C3D.py", 'a')
archivo.write("\n")
archivo.write("ICreateDatabase("+sentence.name+","+sentence.ifNotExistsFlag+","+sentence.OrReplace+","+sentence.OwnerMode+")")
archivo.close()
| 36.032787 | 135 | 0.641947 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 259 | 0.117834 |
1c5b0f1bcbcc57bddee91b24e585d8faf96244eb | 5,592 | py | Python | src/test/cli/component.py | huseyinbolt/cord-tester | ed9b79916e6326a45bfaf3227b8ff922d76df4f1 | [
"ECL-2.0",
"Apache-2.0"
]
| null | null | null | src/test/cli/component.py | huseyinbolt/cord-tester | ed9b79916e6326a45bfaf3227b8ff922d76df4f1 | [
"ECL-2.0",
"Apache-2.0"
]
| null | null | null | src/test/cli/component.py | huseyinbolt/cord-tester | ed9b79916e6326a45bfaf3227b8ff922d76df4f1 | [
"ECL-2.0",
"Apache-2.0"
]
| null | null | null |
# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright 2016-present Ciena Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Created on 24-Oct-2012
author:s: Anil Kumar ( [email protected] ),
Raghav Kashyap( [email protected] )
TestON is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
( at your option ) any later version.
TestON is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with TestON. If not, see <http://www.gnu.org/licenses/>.
"""
import logging
from clicommon import *
class Component( object ):
"""
    This is the template class for components
"""
def __init__( self ):
self.default = ''
self.wrapped = sys.modules[ __name__ ]
self.count = 0
def __getattr__( self, name ):
"""
        This is invoked when the attribute isn't found the usual ways.
        It looks for assert_attribute and executes it when an
        AttributeError occurs, returning the result of assert_attribute.
"""
try:
return getattr( self.wrapped, name )
except AttributeError as error:
# NOTE: The first time we load a driver module we get this error
if "'module' object has no attribute '__path__'" in error:
pass
else:
main.log.error( str(error.__class__) + " " + str(error) )
try:
def experimentHandling( *args, **kwargs ):
if main.EXPERIMENTAL_MODE == main.TRUE:
result = self.experimentRun( *args, **kwargs )
main.log.info( "EXPERIMENTAL MODE. API " +
str( name ) +
" not yet implemented. " +
"Returning dummy values" )
return result
else:
return main.FALSE
return experimentHandling
except TypeError as e:
main.log.error( "Arguments for experimental mode does not" +
" have key 'retruns'" + e )
def connect( self ):
vars( main )[ self.name + 'log' ] = logging.getLogger( self.name )
session_file = main.logdir + "/" + self.name + ".session"
self.log_handler = logging.FileHandler( session_file )
self.log_handler.setLevel( logging.DEBUG )
vars( main )[ self.name + 'log' ].setLevel( logging.DEBUG )
_formatter = logging.Formatter(
"%(asctime)s %(name)-10s: %(levelname)-8s: %(message)s" )
self.log_handler.setFormatter( _formatter )
vars( main )[ self.name + 'log' ].addHandler( self.log_handler )
# Adding header for the component log
vars( main )[ self.name + 'log' ].info( main.logHeader )
# Opening the session log to append command's execution output
self.logfile_handler = open( session_file, "w" )
return "Dummy"
def execute( self, cmd ):
return main.TRUE
# import commands
# return commands.getoutput( cmd )
def disconnect( self ):
return main.TRUE
def config( self ):
self = self
# Need to update the configuration code
def cleanup( self ):
return main.TRUE
def log( self, message ):
"""
        Find the component to which the log message belongs, based on
        the calling child object.
"""
vars( main )[ self.name + 'log' ].info( "\n" + message + "\n" )
def close_log_handles( self ):
vars( main )[ self.name + 'log' ].removeHandler( self.log_handler )
if self.logfile_handler:
self.logfile_handler.close()
def get_version( self ):
return "Version unknown"
def experimentRun( self, *args, **kwargs ):
# FIXME handle *args
args = utilities.parse_args( [ "RETURNS" ], **kwargs )
return args[ "RETURNS" ]
if __name__ != "__main__":
import sys
sys.modules[ __name__ ] = Component()
| 35.392405 | 76 | 0.619456 | 3,482 | 0.622675 | 0 | 0 | 0 | 0 | 0 | 0 | 3,004 | 0.537196 |
1c5cd63de747901926f8ddd0a4d149ca05999677 | 2,575 | py | Python | python-framework/handlers/base/auth.py | huangxingx/python-framework | a62618b0ee5ecff9de426327892cdd690d10510d | [
"MIT"
]
| 7 | 2019-10-24T03:26:22.000Z | 2019-10-27T14:55:07.000Z | python-framework/handlers/base/auth.py | PJoemu/python-framework | a62618b0ee5ecff9de426327892cdd690d10510d | [
"MIT"
]
| 3 | 2021-06-08T19:13:10.000Z | 2022-01-13T00:38:48.000Z | python-framework/handlers/base/auth.py | PJoemu/python-framework | a62618b0ee5ecff9de426327892cdd690d10510d | [
"MIT"
]
| 2 | 2019-10-25T03:54:51.000Z | 2020-06-28T08:50:12.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @author: x.huang
# @date:17-8-4
import logging
from pony.orm import db_session
from handlers.base.base import BaseRequestHandler
class LoginRequireError(Exception):
pass
class AuthBaseHandler(BaseRequestHandler):
""" 登录验证的基类 """
def prepare(self):
if not self.current_user and self.request.method.lower() != 'options':
self.render_error('Auth Error.', status_code=401)
super(AuthBaseHandler, self).prepare()
class Authentication(object):
def __init__(self, handler):
self.handler = handler
def admin_auth(self, username, password):
try:
with db_session:
user_obj = self.handler.m_useradmin.get(username=username, is_delete=False)
if user_obj:
is_auth = user_obj.check_password(password)
if is_auth:
user_dict = user_obj.to_dict(exclude=self.handler.m_useradmin.password.column)
user_dict['permission'] = user_obj.role_id.permission if user_obj.role_id else None
return user_dict
else:
return None
except Exception as e:
logging.error(str(e))
return None
def api_auth(self, phone, password, sc_auth=False):
try:
with db_session:
user_obj = self.handler.m_appuser.get(phone=phone, is_delete=False)
if user_obj:
is_auth = False
if password:
is_auth = user_obj.check_password(password)
if sc_auth or is_auth:
user_dict = user_obj.to_dict()
return user_dict
else:
return None
except Exception as e:
logging.error(str(e))
return None
def web_auth(self, username, password):
try:
with db_session:
user_obj = self.handler.m_comuser.get(com_username=username, is_delete=False)
if user_obj:
is_auth = False
if password:
is_auth = user_obj.check_password(password)
if is_auth:
user_dict = user_obj.to_dict()
return user_dict
else:
return None
except Exception as e:
logging.error(str(e))
return None
| 32.1875 | 107 | 0.533204 | 2,399 | 0.926613 | 0 | 0 | 0 | 0 | 0 | 0 | 139 | 0.053689 |
1c5d3932d3d58eb3852f548752bb665e5c02d910 | 475 | py | Python | pysol/core/helpers.py | lotfio/pysol | 34fac6d1ec246a7a037d8237e00974a9a9548faa | [
"MIT"
]
| 2 | 2019-10-09T21:58:20.000Z | 2020-01-08T07:29:28.000Z | pysol/core/helpers.py | lotfio/pysol | 34fac6d1ec246a7a037d8237e00974a9a9548faa | [
"MIT"
]
| null | null | null | pysol/core/helpers.py | lotfio/pysol | 34fac6d1ec246a7a037d8237e00974a9a9548faa | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
#| This file is part of cony
#|
#| @package Pysol python cli application
#| @author <lotfio lakehal>
#| @license MIT
#| @version 0.1.0
#| @copyright 2019 lotfio lakehal
import sys
# load module function
# this function loads a module by string name
def load_module(module):
module_path = module
if module_path in sys.modules:
return sys.modules[module_path]
return __import__(module_path, fromlist=[module]) | 23.75 | 53 | 0.673684 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 272 | 0.572632 |
1c5dc3f5290f019cca3cade7daba3d9be28fa2da | 6,387 | py | Python | autograd_hacks/test_autograd_hacks.py | jusjusjus/autograd-hacks | c12556d03e40cccaa0e70e14b0120b723002ed9e | [
"Unlicense"
]
| 1 | 2020-05-01T12:14:43.000Z | 2020-05-01T12:14:43.000Z | autograd_hacks/test_autograd_hacks.py | jusjusjus/autograd-hacks | c12556d03e40cccaa0e70e14b0120b723002ed9e | [
"Unlicense"
]
| null | null | null | autograd_hacks/test_autograd_hacks.py | jusjusjus/autograd-hacks | c12556d03e40cccaa0e70e14b0120b723002ed9e | [
"Unlicense"
]
| null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
import pytest
from . import autograd_hacks
class StriddenNet(nn.Module):
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(1, 20, 5, stride=2, padding=2)
self.conv2 = nn.Conv2d(20, 30, 5, stride=2, padding=2)
self.fc1_input_size = 7 * 7 * 30
self.fc1 = nn.Linear(self.fc1_input_size, 500)
self.fc2 = nn.Linear(500, 10)
def forward(self, x):
batch_size = x.shape[0]
x = F.relu(self.conv1(x))
x = F.relu(self.conv2(x))
x = x.view(batch_size, self.fc1_input_size)
x = F.relu(self.fc1(x))
x = self.fc2(x)
return x
class SimpleNet(nn.Module):
"""Lenet-5 from https://github.com/pytorch/examples/blob/master/mnist/main.py"""
def __init__(self):
super().__init__()
self.linear = nn.Linear(28 * 28, 10)
def forward(self, x):
x = torch.flatten(x, 1)
return self.linear(x)
class Net(nn.Module):
"""Lenet-5 from https://github.com/pytorch/examples/blob/master/mnist/main.py"""
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(1, 20, 5)
self.conv2 = nn.Conv2d(20, 50, 5)
self.fc1 = nn.Linear(4 * 4 * 50, 500)
self.fc2 = nn.Linear(500, 10)
def forward(self, x):
x = F.relu(self.conv1(x))
x = F.max_pool2d(x, 2, 2)
x = F.relu(self.conv2(x))
x = F.max_pool2d(x, 2, 2)
x = x.view(-1, 4 * 4 * 50)
x = F.relu(self.fc1(x))
x = self.fc2(x)
return x
class TinyNet(nn.Module):
"""Tiny LeNet-5 for Hessian testing"""
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(1, 2, 2, 1)
self.conv2 = nn.Conv2d(2, 2, 2, 1)
self.fc1 = nn.Linear(2, 2)
self.fc2 = nn.Linear(2, 10)
def forward(self, x): # 28x28
x = F.max_pool2d(x, 4, 4) # 7x7
x = F.relu(self.conv1(x)) # 6x6
x = F.max_pool2d(x, 2, 2) # 3x3
x = F.relu(self.conv2(x)) # 2x2
x = F.max_pool2d(x, 2, 2) # 1x1
x = x.view(-1, 2 * 1 * 1) # C * W * H
x = F.relu(self.fc1(x))
x = self.fc2(x)
return x
# Autograd helpers, from https://gist.github.com/apaszke/226abdf867c4e9d6698bd198f3b45fb7
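# jacobian(y, x) builds dy/dx row by row by back-propagating one-hot vectors
# through the flattened y; hessian(y, x) is the jacobian of the jacobian.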
def jacobian(y: torch.Tensor, x: torch.Tensor, create_graph=False):
jac = []
flat_y = y.reshape(-1)
grad_y = torch.zeros_like(flat_y)
for i in range(len(flat_y)):
grad_y[i] = 1.
grad_x, = torch.autograd.grad(flat_y, x, grad_y, retain_graph=True, create_graph=create_graph)
jac.append(grad_x.reshape(x.shape))
grad_y[i] = 0.
return torch.stack(jac).reshape(y.shape + x.shape)
def hessian(y: torch.Tensor, x: torch.Tensor):
return jacobian(jacobian(y, x, create_graph=True), x)
@pytest.mark.parametrize("Net", [Net, TinyNet, SimpleNet, StriddenNet])
def test_grad1(Net):
torch.manual_seed(1)
model = Net()
loss_fn = nn.CrossEntropyLoss()
n = 4
data = torch.rand(n, 1, 28, 28)
targets = torch.LongTensor(n).random_(0, 10)
autograd_hacks.add_hooks(model)
output = model(data)
loss_fn(output, targets).backward(retain_graph=True)
autograd_hacks.compute_grad1(model)
autograd_hacks.disable_hooks()
# Compare values against autograd
losses = torch.stack([loss_fn(output[i:i+1], targets[i:i+1])
for i in range(len(data))])
for layer in model.modules():
if not autograd_hacks.is_supported(layer):
continue
for param in layer.parameters():
assert torch.allclose(param.grad, param.grad1[0].mean(dim=0))
assert torch.allclose(jacobian(losses, param), param.grad1[0])
def test_applying_backwards_twice_fails():
torch.manual_seed(42)
model = Net()
loss_fn = nn.CrossEntropyLoss()
data = torch.rand(5, 1, 28, 28)
targets = torch.LongTensor(5).random_(0, 10)
autograd_hacks.add_hooks(model)
output = model(data)
loss_fn(output, targets).backward()
output = model(data)
with pytest.raises(AssertionError):
loss_fn(output, targets).backward()
def test_grad1_for_multiple_connected_passes():
torch.manual_seed(42)
model = SimpleNet()
loss_fn = nn.CrossEntropyLoss(reduction='sum')
def get_data(batch_size):
return (torch.rand(batch_size, 1, 28, 28),
torch.LongTensor(batch_size).random_(0, 10))
n = 5
autograd_hacks.add_hooks(model)
data, targets = get_data(n)
output = model(data)
loss1 = loss_fn(output, targets)
data, targets = get_data(n)
output = model(data)
loss2 = loss_fn(output, targets)
loss = loss1 - loss2
loss.backward()
autograd_hacks.compute_grad1(model)
autograd_hacks.disable_hooks()
for n, p in model.named_parameters():
grad1 = p.grad1[0] + p.grad1[1]
assert p.grad.shape == grad1.shape[1:]
assert torch.allclose(p.grad, grad1.mean(dim=0), atol=1e-7)
@pytest.mark.parametrize("hess_type", ['CrossEntropy', 'LeastSquares'])
def test_hess(hess_type):
torch.manual_seed(1)
model = TinyNet()
def least_squares_loss(data_, targets_):
assert len(data_) == len(targets_)
err = data_ - targets_
return torch.sum(err * err) / 2 / len(data_)
n = 3
data = torch.rand(n, 1, 28, 28)
autograd_hacks.add_hooks(model)
output = model(data)
if hess_type == 'LeastSquares':
targets = torch.rand(output.shape)
loss_fn = least_squares_loss
elif hess_type == 'CrossEntropy':
targets = torch.LongTensor(n).random_(0, 10)
loss_fn = nn.CrossEntropyLoss()
else:
raise ValueError(f"Unknown hessian type")
autograd_hacks.backprop_hess(output, hess_type)
autograd_hacks.clear_backprops(model)
autograd_hacks.backprop_hess(output, hess_type)
autograd_hacks.compute_hess(model)
autograd_hacks.disable_hooks()
for layer in model.modules():
if not autograd_hacks.is_supported(layer):
continue
for param in layer.parameters():
loss = loss_fn(output, targets)
hess_autograd = hessian(loss, param)
hess = param.hess
assert torch.allclose(hess, hess_autograd.reshape(hess.shape))
| 29.706977 | 102 | 0.614686 | 2,143 | 0.335525 | 0 | 0 | 2,217 | 0.347111 | 0 | 0 | 463 | 0.072491 |
1c5e34faccefb41600dc36e2445e46683f4cb6c1 | 5,213 | py | Python | tests/test_command.py | paulfurley/Mailpile | f89611d916e41e74dd00997327a2c2d042a96399 | [
"Apache-2.0"
]
| 1 | 2017-04-19T11:10:05.000Z | 2017-04-19T11:10:05.000Z | tests/test_command.py | paulfurley/Mailpile | f89611d916e41e74dd00997327a2c2d042a96399 | [
"Apache-2.0"
]
| null | null | null | tests/test_command.py | paulfurley/Mailpile | f89611d916e41e74dd00997327a2c2d042a96399 | [
"Apache-2.0"
]
| null | null | null | import unittest
import mailpile
from mock import patch
from mailpile.commands import Action as action
from tests import MailPileUnittest
class TestCommands(MailPileUnittest):
def test_index(self):
res = self.mp.rescan()
self.assertEqual(res.as_dict()["status"], 'success')
def test_search(self):
# A random search must return results in less than 0.2 seconds.
res = self.mp.search("foo")
self.assertLess(float(res.as_dict()["elapsed"]), 0.2)
def test_optimize(self):
res = self.mp.optimize()
self.assertEqual(res.as_dict()["result"], True)
def test_set(self):
self.mp.set("prefs.num_results=1")
results = self.mp.search("twitter")
self.assertEqual(results.result['stats']['count'], 1)
def test_unset(self):
self.mp.unset("prefs.num_results")
results = self.mp.search("twitter")
self.assertEqual(results.result['stats']['count'], 3)
def test_add(self):
res = self.mp.add("tests")
self.assertEqual(res.as_dict()["result"], True)
def test_add_mailbox_already_in_pile(self):
res = self.mp.add("tests")
self.assertEqual(res.as_dict()["result"], True)
def test_add_mailbox_no_such_directory(self):
res = self.mp.add("wut?")
self.assertEqual(res.as_dict()["result"], False)
def test_output(self):
res = self.mp.output("json")
self.assertEqual(res.as_dict()["result"], {'output': 'json'})
def test_help(self):
res = self.mp.help()
self.assertEqual(len(res.result), 3)
def test_help_variables(self):
res = self.mp.help_variables()
self.assertGreater(len(res.result['variables']), 1)
def test_help_with_param_search(self):
res = self.mp.help('search')
self.assertEqual(res.result['pre'], 'Search your mail!')
def test_help_urlmap_as_text(self):
res = self.mp.help_urlmap()
self.assertEqual(len(res.result), 1)
self.assertGreater(res.as_text(), 0)
def test_crypto_policy_auto_set_all_action(self):
res = self.mp.crypto_policy_auto_set_all()
self.assertEqual(res.as_dict()["message"], u'Discovered crypto policy')
self.assertEqual(set(), res.as_dict()['result'])
def test_crypto_policy_action(self):
res = self.mp.crypto_policy("foobar")
self.assertEqual(res.as_dict()["message"], u'Crypto policy for foobar is none')
self.assertEqual(res.as_dict()["result"], 'none')
class TestCommandResult(MailPileUnittest):
def test_command_result_as_dict(self):
res = self.mp.help_splash()
self.assertGreater(len(res.as_dict()), 0)
def test_command_result_as_text(self):
res = self.mp.help_splash()
self.assertGreater(res.as_text(), 0)
def test_command_result_as_text_for_boolean_result(self):
res = self.mp.rescan()
self.assertEquals(res.result['messages'], 0)
self.assertEquals(res.result['mailboxes'], 0)
self.assertEquals(res.result['vcards'], 0)
def test_command_result_non_zero(self):
res = self.mp.help_splash()
self.assertTrue(res)
def test_command_result_as_json(self):
res = self.mp.help_splash()
self.assertGreater(res.as_json(), 0)
def test_command_result_as_html(self):
res = self.mp.help_splash()
self.assertGreater(res.as_html(), 0)
class TestTagging(MailPileUnittest):
def test_addtag(self):
pass
class TestGPG(MailPileUnittest):
def test_key_search(self):
gpg_result = {
"D13C70DA": {
"uids": [
{
"email": "[email protected]"
}
]
}
}
with patch('mailpile.commands.GnuPG') as gpg_mock:
gpg_mock.return_value.search_key.return_value = gpg_result
res = action(self.mp._session, "crypto/gpg/searchkey", "D13C70DA")
email = res.result["D13C70DA"]["uids"][0]["email"]
self.assertEqual(email, "[email protected]")
gpg_mock.return_value.search_key.assert_called_with("D13C70DA")
def test_key_receive(self):
gpg_result = {
"updated": [
{
"fingerprint": "08A650B8E2CBC1B02297915DC65626EED13C70DA"
}
]
}
with patch('mailpile.commands.GnuPG') as gpg_mock:
gpg_mock.return_value.recv_key.return_value = gpg_result
res = action(self.mp._session, "crypto/gpg/receivekey", "D13C70DA")
self.assertEqual(res.result[0]["updated"][0]["fingerprint"],
"08A650B8E2CBC1B02297915DC65626EED13C70DA")
gpg_mock.return_value.recv_key.assert_called_with("D13C70DA")
def test_key_import(self):
res = action(self.mp._session, "crypto/gpg/importkey",
'testing/pub.key')
self.assertEqual(res.result["results"]["count"], 1)
def test_nicknym_get_key(self):
pass
def test_nicknym_refresh_key(self):
pass
if __name__ == '__main__':
unittest.main()
| 32.378882 | 87 | 0.619029 | 5,014 | 0.961826 | 0 | 0 | 0 | 0 | 0 | 0 | 854 | 0.163821 |
1c5fd36ae0b1a46a987890321b0748ee13ed63f6 | 7,739 | py | Python | navrep/envs/rosnavtrainencodedenv.py | ReykCS/navrep | 22ee4727268188414a8121f069e45c2ab798ca19 | [
"MIT"
]
| null | null | null | navrep/envs/rosnavtrainencodedenv.py | ReykCS/navrep | 22ee4727268188414a8121f069e45c2ab798ca19 | [
"MIT"
]
| null | null | null | navrep/envs/rosnavtrainencodedenv.py | ReykCS/navrep | 22ee4727268188414a8121f069e45c2ab798ca19 | [
"MIT"
]
| null | null | null | from gym import spaces
import numpy as np
from scipy import interpolate
import yaml
from navrep.envs.navreptrainenv import NavRepTrainEnv
from navrep.rosnav_models.utils.reward import RewardCalculator
from navrep.rosnav_models.utils.reward import RewardCalculator
class RosnavTrainEncodedEnv(NavRepTrainEnv):
""" takes a (2) action as input
outputs encoded obs (546) """
def __init__(self, roboter_yaml_path, roboter="tb3",
reward_fnc="rule_00", scenario='test',
silent=False, adaptive=True, max_steps_per_episode=500):
super(RosnavTrainEncodedEnv, self).__init__(scenario=scenario, silent=silent, adaptive=adaptive,
legacy_mode=False, collect_statistics=True)
self.setup_by_configuration(roboter_yaml_path)
min, max = self._get_action_space(roboter)
self.action_space = spaces.Box(
low=np.array(min),
high=np.array(max),
            dtype=float,
)
self.observation_space = spaces.Box(
low=0,
high=np.inf,
shape=(self._laser_num_beams + 2,),
dtype=np.float32,
)
self.reward_calculator = RewardCalculator(
robot_radius=self._robot_radius,
safe_dist=1.6 * self._robot_radius,
goal_radius=0.1,
rule=reward_fnc,
extended_eval=True,
)
self._steps_curr_episode = 0
self._max_steps_per_episode = max_steps_per_episode
self.last_observation = None
self.roboter = roboter
def _get_action_space(self, roboter):
if roboter == "ridgeback":
return [self.linear_range[0], 0, self.angular_range[0]], [self.linear_range[1], 0.5, self.angular_range[1]]
return [self.linear_range[0], self.angular_range[0]], [self.linear_range[1], self.angular_range[1]]
def _get_action(self, action):
if self.roboter == "ridgeback":
return np.array(action)
return np.array([action[0], 0, action[1]])
def _get_observation_from_scan(self, obs):
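        # Convert the simulator's 1080-beam scan to the beam count and frame of
        # the selected robot: the scan is rotated, cropped, interpolated and/or
        # min-pooled as needed for each robot model below.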
if self.roboter == "tb3":
lidar_upsampling = 1080 // 360
downsampled_scan = obs.reshape((-1, lidar_upsampling))
downsampled_scan = np.min(downsampled_scan, axis=1)
return downsampled_scan
if self.roboter == "jackal" or self.roboter == "ridgeback":
rotated_scan = np.zeros_like(obs)
rotated_scan[:540] = obs[540:]
rotated_scan[540:] = obs[:540]
downsampled = np.zeros(810)
downsampled[:405] = rotated_scan[135:540]
downsampled[405:] = rotated_scan[540:945]
f = interpolate.interp1d(np.arange(0, 810), downsampled)
upsampled = f(np.linspace(0, 810 - 1, 944))
lidar = upsampled.reshape((-1, 2))
lidar = np.min(lidar, axis=1)
return lidar
if self.roboter == "agv":
rotated_scan = np.zeros_like(obs)
rotated_scan[:540] = obs[540:]
rotated_scan[540:] = obs[:540]
downsampled = np.zeros(540)
downsampled[:270] = rotated_scan[270:540]
downsampled[270:] = rotated_scan[540:810]
f = interpolate.interp1d(np.arange(0, 540), downsampled)
return f(np.linspace(0.0, 540 - 1, 720))
def step(self, action):
self._steps_curr_episode += 1
action_encoded = self._get_action(action)
obs, reward, done, info = super(RosnavTrainEncodedEnv, self).step(action_encoded)
lidar, rho, theta = self._encode_obs(obs)
# reward, reward_info = self.reward_calculator.get_reward(
# np.array(lidar),
# (rho, theta),
# action=np.array([action_encoded[0], action_encoded[2]]),
# global_plan=None,
# robot_pose=None
# )
# done = reward_info["is_done"]
print(reward)
# done = reward_info["is_done"]
observation = np.hstack([lidar, np.array([rho, theta])])
# if done:
# info["done_reason"] = reward_info["done_reason"]
# info["is_success"] = reward_info["is_success"]
# if self._steps_curr_episode > self._max_steps_per_episode:
# done = True
# info["done_reason"] = 0
# info["is_success"] = 0
# if done:
# observation = self.reset()
return observation, 100, done, info
def reset(self, *args, **kwargs):
self.reward_calculator.reset()
self._steps_curr_episode = 0
obs = super(RosnavTrainEncodedEnv, self).reset(*args, **kwargs)
observation, rho, theta = self._encode_obs(obs)
return np.hstack([observation, np.array([rho, theta])])
def _encode_obs(self, obs):
scan, robotstate = obs
lidar = [np.min([self.laser_range, i]) for i in self._get_observation_from_scan(scan)]
self.last_rosnav_scan = lidar
rho, theta = self._get_goal_pose_in_robot_frame(robotstate[:2])
return lidar, rho, theta
def close(self):
super(RosnavTrainEncodedEnv, self).close()
def render(self, mode="human", close=False, save_to_file=False,
robocentric=False, render_decoded_scan=True):
#super(RosnavTrainEncodedEnv, self).render(
# mode=mode, close=close, lidar_scan_override=self.last_rosnav_scan, save_to_file=save_to_file,
# robocentric=robocentric)
pass
def _get_goal_pose_in_robot_frame(self, goal_pos):
y_relative = goal_pos[1]
x_relative = goal_pos[0]
rho = (x_relative ** 2 + y_relative ** 2) ** 0.5
theta = (np.arctan2(y_relative, x_relative) + 4 * np.pi) % (2 * np.pi) - np.pi
return rho, theta
def setup_by_configuration(
self, robot_yaml_path
):
"""get the configuration from the yaml file, including robot radius, discrete action space and continuous action space.
        Args:
            robot_yaml_path: path to the robot configuration yaml file.
"""
with open(robot_yaml_path, "r") as fd:
robot_data = yaml.safe_load(fd)
# get robot radius
for body in robot_data["bodies"]:
if body["name"] == "base_footprint":
for footprint in body["footprints"]:
if footprint["radius"]:
self._robot_radius = footprint["radius"] * 1.05
# get laser related information
for plugin in robot_data["plugins"]:
if plugin["type"] == "Laser":
laser_angle_min = plugin["angle"]["min"]
laser_angle_max = plugin["angle"]["max"]
laser_angle_increment = plugin["angle"]["increment"]
self.laser_range = plugin["range"]
self._laser_num_beams = int(
round(
(laser_angle_max - laser_angle_min)
/ laser_angle_increment
)
+ 1
)
self._laser_max_range = plugin["range"]
self.linear_range = robot_data["robot"]["continuous_actions"]["linear_range"]
self.angular_range = robot_data["robot"]["continuous_actions"]["angular_range"]
@staticmethod
def _stack_spaces(ss):
low = []
high = []
for space in ss:
low.extend(space.low.tolist())
high.extend(space.high.tolist())
return spaces.Box(np.array(low).flatten(), np.array(high).flatten()) | 35.663594 | 127 | 0.580178 | 7,472 | 0.965499 | 0 | 0 | 265 | 0.034242 | 0 | 0 | 1,333 | 0.172244 |
1c6030cb89b906c901110530b42acd2d1d95f2a5 | 9,789 | py | Python | pdm/models/repositories.py | gaojiuli/pdm | 9aedd12e864b57826e850a10eeea45900bb62aad | [
"MIT"
]
| 1 | 2021-02-04T19:43:38.000Z | 2021-02-04T19:43:38.000Z | pdm/models/repositories.py | gaojiuli/pdm | 9aedd12e864b57826e850a10eeea45900bb62aad | [
"MIT"
]
| null | null | null | pdm/models/repositories.py | gaojiuli/pdm | 9aedd12e864b57826e850a10eeea45900bb62aad | [
"MIT"
]
| null | null | null | from __future__ import annotations
import sys
from functools import wraps
from typing import TYPE_CHECKING, Callable, Dict, Iterable, List, Optional, Tuple
from pdm._types import CandidateInfo, Source
from pdm.context import context
from pdm.exceptions import CandidateInfoNotFound, CorruptedCacheError
from pdm.models.candidates import Candidate
from pdm.models.requirements import (
Requirement,
filter_requirements_with_extras,
parse_requirement,
)
from pdm.models.specifiers import PySpecSet, SpecifierSet
from pdm.utils import allow_all_wheels
if TYPE_CHECKING:
from pdm.models.environment import Environment
def cache_result(
func: Callable[["BaseRepository", Candidate], CandidateInfo]
) -> Callable[["BaseRepository", Candidate], CandidateInfo]:
@wraps(func)
def wrapper(self, candidate: Candidate) -> CandidateInfo:
result = func(self, candidate)
self._candidate_info_cache.set(candidate, result)
return result
return wrapper
class BaseRepository:
"""A Repository acts as the source of packages and metadata."""
def __init__(self, sources: List[Source], environment: Environment) -> None:
"""
:param sources: a list of sources to download packages from.
:param environment: the bound environment instance.
"""
self.sources = sources
self.environment = environment
self._candidate_info_cache = context.make_candidate_info_cache()
self._hash_cache = context.make_hash_cache()
def get_filtered_sources(self, req: Requirement) -> List[Source]:
"""Get matching sources based on the index attribute."""
if not req.index:
return self.sources
return [source for source in self.sources if source["name"] == req.index]
def get_dependencies(
self, candidate: Candidate
) -> Tuple[List[Requirement], PySpecSet, str]:
"""Get (dependencies, python_specifier, summary) of the candidate."""
requirements, requires_python, summary = [], "", ""
last_ext_info = None
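        # Try each metadata source in turn; the for/else re-raises the last
        # failure only if every getter raised CandidateInfoNotFound.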
for getter in self.dependency_generators():
try:
requirements, requires_python, summary = getter(candidate)
except CandidateInfoNotFound:
last_ext_info = sys.exc_info()
continue
break
else:
if last_ext_info is not None:
raise last_ext_info[1].with_traceback(last_ext_info[2])
requirements = [parse_requirement(line) for line in requirements]
if candidate.req.extras:
# HACK: If this candidate has extras, add the original candidate
# (same pinned version, no extras) as its dependency. This ensures
# the same package with different extras (treated as distinct by
# the resolver) have the same version.
self_req = candidate.req.copy()
self_req.extras = None
requirements.append(self_req)
return requirements, PySpecSet(requires_python), summary
def find_matches(
self,
requirement: Requirement,
requires_python: PySpecSet = PySpecSet(),
allow_prereleases: Optional[bool] = None,
allow_all: bool = False,
) -> List[Candidate]:
"""Find matching candidates of a requirement.
:param requirement: the given requirement.
:param requires_python: the Python version constraint.
:param allow_prereleases: whether allow prerelease versions, or let us determine
if not given. If no non-prerelease is available, prereleases will be used.
:param allow_all: whether allow all wheels.
:returns: a list of candidates.
"""
if requirement.is_named:
return self._find_named_matches(
requirement, requires_python, allow_prereleases, allow_all
)
else:
# Fetch metadata so that resolver can know the candidate's name.
can = Candidate(requirement, self.environment)
can.get_metadata()
return [can]
def _find_named_matches(
self,
requirement: Requirement,
requires_python: PySpecSet = PySpecSet(),
allow_prereleases: Optional[bool] = None,
allow_all: bool = False,
) -> List[Candidate]:
"""Find candidates of the given NamedRequirement. Let it to be implemented in
subclasses.
"""
raise NotImplementedError
def _get_dependencies_from_cache(self, candidate: Candidate) -> CandidateInfo:
try:
result = self._candidate_info_cache.get(candidate)
except CorruptedCacheError:
self._candidate_info_cache.clear()
raise CandidateInfoNotFound(candidate)
except KeyError:
raise CandidateInfoNotFound(candidate)
return result
@cache_result
def _get_dependencies_from_metadata(self, candidate: Candidate) -> CandidateInfo:
deps = candidate.get_dependencies_from_metadata()
requires_python = candidate.requires_python
summary = candidate.metadata.summary
return deps, requires_python, summary
def get_hashes(self, candidate: Candidate) -> Optional[Dict[str, str]]:
"""Get hashes of all possible installable candidates of a given package version.
"""
if (
candidate.req.is_vcs
or candidate.req.is_file_or_url
and candidate.req.is_local_dir
):
return
if candidate.hashes:
return candidate.hashes
req = candidate.req.copy()
req.specifier = SpecifierSet(f"=={candidate.version}")
matching_candidates = self.find_matches(req, allow_all=True)
with self.environment.get_finder(self.sources) as finder:
self._hash_cache.session = finder.session
return {
c.link.filename: self._hash_cache.get_hash(c.link)
for c in matching_candidates
}
def dependency_generators(self) -> Iterable[Callable[[Candidate], CandidateInfo]]:
"""Return an iterable of getter functions to get dependencies, which will be
called one by one.
"""
raise NotImplementedError
class PyPIRepository(BaseRepository):
"""Get package and metadata from PyPI source."""
@cache_result
def _get_dependencies_from_json(self, candidate: Candidate) -> CandidateInfo:
if not candidate.name or not candidate.version:
# Only look for json api for named requirements.
raise CandidateInfoNotFound(candidate)
sources = self.get_filtered_sources(candidate.req)
url_prefixes = [
proc_url[:-7] # Strip "/simple".
for proc_url in (
raw_url.rstrip("/")
for raw_url in (source.get("url", "") for source in sources)
)
if proc_url.endswith("/simple")
]
with self.environment.get_finder(sources) as finder:
session = finder.session
for prefix in url_prefixes:
json_url = f"{prefix}/pypi/{candidate.name}/{candidate.version}/json"
resp = session.get(json_url)
if not resp.ok:
continue
info = resp.json()["info"]
requires_python = info["requires_python"] or ""
summary = info["summary"] or ""
try:
requirement_lines = info["requires_dist"] or []
except KeyError:
requirement_lines = info["requires"] or []
requirements = filter_requirements_with_extras(
requirement_lines, candidate.req.extras or ()
)
return requirements, requires_python, summary
raise CandidateInfoNotFound(candidate)
def dependency_generators(self) -> Iterable[Callable[[Candidate], CandidateInfo]]:
return (
self._get_dependencies_from_cache,
self._get_dependencies_from_json,
self._get_dependencies_from_metadata,
)
def _find_named_matches(
self,
requirement: Requirement,
requires_python: PySpecSet = PySpecSet(),
allow_prereleases: Optional[bool] = None,
allow_all: bool = False,
) -> List[Candidate]:
sources = self.get_filtered_sources(requirement)
# `allow_prereleases` is None means leave it to specifier to decide whether to
# include prereleases
if allow_prereleases is None:
allow_prereleases = requirement.allow_prereleases
with self.environment.get_finder(sources) as finder, allow_all_wheels():
cans = [
Candidate.from_installation_candidate(c, requirement, self.environment)
for c in finder.find_all_candidates(requirement.project_name)
]
sorted_cans = sorted(
(
c
for c in cans
if requirement.specifier.contains(c.version, allow_prereleases)
and (allow_all or requires_python.is_subset(c.requires_python))
),
key=lambda c: (c.version, c.link.is_wheel),
)
if not sorted_cans and allow_prereleases is None:
# No non-pre-releases is found, force pre-releases now
sorted_cans = sorted(
(
c
for c in cans
if requirement.specifier.contains(c.version, True)
and (allow_all or requires_python.is_subset(c.requires_python))
),
key=lambda c: c.version,
)
return sorted_cans
| 39.156 | 88 | 0.629073 | 8,786 | 0.897538 | 0 | 0 | 2,026 | 0.206967 | 0 | 0 | 1,855 | 0.189498 |
1c6036ce4a4bea03f2bf60037b8ba69bf71a83e1 | 713 | py | Python | tests/backends/test_cookie.py | euri10/starsessions | 6bd258a0f94d30b6ec4a8da41910f97c5dabbe54 | [
"MIT"
]
| 31 | 2021-07-15T13:00:06.000Z | 2022-03-17T08:25:52.000Z | tests/backends/test_cookie.py | euri10/starsessions | 6bd258a0f94d30b6ec4a8da41910f97c5dabbe54 | [
"MIT"
]
| 6 | 2021-09-01T15:25:20.000Z | 2022-03-13T07:29:19.000Z | tests/backends/test_cookie.py | euri10/starsessions | 6bd258a0f94d30b6ec4a8da41910f97c5dabbe54 | [
"MIT"
]
| 5 | 2021-08-19T04:46:35.000Z | 2022-03-09T15:27:22.000Z | import pytest
from starsessions import SessionBackend
@pytest.mark.asyncio
async def test_cookie_read_write(cookie: SessionBackend, session_payload: dict) -> None:
new_id = await cookie.write(session_payload, "session_id")
assert await cookie.read(new_id) == session_payload
@pytest.mark.asyncio
async def test_cookie_remove(cookie: SessionBackend) -> None:
await cookie.remove("session_id")
@pytest.mark.asyncio
async def test_cookie_exists(cookie: SessionBackend) -> None:
assert await cookie.exists("session_id") is False
@pytest.mark.asyncio
async def test_cookie_generate_id(cookie: SessionBackend) -> None:
new_id = await cookie.generate_id()
assert isinstance(new_id, str)
| 27.423077 | 88 | 0.775596 | 0 | 0 | 0 | 0 | 646 | 0.906031 | 562 | 0.788219 | 36 | 0.050491 |
1c60d6b7074a5670b3d1308323fd21a043a33869 | 4,888 | py | Python | sqlalchemy_dremio/db.py | thbeh/sqlalchemy_dremio | 180169a86200977a8087d39afe67d3594bd66523 | [
"Apache-2.0"
]
| 14 | 2020-04-19T16:14:37.000Z | 2021-11-14T01:45:51.000Z | sqlalchemy_dremio/db.py | thbeh/sqlalchemy_dremio | 180169a86200977a8087d39afe67d3594bd66523 | [
"Apache-2.0"
]
| 13 | 2020-04-18T14:44:49.000Z | 2022-03-14T13:45:22.000Z | sqlalchemy_dremio/db.py | thbeh/sqlalchemy_dremio | 180169a86200977a8087d39afe67d3594bd66523 | [
"Apache-2.0"
]
| 6 | 2020-04-29T10:18:59.000Z | 2021-08-19T13:46:30.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
from pyarrow import flight
from sqlalchemy_dremio.exceptions import Error, NotSupportedError
from sqlalchemy_dremio.flight_auth import HttpDremioClientAuthHandler
from sqlalchemy_dremio.query import execute
logger = logging.getLogger(__name__)
paramstyle = 'qmark'
def connect(c):
return Connection(c)
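# Illustrative connection string for connect(); only the position of each ";"-separated
# field matters to Connection below, so the key names here are hypothetical:
#   connect("UID=dremio;PWD=dremio123;HOST=localhost;PORT=32010")
# -> fields 0/1 feed authentication, fields 2/3 give the Flight endpoint host and port.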
def check_closed(f):
"""Decorator that checks if connection/cursor is closed."""
def g(self, *args, **kwargs):
if self.closed:
raise Error(
'{klass} already closed'.format(klass=self.__class__.__name__))
return f(self, *args, **kwargs)
return g
def check_result(f):
"""Decorator that checks if the cursor has results from `execute`."""
def d(self, *args, **kwargs):
if self._results is None:
raise Error('Called before `execute`')
return f(self, *args, **kwargs)
return d
class Connection(object):
def __init__(self, connection_string):
# TODO: Find a better way to extend to addition flight parameters
splits = connection_string.split(";")
client = flight.FlightClient('grpc+tcp://{0}:{1}'.format(splits[2].split("=")[1], splits[3].split("=")[1]))
client.authenticate(HttpDremioClientAuthHandler(splits[0].split("=")[1], splits[1].split("=")[1]))
self.flightclient = client
self.closed = False
self.cursors = []
@check_closed
def rollback(self):
pass
@check_closed
def close(self):
"""Close the connection now."""
self.closed = True
for cursor in self.cursors:
try:
cursor.close()
except Error:
pass # already closed
@check_closed
def commit(self):
pass
@check_closed
def cursor(self):
"""Return a new Cursor Object using the connection."""
cursor = Cursor(self.flightclient)
self.cursors.append(cursor)
return cursor
@check_closed
def execute(self, query):
cursor = self.cursor()
return cursor.execute(query)
def __enter__(self):
return self
def __exit__(self, *exc):
self.commit() # no-op
self.close()
class Cursor(object):
"""Connection cursor."""
def __init__(self, flightclient=None):
self.flightclient = flightclient
# This read/write attribute specifies the number of rows to fetch at a
# time with .fetchmany(). It defaults to 1 meaning to fetch a single
# row at a time.
self.arraysize = 1
self.closed = False
# this is updated only after a query
self.description = None
# this is set to a list of rows after a successful query
self._results = None
@property
@check_result
@check_closed
def rowcount(self):
return len(self._results)
@check_closed
def close(self):
"""Close the cursor."""
self.closed = True
@check_closed
def execute(self, query, params=None):
self.description = None
self._results, self.description = execute(
query, self.flightclient)
return self
@check_closed
def executemany(self, query):
raise NotSupportedError(
'`executemany` is not supported, use `execute` instead')
@check_result
@check_closed
def fetchone(self):
"""
Fetch the next row of a query result set, returning a single sequence,
or `None` when no more data is available.
"""
try:
return self._results.pop(0)
except IndexError:
return None
@check_result
@check_closed
def fetchmany(self, size=None):
"""
Fetch the next set of rows of a query result, returning a sequence of
sequences (e.g. a list of tuples). An empty sequence is returned when
no more rows are available.
"""
size = size or self.arraysize
out = self._results[:size]
self._results = self._results[size:]
return out
@check_result
@check_closed
def fetchall(self):
"""
Fetch all (remaining) rows of a query result, returning them as a
sequence of sequences (e.g. a list of tuples). Note that the cursor's
arraysize attribute can affect the performance of this operation.
"""
out = self._results[:]
self._results = []
return out
@check_closed
def setinputsizes(self, sizes):
# not supported
pass
@check_closed
def setoutputsizes(self, sizes):
# not supported
pass
@check_closed
def __iter__(self):
return iter(self._results)
| 25.591623 | 115 | 0.618658 | 3,829 | 0.783347 | 0 | 0 | 2,560 | 0.523732 | 0 | 0 | 1,362 | 0.278642 |
1c61e6c641ff5d3b13cd3eb58254039918bc75f6 | 2,081 | py | Python | docker-images/rasa2/snips_services/tts_server.py | sanyaade-machine-learning/opensnips_original | 3c7d4aa2ef7dec7b0b8c532a537b79c3ef9df7cc | [
"MIT"
]
| 57 | 2017-12-28T22:50:20.000Z | 2022-01-25T16:05:36.000Z | docker-images/rasa2/snips_services/tts_server.py | sanyaade-machine-learning/opensnips_original | 3c7d4aa2ef7dec7b0b8c532a537b79c3ef9df7cc | [
"MIT"
]
| 28 | 2018-04-18T06:45:20.000Z | 2022-03-08T22:50:50.000Z | docker-images/rasa2/snips_services/tts_server.py | sanyaade-machine-learning/opensnips_original | 3c7d4aa2ef7dec7b0b8c532a537b79c3ef9df7cc | [
"MIT"
]
| 18 | 2017-12-27T01:57:14.000Z | 2021-03-02T14:13:06.000Z | #!/opt/rasa/anaconda/bin/python
# -*-: coding utf-8 -*-
""" Snips core and nlu server. """
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import time
import os
from socket import error as socket_error
from SnipsMqttServer import SnipsMqttServer
import paho.mqtt.client as mqtt
from thread_handler import ThreadHandler
import sys,warnings
# apt-get install sox libsox-fmt-all
import sox
class SnipsTTSServer(SnipsMqttServer):
def __init__(self,
mqtt_hostname='mosquitto',
mqtt_port=1883,
):
SnipsMqttServer.__init__(self,mqtt_hostname,mqtt_port)
self.subscribe_to='hermes/tts/say'
def on_message(self, client, userdata, msg):
#print("MESSAGEtts: {}".format(msg.topic))
if msg.topic is not None and msg.topic=="hermes/tts/say":
print("MESSAGE OK: {}".format(msg.topic))
payload = json.loads(msg.payload)
# .decode('utf-8')
sessionId = payload.get('sessionId')
siteId = payload.get('siteId','default')
lang = payload.get('lang','en-GB')
theId = sessionId
fileName = '/tmp/speaking.wav'
os.system('/usr/bin/pico2wave -w=' + fileName + ' "{}" '.format(payload.get('text')))
#pubCommand = "mosquitto_pub -h " +self.mqtt_hostname+" -t 'hermes/audioServer/default/playBytes/0049a91e-8449-4398-9752-07c858234' -f '" + fileName + "'"
#print(pubCommand)
#os.system(pubCommand)
            # read the generated wav back in binary mode so raw audio bytes are published
            with open(fileName, 'rb') as fp:
                f = fp.read()
topic = 'hermes/audioServer/{}/playBytes'.format(siteId)
if theId is not None:
topic = topic + '/{}'.format(theId[::-1])
self.client.publish(topic, payload=bytes(f),qos=0)
#print("PUBLISHED on " + topic)
os.remove(fileName)
server = SnipsTTSServer()
server.start()
| 29.728571 | 166 | 0.605478 | 1,517 | 0.728976 | 0 | 0 | 0 | 0 | 0 | 0 | 604 | 0.290245 |
1c625b305422a96fe496b35f015f87dde84dd1cd | 462 | py | Python | gtd/migrations/0018_context_color.py | jimbofreedman/naggingnelly-api | 510d801791dcce39560bac227c12e5f6d9e80dcc | [
"BSD-3-Clause"
]
| null | null | null | gtd/migrations/0018_context_color.py | jimbofreedman/naggingnelly-api | 510d801791dcce39560bac227c12e5f6d9e80dcc | [
"BSD-3-Clause"
]
| null | null | null | gtd/migrations/0018_context_color.py | jimbofreedman/naggingnelly-api | 510d801791dcce39560bac227c12e5f6d9e80dcc | [
"BSD-3-Clause"
]
| null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-08-02 17:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('gtd', '0017_auto_20180108_1508'),
]
operations = [
migrations.AddField(
model_name='context',
name='color',
field=models.CharField(default='ffffff', max_length=6),
),
]
| 22 | 67 | 0.614719 | 304 | 0.658009 | 0 | 0 | 0 | 0 | 0 | 0 | 125 | 0.270563 |
1c627c266e817eb089303a3e29f35bf34a1b6c4c | 6,652 | py | Python | neuralintents/main.py | nitori/neuralintents | 7a63075fbdca24ec6a6e5281552f64325dd279ff | [
"MIT"
]
| null | null | null | neuralintents/main.py | nitori/neuralintents | 7a63075fbdca24ec6a6e5281552f64325dd279ff | [
"MIT"
]
| null | null | null | neuralintents/main.py | nitori/neuralintents | 7a63075fbdca24ec6a6e5281552f64325dd279ff | [
"MIT"
]
| null | null | null | from abc import ABCMeta, abstractmethod
import random
import json
import pickle
import numpy as np
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import nltk
from nltk.stem import WordNetLemmatizer
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.models import load_model
nltk.download('punkt', quiet=True)
nltk.download('wordnet', quiet=True)
class IAssistant(metaclass=ABCMeta):
@abstractmethod
def train_model(self):
""" Implemented in child class """
@abstractmethod
def request_tag(self, message):
""" Implemented in child class """
@abstractmethod
def get_tag_by_id(self, id):
""" Implemented in child class """
@abstractmethod
def request_method(self, message):
""" Implemented in child class """
@abstractmethod
def request(self, message):
""" Implemented in child class """
class GenericAssistant(IAssistant):
def __init__(self, intents, intent_methods={}, model_name="assistant_model", *, json_encoding='utf-8'):
self.intents = intents
self.intent_methods = intent_methods
self.model_name = model_name
self.json_encoding = json_encoding
if intents.endswith(".json"):
self.load_json_intents(intents)
self.lemmatizer = WordNetLemmatizer()
def load_json_intents(self, intents):
with open(intents, encoding=self.json_encoding) as f:
self.intents = json.load(f)
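    # Sketch of the intents JSON consumed here and in train_model() (values illustrative):
    #   {"intents": [{"tag": "greeting",
    #                 "patterns": ["Hi", "Hello there"],
    #                 "responses": ["Hello!", "Hey!"]}]}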
def train_model(self):
self.words = []
self.classes = []
documents = []
ignore_letters = ['!', '?', ',', '.']
for intent in self.intents['intents']:
for pattern in intent['patterns']:
word = nltk.word_tokenize(pattern)
self.words.extend(word)
documents.append((word, intent['tag']))
if intent['tag'] not in self.classes:
self.classes.append(intent['tag'])
self.words = [self.lemmatizer.lemmatize(w.lower()) for w in self.words if w not in ignore_letters]
self.words = sorted(list(set(self.words)))
self.classes = sorted(list(set(self.classes)))
training = []
output_empty = [0] * len(self.classes)
for doc in documents:
bag = []
word_patterns = doc[0]
word_patterns = [self.lemmatizer.lemmatize(word.lower()) for word in word_patterns]
for word in self.words:
bag.append(1) if word in word_patterns else bag.append(0)
output_row = list(output_empty)
output_row[self.classes.index(doc[1])] = 1
training.append([bag, output_row])
random.shuffle(training)
        training = np.array(training, dtype=object)  # rows pair a bag vector with a shorter one-hot label, so keep an object array
train_x = list(training[:, 0])
train_y = list(training[:, 1])
self.model = Sequential()
self.model.add(Dense(128, input_shape=(len(train_x[0]),), activation='relu'))
self.model.add(Dropout(0.5))
self.model.add(Dense(64, activation='relu'))
self.model.add(Dropout(0.5))
self.model.add(Dense(len(train_y[0]), activation='softmax'))
sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
self.model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
self.hist = self.model.fit(np.array(train_x), np.array(train_y), epochs=200, batch_size=5, verbose=1)
def save_model(self, model_name=None):
if model_name is None:
self.model.save(f"{self.model_name}.h5", self.hist)
with open(f'{self.model_name}_words.pkl', 'wb') as f:
pickle.dump(self.words, f)
with open(f'{self.model_name}_classes.pkl', 'wb') as f:
pickle.dump(self.classes, f)
else:
self.model.save(f"{model_name}.h5", self.hist)
with open(f'{model_name}_words.pkl', 'wb') as f:
pickle.dump(self.words, f)
with open(f'{model_name}_classes.pkl', 'wb') as f:
pickle.dump(self.classes, f)
def load_model(self, model_name=None):
if model_name is None:
with open(f'{self.model_name}_words.pkl', 'rb') as f:
self.words = pickle.load(f)
with open(f'{self.model_name}_classes.pkl', 'rb') as f:
self.classes = pickle.load(f)
self.model = load_model(f'{self.model_name}.h5')
else:
with open(f'{model_name}_words.pkl', 'rb') as f:
self.words = pickle.load(f)
with open(f'{model_name}_classes.pkl', 'rb') as f:
self.classes = pickle.load(f)
self.model = load_model(f'{model_name}.h5')
def _clean_up_sentence(self, sentence):
sentence_words = nltk.word_tokenize(sentence)
sentence_words = [self.lemmatizer.lemmatize(word.lower()) for word in sentence_words]
return sentence_words
def _bag_of_words(self, sentence, words):
sentence_words = self._clean_up_sentence(sentence)
bag = [0] * len(words)
for s in sentence_words:
for i, word in enumerate(words):
if word == s:
bag[i] = 1
return np.array(bag)
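    # Illustrative encoding: with words == ['hello', 'how', 'you'], the sentence
    # "Hello, how are things?" becomes the fixed-length vector array([1, 1, 0]),
    # one slot per known word, which is what the network consumes in _predict_class().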
def _predict_class(self, sentence):
p = self._bag_of_words(sentence, self.words)
res = self.model.predict(np.array([p]))[0]
ERROR_THRESHOLD = 0.1
results = [[i, r] for i, r in enumerate(res) if r > ERROR_THRESHOLD]
results.sort(key=lambda x: x[1], reverse=True)
return_list = []
for r in results:
return_list.append({'intent': self.classes[r[0]], 'probability': str(r[1])})
return return_list
def _get_response(self, ints, intents_json):
try:
tag = ints[0]['intent']
list_of_intents = intents_json['intents']
for i in list_of_intents:
if i['tag'] == tag:
result = random.choice(i['responses'])
break
except IndexError:
result = "I don't understand!"
return result
def request_tag(self, message):
pass
def get_tag_by_id(self, id):
pass
def request_method(self, message):
pass
def request(self, message):
ints = self._predict_class(message)
if ints[0]['intent'] in self.intent_methods.keys():
self.intent_methods[ints[0]['intent']]()
else:
return self._get_response(ints, self.intents) | 33.766497 | 109 | 0.599519 | 6,180 | 0.929044 | 0 | 0 | 457 | 0.068701 | 0 | 0 | 778 | 0.116957 |
1c63f92718131c9edb3951c411929fc66600dca1 | 607 | py | Python | cogs/TieThePie.py | Engineer152/Engineer-Bot | 9654666776d5ba91b1c8afdb32c86a7aedad7143 | [
"MIT"
]
| null | null | null | cogs/TieThePie.py | Engineer152/Engineer-Bot | 9654666776d5ba91b1c8afdb32c86a7aedad7143 | [
"MIT"
]
| null | null | null | cogs/TieThePie.py | Engineer152/Engineer-Bot | 9654666776d5ba91b1c8afdb32c86a7aedad7143 | [
"MIT"
]
| null | null | null | import discord
from discord.ext import commands
client = commands.Bot(command_prefix='your prefix',owner_ids = {your user id},case_insensitive=True )
class TieThePie(commands.Cog):
def __init__(self,client):
self.client=client
@commands.command()
async def tiethepie(self,ctx):
embed=discord.Embed(title="**Tie The Pie**",color=0x46e2ec,description='Subscribe to Dude Perfect to see the reveal of Panda\n**[Details](https://youtu.be/bFUZ5gruc0E)**ㅤㅤㅤㅤ**[Subscribe](http://bit.ly/SubDudePerfect)**')
await ctx.send(embed=embed)
def setup(client):
client.add_cog(TieThePie(client)) | 33.722222 | 224 | 0.742998 | 402 | 0.653659 | 0 | 0 | 317 | 0.515447 | 295 | 0.479675 | 186 | 0.302439 |
1c6537848455d77ed4e22e5c61b4d2a5153fa5e0 | 3,359 | py | Python | python/lsst/eotest/simulation/generate_Fe55_images.py | tguillemLSST/eotest | c6f150984fa5dff85b9805028645bf46fc846f11 | [
"BSD-3-Clause-LBNL"
]
| 3 | 2016-04-21T07:05:45.000Z | 2020-08-05T08:37:37.000Z | python/lsst/eotest/simulation/generate_Fe55_images.py | tguillemLSST/eotest | c6f150984fa5dff85b9805028645bf46fc846f11 | [
"BSD-3-Clause-LBNL"
]
| 70 | 2015-03-26T09:48:53.000Z | 2020-04-22T16:29:43.000Z | python/lsst/eotest/simulation/generate_Fe55_images.py | tguillemLSST/eotest | c6f150984fa5dff85b9805028645bf46fc846f11 | [
"BSD-3-Clause-LBNL"
]
| 5 | 2017-08-15T20:52:44.000Z | 2022-03-25T12:54:07.000Z | """
@brief Generate Fe55 images and associated darks and bias images
according to section 5.4 of the E/O document (Dec 19, 2012 version).
@author J. Chiang <[email protected]>
"""
import os
import numpy as np
from sim_inputs import *
from sim_tools import *
def generate_Fe55_images(exptimes, nxrays, outdir, sensorid, gain=gain,
bias_level=bias_level, sys_noise=sys_noise,
dark_current=dark_current):
nexp = len(exptimes)
for i, exptime, nxray in zip(list(range(nexp)), exptimes, nxrays):
#
# Bias images
#
outfile = "Fe55_bias_%s_%02i.fits" % (sensorid, i)
bias_file = os.path.join(outdir, outfile)
bias_segs = []
for hdu in range(nhdu):
seg = SegmentExposure(exptime=0, gain=gain)
seg.add_bias(level=bias_level, sigma=sys_noise) # electronics
seg.add_bias(level=0, sigma=read_noise) # read noise
bias_segs.append(seg)
bias_output = fitsFile(bias_segs)
bias_output[0].header['GAIN'] = gain
bias_output[0].header['BIASLVL'] = bias_level
bias_output[0].header['SYSNOISE'] = sys_noise
bias_output[0].header['RDNOISE'] = read_noise
bias_output.writeto(bias_file, overwrite=True)
#
# Dark images
#
outfile = "Fe55_dark_%s_%02i.fits" % (sensorid, i)
dark_file = os.path.join(outdir, outfile)
dark_segs = []
for hdu in range(nhdu):
seg = SegmentExposure(exptime=exptime, gain=gain)
seg.add_bias(level=bias_level, sigma=sys_noise) # electronics
seg.add_bias(level=0, sigma=read_noise) # read noise
seg.add_dark_current(level=dark_current) # dark current
dark_segs.append(seg)
dark_output = fitsFile(dark_segs)
dark_output[0].header['GAIN'] = gain
dark_output[0].header['BIASLVL'] = bias_level
dark_output[0].header['SYSNOISE'] = sys_noise
dark_output[0].header['RDNOISE'] = read_noise
dark_output[0].header['DARKCURR'] = dark_current
dark_output.writeto(dark_file, overwrite=True)
#
# Fe55 exposures
#
outfile = "Fe55_exp_%s_%02i.fits" % (sensorid, i)
Fe55_file = os.path.join(outdir, outfile)
fe55_segs = []
for hdu in range(nhdu):
seg = SegmentExposure(exptime=exptime, gain=gain)
seg.add_bias(level=bias_level, sigma=sys_noise) # electronics
seg.add_bias(level=0, sigma=read_noise) # read noise
seg.add_dark_current(level=dark_current) # dark current
seg.add_Fe55_hits(nxrays=nxray)
fe55_segs.append(seg)
fe55_output = fitsFile(fe55_segs)
fe55_output[0].header['GAIN'] = gain
fe55_output[0].header['BIASLVL'] = bias_level
fe55_output[0].header['SYSNOISE'] = sys_noise
fe55_output[0].header['RDNOISE'] = read_noise
fe55_output[0].header['DARKCURR'] = dark_current
fe55_output[0].header['FE55HITS'] = nxray
fe55_output.writeto(Fe55_file, overwrite=True)
if __name__ == '__main__':
nexp = 10
exptimes = np.linspace(1, 5, nexp)
nxrays = [int(x*1000) for x in exptimes]
generate_Fe55_images(exptimes, nxrays, '.', 'xxx-xx')
| 39.988095 | 74 | 0.61953 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 563 | 0.167609 |
1c6615aacc368931eb1fadc13190d4aad9dc4cda | 32,663 | py | Python | pcdet/models/backbones_3d/pfe/voxel_set_abstraction.py | rayguan97/M3DETR | cb76890a28c1555f2c0138030e0a432df6ee731b | [
"Apache-2.0"
]
| 21 | 2022-01-21T11:02:15.000Z | 2022-03-08T14:55:30.000Z | pcdet/models/backbones_3d/pfe/voxel_set_abstraction.py | rayguan97/M3DETR | cb76890a28c1555f2c0138030e0a432df6ee731b | [
"Apache-2.0"
]
| 2 | 2022-01-21T08:10:49.000Z | 2022-01-21T23:44:40.000Z | pcdet/models/backbones_3d/pfe/voxel_set_abstraction.py | rayguan97/M3DETR | cb76890a28c1555f2c0138030e0a432df6ee731b | [
"Apache-2.0"
]
| 3 | 2022-01-21T11:41:55.000Z | 2022-01-24T14:20:19.000Z | import math
import numpy as np
import torch
import torch.nn as nn
from ....ops.pointnet2.pointnet2_stack import pointnet2_modules as pointnet2_stack_modules
from ....ops.pointnet2.pointnet2_stack import pointnet2_utils as pointnet2_stack_utils
from ....utils import common_utils
from ...backbones_2d.transformer import TransformerEncoderLayer3D, TransformerEncoder
from ...roi_heads.target_assigner.proposal_target_layer import ProposalTargetLayer
from ...model_utils.model_nms_utils import class_agnostic_nms
def bilinear_interpolate_torch(im, x, y):
"""
Args:
im: (H, W, C) [y, x]
x: (N)
y: (N)
Returns:
"""
x0 = torch.floor(x).long()
x1 = x0 + 1
y0 = torch.floor(y).long()
y1 = y0 + 1
x0 = torch.clamp(x0, 0, im.shape[1] - 1)
x1 = torch.clamp(x1, 0, im.shape[1] - 1)
y0 = torch.clamp(y0, 0, im.shape[0] - 1)
y1 = torch.clamp(y1, 0, im.shape[0] - 1)
Ia = im[y0, x0]
Ib = im[y1, x0]
Ic = im[y0, x1]
Id = im[y1, x1]
wa = (x1.type_as(x) - x) * (y1.type_as(y) - y)
wb = (x1.type_as(x) - x) * (y - y0.type_as(y))
wc = (x - x0.type_as(x)) * (y1.type_as(y) - y)
wd = (x - x0.type_as(x)) * (y - y0.type_as(y))
ans = torch.t((torch.t(Ia) * wa)) + torch.t(torch.t(Ib) * wb) + torch.t(torch.t(Ic) * wc) + torch.t(torch.t(Id) * wd)
return ans
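# Illustrative call (hypothetical tensors): for a (H, W, C) feature map `im`,
#   bilinear_interpolate_torch(im, torch.tensor([10.3]), torch.tensor([4.7]))
# returns a (1, C) tensor blending im[4, 10], im[5, 10], im[4, 11] and im[5, 11]
# with weights given by the fractional parts of x and y.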
def sample_points_with_roi(rois, points, sample_radius_with_roi, num_max_points_of_part=200000):
"""
Args:
rois: (M, 7 + C)
points: (N, 3)
sample_radius_with_roi:
num_max_points_of_part:
Returns:
sampled_points: (N_out, 3)
"""
if points.shape[0] < num_max_points_of_part:
distance = (points[:, None, :] - rois[None, :, 0:3]).norm(dim=-1)
min_dis, min_dis_roi_idx = distance.min(dim=-1)
roi_max_dim = (rois[min_dis_roi_idx, 3:6] / 2).norm(dim=-1)
point_mask = min_dis < roi_max_dim + sample_radius_with_roi
else:
start_idx = 0
point_mask_list = []
while start_idx < points.shape[0]:
distance = (points[start_idx:start_idx + num_max_points_of_part, None, :] - rois[None, :, 0:3]).norm(dim=-1)
min_dis, min_dis_roi_idx = distance.min(dim=-1)
roi_max_dim = (rois[min_dis_roi_idx, 3:6] / 2).norm(dim=-1)
cur_point_mask = min_dis < roi_max_dim + sample_radius_with_roi
point_mask_list.append(cur_point_mask)
start_idx += num_max_points_of_part
point_mask = torch.cat(point_mask_list, dim=0)
sampled_points = points[:1] if point_mask.sum() == 0 else points[point_mask, :]
return sampled_points, point_mask
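# Example (hypothetical shapes): given rois of shape (M, 7+) and points of shape (N, 3),
#   kept, mask = sample_points_with_roi(rois, points, sample_radius_with_roi=1.6)
# keeps the points whose distance to the nearest ROI centre is below half that ROI's
# diagonal plus the radius; ``mask`` is the (N,) boolean selector over the inputs.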
def sector_fps(points, num_sampled_points, num_sectors):
"""
Args:
points: (N, 3)
num_sampled_points: int
num_sectors: int
Returns:
sampled_points: (N_out, 3)
"""
sector_size = np.pi * 2 / num_sectors
point_angles = torch.atan2(points[:, 1], points[:, 0]) + np.pi
sector_idx = (point_angles / sector_size).floor().clamp(min=0, max=num_sectors)
xyz_points_list = []
xyz_batch_cnt = []
num_sampled_points_list = []
for k in range(num_sectors):
mask = (sector_idx == k)
cur_num_points = mask.sum().item()
if cur_num_points > 0:
xyz_points_list.append(points[mask])
xyz_batch_cnt.append(cur_num_points)
ratio = cur_num_points / points.shape[0]
num_sampled_points_list.append(
min(cur_num_points, math.ceil(ratio * num_sampled_points))
)
if len(xyz_batch_cnt) == 0:
xyz_points_list.append(points)
xyz_batch_cnt.append(len(points))
num_sampled_points_list.append(num_sampled_points)
print(f'Warning: empty sector points detected in SectorFPS: points.shape={points.shape}')
xyz = torch.cat(xyz_points_list, dim=0)
xyz_batch_cnt = torch.tensor(xyz_batch_cnt, device=points.device).int()
sampled_points_batch_cnt = torch.tensor(num_sampled_points_list, device=points.device).int()
sampled_pt_idxs = pointnet2_stack_utils.stack_farthest_point_sample(
xyz.contiguous(), xyz_batch_cnt, sampled_points_batch_cnt
).long()
sampled_points = xyz[sampled_pt_idxs]
return sampled_points
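# Example (hypothetical numbers): sector_fps(points, num_sampled_points=2048, num_sectors=6)
# splits the cloud into 6 angular sectors around the origin and runs farthest point
# sampling per sector, so each sector contributes roughly in proportion to its point count.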
class VoxelSetAbstractionTransFusionv5(nn.Module):
def __init__(self, model_cfg, voxel_size, point_cloud_range, num_bev_features=None,
num_rawpoint_features=None, **kwargs):
super().__init__()
self.model_cfg = model_cfg
self.voxel_size = voxel_size
self.point_cloud_range = point_cloud_range
SA_cfg = self.model_cfg.SA_LAYER
self.SA_layers = nn.ModuleList()
self.linears_in = nn.ModuleList()
self.linears_out = nn.ModuleList()
self.fusion_channel = sum([x[-1] for x in SA_cfg[self.model_cfg.FEATURES_SOURCE[-2]].MLPS])
# self.fusion_channel = 16
self.SA_layer_names = []
self.downsample_times_map = {}
c_in = 0
if 'bev' in self.model_cfg.FEATURES_SOURCE:
c_bev = num_bev_features
c_in += c_bev
if c_bev == self.fusion_channel:
self.linears_in.append(nn.Identity())
self.linears_out.append(nn.Identity())
else:
self.linears_in.append(nn.Sequential(
nn.Linear(c_bev, self.fusion_channel, bias=False),
nn.BatchNorm1d(self.fusion_channel)))
self.linears_out.append(nn.Sequential(
nn.Linear(self.fusion_channel, c_bev, bias=False),
nn.BatchNorm1d(c_bev)))
if 'raw_points' in self.model_cfg.FEATURES_SOURCE:
mlps = SA_cfg['raw_points'].MLPS
for k in range(len(mlps)):
mlps[k] = [num_rawpoint_features - 3] + mlps[k]
self.SA_rawpoints = pointnet2_stack_modules.StackSAModuleMSG(
radii=SA_cfg['raw_points'].POOL_RADIUS,
nsamples=SA_cfg['raw_points'].NSAMPLE,
mlps=mlps,
use_xyz=True,
pool_method='max_pool'
)
cur = sum([x[-1] for x in mlps])
if cur == self.fusion_channel:
self.linears_in.append(nn.Identity())
self.linears_out.append(nn.Identity())
else:
self.linears_in.append(nn.Sequential(
nn.Linear(cur, self.fusion_channel, bias=False),
nn.BatchNorm1d(self.fusion_channel)))
self.linears_out.append(nn.Sequential(
nn.Linear(self.fusion_channel, cur, bias=False),
nn.BatchNorm1d(cur)))
c_in += cur
for src_name in self.model_cfg.FEATURES_SOURCE:
if src_name in ['bev', 'raw_points']:
continue
self.downsample_times_map[src_name] = SA_cfg[src_name].DOWNSAMPLE_FACTOR
mlps = SA_cfg[src_name].MLPS
for k in range(len(mlps)):
mlps[k] = [mlps[k][0]] + mlps[k]
cur_layer = pointnet2_stack_modules.StackSAModuleMSG(
radii=SA_cfg[src_name].POOL_RADIUS,
nsamples=SA_cfg[src_name].NSAMPLE,
mlps=mlps,
use_xyz=True,
pool_method='max_pool',
)
self.SA_layers.append(cur_layer)
cur = sum([x[-1] for x in mlps])
if cur == self.fusion_channel:
self.linears_in.append(nn.Identity())
self.linears_out.append(nn.Identity())
else:
self.linears_in.append(nn.Sequential(
nn.Linear(cur, self.fusion_channel, bias=False),
nn.BatchNorm1d(self.fusion_channel)))
self.linears_out.append(nn.Sequential(
nn.Linear(self.fusion_channel, cur, bias=False),
nn.BatchNorm1d(cur)))
self.SA_layer_names.append(src_name)
c_in += cur
self.vsa_point_feature_fusion = nn.Sequential(
nn.Linear(c_in, self.model_cfg.NUM_OUTPUT_FEATURES, bias=False),
nn.BatchNorm1d(self.model_cfg.NUM_OUTPUT_FEATURES),
nn.ReLU(),
)
self.num_point_features = self.model_cfg.NUM_OUTPUT_FEATURES
self.num_point_features_before_fusion = c_in
if self.model_cfg.NORM:
self.transnorm = nn.LayerNorm(c_in)
else:
self.transnorm = None
if self.model_cfg.NORM2:
self.transnorm2 = nn.LayerNorm(self.fusion_channel)
else:
self.transnorm2 = None
# multi_location
self.trans_layer = TransformerEncoder(TransformerEncoderLayer3D(c_in, self.model_cfg.FUSION_HEAD), self.model_cfg.NUM_LAYERS, self.transnorm)
# have multi-modality + multi-scale
self.trans_fusion_layer = TransformerEncoder(TransformerEncoderLayer3D(self.fusion_channel, self.model_cfg.FUSION2_HEAD), self.model_cfg.NUM_LAYERS2, self.transnorm2)
self.reduce_radius = self.model_cfg.REDUCE_RADIUS**2
self.topks = self.model_cfg.NMS_CONFIG.TOPK
self.max_keypoints = self.model_cfg.NMS_CONFIG.MAX_POINTS
self.res1_actn_1 = nn.Sequential(
nn.LayerNorm(c_in),
nn.ReLU())
self.res1_actn_2 = nn.Sequential(
nn.LayerNorm(c_in),
nn.ReLU())
def interpolate_from_bev_features(self, keypoints, bev_features, batch_size, bev_stride):
x_idxs = (keypoints[:, :, 0] - self.point_cloud_range[0]) / self.voxel_size[0]
y_idxs = (keypoints[:, :, 1] - self.point_cloud_range[1]) / self.voxel_size[1]
x_idxs = x_idxs / bev_stride
y_idxs = y_idxs / bev_stride
point_bev_features_list = []
for k in range(batch_size):
cur_x_idxs = x_idxs[k]
cur_y_idxs = y_idxs[k]
cur_bev_features = bev_features[k].permute(1, 2, 0) # (H, W, C)
point_bev_features = bilinear_interpolate_torch(cur_bev_features, cur_x_idxs, cur_y_idxs)
point_bev_features_list.append(point_bev_features.unsqueeze(dim=0))
point_bev_features = torch.cat(point_bev_features_list, dim=0) # (B, N, C0)
return point_bev_features
def get_sampled_points(self, batch_dict):
batch_size = batch_dict['batch_size']
if self.model_cfg.POINT_SOURCE == 'raw_points':
src_points = batch_dict['points'][:, 1:4]
batch_indices = batch_dict['points'][:, 0].long()
elif self.model_cfg.POINT_SOURCE == 'voxel_centers':
src_points = common_utils.get_voxel_centers(
batch_dict['voxel_coords'][:, 1:4],
downsample_times=1,
voxel_size=self.voxel_size,
point_cloud_range=self.point_cloud_range
)
batch_indices = batch_dict['voxel_coords'][:, 0].long()
else:
raise NotImplementedError
keypoints_list = []
for bs_idx in range(batch_size):
bs_mask = (batch_indices == bs_idx)
sampled_points = src_points[bs_mask].unsqueeze(dim=0) # (1, N, 3)
if self.model_cfg.SAMPLE_METHOD == 'FPS':
cur_pt_idxs = pointnet2_stack_utils.furthest_point_sample(
sampled_points[:, :, 0:3].contiguous(), self.model_cfg.NUM_KEYPOINTS
).long()
if sampled_points.shape[1] < self.model_cfg.NUM_KEYPOINTS:
empty_num = self.model_cfg.NUM_KEYPOINTS - sampled_points.shape[1]
cur_pt_idxs[0, -empty_num:] = cur_pt_idxs[0, :empty_num]
keypoints = sampled_points[0][cur_pt_idxs[0]].unsqueeze(dim=0)
elif self.model_cfg.SAMPLE_METHOD == 'FastFPS':
raise NotImplementedError
else:
raise NotImplementedError
keypoints_list.append(keypoints)
keypoints = torch.cat(keypoints_list, dim=0) # (B, M, 3)
return keypoints
def get_sampled_points_post(self, batch_dict, keypoints):
batch_size = batch_dict['batch_size']
src_points = keypoints
keypoints_list = []
for bs_idx in range(batch_size):
sampled_points = src_points[bs_idx].unsqueeze(dim=0) # (1, N, 3)
if sampled_points.shape[1] < self.max_keypoints:
cur_count = sampled_points.shape[1]
cur_pt_idxs = torch.arange(0, self.max_keypoints)
empty_num = self.max_keypoints - cur_count
while empty_num >= cur_count:
cur_pt_idxs[cur_count:cur_count * 2] = cur_pt_idxs[:cur_count]
empty_num -= cur_count
cur_count *= 2
if cur_count < self.max_keypoints:
assert empty_num == self.max_keypoints - cur_count
cur_pt_idxs[-empty_num:] = cur_pt_idxs[:empty_num]
keypoint = sampled_points[0][cur_pt_idxs].unsqueeze(dim=0)
else:
cur_pt_idxs = pointnet2_stack_utils.furthest_point_sample(
sampled_points[:, :, 0:3].contiguous(), self.max_keypoints
).long()
if sampled_points.shape[1] < self.max_keypoints:
empty_num = self.max_keypoints - sampled_points.shape[1]
cur_pt_idxs[0, -empty_num:] = cur_pt_idxs[0, :empty_num]
keypoint = sampled_points[0][cur_pt_idxs[0]].unsqueeze(dim=0)
keypoints_list.append(keypoint)
keypoint = torch.cat(keypoints_list, dim=0) # (B, M, 3)
return keypoint
def reduce_points(self, batch_dict):
batch_indices = batch_dict['points'][:, 0].long()
masks = []
for bs_idx, roi in enumerate(batch_dict['batch_cls_preds']):
bs_mask = (batch_indices == bs_idx)
pts = batch_dict['points'][bs_mask].unsqueeze(dim=1)[:, :, 1: 4] # (N, 1, 3)
s, _ = torch.max(batch_dict['batch_cls_preds'][bs_idx], dim=1)
top, idx = torch.topk(s, self.topks)
c = batch_dict['batch_box_preds'][bs_idx][idx][:, :3].unsqueeze(dim=0)
dist = (pts - c)**2
dist, _ = dist.sum(dim=-1).min(dim=1)
mask = (dist <= self.reduce_radius)
masks.extend(mask)
batch_dict['points'] = batch_dict['points'][masks]
return batch_dict
def reduce_points_post(self, keypoints, batch_dict):
keypoints_list = []
for bs_idx, roi in enumerate(batch_dict['batch_cls_preds']):
pts = keypoints[bs_idx].unsqueeze(dim=1)
s, _ = torch.max(batch_dict['batch_cls_preds'][bs_idx], dim=1)
top, idx = torch.topk(s, self.topks)
c = batch_dict['batch_box_preds'][bs_idx][idx][:, :3].unsqueeze(dim=0)
dist = (pts - c)**2
dist, _ = dist.sum(dim=-1).min(dim=1)
mask = (dist <= self.reduce_radius)
keypoints_list.append(keypoints[bs_idx][mask])
return keypoints_list
def forward(self, batch_dict):
"""
Args:
batch_dict:
batch_size:
keypoints: (B, num_keypoints, 3)
multi_scale_3d_features: {
'x_conv4': ...
}
points: optional (N, 1 + 3 + C) [bs_idx, x, y, z, ...]
spatial_features: optional
spatial_features_stride: optional
Returns:
point_features: (N, C)
point_coords: (N, 4)
"""
if self.model_cfg.POINT_SOURCE == 'raw_points' and self.reduce_radius > 0:
# batch_dict = self.reduce_points(batch_dict)
keypoints = self.get_sampled_points(batch_dict)
keypoint_lst = self.reduce_points_post(keypoints, batch_dict)
keypoints = self.get_sampled_points_post(batch_dict, keypoint_lst)
else:
keypoints = self.get_sampled_points(batch_dict)
point_features_list = []
if 'bev' in self.model_cfg.FEATURES_SOURCE:
point_bev_features = self.interpolate_from_bev_features(
keypoints, batch_dict['spatial_features'], batch_dict['batch_size'],
bev_stride=batch_dict['spatial_features_stride']
)
point_features_list.append(point_bev_features)
batch_size, num_keypoints, _ = keypoints.shape
new_xyz = keypoints.view(-1, 3)
new_xyz_batch_cnt = new_xyz.new_zeros(batch_size).int().fill_(num_keypoints)
if 'raw_points' in self.model_cfg.FEATURES_SOURCE:
raw_points = batch_dict['points']
xyz = raw_points[:, 1:4]
xyz_batch_cnt = xyz.new_zeros(batch_size).int()
for bs_idx in range(batch_size):
xyz_batch_cnt[bs_idx] = (raw_points[:, 0] == bs_idx).sum()
point_features = raw_points[:, 4:].contiguous() if raw_points.shape[1] > 4 else None
pooled_points, pooled_features = self.SA_rawpoints(
xyz=xyz.contiguous(),
xyz_batch_cnt=xyz_batch_cnt,
new_xyz=new_xyz,
new_xyz_batch_cnt=new_xyz_batch_cnt,
features=point_features,
)
point_features_list.append(pooled_features.view(batch_size, num_keypoints, -1))
for k, src_name in enumerate(self.SA_layer_names):
cur_coords = batch_dict['multi_scale_3d_features'][src_name].indices
xyz = common_utils.get_voxel_centers(
cur_coords[:, 1:4],
downsample_times=self.downsample_times_map[src_name],
voxel_size=self.voxel_size,
point_cloud_range=self.point_cloud_range
)
xyz_batch_cnt = xyz.new_zeros(batch_size).int()
for bs_idx in range(batch_size):
xyz_batch_cnt[bs_idx] = (cur_coords[:, 0] == bs_idx).sum()
pooled_points, pooled_features = self.SA_layers[k](
xyz=xyz.contiguous(),
xyz_batch_cnt=xyz_batch_cnt,
new_xyz=new_xyz,
new_xyz_batch_cnt=new_xyz_batch_cnt,
features=batch_dict['multi_scale_3d_features'][src_name].features.contiguous(),
)
point_features_list.append(pooled_features.view(batch_size, num_keypoints, -1))
point_features_list_new = []
for i, x in enumerate(point_features_list):
feat = self.linears_in[i](x.view(batch_size * num_keypoints, -1))
point_features_list_new.append(feat.view(1, batch_size * num_keypoints, -1))
fusion_feat = torch.cat(point_features_list_new, dim=0)
# have multi-modality + multi-scale
trans1_feat_list = self.trans_fusion_layer(fusion_feat).view(len(fusion_feat), batch_size, num_keypoints, -1)
trans1_feat_projected_list = []
for i, x in enumerate(trans1_feat_list):
feat = self.linears_out[i](x.view(batch_size * num_keypoints, -1))
trans1_feat_projected_list.append(feat.view(batch_size, num_keypoints, -1))
# multi_location
point_features_main1 = torch.cat(point_features_list, dim=2)
point_features_res1 = self.res1_actn_1(torch.cat(trans1_feat_projected_list, dim=2))
point_features_main2 = point_features_res1 + point_features_main1
point_features_res2 = self.res1_actn_2(self.trans_layer(point_features_main2.permute(1, 0, 2)).permute(1, 0, 2))
point_features = point_features_main2 + point_features_res2
batch_idx = torch.arange(batch_size, device=keypoints.device).view(-1, 1).repeat(1, keypoints.shape[1]).view(-1)
point_coords = torch.cat((batch_idx.view(-1, 1).float(), keypoints.view(-1, 3)), dim=1)
batch_dict['point_features_before_fusion'] = point_features.reshape(-1, point_features.shape[-1])
point_features = self.vsa_point_feature_fusion(point_features.reshape(-1, point_features.shape[-1]))
batch_dict['point_features'] = point_features # (BxN, C)
batch_dict['point_coords'] = point_coords # (BxN, 4)
return batch_dict
class VoxelSetAbstraction(nn.Module):
def __init__(self, model_cfg, voxel_size, point_cloud_range, num_bev_features=None,
num_rawpoint_features=None, **kwargs):
super().__init__()
self.model_cfg = model_cfg
self.voxel_size = voxel_size
self.point_cloud_range = point_cloud_range
SA_cfg = self.model_cfg.SA_LAYER
self.SA_layers = nn.ModuleList()
self.SA_layer_names = []
self.downsample_times_map = {}
c_in = 0
for src_name in self.model_cfg.FEATURES_SOURCE:
if src_name in ['bev', 'raw_points']:
continue
self.downsample_times_map[src_name] = SA_cfg[src_name].DOWNSAMPLE_FACTOR
if SA_cfg[src_name].get('INPUT_CHANNELS', None) is None:
input_channels = SA_cfg[src_name].MLPS[0][0] \
if isinstance(SA_cfg[src_name].MLPS[0], list) else SA_cfg[src_name].MLPS[0]
else:
input_channels = SA_cfg[src_name]['INPUT_CHANNELS']
cur_layer, cur_num_c_out = pointnet2_stack_modules.build_local_aggregation_module(
input_channels=input_channels, config=SA_cfg[src_name]
)
self.SA_layers.append(cur_layer)
self.SA_layer_names.append(src_name)
c_in += cur_num_c_out
if 'bev' in self.model_cfg.FEATURES_SOURCE:
c_bev = num_bev_features
c_in += c_bev
if 'raw_points' in self.model_cfg.FEATURES_SOURCE:
self.SA_rawpoints, cur_num_c_out = pointnet2_stack_modules.build_local_aggregation_module(
input_channels=num_rawpoint_features - 3, config=SA_cfg['raw_points']
)
c_in += cur_num_c_out
self.vsa_point_feature_fusion = nn.Sequential(
nn.Linear(c_in, self.model_cfg.NUM_OUTPUT_FEATURES, bias=False),
nn.BatchNorm1d(self.model_cfg.NUM_OUTPUT_FEATURES),
nn.ReLU(),
)
self.num_point_features = self.model_cfg.NUM_OUTPUT_FEATURES
self.num_point_features_before_fusion = c_in
def interpolate_from_bev_features(self, keypoints, bev_features, batch_size, bev_stride):
"""
Args:
keypoints: (N1 + N2 + ..., 4)
bev_features: (B, C, H, W)
batch_size:
bev_stride:
Returns:
point_bev_features: (N1 + N2 + ..., C)
"""
x_idxs = (keypoints[:, 1] - self.point_cloud_range[0]) / self.voxel_size[0]
y_idxs = (keypoints[:, 2] - self.point_cloud_range[1]) / self.voxel_size[1]
x_idxs = x_idxs / bev_stride
y_idxs = y_idxs / bev_stride
point_bev_features_list = []
for k in range(batch_size):
bs_mask = (keypoints[:, 0] == k)
cur_x_idxs = x_idxs[bs_mask]
cur_y_idxs = y_idxs[bs_mask]
cur_bev_features = bev_features[k].permute(1, 2, 0) # (H, W, C)
point_bev_features = bilinear_interpolate_torch(cur_bev_features, cur_x_idxs, cur_y_idxs)
point_bev_features_list.append(point_bev_features)
point_bev_features = torch.cat(point_bev_features_list, dim=0) # (N1 + N2 + ..., C)
return point_bev_features
def sectorized_proposal_centric_sampling(self, roi_boxes, points):
"""
Args:
roi_boxes: (M, 7 + C)
points: (N, 3)
Returns:
sampled_points: (N_out, 3)
"""
sampled_points, _ = sample_points_with_roi(
rois=roi_boxes, points=points,
sample_radius_with_roi=self.model_cfg.SPC_SAMPLING.SAMPLE_RADIUS_WITH_ROI,
num_max_points_of_part=self.model_cfg.SPC_SAMPLING.get('NUM_POINTS_OF_EACH_SAMPLE_PART', 200000)
)
sampled_points = sector_fps(
points=sampled_points, num_sampled_points=self.model_cfg.NUM_KEYPOINTS,
num_sectors=self.model_cfg.SPC_SAMPLING.NUM_SECTORS
)
return sampled_points
def get_sampled_points(self, batch_dict):
"""
Args:
batch_dict:
Returns:
keypoints: (N1 + N2 + ..., 4), where 4 indicates [bs_idx, x, y, z]
"""
batch_size = batch_dict['batch_size']
if self.model_cfg.POINT_SOURCE == 'raw_points':
src_points = batch_dict['points'][:, 1:4]
batch_indices = batch_dict['points'][:, 0].long()
elif self.model_cfg.POINT_SOURCE == 'voxel_centers':
src_points = common_utils.get_voxel_centers(
batch_dict['voxel_coords'][:, 1:4],
downsample_times=1,
voxel_size=self.voxel_size,
point_cloud_range=self.point_cloud_range
)
batch_indices = batch_dict['voxel_coords'][:, 0].long()
else:
raise NotImplementedError
keypoints_list = []
for bs_idx in range(batch_size):
bs_mask = (batch_indices == bs_idx)
sampled_points = src_points[bs_mask].unsqueeze(dim=0) # (1, N, 3)
if self.model_cfg.SAMPLE_METHOD == 'FPS':
cur_pt_idxs = pointnet2_stack_utils.farthest_point_sample(
sampled_points[:, :, 0:3].contiguous(), self.model_cfg.NUM_KEYPOINTS
).long()
if sampled_points.shape[1] < self.model_cfg.NUM_KEYPOINTS:
times = int(self.model_cfg.NUM_KEYPOINTS / sampled_points.shape[1]) + 1
non_empty = cur_pt_idxs[0, :sampled_points.shape[1]]
cur_pt_idxs[0] = non_empty.repeat(times)[:self.model_cfg.NUM_KEYPOINTS]
keypoints = sampled_points[0][cur_pt_idxs[0]].unsqueeze(dim=0)
elif self.model_cfg.SAMPLE_METHOD == 'SPC':
cur_keypoints = self.sectorized_proposal_centric_sampling(
roi_boxes=batch_dict['rois'][bs_idx], points=sampled_points[0]
)
bs_idxs = cur_keypoints.new_ones(cur_keypoints.shape[0]) * bs_idx
keypoints = torch.cat((bs_idxs[:, None], cur_keypoints), dim=1)
else:
raise NotImplementedError
keypoints_list.append(keypoints)
keypoints = torch.cat(keypoints_list, dim=0) # (B, M, 3) or (N1 + N2 + ..., 4)
if len(keypoints.shape) == 3:
batch_idx = torch.arange(batch_size, device=keypoints.device).view(-1, 1).repeat(1, keypoints.shape[1]).view(-1, 1)
keypoints = torch.cat((batch_idx.float(), keypoints.view(-1, 3)), dim=1)
return keypoints
@staticmethod
def aggregate_keypoint_features_from_one_source(
batch_size, aggregate_func, xyz, xyz_features, xyz_bs_idxs, new_xyz, new_xyz_batch_cnt,
filter_neighbors_with_roi=False, radius_of_neighbor=None, num_max_points_of_part=200000, rois=None
):
"""
Args:
aggregate_func:
xyz: (N, 3)
xyz_features: (N, C)
xyz_bs_idxs: (N)
new_xyz: (M, 3)
new_xyz_batch_cnt: (batch_size), [N1, N2, ...]
filter_neighbors_with_roi: True/False
radius_of_neighbor: float
num_max_points_of_part: int
rois: (batch_size, num_rois, 7 + C)
Returns:
"""
xyz_batch_cnt = xyz.new_zeros(batch_size).int()
if filter_neighbors_with_roi:
point_features = torch.cat((xyz, xyz_features), dim=-1) if xyz_features is not None else xyz
point_features_list = []
for bs_idx in range(batch_size):
bs_mask = (xyz_bs_idxs == bs_idx)
_, valid_mask = sample_points_with_roi(
rois=rois[bs_idx], points=xyz[bs_mask],
sample_radius_with_roi=radius_of_neighbor, num_max_points_of_part=num_max_points_of_part,
)
point_features_list.append(point_features[bs_mask][valid_mask])
xyz_batch_cnt[bs_idx] = valid_mask.sum()
valid_point_features = torch.cat(point_features_list, dim=0)
xyz = valid_point_features[:, 0:3]
xyz_features = valid_point_features[:, 3:] if xyz_features is not None else None
else:
for bs_idx in range(batch_size):
xyz_batch_cnt[bs_idx] = (xyz_bs_idxs == bs_idx).sum()
pooled_points, pooled_features = aggregate_func(
xyz=xyz.contiguous(),
xyz_batch_cnt=xyz_batch_cnt,
new_xyz=new_xyz,
new_xyz_batch_cnt=new_xyz_batch_cnt,
features=xyz_features.contiguous(),
)
return pooled_features
def forward(self, batch_dict):
"""
Args:
batch_dict:
batch_size:
keypoints: (B, num_keypoints, 3)
multi_scale_3d_features: {
'x_conv4': ...
}
points: optional (N, 1 + 3 + C) [bs_idx, x, y, z, ...]
spatial_features: optional
spatial_features_stride: optional
Returns:
point_features: (N, C)
point_coords: (N, 4)
"""
keypoints = self.get_sampled_points(batch_dict)
point_features_list = []
if 'bev' in self.model_cfg.FEATURES_SOURCE:
point_bev_features = self.interpolate_from_bev_features(
keypoints, batch_dict['spatial_features'], batch_dict['batch_size'],
bev_stride=batch_dict['spatial_features_stride']
)
point_features_list.append(point_bev_features)
batch_size = batch_dict['batch_size']
new_xyz = keypoints[:, 1:4].contiguous()
new_xyz_batch_cnt = new_xyz.new_zeros(batch_size).int()
for k in range(batch_size):
new_xyz_batch_cnt[k] = (keypoints[:, 0] == k).sum()
if 'raw_points' in self.model_cfg.FEATURES_SOURCE:
raw_points = batch_dict['points']
pooled_features = self.aggregate_keypoint_features_from_one_source(
batch_size=batch_size, aggregate_func=self.SA_rawpoints,
xyz=raw_points[:, 1:4],
xyz_features=raw_points[:, 4:].contiguous() if raw_points.shape[1] > 4 else None,
xyz_bs_idxs=raw_points[:, 0],
new_xyz=new_xyz, new_xyz_batch_cnt=new_xyz_batch_cnt,
filter_neighbors_with_roi=self.model_cfg.SA_LAYER['raw_points'].get('FILTER_NEIGHBOR_WITH_ROI', False),
radius_of_neighbor=self.model_cfg.SA_LAYER['raw_points'].get('RADIUS_OF_NEIGHBOR_WITH_ROI', None),
rois=batch_dict.get('rois', None)
)
point_features_list.append(pooled_features)
for k, src_name in enumerate(self.SA_layer_names):
cur_coords = batch_dict['multi_scale_3d_features'][src_name].indices
cur_features = batch_dict['multi_scale_3d_features'][src_name].features.contiguous()
xyz = common_utils.get_voxel_centers(
cur_coords[:, 1:4], downsample_times=self.downsample_times_map[src_name],
voxel_size=self.voxel_size, point_cloud_range=self.point_cloud_range
)
pooled_features = self.aggregate_keypoint_features_from_one_source(
batch_size=batch_size, aggregate_func=self.SA_layers[k],
xyz=xyz.contiguous(), xyz_features=cur_features, xyz_bs_idxs=cur_coords[:, 0],
new_xyz=new_xyz, new_xyz_batch_cnt=new_xyz_batch_cnt,
filter_neighbors_with_roi=self.model_cfg.SA_LAYER[src_name].get('FILTER_NEIGHBOR_WITH_ROI', False),
radius_of_neighbor=self.model_cfg.SA_LAYER[src_name].get('RADIUS_OF_NEIGHBOR_WITH_ROI', None),
rois=batch_dict.get('rois', None)
)
point_features_list.append(pooled_features)
point_features = torch.cat(point_features_list, dim=-1)
batch_dict['point_features_before_fusion'] = point_features.view(-1, point_features.shape[-1])
point_features = self.vsa_point_feature_fusion(point_features.view(-1, point_features.shape[-1]))
batch_dict['point_features'] = point_features # (BxN, C)
batch_dict['point_coords'] = keypoints # (BxN, 4)
return batch_dict
| 42.585398 | 174 | 0.608732 | 28,374 | 0.868689 | 0 | 0 | 2,071 | 0.063405 | 0 | 0 | 3,875 | 0.118636 |
1c66241d3877e47cd775f05edef325a5a8e7b8d8 | 451 | py | Python | metabot2txt/display.py | HeitorBoschirolli/metabot2txt | 845c6b1042f7e586cf80de56e78c976e3c919f0a | [
"MIT"
]
| null | null | null | metabot2txt/display.py | HeitorBoschirolli/metabot2txt | 845c6b1042f7e586cf80de56e78c976e3c919f0a | [
"MIT"
]
| null | null | null | metabot2txt/display.py | HeitorBoschirolli/metabot2txt | 845c6b1042f7e586cf80de56e78c976e3c919f0a | [
"MIT"
]
| null | null | null | import os
def display_on_editor(text):
with open('.metabot2txt', 'w') as f:
f.write(text)
os.system('gedit .metabot2txt')
def display_list_on_editor(texts):
if os.path.isfile('.metabot2txt'):
os.remove('.metabot2txt')
for text in texts:
with open('.metabot2txt', 'a') as f:
f.write(text)
f.write('\n=====================================\n')
os.system('gedit .metabot2txt')
| 22.55 | 64 | 0.536585 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 145 | 0.321508 |
1c66f42291c5ccace3a10ed12cc3202e55caf594 | 47,394 | py | Python | cogs/errors.py | i1470s/IVRY | 922908b19b57881ad6fef2b45fabe6bc1ff7a298 | [
"MIT"
]
| 3 | 2020-10-03T20:53:39.000Z | 2020-10-11T07:58:57.000Z | cogs/errors.py | i1470s/IVRY | 922908b19b57881ad6fef2b45fabe6bc1ff7a298 | [
"MIT"
]
| 3 | 2020-10-11T22:23:30.000Z | 2020-10-14T16:54:37.000Z | cogs/errors.py | i1470s/IVRY | 922908b19b57881ad6fef2b45fabe6bc1ff7a298 | [
"MIT"
]
| null | null | null | #PRIMARY IMPORTS
import discord, os, datetime, sys, json, traceback, logging
#SECONDARY IMPORTS
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from discord.ext import commands
from data import config
#LOGGING
logger = logging.getLogger("ivry")
logger.debug("errors.py Started")
class Errors(commands.Cog):
def __init__(self, client):
self.client = client
#ERROR MESSAGES
@commands.Cog.listener()
async def on_command_error(self, ctx, error):
if hasattr(ctx.command, 'on_error'):
return
cog = ctx.cog
if cog:
if cog._get_overridden_method(cog.cog_command_error) is not None:
return
ignored = (commands.CommandNotFound)
error = getattr(error, 'original', error)
if isinstance(error, ignored):
return
#COMMAND ERROR
elif isinstance(error, commands.CommandError):
embed = discord.Embed(title=f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Command Error occured in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#CONVERSION ERROR
elif isinstance(error, commands.ConversionError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command Conversion Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Conversion Error occured in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#USER INPUT ERROR
elif isinstance(error, commands.UserInputError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal User Input Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal User Input Error occured in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#MISSING REQUIRED ARGUMENT
elif isinstance(error, commands.MissingRequiredArgument):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command Conversion Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Conversion Error occured in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#TOO MANY ARGUMENTS
elif isinstance(error, commands.TooManyArguments):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command Conversion Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Conversion Error occured in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#BAD ARGUMENT
elif isinstance(error, commands.BadArgument):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Bad Argument Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Bad Argument Error occured in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#MESSAGE NOT FOUND
elif isinstance(error, commands.MessageNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Message Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Message Not Found Error occured in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#MEMBER NOT FOUND
elif isinstance(error, commands.MemberNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Member Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Bad Member Not Found occured in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#USER NOT FOUND
elif isinstance(error, commands.UserNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal User Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal User Not Found Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#CHANNEL NOT FOUND
elif isinstance(error, commands.ChannelNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Channel Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Channel Not Found Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#CHANNEL NOT READABLE
elif isinstance(error, commands.ChannelNotReadable):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Channel Not Readable Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Channel Not Readable Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#BAD COLOR ARGUMENT
elif isinstance(error, commands.BadColourArgument):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Bad Colour Argument Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Bad Colour Argument Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#ROLE NOT FOUND
elif isinstance(error, commands.RoleNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Role Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Role Not Found Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#BAD INVITE ARGUMENT
elif isinstance(error, commands.BadInviteArgument):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Bad Invite Argument Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Bad Invite Argument Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#EMOJI NOT FOUND
elif isinstance(error, commands.EmojiNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Emoji Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Emoji Not Found Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#PARTIAL EMOJI CONVERSION FAILURE
elif isinstance(error, commands.PartialEmojiConversionFailure):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Partial Emoji Conversion Failure Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Partial Emoji Conversion Failure Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#BAD BOOL ARGUMENT
elif isinstance(error, commands.BadBoolArgument):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Bad Bool Argument Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Bad Bool Argument Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#BAD UNION ARGUMENT
elif isinstance(error, commands.BadUnionArgument):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Bad Union Argument Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Bad Union Argument Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#ARGUMENT PARSING ERROR
elif isinstance(error, commands.ArgumentParsingError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Argument Parsing Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Argument Parsing Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#UNEXPECTED QUOTE ERROR
elif isinstance(error, commands.UnexpectedQuoteError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Unexpected Quote Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Unexpected Quote Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#INVALID END OF QUOTED STRING
elif isinstance(error, commands.InvalidEndOfQuotedStringError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Invalid End Of Quoted String Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Invalid End Of Quoted String Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#EXPECTED CLOSING QUOTE ERROR
elif isinstance(error, commands.ExpectedClosingQuoteError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Expected Closing Quote Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Expected Closing Quote Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#COMMAND NOT FOUND
elif isinstance(error, commands.CommandNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Command Not Found Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#CHECK FAILURE
elif isinstance(error, commands.CheckFailure):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Check Failure Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Check Failure Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#CHECK ANY FAILURE
elif isinstance(error, commands.CheckAnyFailure):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Check Any Failure Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Check Any Failure Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#PRIVATE MESSAGE ONLY
elif isinstance(error, commands.PrivateMessageOnly):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Private Message Only Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Private Message Only Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#NO PRIVATE MESSAGE
elif isinstance(error, commands.NoPrivateMessage):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal No Private Message Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal No Private Message Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#NOT OWNER
elif isinstance(error, commands.NotOwner):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Not Owner Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Not Owner Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#MISSING PERMISSIONS
elif isinstance(error, commands.MissingPermissions):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Missing Permissions Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Missing Permissions Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#MISSING ROLE
elif isinstance(error, commands.MissingRole):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Missing Role Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Missing Role Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#BOT MISSING ROLE
elif isinstance(error, commands.BotMissingRole):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Bot Missing Role Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Bot Missing Role Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#MISSING ANY ROLE
elif isinstance(error, commands.MissingAnyRole):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Missing Any Role Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Missing Any Role Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#BOT MISSING ANY ROLE
elif isinstance(error, commands.BotMissingAnyRole):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Bot Missing Any Role Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Bot Missing Any Role Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#NSFW CHANNEL REQUIRED
elif isinstance(error, commands.NSFWChannelRequired):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal NSFW Channel Required Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal NSFW Channel Required Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#DISABLED COMMAND
elif isinstance(error, commands.DisabledCommand):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Disabled Command Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Disabled Command Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#COMMAND INVOKE ERROR
elif isinstance(error, commands.CommandInvokeError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command Invoke Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Command Invoke Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#COMMAND ON COOLDOWN
elif isinstance(error, commands.CommandOnCooldown):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command On Cooldown Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Command On Cooldown Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#MAX CONCURRENCY REACHED
elif isinstance(error, commands.MaxConcurrencyReached):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Max Concurrency Reached Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Max Concurrency Reached Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#EXTENSION ERROR
elif isinstance(error, commands.ExtensionError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal EXT Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Extension Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#EXTENSION ALREADY LOADED
elif isinstance(error, commands.ExtensionAlreadyLoaded):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal EXT Already Loaded Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Extension Already Loaded Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#EXTENSION NOT LOADED
elif isinstance(error, commands.ExtensionNotLoaded):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal EXT Not Loaded Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Extension Not Loaded Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#NO ENTRY POINT ERROR
elif isinstance(error, commands.NoEntryPointError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal No Entry Point Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal No Entry Point Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#EXTENSION FAILED
elif isinstance(error, commands.ExtensionFailed):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal EXT Failed Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Extension Failed Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#EXTENSION NOT FOUND
elif isinstance(error, commands.ExtensionNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal EXT Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Extension Not Found Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#CLIENT EXCEPTION
#COMMAND REGISTRATION ERROR
elif isinstance(error, commands.CommandRegistrationError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command Registration Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
print(f'[WARNING] A Fatal internal Command Registration Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
else:
print('Ignoring exception in command {}:'.format(ctx.command), file=sys.stderr)
traceback.print_exception(type(error), error, error.__traceback__, file=sys.stderr)
def setup(client):
client.add_cog(Errors(client)) | 65.551867 | 138 | 0.531502 | 47,022 | 0.992151 | 0 | 0 | 46,863 | 0.988796 | 46,828 | 0.988058 | 14,451 | 0.304912 |
1c672bcb64cc19b33318c71e9093a770db7e263e | 1,315 | py | Python | Using Python to Access Web Data/Problem 6_Extracting Data from JSON 5.py | Karoline0097/University-of-Michigan-Python-for-Everybody | 8b3999638c0c074ae3c1120de87cf8f31740ebb8 | [
"MIT"
]
| null | null | null | Using Python to Access Web Data/Problem 6_Extracting Data from JSON 5.py | Karoline0097/University-of-Michigan-Python-for-Everybody | 8b3999638c0c074ae3c1120de87cf8f31740ebb8 | [
"MIT"
]
| null | null | null | Using Python to Access Web Data/Problem 6_Extracting Data from JSON 5.py | Karoline0097/University-of-Michigan-Python-for-Everybody | 8b3999638c0c074ae3c1120de87cf8f31740ebb8 | [
"MIT"
]
| null | null | null | ## Problem 5: Extracting Data from JSON
# Example: http://py4e-data.dr-chuck.net/comments_42.json
# data consists of a number of names and comment counts in JSON
# {
# comments: [
# {
# name: "Matthias"
# count: 97
# },
# {
# name: "Geomer"
# count: 97
# }
# ...
# ]
# }
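# As a sketch of what the parsed structure looks like (values here are made
# up, not taken from the real feed):
#   json.loads('{"comments":[{"name":"Matthias","count":97}]}')['comments'][0]['count']
# evaluates to 97, which is the per-user count summed below.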
import urllib.request, urllib.parse, urllib.error
import json
import ssl
# Ignore SSL certificate errors
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
# prompt for a URL
url = input('Enter URL: ')
# handle for data
data_handle = urllib.request.urlopen(url, context=ctx)
# read the JSON data from that URL using urllib
# decode UTF 8 byte array to Unicode string
data = data_handle.read().decode()
# parse string containing json into structured object (-> JSON object / Python dictionary)
# data_js is dictionary
data_js = json.loads(data)
# compute the sum of the numbers in the file
number_sum = 0
# parse and extract the comment counts from the JSON data,
# data_js['comments'] is list of dictionaries
# print(data_js['comments'])
for user in data_js['comments']:
print('Name:', user['name'])
print('Count:', user['count'])
number_sum = number_sum + user['count']
# Example: Total count 2553
print('Total Count:', number_sum)
| 24.811321 | 90 | 0.686692 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 846 | 0.643346 |
1c67babe06797acaab8d0e9b738376ce3cb3ee88 | 376 | py | Python | lessons/day_05/python/app.py | jiaguilera/a-walk-in-graphql | ed4f44b4f4bf283cc7342141eb8127a2745ea2d7 | [
"MIT"
]
| 16 | 2020-06-16T17:12:16.000Z | 2021-12-03T14:19:38.000Z | lessons/day_05/python/app.py | martinarnesi/a-walk-in-graphql | 56cd949cbeb4c4322882bd15398a867b16900ccd | [
"MIT"
]
| 8 | 2020-06-11T21:53:03.000Z | 2020-07-26T01:47:10.000Z | lessons/day_05/python/app.py | martinarnesi/a-walk-in-graphql | 56cd949cbeb4c4322882bd15398a867b16900ccd | [
"MIT"
]
| 9 | 2020-06-15T13:09:57.000Z | 2022-03-06T14:49:17.000Z | from ariadne import make_executable_schema, load_schema_from_path
from ariadne.asgi import GraphQL
from resolvers import query, skill, person, eye_color, mutation
# import schema from GraphQL file
type_defs = load_schema_from_path("./schema.gql")
schema = make_executable_schema(
type_defs, query, skill, person, eye_color, mutation
)
app = GraphQL(schema, debug=True)
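# The GraphQL object above is a plain ASGI application, so assuming an ASGI
# server such as uvicorn is installed it can be served with something like:
#   uvicorn app:app --reload
# where "app:app" points at this module and the variable defined above.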
| 28.923077 | 65 | 0.800532 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 47 | 0.125 |
1c6bbe01f2a25c56bbd4e7b84c94d14c49d0cee9 | 1,127 | py | Python | src/__main__.py | andreaswatch/piTomation | 140bff77ad0b84ad17898106c7be7dc48a2d0783 | [
"MIT"
]
| null | null | null | src/__main__.py | andreaswatch/piTomation | 140bff77ad0b84ad17898106c7be7dc48a2d0783 | [
"MIT"
]
| null | null | null | src/__main__.py | andreaswatch/piTomation | 140bff77ad0b84ad17898106c7be7dc48a2d0783 | [
"MIT"
]
| null | null | null | import importlib
import time
from pathlib import Path
import os
import sys
def import_plugins():
#find actual path
realpath = os.path.realpath(__file__)
dirname = os.path.dirname(realpath)
#add modules & plugins
plugin_path = os.path.join(dirname, "plugins")
for dir_path in Path(plugin_path).rglob('*.py'):
dp = str(dir_path)
if dp.lower().endswith("__init__.py"):
continue
path = dp[len(dirname)+1:-3].replace(os.sep,".")
if len(path.split('.')) < 4:
'''only import the top level plugin directory, so that potential submodules are
only imported if they are imported by the plugins.'''
print(" > " + path)
importlib.import_module(path)
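# As a rough illustration (directory names are hypothetical), the walk above
# would import a layout such as:
#   plugins/gpio/gpio.py -> "plugins.gpio.gpio"
#   plugins/mqtt/mqtt.py -> "plugins.mqtt.mqtt"
# while deeper helpers such as plugins/gpio/lib/util.py are skipped and left
# for the plugins themselves to import.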
print("Import plugins ..")
import_plugins()
print("Import app ..")
import modules.app.App as piTomation
app: piTomation.App
print("Start app ..")
app = piTomation.App()
#try:
# app = piTomation.App()
#except Exception as ex:
# print(ex)
# exit()
try:
while not app.is_disposed:
time.sleep(1)
except Exception as ex:
print(ex)
| 21.673077 | 92 | 0.624667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 353 | 0.313221 |
1c6d48cdfb7c008c470c879e2a06b5ce0223008d | 1,208 | py | Python | src/decanter/core/extra/utils.py | MatthewK3023/decanter-ai-core-sdk | d09a0316d5c3f28d55fd0dd83ef7f3e141d421de | [
"MIT"
]
| null | null | null | src/decanter/core/extra/utils.py | MatthewK3023/decanter-ai-core-sdk | d09a0316d5c3f28d55fd0dd83ef7f3e141d421de | [
"MIT"
]
| null | null | null | src/decanter/core/extra/utils.py | MatthewK3023/decanter-ai-core-sdk | d09a0316d5c3f28d55fd0dd83ef7f3e141d421de | [
"MIT"
]
| null | null | null | """
Helper functions that support other modules.
"""
import uuid
def check_response(response, key=None):
"""Check the API response.
Make sure the status code is successful and that the response has the specified key.
Return:
class: `Response <Response>`
"""
code = response.status_code
if not 200 <= code < 300:
raise Exception('[Decanter Core response Error] Request Error')
if key is not None and key not in response.json():
raise KeyError('[Decanter Core response Error] No key value')
return response
def gen_id(type_, name):
"""Generate a random UUID if name isn't given.
Returns:
string
"""
if name is None:
rand_id = uuid.uuid4()
rand_id = str(rand_id)[:8]
name = type_ + '_' + rand_id
return name
def isnotebook():
"""Return True if SDK is running on Jupyter Notebook."""
try:
shell = get_ipython().__class__.__name__
if shell == 'ZMQInteractiveShell':
return True # Jupyter notebook or qtconsole
if shell == 'TerminalInteractiveShell':
return False # Terminal running IPython
return False
except NameError:
return False
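# Minimal usage sketch (names and values are illustrative only):
#   resp = check_response(requests.get(url), key='experiment_id')
#   job_name = gen_id('Job', None)  # e.g. "Job_1a2b3c4d"
#   in_notebook = isnotebook()
# check_response() raises if the call failed or the key is missing; gen_id()
# only generates a name when none was supplied.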
| 23.686275 | 79 | 0.626656 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 541 | 0.447848 |
1c6e264ceb5ab2e61f2f2b6e3294aa8858b8f9fd | 1,064 | py | Python | 03/03.py | stevenpclark/aoc2021 | 726009e5a2a87025943a736e8676784ca7cdc8bd | [
"MIT"
]
| 1 | 2021-11-30T05:25:58.000Z | 2021-11-30T05:25:58.000Z | 03/03.py | stevenpclark/aoc2021 | 726009e5a2a87025943a736e8676784ca7cdc8bd | [
"MIT"
]
| null | null | null | 03/03.py | stevenpclark/aoc2021 | 726009e5a2a87025943a736e8676784ca7cdc8bd | [
"MIT"
]
| null | null | null | import numpy as np
def filter_data(data, use_most_common):
_, nc = data.shape
for c in range(nc):
nr, _ = data.shape
if nr <= 1:
break
col_score = sum(data[:,c])/nr
if use_most_common:
keep_val = col_score >= 0.5
else:
keep_val = col_score < 0.5
mask = data[:,c] == keep_val
data = data[mask, :]
x = 0
for n in data[0,:]:
x = (x << 1) + n
return x
def main():
fn = 'input.txt'
#fn = 'test.txt'
lines = np.loadtxt(fn, dtype=str)
num_lines = len(lines)
data = np.array([[int(c) for c in s] for s in lines])
gamma_list = (np.sum(data, axis=0)/num_lines > 0.5).astype(int)
gamma = 0
epsilon = 0
for n in gamma_list:
gamma = (gamma << 1) + n
epsilon = (epsilon << 1) + (1-n)
print(gamma*epsilon)
rating1 = filter_data(data, use_most_common=True)
rating2 = filter_data(data, use_most_common=False)
print(rating1*rating2)
if __name__ == '__main__':
main()
| 20.461538 | 67 | 0.535714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 37 | 0.034774 |
1c6f742ff7bb6409fa5b1d806e2433034d2aa878 | 1,096 | py | Python | distillation/build_student.py | fengxiaoshuai/CNN_model_optimizer | 4c48420989ffe31a4075d36a5133fee0d999466a | [
"Apache-2.0"
]
| null | null | null | distillation/build_student.py | fengxiaoshuai/CNN_model_optimizer | 4c48420989ffe31a4075d36a5133fee0d999466a | [
"Apache-2.0"
]
| 1 | 2021-01-05T10:41:24.000Z | 2021-01-05T10:41:24.000Z | distillation/build_student.py | fengxiaoshuai/CNN_model_optimizer | 4c48420989ffe31a4075d36a5133fee0d999466a | [
"Apache-2.0"
]
| 1 | 2020-08-07T02:56:20.000Z | 2020-08-07T02:56:20.000Z | import tensorflow as tf
import numpy as np
with tf.variable_scope("student"):
input_label = tf.placeholder(dtype=tf.float32, shape=[10, 10], name="label")
input_image = tf.placeholder(dtype=tf.float32, shape=[10, 224, 224, 3], name="input")
conv1 = tf.layers.conv2d(inputs=input_image, filters=64, kernel_size=[3, 3], padding='same')
conv2 = tf.layers.conv2d(conv1, filters=64, kernel_size=[3, 3], padding='same')
conv3 = tf.layers.conv2d(conv2, filters=64, kernel_size=[3, 3], padding='same')
shape = int(np.prod(conv3.get_shape()[1:]))
flat = tf.reshape(conv3, [-1, shape])
fc1 = tf.layers.dense(flat, units=100)
fc2 = tf.layers.dense(fc1, units=10, name="logit")
probability = tf.nn.softmax(fc2)
loss = tf.losses.softmax_cross_entropy(input_label, fc2)
print(input_label)
image = np.ones(shape=[10, 224, 224, 3])
with tf.Session() as sess:
init = tf.global_variables_initializer()
sess.run(init)
saver = tf.train.Saver()
saver.save(sess, "./student/student")
print(sess.run(probability, feed_dict={input_image: image}))
| 40.592593 | 96 | 0.681569 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 67 | 0.061131 |
1c70409e75cdadbb3949d0d1cde6a6029abd620b | 5,365 | py | Python | code/statistical_tests.py | ChamiLamelas/Math36B_FinalProject | 0bdb5d17769553a4edb163534c21cc641860a07a | [
"MIT"
]
| null | null | null | code/statistical_tests.py | ChamiLamelas/Math36B_FinalProject | 0bdb5d17769553a4edb163534c21cc641860a07a | [
"MIT"
]
| null | null | null | code/statistical_tests.py | ChamiLamelas/Math36B_FinalProject | 0bdb5d17769553a4edb163534c21cc641860a07a | [
"MIT"
]
| null | null | null | import scipy.stats
import numpy as np
def f_test(sample_x, sample_y, larger_varx_alt):
"""
Computes the F-value and corresponding p-value for a pair of samples and alternative hypothesis.
Parameters
----------
sample_x : list
A random sample x1,...,xnx. Let its (underlying) variance be ox^2 and its sample variance Sx^2.
sample_y : list
A random sample y1,...,yny. Let its (underlying) variance be oy^2 and its sample variance Sy^2.
larger_varx_alt : bool
True if alternative hypothesis is ox^2 > oy^2. False if ox^2 < oy^2.
Returns
-------
f_value : float
Sx^2 / Sy^2 as defined in 'A Quick, Compact, Two-Sample Dispersion Test: Count Five'.
p_value : float
Let F be the F-distribution with nx-1, ny-1 df. 1 - P(F < f_value) if larger_varx_alt = True, P(F < f_value) otherwise. More extreme F = Sx^2 / Sy^2 values for alternative ox^2 > oy^2 are to the right. More extreme F values for ox^2 < oy^2 are to the left.
"""
# calculate unbiased sample variances (n-1 in the denominator)
sample_var_x = np.var(sample_x, ddof=1)
sample_var_y = np.var(sample_y, ddof=1)
f_value = sample_var_x/sample_var_y
nx = len(sample_x)
ny = len(sample_y)
# compute P(F < f_value) with nx-1, ny-1 df
cdf = scipy.stats.f.cdf(f_value, nx-1, ny-1)
# More extreme f_value = Sx^2 / Sy^2 values for alternative ox^2 > oy^2. ox^2 being even bigger would be represented by larger quotient Sx^2 / Sy^2.
# More extreme f_value for ox^2 < oy^2 are to the left. ox^2 being even smaller would be represented by smaller quotient.
p_value = 1 - cdf if larger_varx_alt else cdf
return f_value, p_value
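# Illustrative call (data invented for the example): with a noticeably wider
# x sample, a small p-value supports the alternative ox^2 > oy^2.
#   x = np.random.normal(0, 2.0, 30)
#   y = np.random.normal(0, 1.0, 25)
#   f_val, p = f_test(x, y, larger_varx_alt=True)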
def f1_test(sample_x, sample_y, larger_varx_alt):
"""
Computes the F1-value as defined in 'Fixing the F Test for Equal Variances' and corresponding p-value for a pair of samples and alternative hypothesis.
Parameters
----------
sample_x : list
A random sample x1,...,xnx. Let its (underlying) variance be ox^2 and its sample variance Sx^2.
sample_y : list
A random sample y1,...,yny. Let its (underlying) variance be oy^2 and its sample variance Sy^2.
larger_varx_alt : bool
True if alternative hypothesis is ox^2 > oy^2. False if ox^2 < oy^2.
Returns
-------
p_value : float
Let F be the F-distribution with rx, ry df as specified in equation (1) of 'Fixing the F Test for Equal Variances'. 1 - P(F < f_value) if larger_varx_alt = True, P(F < f_value) otherwise.
"""
# calculate unbiased sample variances (n-1 in the denominator)
sample_var_x = np.var(sample_x, ddof=1)
sample_var_y = np.var(sample_y, ddof=1)
f_value = sample_var_x/sample_var_y
nx = len(sample_x)
ny = len(sample_y)
xmean = np.mean(sample_x)
ymean = np.mean(sample_y)
# compute the fourth moment and pooled variance defined below equation (1) of the Shoemaker paper
fourth_moment = (np.sum((sample_x - xmean)**4) +
np.sum((sample_y - ymean)**4))/(nx + ny)
pooled_var = ((nx-1)*sample_var_x + (ny-1)*sample_var_y)/(nx + ny)
# see equation (1) of Shoemaker paper
rx = 2*nx / ((fourth_moment/pooled_var**2) - ((nx - 3)/(nx - 1)))
ry = 2*ny / ((fourth_moment/pooled_var**2) - ((ny - 3)/(ny - 1)))
# compute P(F < f_value) with rx-1, ry-1 df
cdf = scipy.stats.f.cdf(f_value, rx-1, ry-1)
# More extreme f_value = Sx^2 / Sy^2 values for alternative ox^2 > oy^2. ox^2 being even bigger would be represented by larger quotient Sx^2 / Sy^2.
# More extreme f_value for ox^2 < oy^2 are to the left. ox^2 being even smaller would be represented by smaller quotient.
p_value = 1 - cdf if larger_varx_alt else cdf
return p_value
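# f1_test is called the same way as f_test above but returns only the p-value:
#   p = f1_test(x, y, larger_varx_alt=True)  # x, y as in the f_test example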
def count_five(sample_x, sample_y, center):
"""
Computes the extreme counts for samples x and y as defined in 'A Quick, Compact, Two-Sample Dispersion Test: Count Five'.
Parameters
----------
sample_x : list
A random sample x1,...,xn.
sample_y : list
A random sample y1,...,ym.
center : str
Whether to use 'mean' or 'median' for centering.
Returns
-------
extreme_count_x : int
C_x computed with centering mu being sample mean if center = 'mean' and sample median if center = 'median' as defined in equation (1) of 'A Quick, Compact, Two-Sample Dispersion Test: Count Five'.
extreme_count_y : int
C_y defined analogously to C_x above.
Raises
------
ValueError
If center is neither 'mean' or 'median'.
"""
if center not in {'mean', 'median'}:
raise ValueError('Invalid center %s' % (center))
if center == 'mean':
centering_x = np.mean(sample_x)
centering_y = np.mean(sample_y)
else:
centering_x = np.median(sample_x)
centering_y = np.median(sample_y)
# compute absolute deviations from centering for x, y samples
abs_dev_x = np.abs(np.array(sample_x) - centering_x)
abs_dev_y = np.abs(np.array(sample_y) - centering_y)
# count number of X deviations greater than max Y deviation and vice versa
# see equation (1) of Count Five paper
extreme_count_x = np.sum(np.where(abs_dev_x > np.max(abs_dev_y), 1, 0))
extreme_count_y = np.sum(np.where(abs_dev_y > np.max(abs_dev_x), 1, 0))
return extreme_count_x, extreme_count_y
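# Illustrative call (samples invented for the example):
#   cx, cy = count_five([1.1, 0.9, 1.0, 2.5], [1.00, 1.05, 0.95, 1.02], 'mean')
# In the Count Five procedure, equal spread is rejected when either extreme
# count reaches five.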
| 41.269231 | 261 | 0.654054 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,540 | 0.659832 |
1c71ba0a22523d640266f7845ef799a8f73cbe39 | 243 | py | Python | pawpyseed/compiler.py | akashkumarsingh612/pawpyseed | 6f5aa0b8ca8c28a0221e5256afeb939c3344560b | [
"BSD-3-Clause"
]
| null | null | null | pawpyseed/compiler.py | akashkumarsingh612/pawpyseed | 6f5aa0b8ca8c28a0221e5256afeb939c3344560b | [
"BSD-3-Clause"
]
| null | null | null | pawpyseed/compiler.py | akashkumarsingh612/pawpyseed | 6f5aa0b8ca8c28a0221e5256afeb939c3344560b | [
"BSD-3-Clause"
]
| null | null | null | import os, subprocess
def compile_core(comp, scilib):
"""
ATTENTION, NOT FINISHED
"""
subprocess.call(("make pawpy_%s"%comp).split())
def compile_core(comp, scilib):
"""
ATTENTION, NOT FINISHED
"""
subprocess.call("make hfc".split()) | 18.692308 | 48 | 0.691358 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 91 | 0.374486 |
1c720e3c45ed8efa4771cbbb3a3b55d0385c9d41 | 1,125 | py | Python | finnhub_python/socket.py | humdings/finnhub-python | ca98681e5a529598e9d17e3ebc2f6d49c64b54de | [
"MIT"
]
| null | null | null | finnhub_python/socket.py | humdings/finnhub-python | ca98681e5a529598e9d17e3ebc2f6d49c64b54de | [
"MIT"
]
| null | null | null | finnhub_python/socket.py | humdings/finnhub-python | ca98681e5a529598e9d17e3ebc2f6d49c64b54de | [
"MIT"
]
| null | null | null | """
Example usage of Finnhub socket API.
"""
from __future__ import print_function # Py2 compat
import websocket
from finnhub_python.utils import get_finnhub_api_key
def write_line(data, fname):
with open(fname, 'a+') as f:
f.write(data + '\n')
def on_message(ws, message):
write_line(message, tick_file)
def on_error(ws, error):
print(error)
def on_close(ws):
print("### closed ###")
def on_open(ws):
for symbol in SYMBOLS:
subscribe(ws, symbol)
def subscribe(ws, symbol):
template = '{"type":"subscribe","symbol":"X"}'
req = template.replace('X', symbol.upper())
ws.send(req)
tick_file = 'raw_ticks.txt'
token = get_finnhub_api_key()
SYMBOLS = [
"AAPL",
"SPY",
"VXX",
"BINANCE:ETHUSDT",
"BINANCE:BTCUSDT"
]
if __name__ == "__main__":
websocket.enableTrace(True)
ws = websocket.WebSocketApp("wss://ws.finnhub.io?token=" + token,
on_message=on_message,
on_error=on_error,
on_close=on_close)
ws.on_open = on_open
ws.run_forever() | 20.089286 | 69 | 0.604444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 221 | 0.196444 |
1c722363623f21dde32f8eb4058f20a248ddb2fd | 2,570 | py | Python | pycovjson/cli/convert.py | RileyWilliams/pycovjson | 741737f53ef18ef1476eccb5e626866843c152bd | [
"BSD-3-Clause"
]
| 10 | 2016-08-16T17:46:30.000Z | 2021-04-06T22:03:58.000Z | pycovjson/cli/convert.py | RileyWilliams/pycovjson | 741737f53ef18ef1476eccb5e626866843c152bd | [
"BSD-3-Clause"
]
| 46 | 2016-07-21T13:14:14.000Z | 2020-07-02T09:16:29.000Z | pycovjson/cli/convert.py | RileyWilliams/pycovjson | 741737f53ef18ef1476eccb5e626866843c152bd | [
"BSD-3-Clause"
]
| 6 | 2016-07-29T09:56:37.000Z | 2020-08-23T18:20:47.000Z | """
Pycovjson - Command line interface
Author: rileywilliams
Version: 0.1.0
"""
import argparse
from pycovjson.write import Writer
from pycovjson.read_netcdf import NetCDFReader as Reader
def main():
"""
Command line interface for pycovjson - Converts Scientific Data Formats into CovJSON and saves to disk.
:argument -i: Input file path.
:argument -o: Output file name.
:argument -t: Use Tiling.
:argument -v: Which variable to populate coverage with.
:argument -s: Tile shape, given as a list of integers.
:argument -n: Use interactive mode.
:argument -u: MongoDB URL
"""
parser = argparse.ArgumentParser(
description='Convert Scientific Data Formats into CovJSON.')
parser.add_argument('-i', '--input', dest='inputfile',
help='Name of input file', required=True)
parser.add_argument('-o', '--output', dest='outputfile',
help='Name and location of output file', default='coverage.covjson')
parser.add_argument('-t', '--tiled', action='store_true', help='Apply tiling')
parser.add_argument('-s', '--shape', nargs='+',
help='Tile shape, list', type=int)
parser.add_argument('-v', dest='variable',
help='Variable to populate coverage with', required=True)
parser.add_argument('-n', '--interactive', action='store_true', help='Enter interactive mode')
parser.add_argument('-u', '--endpoint_url', dest='endpoint_url', nargs=1,
help='MongoDB endpoint for CovJSON persistence')
args = parser.parse_args()
inputfile = args.inputfile
outputfile = args.outputfile
variable = args.variable
tiled = args.tiled
tile_shape = args.shape
interactive = args.interactive
endpoint_url = args.endpoint_url
if interactive:
axis = input('Which Axis?', Reader.get_axis(variable))
if tiled and len(tile_shape) == 0:
reader = Reader(inputfile)
shape_list = reader.get_shape(variable)
dims = reader.get_dimensions(variable)
print(list(zip(dims, shape_list)))
tile_shape = input(
'Enter the tile shape as a list of comma separated integers')
tile_shape = tile_shape.split(',')
tile_shape = list(map(int, tile_shape))
print(tile_shape)
if outputfile == None:
outputfile = outputfile.default
Writer(outputfile, inputfile, [variable],
tiled=tiled, tile_shape=tile_shape, endpoint_url=endpoint_url).write()
if __name__ == '__main__':
main()
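# Example invocation (file and variable names are placeholders):
#   python convert.py -i sst.nc -v analysed_sst -o sst.covjson -t -s 1 100 100
# which converts the "analysed_sst" variable of sst.nc into tiled CovJSON.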
| 36.714286 | 107 | 0.649027 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 992 | 0.385992 |