code
stringlengths 20
1.05M
| apis
sequence | extract_api
stringlengths 75
5.24M
|
---|---|---|
import numpy as np
import pandas as pd
import xarray as xr
from . import utils
from .bounds import PolygonBounds
def subset(da, domain):
"""Subset dataset to specified domain.
Parameters
----------
da : xarray.DataArray
Dataset
domain : bcdp.Bounds
Boundary to subset on.
Returns
-------
xarray.DataArray
Subsetted dataset.
"""
# Use simpler 1D subsettting if bounds is just a bounding box.
domain = self.overlap if domain is None else domain
if isinstance(domain, PolygonBounds):
pts = np.c_[da.lon.values.ravel(), da.lat.values.ravel()]
mask = domain.contains(pts).reshape(da.lon.shape)
md = xr.DataArray(mask, dims=('y', 'x'))
da = da.where(md).isel(x=md.any('y'), y=md.any('x'))
else:
da = (da.pipe(utils.subset_1d, 'y', domain.lat_bnds)
.pipe(utils.subset_1d, 'x', domain.lon_bnds))
if domain.time_bnds._bnds:
da = utils.subset_1d(da, 'time', domain.time_bnds)
return da
def normalize_times(da, assume_gregorian=False):
"""Normalize times in dataset.
If frequency is monthly, set day of month to 1. If daily, set hour to 0Z.
Parameters
----------
da : xarray.DataArray
Dataset
assume_gregorian : bool, optional
If True, express datetimes on nonstandard calendars to gregorian.
Returns
-------
xarray.DataArray
Normalized dataset.
"""
da = da.copy()
idx = da.indexes['time']
times = idx.to_series()
da['time'] = times.apply(lambda d: d.replace(day=1).replace(hour=0))
if assume_gregorian and not isinstance(idx, pd.DatetimeIndex):
times = times.apply(lambda d: np.datetime64(str(d)))
da = da.assign_coords(time=times)
return da
def resample(da, freq):
"""Resample datasets to a standard frequency.
Parameters
----------
da : xarray.DataArray
Dataset
freq : str, optional
Pandas frequency string.
Returns
-------
xarray.DataArray
Subsetted dataset.
"""
try:
ref_freq = utils.infer_freq(da)
except TypeError:
# non-standard calendar
ref_freq = None
if freq != ref_freq:
attrs = da.attrs
da = da.resample(time=freq).mean('time')
da.attrs.update(attrs)
return da
def select_season(da, season=None):
"""Subset dataset to only selected season.
Parameters
----------
da : xarray.DataArray
Dataset
season : str or tuple, optional
Season. Can be a string (eg 'NDJFM'), or a tuple
(start_month, end_month)
Returns
-------
xarray.DataArray
Seasonalized dataset.
"""
if season:
ys, ye = da.time.dt.year.values.min(), da.time.dt.year.values.max()
if isinstance(season, str):
ms, me = utils.season_to_range(season)
else:
# Subset data to include only selected season
ms, me = season
mask1 = da.time.dt.month >= ms
mask2 = da.time.dt.month <= me
if ms > me:
cond1 = mask1|mask2
else:
cond1 = mask1&mask2
# Additionally remove years which do not contain all months
# in the season
locut = f'{ys}-{ms}'
hicut = f'{ye}-{me}'
str_times = da.time.astype(str)
cond2 = (str_times >= locut) & (str_times <= hicut)
da = da.isel(time=cond1&cond2)
return da
| [
"xarray.DataArray"
] | [((719, 754), 'xarray.DataArray', 'xr.DataArray', (['mask'], {'dims': "('y', 'x')"}), "(mask, dims=('y', 'x'))\n", (731, 754), True, 'import xarray as xr\n')] |
from math import sqrt
from PyQt5.QtCore import QRectF
from epyseg.draw.shapes.circle2d import *
from epyseg.tools.logger import TA_logger
logger = TA_logger()
class Point2D(QPointF):
def __init__(self, *args, color=0xFFFF00, fill_color=None, opacity=1., stroke=0.65, line_style=None, **kwargs):
self.isSet = True
if len(args)==2:
self.size = 5
if stroke is not None and stroke > 2:
self.size = stroke
#TODO need fix size
super(Point2D, self).__init__(*args)
else:
self.size = 5
super(Point2D, self).__init__(*args) # create an empty point for drawing
self.color = color
self.fill_color = fill_color
self.stroke = stroke
self.opacity = opacity
self.scale = 1
self.translation = QPointF()
self.line_style = line_style
def set_opacity(self, opacity):
self.opacity = opacity
def set_line_style(self,style):
'''allows lines to be dashed or dotted or have custom pattern
:param style: a list of numbers or any of the following Qt.SolidLine, Qt.DashLine, Qt.DashDotLine, Qt.DotLine, Qt.DashDotDotLine but not Qt.CustomDashLine, Qt.CustomDashLine is assumed by default if a list is passed in. None is also a valid value that resets the line --> assume plain line
:return:
'''
self.line_style = style
# if style is a list then assume custom pattern otherwise apply solidline
def contains(self, *args):
x=0
y=0
if isinstance(args[0], QPoint) or isinstance(args[0], QPointF):
x = args[0].x()
y = args[0].y()
if sqrt((x-self.x())**2+(y-self.y())**2)<10:
return True
return False
def translate(self, translation):
self.setX(self.x() + translation.x())
self.setY(self.y() + translation.y())
def draw(self, painter, draw=True):
if self.color is None and self.fill_color is None:
return
if draw:
painter.save()
painter.setOpacity(self.opacity)
if self.color is not None:
pen = QPen(QColor(self.color))
if self.stroke is not None:
pen.setWidthF(self.stroke)
if self.line_style is not None:
if self.line_style in [Qt.SolidLine, Qt.DashLine, Qt.DashDotLine, Qt.DotLine, Qt.DashDotDotLine]:
pen.setStyle(self.line_style)
elif isinstance(self.line_style, list):
pen.setStyle(Qt.CustomDashLine)
pen.setDashPattern(self.line_style)
painter.setPen(pen)
else:
painter.setPen(Qt.NoPen) # required to draw something filled without a border
if self.fill_color is not None:
painter.setBrush(QBrush(QColor(self.fill_color)))
if draw:
point_to_draw = QPointF(self.x(), self.y())
if self.scale is not None and self.scale != 1:
point_to_draw.setX(point_to_draw.x()*self.scale)
point_to_draw.setY(point_to_draw.y()*self.scale)
if self.translation is not None:
point_to_draw.setX(point_to_draw.x()+self.translation.x())
point_to_draw.setY(point_to_draw.y()+self.translation.y())
painter.drawEllipse(point_to_draw.x()-self.stroke/2., point_to_draw.y()-self.stroke/2, self.stroke, self.stroke)
painter.restore()
# def fill(self, painter, draw=True):
# if self.fill_color is None:
# return
# if draw:
# painter.save()
# painter.setBrush(QBrush(QColor(self.fill_color)))
# painter.setOpacity(self.opacity)
# if draw:
# painter.drawEllipse(self.x()-self.stroke/2., self.y()-self.stroke/2, self.stroke, self.stroke)
# painter.restore()
#
# def drawAndFill(self, painter):
# painter.save()
# self.draw(painter, draw=False)
# self.fill(painter, draw=False)
# size = max(self.size, self.stroke)
# painter.drawEllipse(self.x()-size/2., self.y()-size/2, size, size) # drawEllipse (x, y, w, h)
# painter.restore()
def boundingRect(self):
return QRectF(self.x()-self.stroke/2., self.y()-self.stroke/2, self.stroke, self.stroke)
def add(self, *args):
point = args[1]
self.setX(point.x())
self.setY(point.y())
def set_P1(self, *args):
if not args:
logger.error("no coordinate set...")
return
if len(args) == 1:
# self.moveTo(args[0].x(), args[0].y())
self.setX(args[0].x())
self.setY(args[0].y())
else:
# self.moveTo(QPointF(args[0], args[1]))
self.setX(args[0])
self.setY(args[1])
# self.setX(point.x())
# self.setY(point.y())
def get_P1(self):
return self
def width(self):
return 0
def height(self):
return 0
def set_to_scale(self, factor):
self.scale = factor
def set_to_translation(self, translation):
self.translation = translation
if __name__ == '__main__':
# ça marche --> voici deux examples de shapes
test = Point2D(128, 128)
# print(test.x(), test.y(), test.width(), test.height())
print(test.contains(QPointF(128, 128)))
print(test.contains(QPointF(129, 129)))
print(test.contains(QPointF(-1, -1)))
print(test.contains(QPointF(0, 0)))
print(test.contains(QPointF(100, 100)))
print(test.contains(QPointF(100, 100.1)))
print(test.x())
print(test.y())
print(test.translate(QPoint(10, 10)))
print(test.x())
print(test.y())
# p1 = test.p1()
# print(p1.x(), p1.y())
# p2 = test.p2()
# print(p2.x(), p2.y())
# print(test.arrow)
# print(test.length()) # sqrt 2 --> 141
# # if it's an arrow I can add easily all the stuff I need
#
# test = Rect2D(0, 0, 1, 1)
# p1 = test.p1()
# print(p1.x(), p1.y())
# p2 = test.p2()
# print(p2.x(), p2.y())
# print(test.arrow)
# import math
# print(test.length() == math.sqrt(2)) # sqrt 2
#
# test2 = Rect2D()
# p1 = test2.p1()
# print(p1.x(), p1.y())
# p2 = test2.p2()
# print(p2.x(), p2.y())
# print(test2.arrow)
| [
"epyseg.tools.logger.TA_logger"
] | [((149, 160), 'epyseg.tools.logger.TA_logger', 'TA_logger', ([], {}), '()\n', (158, 160), False, 'from epyseg.tools.logger import TA_logger\n')] |
import os
from copy import copy
from astropy.io.fits import getdata, getheader
from astropy.wcs import WCS
from matplotlib import cm
from matplotlib.colors import Normalize
from solarviewer.app.plot import PlotWidget
from solarviewer.config.base import ViewerController, DataType, ViewerType, ViewerConfig, DataModel, Viewer
from solarviewer.util import classproperty
from solarviewer.viewer.util import MPLCoordinatesMixin
class Plain2DModel(DataModel):
def __init__(self, data):
self._data = data
self.wcs = None
self._cmap = cm.get_cmap("gray")
self.cmap_preferences = {"over": None, "under": None}
self.norm = Normalize(vmin=data.min(), vmax=data.max())
@property
def data(self):
return self._data
def setData(self, data):
self._data = data
@property
def cmap(self):
cmap = copy(self._cmap)
over = self.cmap_preferences["over"]
under = self.cmap_preferences["under"]
if over:
cmap.set_over(over)
if under:
cmap.set_under(under)
return cmap
def setCMap(self, cmap):
self._cmap = cmap
class AstroPyViewer(PlotWidget):
def __init__(self):
PlotWidget.__init__(self)
def draw(self, model):
try:
self.figure.clear()
self.ax = self.figure.add_subplot(111, projection=model.wcs)
image = self.ax.imshow(model.data, cmap=model.cmap, norm=model.norm)
except Exception as ex:
self.figure.clear()
self.figure.text(0.5, 0.5, s="Error during rendering data: " + str(ex), ha="center", va="center")
class AstroPyViewerController(ViewerController, MPLCoordinatesMixin):
data_type = DataType.PLAIN_2D
viewer_type = ViewerType.MPL
def __init__(self, model):
ViewerController.__init__(self)
self._model = model
self._view = AstroPyViewer()
self._view.updateModel(model)
MPLCoordinatesMixin.__init__(self)
@classproperty
def viewer_config(self) -> ViewerConfig:
return ViewerConfig().setMenuPath("File/Open 2D FITS/MPL")
@classmethod
def fromFile(cls, file):
data = getdata(file)
header = getheader(file)
model = Plain2DModel(data)
model.wcs = WCS(header)
model.title = os.path.basename(file)
return cls(model)
@classmethod
def fromModel(cls, model):
return cls(model)
def getTitle(self) -> str:
return self._model.title
def getView(self):
return self.view
def getContent(self):
return self.model
def setContent(self, model):
self.model = model
self.view.model = model
@property
def model(self) -> DataModel:
return self._model
@property
def view(self) -> Viewer:
return self._view
def updateModel(self, model):
self._model = model
self._view.updateModel(model)
| [
"astropy.io.fits.getheader",
"solarviewer.app.plot.PlotWidget.__init__",
"solarviewer.viewer.util.MPLCoordinatesMixin.__init__",
"solarviewer.config.base.ViewerConfig",
"solarviewer.config.base.ViewerController.__init__",
"astropy.wcs.WCS",
"astropy.io.fits.getdata",
"os.path.basename",
"copy.copy",
"matplotlib.cm.get_cmap"
] | [((560, 579), 'matplotlib.cm.get_cmap', 'cm.get_cmap', (['"""gray"""'], {}), "('gray')\n", (571, 579), False, 'from matplotlib import cm\n'), ((873, 889), 'copy.copy', 'copy', (['self._cmap'], {}), '(self._cmap)\n', (877, 889), False, 'from copy import copy\n'), ((1227, 1252), 'solarviewer.app.plot.PlotWidget.__init__', 'PlotWidget.__init__', (['self'], {}), '(self)\n', (1246, 1252), False, 'from solarviewer.app.plot import PlotWidget\n'), ((1833, 1864), 'solarviewer.config.base.ViewerController.__init__', 'ViewerController.__init__', (['self'], {}), '(self)\n', (1858, 1864), False, 'from solarviewer.config.base import ViewerController, DataType, ViewerType, ViewerConfig, DataModel, Viewer\n'), ((1978, 2012), 'solarviewer.viewer.util.MPLCoordinatesMixin.__init__', 'MPLCoordinatesMixin.__init__', (['self'], {}), '(self)\n', (2006, 2012), False, 'from solarviewer.viewer.util import MPLCoordinatesMixin\n'), ((2207, 2220), 'astropy.io.fits.getdata', 'getdata', (['file'], {}), '(file)\n', (2214, 2220), False, 'from astropy.io.fits import getdata, getheader\n'), ((2238, 2253), 'astropy.io.fits.getheader', 'getheader', (['file'], {}), '(file)\n', (2247, 2253), False, 'from astropy.io.fits import getdata, getheader\n'), ((2309, 2320), 'astropy.wcs.WCS', 'WCS', (['header'], {}), '(header)\n', (2312, 2320), False, 'from astropy.wcs import WCS\n'), ((2343, 2365), 'os.path.basename', 'os.path.basename', (['file'], {}), '(file)\n', (2359, 2365), False, 'import os\n'), ((2093, 2107), 'solarviewer.config.base.ViewerConfig', 'ViewerConfig', ([], {}), '()\n', (2105, 2107), False, 'from solarviewer.config.base import ViewerController, DataType, ViewerType, ViewerConfig, DataModel, Viewer\n')] |
import random
class Solution:
def findKthLargest(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
"""
if nums is None or len(nums)==0:
return nums
nums = self.quickSort(nums,0,len(nums)-1)
return nums[k-1]
def partition(self,nums,p,q):
ra = random.randint(p,q)
nums[p],nums[ra] = nums[ra],nums[p]
x = nums[p]
i = p
for j in range(p+1,q+1,1):
if nums[j]>=x: #左边是nums[j]而不是nums[i]
i+=1
nums[i],nums[j] = nums[j],nums[i]
nums[p],nums[i] = nums[i],nums[p]
return i
def quickSort(self,nums,p,q):
if p<=q:
r = self.partition(nums,p,q)
self.quickSort(nums,p,r-1)
self.quickSort(nums,r+1,q)
return nums
list = [3,2,1,5,6,4]
k = 2
s = Solution()
print(s.findKthLargest(list,k)) | [
"random.randint"
] | [((369, 389), 'random.randint', 'random.randint', (['p', 'q'], {}), '(p, q)\n', (383, 389), False, 'import random\n')] |
# coding=utf-8
import os
import numpy as np
def findFiles(root_dir, filter_type, reverse=False):
"""
在指定目录查找指定类型文件 -> paths, names, files
:param root_dir: 查找目录
:param filter_type: 文件类型
:param reverse: 是否返回倒序文件列表,默认为False
:return: 路径、名称、文件全路径
"""
separator = os.path.sep
paths = []
names = []
files = []
for parent, dirname, filenames in os.walk(root_dir):
for filename in filenames:
if filename.endswith(filter_type):
paths.append(parent + separator)
names.append(filename)
for i in range(paths.__len__()):
files.append(paths[i] + names[i])
print(names.__len__().__str__() + " files have been found.")
paths.sort()
names.sort()
files.sort()
if reverse:
paths.reverse()
names.reverse()
files.reverse()
return paths, names, files
def isDirExist(path='output'):
"""
判断指定目录是否存在,如果存在返回True,否则返回False并新建目录 -> bool
:param path: 指定目录
:return: 判断结果
"""
if not os.path.exists(path):
os.makedirs(path)
return False
else:
return True
def getRandomNum(start=0, end=100):
"""
获取指定范围内的随机整数,默认范围为0-100 -> rand_num
:param start: 最小值
:param end: 最大值
:return: 随机数
"""
return np.random.randint(start, end + 1) | [
"os.makedirs",
"os.path.exists",
"numpy.random.randint",
"os.walk"
] | [((388, 405), 'os.walk', 'os.walk', (['root_dir'], {}), '(root_dir)\n', (395, 405), False, 'import os\n'), ((1305, 1338), 'numpy.random.randint', 'np.random.randint', (['start', '(end + 1)'], {}), '(start, end + 1)\n', (1322, 1338), True, 'import numpy as np\n'), ((1041, 1061), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (1055, 1061), False, 'import os\n'), ((1071, 1088), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (1082, 1088), False, 'import os\n')] |
import pandas as pd
import json
class DataMiner:
def load_data(self, data):
"""
Loads data that miner uses further.
Parameters
----------
data : json/list
-- Data to be loaded. At the moment engine handles only json or list data
Returns
-------
"""
if type(data) is "json":
dataF = pd.read_json(data)
elif type(data) is list:
dataF = pd.DataFrame(data)
else:
print("Other types not implemented")
return
dataF = self.__process_data(dataF)
self.__dataF = dataF
def __process_data(self, dataF):
"""
Process Data
Processes data to create further custom columns that can be utiliesed later on.
Parameters
----------
dataF : dataframe
-- Table/Dataframe that needs futher processing
Returns
-------
dataF
-- Processed Dataframe containing requisite information.
"""
dataF['created_time'] = pd.to_datetime(dataF['created_time'])
dataF['char_cnt'] = dataF['message'].str.len()
dataF['month'] = dataF['created_time'].dt.month
dataF['week'] = dataF['created_time'].dt.week
return dataF
def get_avg_vals_per_group(self, group_col_name, val_col_name):
"""
Get Average/Mean values per group.
Average/Mean values for a given column
Parameters
----------
group_col_name : str
-- Name of the column by which to group data
val_col_name : str
-- Column name from which Unique values need to be retrived
Returns
-------
list
-- Returns list of average/mean values
"""
return self.__dataF.groupby([group_col_name])[val_col_name].mean()
def get_max_vals_per_group(self, group_col_name, val_col_name):
"""
Get Max values per group.
Max values for a given column
Parameters
----------
group_col_name : str
-- Name of the column by which to group data
val_col_name : str
-- Column name from which Unique values need to be retrived
Returns
-------
list
-- Returns list of max values
"""
return self.__dataF.groupby([group_col_name])[val_col_name].max()
def get_count_vals_per_group(self, group_col_name, val_col_name):
"""
Get Count of values per group.
Count of values for a given column
Parameters
----------
group_col_name : str
Name of the column by which to group data
val_col_name : str
Column name from which Unique values need to be retrived
Returns
-------
list
Returns list of count
"""
return self.__dataF.groupby([group_col_name])[val_col_name].count()
def get_unique_vals_per_group(self, group_col_name, val_col_name):
"""
Get Unique values per group.
Unique values for a given column
Parameters
----------
group_col_name : str
-- Name of the column by which to group data
val_col_name : str
-- Column name from which Unique values need to be retrived
Returns
-------
list
-- Returns list of unique values
"""
return self.__dataF.groupby([group_col_name])[val_col_name].nunique() | [
"pandas.DataFrame",
"pandas.read_json",
"pandas.to_datetime"
] | [((1079, 1116), 'pandas.to_datetime', 'pd.to_datetime', (["dataF['created_time']"], {}), "(dataF['created_time'])\n", (1093, 1116), True, 'import pandas as pd\n'), ((388, 406), 'pandas.read_json', 'pd.read_json', (['data'], {}), '(data)\n', (400, 406), True, 'import pandas as pd\n'), ((460, 478), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (472, 478), True, 'import pandas as pd\n')] |
# Copyright (c) 2013 VMware, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import copy
from congress import data_types
from congress.datalog import analysis
from congress.datalog import base as datalogbase
from congress.datalog import compile
from congress.datalog import utility
from congress import exception
from congress.policy_engines import agnostic
from congress.tests import base
from congress.tests import helper
class TestParser(base.TestCase):
def test_tablename(self):
"""Test correct parsing of tablenames."""
p = compile.parse1('p(1)')
self.assertEqual(p.table.table, 'p')
self.assertIsNone(p.table.modal)
self.assertIsNone(p.table.service)
p = compile.parse1('nova:p(1)')
self.assertEqual(p.table.table, 'p')
self.assertIsNone(p.table.modal)
self.assertEqual(p.table.service, 'nova')
p = compile.parse1('execute[nova:p(1)]')
self.assertEqual(p.table.table, 'p')
self.assertEqual(p.table.modal, 'execute')
self.assertEqual(p.table.service, 'nova')
def test_rule_hash(self):
"""Test whether rules are properly hashed."""
s = set()
p = compile.parse1('p(x) :- q(x), s(x,y)')
s.add(p)
q = compile.parse1('p(x) :- q(x), s(x,y)')
s.discard(q)
self.assertEqual(s, set())
def test_event_equality(self):
r1 = compile.parse1('p(x) :- q(x)')
r2 = compile.parse1('p(x) :- q(x)')
e1 = compile.Event(formula=r1, target='alice', insert=True)
e2 = compile.Event(formula=r2, target='alice', insert=True)
self.assertEqual(e1, e2)
def test_event_facts(self):
# insert
event = compile.parse('insert[p(1) :- true]')
self.assertEqual(len(event), 1)
event = event[0]
fact = compile.parse1('p(1) :- true')
self.assertEqual(event.formula, fact)
self.assertTrue(event.insert)
self.assertIsNone(event.target)
# delete
event = compile.parse('delete[p(1) :- true]')
self.assertEqual(len(event), 1)
event = event[0]
fact = compile.parse1('p(1) :- true')
self.assertEqual(event.formula, fact)
self.assertFalse(event.insert)
self.assertIsNone(event.target)
# insert with policy
event = compile.parse('insert[p(1) :- true; "policy"]')
self.assertEqual(len(event), 1)
event = event[0]
fact = compile.parse1('p(1) :- true')
self.assertEqual(event.formula, fact)
self.assertTrue(event.insert)
self.assertEqual(event.target, "policy")
def test_event_rules(self):
"""Test modal operators."""
# a rule we use a few times
pqrule = compile.parse1('p(x) :- q(x)')
# rule-level modal (with insert)
event = compile.parse('insert[p(x) :- q(x)]')
self.assertEqual(len(event), 1)
event = event[0]
self.assertEqual(event.formula, pqrule)
self.assertTrue(event.insert)
self.assertIsNone(event.target)
# rule-level modal with delete
event = compile.parse('delete[p(x) :- q(x)]')
self.assertEqual(len(event), 1)
event = event[0]
self.assertEqual(event.formula, pqrule)
self.assertFalse(event.insert)
self.assertIsNone(event.target)
# embedded modals
event = compile.parse('insert[execute[p(x)] :- q(x)]')
self.assertEqual(len(event), 1)
event = event[0]
rule = compile.parse1('execute[p(x)] :- q(x)')
self.assertEqual(event.formula, rule)
self.assertTrue(event.insert)
self.assertIsNone(event.target)
# rule-level modal with policy name
event = compile.parse('insert[p(x) :- q(x); "policy"]')
self.assertEqual(len(event), 1)
event = event[0]
self.assertEqual(event.formula, pqrule)
self.assertTrue(event.insert)
self.assertEqual(event.target, "policy")
def test_modal_execute(self):
# modal rule
rule = compile.parse('execute[p(x)] :- q(x)')
self.assertEqual(len(rule), 1)
rule = rule[0]
self.assertEqual(rule.head.table.modal, 'execute')
# modal rule with namespace
rule = compile.parse('execute[nova:disconnectNetwork(x)] :- q(x)')
self.assertEqual(len(rule), 1)
rule = rule[0]
self.assertEqual(rule.head.table.modal, 'execute')
# modal query
rule = compile.parse('execute[p(x)]')
self.assertEqual(len(rule), 1)
rule = rule[0]
self.assertEqual(rule.table.modal, 'execute')
def test_update_rules(self):
rule = compile.parse1('insert[p(x)] :- q(x)')
self.assertEqual(rule.head.table.modal, 'insert')
rule = compile.parse1('insert[p(x)] :- execute[q(x)]')
self.assertEqual(rule.head.table.modal, 'insert')
def test_modal_failures(self):
self.assertRaises(exception.PolicyException, compile.parse1,
'insert[p(x) :- q(x)')
self.assertRaises(exception.PolicyException, compile.parse1,
'insert[insert[p(x)] :- q(x)')
self.assertRaises(exception.PolicyException, compile.parse1,
'nonexistent[insert[p(x)] :- q(x)]')
self.assertRaises(exception.PolicyException, compile.parse1,
'insert[nonexistent[p(x)] :- q(x)]')
class TestColumnReferences(base.TestCase):
class SchemaWrapper (object):
"""Placeholder so we don't use the actual policy-engine for tests."""
def __init__(self, schema):
self.schema = schema
self.kind = datalogbase.DATASOURCE_POLICY_TYPE
def test_column_references_lowlevel(self):
"""Test column-references with low-level checks."""
# do the first one the painful way, to ensure the parser
# is doing something reasonable.
run = agnostic.Runtime()
code = ("p(x) :- nova:q(id=x)")
actual = run.parse(code)
self.assertEqual(len(actual), 1)
rule = actual[0]
self.assertEqual(len(rule.heads), 1)
self.assertEqual(rule.head.table.table, "p")
self.assertEqual(len(rule.head.arguments), 1)
self.assertEqual(rule.head.arguments[0].name, 'x')
self.assertEqual(len(rule.body), 1)
lit = rule.body[0]
self.assertFalse(lit.is_negated())
self.assertEqual(lit.table.table, "q")
self.assertIsNone(lit.table.modal)
self.assertEqual(lit.table.service, 'nova')
self.assertEqual(len(lit.arguments), 0)
self.assertEqual(len(lit.named_arguments), 1)
self.assertIn('id', lit.named_arguments)
self.assertEqual(lit.named_arguments['id'].name, 'x')
def test_hash(self):
x = set()
x.add(compile.parse1('p(x, y) :- nova:q(x, id=y)'))
x.add(compile.parse1('p(x, y) :- nova:q(x, id=y)'))
self.assertEqual(len(x), 1)
self.assertIn(compile.parse1('p(x, y) :- nova:q(x, id=y)'), x)
x.discard(compile.parse1('p(x, y) :- nova:q(x, id=y)'))
self.assertEqual(len(x), 0)
def test_lessthan(self):
x = compile.parse1('nova:q(x)')
y = compile.parse1('nova:q(x, id=y)')
self.assertLess(x, y)
x = compile.parse1('nova:q(x)')
y = compile.parse1('nova:q(x, id=y)')
self.assertGreaterEqual(y, x)
x = compile.parse1('nova:q(x, id=w)')
y = compile.parse1('nova:q(x, id=y)')
self.assertLess(x, y)
x = compile.parse1('nova:q(id=x)')
y = compile.parse1('nova:q(id=y)')
self.assertLess(x, y)
x = compile.parse1('nova:q(id=x)')
y = compile.parse1('nova:q(id=y, status=z)')
self.assertLess(x, y)
x = compile.parse1('p(x, y) :- nova:q(x, id=y)')
y = compile.parse1('p(x, y) :- nova:q(x, id=y, status=z)')
self.assertLess(x, y)
def test_column_references_parse_errors(self):
"""Test invalid column references occurring in a single atom."""
def check_err(code, errmsg, msg):
try:
compile.parse(code)
self.fail("Error should have been thrown but was not: " + msg)
except exception.PolicyException as e:
emsg = "Err message '{}' should include '{}'".format(
str(e), errmsg)
self.assertIn(errmsg, str(e), msg + ": " + emsg)
check_err(
'p(x) :- nova:q(id=x, status=x, id=y)',
'multiple values for column name id',
'Multiple values for column name')
check_err(
'p(x) :- nova:q(4=y, id=w, 4=z)',
'multiple values for column number 4',
'Multiple values for column number')
check_err(
'p(x) :- nova:q(x, 1=z, y)',
'positional parameter after a reference parameter',
'Positional parameter after reference parameter')
check_err(
'p(x) :- nova:q(x, y, 1=z)',
'1 is already provided by position arguments',
'Conflict between name and position')
def test_positional_args_padding_atom(self):
"""Test positional args padding on a single atom."""
def check_err(rule, errmsg, msg):
rule = compile.parse1(rule)
try:
rule.eliminate_column_references_and_pad_positional(theories)
self.fail("Failed to throw error {}".format(errmsg))
except (exception.PolicyException,
exception.IncompleteSchemaException) as e:
emsg = "Err messages '{}' should include '{}'".format(
str(e), errmsg)
self.assertIn(errmsg, str(e), msg + ": " + emsg)
def check(code, correct, msg, no_theory=False):
actual = compile.parse1(
code).eliminate_column_references_and_pad_positional(
{} if no_theory else theories)
eq = helper.datalog_same(str(actual), correct)
self.assertTrue(eq, msg)
run = agnostic.Runtime()
run.create_policy('nova')
schema = compile.Schema({'q': ('id', 'name', 'status')})
theories = {'nova': self.SchemaWrapper(schema)}
# Too few positional args
code = ("p(x) :- nova:q(w, y)")
correct = "p(x) :- nova:q(w, y, x3)"
check(code, correct, 'Too few positional args')
code = ("p(x) :- nova:q(w)")
correct = "p(x) :- nova:q(w, y, x3)"
check(code, correct, 'Too few positional args')
code = ("p(x) :- nova:q()")
correct = "p(x) :- nova:q(w, y, x3)"
check(code, correct, 'Too few (no) positional args')
# No schema provided, no change
code = ("p(x) :- nova:q(w, y)")
correct = "p(x) :- nova:q(w, y)"
check(code, correct, 'No schema provided', True)
code = ("p(x) :- nova:q(w, x, y, z)")
correct = "p(x) :- nova:q(w, x, y, z)"
check(code, correct, 'No schema provided', True)
def test_positional_args_padding_multiple_atoms(self):
"""Test positional args padding on a single atom."""
def check(code, correct, msg, no_theory=False):
actual = compile.parse1(
code).eliminate_column_references_and_pad_positional(
{} if no_theory else theories)
eq = helper.datalog_same(str(actual), correct)
self.assertTrue(eq, msg)
run = agnostic.Runtime()
run.create_policy('nova')
schema = compile.Schema({'q': ('id', 'name', 'status'),
'r': ('id', 'age', 'weight')})
theories = {'nova': self.SchemaWrapper(schema)}
# Multiple atoms, no shared variable
code = ("p(x) :- nova:q(x, y), nova:r(w)")
correct = "p(x) :- nova:q(x, y, z0), nova:r(w, y0, y1)"
check(code, correct, 'Multiple atoms')
# Multiple atoms, some shared variable
code = ("p(x) :- nova:q(x, y), nova:r(x)")
correct = "p(x) :- nova:q(x, y, z0), nova:r(x, y0, y1)"
check(code, correct, 'Multiple atoms')
# Multiple atoms, same table
code = ("p(x) :- nova:q(x, y), nova:q(x)")
correct = "p(x) :- nova:q(x, y, z0), nova:q(x, w0, w1)"
check(code, correct, 'Multiple atoms, same table')
def test_column_references_validation_errors(self):
"""Test invalid column references occurring in a single atom."""
schema = compile.Schema({'q': ('id', 'name', 'status'),
'r': ('id', 'age', 'weight')},
complete=True)
theories = {'nova': self.SchemaWrapper(schema)}
def check_err(rule, errmsg, msg):
rule = compile.parse1(rule)
try:
rule.eliminate_column_references_and_pad_positional(theories)
self.fail("Failed to throw error {}".format(errmsg))
except (exception.PolicyException,
exception.IncompleteSchemaException) as e:
emsg = "Err messages '{}' should include '{}'".format(
str(e), errmsg)
self.assertIn(errmsg, str(e), msg + ": " + emsg)
check_err(
'p(x) :- nova:missing(id=x)',
'uses unknown table missing',
'Unknown table')
check_err(
'p(x) :- nova:q(id=x, birthday=y)',
'column name birthday does not exist',
'Unknown column name')
check_err(
'p(x) :- nova:q(4=y)',
'column index 4 is too large',
'Large column number')
check_err(
'p(x) :- nova:q(id=x, 0=y)',
'index 0 references column id, which is also referenced by name',
'Conflict between name and number references')
check_err(
'p(x) :- nova:q(x, y, id=z)',
'already provided by position',
'Conflict between name and position')
theories = {}
check_err(
'p(x) :- nova:missing(id=x)',
'schema is unknown',
'Missing schema')
def test_column_references_atom(self):
"""Test column references occurring in a single atom in a rule."""
def check(code, correct, msg):
actual = compile.parse1(
code).eliminate_column_references_and_pad_positional(theories)
eq = helper.datalog_same(str(actual), correct)
self.assertTrue(eq, msg)
run = agnostic.Runtime()
run.create_policy('nova')
schema = compile.Schema({'q': ('id', 'name', 'status')})
theories = {'nova': self.SchemaWrapper(schema)}
# Multiple column names
code = ("p(x) :- nova:q(id=x, status=y)")
correct = "p(x) :- nova:q(x, w, y)"
check(code, correct, 'Multiple column names')
# Multiple column numbers
code = ("p(x) :- nova:q(0=x, 1=y, 2=z)")
correct = "p(x) :- nova:q(x, y, z)"
check(code, correct, 'Multiple column numbers')
# Mix column names and numbers
code = ("p(x) :- nova:q(id=x, 2=y)")
correct = "p(x) :- nova:q(x, w, y)"
check(code, correct, 'Mix names and numbers')
# Object constants
code = ("p(x) :- nova:q(id=3, 2=2)")
correct = "p(x) :- nova:q(3, w, 2)"
check(code, correct, 'Object constants')
# Out of order
code = ("p(x, y) :- nova:q(status=y, id=x)")
correct = "p(x, y) :- nova:q(x, z, y)"
check(code, correct, 'Out of order')
# Out of order with numbers
code = ("p(x, y) :- nova:q(1=y, 0=x)")
correct = "p(x, y) :- nova:q(x, y, z)"
check(code, correct, 'Out of order with numbers')
# Positional plus named
code = ("p(x, y) :- nova:q(x, status=y)")
correct = "p(x, y) :- nova:q(x, z, y)"
check(code, correct, 'Positional plus named')
# Positional plus named 2
code = ("p(x, y, z) :- nova:q(x, y, 2=z)")
correct = "p(x, y, z) :- nova:q(x, y, z)"
check(code, correct, 'Positional plus named 2')
# Pure positional (different since we are providing schema)
code = ("p(x, y, z) :- nova:q(x, y, z)")
correct = "p(x, y, z) :- nova:q(x, y, z)"
check(code, correct, 'Pure positional')
# Pure positional (without schema)
code = ("p(x) :- nova:q(x, y, z)")
run.delete_policy('nova')
correct = "p(x) :- nova:q(x, y, z)"
check(code, correct, 'Pure positional without schema')
# Too few pure positional EKCS
def test_column_references_multiple_atoms(self):
"""Test column references occurring in multiple atoms in a rule."""
def check(code, correct, msg):
actual = compile.parse1(
code).eliminate_column_references_and_pad_positional(theories)
eq = helper.datalog_same(str(actual), correct)
self.assertTrue(eq, msg)
run = agnostic.Runtime()
run.create_policy('nova')
schema = compile.Schema({'q': ('id', 'name', 'status'),
'r': ('id', 'age', 'weight')})
theories = {'nova': self.SchemaWrapper(schema)}
# Multiple atoms
code = ("p(x) :- nova:q(id=x, 2=y), nova:r(id=x)")
correct = "p(x) :- nova:q(x, x0, y), nova:r(x, y0, y1)"
check(code, correct, 'Multiple atoms')
# Multiple atoms sharing column name but different variables
code = ("p(x) :- nova:q(id=x), nova:r(id=y)")
correct = "p(x) :- nova:q(x, x0, x1), nova:r(y, y0, y1)"
check(code, correct, 'Multiple atoms shared column name')
# Multiple atoms, same table
code = ("p(x) :- nova:q(id=x, 2=y), nova:q(id=x)")
correct = "p(x) :- nova:q(x, x0, y), nova:q(x, y0, y1)"
check(code, correct, 'Multiple atoms, same table')
def test_eliminate_column_references_body_order(self):
"""Test eliminate_column_references preserves order insensitivity."""
run = agnostic.Runtime()
run.create_policy('nova')
schema = compile.Schema({'q': ('id', 'name', 'status'),
'r': ('id', 'age', 'weight')})
theories = {'nova': self.SchemaWrapper(schema)}
rule1 = compile.parse1(
"p(x) :- nova:q(id=x, 2=y), nova:r(id=x)"
).eliminate_column_references_and_pad_positional(theories)
rule2 = compile.parse1(
"p(x) :- nova:r(id=x), nova:q(id=x, 2=y)"
).eliminate_column_references_and_pad_positional(theories)
self.assertEqual(rule1, rule2, 'eliminate_column_references failed to '
'preserve order insensitivity')
class TestCompiler(base.TestCase):
def test_type_checkers(self):
"""Test the type checkers, e.g. is_atom, is_rule."""
atom = compile.Literal("p", [])
atom2 = compile.Literal("q", [])
atom3 = compile.Literal("r", [])
lit = compile.Literal("r", [], negated=True)
regular_rule = compile.Rule(atom, [atom2, atom3])
regular_rule2 = compile.Rule(atom, [lit, atom2])
multi_rule = compile.Rule([atom, atom2], [atom3])
fake_rule = compile.Rule([atom, 1], [atom2])
fake_rule2 = compile.Rule(atom, [atom2, 1])
# is_atom
self.assertTrue(compile.is_atom(atom))
self.assertTrue(compile.is_atom(atom2))
self.assertTrue(compile.is_atom(atom3))
self.assertFalse(compile.is_atom(lit))
self.assertFalse(compile.is_atom(regular_rule))
self.assertFalse(compile.is_atom(regular_rule2))
self.assertFalse(compile.is_atom(multi_rule))
self.assertFalse(compile.is_atom(fake_rule))
self.assertFalse(compile.is_atom(fake_rule2))
self.assertFalse(compile.is_atom("a string"))
# is_literal
self.assertTrue(compile.is_literal(atom))
self.assertTrue(compile.is_literal(atom2))
self.assertTrue(compile.is_literal(atom3))
self.assertTrue(compile.is_literal(lit))
self.assertFalse(compile.is_literal(regular_rule))
self.assertFalse(compile.is_literal(regular_rule2))
self.assertFalse(compile.is_literal(multi_rule))
self.assertFalse(compile.is_literal(fake_rule))
self.assertFalse(compile.is_literal(fake_rule2))
self.assertFalse(compile.is_literal("a string"))
# is_regular_rule
self.assertFalse(compile.is_regular_rule(atom))
self.assertFalse(compile.is_regular_rule(atom2))
self.assertFalse(compile.is_regular_rule(atom3))
self.assertFalse(compile.is_regular_rule(lit))
self.assertTrue(compile.is_regular_rule(regular_rule))
self.assertTrue(compile.is_regular_rule(regular_rule2))
self.assertFalse(compile.is_regular_rule(multi_rule))
self.assertFalse(compile.is_regular_rule(fake_rule))
self.assertFalse(compile.is_regular_rule(fake_rule2))
self.assertFalse(compile.is_regular_rule("a string"))
# is_multi_rule
self.assertFalse(compile.is_multi_rule(atom))
self.assertFalse(compile.is_multi_rule(atom2))
self.assertFalse(compile.is_multi_rule(atom3))
self.assertFalse(compile.is_multi_rule(lit))
self.assertFalse(compile.is_multi_rule(regular_rule))
self.assertFalse(compile.is_multi_rule(regular_rule2))
self.assertTrue(compile.is_multi_rule(multi_rule))
self.assertFalse(compile.is_multi_rule(fake_rule))
self.assertFalse(compile.is_multi_rule(fake_rule2))
self.assertFalse(compile.is_multi_rule("a string"))
# is_rule
self.assertFalse(compile.is_rule(atom))
self.assertFalse(compile.is_rule(atom2))
self.assertFalse(compile.is_rule(atom3))
self.assertFalse(compile.is_rule(lit))
self.assertTrue(compile.is_rule(regular_rule))
self.assertTrue(compile.is_rule(regular_rule2))
self.assertTrue(compile.is_rule(multi_rule))
self.assertFalse(compile.is_rule(fake_rule))
self.assertFalse(compile.is_rule(fake_rule2))
self.assertFalse(compile.is_rule("a string"))
# is_datalog
self.assertTrue(compile.is_datalog(atom))
self.assertTrue(compile.is_datalog(atom2))
self.assertTrue(compile.is_datalog(atom3))
self.assertFalse(compile.is_datalog(lit))
self.assertTrue(compile.is_datalog(regular_rule))
self.assertTrue(compile.is_datalog(regular_rule2))
self.assertFalse(compile.is_datalog(multi_rule))
self.assertFalse(compile.is_datalog(fake_rule))
self.assertFalse(compile.is_datalog(fake_rule2))
self.assertFalse(compile.is_datalog("a string"))
# is_extended_datalog
self.assertTrue(compile.is_extended_datalog(atom))
self.assertTrue(compile.is_extended_datalog(atom2))
self.assertTrue(compile.is_extended_datalog(atom3))
self.assertFalse(compile.is_extended_datalog(lit))
self.assertTrue(compile.is_extended_datalog(regular_rule))
self.assertTrue(compile.is_extended_datalog(regular_rule2))
self.assertTrue(compile.is_extended_datalog(multi_rule))
self.assertFalse(compile.is_extended_datalog(fake_rule))
self.assertFalse(compile.is_extended_datalog(fake_rule2))
self.assertFalse(compile.is_extended_datalog("a string"))
def test_rule_validation(self):
"""Test that rules are properly validated."""
# unsafe var in head
rule = compile.parse1('p(x) :- q(y)')
errs = compile.rule_errors(rule)
self.assertEqual(len(errs), 1)
# multiple unsafe vars in head
rule = compile.parse1('p(x,y,z) :- q(w)')
errs = compile.rule_errors(rule)
self.assertEqual(len(set([str(x) for x in errs])), 3)
# unsafe var in negtative literal:
rule = compile.parse1('p(x) :- q(x), not r(y)')
errs = compile.rule_errors(rule)
self.assertEqual(len(set([str(x) for x in errs])), 1)
# unsafe var in negative literal: ensure head doesn't make safe
rule = compile.parse1('p(x) :- not q(x)')
errs = compile.rule_errors(rule)
self.assertEqual(len(set([str(x) for x in errs])), 1)
# unsafe var in negative literal:
# ensure partial safety not total safety
rule = compile.parse1('p(x) :- q(x), not r(x,y)')
errs = compile.rule_errors(rule)
self.assertEqual(len(set([str(x) for x in errs])), 1)
# unsafe var in negative literal: ensure double negs doesn't make safe
rule = compile.parse1('p(x) :- q(x), not r(x,y), not s(x, y)')
errs = compile.rule_errors(rule)
self.assertEqual(len(set([str(x) for x in errs])), 1)
# multiple heads with modal
rule = compile.parse1('execute[p(x)], r(x) :- q(x)')
errs = compile.rule_errors(rule)
self.assertEqual(len(set([str(x) for x in errs])), 1)
# modal in body
rule = compile.parse1('p(x) :- execute[q(x)]')
errs = compile.rule_errors(rule)
self.assertEqual(len(set([str(x) for x in errs])), 1)
# keywords
rule = compile.parse1('equal(x) :- q(x)')
errs = compile.rule_errors(rule)
self.assertEqual(len(set([str(x) for x in errs])), 1)
def test_module_schemas(self):
"""Test that rules are properly checked against module schemas."""
run = agnostic.Runtime()
run.create_policy('mod1')
run.create_policy('mod2')
run.set_schema('mod1', compile.Schema({'p': (1, 2, 3), 'q': (1,)}),
complete=True)
run.set_schema('mod2', compile.Schema({'p': (1,), 'q': (1, 2)}),
complete=True)
def check_err(code_string, theory, emsg, msg, f=compile.rule_errors):
rule = compile.parse1(code_string)
errs = f(rule, run.theory, theory)
self.assertTrue(any(emsg in str(err) for err in errs),
msg + ":: Failed to find error message '" + emsg +
"' in: " + ";".join(str(e) for e in errs))
# no errors
rule = compile.parse1('p(x) :- q(x), mod1:p(x, y, z), mod2:q(x, y), '
'mod1:q(t), mod2:p(t)')
errs = compile.rule_errors(rule, run.theory)
self.assertEqual(len(errs), 0, "Should not have found any errors")
# unknown table within module
check_err('p(x) :- q(x), mod1:r(x), r(x)',
'mod3',
'unknown table',
'Unknown table for rule')
# wrong number of arguments
check_err('p(x) :- q(x), mod1:p(x,y,z,w), r(x)',
'mod3',
'exactly 3 arguments are permitted',
'Wrong number of arguments for rule')
# same tests for an atom
# no errors
atom = compile.parse1('p(1, 2, 2)')
errs = compile.fact_errors(atom, run.theory, 'mod1')
self.assertEqual(len(errs), 0, "Should not have found any errors")
# unknown table within module
check_err('r(1)',
'mod1',
'unknown table',
'Unknown table for atom',
f=compile.fact_errors)
# wrong number of arguments
check_err('p(1, 2, 3, 4)',
'mod1',
'exactly 3 arguments are permitted',
'Wrong number of arguments for atom',
f=compile.fact_errors)
# schema update
schema = compile.Schema()
rule1 = compile.parse1('p(x) :- q(x, y)')
change1 = schema.update(rule1.head, True)
rule2 = compile.parse1('p(x) :- r(x, y)')
change2 = schema.update(rule2.head, True)
self.assertEqual(schema.count['p'], 2)
schema.revert(change2)
self.assertEqual(schema.count['p'], 1)
schema.revert(change1)
self.assertNotIn('p', schema.count)
schema.update(rule1.head, True)
schema.update(rule2.head, True)
change1 = schema.update(rule1.head, False)
change2 = schema.update(rule2.head, False)
self.assertNotIn('p', schema.count)
schema.revert(change2)
self.assertEqual(schema.count['p'], 1)
schema.revert(change1)
self.assertEqual(schema.count['p'], 2)
def test_rule_recursion(self):
rules = compile.parse('p(x) :- q(x), r(x) q(x) :- r(x) r(x) :- t(x)')
self.assertFalse(compile.is_recursive(rules))
rules = compile.parse('p(x) :- p(x)')
self.assertTrue(compile.is_recursive(rules))
rules = compile.parse('p(x) :- q(x) q(x) :- r(x) r(x) :- p(x)')
self.assertTrue(compile.is_recursive(rules))
rules = compile.parse('p(x) :- q(x) q(x) :- not p(x)')
self.assertTrue(compile.is_recursive(rules))
rules = compile.parse('p(x) :- q(x), s(x) q(x) :- t(x) s(x) :- p(x)')
self.assertTrue(compile.is_recursive(rules))
def test_rule_stratification(self):
rules = compile.parse('p(x) :- not q(x)')
self.assertTrue(compile.is_stratified(rules))
rules = compile.parse('p(x) :- p(x)')
self.assertTrue(compile.is_stratified(rules))
rules = compile.parse('p(x) :- q(x) q(x) :- p(x)')
self.assertTrue(compile.is_stratified(rules))
rules = compile.parse('p(x) :- q(x) q(x) :- not r(x)')
self.assertTrue(compile.is_stratified(rules))
rules = compile.parse('p(x) :- not q(x) q(x) :- not r(x)')
self.assertTrue(compile.is_stratified(rules))
rules = compile.parse('p(x) :- not q(x) '
'q(x) :- not r(x) '
'r(x) :- not s(x)')
self.assertTrue(compile.is_stratified(rules))
rules = compile.parse('p(x) :- q(x), r(x) '
'q(x) :- not t(x) '
'r(x) :- not s(x)')
self.assertTrue(compile.is_stratified(rules))
rules = compile.parse('p(x) :- not p(x)')
self.assertFalse(compile.is_stratified(rules))
rules = compile.parse('p(x) :- q(x) q(x) :- not p(x)')
self.assertFalse(compile.is_stratified(rules))
rules = compile.parse('p(x) :- q(x),r(x) r(x) :- not p(x)')
self.assertFalse(compile.is_stratified(rules))
rules = compile.parse('p(x) :- q(x), r(x) '
'q(x) :- not t(x) '
'r(x) :- not s(x) '
't(x) :- p(x)')
self.assertFalse(compile.is_stratified(rules))
class TestDependencyGraph(base.TestCase):
def test_nodes_edges(self):
g = compile.RuleDependencyGraph()
# first insertion
g.formula_insert(compile.parse1('p(x), q(x) :- r(x), s(x)'))
self.assertTrue(g.node_in('p'))
self.assertTrue(g.node_in('q'))
self.assertTrue(g.node_in('r'))
self.assertTrue(g.node_in('s'))
self.assertTrue(g.edge_in('p', 'r', False))
self.assertTrue(g.edge_in('p', 's', False))
self.assertTrue(g.edge_in('q', 'r', False))
self.assertTrue(g.edge_in('q', 's', False))
self.assertFalse(g.has_cycle())
# another insertion
g.formula_insert(compile.parse1('r(x) :- t(x)'))
self.assertTrue(g.node_in('p'))
self.assertTrue(g.node_in('q'))
self.assertTrue(g.node_in('r'))
self.assertTrue(g.node_in('s'))
self.assertTrue(g.edge_in('p', 'r', False))
self.assertTrue(g.edge_in('p', 's', False))
self.assertTrue(g.edge_in('q', 'r', False))
self.assertTrue(g.edge_in('q', 's', False))
self.assertTrue(g.node_in('t'))
self.assertTrue(g.edge_in('r', 't', False))
self.assertFalse(g.has_cycle())
# 3rd insertion, creating a cycle
g.formula_insert(compile.parse1('t(x) :- p(x)'))
self.assertTrue(g.edge_in('t', 'p', False))
self.assertTrue(g.has_cycle())
# deletion
g.formula_delete(compile.parse1('p(x), q(x) :- r(x), s(x)'))
self.assertTrue(g.node_in('p'))
self.assertTrue(g.node_in('r'))
self.assertTrue(g.node_in('t'))
self.assertTrue(g.edge_in('r', 't', False))
self.assertTrue(g.edge_in('t', 'p', False))
self.assertFalse(g.has_cycle())
# double-insertion
g.formula_insert(compile.parse1('p(x) :- q(x), r(x)'))
g.formula_insert(compile.parse1('p(1) :- r(1)'))
self.assertTrue(g.has_cycle())
# deletion -- checking for bag semantics
g.formula_delete(compile.parse1('p(1) :- r(1)'))
self.assertTrue(g.has_cycle())
g.formula_delete(compile.parse1('p(x) :- q(x), r(x)'))
self.assertFalse(g.has_cycle())
# update
g.formula_update([
compile.Event(compile.parse1('a(x) :- b(x)')),
compile.Event(compile.parse1('b(x) :- c(x)')),
compile.Event(compile.parse1('c(x) :- a(x)'))])
self.assertTrue(g.has_cycle())
g.formula_update([
compile.Event(compile.parse1('c(x) :- a(x)'), insert=False)])
self.assertFalse(g.has_cycle())
# cycle enumeration
g = compile.RuleDependencyGraph()
g.formula_insert(compile.parse1('p(x) :- q(x), r(x)'))
g.formula_insert(compile.parse1('q(x) :- t(x), not s(x)'))
g.formula_insert(compile.parse1('t(x) :- t(x), p(x), q(x)'))
self.assertTrue(g.has_cycle())
self.assertEqual(len(g.cycles()), 3)
expected_cycle_set = set([
utility.Cycle(['p', 'q', 't', 'p']),
utility.Cycle(['q', 't', 'q']),
utility.Cycle(['t', 't'])
])
actual_cycle_set = set([
utility.Cycle(g.cycles()[0]),
utility.Cycle(g.cycles()[1]),
utility.Cycle(g.cycles()[2])
])
self.assertEqual(expected_cycle_set, actual_cycle_set)
def test_dependencies(self):
g = compile.RuleDependencyGraph()
g.formula_insert(compile.parse1('p(x) :- q(x), r(x)'))
g.formula_insert(compile.parse1('q(x) :- t(x), not s(x)'))
self.assertEqual(g.dependencies('p'), set(['p', 'q', 'r', 't', 's']))
self.assertEqual(g.dependencies('q'), set(['q', 't', 's']))
self.assertEqual(g.dependencies('r'), set(['r']))
self.assertEqual(g.dependencies('t'), set(['t']))
self.assertEqual(g.dependencies('s'), set(['s']))
# cyclic case
g = compile.RuleDependencyGraph()
g.formula_insert(compile.parse1('p(x) :- q(x), r(x)'))
g.formula_insert(compile.parse1('q(x) :- t(x), not s(x)'))
g.formula_insert(compile.parse1('t(x) :- t(x), p(x), q(x)'))
self.assertEqual(g.dependencies('p'), set(['p', 'q', 'r', 't', 's']))
self.assertEqual(g.dependencies('q'), set(['p', 'q', 'r', 't', 's']))
self.assertEqual(g.dependencies('r'), set(['r']))
self.assertEqual(g.dependencies('t'), set(['p', 'q', 'r', 't', 's']))
self.assertEqual(g.dependencies('s'), set(['s']))
g = compile.RuleDependencyGraph(head_to_body=False)
g.formula_insert(compile.parse1('p(x) :- q(x), r(x)'))
g.formula_insert(compile.parse1('q(x) :- t(x), not s(x)'))
self.assertEqual(g.dependencies('p'), set(['p']))
self.assertEqual(g.dependencies('q'), set(['q', 'p']))
self.assertEqual(g.dependencies('r'), set(['r', 'p']))
self.assertEqual(g.dependencies('t'), set(['t', 'q', 'p']))
self.assertEqual(g.dependencies('s'), set(['s', 'q', 'p']))
def test_modal_index(self):
m = analysis.ModalIndex()
m.add('execute', 'p')
self.assertEqual(set(m.tables('execute')), set(['p']))
m.add('execute', 'q')
self.assertEqual(set(m.tables('execute')), set(['p', 'q']))
m.remove('execute', 'q')
self.assertEqual(set(m.tables('execute')), set(['p']))
m.add('execute', 'q')
m.add('execute', 'q')
m.remove('execute', 'q')
self.assertEqual(set(m.tables('execute')), set(['p', 'q']))
m.remove('execute', 'q')
self.assertEqual(set(m.tables('execute')), set(['p']))
m.add('foo', 'p')
self.assertEqual(set(m.tables('foo')), set(['p']))
self.assertEqual(set(m.tables('bar')), set())
self.assertEqual(set(m.tables('execute')), set(['p']))
def test_modal_index_composition(self):
m = analysis.ModalIndex()
m.add('execute', 'p')
m.add('execute', 'q')
m.add('execute', 'r')
m.add('foo', 'r')
m.add('foo', 's')
n = analysis.ModalIndex()
n.add('execute', 'p')
n.add('execute', 'alpha')
n.add('foo', 'r')
n.add('bar', 'beta')
n_plus_m = analysis.ModalIndex()
n_plus_m.add('execute', 'p')
n_plus_m.add('execute', 'p')
n_plus_m.add('execute', 'q')
n_plus_m.add('execute', 'r')
n_plus_m.add('execute', 'alpha')
n_plus_m.add('foo', 'r')
n_plus_m.add('foo', 's')
n_plus_m.add('foo', 'r')
n_plus_m.add('bar', 'beta')
m_copy = copy.copy(m)
m_copy += n
self.assertEqual(m_copy, n_plus_m)
m_minus_n = analysis.ModalIndex()
m_minus_n.add('execute', 'q')
m_minus_n.add('execute', 'r')
m_minus_n.add('foo', 's')
m_copy = copy.copy(m)
m_copy -= n
self.assertEqual(m_copy, m_minus_n)
def test_modals(self):
g = compile.RuleDependencyGraph()
g.formula_insert(compile.parse1('p(x) :- q(x)'))
g.formula_insert(compile.parse1('q(x) :- r(x)'))
g.formula_insert(compile.parse1('execute[p(x)] :- q(x)'))
chgs = g.formula_insert(compile.parse1('execute[r(x)] :- q(x)'))
g.formula_insert(compile.parse1('insert[s(x)] :- q(x)'))
self.assertEqual(set(g.tables_with_modal('execute')), set(['p', 'r']))
g.undo_changes(chgs)
self.assertEqual(set(g.tables_with_modal('execute')), set(['p']))
chgs = g.formula_delete(compile.parse1('execute[p(x)] :- q(x)'))
self.assertEqual(set(g.tables_with_modal('execute')), set())
g.undo_changes(chgs)
self.assertEqual(set(g.tables_with_modal('execute')), set(['p']))
class TestSchema(base.TestCase):
def test_schema_columns(self):
test_schema = compile.Schema({
'p': (1, 2, 3),
'q': ({'name': 'a', 'type': 'Str'},
{'name': 'b', 'nullable': False})},
complete=True)
self.assertEqual(test_schema.columns('p'),
[1, 2, 3])
self.assertEqual(test_schema.columns('q'),
['a', 'b'])
self.assertEqual([(data_types.Scalar, True), (data_types.Scalar, True),
(data_types.Scalar, True)],
test_schema.types('p'))
self.assertEqual([(data_types.Str, True), (data_types.Scalar, False)],
test_schema.types('q'))
| [
"congress.datalog.compile.is_recursive",
"congress.datalog.compile.Schema",
"congress.datalog.compile.parse1",
"congress.datalog.compile.Event",
"congress.datalog.compile.is_literal",
"congress.datalog.compile.is_multi_rule",
"congress.datalog.compile.is_stratified",
"congress.datalog.utility.Cycle",
"copy.copy",
"congress.datalog.compile.Rule",
"congress.datalog.analysis.ModalIndex",
"congress.datalog.compile.is_rule",
"congress.datalog.compile.rule_errors",
"congress.datalog.compile.is_datalog",
"congress.policy_engines.agnostic.Runtime",
"congress.datalog.compile.is_extended_datalog",
"congress.datalog.compile.Literal",
"congress.datalog.compile.RuleDependencyGraph",
"congress.datalog.compile.parse",
"congress.datalog.compile.fact_errors",
"congress.datalog.compile.is_regular_rule",
"congress.datalog.compile.is_atom"
] | [((1218, 1240), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(1)"""'], {}), "('p(1)')\n", (1232, 1240), False, 'from congress.datalog import compile\n'), ((1383, 1410), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""nova:p(1)"""'], {}), "('nova:p(1)')\n", (1397, 1410), False, 'from congress.datalog import compile\n'), ((1560, 1596), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""execute[nova:p(1)]"""'], {}), "('execute[nova:p(1)]')\n", (1574, 1596), False, 'from congress.datalog import compile\n'), ((1858, 1896), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x), s(x,y)"""'], {}), "('p(x) :- q(x), s(x,y)')\n", (1872, 1896), False, 'from congress.datalog import compile\n'), ((1926, 1964), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x), s(x,y)"""'], {}), "('p(x) :- q(x), s(x,y)')\n", (1940, 1964), False, 'from congress.datalog import compile\n'), ((2070, 2100), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x)"""'], {}), "('p(x) :- q(x)')\n", (2084, 2100), False, 'from congress.datalog import compile\n'), ((2114, 2144), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x)"""'], {}), "('p(x) :- q(x)')\n", (2128, 2144), False, 'from congress.datalog import compile\n'), ((2158, 2212), 'congress.datalog.compile.Event', 'compile.Event', ([], {'formula': 'r1', 'target': '"""alice"""', 'insert': '(True)'}), "(formula=r1, target='alice', insert=True)\n", (2171, 2212), False, 'from congress.datalog import compile\n'), ((2226, 2280), 'congress.datalog.compile.Event', 'compile.Event', ([], {'formula': 'r2', 'target': '"""alice"""', 'insert': '(True)'}), "(formula=r2, target='alice', insert=True)\n", (2239, 2280), False, 'from congress.datalog import compile\n'), ((2380, 2417), 'congress.datalog.compile.parse', 'compile.parse', (['"""insert[p(1) :- true]"""'], {}), "('insert[p(1) :- true]')\n", (2393, 2417), False, 'from congress.datalog import compile\n'), ((2498, 2528), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(1) :- true"""'], {}), "('p(1) :- true')\n", (2512, 2528), False, 'from congress.datalog import compile\n'), ((2687, 2724), 'congress.datalog.compile.parse', 'compile.parse', (['"""delete[p(1) :- true]"""'], {}), "('delete[p(1) :- true]')\n", (2700, 2724), False, 'from congress.datalog import compile\n'), ((2805, 2835), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(1) :- true"""'], {}), "('p(1) :- true')\n", (2819, 2835), False, 'from congress.datalog import compile\n'), ((3007, 3054), 'congress.datalog.compile.parse', 'compile.parse', (['"""insert[p(1) :- true; "policy"]"""'], {}), '(\'insert[p(1) :- true; "policy"]\')\n', (3020, 3054), False, 'from congress.datalog import compile\n'), ((3135, 3165), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(1) :- true"""'], {}), "('p(1) :- true')\n", (3149, 3165), False, 'from congress.datalog import compile\n'), ((3421, 3451), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x)"""'], {}), "('p(x) :- q(x)')\n", (3435, 3451), False, 'from congress.datalog import compile\n'), ((3510, 3547), 'congress.datalog.compile.parse', 'compile.parse', (['"""insert[p(x) :- q(x)]"""'], {}), "('insert[p(x) :- q(x)]')\n", (3523, 3547), False, 'from congress.datalog import compile\n'), ((3795, 3832), 'congress.datalog.compile.parse', 'compile.parse', (['"""delete[p(x) :- q(x)]"""'], {}), "('delete[p(x) :- q(x)]')\n", (3808, 3832), False, 'from congress.datalog import compile\n'), ((4068, 4114), 'congress.datalog.compile.parse', 'compile.parse', (['"""insert[execute[p(x)] :- q(x)]"""'], {}), "('insert[execute[p(x)] :- q(x)]')\n", (4081, 4114), False, 'from congress.datalog import compile\n'), ((4195, 4234), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""execute[p(x)] :- q(x)"""'], {}), "('execute[p(x)] :- q(x)')\n", (4209, 4234), False, 'from congress.datalog import compile\n'), ((4420, 4467), 'congress.datalog.compile.parse', 'compile.parse', (['"""insert[p(x) :- q(x); "policy"]"""'], {}), '(\'insert[p(x) :- q(x); "policy"]\')\n', (4433, 4467), False, 'from congress.datalog import compile\n'), ((4739, 4777), 'congress.datalog.compile.parse', 'compile.parse', (['"""execute[p(x)] :- q(x)"""'], {}), "('execute[p(x)] :- q(x)')\n", (4752, 4777), False, 'from congress.datalog import compile\n'), ((4951, 5010), 'congress.datalog.compile.parse', 'compile.parse', (['"""execute[nova:disconnectNetwork(x)] :- q(x)"""'], {}), "('execute[nova:disconnectNetwork(x)] :- q(x)')\n", (4964, 5010), False, 'from congress.datalog import compile\n'), ((5170, 5200), 'congress.datalog.compile.parse', 'compile.parse', (['"""execute[p(x)]"""'], {}), "('execute[p(x)]')\n", (5183, 5200), False, 'from congress.datalog import compile\n'), ((5366, 5404), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""insert[p(x)] :- q(x)"""'], {}), "('insert[p(x)] :- q(x)')\n", (5380, 5404), False, 'from congress.datalog import compile\n'), ((5479, 5526), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""insert[p(x)] :- execute[q(x)]"""'], {}), "('insert[p(x)] :- execute[q(x)]')\n", (5493, 5526), False, 'from congress.datalog import compile\n'), ((6645, 6663), 'congress.policy_engines.agnostic.Runtime', 'agnostic.Runtime', ([], {}), '()\n', (6661, 6663), False, 'from congress.policy_engines import agnostic\n'), ((7896, 7923), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""nova:q(x)"""'], {}), "('nova:q(x)')\n", (7910, 7923), False, 'from congress.datalog import compile\n'), ((7936, 7969), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""nova:q(x, id=y)"""'], {}), "('nova:q(x, id=y)')\n", (7950, 7969), False, 'from congress.datalog import compile\n'), ((8013, 8040), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""nova:q(x)"""'], {}), "('nova:q(x)')\n", (8027, 8040), False, 'from congress.datalog import compile\n'), ((8053, 8086), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""nova:q(x, id=y)"""'], {}), "('nova:q(x, id=y)')\n", (8067, 8086), False, 'from congress.datalog import compile\n'), ((8138, 8171), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""nova:q(x, id=w)"""'], {}), "('nova:q(x, id=w)')\n", (8152, 8171), False, 'from congress.datalog import compile\n'), ((8184, 8217), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""nova:q(x, id=y)"""'], {}), "('nova:q(x, id=y)')\n", (8198, 8217), False, 'from congress.datalog import compile\n'), ((8261, 8291), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""nova:q(id=x)"""'], {}), "('nova:q(id=x)')\n", (8275, 8291), False, 'from congress.datalog import compile\n'), ((8304, 8334), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""nova:q(id=y)"""'], {}), "('nova:q(id=y)')\n", (8318, 8334), False, 'from congress.datalog import compile\n'), ((8378, 8408), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""nova:q(id=x)"""'], {}), "('nova:q(id=x)')\n", (8392, 8408), False, 'from congress.datalog import compile\n'), ((8421, 8461), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""nova:q(id=y, status=z)"""'], {}), "('nova:q(id=y, status=z)')\n", (8435, 8461), False, 'from congress.datalog import compile\n'), ((8505, 8549), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x, y) :- nova:q(x, id=y)"""'], {}), "('p(x, y) :- nova:q(x, id=y)')\n", (8519, 8549), False, 'from congress.datalog import compile\n'), ((8562, 8616), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x, y) :- nova:q(x, id=y, status=z)"""'], {}), "('p(x, y) :- nova:q(x, id=y, status=z)')\n", (8576, 8616), False, 'from congress.datalog import compile\n'), ((10825, 10843), 'congress.policy_engines.agnostic.Runtime', 'agnostic.Runtime', ([], {}), '()\n', (10841, 10843), False, 'from congress.policy_engines import agnostic\n'), ((10895, 10942), 'congress.datalog.compile.Schema', 'compile.Schema', (["{'q': ('id', 'name', 'status')}"], {}), "({'q': ('id', 'name', 'status')})\n", (10909, 10942), False, 'from congress.datalog import compile\n'), ((12233, 12251), 'congress.policy_engines.agnostic.Runtime', 'agnostic.Runtime', ([], {}), '()\n', (12249, 12251), False, 'from congress.policy_engines import agnostic\n'), ((12303, 12380), 'congress.datalog.compile.Schema', 'compile.Schema', (["{'q': ('id', 'name', 'status'), 'r': ('id', 'age', 'weight')}"], {}), "({'q': ('id', 'name', 'status'), 'r': ('id', 'age', 'weight')})\n", (12317, 12380), False, 'from congress.datalog import compile\n'), ((13247, 13344), 'congress.datalog.compile.Schema', 'compile.Schema', (["{'q': ('id', 'name', 'status'), 'r': ('id', 'age', 'weight')}"], {'complete': '(True)'}), "({'q': ('id', 'name', 'status'), 'r': ('id', 'age', 'weight')\n }, complete=True)\n", (13261, 13344), False, 'from congress.datalog import compile\n'), ((15296, 15314), 'congress.policy_engines.agnostic.Runtime', 'agnostic.Runtime', ([], {}), '()\n', (15312, 15314), False, 'from congress.policy_engines import agnostic\n'), ((15366, 15413), 'congress.datalog.compile.Schema', 'compile.Schema', (["{'q': ('id', 'name', 'status')}"], {}), "({'q': ('id', 'name', 'status')})\n", (15380, 15413), False, 'from congress.datalog import compile\n'), ((17799, 17817), 'congress.policy_engines.agnostic.Runtime', 'agnostic.Runtime', ([], {}), '()\n', (17815, 17817), False, 'from congress.policy_engines import agnostic\n'), ((17869, 17946), 'congress.datalog.compile.Schema', 'compile.Schema', (["{'q': ('id', 'name', 'status'), 'r': ('id', 'age', 'weight')}"], {}), "({'q': ('id', 'name', 'status'), 'r': ('id', 'age', 'weight')})\n", (17883, 17946), False, 'from congress.datalog import compile\n'), ((18859, 18877), 'congress.policy_engines.agnostic.Runtime', 'agnostic.Runtime', ([], {}), '()\n', (18875, 18877), False, 'from congress.policy_engines import agnostic\n'), ((18929, 19006), 'congress.datalog.compile.Schema', 'compile.Schema', (["{'q': ('id', 'name', 'status'), 'r': ('id', 'age', 'weight')}"], {}), "({'q': ('id', 'name', 'status'), 'r': ('id', 'age', 'weight')})\n", (18943, 19006), False, 'from congress.datalog import compile\n'), ((19710, 19734), 'congress.datalog.compile.Literal', 'compile.Literal', (['"""p"""', '[]'], {}), "('p', [])\n", (19725, 19734), False, 'from congress.datalog import compile\n'), ((19751, 19775), 'congress.datalog.compile.Literal', 'compile.Literal', (['"""q"""', '[]'], {}), "('q', [])\n", (19766, 19775), False, 'from congress.datalog import compile\n'), ((19792, 19816), 'congress.datalog.compile.Literal', 'compile.Literal', (['"""r"""', '[]'], {}), "('r', [])\n", (19807, 19816), False, 'from congress.datalog import compile\n'), ((19831, 19869), 'congress.datalog.compile.Literal', 'compile.Literal', (['"""r"""', '[]'], {'negated': '(True)'}), "('r', [], negated=True)\n", (19846, 19869), False, 'from congress.datalog import compile\n'), ((19893, 19927), 'congress.datalog.compile.Rule', 'compile.Rule', (['atom', '[atom2, atom3]'], {}), '(atom, [atom2, atom3])\n', (19905, 19927), False, 'from congress.datalog import compile\n'), ((19952, 19984), 'congress.datalog.compile.Rule', 'compile.Rule', (['atom', '[lit, atom2]'], {}), '(atom, [lit, atom2])\n', (19964, 19984), False, 'from congress.datalog import compile\n'), ((20006, 20042), 'congress.datalog.compile.Rule', 'compile.Rule', (['[atom, atom2]', '[atom3]'], {}), '([atom, atom2], [atom3])\n', (20018, 20042), False, 'from congress.datalog import compile\n'), ((20063, 20095), 'congress.datalog.compile.Rule', 'compile.Rule', (['[atom, 1]', '[atom2]'], {}), '([atom, 1], [atom2])\n', (20075, 20095), False, 'from congress.datalog import compile\n'), ((20117, 20147), 'congress.datalog.compile.Rule', 'compile.Rule', (['atom', '[atom2, 1]'], {}), '(atom, [atom2, 1])\n', (20129, 20147), False, 'from congress.datalog import compile\n'), ((24391, 24421), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(y)"""'], {}), "('p(x) :- q(y)')\n", (24405, 24421), False, 'from congress.datalog import compile\n'), ((24437, 24462), 'congress.datalog.compile.rule_errors', 'compile.rule_errors', (['rule'], {}), '(rule)\n', (24456, 24462), False, 'from congress.datalog import compile\n'), ((24557, 24591), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x,y,z) :- q(w)"""'], {}), "('p(x,y,z) :- q(w)')\n", (24571, 24591), False, 'from congress.datalog import compile\n'), ((24607, 24632), 'congress.datalog.compile.rule_errors', 'compile.rule_errors', (['rule'], {}), '(rule)\n', (24626, 24632), False, 'from congress.datalog import compile\n'), ((24754, 24794), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x), not r(y)"""'], {}), "('p(x) :- q(x), not r(y)')\n", (24768, 24794), False, 'from congress.datalog import compile\n'), ((24810, 24835), 'congress.datalog.compile.rule_errors', 'compile.rule_errors', (['rule'], {}), '(rule)\n', (24829, 24835), False, 'from congress.datalog import compile\n'), ((24986, 25020), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- not q(x)"""'], {}), "('p(x) :- not q(x)')\n", (25000, 25020), False, 'from congress.datalog import compile\n'), ((25036, 25061), 'congress.datalog.compile.rule_errors', 'compile.rule_errors', (['rule'], {}), '(rule)\n', (25055, 25061), False, 'from congress.datalog import compile\n'), ((25236, 25278), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x), not r(x,y)"""'], {}), "('p(x) :- q(x), not r(x,y)')\n", (25250, 25278), False, 'from congress.datalog import compile\n'), ((25294, 25319), 'congress.datalog.compile.rule_errors', 'compile.rule_errors', (['rule'], {}), '(rule)\n', (25313, 25319), False, 'from congress.datalog import compile\n'), ((25477, 25532), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x), not r(x,y), not s(x, y)"""'], {}), "('p(x) :- q(x), not r(x,y), not s(x, y)')\n", (25491, 25532), False, 'from congress.datalog import compile\n'), ((25548, 25573), 'congress.datalog.compile.rule_errors', 'compile.rule_errors', (['rule'], {}), '(rule)\n', (25567, 25573), False, 'from congress.datalog import compile\n'), ((25688, 25733), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""execute[p(x)], r(x) :- q(x)"""'], {}), "('execute[p(x)], r(x) :- q(x)')\n", (25702, 25733), False, 'from congress.datalog import compile\n'), ((25749, 25774), 'congress.datalog.compile.rule_errors', 'compile.rule_errors', (['rule'], {}), '(rule)\n', (25768, 25774), False, 'from congress.datalog import compile\n'), ((25877, 25916), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- execute[q(x)]"""'], {}), "('p(x) :- execute[q(x)]')\n", (25891, 25916), False, 'from congress.datalog import compile\n'), ((25932, 25957), 'congress.datalog.compile.rule_errors', 'compile.rule_errors', (['rule'], {}), '(rule)\n', (25951, 25957), False, 'from congress.datalog import compile\n'), ((26055, 26089), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""equal(x) :- q(x)"""'], {}), "('equal(x) :- q(x)')\n", (26069, 26089), False, 'from congress.datalog import compile\n'), ((26105, 26130), 'congress.datalog.compile.rule_errors', 'compile.rule_errors', (['rule'], {}), '(rule)\n', (26124, 26130), False, 'from congress.datalog import compile\n'), ((26319, 26337), 'congress.policy_engines.agnostic.Runtime', 'agnostic.Runtime', ([], {}), '()\n', (26335, 26337), False, 'from congress.policy_engines import agnostic\n'), ((27057, 27145), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x), mod1:p(x, y, z), mod2:q(x, y), mod1:q(t), mod2:p(t)"""'], {}), "(\n 'p(x) :- q(x), mod1:p(x, y, z), mod2:q(x, y), mod1:q(t), mod2:p(t)')\n", (27071, 27145), False, 'from congress.datalog import compile\n'), ((27189, 27226), 'congress.datalog.compile.rule_errors', 'compile.rule_errors', (['rule', 'run.theory'], {}), '(rule, run.theory)\n', (27208, 27226), False, 'from congress.datalog import compile\n'), ((27798, 27826), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(1, 2, 2)"""'], {}), "('p(1, 2, 2)')\n", (27812, 27826), False, 'from congress.datalog import compile\n'), ((27842, 27887), 'congress.datalog.compile.fact_errors', 'compile.fact_errors', (['atom', 'run.theory', '"""mod1"""'], {}), "(atom, run.theory, 'mod1')\n", (27861, 27887), False, 'from congress.datalog import compile\n'), ((28466, 28482), 'congress.datalog.compile.Schema', 'compile.Schema', ([], {}), '()\n', (28480, 28482), False, 'from congress.datalog import compile\n'), ((28499, 28532), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x, y)"""'], {}), "('p(x) :- q(x, y)')\n", (28513, 28532), False, 'from congress.datalog import compile\n'), ((28599, 28632), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- r(x, y)"""'], {}), "('p(x) :- r(x, y)')\n", (28613, 28632), False, 'from congress.datalog import compile\n'), ((29318, 29380), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- q(x), r(x) q(x) :- r(x) r(x) :- t(x)"""'], {}), "('p(x) :- q(x), r(x) q(x) :- r(x) r(x) :- t(x)')\n", (29331, 29380), False, 'from congress.datalog import compile\n'), ((29452, 29481), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- p(x)"""'], {}), "('p(x) :- p(x)')\n", (29465, 29481), False, 'from congress.datalog import compile\n'), ((29552, 29609), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- q(x) q(x) :- r(x) r(x) :- p(x)"""'], {}), "('p(x) :- q(x) q(x) :- r(x) r(x) :- p(x)')\n", (29565, 29609), False, 'from congress.datalog import compile\n'), ((29680, 29727), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- q(x) q(x) :- not p(x)"""'], {}), "('p(x) :- q(x) q(x) :- not p(x)')\n", (29693, 29727), False, 'from congress.datalog import compile\n'), ((29798, 29861), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- q(x), s(x) q(x) :- t(x) s(x) :- p(x)"""'], {}), "('p(x) :- q(x), s(x) q(x) :- t(x) s(x) :- p(x)')\n", (29811, 29861), False, 'from congress.datalog import compile\n'), ((29972, 30005), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- not q(x)"""'], {}), "('p(x) :- not q(x)')\n", (29985, 30005), False, 'from congress.datalog import compile\n'), ((30077, 30106), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- p(x)"""'], {}), "('p(x) :- p(x)')\n", (30090, 30106), False, 'from congress.datalog import compile\n'), ((30178, 30221), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- q(x) q(x) :- p(x)"""'], {}), "('p(x) :- q(x) q(x) :- p(x)')\n", (30191, 30221), False, 'from congress.datalog import compile\n'), ((30293, 30340), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- q(x) q(x) :- not r(x)"""'], {}), "('p(x) :- q(x) q(x) :- not r(x)')\n", (30306, 30340), False, 'from congress.datalog import compile\n'), ((30412, 30463), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- not q(x) q(x) :- not r(x)"""'], {}), "('p(x) :- not q(x) q(x) :- not r(x)')\n", (30425, 30463), False, 'from congress.datalog import compile\n'), ((30535, 30604), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- not q(x) q(x) :- not r(x) r(x) :- not s(x)"""'], {}), "('p(x) :- not q(x) q(x) :- not r(x) r(x) :- not s(x)')\n", (30548, 30604), False, 'from congress.datalog import compile\n'), ((30742, 30811), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- q(x), r(x) q(x) :- not t(x) r(x) :- not s(x)"""'], {}), "('p(x) :- q(x), r(x) q(x) :- not t(x) r(x) :- not s(x)')\n", (30755, 30811), False, 'from congress.datalog import compile\n'), ((30949, 30982), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- not p(x)"""'], {}), "('p(x) :- not p(x)')\n", (30962, 30982), False, 'from congress.datalog import compile\n'), ((31055, 31102), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- q(x) q(x) :- not p(x)"""'], {}), "('p(x) :- q(x) q(x) :- not p(x)')\n", (31068, 31102), False, 'from congress.datalog import compile\n'), ((31175, 31227), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- q(x),r(x) r(x) :- not p(x)"""'], {}), "('p(x) :- q(x),r(x) r(x) :- not p(x)')\n", (31188, 31227), False, 'from congress.datalog import compile\n'), ((31300, 31387), 'congress.datalog.compile.parse', 'compile.parse', (['"""p(x) :- q(x), r(x) q(x) :- not t(x) r(x) :- not s(x) t(x) :- p(x)"""'], {}), "(\n 'p(x) :- q(x), r(x) q(x) :- not t(x) r(x) :- not s(x) t(x) :- p(x)')\n", (31313, 31387), False, 'from congress.datalog import compile\n'), ((31626, 31655), 'congress.datalog.compile.RuleDependencyGraph', 'compile.RuleDependencyGraph', ([], {}), '()\n', (31653, 31655), False, 'from congress.datalog import compile\n'), ((34170, 34199), 'congress.datalog.compile.RuleDependencyGraph', 'compile.RuleDependencyGraph', ([], {}), '()\n', (34197, 34199), False, 'from congress.datalog import compile\n'), ((34938, 34967), 'congress.datalog.compile.RuleDependencyGraph', 'compile.RuleDependencyGraph', ([], {}), '()\n', (34965, 34967), False, 'from congress.datalog import compile\n'), ((35453, 35482), 'congress.datalog.compile.RuleDependencyGraph', 'compile.RuleDependencyGraph', ([], {}), '()\n', (35480, 35482), False, 'from congress.datalog import compile\n'), ((36045, 36092), 'congress.datalog.compile.RuleDependencyGraph', 'compile.RuleDependencyGraph', ([], {'head_to_body': '(False)'}), '(head_to_body=False)\n', (36072, 36092), False, 'from congress.datalog import compile\n'), ((36588, 36609), 'congress.datalog.analysis.ModalIndex', 'analysis.ModalIndex', ([], {}), '()\n', (36607, 36609), False, 'from congress.datalog import analysis\n'), ((37413, 37434), 'congress.datalog.analysis.ModalIndex', 'analysis.ModalIndex', ([], {}), '()\n', (37432, 37434), False, 'from congress.datalog import analysis\n'), ((37590, 37611), 'congress.datalog.analysis.ModalIndex', 'analysis.ModalIndex', ([], {}), '()\n', (37609, 37611), False, 'from congress.datalog import analysis\n'), ((37751, 37772), 'congress.datalog.analysis.ModalIndex', 'analysis.ModalIndex', ([], {}), '()\n', (37770, 37772), False, 'from congress.datalog import analysis\n'), ((38115, 38127), 'copy.copy', 'copy.copy', (['m'], {}), '(m)\n', (38124, 38127), False, 'import copy\n'), ((38212, 38233), 'congress.datalog.analysis.ModalIndex', 'analysis.ModalIndex', ([], {}), '()\n', (38231, 38233), False, 'from congress.datalog import analysis\n'), ((38362, 38374), 'copy.copy', 'copy.copy', (['m'], {}), '(m)\n', (38371, 38374), False, 'import copy\n'), ((38479, 38508), 'congress.datalog.compile.RuleDependencyGraph', 'compile.RuleDependencyGraph', ([], {}), '()\n', (38506, 38508), False, 'from congress.datalog import compile\n'), ((39347, 39469), 'congress.datalog.compile.Schema', 'compile.Schema', (["{'p': (1, 2, 3), 'q': ({'name': 'a', 'type': 'Str'}, {'name': 'b',\n 'nullable': False})}"], {'complete': '(True)'}), "({'p': (1, 2, 3), 'q': ({'name': 'a', 'type': 'Str'}, {'name':\n 'b', 'nullable': False})}, complete=True)\n", (39361, 39469), False, 'from congress.datalog import compile\n'), ((7541, 7585), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x, y) :- nova:q(x, id=y)"""'], {}), "('p(x, y) :- nova:q(x, id=y)')\n", (7555, 7585), False, 'from congress.datalog import compile\n'), ((7601, 7645), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x, y) :- nova:q(x, id=y)"""'], {}), "('p(x, y) :- nova:q(x, id=y)')\n", (7615, 7645), False, 'from congress.datalog import compile\n'), ((7705, 7749), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x, y) :- nova:q(x, id=y)"""'], {}), "('p(x, y) :- nova:q(x, id=y)')\n", (7719, 7749), False, 'from congress.datalog import compile\n'), ((7772, 7816), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x, y) :- nova:q(x, id=y)"""'], {}), "('p(x, y) :- nova:q(x, id=y)')\n", (7786, 7816), False, 'from congress.datalog import compile\n'), ((10032, 10052), 'congress.datalog.compile.parse1', 'compile.parse1', (['rule'], {}), '(rule)\n', (10046, 10052), False, 'from congress.datalog import compile\n'), ((13523, 13543), 'congress.datalog.compile.parse1', 'compile.parse1', (['rule'], {}), '(rule)\n', (13537, 13543), False, 'from congress.datalog import compile\n'), ((20191, 20212), 'congress.datalog.compile.is_atom', 'compile.is_atom', (['atom'], {}), '(atom)\n', (20206, 20212), False, 'from congress.datalog import compile\n'), ((20238, 20260), 'congress.datalog.compile.is_atom', 'compile.is_atom', (['atom2'], {}), '(atom2)\n', (20253, 20260), False, 'from congress.datalog import compile\n'), ((20286, 20308), 'congress.datalog.compile.is_atom', 'compile.is_atom', (['atom3'], {}), '(atom3)\n', (20301, 20308), False, 'from congress.datalog import compile\n'), ((20335, 20355), 'congress.datalog.compile.is_atom', 'compile.is_atom', (['lit'], {}), '(lit)\n', (20350, 20355), False, 'from congress.datalog import compile\n'), ((20382, 20411), 'congress.datalog.compile.is_atom', 'compile.is_atom', (['regular_rule'], {}), '(regular_rule)\n', (20397, 20411), False, 'from congress.datalog import compile\n'), ((20438, 20468), 'congress.datalog.compile.is_atom', 'compile.is_atom', (['regular_rule2'], {}), '(regular_rule2)\n', (20453, 20468), False, 'from congress.datalog import compile\n'), ((20495, 20522), 'congress.datalog.compile.is_atom', 'compile.is_atom', (['multi_rule'], {}), '(multi_rule)\n', (20510, 20522), False, 'from congress.datalog import compile\n'), ((20549, 20575), 'congress.datalog.compile.is_atom', 'compile.is_atom', (['fake_rule'], {}), '(fake_rule)\n', (20564, 20575), False, 'from congress.datalog import compile\n'), ((20602, 20629), 'congress.datalog.compile.is_atom', 'compile.is_atom', (['fake_rule2'], {}), '(fake_rule2)\n', (20617, 20629), False, 'from congress.datalog import compile\n'), ((20656, 20683), 'congress.datalog.compile.is_atom', 'compile.is_atom', (['"""a string"""'], {}), "('a string')\n", (20671, 20683), False, 'from congress.datalog import compile\n'), ((20731, 20755), 'congress.datalog.compile.is_literal', 'compile.is_literal', (['atom'], {}), '(atom)\n', (20749, 20755), False, 'from congress.datalog import compile\n'), ((20781, 20806), 'congress.datalog.compile.is_literal', 'compile.is_literal', (['atom2'], {}), '(atom2)\n', (20799, 20806), False, 'from congress.datalog import compile\n'), ((20832, 20857), 'congress.datalog.compile.is_literal', 'compile.is_literal', (['atom3'], {}), '(atom3)\n', (20850, 20857), False, 'from congress.datalog import compile\n'), ((20883, 20906), 'congress.datalog.compile.is_literal', 'compile.is_literal', (['lit'], {}), '(lit)\n', (20901, 20906), False, 'from congress.datalog import compile\n'), ((20933, 20965), 'congress.datalog.compile.is_literal', 'compile.is_literal', (['regular_rule'], {}), '(regular_rule)\n', (20951, 20965), False, 'from congress.datalog import compile\n'), ((20992, 21025), 'congress.datalog.compile.is_literal', 'compile.is_literal', (['regular_rule2'], {}), '(regular_rule2)\n', (21010, 21025), False, 'from congress.datalog import compile\n'), ((21052, 21082), 'congress.datalog.compile.is_literal', 'compile.is_literal', (['multi_rule'], {}), '(multi_rule)\n', (21070, 21082), False, 'from congress.datalog import compile\n'), ((21109, 21138), 'congress.datalog.compile.is_literal', 'compile.is_literal', (['fake_rule'], {}), '(fake_rule)\n', (21127, 21138), False, 'from congress.datalog import compile\n'), ((21165, 21195), 'congress.datalog.compile.is_literal', 'compile.is_literal', (['fake_rule2'], {}), '(fake_rule2)\n', (21183, 21195), False, 'from congress.datalog import compile\n'), ((21222, 21252), 'congress.datalog.compile.is_literal', 'compile.is_literal', (['"""a string"""'], {}), "('a string')\n", (21240, 21252), False, 'from congress.datalog import compile\n'), ((21306, 21335), 'congress.datalog.compile.is_regular_rule', 'compile.is_regular_rule', (['atom'], {}), '(atom)\n', (21329, 21335), False, 'from congress.datalog import compile\n'), ((21362, 21392), 'congress.datalog.compile.is_regular_rule', 'compile.is_regular_rule', (['atom2'], {}), '(atom2)\n', (21385, 21392), False, 'from congress.datalog import compile\n'), ((21419, 21449), 'congress.datalog.compile.is_regular_rule', 'compile.is_regular_rule', (['atom3'], {}), '(atom3)\n', (21442, 21449), False, 'from congress.datalog import compile\n'), ((21476, 21504), 'congress.datalog.compile.is_regular_rule', 'compile.is_regular_rule', (['lit'], {}), '(lit)\n', (21499, 21504), False, 'from congress.datalog import compile\n'), ((21530, 21567), 'congress.datalog.compile.is_regular_rule', 'compile.is_regular_rule', (['regular_rule'], {}), '(regular_rule)\n', (21553, 21567), False, 'from congress.datalog import compile\n'), ((21593, 21631), 'congress.datalog.compile.is_regular_rule', 'compile.is_regular_rule', (['regular_rule2'], {}), '(regular_rule2)\n', (21616, 21631), False, 'from congress.datalog import compile\n'), ((21658, 21693), 'congress.datalog.compile.is_regular_rule', 'compile.is_regular_rule', (['multi_rule'], {}), '(multi_rule)\n', (21681, 21693), False, 'from congress.datalog import compile\n'), ((21720, 21754), 'congress.datalog.compile.is_regular_rule', 'compile.is_regular_rule', (['fake_rule'], {}), '(fake_rule)\n', (21743, 21754), False, 'from congress.datalog import compile\n'), ((21781, 21816), 'congress.datalog.compile.is_regular_rule', 'compile.is_regular_rule', (['fake_rule2'], {}), '(fake_rule2)\n', (21804, 21816), False, 'from congress.datalog import compile\n'), ((21843, 21878), 'congress.datalog.compile.is_regular_rule', 'compile.is_regular_rule', (['"""a string"""'], {}), "('a string')\n", (21866, 21878), False, 'from congress.datalog import compile\n'), ((21930, 21957), 'congress.datalog.compile.is_multi_rule', 'compile.is_multi_rule', (['atom'], {}), '(atom)\n', (21951, 21957), False, 'from congress.datalog import compile\n'), ((21984, 22012), 'congress.datalog.compile.is_multi_rule', 'compile.is_multi_rule', (['atom2'], {}), '(atom2)\n', (22005, 22012), False, 'from congress.datalog import compile\n'), ((22039, 22067), 'congress.datalog.compile.is_multi_rule', 'compile.is_multi_rule', (['atom3'], {}), '(atom3)\n', (22060, 22067), False, 'from congress.datalog import compile\n'), ((22094, 22120), 'congress.datalog.compile.is_multi_rule', 'compile.is_multi_rule', (['lit'], {}), '(lit)\n', (22115, 22120), False, 'from congress.datalog import compile\n'), ((22147, 22182), 'congress.datalog.compile.is_multi_rule', 'compile.is_multi_rule', (['regular_rule'], {}), '(regular_rule)\n', (22168, 22182), False, 'from congress.datalog import compile\n'), ((22209, 22245), 'congress.datalog.compile.is_multi_rule', 'compile.is_multi_rule', (['regular_rule2'], {}), '(regular_rule2)\n', (22230, 22245), False, 'from congress.datalog import compile\n'), ((22271, 22304), 'congress.datalog.compile.is_multi_rule', 'compile.is_multi_rule', (['multi_rule'], {}), '(multi_rule)\n', (22292, 22304), False, 'from congress.datalog import compile\n'), ((22331, 22363), 'congress.datalog.compile.is_multi_rule', 'compile.is_multi_rule', (['fake_rule'], {}), '(fake_rule)\n', (22352, 22363), False, 'from congress.datalog import compile\n'), ((22390, 22423), 'congress.datalog.compile.is_multi_rule', 'compile.is_multi_rule', (['fake_rule2'], {}), '(fake_rule2)\n', (22411, 22423), False, 'from congress.datalog import compile\n'), ((22450, 22483), 'congress.datalog.compile.is_multi_rule', 'compile.is_multi_rule', (['"""a string"""'], {}), "('a string')\n", (22471, 22483), False, 'from congress.datalog import compile\n'), ((22529, 22550), 'congress.datalog.compile.is_rule', 'compile.is_rule', (['atom'], {}), '(atom)\n', (22544, 22550), False, 'from congress.datalog import compile\n'), ((22577, 22599), 'congress.datalog.compile.is_rule', 'compile.is_rule', (['atom2'], {}), '(atom2)\n', (22592, 22599), False, 'from congress.datalog import compile\n'), ((22626, 22648), 'congress.datalog.compile.is_rule', 'compile.is_rule', (['atom3'], {}), '(atom3)\n', (22641, 22648), False, 'from congress.datalog import compile\n'), ((22675, 22695), 'congress.datalog.compile.is_rule', 'compile.is_rule', (['lit'], {}), '(lit)\n', (22690, 22695), False, 'from congress.datalog import compile\n'), ((22721, 22750), 'congress.datalog.compile.is_rule', 'compile.is_rule', (['regular_rule'], {}), '(regular_rule)\n', (22736, 22750), False, 'from congress.datalog import compile\n'), ((22776, 22806), 'congress.datalog.compile.is_rule', 'compile.is_rule', (['regular_rule2'], {}), '(regular_rule2)\n', (22791, 22806), False, 'from congress.datalog import compile\n'), ((22832, 22859), 'congress.datalog.compile.is_rule', 'compile.is_rule', (['multi_rule'], {}), '(multi_rule)\n', (22847, 22859), False, 'from congress.datalog import compile\n'), ((22886, 22912), 'congress.datalog.compile.is_rule', 'compile.is_rule', (['fake_rule'], {}), '(fake_rule)\n', (22901, 22912), False, 'from congress.datalog import compile\n'), ((22939, 22966), 'congress.datalog.compile.is_rule', 'compile.is_rule', (['fake_rule2'], {}), '(fake_rule2)\n', (22954, 22966), False, 'from congress.datalog import compile\n'), ((22993, 23020), 'congress.datalog.compile.is_rule', 'compile.is_rule', (['"""a string"""'], {}), "('a string')\n", (23008, 23020), False, 'from congress.datalog import compile\n'), ((23068, 23092), 'congress.datalog.compile.is_datalog', 'compile.is_datalog', (['atom'], {}), '(atom)\n', (23086, 23092), False, 'from congress.datalog import compile\n'), ((23118, 23143), 'congress.datalog.compile.is_datalog', 'compile.is_datalog', (['atom2'], {}), '(atom2)\n', (23136, 23143), False, 'from congress.datalog import compile\n'), ((23169, 23194), 'congress.datalog.compile.is_datalog', 'compile.is_datalog', (['atom3'], {}), '(atom3)\n', (23187, 23194), False, 'from congress.datalog import compile\n'), ((23221, 23244), 'congress.datalog.compile.is_datalog', 'compile.is_datalog', (['lit'], {}), '(lit)\n', (23239, 23244), False, 'from congress.datalog import compile\n'), ((23270, 23302), 'congress.datalog.compile.is_datalog', 'compile.is_datalog', (['regular_rule'], {}), '(regular_rule)\n', (23288, 23302), False, 'from congress.datalog import compile\n'), ((23328, 23361), 'congress.datalog.compile.is_datalog', 'compile.is_datalog', (['regular_rule2'], {}), '(regular_rule2)\n', (23346, 23361), False, 'from congress.datalog import compile\n'), ((23388, 23418), 'congress.datalog.compile.is_datalog', 'compile.is_datalog', (['multi_rule'], {}), '(multi_rule)\n', (23406, 23418), False, 'from congress.datalog import compile\n'), ((23445, 23474), 'congress.datalog.compile.is_datalog', 'compile.is_datalog', (['fake_rule'], {}), '(fake_rule)\n', (23463, 23474), False, 'from congress.datalog import compile\n'), ((23501, 23531), 'congress.datalog.compile.is_datalog', 'compile.is_datalog', (['fake_rule2'], {}), '(fake_rule2)\n', (23519, 23531), False, 'from congress.datalog import compile\n'), ((23558, 23588), 'congress.datalog.compile.is_datalog', 'compile.is_datalog', (['"""a string"""'], {}), "('a string')\n", (23576, 23588), False, 'from congress.datalog import compile\n'), ((23645, 23678), 'congress.datalog.compile.is_extended_datalog', 'compile.is_extended_datalog', (['atom'], {}), '(atom)\n', (23672, 23678), False, 'from congress.datalog import compile\n'), ((23704, 23738), 'congress.datalog.compile.is_extended_datalog', 'compile.is_extended_datalog', (['atom2'], {}), '(atom2)\n', (23731, 23738), False, 'from congress.datalog import compile\n'), ((23764, 23798), 'congress.datalog.compile.is_extended_datalog', 'compile.is_extended_datalog', (['atom3'], {}), '(atom3)\n', (23791, 23798), False, 'from congress.datalog import compile\n'), ((23825, 23857), 'congress.datalog.compile.is_extended_datalog', 'compile.is_extended_datalog', (['lit'], {}), '(lit)\n', (23852, 23857), False, 'from congress.datalog import compile\n'), ((23883, 23924), 'congress.datalog.compile.is_extended_datalog', 'compile.is_extended_datalog', (['regular_rule'], {}), '(regular_rule)\n', (23910, 23924), False, 'from congress.datalog import compile\n'), ((23950, 23992), 'congress.datalog.compile.is_extended_datalog', 'compile.is_extended_datalog', (['regular_rule2'], {}), '(regular_rule2)\n', (23977, 23992), False, 'from congress.datalog import compile\n'), ((24018, 24057), 'congress.datalog.compile.is_extended_datalog', 'compile.is_extended_datalog', (['multi_rule'], {}), '(multi_rule)\n', (24045, 24057), False, 'from congress.datalog import compile\n'), ((24084, 24122), 'congress.datalog.compile.is_extended_datalog', 'compile.is_extended_datalog', (['fake_rule'], {}), '(fake_rule)\n', (24111, 24122), False, 'from congress.datalog import compile\n'), ((24149, 24188), 'congress.datalog.compile.is_extended_datalog', 'compile.is_extended_datalog', (['fake_rule2'], {}), '(fake_rule2)\n', (24176, 24188), False, 'from congress.datalog import compile\n'), ((24215, 24254), 'congress.datalog.compile.is_extended_datalog', 'compile.is_extended_datalog', (['"""a string"""'], {}), "('a string')\n", (24242, 24254), False, 'from congress.datalog import compile\n'), ((26437, 26480), 'congress.datalog.compile.Schema', 'compile.Schema', (["{'p': (1, 2, 3), 'q': (1,)}"], {}), "({'p': (1, 2, 3), 'q': (1,)})\n", (26451, 26480), False, 'from congress.datalog import compile\n'), ((26551, 26591), 'congress.datalog.compile.Schema', 'compile.Schema', (["{'p': (1,), 'q': (1, 2)}"], {}), "({'p': (1,), 'q': (1, 2)})\n", (26565, 26591), False, 'from congress.datalog import compile\n'), ((26729, 26756), 'congress.datalog.compile.parse1', 'compile.parse1', (['code_string'], {}), '(code_string)\n', (26743, 26756), False, 'from congress.datalog import compile\n'), ((29406, 29433), 'congress.datalog.compile.is_recursive', 'compile.is_recursive', (['rules'], {}), '(rules)\n', (29426, 29433), False, 'from congress.datalog import compile\n'), ((29506, 29533), 'congress.datalog.compile.is_recursive', 'compile.is_recursive', (['rules'], {}), '(rules)\n', (29526, 29533), False, 'from congress.datalog import compile\n'), ((29634, 29661), 'congress.datalog.compile.is_recursive', 'compile.is_recursive', (['rules'], {}), '(rules)\n', (29654, 29661), False, 'from congress.datalog import compile\n'), ((29752, 29779), 'congress.datalog.compile.is_recursive', 'compile.is_recursive', (['rules'], {}), '(rules)\n', (29772, 29779), False, 'from congress.datalog import compile\n'), ((29886, 29913), 'congress.datalog.compile.is_recursive', 'compile.is_recursive', (['rules'], {}), '(rules)\n', (29906, 29913), False, 'from congress.datalog import compile\n'), ((30030, 30058), 'congress.datalog.compile.is_stratified', 'compile.is_stratified', (['rules'], {}), '(rules)\n', (30051, 30058), False, 'from congress.datalog import compile\n'), ((30131, 30159), 'congress.datalog.compile.is_stratified', 'compile.is_stratified', (['rules'], {}), '(rules)\n', (30152, 30159), False, 'from congress.datalog import compile\n'), ((30246, 30274), 'congress.datalog.compile.is_stratified', 'compile.is_stratified', (['rules'], {}), '(rules)\n', (30267, 30274), False, 'from congress.datalog import compile\n'), ((30365, 30393), 'congress.datalog.compile.is_stratified', 'compile.is_stratified', (['rules'], {}), '(rules)\n', (30386, 30393), False, 'from congress.datalog import compile\n'), ((30488, 30516), 'congress.datalog.compile.is_stratified', 'compile.is_stratified', (['rules'], {}), '(rules)\n', (30509, 30516), False, 'from congress.datalog import compile\n'), ((30695, 30723), 'congress.datalog.compile.is_stratified', 'compile.is_stratified', (['rules'], {}), '(rules)\n', (30716, 30723), False, 'from congress.datalog import compile\n'), ((30902, 30930), 'congress.datalog.compile.is_stratified', 'compile.is_stratified', (['rules'], {}), '(rules)\n', (30923, 30930), False, 'from congress.datalog import compile\n'), ((31008, 31036), 'congress.datalog.compile.is_stratified', 'compile.is_stratified', (['rules'], {}), '(rules)\n', (31029, 31036), False, 'from congress.datalog import compile\n'), ((31128, 31156), 'congress.datalog.compile.is_stratified', 'compile.is_stratified', (['rules'], {}), '(rules)\n', (31149, 31156), False, 'from congress.datalog import compile\n'), ((31253, 31281), 'congress.datalog.compile.is_stratified', 'compile.is_stratified', (['rules'], {}), '(rules)\n', (31274, 31281), False, 'from congress.datalog import compile\n'), ((31507, 31535), 'congress.datalog.compile.is_stratified', 'compile.is_stratified', (['rules'], {}), '(rules)\n', (31528, 31535), False, 'from congress.datalog import compile\n'), ((31708, 31750), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x), q(x) :- r(x), s(x)"""'], {}), "('p(x), q(x) :- r(x), s(x)')\n", (31722, 31750), False, 'from congress.datalog import compile\n'), ((32214, 32244), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""r(x) :- t(x)"""'], {}), "('r(x) :- t(x)')\n", (32228, 32244), False, 'from congress.datalog import compile\n'), ((32814, 32844), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""t(x) :- p(x)"""'], {}), "('t(x) :- p(x)')\n", (32828, 32844), False, 'from congress.datalog import compile\n'), ((32982, 33024), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x), q(x) :- r(x), s(x)"""'], {}), "('p(x), q(x) :- r(x), s(x)')\n", (32996, 33024), False, 'from congress.datalog import compile\n'), ((33343, 33379), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x), r(x)"""'], {}), "('p(x) :- q(x), r(x)')\n", (33357, 33379), False, 'from congress.datalog import compile\n'), ((33406, 33436), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(1) :- r(1)"""'], {}), "('p(1) :- r(1)')\n", (33420, 33436), False, 'from congress.datalog import compile\n'), ((33552, 33582), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(1) :- r(1)"""'], {}), "('p(1) :- r(1)')\n", (33566, 33582), False, 'from congress.datalog import compile\n'), ((33648, 33684), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x), r(x)"""'], {}), "('p(x) :- q(x), r(x)')\n", (33662, 33684), False, 'from congress.datalog import compile\n'), ((34225, 34261), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x), r(x)"""'], {}), "('p(x) :- q(x), r(x)')\n", (34239, 34261), False, 'from congress.datalog import compile\n'), ((34288, 34328), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""q(x) :- t(x), not s(x)"""'], {}), "('q(x) :- t(x), not s(x)')\n", (34302, 34328), False, 'from congress.datalog import compile\n'), ((34355, 34397), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""t(x) :- t(x), p(x), q(x)"""'], {}), "('t(x) :- t(x), p(x), q(x)')\n", (34369, 34397), False, 'from congress.datalog import compile\n'), ((34993, 35029), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x), r(x)"""'], {}), "('p(x) :- q(x), r(x)')\n", (35007, 35029), False, 'from congress.datalog import compile\n'), ((35056, 35096), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""q(x) :- t(x), not s(x)"""'], {}), "('q(x) :- t(x), not s(x)')\n", (35070, 35096), False, 'from congress.datalog import compile\n'), ((35508, 35544), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x), r(x)"""'], {}), "('p(x) :- q(x), r(x)')\n", (35522, 35544), False, 'from congress.datalog import compile\n'), ((35571, 35611), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""q(x) :- t(x), not s(x)"""'], {}), "('q(x) :- t(x), not s(x)')\n", (35585, 35611), False, 'from congress.datalog import compile\n'), ((35638, 35680), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""t(x) :- t(x), p(x), q(x)"""'], {}), "('t(x) :- t(x), p(x), q(x)')\n", (35652, 35680), False, 'from congress.datalog import compile\n'), ((36118, 36154), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x), r(x)"""'], {}), "('p(x) :- q(x), r(x)')\n", (36132, 36154), False, 'from congress.datalog import compile\n'), ((36181, 36221), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""q(x) :- t(x), not s(x)"""'], {}), "('q(x) :- t(x), not s(x)')\n", (36195, 36221), False, 'from congress.datalog import compile\n'), ((38534, 38564), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- q(x)"""'], {}), "('p(x) :- q(x)')\n", (38548, 38564), False, 'from congress.datalog import compile\n'), ((38591, 38621), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""q(x) :- r(x)"""'], {}), "('q(x) :- r(x)')\n", (38605, 38621), False, 'from congress.datalog import compile\n'), ((38648, 38687), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""execute[p(x)] :- q(x)"""'], {}), "('execute[p(x)] :- q(x)')\n", (38662, 38687), False, 'from congress.datalog import compile\n'), ((38721, 38760), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""execute[r(x)] :- q(x)"""'], {}), "('execute[r(x)] :- q(x)')\n", (38735, 38760), False, 'from congress.datalog import compile\n'), ((38787, 38825), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""insert[s(x)] :- q(x)"""'], {}), "('insert[s(x)] :- q(x)')\n", (38801, 38825), False, 'from congress.datalog import compile\n'), ((39041, 39080), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""execute[p(x)] :- q(x)"""'], {}), "('execute[p(x)] :- q(x)')\n", (39055, 39080), False, 'from congress.datalog import compile\n'), ((8847, 8866), 'congress.datalog.compile.parse', 'compile.parse', (['code'], {}), '(code)\n', (8860, 8866), False, 'from congress.datalog import compile\n'), ((19113, 19170), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- nova:q(id=x, 2=y), nova:r(id=x)"""'], {}), "('p(x) :- nova:q(id=x, 2=y), nova:r(id=x)')\n", (19127, 19170), False, 'from congress.datalog import compile\n'), ((19270, 19327), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""p(x) :- nova:r(id=x), nova:q(id=x, 2=y)"""'], {}), "('p(x) :- nova:r(id=x), nova:q(id=x, 2=y)')\n", (19284, 19327), False, 'from congress.datalog import compile\n'), ((34530, 34565), 'congress.datalog.utility.Cycle', 'utility.Cycle', (["['p', 'q', 't', 'p']"], {}), "(['p', 'q', 't', 'p'])\n", (34543, 34565), False, 'from congress.datalog import utility\n'), ((34579, 34609), 'congress.datalog.utility.Cycle', 'utility.Cycle', (["['q', 't', 'q']"], {}), "(['q', 't', 'q'])\n", (34592, 34609), False, 'from congress.datalog import utility\n'), ((34623, 34648), 'congress.datalog.utility.Cycle', 'utility.Cycle', (["['t', 't']"], {}), "(['t', 't'])\n", (34636, 34648), False, 'from congress.datalog import utility\n'), ((10577, 10597), 'congress.datalog.compile.parse1', 'compile.parse1', (['code'], {}), '(code)\n', (10591, 10597), False, 'from congress.datalog import compile\n'), ((11985, 12005), 'congress.datalog.compile.parse1', 'compile.parse1', (['code'], {}), '(code)\n', (11999, 12005), False, 'from congress.datalog import compile\n'), ((15090, 15110), 'congress.datalog.compile.parse1', 'compile.parse1', (['code'], {}), '(code)\n', (15104, 15110), False, 'from congress.datalog import compile\n'), ((17593, 17613), 'congress.datalog.compile.parse1', 'compile.parse1', (['code'], {}), '(code)\n', (17607, 17613), False, 'from congress.datalog import compile\n'), ((33797, 33827), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""a(x) :- b(x)"""'], {}), "('a(x) :- b(x)')\n", (33811, 33827), False, 'from congress.datalog import compile\n'), ((33856, 33886), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""b(x) :- c(x)"""'], {}), "('b(x) :- c(x)')\n", (33870, 33886), False, 'from congress.datalog import compile\n'), ((33915, 33945), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""c(x) :- a(x)"""'], {}), "('c(x) :- a(x)')\n", (33929, 33945), False, 'from congress.datalog import compile\n'), ((34041, 34071), 'congress.datalog.compile.parse1', 'compile.parse1', (['"""c(x) :- a(x)"""'], {}), "('c(x) :- a(x)')\n", (34055, 34071), False, 'from congress.datalog import compile\n')] |
import numpy as np
def determinant(matrix):
return np.linalg.det(matrix)
| [
"numpy.linalg.det"
] | [((59, 80), 'numpy.linalg.det', 'np.linalg.det', (['matrix'], {}), '(matrix)\n', (72, 80), True, 'import numpy as np\n')] |
# Copyright (c) 2020 CNES
#
# All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import unittest
import pyinterp.geodetic
class System(unittest.TestCase):
def test_init(self):
wgs = pyinterp.geodetic.System()
self.assertIsInstance(wgs, pyinterp.geodetic.System)
with self.assertRaises(TypeError):
wgs = pyinterp.geodetic.System(12.0)
with self.assertRaises(TypeError):
wgs = pyinterp.geodetic.System((12.0, 3.0, 5))
wgs = pyinterp.geodetic.System((1, 1))
self.assertIsInstance(wgs, pyinterp.geodetic.System)
class Coordinates(unittest.TestCase):
def test_init(self):
wgs = pyinterp.geodetic.Coordinates()
self.assertIsInstance(wgs, pyinterp.geodetic.Coordinates)
wgs = pyinterp.geodetic.Coordinates(pyinterp.geodetic.System())
self.assertIsInstance(wgs, pyinterp.geodetic.Coordinates)
class Point2D(unittest.TestCase):
def test_init(self):
# pt = pyinterp.geodetic.Point2D()
# self.assertEqual(pt.lon, 0)
# self.assertEqual(pt.lat, 0)
pt = pyinterp.geodetic.Point2D(1, 2)
self.assertEqual(pt.lon, 1)
self.assertEqual(pt.lat, 2)
class Box2D(unittest.TestCase):
def test_init(self):
box = pyinterp.geodetic.Box2D()
# self.assertEqual(box.min_corner.lon, 0)
# self.assertEqual(box.min_corner.lat, 0)
# self.assertEqual(box.max_corner.lon, 0)
# self.assertEqual(box.max_corner.lat, 0)
box = pyinterp.geodetic.Box2D.entire_earth()
self.assertEqual(box.min_corner.lon, -180)
self.assertEqual(box.min_corner.lat, -90)
self.assertEqual(box.max_corner.lon, 180)
self.assertEqual(box.max_corner.lat, 90)
box = pyinterp.geodetic.Box2D(pyinterp.geodetic.Point2D(1, 2),
pyinterp.geodetic.Point2D(3, 4))
self.assertEqual(box.min_corner.lon, 1)
self.assertEqual(box.min_corner.lat, 2)
self.assertEqual(box.max_corner.lon, 3)
self.assertEqual(box.max_corner.lat, 4)
if __name__ == "__main__":
unittest.main()
| [
"unittest.main"
] | [((2189, 2204), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2202, 2204), False, 'import unittest\n')] |
from django.db import models
from django.contrib.auth import models as auth_models
from django.core.urlresolvers import reverse
import datetime
def get_next_month_first_date():
"""Returns the date of the 1st day of the next month (compared to today)"""
today = datetime.datetime.today().date()
if today.month == 12:
next_month_first = datetime.date(today.year+1,1,1)
else:
next_month_first = datetime.date(today.year,today.month+1,1)
return next_month_first
class RoomManager(models.Manager):
def get_queryset(self):
long_ago = datetime.datetime.today().date() - datetime.timedelta(days=36)
return super(RoomManager, self).get_queryset().filter(req_open='O').filter(modified_on__gt=long_ago)
class room_requirement(models.Model):
"""Model to save any room requirement details"""
owner = models.ForeignKey(auth_models.User, on_delete=models.PROTECT)
req_status_choices = (
('O', 'Open'),
('C', 'Closed'),
)
req_open = models.CharField(default="O", choices=req_status_choices,
max_length=2, verbose_name="Request Status",
help_text="Status of the request")
contact_number = models.CharField(blank=True, null=True, max_length=16,
help_text="Contact number (optional)")
gender_req_choices = (
('M', 'Male'),
('F', 'Female'),
('A', 'Any'),
)
gender_req = models.CharField(default="A", choices=gender_req_choices,
max_length=2, help_text="Gender requirement",
verbose_name="Gender requirement")
locality = models.CharField(null=True, max_length=64, help_text="Name of locality/society")
rent = models.IntegerField(null=True, help_text="Rent per month (in Rs.)")
deposit = models.IntegerField(null=True, help_text="Security deposit (in Rs.)")
vacancies = models.IntegerField(default=1, help_text="Total vacancies (no. of persons)")
immediate_possession = models.BooleanField(default=True, help_text="Is the room available for immediate possession?")
available_from = models.DateField(blank=True, null=True, help_text="When would the room be available for accomodation?")
more_details = models.CharField(blank=True, null=True, max_length=2048, help_text="Additional details")
modified_on = models.DateTimeField(blank=True, null=True, auto_now_add=True, help_text="Internal field")
# default manager
objects = models.Manager()
# custom manager for filtering of "closed" & "old but open" posts
active = RoomManager()
def __unicode__(self):
return "{0} - {1}".format(self.locality, self.owner.username)
def get_post_url(self):
return reverse('roomreq:indi', kwargs={'post_id':self.id})
def save(self, *args, **kwargs):
if not self.immediate_possession and not self.available_from:
self.available_from = get_next_month_first_date()
self.modified_on = datetime.datetime.today().date()
super(room_requirement, self).save(*args, **kwargs)
class Meta:
verbose_name = "Room Requirement"
ordering = ['-modified_on',]
| [
"django.db.models.DateField",
"django.db.models.Manager",
"django.db.models.IntegerField",
"django.db.models.ForeignKey",
"django.core.urlresolvers.reverse",
"django.db.models.BooleanField",
"datetime.datetime.today",
"datetime.date",
"django.db.models.DateTimeField",
"datetime.timedelta",
"django.db.models.CharField"
] | [((858, 919), 'django.db.models.ForeignKey', 'models.ForeignKey', (['auth_models.User'], {'on_delete': 'models.PROTECT'}), '(auth_models.User, on_delete=models.PROTECT)\n', (875, 919), False, 'from django.db import models\n'), ((1017, 1158), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""O"""', 'choices': 'req_status_choices', 'max_length': '(2)', 'verbose_name': '"""Request Status"""', 'help_text': '"""Status of the request"""'}), "(default='O', choices=req_status_choices, max_length=2,\n verbose_name='Request Status', help_text='Status of the request')\n", (1033, 1158), False, 'from django.db import models\n'), ((1241, 1339), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(16)', 'help_text': '"""Contact number (optional)"""'}), "(blank=True, null=True, max_length=16, help_text=\n 'Contact number (optional)')\n", (1257, 1339), False, 'from django.db import models\n'), ((1494, 1636), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""A"""', 'choices': 'gender_req_choices', 'max_length': '(2)', 'help_text': '"""Gender requirement"""', 'verbose_name': '"""Gender requirement"""'}), "(default='A', choices=gender_req_choices, max_length=2,\n help_text='Gender requirement', verbose_name='Gender requirement')\n", (1510, 1636), False, 'from django.db import models\n'), ((1717, 1802), 'django.db.models.CharField', 'models.CharField', ([], {'null': '(True)', 'max_length': '(64)', 'help_text': '"""Name of locality/society"""'}), "(null=True, max_length=64, help_text='Name of locality/society'\n )\n", (1733, 1802), False, 'from django.db import models\n'), ((1809, 1876), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)', 'help_text': '"""Rent per month (in Rs.)"""'}), "(null=True, help_text='Rent per month (in Rs.)')\n", (1828, 1876), False, 'from django.db import models\n'), ((1891, 1960), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)', 'help_text': '"""Security deposit (in Rs.)"""'}), "(null=True, help_text='Security deposit (in Rs.)')\n", (1910, 1960), False, 'from django.db import models\n'), ((1977, 2053), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)', 'help_text': '"""Total vacancies (no. of persons)"""'}), "(default=1, help_text='Total vacancies (no. of persons)')\n", (1996, 2053), False, 'from django.db import models\n'), ((2081, 2180), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'help_text': '"""Is the room available for immediate possession?"""'}), "(default=True, help_text=\n 'Is the room available for immediate possession?')\n", (2100, 2180), False, 'from django.db import models\n'), ((2197, 2305), 'django.db.models.DateField', 'models.DateField', ([], {'blank': '(True)', 'null': '(True)', 'help_text': '"""When would the room be available for accomodation?"""'}), "(blank=True, null=True, help_text=\n 'When would the room be available for accomodation?')\n", (2213, 2305), False, 'from django.db import models\n'), ((2320, 2413), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(2048)', 'help_text': '"""Additional details"""'}), "(blank=True, null=True, max_length=2048, help_text=\n 'Additional details')\n", (2336, 2413), False, 'from django.db import models\n'), ((2428, 2523), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'auto_now_add': '(True)', 'help_text': '"""Internal field"""'}), "(blank=True, null=True, auto_now_add=True, help_text=\n 'Internal field')\n", (2448, 2523), False, 'from django.db import models\n'), ((2556, 2572), 'django.db.models.Manager', 'models.Manager', ([], {}), '()\n', (2570, 2572), False, 'from django.db import models\n'), ((357, 392), 'datetime.date', 'datetime.date', (['(today.year + 1)', '(1)', '(1)'], {}), '(today.year + 1, 1, 1)\n', (370, 392), False, 'import datetime\n'), ((426, 471), 'datetime.date', 'datetime.date', (['today.year', '(today.month + 1)', '(1)'], {}), '(today.year, today.month + 1, 1)\n', (439, 471), False, 'import datetime\n'), ((2812, 2864), 'django.core.urlresolvers.reverse', 'reverse', (['"""roomreq:indi"""'], {'kwargs': "{'post_id': self.id}"}), "('roomreq:indi', kwargs={'post_id': self.id})\n", (2819, 2864), False, 'from django.core.urlresolvers import reverse\n'), ((271, 296), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (294, 296), False, 'import datetime\n'), ((615, 642), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(36)'}), '(days=36)\n', (633, 642), False, 'import datetime\n'), ((3061, 3086), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (3084, 3086), False, 'import datetime\n'), ((580, 605), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (603, 605), False, 'import datetime\n')] |
import sys
from Ziggeo import Ziggeo
if(len(sys.argv) < 3):
print ("Error\n")
print ("Usage: $>python _videos_index_orientation.py YOUR_API_TOKEN YOUR_PRIVATE_KEY\n")
sys.exit()
api_token = sys.argv[1]
private_key = sys.argv[2]
count_landscape = 0.0
count_portrait = 0.0
ziggeo = Ziggeo(api_token, private_key)
def indexVideos(skip=0):
global count_landscape, count_portrait
video_list = ziggeo.videos().index({"limit":100, "skip":skip})
for video in video_list:
width = video['streams'][0]['video_width']
height = video['streams'][0]['video_height']
if ( width > height) :
count_landscape += 1
else :
count_portrait += 1
if(len(video_list) > 0):
indexVideos(skip+100)
pass
indexVideos(0)
pr_portrait = (count_portrait/(count_landscape+count_portrait))*100
pr_landscape = (count_landscape/(count_landscape+count_portrait))*100
print("Portrait Count = {:.0f} Portrait Percentage {:.2f}%".format(count_portrait, pr_portrait))
print("Landscape Count = {:.0f} Landscape Percentage {:.2f}%".format(count_landscape, pr_landscape)) | [
"Ziggeo.Ziggeo",
"sys.exit"
] | [((286, 316), 'Ziggeo.Ziggeo', 'Ziggeo', (['api_token', 'private_key'], {}), '(api_token, private_key)\n', (292, 316), False, 'from Ziggeo import Ziggeo\n'), ((172, 182), 'sys.exit', 'sys.exit', ([], {}), '()\n', (180, 182), False, 'import sys\n')] |
# -*- coding: utf-8 -*-
# Created by hkh at 2019-01-30
import sys
import nltk
from nltk.parse import CoreNLPParser
stanford_dir = '/home/hkh/tools/stanford-postagger-full/'
modelfile = stanford_dir + 'models/chinese-distsim.tagger'
jarfile = stanford_dir + 'stanford-postagger.jar'
# nltk.internals.config_java(options='-Xmx3024m')
# st = StanfordPOSTagger(model_filename=modelfile, path_to_jar=jarfile)
tagger = CoreNLPParser(url='http://localhost:9000', tagtype='pos')
# print(tagger.tag("for all their trouble, I forgive you !".split(" ")))
# lines = ["for all their trouble, I forgive you !".split(" "), "The StanfordTokenizer will be deprecated in version 3.2.5.".split(" ")]
# lines = []
# for line in sys.stdin:
# line = line.strip()
# lines += [line.split(" ")]
# if len(lines) == 2000:
# pos_sents = tagger.tag_sents(lines)
# for pos in pos_sents:
# print(" ".join(["%s|%s" % (p[0], p[1]) for p in pos]))
# lines = []
# pos_sents = tagger.tag_sents(lines)
# for pos in pos_sents:
# print(" ".join(["%s|%s" % (p[0], p[1]) for p in pos]))
lines = []
for line in sys.stdin:
line = line.strip()
pos_sent = tagger.tag(line.split(" "))
print(" ".join(["%s|%s" % (p[0], p[1]) for p in pos_sent]), flush=True)
| [
"nltk.parse.CoreNLPParser"
] | [((425, 482), 'nltk.parse.CoreNLPParser', 'CoreNLPParser', ([], {'url': '"""http://localhost:9000"""', 'tagtype': '"""pos"""'}), "(url='http://localhost:9000', tagtype='pos')\n", (438, 482), False, 'from nltk.parse import CoreNLPParser\n')] |
from collections import OrderedDict
import pandas as pd
# Here is some code to read in some stock data from the Yahoo Finance API
AAPL = pd.read_csv(
"http://ichart.yahoo.com/table.csv?s=AAPL&a=0&b=1&c=2000",
parse_dates=['Date'])
MSFT = pd.read_csv(
"http://ichart.yahoo.com/table.csv?s=MSFT&a=0&b=1&c=2000",
parse_dates=['Date'])
IBM = pd.read_csv(
"http://ichart.yahoo.com/table.csv?s=IBM&a=0&b=1&c=2000",
parse_dates=['Date'])
xyvalues = OrderedDict(AAPL=AAPL[['Date', 'Adj Close']],
MSFT=MSFT[['Date', 'Adj Close']],
IBM=IBM[['Date', 'Adj Close']])
df = pd.concat(xyvalues, axis=1, names=["l0", "l1"])
from bokeh.charts import TimeSeries
ts = TimeSeries(df, title="timeseries, pd_input", filename="stocks_timeseries.html")
ts.legend("top_left").show()
| [
"collections.OrderedDict",
"pandas.concat",
"pandas.read_csv",
"bokeh.charts.TimeSeries"
] | [((138, 234), 'pandas.read_csv', 'pd.read_csv', (['"""http://ichart.yahoo.com/table.csv?s=AAPL&a=0&b=1&c=2000"""'], {'parse_dates': "['Date']"}), "('http://ichart.yahoo.com/table.csv?s=AAPL&a=0&b=1&c=2000',\n parse_dates=['Date'])\n", (149, 234), True, 'import pandas as pd\n'), ((247, 343), 'pandas.read_csv', 'pd.read_csv', (['"""http://ichart.yahoo.com/table.csv?s=MSFT&a=0&b=1&c=2000"""'], {'parse_dates': "['Date']"}), "('http://ichart.yahoo.com/table.csv?s=MSFT&a=0&b=1&c=2000',\n parse_dates=['Date'])\n", (258, 343), True, 'import pandas as pd\n'), ((355, 450), 'pandas.read_csv', 'pd.read_csv', (['"""http://ichart.yahoo.com/table.csv?s=IBM&a=0&b=1&c=2000"""'], {'parse_dates': "['Date']"}), "('http://ichart.yahoo.com/table.csv?s=IBM&a=0&b=1&c=2000',\n parse_dates=['Date'])\n", (366, 450), True, 'import pandas as pd\n'), ((468, 583), 'collections.OrderedDict', 'OrderedDict', ([], {'AAPL': "AAPL[['Date', 'Adj Close']]", 'MSFT': "MSFT[['Date', 'Adj Close']]", 'IBM': "IBM[['Date', 'Adj Close']]"}), "(AAPL=AAPL[['Date', 'Adj Close']], MSFT=MSFT[['Date',\n 'Adj Close']], IBM=IBM[['Date', 'Adj Close']])\n", (479, 583), False, 'from collections import OrderedDict\n'), ((631, 678), 'pandas.concat', 'pd.concat', (['xyvalues'], {'axis': '(1)', 'names': "['l0', 'l1']"}), "(xyvalues, axis=1, names=['l0', 'l1'])\n", (640, 678), True, 'import pandas as pd\n'), ((721, 800), 'bokeh.charts.TimeSeries', 'TimeSeries', (['df'], {'title': '"""timeseries, pd_input"""', 'filename': '"""stocks_timeseries.html"""'}), "(df, title='timeseries, pd_input', filename='stocks_timeseries.html')\n", (731, 800), False, 'from bokeh.charts import TimeSeries\n')] |
#################################################################################
###### This file has been adapted from code provided in the US COVID19 forecast hub:
###### URL of original file
###### The original file has been provided under the MIT license, and so is this adapted version.
#################################################################################
# Before executing the script, we need selenium. Run `pip install urllib3`
import shutil
import zipfile
import os
import sys
import requests
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import time
from webdriver_manager.chrome import ChromeDriverManager
from pathlib import Path
def download_covid_zip_files(path):
url = "https://covid-19.bsvgateway.org/"
options = webdriver.ChromeOptions()
prefs = {'download.default_directory': path}
options.add_argument('--no-sandbox')
options.add_argument('--headless')
options.add_argument('--disable-dev-shm-usage')
options.add_argument('--disable-gpu')
options.add_experimental_option('prefs', prefs)
driver = webdriver.Chrome(ChromeDriverManager().install(),
chrome_options=options)
driver.get(url)
time.sleep(3)
try:
# Get Global data
element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, "global-model-outputs-links")))
rows = element.find_elements(By.TAG_NAME, "tr")
# Get the columns (all the column 2)
time.sleep(3)
for row in rows[1:]:
cols = row.find_elements(By.TAG_NAME, "td")
cols = cols[0:4]
for col in cols:
# extract download path
elements = col.find_elements(By.TAG_NAME, "a")
for ele in elements:
name = ele.get_attribute('href').split('/')[-1]
filepath = path + '/' + name
print(filepath)
# check if already downloaded
if os.path.exists(filepath):
continue
else:
# download file
driver.get(ele.get_attribute('href'))
time.sleep(4)
finally:
driver.quit()
if __name__ == '__main__':
try:
path = os.path.join(os.getcwd(), "data-raw", "LANL")
download_covid_zip_files(path)
except IndexError:
path=str(Path.cwd().parent.parent.joinpath("data-raw", "LANL"))
download_covid_zip_files(path)
| [
"os.path.exists",
"selenium.webdriver.ChromeOptions",
"selenium.webdriver.support.ui.WebDriverWait",
"pathlib.Path.cwd",
"time.sleep",
"os.getcwd",
"selenium.webdriver.support.expected_conditions.presence_of_element_located",
"webdriver_manager.chrome.ChromeDriverManager"
] | [((903, 928), 'selenium.webdriver.ChromeOptions', 'webdriver.ChromeOptions', ([], {}), '()\n', (926, 928), False, 'from selenium import webdriver\n'), ((1345, 1358), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1355, 1358), False, 'import time\n'), ((1624, 1637), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1634, 1637), False, 'import time\n'), ((1444, 1513), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'EC.presence_of_element_located', (["(By.ID, 'global-model-outputs-links')"], {}), "((By.ID, 'global-model-outputs-links'))\n", (1474, 1513), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((2516, 2527), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2525, 2527), False, 'import os\n'), ((1234, 1255), 'webdriver_manager.chrome.ChromeDriverManager', 'ChromeDriverManager', ([], {}), '()\n', (1253, 1255), False, 'from webdriver_manager.chrome import ChromeDriverManager\n'), ((1412, 1437), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['driver', '(10)'], {}), '(driver, 10)\n', (1425, 1437), False, 'from selenium.webdriver.support.ui import WebDriverWait\n'), ((2190, 2214), 'os.path.exists', 'os.path.exists', (['filepath'], {}), '(filepath)\n', (2204, 2214), False, 'import os\n'), ((2401, 2414), 'time.sleep', 'time.sleep', (['(4)'], {}), '(4)\n', (2411, 2414), False, 'import time\n'), ((2628, 2638), 'pathlib.Path.cwd', 'Path.cwd', ([], {}), '()\n', (2636, 2638), False, 'from pathlib import Path\n')] |
import sys
from common import mymath
def search():
m = {}
destFreq = 5
for i in range(10000):
v = i ** 3
freq = mymath.hashDigitCounter(v)
print(v, freq)
if freq not in m.keys():
m[freq] = 1
else:
m[freq] += 1
if m[freq] == destFreq:
ret = freq
break
def enumRet():
ret = mymath.hashDigitCounter(589323567104)
print("list...", ret)
for i in range(10000):
v = i ** 3
freq = mymath.hashDigitCounter(v)
if freq == ret:
print(i, v, freq)
break
if __name__ == "__main__":
# search()
enumRet()
| [
"common.mymath.hashDigitCounter"
] | [((400, 437), 'common.mymath.hashDigitCounter', 'mymath.hashDigitCounter', (['(589323567104)'], {}), '(589323567104)\n', (423, 437), False, 'from common import mymath\n'), ((142, 168), 'common.mymath.hashDigitCounter', 'mymath.hashDigitCounter', (['v'], {}), '(v)\n', (165, 168), False, 'from common import mymath\n'), ((525, 551), 'common.mymath.hashDigitCounter', 'mymath.hashDigitCounter', (['v'], {}), '(v)\n', (548, 551), False, 'from common import mymath\n')] |
from time import sleep
import docker
from pathlib import Path
from maestro_agent.services.docker import DockerContainerStatus, JmeterDocker
from maestro_agent.services.agent.hooks import AgentHooks
from maestro_agent.logging import Logger
from maestro_agent.services.jmeter.container import JmeterContainerStateManager
from maestro_agent.services.maestro_api.run_log import RunLogApi
from maestro_agent.app_state import ApplicationState
from maestro_agent.services.running_test.files import RunningTestFiles
from maestro_agent.settings import JMETER_RUN_LOGS_PATH
def run_jmeter_container_handler(finish, finished, failed, run, agent):
CONTAINER_CHECK_TIMEOUT = 5.0
try:
JmeterContainerStateManager.clean_old_containers()
agent_hooks = AgentHooks(run_id=run.id, agent_id=agent.id)
jmeter_docker = JmeterDocker(run=run)
jmeter_docker.run_container()
def upload_logs_file():
logs_path = Path(JMETER_RUN_LOGS_PATH % run.id)
if logs_path.is_file():
run_log_file = open(logs_path, "r")
print(run_log_file)
RunLogApi.upload_log_file(run.id, agent.id, run_log_file)
def finish_test(status):
Logger.debug("Test is finished. Jmeter container status=%s" % status)
agent_hooks.finished()
finished("Test is finished")
while finish() is False:
try:
running_container = jmeter_docker.get_running_container()
if (
running_container.status == DockerContainerStatus.running
or running_container.status == DockerContainerStatus.created
):
Logger.debug("Jmeter container is running...")
sleep(CONTAINER_CHECK_TIMEOUT)
else:
finish_test(running_container.status)
except docker.errors.NotFound:
finish_test("CONTAINER_NOT_FOUND")
upload_logs_file()
# Clean up all data that was created during test execution
running_test_files = RunningTestFiles(run_id=run.id)
running_test_files.clean_up_files()
except Exception as e:
failed(e)
JmeterContainerStateManager.clean_old_containers()
ApplicationState.available()
| [
"maestro_agent.services.docker.JmeterDocker",
"maestro_agent.logging.Logger.debug",
"pathlib.Path",
"maestro_agent.services.jmeter.container.JmeterContainerStateManager.clean_old_containers",
"maestro_agent.services.agent.hooks.AgentHooks",
"time.sleep",
"maestro_agent.app_state.ApplicationState.available",
"maestro_agent.services.maestro_api.run_log.RunLogApi.upload_log_file",
"maestro_agent.services.running_test.files.RunningTestFiles"
] | [((2245, 2295), 'maestro_agent.services.jmeter.container.JmeterContainerStateManager.clean_old_containers', 'JmeterContainerStateManager.clean_old_containers', ([], {}), '()\n', (2293, 2295), False, 'from maestro_agent.services.jmeter.container import JmeterContainerStateManager\n'), ((2300, 2328), 'maestro_agent.app_state.ApplicationState.available', 'ApplicationState.available', ([], {}), '()\n', (2326, 2328), False, 'from maestro_agent.app_state import ApplicationState\n'), ((691, 741), 'maestro_agent.services.jmeter.container.JmeterContainerStateManager.clean_old_containers', 'JmeterContainerStateManager.clean_old_containers', ([], {}), '()\n', (739, 741), False, 'from maestro_agent.services.jmeter.container import JmeterContainerStateManager\n'), ((764, 808), 'maestro_agent.services.agent.hooks.AgentHooks', 'AgentHooks', ([], {'run_id': 'run.id', 'agent_id': 'agent.id'}), '(run_id=run.id, agent_id=agent.id)\n', (774, 808), False, 'from maestro_agent.services.agent.hooks import AgentHooks\n'), ((833, 854), 'maestro_agent.services.docker.JmeterDocker', 'JmeterDocker', ([], {'run': 'run'}), '(run=run)\n', (845, 854), False, 'from maestro_agent.services.docker import DockerContainerStatus, JmeterDocker\n'), ((2118, 2149), 'maestro_agent.services.running_test.files.RunningTestFiles', 'RunningTestFiles', ([], {'run_id': 'run.id'}), '(run_id=run.id)\n', (2134, 2149), False, 'from maestro_agent.services.running_test.files import RunningTestFiles\n'), ((950, 985), 'pathlib.Path', 'Path', (['(JMETER_RUN_LOGS_PATH % run.id)'], {}), '(JMETER_RUN_LOGS_PATH % run.id)\n', (954, 985), False, 'from pathlib import Path\n'), ((1231, 1300), 'maestro_agent.logging.Logger.debug', 'Logger.debug', (["('Test is finished. Jmeter container status=%s' % status)"], {}), "('Test is finished. Jmeter container status=%s' % status)\n", (1243, 1300), False, 'from maestro_agent.logging import Logger\n'), ((1127, 1184), 'maestro_agent.services.maestro_api.run_log.RunLogApi.upload_log_file', 'RunLogApi.upload_log_file', (['run.id', 'agent.id', 'run_log_file'], {}), '(run.id, agent.id, run_log_file)\n', (1152, 1184), False, 'from maestro_agent.services.maestro_api.run_log import RunLogApi\n'), ((1721, 1767), 'maestro_agent.logging.Logger.debug', 'Logger.debug', (['"""Jmeter container is running..."""'], {}), "('Jmeter container is running...')\n", (1733, 1767), False, 'from maestro_agent.logging import Logger\n'), ((1788, 1818), 'time.sleep', 'sleep', (['CONTAINER_CHECK_TIMEOUT'], {}), '(CONTAINER_CHECK_TIMEOUT)\n', (1793, 1818), False, 'from time import sleep\n')] |
# Copyright (C) 2022 Dremio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Script for formatting notebook json for readable git diffs."""
import json
from pathlib import Path
from typing import Any
def _pretty_print_json_to_file(data: Any, path: Path) -> None:
print(f"Writing {path} ...")
with open(path, "w") as f:
json.dump(data, f, indent=2, sort_keys=True)
def _format_notebooks() -> None:
for path in Path(".").glob("*.ipynb"):
with open(path) as f:
notebook_dict = json.load(f)
for cell_dict in notebook_dict["cells"]:
# "source" used to be a list of lines but when dowloading a notebook from a running server
# those now get joined together into a single string field, which we undo here to keep an orderly diff
old_source = cell_dict.get("source")
if isinstance(old_source, list):
if len(old_source) != 1:
continue
old_source_text = old_source[0]
elif isinstance(old_source, str):
if not old_source:
cell_dict["source"] = []
continue
old_source_text = old_source
else:
raise Exception(f"Unhandled cell source type: {old_source}")
new_source = [
source_line.rstrip() + "\n"
for source_line in old_source_text.splitlines()
]
new_source[-1] = new_source[-1].rstrip("\n")
cell_dict["source"] = new_source
_pretty_print_json_to_file(notebook_dict, path)
if __name__ == "__main__":
_format_notebooks()
| [
"json.load",
"json.dump",
"pathlib.Path"
] | [((844, 888), 'json.dump', 'json.dump', (['data', 'f'], {'indent': '(2)', 'sort_keys': '(True)'}), '(data, f, indent=2, sort_keys=True)\n', (853, 888), False, 'import json\n'), ((940, 949), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (944, 949), False, 'from pathlib import Path\n'), ((1025, 1037), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1034, 1037), False, 'import json\n')] |
# <editor-fold desc="File Header">
# Copyright :
# Description : This python file contains all classes, functions and scripts related to logger and logging control
# </editor-fold>
# <editor-fold desc="Python file variables">
filename = 'common_logger_control' # Name of python filename (useful for logger traceability)
# </editor-fold>
# <editor-fold desc="Function to create logger">
def create_logger(parent_logger):
# Standard setup has the handler
# Import control
import logging
# Create logger
# Note : Only log levels >= this level will be executed for all handlers
logger = logging.getLogger(parent_logger)
logger.setLevel(logging.DEBUG)
# File handler
fh = logging.FileHandler(parent_logger + '.log')
fh.setLevel(logging.INFO)
# Console handler
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
# Create formatter
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# Assign formatter to handlers
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# Add handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
logger.info('Started main logger for %s' % parent_logger)
return logger
# </editor-fold>
# <editor-fold desc="Function to update logger handlers from config file">
def logger_config_update(config_filename, logger_name):
# <editor-fold desc="Import Control">
from common_config_control import config_open_file
import logging
import sys
# </editor-fold>
# <editor-fold desc="Define Constants">
fcn_name = 'logger_config_update'
# </editor-fold>
# <editor-fold desc="Function logger (handlers typically picked up from parent)">
logger_name = logger_name + '.' + fcn_name
fcn_logger = logging.getLogger(logger_name)
fcn_logger.debug('Updating logger configuration')
# </editor-fold>
# <editor-fold desc="Extract Parent Logger Name (required for reading config file)">
parent_logger_name = logger_name.split('.')[0]
# </editor-fold>
# <editor-fold desc="Read config file">
config = config_open_file(parent_logger_name, config_filename)
if config == 'Error': # Error when reading config file, likely file is missing
fcn_logger.error('Reading config file error, application will now exit')
sys.exit() # Exit application
fh_level = 'INFO'
sh_level = 'INFO'
config_read_flag = False
try:
fh_level = config[parent_logger_name + '-Logger']['file_handler_level'] # String : Reporting level file handlers
sh_level = config[parent_logger_name + '-Logger']['stream_handler_level'] # String : Reporting level stream handlers
config_read_flag = True # Indicate config file section parsed successfully
except KeyError as error:
fcn_logger.debug('Could not find config section %s to update Logger', error)
# </editor-fold>
# <editor-fold desc="Update Logger Handlers With Settings From Config File">
if config_read_flag:
try:
parent_logger = logging.getLogger(parent_logger_name)
for handler in parent_logger.handlers:
handler_type = handler.__class__.__name__
if handler_type == "FileHandler":
handler.setLevel(fh_level)
elif handler_type == "StreamHandler":
handler.setLevel(sh_level)
fcn_logger.debug('Logger configuration updated successfully')
except KeyError as error:
fcn_logger.error(error)
# </editor-fold>
# <editor-fold desc="Return config object as it's been read">
return config
# </editor-fold>
# </editor-fold>
# <editor-fold desc="Function to list all currently active loggers">
def logger_list_active(parent_logger):
# Import control
import logging
# Constants
fcn_name = 'logger_list_active'
# Function logger (handlers typically picked up from parent)
logger_name = parent_logger + '.' + filename + '.' + fcn_name
fcn_logger = logging.getLogger(logger_name)
fcn_logger.info('Getting dictionary of all active loggers')
# Get a dictionary of all loggers
loggers_dict = [logging.getLogger(name) for name in logging.root.manager.loggerDict]
# Update log
fcn_logger.info('Currently there are %d loggers active' % (len(loggers_dict)-3))
# Return dictionary
return loggers_dict
# </editor-fold>
# <editor-fold desc="Main script to run code or tests">
if __name__ == "__main__":
# Define variables
lgr_name = 'TesterApp'
cfg_filename = 'config.ini'
# Create logger
app_logger = create_logger(lgr_name)
# Test messages
app_logger.debug('Debug message')
app_logger.info('Info message')
app_logger.warning('Warning message')
app_logger.error('Error message')
# Update logger from config file
logger_config_update(cfg_filename, lgr_name)
# Test messages
app_logger.debug('Debug message')
app_logger.info('Info message')
app_logger.warning('Warning message')
app_logger.error('Error message')
logger_list_active(lgr_name)
# </editor-fold>
| [
"logging.getLogger",
"logging.StreamHandler",
"logging.Formatter",
"logging.FileHandler",
"common_config_control.config_open_file",
"sys.exit"
] | [((615, 647), 'logging.getLogger', 'logging.getLogger', (['parent_logger'], {}), '(parent_logger)\n', (632, 647), False, 'import logging\n'), ((712, 755), 'logging.FileHandler', 'logging.FileHandler', (["(parent_logger + '.log')"], {}), "(parent_logger + '.log')\n", (731, 755), False, 'import logging\n'), ((818, 841), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (839, 841), False, 'import logging\n'), ((912, 985), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (929, 985), False, 'import logging\n'), ((1809, 1839), 'logging.getLogger', 'logging.getLogger', (['logger_name'], {}), '(logger_name)\n', (1826, 1839), False, 'import logging\n'), ((2135, 2188), 'common_config_control.config_open_file', 'config_open_file', (['parent_logger_name', 'config_filename'], {}), '(parent_logger_name, config_filename)\n', (2151, 2188), False, 'from common_config_control import config_open_file\n'), ((4076, 4106), 'logging.getLogger', 'logging.getLogger', (['logger_name'], {}), '(logger_name)\n', (4093, 4106), False, 'import logging\n'), ((2362, 2372), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2370, 2372), False, 'import sys\n'), ((4230, 4253), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (4247, 4253), False, 'import logging\n'), ((3091, 3128), 'logging.getLogger', 'logging.getLogger', (['parent_logger_name'], {}), '(parent_logger_name)\n', (3108, 3128), False, 'import logging\n')] |
from django.urls import path
from django.conf.urls import url, include
from . import views
from rest_framework import serializers, viewsets, routers
router = routers.DefaultRouter()
router.register(r'passwords', views.PasswordViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
path(r'create_password',views.create_password),
url(r'^health_check/', include('health_check.urls')),
]
| [
"django.conf.urls.include",
"django.urls.path",
"rest_framework.routers.DefaultRouter"
] | [((160, 183), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (181, 183), False, 'from rest_framework import serializers, viewsets, routers\n'), ((381, 427), 'django.urls.path', 'path', (['"""create_password"""', 'views.create_password'], {}), "('create_password', views.create_password)\n", (385, 427), False, 'from django.urls import path\n'), ((270, 290), 'django.conf.urls.include', 'include', (['router.urls'], {}), '(router.urls)\n', (277, 290), False, 'from django.conf.urls import url, include\n'), ((316, 374), 'django.conf.urls.include', 'include', (['"""rest_framework.urls"""'], {'namespace': '"""rest_framework"""'}), "('rest_framework.urls', namespace='rest_framework')\n", (323, 374), False, 'from django.conf.urls import url, include\n'), ((456, 484), 'django.conf.urls.include', 'include', (['"""health_check.urls"""'], {}), "('health_check.urls')\n", (463, 484), False, 'from django.conf.urls import url, include\n')] |
"""
* @author <NAME>
* @email <EMAIL>
* @desc Script to recieve all Messages send to the Arduino on this computer. Used as Mockup for the Chat-View of the Frontend. Needs to be killed manually.
"""
from multiprocessing.connection import Listener
from helperClasses import DataBaseUtilities
import sys
#used for stress testing the connection for long messages, check for equality instead of guessing by reading the output by your own
message = '<KEY>'
while(True):
address = ('localhost', 6300)
listener = Listener(address, authkey=b'PyToPyCom')
con = listener.accept()
msg = con.recv()
if ((str.encode('\n') == msg) or (str.encode('') == msg)):
listener.close()
continue
sys.stdout.write("--> {}".format(msg.decode()))
listener.close()
| [
"multiprocessing.connection.Listener"
] | [((519, 558), 'multiprocessing.connection.Listener', 'Listener', (['address'], {'authkey': "b'PyToPyCom'"}), "(address, authkey=b'PyToPyCom')\n", (527, 558), False, 'from multiprocessing.connection import Listener\n')] |
"""
Helper functins for painless option parsing.
The key to keeping "options" management simple lies in recognizing that differs (crucially) from "configuration" management:
- By definition, option settings should always be -optional- (so we do not need to keep track of
which option keys are required or not - only whether they are permitted or not).
- Option values should ideally by very simple (boolean, strings, numerics) and in most cases should not
require validation (at least at the 'option-passing') level. That said, values can be of any type -
but if any validation or type checking is required (beyond that the types match exactly what is prent
in the default options dict), then it needs to be done externally.
- Option setting (and generation) happens in runtime -- most typically as an `options` dict passed to
some function. So for the purpose of "option" management, we need not be concerned about locating and
parsing config files.
Further, the scope will most likely be at module, class or instance level. So this means we don't need to worry
about walking up class trees to find some default options dict.
So at least 90 percent of the time - this boils down to "carefully merging dicts".
The most likely use case goes about like this:
```
from caixa.options import resolve_options
DEFAULTOPTIONS = {'allow-snorkeling': True, 'rescue-after': 10}
# then somewhere in a module or class
def rescue_person(self, instructor: str, options: Dict[str,any]):
niceopts = resolve_options(DEFAULTOPTIONS, options)
```
That's it, and most of the time, it should be all you need.
TODO: enforce type compliance on values.
"""
from copy import deepcopy
from typing import Dict, Any
def update_strict(targetdict: dict, otherdict: dict, forcedeep: bool = True) -> None:
"""
Like `dict.update()` except:
- We require that every key in the :otherdict be present in the :targetdict we are copying into
- Only keys of type `str` (and of non-zero-length) are allowed
- A `deepcopy` is performed on each target value (unless `forcedeep` evaluates to False)
"""
for (k, v) in otherdict.items():
assert_valid_option_key(k)
if k not in targetdict:
raise ValueError(f"invalid option key '{k}' - not recognized")
targetdict[k] = deepcopy(v) if forcedeep else v
def assert_valid_option_key(k: str) -> None:
if not isinstance(k, str):
raise ValueError(f"invalid option key '{k}' - must be of type str")
if len(k) == 0:
raise ValueError(f"invalid option key '{k}' - cannot be the empty string")
def resolve_options(default_options: Dict[str,Any], update_options: Dict[str,Any]) -> Dict[str,Any]:
"""
Returns a new options dict, with the :default_options and :update_options dicts safely merged.
"""
if update_options is None:
update_options = {}
newoptions = deepcopy(default_options)
update_strict(newoptions, update_options)
return newoptions
| [
"copy.deepcopy"
] | [((2954, 2979), 'copy.deepcopy', 'deepcopy', (['default_options'], {}), '(default_options)\n', (2962, 2979), False, 'from copy import deepcopy\n'), ((2371, 2382), 'copy.deepcopy', 'deepcopy', (['v'], {}), '(v)\n', (2379, 2382), False, 'from copy import deepcopy\n')] |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# author:jingtongyu
# datetime:2021/6/29 下午9:45
# software: PyCharm
import datetime
import decimal
import uuid
from flask import Flask as _Flask
from flask.json import JSONEncoder as _JSONEncoder
from common.error_code import ServerError
class JSONEncoder(_JSONEncoder):
def default(self, o):
if hasattr(o, 'keys') and hasattr(o, '__getitem__'):
return dict(o)
if isinstance(o, datetime.datetime):
# 格式化时间
return o.strftime("%Y-%m-%d %H:%M:%S")
if isinstance(o, datetime.date):
# 格式化日期
return o.strftime('%Y-%m-%d')
if isinstance(o, decimal.Decimal):
# 格式化高精度数字
return str(o)
if isinstance(o, uuid.UUID):
# 格式化uuid
return str(o)
if isinstance(o, bytes):
# 格式化字节数据
return o.decode("utf-8")
# if isinstance(o, (bool, float, int)):
# # 返回元数据
# return o
raise ServerError()
class Flask(_Flask):
json_encoder = JSONEncoder | [
"common.error_code.ServerError"
] | [((1027, 1040), 'common.error_code.ServerError', 'ServerError', ([], {}), '()\n', (1038, 1040), False, 'from common.error_code import ServerError\n')] |
import unittest
from .threads import *
def f1():
pass
def f2(stop_event):
pass
def f3(**kwargs):
pass
run_last = 0.05
wait_exit = 1
class WhiteBox(unittest.TestCase):
def test_parameter(self):
def test0(Thread):
def test(func, noerror, event_name=None, kwargs=None):
try:
Thread(target=func, event_name=event_name, kwargs=kwargs)
except ValueError as e:
noerror = not noerror
self.assertTrue(noerror)
test(f1, True)
test(f1, False, 'stop_event')
test(f1, True, kwargs={'stop_event': None})
test(f2, True)
test(f2, True, 'stop_event')
test(f2, False, 'stop_event', {'stop_event': None})
test(f3, True)
test(f3, True, 'stop_event')
test(f3, False, 'stop_event', {'stop_event': None})
test0(StoppableThread)
test0(ReStartableThread)
def test_stop(self):
def test(Thread):
stopped = [False]
def f(stop_event):
while 1:
if stop_event.is_set():
break
stopped[0] = True
t = Thread(target=f)
t.start()
t.join(run_last)
t.stop()
t.join(wait_exit)
self.assertTrue(stopped[0])
test(StoppableThread)
test(ReStartableThread)
def test_restart(self):
s = [0]
def f(stop_event):
s[0] += 1
while 1:
if stop_event.is_set():
break
s[0] += 10
t = ReStartableThread(target=f)
t.start()
t.join(run_last)
self.assertEqual(s[0], 1)
t.stop()
t.join(wait_exit)
self.assertEqual(s[0], 11)
t.start()
t.join(run_last)
self.assertEqual(s[0], 12)
t.stop()
t.join(wait_exit)
self.assertEqual(s[0], 22)
def test_other(self):
t = ReStartableThread(target=f2)
t.stop()
t.stop()
t.start()
t.start()
t.stop()
if __name__ == '__main__':
unittest.main()
| [
"unittest.main"
] | [((2217, 2232), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2230, 2232), False, 'import unittest\n')] |
import os
from subprocess import Popen, PIPE, STDOUT
import logging
import shutil
import semantic_version as sv
import sys
import virtualenv
import debpackager.packages.conf.configurations as cfg
logger = logging.getLogger(__name__)
def get_new_version(extra_args):
custom_version = extra_args.get('custom_version')
if custom_version:
if sv.validate(custom_version):
return custom_version
logger.error(
'The given version format is invalid. value was: {}, please set '
'version in format (major.minor.patch) like - 2.1.0'.format(
custom_version))
raise Exception(
'version format is invalid. supplied version: {}'
.format(custom_version))
version = extra_args.get('pom').project.get('version')
if not version:
raise Exception(
'Could not get a valid version, please check project.json')
if not sv.validate(version):
raise Exception(
'version format is invalid. value is: {}'.format(version))
version = sv.Version(version)
return str(version)
def run_command(command):
if type(command) == list:
command = " ".join(command)
logger.debug('running command: {}'.format(command))
proc = Popen(command, stdout=PIPE, stderr=STDOUT, shell=True)
while proc.poll() is None:
line = proc.stdout.readline().strip()
if line:
logger.info(line.decode('ascii'))
proc.wait()
stdo, erro = proc.communicate()
if stdo:
logger.info(stdo)
if erro:
logger.info(erro)
exit_code = proc.returncode
if exit_code != 0:
raise Exception('failed to run command, '
'{} with exit code {}'.format(command, exit_code))
return proc.returncode
def create_virtual_env(project_path, install_path, ve_args):
# remove existing virtual env if exists
ve_path = os.path.join(project_path, cfg.VIRTUAL_ENV_PATH)
if os.path.exists(ve_path):
shutil.rmtree(ve_path)
try:
logger.info('creating virtual env')
sys.argv = ['']
if '--no-wheel' not in ve_args:
sys.argv.append('--no-wheel')
sys.argv.extend(ve_args)
sys.argv.append(ve_path)
try:
virtualenv.main()
except SystemExit as sysext:
if sysext.code != 0:
raise SystemExit(sysext)
except Exception:
logger.exception('failed to create virtualenv: ')
raise Exception('failed to create virtualenv!')
try:
logger.info('installing requirements for virtualenv')
# update pip to latest version
run_command(['{}/{}/bin/pip'.format(project_path,
cfg.VIRTUAL_ENV_PATH),
'install',
'-U',
'pip'])
if not os.path.exists('{}/requirements.txt'.format(project_path)):
logger.warning('requirements.txt not found')
return ve_path
run_command(['{}/{}/bin/pip'.format(project_path,
cfg.VIRTUAL_ENV_PATH),
'install',
'-r',
'{}/requirements.txt'.format(project_path)])
virtualenv.make_environment_relocatable(ve_path)
fixup_scripts(install_path, ve_path)
except Exception:
logger.exception('failed to install requirements! error message:')
raise Exception('fail to install requirements.')
return ve_path
def fixup_scripts(deploy_ve_home_dir, ve_path):
""" fixes the shebang line in virtualenv bin dir scripts
to be where the package virtualenv was deployed
:param deploy_ve_home_dir:
:param ve_path: (str) path where virtualenv is located after installation
"""
ok_abs_scripts = ['python', 'python%s' % sys.version[:3],
'activate', 'activate.bat', 'activate_this.py',
'activate.fish', 'activate.csh']
if not deploy_ve_home_dir:
return
bin_dir = os.path.join(ve_path, 'bin')
# This is what we expect at the top of scripts:
shebang = '#!{} python{}'.format('/usr/bin/env', sys.version[:3])
# This is what we'll put:
new_shebang = '#!{}/{}/bin/python{}'.format(deploy_ve_home_dir,
cfg.VIRTUAL_ENV_PATH,
sys.version[:3])
for filename in os.listdir(bin_dir):
filename = os.path.join(bin_dir, filename)
if not os.path.isfile(filename):
# ignore subdirs, e.g. .svn ones.
continue
lines = None
with open(filename, 'rb') as f:
try:
lines = f.read().decode('utf-8').splitlines()
except UnicodeDecodeError:
# This is probably a binary program instead
# of a script, so just ignore it.
continue
if not lines:
logger.warn('Script %s is an empty file' % filename)
continue
old_shebang = lines[0].strip()
old_shebang = old_shebang[0:2] + os.path.normcase(old_shebang[2:])
if not old_shebang.startswith(shebang):
if os.path.basename(filename) in ok_abs_scripts:
logger.debug('Cannot make script %s relative' % filename)
elif lines[0].strip() == new_shebang:
logger.info('Script %s has already been made relative' % filename)
else:
logger.warn('Script %s cannot be made relative '
'(it\'s not a normal script that starts with %s)'
% (filename, shebang))
continue
logger.info('Making script %s relative' % filename)
with open(filename, 'wb') as f:
f.write('\n'.join([new_shebang] + lines[1:]).encode('utf-8'))
def install_deb_dependencies(extra_args):
debs_to_install = extra_args.get('pom').project.get('deb_dependencies', [])
if not debs_to_install:
logger.warning('Not installing deb dependencies')
return
command = ['sudo', 'apt-get', 'install', '-y', '--force-yes']
command.extend(debs_to_install)
run_command(command)
if __name__ == '__main__':
create_virtual_env('/tmp', '/tmp/proj', [ '--always-copy', '-p', 'python2.7']) | [
"logging.getLogger",
"os.path.exists",
"os.listdir",
"semantic_version.validate",
"subprocess.Popen",
"sys.argv.append",
"os.path.join",
"os.path.isfile",
"virtualenv.main",
"os.path.basename",
"sys.argv.extend",
"shutil.rmtree",
"semantic_version.Version",
"virtualenv.make_environment_relocatable",
"os.path.normcase"
] | [((208, 235), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (225, 235), False, 'import logging\n'), ((1078, 1097), 'semantic_version.Version', 'sv.Version', (['version'], {}), '(version)\n', (1088, 1097), True, 'import semantic_version as sv\n'), ((1286, 1340), 'subprocess.Popen', 'Popen', (['command'], {'stdout': 'PIPE', 'stderr': 'STDOUT', 'shell': '(True)'}), '(command, stdout=PIPE, stderr=STDOUT, shell=True)\n', (1291, 1340), False, 'from subprocess import Popen, PIPE, STDOUT\n'), ((1941, 1989), 'os.path.join', 'os.path.join', (['project_path', 'cfg.VIRTUAL_ENV_PATH'], {}), '(project_path, cfg.VIRTUAL_ENV_PATH)\n', (1953, 1989), False, 'import os\n'), ((1997, 2020), 'os.path.exists', 'os.path.exists', (['ve_path'], {}), '(ve_path)\n', (2011, 2020), False, 'import os\n'), ((4108, 4136), 'os.path.join', 'os.path.join', (['ve_path', '"""bin"""'], {}), "(ve_path, 'bin')\n", (4120, 4136), False, 'import os\n'), ((4513, 4532), 'os.listdir', 'os.listdir', (['bin_dir'], {}), '(bin_dir)\n', (4523, 4532), False, 'import os\n'), ((359, 386), 'semantic_version.validate', 'sv.validate', (['custom_version'], {}), '(custom_version)\n', (370, 386), True, 'import semantic_version as sv\n'), ((945, 965), 'semantic_version.validate', 'sv.validate', (['version'], {}), '(version)\n', (956, 965), True, 'import semantic_version as sv\n'), ((2030, 2052), 'shutil.rmtree', 'shutil.rmtree', (['ve_path'], {}), '(ve_path)\n', (2043, 2052), False, 'import shutil\n'), ((2221, 2245), 'sys.argv.extend', 'sys.argv.extend', (['ve_args'], {}), '(ve_args)\n', (2236, 2245), False, 'import sys\n'), ((2254, 2278), 'sys.argv.append', 'sys.argv.append', (['ve_path'], {}), '(ve_path)\n', (2269, 2278), False, 'import sys\n'), ((3312, 3360), 'virtualenv.make_environment_relocatable', 'virtualenv.make_environment_relocatable', (['ve_path'], {}), '(ve_path)\n', (3351, 3360), False, 'import virtualenv\n'), ((4553, 4584), 'os.path.join', 'os.path.join', (['bin_dir', 'filename'], {}), '(bin_dir, filename)\n', (4565, 4584), False, 'import os\n'), ((2182, 2211), 'sys.argv.append', 'sys.argv.append', (['"""--no-wheel"""'], {}), "('--no-wheel')\n", (2197, 2211), False, 'import sys\n'), ((2304, 2321), 'virtualenv.main', 'virtualenv.main', ([], {}), '()\n', (2319, 2321), False, 'import virtualenv\n'), ((4600, 4624), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (4614, 4624), False, 'import os\n'), ((5196, 5229), 'os.path.normcase', 'os.path.normcase', (['old_shebang[2:]'], {}), '(old_shebang[2:])\n', (5212, 5229), False, 'import os\n'), ((5294, 5320), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (5310, 5320), False, 'import os\n')] |
from django.db import models
from django.contrib.gis.db import models
class Category(models.Model):
name = models.CharField("Category name",
max_length = 50, )
visible = models.BooleanField("Is category immediately visible", default=True)
def __str__(self):
return self.name
class Meta:
verbose_name_plural = 'Categories'
class Building(models.Model):
name = models.CharField("Building name",
max_length = 50, )
location = models.PointField( srid=4326, geography=True )
category = models.ForeignKey(Category, on_delete=models.CASCADE,
verbose_name = "Building's category")
def __str__(self):
return self.name
| [
"django.contrib.gis.db.models.BooleanField",
"django.contrib.gis.db.models.ForeignKey",
"django.contrib.gis.db.models.PointField",
"django.contrib.gis.db.models.CharField"
] | [((112, 160), 'django.contrib.gis.db.models.CharField', 'models.CharField', (['"""Category name"""'], {'max_length': '(50)'}), "('Category name', max_length=50)\n", (128, 160), False, 'from django.contrib.gis.db import models\n'), ((187, 255), 'django.contrib.gis.db.models.BooleanField', 'models.BooleanField', (['"""Is category immediately visible"""'], {'default': '(True)'}), "('Is category immediately visible', default=True)\n", (206, 255), False, 'from django.contrib.gis.db import models\n'), ((407, 455), 'django.contrib.gis.db.models.CharField', 'models.CharField', (['"""Building name"""'], {'max_length': '(50)'}), "('Building name', max_length=50)\n", (423, 455), False, 'from django.contrib.gis.db import models\n'), ((483, 527), 'django.contrib.gis.db.models.PointField', 'models.PointField', ([], {'srid': '(4326)', 'geography': '(True)'}), '(srid=4326, geography=True)\n', (500, 527), False, 'from django.contrib.gis.db import models\n'), ((545, 639), 'django.contrib.gis.db.models.ForeignKey', 'models.ForeignKey', (['Category'], {'on_delete': 'models.CASCADE', 'verbose_name': '"""Building\'s category"""'}), '(Category, on_delete=models.CASCADE, verbose_name=\n "Building\'s category")\n', (562, 639), False, 'from django.contrib.gis.db import models\n')] |
import numpy as np
from scipy.linalg import solve_banded
'''
Reaction-diffusion equation (1D) solver
'''
def reactDiffuse1d(th):
D = 1 # diffisivity
L, m = 5.0, 100 # domain = [-L,L], using m subdivisions
T, n = 0.5, 10 # time = [0,T], using n time steps
dx = L*2/m
dt = T/n
U = np.zeros([m+1,n+1]) # store solutions
# form iteration matrix
a = D*dt/(dx*dx)
#r = np.r_[1 + 2 * a, -a, np.zeros(m - 1)]
# #A = toeplitz(r)
#A[0,1] = A[m,m-1] = -2*a
A_bands = [
np.r_[0, -2 * a, [-a] * (m - 1)],
np.ones(m + 1) * (1 + 2 * a),
np.r_[[-a] * (m - 1), -2 * a, 0]
] # banded matrix
x = np.linspace(-L,L,m+1)
U[:, 0] = 0.05 * np.exp(-5 * x * x) # initial condition
for i in range(n):
R = react(U[:, i], th)
try:
U[:, i + 1] = solve_banded((1, 1), A_bands, U[:, i] + R)
except:
U.fill(np.nan)
return U
#U[:,i+1] = np.linalg.solve(A, U[:,i] + R)
return U
def react(u,th):
# reaction term
R = th[0] * u - \
th[1] * np.power(u, 2) + \
th[2] * np.power(u, 3) + \
0 * np.power(u, 4)
return R | [
"numpy.ones",
"numpy.power",
"numpy.exp",
"numpy.zeros",
"numpy.linspace",
"scipy.linalg.solve_banded"
] | [((306, 330), 'numpy.zeros', 'np.zeros', (['[m + 1, n + 1]'], {}), '([m + 1, n + 1])\n', (314, 330), True, 'import numpy as np\n'), ((662, 687), 'numpy.linspace', 'np.linspace', (['(-L)', 'L', '(m + 1)'], {}), '(-L, L, m + 1)\n', (673, 687), True, 'import numpy as np\n'), ((705, 723), 'numpy.exp', 'np.exp', (['(-5 * x * x)'], {}), '(-5 * x * x)\n', (711, 723), True, 'import numpy as np\n'), ((560, 574), 'numpy.ones', 'np.ones', (['(m + 1)'], {}), '(m + 1)\n', (567, 574), True, 'import numpy as np\n'), ((839, 881), 'scipy.linalg.solve_banded', 'solve_banded', (['(1, 1)', 'A_bands', '(U[:, i] + R)'], {}), '((1, 1), A_bands, U[:, i] + R)\n', (851, 881), False, 'from scipy.linalg import solve_banded\n'), ((1152, 1166), 'numpy.power', 'np.power', (['u', '(4)'], {}), '(u, 4)\n', (1160, 1166), True, 'import numpy as np\n'), ((1121, 1135), 'numpy.power', 'np.power', (['u', '(3)'], {}), '(u, 3)\n', (1129, 1135), True, 'import numpy as np\n'), ((1086, 1100), 'numpy.power', 'np.power', (['u', '(2)'], {}), '(u, 2)\n', (1094, 1100), True, 'import numpy as np\n')] |
# Generated by Django 3.2.2 on 2021-05-15 15:15
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('core', '0004_create_basic_users'),
]
operations = [
migrations.AlterField(
model_name='user',
name='plan',
field=models.ForeignKey(default='B', on_delete=django.db.models.deletion.PROTECT, to='core.plan', verbose_name='plan'),
),
]
| [
"django.db.models.ForeignKey"
] | [((362, 478), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '"""B"""', 'on_delete': 'django.db.models.deletion.PROTECT', 'to': '"""core.plan"""', 'verbose_name': '"""plan"""'}), "(default='B', on_delete=django.db.models.deletion.PROTECT,\n to='core.plan', verbose_name='plan')\n", (379, 478), False, 'from django.db import migrations, models\n')] |
# Copyright 2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .base import Resource
from google.cloud import storage
class GcsBucket(Resource):
_internal_client = storage.Client()
ASSET_TYPE = "storage.googleapis.com/Bucket"
RESOURCE_ID_PATTERN = "\/\/storage.googleapis.com\/(.*)"
REQUIRED_PERMISSIONS = [
"storage.buckets.getIamPolicy",
"storage.buckets.setIamPolicy",
]
def _client(self):
(bucket,) = self._parsed_resource_id()
return self._internal_client.get_bucket(bucket)
def _get_policy_permissions(self):
return self._client().test_iam_permissions(
permissions=self.REQUIRED_PERMISSIONS
)
def _get_current_policy(self, resource_path):
return self._client().get_iam_policy()
def _get_updated_policy(self, resource_path):
policy = self._get_current_policy(resource_path)
policy.bindings.append(
{
"role": self._role,
"members": [self._new_member],
}
)
return policy
def _process_updated_iam_policy(self, resource, policy):
return self._client().set_iam_policy(policy)
def delete_test_instance(self):
self._client().delete()
@classmethod
def make_test_instance(cls):
bucket_name = cls.get_test_instance_name()
cls._internal_client.create_bucket(bucket_name)
return cls.get_test_instance(
"//storage.googleapis.com/{name}".format(name=bucket_name),
"roles/storage.legacyBucketReader",
) | [
"google.cloud.storage.Client"
] | [((714, 730), 'google.cloud.storage.Client', 'storage.Client', ([], {}), '()\n', (728, 730), False, 'from google.cloud import storage\n')] |
#!/usr/bin/env python
import librosa
import numpy as np
import pyworld as pw
import scipy
import imageio
from sklearn.preprocessing import normalize
class conf:
"""
Configuration Parameter Class
"""
"""
time length of preprocessed audio
"""
prep_audio_dataset_second=3
# sampling rate
sample_ratio = 16000 # 22050
"""
Short Time FFT window size
librosa default value 2048
stft returned value shape (1 + n_fft/2, t)
"""
n_fft = 256 #2048
"""
Encoderで縦・横方向に畳み込むサイズ倍率
Encが8レイヤ、各レイヤで行列サイズ1/2になるので 256
入力スペクトログラムの行・列のサイズはこの倍数とすること
"""
Encocer_Feature_Constant=2**7 #2**7:128@n_fft256 #2**8:256
"""
enable saving the label(specImage)
"""
enable_output_labelWav = True
"""
scaleArray()のscaleFactorを表示するか
"""
print_scaleFactor=False
"""
スペクトログラムへの変換時の最小強度
→ 対数とったときに-6が最小値になる
"""
eps= 10**-6
"""
強度スペクトログラムの正規化時のスケール倍率
"""
scale_abs=0.1
"""
強度スペクトログラムの正規化時のオフセット
epsから決定
"""
offset_abs=0.6
"""
位相スペクトログラムの正規化時のスケール倍率
"""
scale_phase=1/(np.pi*2)
"""
位相スペクトログラムの正規化時のオフセット
"""
offset_phase=0.5
def convert_to_wave(Dabs, Dphase):
D_hat = 10 ** Dabs * np.exp(1j*Dphase) #xp.exp(1j*Dphase)
y_hat = librosa.istft(D_hat)
return y_hat
def convert_to_spectrogram(waveNDArray):
"""
convert audio 1D Numpy Array to spectrogram 2D Numpy Array.
note. Dabs = np.log10(np.abs(D) + 10**-6)
:param waveNDArray:
:return: Dabs,Dphase
"""
# スペクトル・位相マップ 作成
D = librosa.stft(waveNDArray, n_fft=conf.n_fft) #D:np.ndarray [shape=(1 + n_fft/2, t), dtype=dtype]
Dabs = np.log10(np.abs(D) + conf.eps)
Dphase = np.angle(D)
return Dabs,Dphase
def padding_spectrogram(D):
"""
スペクトログラムの行列サイズをEncoderに特徴的な値の整数倍にする
Encが8レイヤ、各レイヤで行列サイズ1/2になるので、入力スペクトログラムの行・列のサイズは256の倍数とする
"""
D = D[0:D.shape[0]-1,:] #最後の行を削除 TODO: 0:-1
w_div,w_rem = divmod(D.shape[1], conf.Encocer_Feature_Constant)
D = np.pad(D, [(0,0), (0, conf.Encocer_Feature_Constant * (w_div + 1) - D.shape[1])],
'constant', constant_values = np.min(np.abs(D)))
return D
def anonymization(fs, waveNDArray, f0Value = 0, sp_strechRatio = np.random.uniform(0.6, 2, size=1), gaussian_s = 3):
"""
WAV音声データから話者情報を取り除いたWAV音声データを作成
label音声からinput音声作成用
:param path:
:param f0Value:
:param sp_strechRatio:
:return:
"""
waveNDArray = waveNDArray.astype(np.float)
_f0, t = pw.dio(waveNDArray, fs) # 基本周波数の抽出
f0 = pw.stonemask(waveNDArray, _f0, t, fs) # 基本周波数の修正
sp = pw.cheaptrick(waveNDArray, f0, t, fs) # スペクトル包絡の抽出
ap = pw.d4c(waveNDArray, f0, t, fs) # 非周期性指標の抽出
f0_fixed0 = np.ones(f0.shape) * f0Value
f0_median = np.median(f0)
sp_median = np.median(sp)
ap_median = np.median(ap)
# SPを高周波方向に伸縮
sp2 = np.ones_like(sp)*np.min(sp)
for f in range(sp2.shape[1]):
if(int(f / sp_strechRatio) >= sp.shape[1]): break
sp2[:, f] = sp[:, int(f / sp_strechRatio)]
# SP/APに正規分布ノイズ
sp_noised = sp2 + np.random.normal(sp_median,sp_median/10,sp2.shape)
ap_noised = ap + np.random.normal(ap_median,ap_median/10,ap.shape)
#ガウシアンフィルタ
sp_gaussian = scipy.ndimage.filters.gaussian_filter(sp_noised,gaussian_s)
ap_gaussian = scipy.ndimage.filters.gaussian_filter(ap_noised,gaussian_s)
# 音声復元
synthesized = pw.synthesize(f0_fixed0, sp, ap, fs)
return synthesized
def normalize_abs(a:np.ndarray) ->(np.ndarray,float,float):
return normalize(a,scale=conf.scale_abs,offset=conf.offset_abs)
def normalize_phase(a:np.ndarray)->(np.ndarray,float,float):
return normalize(a,scale=conf.scale_phase,offset=conf.offset_phase)
def normalize(ndArray, min=0, max=1,scale=None,offset=None):
"""
normalize ndArray.
(all ndArray values are clamped in min~max.)
:param ndArray:
:param min:
:param max:
:return: スケール後の配列、スケール倍率、オフセット
"""
if scale==None:
scale = (max-min) / (np.max(ndArray) - np.min(ndArray))
scaled = ndArray * scale
if offset==None:
offset = - np.min(scaled) + min
ret = scaled + offset
if(ret.min()<min) or (max<ret.max()):
print("warning:normalized value outrange (but cliped).check scale/offset value.")
print(f"original max/min:{ndArray.max()}/{ndArray.min()}")
print(f"original max/min:{ndArray.max()}/{ndArray.min()}")
if conf.print_scaleFactor:
print('scale:{}, offset:{}'.format(scale,offset))
return ret.clip(min,max), scale, offset
def denormalize_abs(a:np.ndarray)->np.ndarray:
return denormalize(a,scale=conf.scale_abs,offset=conf.offset_abs)
def denormalize_phase(a:np.ndarray) ->np.ndarray:
return denormalize(a,scale=conf.scale_phase,offset=conf.offset_phase)
def denormalize(ndArray:np.ndarray, scale:float,offset:float) ->np.ndarray:
return (ndArray - offset)/ scale
def clip_audio_length(audio_ndarray, sr, second):
"""
audio_ndarray の長さ[秒]をsecondになるようにカット・paddingする
:param audio_ndarray:
:param sr:
:return:
"""
if audio_ndarray.shape[0] > second * sr:
ret = audio_ndarray[:second * sr]
else:
ret = np.pad(audio_ndarray, [(0, second * sr - audio_ndarray.shape[0])], 'constant', constant_values=0)
assert ret.__len__() == second * sr , "audioのサイズが second[sec] * sr(sampling rate)[/sec]になっていない"
return ret
def save_spectrogram_asImage(Dabs,Dphase,savename):
"""
save ndarray matrix as image.
r:abs
g:phase
b:none
abs/phase must be clamped in 0~1.
:param Dabs:
:param Dphase:
:param savename: includes image extension (ex. '.png')
:return:
"""
# create image array -> (channel,row,col)
assert (0 <= Dabs.min()) & ( Dabs.max() <=1), f"Dabs must be in 0~1. min:{Dabs.min()}, max:{Dabs.max()}"
assert (0 <= Dphase.min()) & ( Dphase.max() <=1) , "Dphase must be in 0~1"
srcImg = (np.array([Dabs,Dphase,np.zeros(Dabs.shape)])*255).astype('uint8')
# reorder to channel last -> (row,col,channel)
srcImg = np.rollaxis(srcImg,0,3)
imageio.imsave(savename, srcImg)
| [
"librosa.istft",
"scipy.ndimage.filters.gaussian_filter",
"imageio.imsave",
"numpy.rollaxis",
"pyworld.synthesize",
"numpy.max",
"numpy.exp",
"pyworld.dio",
"pyworld.d4c",
"numpy.min",
"numpy.random.normal",
"numpy.abs",
"numpy.ones",
"numpy.ones_like",
"numpy.median",
"pyworld.stonemask",
"pyworld.cheaptrick",
"numpy.angle",
"numpy.pad",
"numpy.zeros",
"numpy.random.uniform",
"librosa.stft",
"sklearn.preprocessing.normalize"
] | [((1327, 1347), 'librosa.istft', 'librosa.istft', (['D_hat'], {}), '(D_hat)\n', (1340, 1347), False, 'import librosa\n'), ((1612, 1655), 'librosa.stft', 'librosa.stft', (['waveNDArray'], {'n_fft': 'conf.n_fft'}), '(waveNDArray, n_fft=conf.n_fft)\n', (1624, 1655), False, 'import librosa\n'), ((1764, 1775), 'numpy.angle', 'np.angle', (['D'], {}), '(D)\n', (1772, 1775), True, 'import numpy as np\n'), ((2295, 2328), 'numpy.random.uniform', 'np.random.uniform', (['(0.6)', '(2)'], {'size': '(1)'}), '(0.6, 2, size=1)\n', (2312, 2328), True, 'import numpy as np\n'), ((2560, 2583), 'pyworld.dio', 'pw.dio', (['waveNDArray', 'fs'], {}), '(waveNDArray, fs)\n', (2566, 2583), True, 'import pyworld as pw\n'), ((2605, 2642), 'pyworld.stonemask', 'pw.stonemask', (['waveNDArray', '_f0', 't', 'fs'], {}), '(waveNDArray, _f0, t, fs)\n', (2617, 2642), True, 'import pyworld as pw\n'), ((2664, 2701), 'pyworld.cheaptrick', 'pw.cheaptrick', (['waveNDArray', 'f0', 't', 'fs'], {}), '(waveNDArray, f0, t, fs)\n', (2677, 2701), True, 'import pyworld as pw\n'), ((2725, 2755), 'pyworld.d4c', 'pw.d4c', (['waveNDArray', 'f0', 't', 'fs'], {}), '(waveNDArray, f0, t, fs)\n', (2731, 2755), True, 'import pyworld as pw\n'), ((2829, 2842), 'numpy.median', 'np.median', (['f0'], {}), '(f0)\n', (2838, 2842), True, 'import numpy as np\n'), ((2859, 2872), 'numpy.median', 'np.median', (['sp'], {}), '(sp)\n', (2868, 2872), True, 'import numpy as np\n'), ((2889, 2902), 'numpy.median', 'np.median', (['ap'], {}), '(ap)\n', (2898, 2902), True, 'import numpy as np\n'), ((3299, 3359), 'scipy.ndimage.filters.gaussian_filter', 'scipy.ndimage.filters.gaussian_filter', (['sp_noised', 'gaussian_s'], {}), '(sp_noised, gaussian_s)\n', (3336, 3359), False, 'import scipy\n'), ((3377, 3437), 'scipy.ndimage.filters.gaussian_filter', 'scipy.ndimage.filters.gaussian_filter', (['ap_noised', 'gaussian_s'], {}), '(ap_noised, gaussian_s)\n', (3414, 3437), False, 'import scipy\n'), ((3466, 3502), 'pyworld.synthesize', 'pw.synthesize', (['f0_fixed0', 'sp', 'ap', 'fs'], {}), '(f0_fixed0, sp, ap, fs)\n', (3479, 3502), True, 'import pyworld as pw\n'), ((3599, 3657), 'sklearn.preprocessing.normalize', 'normalize', (['a'], {'scale': 'conf.scale_abs', 'offset': 'conf.offset_abs'}), '(a, scale=conf.scale_abs, offset=conf.offset_abs)\n', (3608, 3657), False, 'from sklearn.preprocessing import normalize\n'), ((3728, 3790), 'sklearn.preprocessing.normalize', 'normalize', (['a'], {'scale': 'conf.scale_phase', 'offset': 'conf.offset_phase'}), '(a, scale=conf.scale_phase, offset=conf.offset_phase)\n', (3737, 3790), False, 'from sklearn.preprocessing import normalize\n'), ((6160, 6185), 'numpy.rollaxis', 'np.rollaxis', (['srcImg', '(0)', '(3)'], {}), '(srcImg, 0, 3)\n', (6171, 6185), True, 'import numpy as np\n'), ((6188, 6220), 'imageio.imsave', 'imageio.imsave', (['savename', 'srcImg'], {}), '(savename, srcImg)\n', (6202, 6220), False, 'import imageio\n'), ((1275, 1296), 'numpy.exp', 'np.exp', (['(1.0j * Dphase)'], {}), '(1.0j * Dphase)\n', (1281, 1296), True, 'import numpy as np\n'), ((2785, 2802), 'numpy.ones', 'np.ones', (['f0.shape'], {}), '(f0.shape)\n', (2792, 2802), True, 'import numpy as np\n'), ((2931, 2947), 'numpy.ones_like', 'np.ones_like', (['sp'], {}), '(sp)\n', (2943, 2947), True, 'import numpy as np\n'), ((2948, 2958), 'numpy.min', 'np.min', (['sp'], {}), '(sp)\n', (2954, 2958), True, 'import numpy as np\n'), ((3144, 3198), 'numpy.random.normal', 'np.random.normal', (['sp_median', '(sp_median / 10)', 'sp2.shape'], {}), '(sp_median, sp_median / 10, sp2.shape)\n', (3160, 3198), True, 'import numpy as np\n'), ((3216, 3269), 'numpy.random.normal', 'np.random.normal', (['ap_median', '(ap_median / 10)', 'ap.shape'], {}), '(ap_median, ap_median / 10, ap.shape)\n', (3232, 3269), True, 'import numpy as np\n'), ((5263, 5364), 'numpy.pad', 'np.pad', (['audio_ndarray', '[(0, second * sr - audio_ndarray.shape[0])]', '"""constant"""'], {'constant_values': '(0)'}), "(audio_ndarray, [(0, second * sr - audio_ndarray.shape[0])],\n 'constant', constant_values=0)\n", (5269, 5364), True, 'import numpy as np\n'), ((1729, 1738), 'numpy.abs', 'np.abs', (['D'], {}), '(D)\n', (1735, 1738), True, 'import numpy as np\n'), ((2204, 2213), 'numpy.abs', 'np.abs', (['D'], {}), '(D)\n', (2210, 2213), True, 'import numpy as np\n'), ((4075, 4090), 'numpy.max', 'np.max', (['ndArray'], {}), '(ndArray)\n', (4081, 4090), True, 'import numpy as np\n'), ((4093, 4108), 'numpy.min', 'np.min', (['ndArray'], {}), '(ndArray)\n', (4099, 4108), True, 'import numpy as np\n'), ((4179, 4193), 'numpy.min', 'np.min', (['scaled'], {}), '(scaled)\n', (4185, 4193), True, 'import numpy as np\n'), ((6046, 6066), 'numpy.zeros', 'np.zeros', (['Dabs.shape'], {}), '(Dabs.shape)\n', (6054, 6066), True, 'import numpy as np\n')] |
#!/usr/bin/env python
import rospy
import roslib; roslib.load_manifest('megatrond_control')
from std_msgs.msg import Float64
#added
from geometry_msgs.msg import Twist
flw_cmd = rospy.Publisher('/vel_ctrl_flw/command', Float64, queue_size=10)
frw_cmd = rospy.Publisher('/vel_ctrl_frw/command', Float64, queue_size=10)
rrw_cmd = rospy.Publisher('/vel_ctrl_rrw/command', Float64, queue_size=10)
rlw_cmd = rospy.Publisher('/vel_ctrl_rlw/command', Float64, queue_size=10)
def callback(msg):
#rospy.loginfo("Received a /cmd_vel message!")
#rospy.loginfo("Linear Components: [%f, %f, %f]"%(msg.linear.x, msg.linear.y, msg.linear.z))
#rospy.loginfo("Angular Components: [%f, %f, %f]"%(msg.angular.x, msg.angular.y, msg.angular.z))
#Constants:
w = 0.285
h = 0.635
l = 0.700
r = 0.1015
#Wheel velocity processing:
flw_vel = 1/r*(msg.linear.y - msg.linear.x + msg.angular.z*(-w-h))
frw_vel = 1/r*(-msg.linear.y - msg.linear.x + msg.angular.z*(w+h))
rrw_vel = 1/r*(msg.linear.y - msg.linear.x + msg.angular.z*(w+h))
rlw_vel = 1/r*(-msg.linear.y - msg.linear.x + msg.angular.z*(-w-h))
flw_cmd.publish(flw_vel)
frw_cmd.publish(frw_vel)
rrw_cmd.publish(rrw_vel)
rlw_cmd.publish(rlw_vel)
def gazebo_control():
rospy.init_node('gazebo_control', anonymous=True)
rospy.Subscriber("/cmd_vel", Twist, callback)
rospy.spin()
if __name__ == '__main__':
try:
gazebo_control()
except rospy.ROSInterruptException:
pass
| [
"rospy.Subscriber",
"rospy.init_node",
"roslib.load_manifest",
"rospy.spin",
"rospy.Publisher"
] | [((50, 91), 'roslib.load_manifest', 'roslib.load_manifest', (['"""megatrond_control"""'], {}), "('megatrond_control')\n", (70, 91), False, 'import roslib\n'), ((179, 243), 'rospy.Publisher', 'rospy.Publisher', (['"""/vel_ctrl_flw/command"""', 'Float64'], {'queue_size': '(10)'}), "('/vel_ctrl_flw/command', Float64, queue_size=10)\n", (194, 243), False, 'import rospy\n'), ((254, 318), 'rospy.Publisher', 'rospy.Publisher', (['"""/vel_ctrl_frw/command"""', 'Float64'], {'queue_size': '(10)'}), "('/vel_ctrl_frw/command', Float64, queue_size=10)\n", (269, 318), False, 'import rospy\n'), ((329, 393), 'rospy.Publisher', 'rospy.Publisher', (['"""/vel_ctrl_rrw/command"""', 'Float64'], {'queue_size': '(10)'}), "('/vel_ctrl_rrw/command', Float64, queue_size=10)\n", (344, 393), False, 'import rospy\n'), ((404, 468), 'rospy.Publisher', 'rospy.Publisher', (['"""/vel_ctrl_rlw/command"""', 'Float64'], {'queue_size': '(10)'}), "('/vel_ctrl_rlw/command', Float64, queue_size=10)\n", (419, 468), False, 'import rospy\n'), ((1277, 1326), 'rospy.init_node', 'rospy.init_node', (['"""gazebo_control"""'], {'anonymous': '(True)'}), "('gazebo_control', anonymous=True)\n", (1292, 1326), False, 'import rospy\n'), ((1331, 1376), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/cmd_vel"""', 'Twist', 'callback'], {}), "('/cmd_vel', Twist, callback)\n", (1347, 1376), False, 'import rospy\n'), ((1382, 1394), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (1392, 1394), False, 'import rospy\n')] |
"""
Django settings for django_oauth_client project.
Generated by 'django-admin startproject' using Django 2.2.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import django_heroku
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# SECRET_KEY = '!<KEY>
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', '!<KEY>')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = os.environ.get('DJANGO_DEBUG', '') != 'False'
ALLOWED_HOSTS = ['django-oauth-client.herokuapp.com', 'localhost']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.github',
'customoauthprovider',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'django_oauth_client.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_oauth_client.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
)
SITE_ID = 1
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATIC_ROOT = os.path.join(BASE_DIR, 'static_cdn', 'static_root')
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media_cdn', 'media_root')
LOGIN_REDIRECT_URL = 'home'
LOGIN_URL = 'login'
LOGOUT_REDIRECT_URL = 'home'
# OAUTH_SERVER_BASEURL = 'https://django-oauth-server.herokuapp.com'
# OAUTH_SERVER_BASEURL = 'http://localhost:8001'
OAUTH_SERVER_BASEURL = os.environ.get('DJANGO_OAUTH_SERVER_BASEURL', 'http://localhost:8001')
# EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
EMAIL_BACKEND = os.environ.get('DJANGO_EMAIL_BACKEND', 'django.core.mail.backends.console.EmailBackend')
EMAIL_HOST = os.environ.get('DJANGO_EMAIL_HOST')
EMAIL_HOST_USER = os.environ.get('DJANGO_EMAIL_USER')
EMAIL_HOST_PASSWORD = os.environ.get('DJANGO_EMAIL_PASSWORD')
EMAIL_PORT = 587
EMAIL_USE_TLS = True
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format' : "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s",
'datefmt' : "%d/%b/%Y %H:%M:%S"
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': 'mysite.log',
'formatter': 'verbose'
},
},
'loggers': {
'django': {
'handlers':['file'],
'propagate': True,
'level':'DEBUG',
},
'MYAPP': {
'handlers': ['file'],
'level': 'DEBUG',
},
}
}
django_heroku.settings(locals()) | [
"os.path.join",
"os.environ.get",
"os.path.abspath"
] | [((736, 781), 'os.environ.get', 'os.environ.get', (['"""DJANGO_SECRET_KEY"""', '"""!<KEY>"""'], {}), "('DJANGO_SECRET_KEY', '!<KEY>')\n", (750, 781), False, 'import os\n'), ((3686, 3737), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static_cdn"""', '"""static_root"""'], {}), "(BASE_DIR, 'static_cdn', 'static_root')\n", (3698, 3737), False, 'import os\n'), ((3774, 3823), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""media_cdn"""', '"""media_root"""'], {}), "(BASE_DIR, 'media_cdn', 'media_root')\n", (3786, 3823), False, 'import os\n'), ((4045, 4115), 'os.environ.get', 'os.environ.get', (['"""DJANGO_OAUTH_SERVER_BASEURL"""', '"""http://localhost:8001"""'], {}), "('DJANGO_OAUTH_SERVER_BASEURL', 'http://localhost:8001')\n", (4059, 4115), False, 'import os\n'), ((4200, 4292), 'os.environ.get', 'os.environ.get', (['"""DJANGO_EMAIL_BACKEND"""', '"""django.core.mail.backends.console.EmailBackend"""'], {}), "('DJANGO_EMAIL_BACKEND',\n 'django.core.mail.backends.console.EmailBackend')\n", (4214, 4292), False, 'import os\n'), ((4302, 4337), 'os.environ.get', 'os.environ.get', (['"""DJANGO_EMAIL_HOST"""'], {}), "('DJANGO_EMAIL_HOST')\n", (4316, 4337), False, 'import os\n'), ((4356, 4391), 'os.environ.get', 'os.environ.get', (['"""DJANGO_EMAIL_USER"""'], {}), "('DJANGO_EMAIL_USER')\n", (4370, 4391), False, 'import os\n'), ((4414, 4453), 'os.environ.get', 'os.environ.get', (['"""DJANGO_EMAIL_PASSWORD"""'], {}), "('DJANGO_EMAIL_PASSWORD')\n", (4428, 4453), False, 'import os\n'), ((857, 891), 'os.environ.get', 'os.environ.get', (['"""DJANGO_DEBUG"""', '""""""'], {}), "('DJANGO_DEBUG', '')\n", (871, 891), False, 'import os\n'), ((3635, 3667), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static"""'], {}), "(BASE_DIR, 'static')\n", (3647, 3667), False, 'import os\n'), ((467, 492), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (482, 492), False, 'import os\n'), ((2584, 2620), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""db.sqlite3"""'], {}), "(BASE_DIR, 'db.sqlite3')\n", (2596, 2620), False, 'import os\n'), ((1940, 1975), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""templates"""'], {}), "(BASE_DIR, 'templates')\n", (1952, 1975), False, 'import os\n')] |
from django.urls import path
from . import views
app_name= 'posts'
urlpatterns = [
path('', views.index, name="index" ),
path('<int:id>/', views.detail, name="detail"),
path('<int:id>/edit', views.edit, name="edit")
]
| [
"django.urls.path"
] | [((89, 124), 'django.urls.path', 'path', (['""""""', 'views.index'], {'name': '"""index"""'}), "('', views.index, name='index')\n", (93, 124), False, 'from django.urls import path\n'), ((132, 178), 'django.urls.path', 'path', (['"""<int:id>/"""', 'views.detail'], {'name': '"""detail"""'}), "('<int:id>/', views.detail, name='detail')\n", (136, 178), False, 'from django.urls import path\n'), ((184, 230), 'django.urls.path', 'path', (['"""<int:id>/edit"""', 'views.edit'], {'name': '"""edit"""'}), "('<int:id>/edit', views.edit, name='edit')\n", (188, 230), False, 'from django.urls import path\n')] |
from django.db import models
# Create your models here.
class People(models.Model):
fio=models.CharField(max_length=255)
sex=models.CharField(max_length=1)
age=models.IntegerField()
def __str__(self):
return 'ФИО:%s Пол:%s Возраст:%s'%(self.fio,self.sex,self.age) | [
"django.db.models.CharField",
"django.db.models.IntegerField"
] | [((93, 125), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (109, 125), False, 'from django.db import models\n'), ((134, 164), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1)'}), '(max_length=1)\n', (150, 164), False, 'from django.db import models\n'), ((173, 194), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (192, 194), False, 'from django.db import models\n')] |
from datetime import datetime, timedelta
import numpy as np
def generate_day_list(day, step_size=7, n_steps = 52):
strt_dt = datetime.strptime(day, "%Y-%m-%d")
dt_list = []
for i in range(n_steps):
dt_list.append(strt_dt)
strt_dt += timedelta(days=step_size)
return [datetime.strftime(dt, "%Y-%m-%d") for dt in dt_list]
def generate_daily_list(year, **kwargs):
strt_date = datetime(year, 1, 1)
end_date = datetime(year + 1, 1, 1)
if "month" in kwargs.keys():
month = kwargs["month"]
if month == 12:
strt_date = datetime(year, month, 1)
end_date = datetime(year + 1, 1, 1)
else:
strt_date = datetime(year, month, 1)
end_date = datetime(year, month + 1, 1)
num_days = end_date - strt_date
daily_list = []
for day in range(num_days.days):
dt = strt_date + timedelta(days=day)
dt_str = dt.strftime("%Y-%m-%d")
daily_list.append(dt_str + "/")
return daily_list
def gen_radial_coordinates(time):
theta = 2*np.pi*time/24
x1 = np.cos(theta)
x2 = np.sin(theta)
return x1,x2
def index_to_center(index_lat, index_lon):
grid_size = 0.75
lat = 90-grid_size*index_lat
lon = grid_size*index_lon
return lat, lon
def lon_transform_0_base(lon):
if lon <0:
lon = lon+360
return lon
def lon_transform_minus180_base(lon):
if lon >180:
lon = lon-360
return lon
def lat_lon_index(lat_lon):
lat, lon = lat_lon
grid_size = 0.75
lon = lon_transform_0_base(lon)
lat_index = int((90.325- lat)/grid_size)
lon_index = int((0.325+lon)/grid_size)
return (lat_index, lon_index)
| [
"datetime.datetime",
"datetime.datetime.strptime",
"numpy.cos",
"numpy.sin",
"datetime.timedelta",
"datetime.datetime.strftime"
] | [((131, 165), 'datetime.datetime.strptime', 'datetime.strptime', (['day', '"""%Y-%m-%d"""'], {}), "(day, '%Y-%m-%d')\n", (148, 165), False, 'from datetime import datetime, timedelta\n'), ((415, 435), 'datetime.datetime', 'datetime', (['year', '(1)', '(1)'], {}), '(year, 1, 1)\n', (423, 435), False, 'from datetime import datetime, timedelta\n'), ((451, 475), 'datetime.datetime', 'datetime', (['(year + 1)', '(1)', '(1)'], {}), '(year + 1, 1, 1)\n', (459, 475), False, 'from datetime import datetime, timedelta\n'), ((1094, 1107), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (1100, 1107), True, 'import numpy as np\n'), ((1117, 1130), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (1123, 1130), True, 'import numpy as np\n'), ((264, 289), 'datetime.timedelta', 'timedelta', ([], {'days': 'step_size'}), '(days=step_size)\n', (273, 289), False, 'from datetime import datetime, timedelta\n'), ((303, 336), 'datetime.datetime.strftime', 'datetime.strftime', (['dt', '"""%Y-%m-%d"""'], {}), "(dt, '%Y-%m-%d')\n", (320, 336), False, 'from datetime import datetime, timedelta\n'), ((590, 614), 'datetime.datetime', 'datetime', (['year', 'month', '(1)'], {}), '(year, month, 1)\n', (598, 614), False, 'from datetime import datetime, timedelta\n'), ((638, 662), 'datetime.datetime', 'datetime', (['(year + 1)', '(1)', '(1)'], {}), '(year + 1, 1, 1)\n', (646, 662), False, 'from datetime import datetime, timedelta\n'), ((701, 725), 'datetime.datetime', 'datetime', (['year', 'month', '(1)'], {}), '(year, month, 1)\n', (709, 725), False, 'from datetime import datetime, timedelta\n'), ((749, 777), 'datetime.datetime', 'datetime', (['year', '(month + 1)', '(1)'], {}), '(year, month + 1, 1)\n', (757, 777), False, 'from datetime import datetime, timedelta\n'), ((898, 917), 'datetime.timedelta', 'timedelta', ([], {'days': 'day'}), '(days=day)\n', (907, 917), False, 'from datetime import datetime, timedelta\n')] |
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Hemant and Karan and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
import uuid
from xml.etree.ElementTree import Element, SubElement, Comment, tostring
from random import randint
import datetime
class ResidentialUnit(Document):
def generate_pin(self):
#frappe.errprint(self.exit_pin)
self.entry_pin = randint(10000, 99999)
self.start_from = frappe.utils.now()
source_datetime = frappe.utils.get_datetime(self.start_from)
eod = datetime.datetime(
year=source_datetime.year,
month=source_datetime.month,
day=source_datetime.day
) + datetime.timedelta(days=1, microseconds=-1)
self.expires_on=eod
self.expired = 0
#frappe.errprint(self.entry_pin)
#frappe.errprint(self.expires_on)
self.save()
#return 1
def on_update(self):
source_datetime = frappe.utils.get_datetime(self.start_from)
eod = datetime.datetime(
year=source_datetime.year,
month=source_datetime.month,
day=source_datetime.day
) + datetime.timedelta(days=1, microseconds=-1)
self.expires_on = eod
#return 1
def check_expired():
#frappe.logger().debug('Checking Expired pins...')
for d in frappe.db.sql("""select name, start_from, expires_on, expired
from `tabResidential Unit`
where expired = 0 and (start_from is not null or expires_on is not null) """, as_dict=1):
today = frappe.utils.now_datetime()
#frappe.logger().debug(str(today) + "|" + str(d.start_from) + "|" + str(d.expires_on))
if today> d.expires_on:
frappe.set_value("Residential Unit",d.name,"expired",1)
frappe.db.commit()
#frappe.logger().debug('Pin flagged as expired on ' + d.name)
@frappe.whitelist(allow_guest=True)
def verify_entry_pin(pin, entry_pin):
pin_stored = frappe.get_doc("Pin")
if pin_stored.pin == pin:
bb28741238af481dacf6187153fdc3cf()
residents = frappe.db.sql("""select name, entry_pin, start_from, expires_on, expired
from `tabResidential Unit`
where entry_pin=%(pin)s and expired=0""", {"pin": entry_pin}, as_dict=True)
#return residents
if residents:
return True
else:
return False
@frappe.whitelist(allow_guest=True)
def unit(pin, unit_number):
pin_stored = frappe.get_doc("Pin")
if pin_stored.pin == pin:
unit = {}
exists = frappe.db.exists('Residential Unit',{'unit_number':unit_number})
if exists:
residents = frappe.db.sql("""select name, contact_type, contact, show_as, sufix from `tabResident`
where parent=%(name)s and hidden=0
order by idx""", {"name": exists}, as_dict=True)
#unit = frappe.get_doc('Residential Unit', exists)
return residents#residents #unit.residents
else:
return None
else:
return None
@frappe.whitelist(allow_guest=True)
def call_unit(CLID,From,To, CallStatus):
pin_stored = frappe.get_doc("Pin")
pin = To.split('|')[0].split(':')[1]
if pin_stored.pin == pin:
if CallStatus == 'ringing':
from werkzeug.wrappers import Response
response = Response()
response.mimetype = 'text/xml'
response.charset = 'utf-8'
data= Element("Response")
dial = SubElement(data, "Dial")
dial.set('callerId', CLID)
dial.set('callerName', "kyalom170124081248")
dial.set('digitsMatchBLeg', '9')
dial.set('callbackUrl', 'http://gate.pema.co.za:5000/open')
SubElement(dial, "User").text ='sip:' + To.split('|')[1]
response.data = tostring(data)
#tree = ElementTree(response)
return response
else:
return None
#params = {}
#params['to'] = 'sip:' + To.split('|')[1]
#params['clid'] = CLID
#params['frm']= From
#return params
else:
return None
@frappe.whitelist(allow_guest=True)
def bb28741238af481dacf6187153fdc3cf():
#import random
#pin = random.randint(9999, 99999)
pin=str(uuid.uuid4())
frappe.db.set_value("Pin",None,'pin',pin)
frappe.db.commit()
return pin
@frappe.whitelist(allow_guest=True)
def ping():
return 'pong'
@frappe.whitelist(allow_guest=True)
def verify(pin):
pin_stored = frappe.get_doc("Pin")
if pin_stored.pin.replace('-','') == pin:
return True
else:
return False
@frappe.whitelist(allow_guest=True)
def verify_number(number):
exists = frappe.db.exists('Resident', {'contact': number})
#return exists
if exists:
resident = frappe.get_doc('Resident', exists)
unit = frappe.get_doc('Residential Unit', resident.parent)
return unit.unit_number
else:
return None
@frappe.whitelist(allow_guest=True)
def unit_list(pin):
pin_stored = frappe.get_doc("Pin")
if pin_stored.pin == pin:
_list = frappe.get_all('Residential Unit', fields=['unit_number', 'exit_pin'])
return _list
else:
return None
| [
"frappe.utils.get_datetime",
"frappe.db.commit",
"frappe.get_doc",
"datetime.timedelta",
"datetime.datetime",
"frappe.db.exists",
"frappe.db.sql",
"random.randint",
"frappe.whitelist",
"uuid.uuid4",
"xml.etree.ElementTree.SubElement",
"frappe.db.set_value",
"frappe.utils.now",
"xml.etree.ElementTree.tostring",
"werkzeug.wrappers.Response",
"xml.etree.ElementTree.Element",
"frappe.utils.now_datetime",
"frappe.get_all",
"frappe.set_value"
] | [((1756, 1790), 'frappe.whitelist', 'frappe.whitelist', ([], {'allow_guest': '(True)'}), '(allow_guest=True)\n', (1772, 1790), False, 'import frappe\n'), ((2215, 2249), 'frappe.whitelist', 'frappe.whitelist', ([], {'allow_guest': '(True)'}), '(allow_guest=True)\n', (2231, 2249), False, 'import frappe\n'), ((2786, 2820), 'frappe.whitelist', 'frappe.whitelist', ([], {'allow_guest': '(True)'}), '(allow_guest=True)\n', (2802, 2820), False, 'import frappe\n'), ((3681, 3715), 'frappe.whitelist', 'frappe.whitelist', ([], {'allow_guest': '(True)'}), '(allow_guest=True)\n', (3697, 3715), False, 'import frappe\n'), ((3910, 3944), 'frappe.whitelist', 'frappe.whitelist', ([], {'allow_guest': '(True)'}), '(allow_guest=True)\n', (3926, 3944), False, 'import frappe\n'), ((3974, 4008), 'frappe.whitelist', 'frappe.whitelist', ([], {'allow_guest': '(True)'}), '(allow_guest=True)\n', (3990, 4008), False, 'import frappe\n'), ((4144, 4178), 'frappe.whitelist', 'frappe.whitelist', ([], {'allow_guest': '(True)'}), '(allow_guest=True)\n', (4160, 4178), False, 'import frappe\n'), ((4452, 4486), 'frappe.whitelist', 'frappe.whitelist', ([], {'allow_guest': '(True)'}), '(allow_guest=True)\n', (4468, 4486), False, 'import frappe\n'), ((1269, 1462), 'frappe.db.sql', 'frappe.db.sql', (['"""select name, start_from, expires_on, expired\n\t\t\tfrom `tabResidential Unit`\n\t\t\twhere expired = 0 and (start_from is not null or expires_on is not null) """'], {'as_dict': '(1)'}), '(\n """select name, start_from, expires_on, expired\n\t\t\tfrom `tabResidential Unit`\n\t\t\twhere expired = 0 and (start_from is not null or expires_on is not null) """\n , as_dict=1)\n', (1282, 1462), False, 'import frappe\n'), ((1843, 1864), 'frappe.get_doc', 'frappe.get_doc', (['"""Pin"""'], {}), "('Pin')\n", (1857, 1864), False, 'import frappe\n'), ((2292, 2313), 'frappe.get_doc', 'frappe.get_doc', (['"""Pin"""'], {}), "('Pin')\n", (2306, 2313), False, 'import frappe\n'), ((2876, 2897), 'frappe.get_doc', 'frappe.get_doc', (['"""Pin"""'], {}), "('Pin')\n", (2890, 2897), False, 'import frappe\n'), ((3834, 3878), 'frappe.db.set_value', 'frappe.db.set_value', (['"""Pin"""', 'None', '"""pin"""', 'pin'], {}), "('Pin', None, 'pin', pin)\n", (3853, 3878), False, 'import frappe\n'), ((3877, 3895), 'frappe.db.commit', 'frappe.db.commit', ([], {}), '()\n', (3893, 3895), False, 'import frappe\n'), ((4040, 4061), 'frappe.get_doc', 'frappe.get_doc', (['"""Pin"""'], {}), "('Pin')\n", (4054, 4061), False, 'import frappe\n'), ((4216, 4265), 'frappe.db.exists', 'frappe.db.exists', (['"""Resident"""', "{'contact': number}"], {}), "('Resident', {'contact': number})\n", (4232, 4265), False, 'import frappe\n'), ((4521, 4542), 'frappe.get_doc', 'frappe.get_doc', (['"""Pin"""'], {}), "('Pin')\n", (4535, 4542), False, 'import frappe\n'), ((468, 489), 'random.randint', 'randint', (['(10000)', '(99999)'], {}), '(10000, 99999)\n', (475, 489), False, 'from random import randint\n'), ((510, 528), 'frappe.utils.now', 'frappe.utils.now', ([], {}), '()\n', (526, 528), False, 'import frappe\n'), ((549, 591), 'frappe.utils.get_datetime', 'frappe.utils.get_datetime', (['self.start_from'], {}), '(self.start_from)\n', (574, 591), False, 'import frappe\n'), ((939, 981), 'frappe.utils.get_datetime', 'frappe.utils.get_datetime', (['self.start_from'], {}), '(self.start_from)\n', (964, 981), False, 'import frappe\n'), ((1465, 1492), 'frappe.utils.now_datetime', 'frappe.utils.now_datetime', ([], {}), '()\n', (1490, 1492), False, 'import frappe\n'), ((1944, 2146), 'frappe.db.sql', 'frappe.db.sql', (['"""select name, entry_pin, start_from, expires_on, expired\n\t\t\t\t\t\t\tfrom `tabResidential Unit`\n\t \t\t\t\t\twhere entry_pin=%(pin)s and expired=0"""', "{'pin': entry_pin}"], {'as_dict': '(True)'}), '(\n """select name, entry_pin, start_from, expires_on, expired\n\t\t\t\t\t\t\tfrom `tabResidential Unit`\n\t \t\t\t\t\twhere entry_pin=%(pin)s and expired=0"""\n , {\'pin\': entry_pin}, as_dict=True)\n', (1957, 2146), False, 'import frappe\n'), ((2365, 2431), 'frappe.db.exists', 'frappe.db.exists', (['"""Residential Unit"""', "{'unit_number': unit_number}"], {}), "('Residential Unit', {'unit_number': unit_number})\n", (2381, 2431), False, 'import frappe\n'), ((3818, 3830), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (3828, 3830), False, 'import uuid\n'), ((4307, 4341), 'frappe.get_doc', 'frappe.get_doc', (['"""Resident"""', 'exists'], {}), "('Resident', exists)\n", (4321, 4341), False, 'import frappe\n'), ((4351, 4402), 'frappe.get_doc', 'frappe.get_doc', (['"""Residential Unit"""', 'resident.parent'], {}), "('Residential Unit', resident.parent)\n", (4365, 4402), False, 'import frappe\n'), ((4580, 4650), 'frappe.get_all', 'frappe.get_all', (['"""Residential Unit"""'], {'fields': "['unit_number', 'exit_pin']"}), "('Residential Unit', fields=['unit_number', 'exit_pin'])\n", (4594, 4650), False, 'import frappe\n'), ((600, 702), 'datetime.datetime', 'datetime.datetime', ([], {'year': 'source_datetime.year', 'month': 'source_datetime.month', 'day': 'source_datetime.day'}), '(year=source_datetime.year, month=source_datetime.month,\n day=source_datetime.day)\n', (617, 702), False, 'import datetime\n'), ((714, 757), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)', 'microseconds': '(-1)'}), '(days=1, microseconds=-1)\n', (732, 757), False, 'import datetime\n'), ((990, 1092), 'datetime.datetime', 'datetime.datetime', ([], {'year': 'source_datetime.year', 'month': 'source_datetime.month', 'day': 'source_datetime.day'}), '(year=source_datetime.year, month=source_datetime.month,\n day=source_datetime.day)\n', (1007, 1092), False, 'import datetime\n'), ((1104, 1147), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)', 'microseconds': '(-1)'}), '(days=1, microseconds=-1)\n', (1122, 1147), False, 'import datetime\n'), ((1611, 1669), 'frappe.set_value', 'frappe.set_value', (['"""Residential Unit"""', 'd.name', '"""expired"""', '(1)'], {}), "('Residential Unit', d.name, 'expired', 1)\n", (1627, 1669), False, 'import frappe\n'), ((1670, 1688), 'frappe.db.commit', 'frappe.db.commit', ([], {}), '()\n', (1686, 1688), False, 'import frappe\n'), ((2458, 2648), 'frappe.db.sql', 'frappe.db.sql', (['"""select name, contact_type, contact, show_as, sufix from `tabResident`\n\t\t\t\t\twhere parent=%(name)s and hidden=0\n\t\t\t\t\torder by idx"""', "{'name': exists}"], {'as_dict': '(True)'}), '(\n """select name, contact_type, contact, show_as, sufix from `tabResident`\n\t\t\t\t\twhere parent=%(name)s and hidden=0\n\t\t\t\t\torder by idx"""\n , {\'name\': exists}, as_dict=True)\n', (2471, 2648), False, 'import frappe\n'), ((3049, 3059), 'werkzeug.wrappers.Response', 'Response', ([], {}), '()\n', (3057, 3059), False, 'from werkzeug.wrappers import Response\n'), ((3133, 3152), 'xml.etree.ElementTree.Element', 'Element', (['"""Response"""'], {}), "('Response')\n", (3140, 3152), False, 'from xml.etree.ElementTree import Element, SubElement, Comment, tostring\n'), ((3163, 3187), 'xml.etree.ElementTree.SubElement', 'SubElement', (['data', '"""Dial"""'], {}), "(data, 'Dial')\n", (3173, 3187), False, 'from xml.etree.ElementTree import Element, SubElement, Comment, tostring\n'), ((3444, 3458), 'xml.etree.ElementTree.tostring', 'tostring', (['data'], {}), '(data)\n', (3452, 3458), False, 'from xml.etree.ElementTree import Element, SubElement, Comment, tostring\n'), ((3368, 3392), 'xml.etree.ElementTree.SubElement', 'SubElement', (['dial', '"""User"""'], {}), "(dial, 'User')\n", (3378, 3392), False, 'from xml.etree.ElementTree import Element, SubElement, Comment, tostring\n')] |
from XDSM import XDSM
opt = 'Optimization'
dat = 'DataInter'
mda = 'MDA'
anl = 'Analysis'
x = XDSM()
names = ['assembly inputs', 'tube', 'inlet' , 'comp1', 'duct1', 'split', 'nozzle',
'comp2', 'duct2', 'perf', 'assembly outputs']
for name in names:
x.addComp(name, anl, name)
x.addDep('tube', 'assembly inputs', dat, '', stack=True)
x.addDep('inlet', 'assembly inputs', dat, '', stack=True)
x.addDep('comp1', 'assembly inputs', dat, '', stack=True)
x.addDep('split', 'assembly inputs', dat, '', stack=True)
x.addDep('comp2', 'assembly inputs', dat, '', stack=True)
x.addDep('perf', 'assembly inputs', dat, '', stack=True)
x.addDep('inlet', 'tube', dat, '', stack=True)
x.addDep('comp1', 'inlet', dat, '', stack=True)
x.addDep('duct1', 'comp1', dat, '', stack=True)
x.addDep('split', 'duct1', dat, '', stack=True)
x.addDep('nozzle', 'split', dat, '', stack=True)
x.addDep('comp2', 'split', dat, '', stack=True)
x.addDep('duct2', 'comp2', dat, '', stack=True)
x.addDep('perf', 'nozzle', dat, '', stack=True)
x.addDep('perf', 'comp2', dat, '', stack=True)
x.addDep('comp2', 'perf', dat, '', stack=True)
x.addDep('perf', 'inlet', dat, '', stack=True)
x.addDep('perf', 'comp1', dat, '', stack=True)
x.addDep('assembly outputs', 'tube', dat, '', stack=True)
x.addDep('assembly outputs', 'inlet', dat, '', stack=True)
x.addDep('assembly outputs', 'comp1', dat, '', stack=True)
x.addDep('assembly outputs', 'duct2', dat, '', stack=True)
x.addDep('assembly outputs', 'perf', dat, '', stack=True)
x.write('compress',True)
| [
"XDSM.XDSM"
] | [((96, 102), 'XDSM.XDSM', 'XDSM', ([], {}), '()\n', (100, 102), False, 'from XDSM import XDSM\n')] |
from flask import Flask
from prime import primenos
app = Flask(__name__)
@app.route('/')
def hundred_primes():
return str(primenos(100))
@app.route('/<int:number>', methods=['GET'])
def primes(number):
return str(primenos(number))
if __name__ == '__main__':
app.run(debug = True)
| [
"prime.primenos",
"flask.Flask"
] | [((57, 72), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (62, 72), False, 'from flask import Flask\n'), ((124, 137), 'prime.primenos', 'primenos', (['(100)'], {}), '(100)\n', (132, 137), False, 'from prime import primenos\n'), ((217, 233), 'prime.primenos', 'primenos', (['number'], {}), '(number)\n', (225, 233), False, 'from prime import primenos\n')] |
import pygments
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name, get_lexer_for_filename
def render(raw, ext=None, **kwargs):
try:
lexer = get_lexer_for_filename(ext)
except pygments.util.ClassNotFound:
lexer = get_lexer_by_name('python')
formatters = HtmlFormatter(full=True, linenos=True, h1_lines=[1, 2])
return highlight(raw, lexer, formatters)
| [
"pygments.highlight",
"pygments.formatters.HtmlFormatter",
"pygments.lexers.get_lexer_for_filename",
"pygments.lexers.get_lexer_by_name"
] | [((356, 411), 'pygments.formatters.HtmlFormatter', 'HtmlFormatter', ([], {'full': '(True)', 'linenos': '(True)', 'h1_lines': '[1, 2]'}), '(full=True, linenos=True, h1_lines=[1, 2])\n', (369, 411), False, 'from pygments.formatters import HtmlFormatter\n'), ((423, 456), 'pygments.highlight', 'highlight', (['raw', 'lexer', 'formatters'], {}), '(raw, lexer, formatters)\n', (432, 456), False, 'from pygments import highlight\n'), ((227, 254), 'pygments.lexers.get_lexer_for_filename', 'get_lexer_for_filename', (['ext'], {}), '(ext)\n', (249, 254), False, 'from pygments.lexers import get_lexer_by_name, get_lexer_for_filename\n'), ((311, 338), 'pygments.lexers.get_lexer_by_name', 'get_lexer_by_name', (['"""python"""'], {}), "('python')\n", (328, 338), False, 'from pygments.lexers import get_lexer_by_name, get_lexer_for_filename\n')] |
import inspect
import os
import logging, logging.handlers
from pathlib import Path
import traceback
import random
CHARS = 'abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
def generate_random_str(length: int=4):
if length is None:
length = 4
if not isinstance(length, int):
length = 4
if length < 1:
length = 1
if length > 1024:
length = 1024
s = ''
while len(s) < length:
s = '{}{}'.format(
s,
random.choice(CHARS)
)
return s
def generate_unique_run_id():
run_id = '{}-{}-{}-{}'.format(
generate_random_str(length=8),
generate_random_str(length=4),
generate_random_str(length=4),
generate_random_str(length=12)
)
return run_id
HOME = str(Path.home())
LOG_DIR = '{}/.cloud_console/logs'.format(HOME)
LOG_FILE = '{}/common.log'.format(LOG_DIR)
FILE_LOG_ENABLED = False
RUN_ID = generate_unique_run_id()
DEBUG = bool(os.getenv('DEBUG', None))
if os.path.exists(LOG_DIR) is False:
try:
os.makedirs(LOG_DIR, exist_ok=True)
FILE_LOG_ENABLED = True
except:
traceback.print_exc()
else:
FILE_LOG_ENABLED = True
def get_default_logger():
logger = logging.getLogger('cloud-console')
logger.setLevel(logging.INFO)
if os.getenv('DEBUG', None) is not None:
logger.setLevel(logging.DEBUG)
return logger
def get_file_log_handler():
fh = logging.handlers.RotatingFileHandler(LOG_FILE, maxBytes=10485760, backupCount=5)
fh.setLevel(logging.INFO)
if DEBUG is True:
fh.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
return fh
def get_default_log_handler():
if FILE_LOG_ENABLED is True:
return get_file_log_handler()
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
if DEBUG is True:
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
return ch
def id_caller()->list:
result = list()
try:
caller_stack = inspect.stack()[2]
result.append(caller_stack[1].split(os.sep)[-1]) # File name
result.append(caller_stack[2]) # line number
result.append(caller_stack[3]) # function name
except: # pragma: no cover
pass
return result
class Logger:
def __init__(self, logger=get_default_logger() ,logging_handler=get_default_log_handler()):
logger.addHandler(logging_handler)
self.logger = logger
def _format_msg(self, stack_data: list, message: str)->str:
if message is not None:
message = '{}'.format(message)
if len(stack_data) == 3:
message = '{} [{}:{}:{}] {}'.format(
RUN_ID,
stack_data[0],
stack_data[1],
stack_data[2],
message
)
return message
return 'NO_INPUT_MESSAGE'
def info(self, message: str):
self.logger.info(
self._format_msg(
stack_data=id_caller(),
message=message
)
)
def warning(self, message: str):
self.logger.warning(
self._format_msg(
stack_data=id_caller(),
message=message
)
)
def error(self, message: str):
self.logger.error(
self._format_msg(
stack_data=id_caller(),
message=message
)
)
def debug(self, message: str):
self.logger.debug(
self._format_msg(
stack_data=id_caller(),
message=message
)
)
log = Logger()
log.info(message='*** Logging Initiated - FILE_LOG_ENABLED={} DEBUG={}'.format(FILE_LOG_ENABLED, DEBUG))
log.debug(message='DEBUG ENABLED')
# EOF
| [
"logging.getLogger",
"os.path.exists",
"logging.StreamHandler",
"random.choice",
"os.getenv",
"os.makedirs",
"inspect.stack",
"logging.Formatter",
"pathlib.Path.home",
"logging.handlers.RotatingFileHandler",
"traceback.print_exc"
] | [((806, 817), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (815, 817), False, 'from pathlib import Path\n'), ((982, 1006), 'os.getenv', 'os.getenv', (['"""DEBUG"""', 'None'], {}), "('DEBUG', None)\n", (991, 1006), False, 'import os\n'), ((1013, 1036), 'os.path.exists', 'os.path.exists', (['LOG_DIR'], {}), '(LOG_DIR)\n', (1027, 1036), False, 'import os\n'), ((1249, 1283), 'logging.getLogger', 'logging.getLogger', (['"""cloud-console"""'], {}), "('cloud-console')\n", (1266, 1283), False, 'import logging, logging.handlers\n'), ((1459, 1544), 'logging.handlers.RotatingFileHandler', 'logging.handlers.RotatingFileHandler', (['LOG_FILE'], {'maxBytes': '(10485760)', 'backupCount': '(5)'}), '(LOG_FILE, maxBytes=10485760, backupCount=5\n )\n', (1495, 1544), False, 'import logging, logging.handlers\n'), ((1643, 1716), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (1660, 1716), False, 'import logging, logging.handlers\n'), ((1875, 1898), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (1896, 1898), False, 'import logging, logging.handlers\n'), ((2002, 2075), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (2019, 2075), False, 'import logging, logging.handlers\n'), ((1064, 1099), 'os.makedirs', 'os.makedirs', (['LOG_DIR'], {'exist_ok': '(True)'}), '(LOG_DIR, exist_ok=True)\n', (1075, 1099), False, 'import os\n'), ((1325, 1349), 'os.getenv', 'os.getenv', (['"""DEBUG"""', 'None'], {}), "('DEBUG', None)\n", (1334, 1349), False, 'import os\n'), ((502, 522), 'random.choice', 'random.choice', (['CHARS'], {}), '(CHARS)\n', (515, 522), False, 'import random\n'), ((1152, 1173), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (1171, 1173), False, 'import traceback\n'), ((2198, 2213), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (2211, 2213), False, 'import inspect\n')] |
from setuptools import setup
if __name__ == "__main__":
setup(
entry_points={
"console_scripts": ["quickdocs = quickdocs.__main__:main"]
},
install_requires=[
"dataclasses",
"pyimport",
"pypandoc",
"ruamel.yaml",
"simple_pipes",
"sphinx-autodoc-annotation",
"sphinx",
"sphinxcontrib.apidoc",
"sphinxcontrib.pandoc_markdown",
"walkmate",
"yummy_sphinx_theme",
],
extras_require={
"tests": [
"coverage",
"codacy-coverage",
"pytest-bdd",
"pytest-cov",
"pytest-html",
"pytest-sugar",
"pytest-watch",
"pytest",
"tox-travis",
"tox",
],
"tools": [
"autoflake",
"bandit",
"black",
"bump2version",
"isort",
"mypy",
"pylint",
"quickdocs",
"twine",
"wheel",
],
},
)
| [
"setuptools.setup"
] | [((61, 676), 'setuptools.setup', 'setup', ([], {'entry_points': "{'console_scripts': ['quickdocs = quickdocs.__main__:main']}", 'install_requires': "['dataclasses', 'pyimport', 'pypandoc', 'ruamel.yaml', 'simple_pipes',\n 'sphinx-autodoc-annotation', 'sphinx', 'sphinxcontrib.apidoc',\n 'sphinxcontrib.pandoc_markdown', 'walkmate', 'yummy_sphinx_theme']", 'extras_require': "{'tests': ['coverage', 'codacy-coverage', 'pytest-bdd', 'pytest-cov',\n 'pytest-html', 'pytest-sugar', 'pytest-watch', 'pytest', 'tox-travis',\n 'tox'], 'tools': ['autoflake', 'bandit', 'black', 'bump2version',\n 'isort', 'mypy', 'pylint', 'quickdocs', 'twine', 'wheel']}"}), "(entry_points={'console_scripts': [\n 'quickdocs = quickdocs.__main__:main']}, install_requires=[\n 'dataclasses', 'pyimport', 'pypandoc', 'ruamel.yaml', 'simple_pipes',\n 'sphinx-autodoc-annotation', 'sphinx', 'sphinxcontrib.apidoc',\n 'sphinxcontrib.pandoc_markdown', 'walkmate', 'yummy_sphinx_theme'],\n extras_require={'tests': ['coverage', 'codacy-coverage', 'pytest-bdd',\n 'pytest-cov', 'pytest-html', 'pytest-sugar', 'pytest-watch', 'pytest',\n 'tox-travis', 'tox'], 'tools': ['autoflake', 'bandit', 'black',\n 'bump2version', 'isort', 'mypy', 'pylint', 'quickdocs', 'twine', 'wheel']})\n", (66, 676), False, 'from setuptools import setup\n')] |
#!/usr/bin/python -u
from Crypto.Cipher import AES
from hashlib import md5
from Crypto import Random
from binascii import b2a_hex
from Crypto.SelfTest.st_common import a2b_hex
BS = 16 # BlockSize
pad = lambda s: s + (BS - len(s) % BS) * chr(BS - len(s) % BS)
unpad = lambda s: s[0:-ord(s[-1:])]
class Scheme:
def __init__(self, key):
self.key = key
def encrypt(self, raw): # raw = admin , return hex(iv+ AES(admin))
raw = pad(raw)
raw = md5(raw.encode('utf8') ).digest() + raw.encode('utf8')
iv = Random.new().read(BS)
cipher = AES.new(self.key, AES.MODE_CBC, iv)
return b2a_hex(iv + cipher.encrypt(raw))
def decrypt(self, enc):
enc = a2b_hex(enc)
iv = enc[:BS]
enc = enc[BS:]
cipher = AES.new(self.key, AES.MODE_CBC, iv)
blob = cipher.decrypt(enc)
checksum = blob[:BS]
data = blob[BS:]
if md5(data).digest() == checksum:
return unpad(data)
else:
return
key = Random.new().read(BS)
scheme = Scheme(key)
#flag = open("flag", 'r').readline()
flag="111111111111111111111111"
#alarm(30)
print("Welcome to 0CTF encryption service!")
while True:
print("Please [r]egister or [l]ogin")
cmd = input()
if not cmd:
break
if cmd[0] == 'r':
name = input().strip()
if (len(name) > 32):
print("username too long!")
break
if pad(name) == pad("admin"):
print("You cannot use this name!")
break
else:
print("Here is your secret:")
print(scheme.encrypt(name))
elif cmd[0] == 'l':
data = input().strip()
name = scheme.decrypt(data)
if name == "admin":
print("Welcome admin!")
print(flag)
else:
print("Welcome %s!" % name)
else:
print("Unknown cmd!")
break
| [
"hashlib.md5",
"Crypto.Random.new",
"Crypto.Cipher.AES.new",
"Crypto.SelfTest.st_common.a2b_hex"
] | [((583, 618), 'Crypto.Cipher.AES.new', 'AES.new', (['self.key', 'AES.MODE_CBC', 'iv'], {}), '(self.key, AES.MODE_CBC, iv)\n', (590, 618), False, 'from Crypto.Cipher import AES\n'), ((711, 723), 'Crypto.SelfTest.st_common.a2b_hex', 'a2b_hex', (['enc'], {}), '(enc)\n', (718, 723), False, 'from Crypto.SelfTest.st_common import a2b_hex\n'), ((786, 821), 'Crypto.Cipher.AES.new', 'AES.new', (['self.key', 'AES.MODE_CBC', 'iv'], {}), '(self.key, AES.MODE_CBC, iv)\n', (793, 821), False, 'from Crypto.Cipher import AES\n'), ((1026, 1038), 'Crypto.Random.new', 'Random.new', ([], {}), '()\n', (1036, 1038), False, 'from Crypto import Random\n'), ((544, 556), 'Crypto.Random.new', 'Random.new', ([], {}), '()\n', (554, 556), False, 'from Crypto import Random\n'), ((922, 931), 'hashlib.md5', 'md5', (['data'], {}), '(data)\n', (925, 931), False, 'from hashlib import md5\n')] |
from requests.auth import HTTPBasicAuth
from datetime import datetime
import requests
import base64
from .credentials import *
def generate_access_token():
consumerKey = consumer_key
consumerSecret = consumer_secret
api_URL = "https://sandbox.safaricom.co.ke/oauth/v1/generate?grant_type=client_credentials"
try:
r = requests.get(api_URL, auth=HTTPBasicAuth(consumerKey, consumerSecret))
except:
r = requests.get(api_URL, auth=HTTPBasicAuth(consumerKey, consumerSecret), verify=False)
# print(r.text)
json_response = (r.json())
my_access_token = json_response["access_token"]
return my_access_token
def get_timestamp():
unformatted_time = datetime.now()
formatted_time = unformatted_time.strftime("%Y%m%d%H%M%S")
return formatted_time
def generate_password(formatted_time):
data_to_encode = (
business_short_code + pass_key + formatted_time
)
encoded_string = base64.b64encode(data_to_encode.encode())
decoded_password = encoded_string.decode("utf-8")
return decoded_password
| [
"datetime.datetime.now",
"requests.auth.HTTPBasicAuth"
] | [((703, 717), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (715, 717), False, 'from datetime import datetime\n'), ((371, 413), 'requests.auth.HTTPBasicAuth', 'HTTPBasicAuth', (['consumerKey', 'consumerSecret'], {}), '(consumerKey, consumerSecret)\n', (384, 413), False, 'from requests.auth import HTTPBasicAuth\n'), ((466, 508), 'requests.auth.HTTPBasicAuth', 'HTTPBasicAuth', (['consumerKey', 'consumerSecret'], {}), '(consumerKey, consumerSecret)\n', (479, 508), False, 'from requests.auth import HTTPBasicAuth\n')] |
# Create your models here.
# Create your models here.
from django.db import models
from datetime import date
class Teacher(models.Model):
name = models.CharField(max_length=30)
techer_id = models.CharField(unique=True, max_length=6, null=True)
designation = models.CharField(null=False, max_length=30)
joined = models.DateField('Year-Month')
phone = models.CharField(null=True, max_length=12)
def __str__(self):
return self.name
class TeacherInfo(models.Model):
name = models.ForeignKey(Teacher, on_delete=models.CASCADE)
image = models.ImageField(upload_to='media')
gender = models.CharField(max_length=7)
qualification = models.CharField(max_length=350)
dateOfBirth = models.DateField()
shortBio = models.CharField(max_length=100)
email = models.EmailField(max_length=30)
currentLocation = models.CharField(max_length=80)
description = models.TextField(max_length=400)
def __str__(self):
return str(self.name)
class EduLevel(models.Model):
''' This is to generate educational levels like
ssc/hsc etc
'''
teacher = models.ForeignKey(Teacher, on_delete=models.CASCADE)
yearCompleted = models.DateField()
instituteName = models.CharField(max_length=100)
Field_of_expertise = models.CharField(max_length=100, verbose_name="Field of Interest", default="C")
class TeacherQualification(models.Model):
teacher = models.ForeignKey(Teacher, on_delete=models.CASCADE)
level = models.ManyToManyField(EduLevel)
def __str__(self):
return str(self.teacher)
| [
"django.db.models.EmailField",
"django.db.models.DateField",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.db.models.ImageField",
"django.db.models.CharField"
] | [((153, 184), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (169, 184), False, 'from django.db import models\n'), ((201, 255), 'django.db.models.CharField', 'models.CharField', ([], {'unique': '(True)', 'max_length': '(6)', 'null': '(True)'}), '(unique=True, max_length=6, null=True)\n', (217, 255), False, 'from django.db import models\n'), ((274, 317), 'django.db.models.CharField', 'models.CharField', ([], {'null': '(False)', 'max_length': '(30)'}), '(null=False, max_length=30)\n', (290, 317), False, 'from django.db import models\n'), ((331, 361), 'django.db.models.DateField', 'models.DateField', (['"""Year-Month"""'], {}), "('Year-Month')\n", (347, 361), False, 'from django.db import models\n'), ((374, 416), 'django.db.models.CharField', 'models.CharField', ([], {'null': '(True)', 'max_length': '(12)'}), '(null=True, max_length=12)\n', (390, 416), False, 'from django.db import models\n'), ((512, 564), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Teacher'], {'on_delete': 'models.CASCADE'}), '(Teacher, on_delete=models.CASCADE)\n', (529, 564), False, 'from django.db import models\n'), ((577, 613), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""media"""'}), "(upload_to='media')\n", (594, 613), False, 'from django.db import models\n'), ((627, 657), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(7)'}), '(max_length=7)\n', (643, 657), False, 'from django.db import models\n'), ((678, 710), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(350)'}), '(max_length=350)\n', (694, 710), False, 'from django.db import models\n'), ((729, 747), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (745, 747), False, 'from django.db import models\n'), ((763, 795), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (779, 795), False, 'from django.db import models\n'), ((808, 840), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (825, 840), False, 'from django.db import models\n'), ((863, 894), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(80)'}), '(max_length=80)\n', (879, 894), False, 'from django.db import models\n'), ((913, 945), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(400)'}), '(max_length=400)\n', (929, 945), False, 'from django.db import models\n'), ((1123, 1175), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Teacher'], {'on_delete': 'models.CASCADE'}), '(Teacher, on_delete=models.CASCADE)\n', (1140, 1175), False, 'from django.db import models\n'), ((1196, 1214), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (1212, 1214), False, 'from django.db import models\n'), ((1235, 1267), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1251, 1267), False, 'from django.db import models\n'), ((1293, 1372), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'verbose_name': '"""Field of Interest"""', 'default': '"""C"""'}), "(max_length=100, verbose_name='Field of Interest', default='C')\n", (1309, 1372), False, 'from django.db import models\n'), ((1433, 1485), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Teacher'], {'on_delete': 'models.CASCADE'}), '(Teacher, on_delete=models.CASCADE)\n', (1450, 1485), False, 'from django.db import models\n'), ((1498, 1530), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['EduLevel'], {}), '(EduLevel)\n', (1520, 1530), False, 'from django.db import models\n')] |
import socket
import threading
import os
from messages import send_json_message, receive_json_message
from security import generate_symmetric_key, encrypt_message, decrypt_message
class P2P_Client:
def __init__(self, peer_ip, peer_port, uid, peer_uid, peer_ik, private_key):
"""
Client in P2P connection
:param peer_ip: IP address of the server
:param peer_port: port of the server
:param uid: UID of the client
:param peer_uid: UID of the peer who starts the server
:param peer_ik: peer public key
:param private_key: client private key
"""
# Open client socket
self.p2p_c = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.p2p_c.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# Set expected server peer information
self.peer_ip = peer_ip
self.peer_port = peer_port
self.peer_uid = peer_uid
self.peer_ik = peer_ik
# Set client private key
self.private_key = private_key
def _send_message(self):
"""
Send encrypted message to the server
"""
while True:
msg = input('')
aes_key = generate_symmetric_key(self.private_key, self.peer_ik)
iv = os.urandom(16)
encrypted_msg = encrypt_message(aes_key, iv, msg.encode())
send_json_message(self.p2p_c, { 'Message' : encrypted_msg.hex(), 'iv' : iv.hex() })
def start(self):
"""
Run the P2P client, connect to the server and send messages
"""
# Connect to the server
self.p2p_c.connect((self.peer_ip, self.peer_port))
print(f'Established P2P connection with ({self.peer_ip},{self.peer_port})')
print(f'\nPress enter to join P2P chat with {self.peer_uid}')
# Start thread for message sending
send_thread = threading.Thread(target=self._send_message)
send_thread.daemon = True
send_thread.start()
# Listen for incoming messages from the server
while True:
msg = receive_json_message(self.p2p_c)
if msg is None:
print(f'Lost connection to the peer {self.peer_uid}:({self.peer_ip},{self.peer_port}), closing...')
break
aes_key = generate_symmetric_key(self.private_key, self.peer_ik)
decrypted_msg = decrypt_message(aes_key, bytes.fromhex(msg['iv']), bytes.fromhex(msg['Message']))
print(f'{self.peer_uid} ({self.peer_ip},{self.peer_port}): {decrypted_msg.decode()}') | [
"socket.socket",
"os.urandom",
"messages.receive_json_message",
"security.generate_symmetric_key",
"threading.Thread"
] | [((672, 721), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (685, 721), False, 'import socket\n'), ((1920, 1963), 'threading.Thread', 'threading.Thread', ([], {'target': 'self._send_message'}), '(target=self._send_message)\n', (1936, 1963), False, 'import threading\n'), ((1220, 1274), 'security.generate_symmetric_key', 'generate_symmetric_key', (['self.private_key', 'self.peer_ik'], {}), '(self.private_key, self.peer_ik)\n', (1242, 1274), False, 'from security import generate_symmetric_key, encrypt_message, decrypt_message\n'), ((1292, 1306), 'os.urandom', 'os.urandom', (['(16)'], {}), '(16)\n', (1302, 1306), False, 'import os\n'), ((2120, 2152), 'messages.receive_json_message', 'receive_json_message', (['self.p2p_c'], {}), '(self.p2p_c)\n', (2140, 2152), False, 'from messages import send_json_message, receive_json_message\n'), ((2343, 2397), 'security.generate_symmetric_key', 'generate_symmetric_key', (['self.private_key', 'self.peer_ik'], {}), '(self.private_key, self.peer_ik)\n', (2365, 2397), False, 'from security import generate_symmetric_key, encrypt_message, decrypt_message\n')] |
from __future__ import (print_function,
division)
from keras import backend as K
import numpy as np
def dice_loss(target_class=1, mask_class=None):
'''
Dice loss.
Expects integer or one-hot class labeling in y_true.
Expects outputs in range [0, 1] in y_pred.
Computes the soft dice loss considering all classes in target_class as one
aggregate target class and ignoring all elements with ground truth classes
in mask_class.
target_class : integer or list
mask_class : integer or list
'''
if not hasattr(target_class, '__len__'):
target_class = [target_class]
if mask_class is not None and not hasattr(mask_class, '__len__'):
mask_class = [mask_class]
# Define the keras expression.
def dice(y_true, y_pred):
smooth = 1
# If needed, change ground truth from categorical to integer format.
if K.ndim(y_true) > K.ndim(y_pred):
data_format = K.image_data_format()
if data_format=='channels_first':
class_axis = 1
elif data_format=='channels_last':
class_axis = K.ndim(y_true)-1
else:
raise ValueError("Unknown data_format {}".format(data_format))
y_true = K.argmax(y_true, axis=class_axis)
# Flatten all inputs.
y_true_f = K.flatten(y_true)
y_true_f = K.cast(y_true_f, 'int32')
y_pred_f = K.flatten(y_pred)
# Aggregate target classes, mask out classes in mask_class.
if K.backend()=='theano':
y_target = K.sum([K.equal(y_true_f, t) for t in target_class],
axis=0)
if mask_class is not None:
mask_out = K.sum([K.equal(y_true_f, t) for t in mask_class],
axis=0)
idxs = K.equal(mask_out, 0).nonzero()
y_target = y_target[idxs]
y_pred_f = y_pred_f[idxs]
elif K.backend()=='tensorflow':
y_target = K.sum([K.cast(K.equal(y_true_f, t), K.floatx()) \
for t in target_class], axis=0)
if mask_class is not None:
mask_out = K.sum([K.cast(K.equal(y_true_f, t), K.floatx()) \
for t in mask_class], axis=0)
mask_bool = K.equal(mask_out, 0)
y_target = tf.boolean_mask(y_target, mask_bool)
y_pred_f = tf.boolean_mask(y_pred_f, mask_bool)
else:
raise NotImplementedError("dice loss not implemented for backend: "
"{}".format(K.backend()))
# Compute dice value.
intersection = K.sum(y_target * y_pred_f)
dice_val = -(2.*intersection+smooth) / \
(K.sum(y_target)+K.sum(y_pred_f)+smooth)
return dice_val
# Set a custom function name
tag = "_"+"_".join(str(i) for i in target_class)
if mask_class is not None:
tag += "_"+"_".join("m"+str(i) for i in mask_class)
dice.__name__ = "dice_loss"+tag
return dice
| [
"keras.backend.cast",
"keras.backend.image_data_format",
"keras.backend.sum",
"keras.backend.equal",
"keras.backend.flatten",
"keras.backend.floatx",
"keras.backend.argmax",
"keras.backend.backend",
"keras.backend.ndim"
] | [((1407, 1424), 'keras.backend.flatten', 'K.flatten', (['y_true'], {}), '(y_true)\n', (1416, 1424), True, 'from keras import backend as K\n'), ((1444, 1469), 'keras.backend.cast', 'K.cast', (['y_true_f', '"""int32"""'], {}), "(y_true_f, 'int32')\n", (1450, 1469), True, 'from keras import backend as K\n'), ((1489, 1506), 'keras.backend.flatten', 'K.flatten', (['y_pred'], {}), '(y_pred)\n', (1498, 1506), True, 'from keras import backend as K\n'), ((2775, 2801), 'keras.backend.sum', 'K.sum', (['(y_target * y_pred_f)'], {}), '(y_target * y_pred_f)\n', (2780, 2801), True, 'from keras import backend as K\n'), ((942, 956), 'keras.backend.ndim', 'K.ndim', (['y_true'], {}), '(y_true)\n', (948, 956), True, 'from keras import backend as K\n'), ((959, 973), 'keras.backend.ndim', 'K.ndim', (['y_pred'], {}), '(y_pred)\n', (965, 973), True, 'from keras import backend as K\n'), ((1001, 1022), 'keras.backend.image_data_format', 'K.image_data_format', ([], {}), '()\n', (1020, 1022), True, 'from keras import backend as K\n'), ((1311, 1344), 'keras.backend.argmax', 'K.argmax', (['y_true'], {'axis': 'class_axis'}), '(y_true, axis=class_axis)\n', (1319, 1344), True, 'from keras import backend as K\n'), ((1595, 1606), 'keras.backend.backend', 'K.backend', ([], {}), '()\n', (1604, 1606), True, 'from keras import backend as K\n'), ((2038, 2049), 'keras.backend.backend', 'K.backend', ([], {}), '()\n', (2047, 2049), True, 'from keras import backend as K\n'), ((1648, 1668), 'keras.backend.equal', 'K.equal', (['y_true_f', 't'], {}), '(y_true_f, t)\n', (1655, 1668), True, 'from keras import backend as K\n'), ((2406, 2426), 'keras.backend.equal', 'K.equal', (['mask_out', '(0)'], {}), '(mask_out, 0)\n', (2413, 2426), True, 'from keras import backend as K\n'), ((2872, 2887), 'keras.backend.sum', 'K.sum', (['y_target'], {}), '(y_target)\n', (2877, 2887), True, 'from keras import backend as K\n'), ((2888, 2903), 'keras.backend.sum', 'K.sum', (['y_pred_f'], {}), '(y_pred_f)\n', (2893, 2903), True, 'from keras import backend as K\n'), ((1176, 1190), 'keras.backend.ndim', 'K.ndim', (['y_true'], {}), '(y_true)\n', (1182, 1190), True, 'from keras import backend as K\n'), ((1803, 1823), 'keras.backend.equal', 'K.equal', (['y_true_f', 't'], {}), '(y_true_f, t)\n', (1810, 1823), True, 'from keras import backend as K\n'), ((1910, 1930), 'keras.backend.equal', 'K.equal', (['mask_out', '(0)'], {}), '(mask_out, 0)\n', (1917, 1930), True, 'from keras import backend as K\n'), ((2699, 2710), 'keras.backend.backend', 'K.backend', ([], {}), '()\n', (2708, 2710), True, 'from keras import backend as K\n'), ((2102, 2122), 'keras.backend.equal', 'K.equal', (['y_true_f', 't'], {}), '(y_true_f, t)\n', (2109, 2122), True, 'from keras import backend as K\n'), ((2124, 2134), 'keras.backend.floatx', 'K.floatx', ([], {}), '()\n', (2132, 2134), True, 'from keras import backend as K\n'), ((2279, 2299), 'keras.backend.equal', 'K.equal', (['y_true_f', 't'], {}), '(y_true_f, t)\n', (2286, 2299), True, 'from keras import backend as K\n'), ((2301, 2311), 'keras.backend.floatx', 'K.floatx', ([], {}), '()\n', (2309, 2311), True, 'from keras import backend as K\n')] |
from pytest_cases import parametrize_with_cases
from jonahlint.comments_getter import CommentData, CommentsGetter
def case_standalone_comment():
code = """a = 2
# This is a comment
print(min(1, 2, 3))
"""
expected_comments = [CommentData(line_number=2, content="This is a comment")]
return code, expected_comments
def case_comment_in_assignment():
code = "a = 2 # This is a comment"
expected_comments = [CommentData(line_number=1, content="This is a comment")]
return code, expected_comments
def case_multiple_comments():
code = """class A: # This class is awesome
def __init__(): # I am a constructor
pass
# I'm a barrier
def f():
return None # Nothing is returned!
"""
expected_comments = [
CommentData(line_number=1, content="This class is awesome"),
CommentData(line_number=3, content="I am a constructor"),
CommentData(line_number=5, content="I'm a barrier"),
CommentData(line_number=8, content="Nothing is returned!"),
]
return code, expected_comments
@parametrize_with_cases(argnames=["code", "expected_comments"], cases=".")
def test_comments_getter(code, expected_comments):
assert CommentsGetter.get_comments(code) == expected_comments
| [
"pytest_cases.parametrize_with_cases",
"jonahlint.comments_getter.CommentsGetter.get_comments",
"jonahlint.comments_getter.CommentData"
] | [((1088, 1161), 'pytest_cases.parametrize_with_cases', 'parametrize_with_cases', ([], {'argnames': "['code', 'expected_comments']", 'cases': '"""."""'}), "(argnames=['code', 'expected_comments'], cases='.')\n", (1110, 1161), False, 'from pytest_cases import parametrize_with_cases\n'), ((242, 297), 'jonahlint.comments_getter.CommentData', 'CommentData', ([], {'line_number': '(2)', 'content': '"""This is a comment"""'}), "(line_number=2, content='This is a comment')\n", (253, 297), False, 'from jonahlint.comments_getter import CommentData, CommentsGetter\n'), ((435, 490), 'jonahlint.comments_getter.CommentData', 'CommentData', ([], {'line_number': '(1)', 'content': '"""This is a comment"""'}), "(line_number=1, content='This is a comment')\n", (446, 490), False, 'from jonahlint.comments_getter import CommentData, CommentsGetter\n'), ((788, 847), 'jonahlint.comments_getter.CommentData', 'CommentData', ([], {'line_number': '(1)', 'content': '"""This class is awesome"""'}), "(line_number=1, content='This class is awesome')\n", (799, 847), False, 'from jonahlint.comments_getter import CommentData, CommentsGetter\n'), ((857, 913), 'jonahlint.comments_getter.CommentData', 'CommentData', ([], {'line_number': '(3)', 'content': '"""I am a constructor"""'}), "(line_number=3, content='I am a constructor')\n", (868, 913), False, 'from jonahlint.comments_getter import CommentData, CommentsGetter\n'), ((923, 974), 'jonahlint.comments_getter.CommentData', 'CommentData', ([], {'line_number': '(5)', 'content': '"""I\'m a barrier"""'}), '(line_number=5, content="I\'m a barrier")\n', (934, 974), False, 'from jonahlint.comments_getter import CommentData, CommentsGetter\n'), ((984, 1042), 'jonahlint.comments_getter.CommentData', 'CommentData', ([], {'line_number': '(8)', 'content': '"""Nothing is returned!"""'}), "(line_number=8, content='Nothing is returned!')\n", (995, 1042), False, 'from jonahlint.comments_getter import CommentData, CommentsGetter\n'), ((1224, 1257), 'jonahlint.comments_getter.CommentsGetter.get_comments', 'CommentsGetter.get_comments', (['code'], {}), '(code)\n', (1251, 1257), False, 'from jonahlint.comments_getter import CommentData, CommentsGetter\n')] |
from django.contrib import admin
from .models import Owner, Car, Ownership, Drivers_license
admin.site.register(Owner)
admin.site.register(Car)
admin.site.register(Ownership)
admin.site.register(Drivers_license)
| [
"django.contrib.admin.site.register"
] | [((93, 119), 'django.contrib.admin.site.register', 'admin.site.register', (['Owner'], {}), '(Owner)\n', (112, 119), False, 'from django.contrib import admin\n'), ((120, 144), 'django.contrib.admin.site.register', 'admin.site.register', (['Car'], {}), '(Car)\n', (139, 144), False, 'from django.contrib import admin\n'), ((145, 175), 'django.contrib.admin.site.register', 'admin.site.register', (['Ownership'], {}), '(Ownership)\n', (164, 175), False, 'from django.contrib import admin\n'), ((176, 212), 'django.contrib.admin.site.register', 'admin.site.register', (['Drivers_license'], {}), '(Drivers_license)\n', (195, 212), False, 'from django.contrib import admin\n')] |
from kernel_tuner import tune_kernel
import numpy
import argparse
import json
def generate_code(tuning_parameters):
code = \
"__global__ void fct_ale_a2(const int maxLevels, const int * __restrict__ nLevels, const int * __restrict__ elementNodes, <%REAL_TYPE%><%VECTOR_SIZE%> * __restrict__ UV_rhs, const <%REAL_TYPE%> * __restrict__ fct_ttf_max, const <%REAL_TYPE%> * __restrict__ fct_ttf_min)\n" \
"{\n" \
"const <%INT_TYPE%> elementIndex = (blockIdx.x * maxLevels * 2);\n" \
"const <%INT_TYPE%> nodeOneIndex = (elementNodes[(blockIdx.x * 3)] - 1) * maxLevels;\n" \
"const <%INT_TYPE%> nodeTwoIndex = (elementNodes[(blockIdx.x * 3) + 1] - 1) * maxLevels;\n" \
"const <%INT_TYPE%> nodeThreeIndex = (elementNodes[(blockIdx.x * 3) + 2] - 1) * maxLevels;\n" \
"const <%INT_TYPE%> maxElementLevel = nLevels[blockIdx.x] - 1;\n" \
"\n" \
"for ( <%INT_TYPE%> level = threadIdx.x; level < maxLevels - 1; level += <%BLOCK_SIZE%> )\n" \
"{\n" \
"<%COMPUTE_BLOCK%>" \
"}\n" \
"}\n"
compute_block = \
"if ( level + <%OFFSET%> < maxElementLevel )\n" \
"{\n" \
"<%REAL_TYPE%> temp = 0.0;\n" \
"temp = <%FMAX%>(fct_ttf_max[nodeOneIndex + level + <%OFFSET%>], fct_ttf_max[nodeTwoIndex + level + <%OFFSET%>]);\n" \
"temp = <%FMAX%>(temp, fct_ttf_max[nodeThreeIndex + level + <%OFFSET%>]);\n" \
"UV_rhs[elementIndex + ((level + <%OFFSET%>) * 2)] = temp;\n" \
"temp = <%FMIN%>(fct_ttf_min[nodeOneIndex + level + <%OFFSET%>], fct_ttf_min[nodeTwoIndex + level + <%OFFSET%>]);\n" \
"temp = <%FMIN%>(temp, fct_ttf_min[nodeThreeIndex + level + <%OFFSET%>]);\n" \
"UV_rhs[elementIndex + ((level + <%OFFSET%>) * 2) + 1] = temp;\n" \
"}\n" \
"else if ( (level + <%OFFSET%> > maxElementLevel) && (level + <%OFFSET%> < maxLevels - 1) )\n" \
"{\n" \
"UV_rhs[elementIndex + ((level + <%OFFSET%>) * 2)] = <%MIN%>;\n" \
"UV_rhs[elementIndex + ((level + <%OFFSET%>) * 2) + 1] = <%MAX%>;\n" \
"}\n"
compute_block_vector = \
"if ( level + <%OFFSET%> < maxElementLevel )\n" \
"{\n" \
"<%REAL_TYPE%><%VECTOR_SIZE%> temp = make_<%REAL_TYPE%>2(0.0, 0.0);\n" \
"temp.x = <%FMAX%>(fct_ttf_max[nodeOneIndex + level + <%OFFSET%>], fct_ttf_max[nodeTwoIndex + level + <%OFFSET%>]);\n" \
"temp.x = <%FMAX%>(temp.x, fct_ttf_max[nodeThreeIndex + level + <%OFFSET%>]);\n" \
"temp.y = <%FMIN%>(fct_ttf_min[nodeOneIndex + level + <%OFFSET%>], fct_ttf_min[nodeTwoIndex + level + <%OFFSET%>]);\n" \
"temp.y = <%FMIN%>(temp.y, fct_ttf_min[nodeThreeIndex + level + <%OFFSET%>]);\n" \
"UV_rhs[elementIndex + level + <%OFFSET%>] = temp;\n" \
"}\n" \
"else if ( (level + <%OFFSET%> > maxElementLevel) && (level + <%OFFSET%> < maxLevels - 1) )\n" \
"{\n" \
"UV_rhs[elementIndex + level + <%OFFSET%>] = make_<%REAL_TYPE%>2(<%MIN%>, <%MAX%>);\n" \
"}\n"
if tuning_parameters["tiling_x"] > 1:
code = code.replace("<%BLOCK_SIZE%>", str(tuning_parameters["block_size_x"] * tuning_parameters["tiling_x"]))
else:
code = code.replace("<%BLOCK_SIZE%>", str(tuning_parameters["block_size_x"]))
compute = str()
for tile in range(0, tuning_parameters["tiling_x"]):
if tile == 0:
if tuning_parameters["vector_size"] == 1:
compute = compute + compute_block.replace(" + <%OFFSET%>", "")
else:
compute = compute + compute_block_vector.replace(" + <%OFFSET%>", "")
else:
if tuning_parameters["vector_size"] == 1:
compute = compute + compute_block.replace("<%OFFSET%>", str(tuning_parameters["block_size_x"] * tile))
else:
compute = compute + compute_block_vector.replace("<%OFFSET%>", str(tuning_parameters["block_size_x"] * tile))
if tuning_parameters["real_type"] == "float":
compute = compute.replace("<%MIN%>", str(numpy.finfo(numpy.float32).min))
compute = compute.replace("<%MAX%>", str(numpy.finfo(numpy.float32).max))
elif tuning_parameters["real_type"] == "double":
compute = compute.replace("<%MIN%>", str(numpy.finfo(numpy.float64).min))
compute = compute.replace("<%MAX%>", str(numpy.finfo(numpy.float64).max))
else:
raise ValueError
code = code.replace("<%COMPUTE_BLOCK%>", compute)
if tuning_parameters["real_type"] == "float":
code = code.replace("<%FMAX%>", "fmaxf")
code = code.replace("<%FMIN%>", "fminf")
elif tuning_parameters["real_type"] == "double":
code = code.replace("<%FMAX%>", "fmax")
code = code.replace("<%FMIN%>", "fmin")
else:
raise ValueError
code = code.replace("<%INT_TYPE%>", tuning_parameters["int_type"].replace("_", " "))
code = code.replace("<%REAL_TYPE%>", tuning_parameters["real_type"])
if tuning_parameters["vector_size"] == 1:
code = code.replace("<%VECTOR_SIZE%>", "")
else:
code = code.replace("<%VECTOR_SIZE%>", str(tuning_parameters["vector_size"]))
code = code.replace("maxLevels * 2", "maxLevels")
return code
def reference(elements, levels, max_levels, nodes, UV_rhs, fct_ttf_max, fct_ttf_min, real_type):
numpy_real_type = None
if real_type == "float":
numpy_real_type = numpy.float32
elif real_type == "double":
numpy_real_type = numpy.float64
else:
raise ValueError
memory_bytes = elements * 16
for element in range(0, elements):
for level in range(0, levels[element] - 1):
memory_bytes = memory_bytes + (8 * numpy.dtype(numpy_real_type).itemsize)
item = (element * max_levels * 2) + (level * 2)
UV_rhs[item] = max(fct_ttf_max[((nodes[element * 3] - 1) * max_levels) + level], fct_ttf_max[((nodes[(element * 3) + 1] - 1) * max_levels) + level], fct_ttf_max[((nodes[(element * 3) + 2] - 1) * max_levels) + level])
UV_rhs[item + 1] = min(fct_ttf_min[((nodes[element * 3] - 1) * max_levels) + level], fct_ttf_min[((nodes[(element * 3) + 1] - 1) * max_levels) + level], fct_ttf_min[((nodes[(element * 3) + 2] - 1) * max_levels) + level])
if levels[element] <= max_levels - 1:
for level in range(levels[element], max_levels - 1):
memory_bytes = memory_bytes + (2 * numpy.dtype(numpy_real_type).itemsize)
item = (element * max_levels * 2) + (level * 2)
UV_rhs[item] = numpy.finfo(numpy_real_type).min
UV_rhs[item + 1] = numpy.finfo(numpy_real_type).max
return memory_bytes
def tune(elements, nodes, max_levels, max_tile, real_type, quiet=True):
numpy_real_type = None
if real_type == "float":
numpy_real_type = numpy.float32
elif real_type == "double":
numpy_real_type = numpy.float64
else:
raise ValueError
# Tuning and code generation parameters
tuning_parameters = dict()
tuning_parameters["int_type"] = ["unsigned_int", "int"]
tuning_parameters["real_type"] = [real_type]
tuning_parameters["max_levels"] = [str(max_levels)]
tuning_parameters["block_size_x"] = [32 * i for i in range(1, 33)]
tuning_parameters["tiling_x"] = [i for i in range(1, max_tile)]
tuning_parameters["vector_size"] = [1, 2]
constraints = list()
constraints.append("block_size_x * tiling_x <= max_levels")
# Memory allocation and initialization
uv_rhs = numpy.zeros(elements * max_levels * 2).astype(numpy_real_type)
uv_rhs_control = numpy.zeros_like(uv_rhs).astype(numpy_real_type)
fct_ttf_max = numpy.random.randn(nodes * max_levels).astype(numpy_real_type)
fct_ttf_min = numpy.random.randn(nodes * max_levels).astype(numpy_real_type)
levels = numpy.zeros(elements).astype(numpy.int32)
element_nodes = numpy.zeros(elements * 3).astype(numpy.int32)
for element in range(0, elements):
levels[element] = numpy.random.randint(3, max_levels)
element_nodes[(element * 3)] = numpy.random.randint(1, nodes + 1)
element_nodes[(element * 3) + 1] = numpy.random.randint(1, nodes + 1)
element_nodes[(element * 3) + 2] = numpy.random.randint(1, nodes + 1)
arguments = [numpy.int32(max_levels), levels, element_nodes, uv_rhs, fct_ttf_max, fct_ttf_min]
# Reference
memory_bytes = reference(elements, levels, max_levels, element_nodes, uv_rhs_control, fct_ttf_max, fct_ttf_min, real_type)
arguments_control = [None, None, None, uv_rhs_control, None, None]
# Tuning
results, _ = tune_kernel("fct_ale_a2", generate_code, "{} * block_size_x".format(elements), arguments, tuning_parameters, lang="CUDA", answer=arguments_control, restrictions=constraints, quiet=quiet)
# Memory bandwidth
for result in results:
result["memory_bandwidth"] = memory_bytes / (result["time"] / 10**3)
return results
def parse_command_line():
parser = argparse.ArgumentParser(description="FESOM2 FCT ALE A2")
parser.add_argument("--elements", help="The number of elements.", type=int, required=True)
parser.add_argument("--nodes", help="The number of nodes.", type=int, required=True)
parser.add_argument("--max_levels", help="The maximum number of vertical levels per element.", type=int, required=True)
parser.add_argument("--max_tile", help="The maximum tiling factor.", type=int, default=2)
parser.add_argument("--real_type", help="The floating point type to use.", choices=["float", "double"], type=str, required=True)
parser.add_argument("--verbose", help="Print all kernel configurations.", default=True, action="store_false")
parser.add_argument("--store", help="Store performance results in a JSON file.", default=False, action="store_true")
return parser.parse_args()
if __name__ == "__main__":
command_line = parse_command_line()
results = tune(command_line.elements, command_line.nodes, command_line.max_levels, command_line.max_tile, command_line.real_type, command_line.verbose)
best_configuration = min(results, key=lambda x : x["time"])
print("/* Memory bandwidth: {:.2f} GB/s */".format(best_configuration["memory_bandwidth"] / 10**9))
print("/* Block size X: {} */".format(best_configuration["block_size_x"]))
print(generate_code(best_configuration))
if command_line.store:
try:
with open("fct_ale_a2_{}_{}_{}_{}.json".format(command_line.nodes, command_line.elements, command_line.max_levels, command_line.real_type), "x") as fp:
json.dump(results, fp)
except FileExistsError:
print("Impossible to save the results, a results file already exists for a similar experiment.") | [
"argparse.ArgumentParser",
"numpy.int32",
"numpy.random.randint",
"numpy.zeros",
"numpy.random.randn",
"numpy.finfo",
"numpy.dtype",
"numpy.zeros_like",
"json.dump"
] | [((9002, 9058), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""FESOM2 FCT ALE A2"""'}), "(description='FESOM2 FCT ALE A2')\n", (9025, 9058), False, 'import argparse\n'), ((8020, 8055), 'numpy.random.randint', 'numpy.random.randint', (['(3)', 'max_levels'], {}), '(3, max_levels)\n', (8040, 8055), False, 'import numpy\n'), ((8095, 8129), 'numpy.random.randint', 'numpy.random.randint', (['(1)', '(nodes + 1)'], {}), '(1, nodes + 1)\n', (8115, 8129), False, 'import numpy\n'), ((8173, 8207), 'numpy.random.randint', 'numpy.random.randint', (['(1)', '(nodes + 1)'], {}), '(1, nodes + 1)\n', (8193, 8207), False, 'import numpy\n'), ((8251, 8285), 'numpy.random.randint', 'numpy.random.randint', (['(1)', '(nodes + 1)'], {}), '(1, nodes + 1)\n', (8271, 8285), False, 'import numpy\n'), ((8303, 8326), 'numpy.int32', 'numpy.int32', (['max_levels'], {}), '(max_levels)\n', (8314, 8326), False, 'import numpy\n'), ((7539, 7577), 'numpy.zeros', 'numpy.zeros', (['(elements * max_levels * 2)'], {}), '(elements * max_levels * 2)\n', (7550, 7577), False, 'import numpy\n'), ((7623, 7647), 'numpy.zeros_like', 'numpy.zeros_like', (['uv_rhs'], {}), '(uv_rhs)\n', (7639, 7647), False, 'import numpy\n'), ((7690, 7728), 'numpy.random.randn', 'numpy.random.randn', (['(nodes * max_levels)'], {}), '(nodes * max_levels)\n', (7708, 7728), False, 'import numpy\n'), ((7771, 7809), 'numpy.random.randn', 'numpy.random.randn', (['(nodes * max_levels)'], {}), '(nodes * max_levels)\n', (7789, 7809), False, 'import numpy\n'), ((7847, 7868), 'numpy.zeros', 'numpy.zeros', (['elements'], {}), '(elements)\n', (7858, 7868), False, 'import numpy\n'), ((7909, 7934), 'numpy.zeros', 'numpy.zeros', (['(elements * 3)'], {}), '(elements * 3)\n', (7920, 7934), False, 'import numpy\n'), ((10596, 10618), 'json.dump', 'json.dump', (['results', 'fp'], {}), '(results, fp)\n', (10605, 10618), False, 'import json\n'), ((4057, 4083), 'numpy.finfo', 'numpy.finfo', (['numpy.float32'], {}), '(numpy.float32)\n', (4068, 4083), False, 'import numpy\n'), ((4139, 4165), 'numpy.finfo', 'numpy.finfo', (['numpy.float32'], {}), '(numpy.float32)\n', (4150, 4165), False, 'import numpy\n'), ((6568, 6596), 'numpy.finfo', 'numpy.finfo', (['numpy_real_type'], {}), '(numpy_real_type)\n', (6579, 6596), False, 'import numpy\n'), ((6636, 6664), 'numpy.finfo', 'numpy.finfo', (['numpy_real_type'], {}), '(numpy_real_type)\n', (6647, 6664), False, 'import numpy\n'), ((4274, 4300), 'numpy.finfo', 'numpy.finfo', (['numpy.float64'], {}), '(numpy.float64)\n', (4285, 4300), False, 'import numpy\n'), ((4356, 4382), 'numpy.finfo', 'numpy.finfo', (['numpy.float64'], {}), '(numpy.float64)\n', (4367, 4382), False, 'import numpy\n'), ((5711, 5739), 'numpy.dtype', 'numpy.dtype', (['numpy_real_type'], {}), '(numpy_real_type)\n', (5722, 5739), False, 'import numpy\n'), ((6434, 6462), 'numpy.dtype', 'numpy.dtype', (['numpy_real_type'], {}), '(numpy_real_type)\n', (6445, 6462), False, 'import numpy\n')] |
import json
import re
from fastjsonrpc import jsonrpc
from fastjsonrpc.jsonrpc import JSONRPCError
from .helpers import ExtendedTestCase, Regex
class TestEncodeRequest(ExtendedTestCase):
def assert_json(self, value, expected):
value = json.loads(value)
expected = json.loads(expected)
self.assertEquals(value, expected)
def assert_json_values(self, value, **kwargs):
value = json.loads(value)
for key, expected in kwargs.items():
if expected is list:
assert isinstance(value[key], list)
elif isinstance(expected, Regex):
value2 = str(value[key])
self.assertTrue(re.match(expected.pattern, value2))
else:
self.assertEquals(value[key], expected)
def test_noArgs(self):
self.assertRaises(TypeError, jsonrpc.encodeRequest)
def test_onlyMethod(self):
result = jsonrpc.encodeRequest('method')
pattern = r'\{"method": "method", "id": \d+\}'
self.assertTrue(re.match(pattern, result))
def test_methodIdInt(self):
result = jsonrpc.encodeRequest('method', id_=123)
expected = '{"method": "method", "id": 123}'
self.assert_json(result, expected)
def test_methodIdStr(self):
result = jsonrpc.encodeRequest('method', id_='abc')
expected = '{"method": "method", "id": "abc"}'
self.assert_json(result, expected)
def test_methodArgs(self):
result = jsonrpc.encodeRequest('method', ['abc', 'def'])
self.assert_json_values(result, params=['abc', 'def'], method='method', id=Regex(r'\d+'))
def test_methodKwargs(self):
result = jsonrpc.encodeRequest('method', {'first': 'a', 'second': 'b'})
self.assert_json_values(result, params={'first': 'a', 'second': 'b'}, method='method', id=Regex(r'\d+'))
def test_methodVersion1(self):
result = jsonrpc.encodeRequest('method', version=1.0)
self.assert_json_values(result, method='method', id=Regex(r'\d+'))
def test_methodVersion2(self):
result = jsonrpc.encodeRequest('method', version=2.0)
self.assert_json_values(result, jsonrpc='2.0', method='method', id=Regex(r'\d+'))
def test_methodVersion2int(self):
result = jsonrpc.encodeRequest('method', version=2)
self.assert_json_values(result, jsonrpc='2.0', method='method', id=Regex(r'\d+'))
def test_methodVersion3(self):
result = jsonrpc.encodeRequest('method', version=3)
self.assert_json_values(result, method='method', id=Regex(r'\d+'))
def test_methodIdVersion(self):
result = jsonrpc.encodeRequest('method', version=2.0, id_=123)
self.assert_json_values(result, jsonrpc='2.0', method='method', id=123)
def test_methodArgsId(self):
result = jsonrpc.encodeRequest('method', 'abcdef', id_=123)
self.assert_json_values(result, params='abcdef', method='method', id=123)
def test_methodArgsVersion2(self):
result = jsonrpc.encodeRequest('method', 'abcdef', version=2)
self.assert_json_values(result, jsonrpc='2.0', params='abcdef', method='method', id=Regex(r'\d+'))
def test_all(self):
result = jsonrpc.encodeRequest('method', 'abcdef', id_=123, version=2.0)
self.assert_json_values(result, jsonrpc='2.0', params='abcdef', method='method', id=123)
class TestDecodeRequest(ExtendedTestCase):
def test_empty(self):
self.assertRaises(Exception, jsonrpc.decodeRequest, '')
def test_malformed(self):
self.assertRaises(Exception, jsonrpc.decodeRequest, '{"method": "aa')
def test_onlyMethod(self):
result = jsonrpc.decodeRequest('{"method": "aa"}')
expected = {'method': 'aa'}
self.assert_json(result, expected)
def test_onlyParams(self):
request = '{"params": "abcdef"}'
result = jsonrpc.decodeRequest(request)
expected = {'params': 'abcdef'}
self.assert_json(result, expected)
def test_onlyIdInt(self):
request = '{"id": 123}'
result = jsonrpc.decodeRequest(request)
expected = {'id': 123}
self.assertEquals(result, expected)
def test_onlyIdStr(self):
request = '{"id": "1b3"}'
result = jsonrpc.decodeRequest(request)
expected = {'id': '1b3'}
self.assertEquals(result, expected)
def test_onlyVersionInt(self):
request = '{"jsonrpc": 1}'
result = jsonrpc.decodeRequest(request)
expected = {'jsonrpc': 1}
self.assertEquals(result, expected)
def test_onlyVersionFloat(self):
request = '{"jsonrpc": 2.0}'
result = jsonrpc.decodeRequest(request)
expected = {'jsonrpc': 2.0}
self.assertEquals(result, expected)
def test_onlyVersionStr(self):
request = '{"jsonrpc": "2"}'
result = jsonrpc.decodeRequest(request)
expected = {'jsonrpc': "2"}
self.assertEquals(result, expected)
def test_combined(self):
request = '{"method": "abc", "params": ["p1", 12321], "jsonrpc": 2.0, '
request += '"id": 123}'
result = jsonrpc.decodeRequest(request)
expected = {'method': 'abc', 'params': ['p1', 12321], 'jsonrpc': 2.0,
'id': 123}
self.assertEquals(result, expected)
class TestVerifyMethodCall(ExtendedTestCase):
def test_onlyMethod(self):
request = {'method': 'abc'}
self.assertEquals(request, jsonrpc.verifyMethodCall(request))
def test_onlyId(self):
request = {'id': 123}
self.assertRaises(JSONRPCError, jsonrpc.verifyMethodCall, request)
def test_onlyVersion(self):
request = {'jsonrpc': 2}
self.assertRaises(JSONRPCError, jsonrpc.verifyMethodCall, request)
def test_onlyParams(self):
request = {'params': [123, 'afaf']}
self.assertRaises(JSONRPCError, jsonrpc.verifyMethodCall, request)
def test_paramsNotSequence(self):
request = {'method': 'aa', 'params': 123}
self.assertRaises(JSONRPCError, jsonrpc.verifyMethodCall, request)
def test_paramsSequence(self):
request = {'method': 'aa', 'params': ['abcdef', 12321]}
self.assertEquals(request, jsonrpc.verifyMethodCall(request))
def test_paramsMapping(self):
request = {'method': 'aa', 'params': {'name': 'data', 'name2': 'data'}}
self.assertEquals(request, jsonrpc.verifyMethodCall(request))
def test_idInt(self):
request = {'method': 'aa', 'id': 1}
self.assertEquals(request, jsonrpc.verifyMethodCall(request))
def test_idStr(self):
request = {'method': 'aa', 'id': '1b3'}
self.assertEquals(request, jsonrpc.verifyMethodCall(request))
def test_versionInt(self):
request = {'method': 'aa', 'jsonrpc': 2}
self.assertRaises(JSONRPCError, jsonrpc.verifyMethodCall, request)
def test_versionFloat(self):
request = {'method': 'aa', 'jsonrpc': 2.0}
self.assertEquals(request, jsonrpc.verifyMethodCall(request))
def test_versionStr(self):
request = {'method': 'aa', 'jsonrpc': '2'}
self.assertEquals(request, jsonrpc.verifyMethodCall(request))
class TestPrepareMethodResponse(ExtendedTestCase):
def test_noResponseNoVersion(self):
result = jsonrpc.prepareMethodResponse(None, 123)
expected = {"error": None, "id": 123, "result": None}
self.assertEquals(result, expected)
def test_noResponseV2(self):
result = jsonrpc.prepareMethodResponse(None, 123, 2)
expected = {"jsonrpc": "2.0", "id": 123, "result": None}
self.assertEquals(result, expected)
def test_responseStr(self):
result = jsonrpc.prepareMethodResponse("result", 123)
expected = {"error": None, "id": 123, "result": "result"}
self.assertEquals(result, expected)
def test_responseInt(self):
result = jsonrpc.prepareMethodResponse(12321, 123)
expected = {"error": None, "id": 123, "result": 12321}
self.assertEquals(result, expected)
def test_noId(self):
result = jsonrpc.prepareMethodResponse(None, None)
expected = None
self.assertEquals(result, expected)
def test_idStr(self):
result = jsonrpc.prepareMethodResponse(None, '1b3')
expected = {"error": None, "id": "1b3", "result": None}
self.assertEquals(result, expected)
def test_responseException(self):
response = ValueError('The method raised an exception!')
result = jsonrpc.prepareMethodResponse(response, 123)
expected = {"result": None, "id": 123,
"error": {"message": "The method raised an exception!", "code": -32603}}
self.assertEquals(result, expected)
def test_invalidParams(self):
response = TypeError('Invalid params')
result = jsonrpc.prepareMethodResponse(response, 123)
expected = {"result": None, "id": 123,
"error": {"message": "Invalid params", "code": -32602}}
self.assertEquals(result, expected)
def test_methodNotFount(self):
response = JSONRPCError('Method aa not found',
jsonrpc.METHOD_NOT_FOUND)
result = jsonrpc.prepareMethodResponse(response, 123)
expected = {"result": None, "id": 123,
"error": {"message": "Method aa not found", "code": -32601}}
self.assertEquals(result, expected)
class TestDecodeResponse(ExtendedTestCase):
def test_noResponse(self):
self.assertRaises(Exception, jsonrpc.decodeResponse, '')
def test_malformedResponse(self):
self.assertRaises(Exception, jsonrpc.decodeResponse, '{"respons')
def test_onlyId(self):
response = '{"id": 123}'
self.assertRaises(ValueError, jsonrpc.decodeResponse, response)
def test_idVersion(self):
response = '{"id": 123, "jsonrpc": "2.0"}'
self.assertRaises(ValueError, jsonrpc.decodeResponse, response)
def test_onlyResult(self):
response = '{"result": "abcd"}'
ret = 'abcd'
self.assertEquals(ret, jsonrpc.decodeResponse(response))
def test_onlyErrorRaises(self):
response = '{"error": {"message": "some error", "code": 123}}'
self.assertRaises(JSONRPCError, jsonrpc.decodeResponse, response)
def test_onlyErrorExceptionDetails(self):
response = '{"error": {"message": "some error", "code": 123}}'
try:
jsonrpc.decodeResponse(response)
except jsonrpc.JSONRPCError as e:
self.assertEquals(e.strerror, 'some error')
self.assertEquals(e.errno, 123)
self.assertEquals(e.version, jsonrpc.VERSION_1)
def test_resultAndErrorNull(self):
response = '{"result": "abcd", "error": null}'
ret = 'abcd'
self.assertEquals(ret, jsonrpc.decodeResponse(response))
def test_errorAndResultNull(self):
response = '{"result": null, "error": {"message": "some error", '
response += '"code": 123}}'
self.assertRaises(JSONRPCError, jsonrpc.decodeResponse, response)
def test_errorAndResultNullExceptionDetails(self):
response = '{"result": null, "error": {"message": "some error", '
response += '"code": 123}}'
try:
jsonrpc.decodeResponse(response)
except jsonrpc.JSONRPCError as e:
self.assertEquals(e.strerror, 'some error')
self.assertEquals(e.errno, 123)
self.assertEquals(e.version, jsonrpc.VERSION_1)
def test_errorAndResult(self):
response = '{"error": {"message": "some error", "code": 123}, '
response += '"result": "abcd"}'
self.assertRaises(ValueError, jsonrpc.decodeResponse, response)
def test_errorAndResult2(self):
response = '{"error": {"message": "some error", "code": 123}, '
response += '"result": "abcd", "jsonrpc": "2.0"}'
self.assertRaises(ValueError, jsonrpc.decodeResponse, response)
def test_emptyResult(self):
response = '{"result": null}'
self.assertEquals(None, jsonrpc.decodeResponse(response))
| [
"fastjsonrpc.jsonrpc.encodeRequest",
"json.loads",
"fastjsonrpc.jsonrpc.JSONRPCError",
"re.match",
"fastjsonrpc.jsonrpc.decodeRequest",
"fastjsonrpc.jsonrpc.prepareMethodResponse",
"fastjsonrpc.jsonrpc.verifyMethodCall",
"fastjsonrpc.jsonrpc.decodeResponse"
] | [((252, 269), 'json.loads', 'json.loads', (['value'], {}), '(value)\n', (262, 269), False, 'import json\n'), ((289, 309), 'json.loads', 'json.loads', (['expected'], {}), '(expected)\n', (299, 309), False, 'import json\n'), ((421, 438), 'json.loads', 'json.loads', (['value'], {}), '(value)\n', (431, 438), False, 'import json\n'), ((935, 966), 'fastjsonrpc.jsonrpc.encodeRequest', 'jsonrpc.encodeRequest', (['"""method"""'], {}), "('method')\n", (956, 966), False, 'from fastjsonrpc import jsonrpc\n'), ((1123, 1163), 'fastjsonrpc.jsonrpc.encodeRequest', 'jsonrpc.encodeRequest', (['"""method"""'], {'id_': '(123)'}), "('method', id_=123)\n", (1144, 1163), False, 'from fastjsonrpc import jsonrpc\n'), ((1310, 1352), 'fastjsonrpc.jsonrpc.encodeRequest', 'jsonrpc.encodeRequest', (['"""method"""'], {'id_': '"""abc"""'}), "('method', id_='abc')\n", (1331, 1352), False, 'from fastjsonrpc import jsonrpc\n'), ((1500, 1547), 'fastjsonrpc.jsonrpc.encodeRequest', 'jsonrpc.encodeRequest', (['"""method"""', "['abc', 'def']"], {}), "('method', ['abc', 'def'])\n", (1521, 1547), False, 'from fastjsonrpc import jsonrpc\n'), ((1697, 1759), 'fastjsonrpc.jsonrpc.encodeRequest', 'jsonrpc.encodeRequest', (['"""method"""', "{'first': 'a', 'second': 'b'}"], {}), "('method', {'first': 'a', 'second': 'b'})\n", (1718, 1759), False, 'from fastjsonrpc import jsonrpc\n'), ((1926, 1970), 'fastjsonrpc.jsonrpc.encodeRequest', 'jsonrpc.encodeRequest', (['"""method"""'], {'version': '(1.0)'}), "('method', version=1.0)\n", (1947, 1970), False, 'from fastjsonrpc import jsonrpc\n'), ((2099, 2143), 'fastjsonrpc.jsonrpc.encodeRequest', 'jsonrpc.encodeRequest', (['"""method"""'], {'version': '(2.0)'}), "('method', version=2.0)\n", (2120, 2143), False, 'from fastjsonrpc import jsonrpc\n'), ((2290, 2332), 'fastjsonrpc.jsonrpc.encodeRequest', 'jsonrpc.encodeRequest', (['"""method"""'], {'version': '(2)'}), "('method', version=2)\n", (2311, 2332), False, 'from fastjsonrpc import jsonrpc\n'), ((2476, 2518), 'fastjsonrpc.jsonrpc.encodeRequest', 'jsonrpc.encodeRequest', (['"""method"""'], {'version': '(3)'}), "('method', version=3)\n", (2497, 2518), False, 'from fastjsonrpc import jsonrpc\n'), ((2648, 2701), 'fastjsonrpc.jsonrpc.encodeRequest', 'jsonrpc.encodeRequest', (['"""method"""'], {'version': '(2.0)', 'id_': '(123)'}), "('method', version=2.0, id_=123)\n", (2669, 2701), False, 'from fastjsonrpc import jsonrpc\n'), ((2833, 2883), 'fastjsonrpc.jsonrpc.encodeRequest', 'jsonrpc.encodeRequest', (['"""method"""', '"""abcdef"""'], {'id_': '(123)'}), "('method', 'abcdef', id_=123)\n", (2854, 2883), False, 'from fastjsonrpc import jsonrpc\n'), ((3023, 3075), 'fastjsonrpc.jsonrpc.encodeRequest', 'jsonrpc.encodeRequest', (['"""method"""', '"""abcdef"""'], {'version': '(2)'}), "('method', 'abcdef', version=2)\n", (3044, 3075), False, 'from fastjsonrpc import jsonrpc\n'), ((3225, 3288), 'fastjsonrpc.jsonrpc.encodeRequest', 'jsonrpc.encodeRequest', (['"""method"""', '"""abcdef"""'], {'id_': '(123)', 'version': '(2.0)'}), "('method', 'abcdef', id_=123, version=2.0)\n", (3246, 3288), False, 'from fastjsonrpc import jsonrpc\n'), ((3680, 3721), 'fastjsonrpc.jsonrpc.decodeRequest', 'jsonrpc.decodeRequest', (['"""{"method": "aa"}"""'], {}), '(\'{"method": "aa"}\')\n', (3701, 3721), False, 'from fastjsonrpc import jsonrpc\n'), ((3891, 3921), 'fastjsonrpc.jsonrpc.decodeRequest', 'jsonrpc.decodeRequest', (['request'], {}), '(request)\n', (3912, 3921), False, 'from fastjsonrpc import jsonrpc\n'), ((4085, 4115), 'fastjsonrpc.jsonrpc.decodeRequest', 'jsonrpc.decodeRequest', (['request'], {}), '(request)\n', (4106, 4115), False, 'from fastjsonrpc import jsonrpc\n'), ((4273, 4303), 'fastjsonrpc.jsonrpc.decodeRequest', 'jsonrpc.decodeRequest', (['request'], {}), '(request)\n', (4294, 4303), False, 'from fastjsonrpc import jsonrpc\n'), ((4469, 4499), 'fastjsonrpc.jsonrpc.decodeRequest', 'jsonrpc.decodeRequest', (['request'], {}), '(request)\n', (4490, 4499), False, 'from fastjsonrpc import jsonrpc\n'), ((4670, 4700), 'fastjsonrpc.jsonrpc.decodeRequest', 'jsonrpc.decodeRequest', (['request'], {}), '(request)\n', (4691, 4700), False, 'from fastjsonrpc import jsonrpc\n'), ((4871, 4901), 'fastjsonrpc.jsonrpc.decodeRequest', 'jsonrpc.decodeRequest', (['request'], {}), '(request)\n', (4892, 4901), False, 'from fastjsonrpc import jsonrpc\n'), ((5141, 5171), 'fastjsonrpc.jsonrpc.decodeRequest', 'jsonrpc.decodeRequest', (['request'], {}), '(request)\n', (5162, 5171), False, 'from fastjsonrpc import jsonrpc\n'), ((7316, 7356), 'fastjsonrpc.jsonrpc.prepareMethodResponse', 'jsonrpc.prepareMethodResponse', (['None', '(123)'], {}), '(None, 123)\n', (7345, 7356), False, 'from fastjsonrpc import jsonrpc\n'), ((7514, 7557), 'fastjsonrpc.jsonrpc.prepareMethodResponse', 'jsonrpc.prepareMethodResponse', (['None', '(123)', '(2)'], {}), '(None, 123, 2)\n', (7543, 7557), False, 'from fastjsonrpc import jsonrpc\n'), ((7717, 7761), 'fastjsonrpc.jsonrpc.prepareMethodResponse', 'jsonrpc.prepareMethodResponse', (['"""result"""', '(123)'], {}), "('result', 123)\n", (7746, 7761), False, 'from fastjsonrpc import jsonrpc\n'), ((7922, 7963), 'fastjsonrpc.jsonrpc.prepareMethodResponse', 'jsonrpc.prepareMethodResponse', (['(12321)', '(123)'], {}), '(12321, 123)\n', (7951, 7963), False, 'from fastjsonrpc import jsonrpc\n'), ((8114, 8155), 'fastjsonrpc.jsonrpc.prepareMethodResponse', 'jsonrpc.prepareMethodResponse', (['None', 'None'], {}), '(None, None)\n', (8143, 8155), False, 'from fastjsonrpc import jsonrpc\n'), ((8268, 8310), 'fastjsonrpc.jsonrpc.prepareMethodResponse', 'jsonrpc.prepareMethodResponse', (['None', '"""1b3"""'], {}), "(None, '1b3')\n", (8297, 8310), False, 'from fastjsonrpc import jsonrpc\n'), ((8540, 8584), 'fastjsonrpc.jsonrpc.prepareMethodResponse', 'jsonrpc.prepareMethodResponse', (['response', '(123)'], {}), '(response, 123)\n', (8569, 8584), False, 'from fastjsonrpc import jsonrpc\n'), ((8868, 8912), 'fastjsonrpc.jsonrpc.prepareMethodResponse', 'jsonrpc.prepareMethodResponse', (['response', '(123)'], {}), '(response, 123)\n', (8897, 8912), False, 'from fastjsonrpc import jsonrpc\n'), ((9135, 9196), 'fastjsonrpc.jsonrpc.JSONRPCError', 'JSONRPCError', (['"""Method aa not found"""', 'jsonrpc.METHOD_NOT_FOUND'], {}), "('Method aa not found', jsonrpc.METHOD_NOT_FOUND)\n", (9147, 9196), False, 'from fastjsonrpc.jsonrpc import JSONRPCError\n'), ((9246, 9290), 'fastjsonrpc.jsonrpc.prepareMethodResponse', 'jsonrpc.prepareMethodResponse', (['response', '(123)'], {}), '(response, 123)\n', (9275, 9290), False, 'from fastjsonrpc import jsonrpc\n'), ((1046, 1071), 're.match', 're.match', (['pattern', 'result'], {}), '(pattern, result)\n', (1054, 1071), False, 'import re\n'), ((5476, 5509), 'fastjsonrpc.jsonrpc.verifyMethodCall', 'jsonrpc.verifyMethodCall', (['request'], {}), '(request)\n', (5500, 5509), False, 'from fastjsonrpc import jsonrpc\n'), ((6235, 6268), 'fastjsonrpc.jsonrpc.verifyMethodCall', 'jsonrpc.verifyMethodCall', (['request'], {}), '(request)\n', (6259, 6268), False, 'from fastjsonrpc import jsonrpc\n'), ((6420, 6453), 'fastjsonrpc.jsonrpc.verifyMethodCall', 'jsonrpc.verifyMethodCall', (['request'], {}), '(request)\n', (6444, 6453), False, 'from fastjsonrpc import jsonrpc\n'), ((6561, 6594), 'fastjsonrpc.jsonrpc.verifyMethodCall', 'jsonrpc.verifyMethodCall', (['request'], {}), '(request)\n', (6585, 6594), False, 'from fastjsonrpc import jsonrpc\n'), ((6706, 6739), 'fastjsonrpc.jsonrpc.verifyMethodCall', 'jsonrpc.verifyMethodCall', (['request'], {}), '(request)\n', (6730, 6739), False, 'from fastjsonrpc import jsonrpc\n'), ((7017, 7050), 'fastjsonrpc.jsonrpc.verifyMethodCall', 'jsonrpc.verifyMethodCall', (['request'], {}), '(request)\n', (7041, 7050), False, 'from fastjsonrpc import jsonrpc\n'), ((7170, 7203), 'fastjsonrpc.jsonrpc.verifyMethodCall', 'jsonrpc.verifyMethodCall', (['request'], {}), '(request)\n', (7194, 7203), False, 'from fastjsonrpc import jsonrpc\n'), ((10130, 10162), 'fastjsonrpc.jsonrpc.decodeResponse', 'jsonrpc.decodeResponse', (['response'], {}), '(response)\n', (10152, 10162), False, 'from fastjsonrpc import jsonrpc\n'), ((10489, 10521), 'fastjsonrpc.jsonrpc.decodeResponse', 'jsonrpc.decodeResponse', (['response'], {}), '(response)\n', (10511, 10521), False, 'from fastjsonrpc import jsonrpc\n'), ((10871, 10903), 'fastjsonrpc.jsonrpc.decodeResponse', 'jsonrpc.decodeResponse', (['response'], {}), '(response)\n', (10893, 10903), False, 'from fastjsonrpc import jsonrpc\n'), ((11320, 11352), 'fastjsonrpc.jsonrpc.decodeResponse', 'jsonrpc.decodeResponse', (['response'], {}), '(response)\n', (11342, 11352), False, 'from fastjsonrpc import jsonrpc\n'), ((12117, 12149), 'fastjsonrpc.jsonrpc.decodeResponse', 'jsonrpc.decodeResponse', (['response'], {}), '(response)\n', (12139, 12149), False, 'from fastjsonrpc import jsonrpc\n'), ((688, 722), 're.match', 're.match', (['expected.pattern', 'value2'], {}), '(expected.pattern, value2)\n', (696, 722), False, 'import re\n')] |
import requests
import os
def getmatches(LiveCheck = False):
param = {os.environ['H1N']: os.environ['H1'], os.environ['H2N']: os.environ['H2']}
address = os.environ['CRIC_ADDRESS']
resp = requests.get(address + 'matches.php', headers = param)
js = resp.json()
lis = js['matchList']['matches']
if LiveCheck == True:
return list(filter(lambda x: x['status'] == 'LIVE', lis))
else:
s = ''
for match in lis:
if match['matchSummaryText'] == '':
continue
s += match['homeTeam']['shortName'] + ' vs ' + match['awayTeam']['shortName'] +'\n'
# s += match['series']['shortName'] + '\n' + match['name'] + '\n'
s += match['matchSummaryText'] + '\n\n'
return s
def getscorecard(sid, mid):
param = {os.environ['H1N']: os.environ['H1'], os.environ['H2N']: os.environ['H2']}
address = os.environ['CRIC_ADDRESS']
resp = requests.get(address + 'scorecards.php?seriesid=' + sid + '&matchid=' + mid, headers = param)
js1 = resp.json()
sc = js1['fullScorecard']
s = ''
#s = md['teamBatting']['shortName'] + ' vs ' + md['teamBowling']['shortName'] + '\n'
#s += md['tossMessage'] + '\n'
for inn in sc['innings']:
s += '\n' + inn['name'] + '\n'
s += inn['run'] + '-' + inn['wicket'] + '(' + inn['over'] + ')\n\n'
for bat in inn['batsmen']:
if bat['balls'] == '':
continue
s += bat['name'] + ' ' + bat['runs'] + '(' + bat['balls'] + ') ' + bat['howOut'] + '\n'
s += '\n'
for ball in inn['bowlers']:
s += ball['name'] +' '+ ball['overs'] + '-' +ball['maidens'] + '-' +ball['runsConceded'] + '-' +ball['wickets']+'\n'
return s
def livescore():
lis = getmatches(LiveCheck=True)
if len(lis) == 0:
return getmatches(LiveCheck=False)
s = ''
for match in lis:
s += match['homeTeam']['shortName'] + ' vs ' + match['awayTeam']['shortName'] + '\n'
s += match['matchSummaryText'] + '\n'
sid = str(match['series']['id'])
mid = str(match['id'])
s += getscorecard(sid, mid)
s += '\n\n\n'
return s
if __name__ == '__main__':
print(livescore())
| [
"requests.get"
] | [((201, 253), 'requests.get', 'requests.get', (["(address + 'matches.php')"], {'headers': 'param'}), "(address + 'matches.php', headers=param)\n", (213, 253), False, 'import requests\n'), ((941, 1036), 'requests.get', 'requests.get', (["(address + 'scorecards.php?seriesid=' + sid + '&matchid=' + mid)"], {'headers': 'param'}), "(address + 'scorecards.php?seriesid=' + sid + '&matchid=' + mid,\n headers=param)\n", (953, 1036), False, 'import requests\n')] |
import re
def strip_url(line):
ozon_url = re.search("(?P<url>https?://[^\s]+)", line).group("url")
return ozon_url
source = open('all.txt')
url_list = open('stripped_ozon.txt', 'w')
for line in source.readlines():
surl = strip_url(line)
url_list.write(surl + '\n')
source.close()
url_list.close() | [
"re.search"
] | [((48, 92), 're.search', 're.search', (['"""(?P<url>https?://[^\\\\s]+)"""', 'line'], {}), "('(?P<url>https?://[^\\\\s]+)', line)\n", (57, 92), False, 'import re\n')] |
"""
Geosoft vector.
:Classes:
=============== =========================
:class:`GXvv` data vector
=============== =========================
VA and VV classes are related based on a key called a *fiducial*,
which has a start value and increment between values. The :meth:`refid` method can be used to resample vector
data to the same fiducial so that vector-to-vector operations can be performed.
.. seealso:: :mod:`geosoft.gxpy.va`, :mod:`geosoft.gxapi.GXVV`, :mod:`geosoft.gxapi.GXVA`
.. note::
Regression tests provide usage examples:
`vv tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_vv.py>`_
"""
from collections.abc import Sequence
import geosoft
import numpy as np
import geosoft.gxapi as gxapi
from . import utility as gxu
__version__ = geosoft.__version__
MAX_VV_BYTES = 4096 #: maximum bytes per element in VV
def _t(s):
return geosoft.gxpy.system.translate(s)
class VVException(geosoft.GXRuntimeError):
"""
Exceptions from :mod:`geosoft.gxpy.vv`.
.. versionadded:: 9.1
"""
pass
def np_from_vvset(vvset, axis=1):
"""
Return a 2d numpy array from a set of `GXvv` instances.
:param vvset: (vv1, vv2, ...) set ot `geosoft.gxpy.vv.GXvv` instances
:param axis: axis for the vv, default is 1, such that each vv is a column, `axis=0` for vv to be rows
:return: numpy array shaped (max_vv_length, number_of_vv) for `axis=1`, or (number_of_vv, max_vv_length)
for `axis=0`.
.. versionadded:: 9.3.1
"""
nvv = len(vvset)
length = 0
for vv in vvset:
if len(vv) > length:
length = len(vv)
npd = np.empty((length, nvv), dtype=vvset[0].dtype)
for i in range(nvv):
npd[:, i] = vvset[i].np
if axis == 1:
return npd
else:
return npd.T
def vvset_from_np(npd, axis=1):
"""
Return a set of `GXvv` instances from a 2d numpy array.
:param npd: numpy data array of dimension 2. If the array has higher dimensions it will first
be reshaped to a 2-dimension array based on the last axis.
:param axis: axis for the vv, default is 1, such that each columns become vv, `axis=0` for rows to be vv
:return: [vv0, vv1, vv2, ...] `geosoft.gxpy.vv.GXvv` instances for each column or row (`axis=0`)
For example:
npd = np.array([[1, 2, 3], [4, 5, 6]])
returns (vv([1, 4]), vv([2, 5]), vv([3, 6]))
.. versionadded:: 9.3.1
"""
if npd.ndim == 1:
vv = [GXvv(npd)]
else:
if npd.ndim > 2:
npd = npd.reshape((-1, npd.shape[-1]))
if axis == 0:
npd = npd.T
vv = []
for i in range(npd.shape[1]):
vv.append(GXvv(npd[:, i]))
return tuple(vv)
class GXvv(Sequence):
"""
VV class wrapper.
:param array: array-like, None to create an empty VV. Can have 2 dimensions for float32 or
float64 data, in which case the second dimension can be 2 or 3 to use Geosoft
2D and 3D dimensioned types. This can also be another `GXvv` instance, in which
case a copy of the data is made and the dtype, dim, fid an unit_of_measurement
will default to the source instance.
:param dtype: numpy data type. For unicode strings 'U#', where # is a string length. If not specified
the type is taken from first element in array, of if no array the default is 'float'.
:param dim: dimension can be 1 (default), 2 (2D) or 3 (3D). Ignored if array is defined as the array
dimensions will be used.
:param fid: (start, increment) fiducial
:param unit_of_measure: unit of measure for the contained data.
:param len: length of VV
:Properties:
``vv`` :class:`geosoft.gxapi.GXvv` instance
``fid`` (start, increment) fiducial
``length`` number of elements in the VV
``gxtype`` GX data type
``dtype`` numpy data type
``dim`` dimension
.. versionadded:: 9.1
.. versionchanged:: 9.2 support construction directly from arrays
.. versionchanged:: 9.3 added unit_of_measure
.. versionchanged:: 9.3.1 added string support in __getitem__, and creates from a source `GVvv` instance.
.. versionchanged:: 9.6 Added length parameter.
"""
def __enter__(self):
return self
def __exit__(self, _type, _value, _traceback):
self.__del__()
def __del__(self):
if hasattr(self, '_gxvv'):
self._gxvv = None
def __eq__(self, other):
return np.array_equal(self.np, other.np) \
and self.fid == other.fid \
and self.dim == other.dim \
and self.unit_of_measure == other.unit_of_measure
def __init__(self, array=None, dtype=None, fid=None, unit_of_measure=None, dim=None, len=0):
if array is not None:
if isinstance(array, GXvv):
if fid is None:
fid = array.fid
if unit_of_measure is None:
unit_of_measure = array.unit_of_measure
if dtype is None:
dtype = array.dtype
if dim is None:
dim = array.dim
array = array.np
else:
if not isinstance(array, np.ndarray):
array = np.array(array)
if array.ndim == 2:
dim = array.shape[1]
else:
dim = 1
if dtype is None:
dtype = array.dtype
if fid is None:
fid = (0.0, 1.0)
if unit_of_measure is None:
unit_of_measure = ''
if dim is None:
dim = 1
elif dim not in (1, 2, 3):
raise VVException(_t('dimension (array, or dim=) must be 1, 2 or 3'))
self._dim = dim
if dtype is None:
dtype = np.float64
self._gxtype = max(-MAX_VV_BYTES, gxu.gx_dtype(dtype))
# Since we are using UTF-8 internally characters can take anywhere between 1 and 4 bytes.
# The gx_dtype method and the gxapi wrappers accounts for that by multiplying the dtype number accordingly.
# Specifying a numpy dtype to instantiate VV will ensure the internal space is enough to allocate up to
# that 4 times the Unicode characters, however any Numpy arrays should still used the passed dtype as specified
if dtype is not None and isinstance(dtype, np.dtype) and dtype.type is np.str_:
self._dtype = dtype
elif type(dtype) is str:
self._dtype = np.dtype(dtype)
else:
self._dtype = gxu.dtype_gx(self._gxtype)
self._is_float = self._is_int = self._is_string = False
if gxu.is_float(self._gxtype):
self._is_float = True
elif gxu.is_int(self._gxtype):
self._is_int = True
else:
self._is_string = True
if not self._is_float and self._dim != 1:
raise VVException(_t('2 or 3 dimensioned data must be float32 or float64'))
if self._dim != 1:
self._gxvv = gxapi.GXVV.create_ext(gxu.gx_dtype_dimension(self._dtype, self._dim), len)
else:
self._gxvv = gxapi.GXVV.create_ext(self._gxtype, len)
self.fid = fid
self._next = 0
self._unit_of_measure = unit_of_measure
if array is not None:
self.set_data(array, fid)
def __len__(self):
return self._gxvv.length()
def __iter__(self):
return self
def __next__(self):
if self._next >= self.length:
self._next = 0
raise StopIteration
else:
i = self._next
self._next += 1
return self.np[i], self.fid[0] + self.fid[1] * i
def __getitem__(self, item):
start, incr = self.fid
if self._is_float:
v = float(self.np[item])
elif self._is_int:
v = int(self.np[item])
else:
v = str(self.np[item])
return v, start + incr * item
def _set_data_np(self, npd, start=0):
"""set to data in a numpy array"""
if not npd.flags['C_CONTIGUOUS']:
npd = np.ascontiguousarray(npd)
self.gxvv.set_data(start, npd.shape[0], npd.data.tobytes(), gxu.gx_dtype_dimension(npd.dtype, self._dim))
def _get_data_np(self, start=0, n=None, dtype=None):
"""return data in a numpy array"""
if n is None:
n = self.length - start
if self._dim == 1:
sh = (n,)
else:
sh = (n, self._dim)
bytearr = np.empty(sh, dtype=dtype).tobytes()
self.gxvv.get_data(start, n, bytearr, gxu.gx_dtype_dimension(dtype, self._dim))
npd = np.frombuffer(bytearr, dtype=dtype).reshape(sh)
return np.array(npd)
@property
def unit_of_measure(self):
""" data unit of measurement"""
return self._unit_of_measure
@unit_of_measure.setter
def unit_of_measure(self, uom):
self._unit_of_measure = str(uom)
@property
def gxvv(self):
""":class:`geosoft.gxapi.GXVV` instance"""
return self._gxvv
@property
def fid(self):
"""
fid tuple (start,increment), can be set
.. versionadded:: 9.1
"""
return self._gxvv.get_fid_start(), self._gxvv.get_fid_incr()
@fid.setter
def fid(self, fid):
self._gxvv.set_fid_start(fid[0])
self._gxvv.set_fid_incr(fid[1])
@property
def length(self):
"""
number of elements in the VV, can be set
.. versionadded:: 9.1
.. versionchanged:: 9.3 can be set
"""
return self.__len__()
@length.setter
def length(self, length):
self.refid(self.fid, length)
@property
def gxtype(self):
"""
GX data type
.. versionadded:: 9.1
"""
return self._gxtype
@property
def dtype(self):
"""
numpy data type
.. versionadded:: 9.1
"""
return self._dtype
@property
def is_float(self):
""" True if a base float type, 32 or 64-bit"""
return self._is_float
@property
def is_float64(self):
"""
True if a base 64-bit float
.. versionadded:: 9.3.1
"""
if self.dtype == np.float64:
return True
return False
@property
def is_int(self):
""" True if a base integer type"""
return self._is_int
@property
def is_string(self):
""" True if a base string type"""
return self._is_string
@property
def dim(self):
"""Dimension of elements in the array, 1, 2 or 3."""
return self._dim
@property
def np(self):
"""
Numpy array of VV data, in the data type of the VV. Use :meth:`get_data` to get a numpy array
in another dtype.
Note that changing the data in the numpy array does NOT change the data in the VV. Use
`set_data` to change data in the VV.
.. versionadded:: 9.2
"""
return self.get_data()[0]
def get_data(self, dtype=None, start=0, n=None, float_dummies_to_nan=True):
"""
Return vv data in a numpy array
:param start: index of first value, must be >=0
:param n: number of values wanted
:param dtype: numpy data type wanted
:returns: (data, (fid_start, fid_incr))
.. versionadded:: 9.1
"""
if dtype is None:
dtype = self._dtype
else:
dtype = np.dtype(dtype)
if n is None:
n = self.length - start
else:
n = min((self.length - start), n)
if (n < 0) or (start < 0) or ((start >= self.length) and self.length > 0):
raise VVException(_t('Cannot get (start,n) ({},{}) from vv of length {}').format(start, n, self.length))
if (n == 0) or (self.length == 0):
npd = np.array([], dtype=dtype)
else:
# strings wanted
if dtype.type is np.str_:
sr = gxapi.str_ref()
npd = np.empty((n,), dtype=dtype)
for i in range(start, start + n):
self._gxvv.get_string(i, sr)
npd[i - start] = sr.value
# numeric wanted
else:
# strings to numeric
if self._gxtype < 0:
if np.issubclass_(dtype.type, np.integer):
vvd = gxapi.GXVV.create_ext(gxapi.GS_LONG, n)
else:
vvd = gxapi.GXVV.create_ext(gxapi.GS_DOUBLE, n)
vvd.copy(self._gxvv) # this will do the conversion
npd = vvd.get_data_np(start, n, dtype)
# numeric to numeric
else:
npd = self._get_data_np(start, n, dtype)
if float_dummies_to_nan:
if npd.dtype == np.float32 or npd.dtype == np.float64:
npd[npd == gxu.gx_dummy(npd.dtype)] = np.nan
fid = self.fid
start = fid[0] + start * fid[1]
return npd, (start, fid[1])
def set_data(self, data, fid=None):
"""
Set vv data from an iterable, which can be another `GXvv` instance. If the data is float type numpy.nan
\ are used to indicate dummy values.
:param data: data array of `GXvv` instance, will be reshapped to VV dimension
:param fid: fid tuple (start,increment), default does not change current fid
.. versionadded:: 9.1
.. versionchanged:: 9.3
default fid leaves fid unchanged
.. versionchanged:: 9.3.1
now accepts `GXvv` instance as the source data.
"""
if isinstance(data, GXvv):
data = data.np
elif not isinstance(data, np.ndarray):
data = np.array(data)
if data.size == 0:
self.length = 0
if fid:
self.fid = fid
return
if self.dim == 1:
data = data.flatten()
else:
data = data.reshape((-1, self.dim))
if data.size > gxapi.iMAX:
raise VVException(_t('data length {}, max allowed is {})').format(np.size(data), gxapi.iMAX))
# numerical data
if self._gxtype >= 0:
# strings
if gxu.gx_dtype(data.dtype) < 0:
i = 0
for s in data:
self._gxvv.set_double(i, gxu.rdecode(s))
i += 1
else:
if data.dtype == np.float32 or data.dtype == np.float64:
if np.isnan(data).any():
data = data.copy()
data[np.isnan(data)] = gxu.gx_dummy(data.dtype)
self._set_data_np(data)
# strings
else:
i = 0
for d in data:
self._gxvv.set_string(i, str(d))
i += 1
self._gxvv.set_len(data.shape[0])
if fid:
self.fid = fid
def refid(self, fid, length=None):
"""
Resample VV to a new fiducial and length
:param fid: (start, incr)
:param length: length, if not specified the length is calculated to the end of the data.
.. versionadded:: 9.1
.. versionchanged:: 9.3.1 added default length calculation
"""
if fid[1] <= 0.:
raise VVException(_t('fid increment must be greater than 0.'))
if length is None:
end_fid = self.fid[0] + self.fid[1] * (self.length - 1)
length = (((end_fid - fid[0]) + fid[1] * 0.5) // fid[1]) + 1
if length < 0:
length = 0
self._gxvv.re_fid(fid[0], fid[1], int(length))
self.fid = fid
def list(self):
"""
Return the content of the VV as a list.
.. versionadded:: 9.2
"""
return [v[0] for v in self]
def fill(self, value):
"""
Fill a vv with a constant value.
:param value: value to fill
.. versionadded:: 9.3.1
"""
if self.is_float:
self.gxvv.fill_double(float(value))
if self.is_int:
self.gxvv.fill_int(int(value))
else:
self.gxvv.fill_string(str(value))
def min_max(self):
"""
Return the minimum and maximum values as doubles. Strings are converted if possible.
:return: (minimum, maximum), or if all dummy, (None, None)
.. versionadded:: 9.3.1
"""
rmin = gxapi.float_ref()
rmax = gxapi.float_ref()
self._gxvv.range_double(rmin, rmax)
if rmin.value == gxapi.rDUMMY:
return (None, None)
return rmin.value, rmax.value
| [
"geosoft.gxapi.GXVV.create_ext",
"numpy.size",
"numpy.ascontiguousarray",
"numpy.array",
"geosoft.gxapi.str_ref",
"geosoft.gxpy.system.translate",
"numpy.array_equal",
"numpy.empty",
"numpy.isnan",
"numpy.frombuffer",
"numpy.dtype",
"geosoft.gxapi.float_ref",
"numpy.issubclass_"
] | [((914, 946), 'geosoft.gxpy.system.translate', 'geosoft.gxpy.system.translate', (['s'], {}), '(s)\n', (943, 946), False, 'import geosoft\n'), ((1691, 1736), 'numpy.empty', 'np.empty', (['(length, nvv)'], {'dtype': 'vvset[0].dtype'}), '((length, nvv), dtype=vvset[0].dtype)\n', (1699, 1736), True, 'import numpy as np\n'), ((9123, 9136), 'numpy.array', 'np.array', (['npd'], {}), '(npd)\n', (9131, 9136), True, 'import numpy as np\n'), ((17027, 17044), 'geosoft.gxapi.float_ref', 'gxapi.float_ref', ([], {}), '()\n', (17042, 17044), True, 'import geosoft.gxapi as gxapi\n'), ((17060, 17077), 'geosoft.gxapi.float_ref', 'gxapi.float_ref', ([], {}), '()\n', (17075, 17077), True, 'import geosoft.gxapi as gxapi\n'), ((4797, 4830), 'numpy.array_equal', 'np.array_equal', (['self.np', 'other.np'], {}), '(self.np, other.np)\n', (4811, 4830), True, 'import numpy as np\n'), ((7525, 7565), 'geosoft.gxapi.GXVV.create_ext', 'gxapi.GXVV.create_ext', (['self._gxtype', 'len'], {}), '(self._gxtype, len)\n', (7546, 7565), True, 'import geosoft.gxapi as gxapi\n'), ((8510, 8535), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['npd'], {}), '(npd)\n', (8530, 8535), True, 'import numpy as np\n'), ((11951, 11966), 'numpy.dtype', 'np.dtype', (['dtype'], {}), '(dtype)\n', (11959, 11966), True, 'import numpy as np\n'), ((12349, 12374), 'numpy.array', 'np.array', (['[]'], {'dtype': 'dtype'}), '([], dtype=dtype)\n', (12357, 12374), True, 'import numpy as np\n'), ((6880, 6895), 'numpy.dtype', 'np.dtype', (['dtype'], {}), '(dtype)\n', (6888, 6895), True, 'import numpy as np\n'), ((8922, 8947), 'numpy.empty', 'np.empty', (['sh'], {'dtype': 'dtype'}), '(sh, dtype=dtype)\n', (8930, 8947), True, 'import numpy as np\n'), ((9060, 9095), 'numpy.frombuffer', 'np.frombuffer', (['bytearr'], {'dtype': 'dtype'}), '(bytearr, dtype=dtype)\n', (9073, 9095), True, 'import numpy as np\n'), ((12479, 12494), 'geosoft.gxapi.str_ref', 'gxapi.str_ref', ([], {}), '()\n', (12492, 12494), True, 'import geosoft.gxapi as gxapi\n'), ((12517, 12544), 'numpy.empty', 'np.empty', (['(n,)'], {'dtype': 'dtype'}), '((n,), dtype=dtype)\n', (12525, 12544), True, 'import numpy as np\n'), ((14294, 14308), 'numpy.array', 'np.array', (['data'], {}), '(data)\n', (14302, 14308), True, 'import numpy as np\n'), ((5599, 5614), 'numpy.array', 'np.array', (['array'], {}), '(array)\n', (5607, 5614), True, 'import numpy as np\n'), ((12836, 12874), 'numpy.issubclass_', 'np.issubclass_', (['dtype.type', 'np.integer'], {}), '(dtype.type, np.integer)\n', (12850, 12874), True, 'import numpy as np\n'), ((14672, 14685), 'numpy.size', 'np.size', (['data'], {}), '(data)\n', (14679, 14685), True, 'import numpy as np\n'), ((12906, 12945), 'geosoft.gxapi.GXVV.create_ext', 'gxapi.GXVV.create_ext', (['gxapi.GS_LONG', 'n'], {}), '(gxapi.GS_LONG, n)\n', (12927, 12945), True, 'import geosoft.gxapi as gxapi\n'), ((13002, 13043), 'geosoft.gxapi.GXVV.create_ext', 'gxapi.GXVV.create_ext', (['gxapi.GS_DOUBLE', 'n'], {}), '(gxapi.GS_DOUBLE, n)\n', (13023, 13043), True, 'import geosoft.gxapi as gxapi\n'), ((15079, 15093), 'numpy.isnan', 'np.isnan', (['data'], {}), '(data)\n', (15087, 15093), True, 'import numpy as np\n'), ((15173, 15187), 'numpy.isnan', 'np.isnan', (['data'], {}), '(data)\n', (15181, 15187), True, 'import numpy as np\n')] |
import sys
sys.path.insert(0,"./models")
import dnnlib.tflib as tflib
import dnnlib
import pickle
import os
import numpy as np
import matplotlib.pyplot as plt
# StyleGAN only works on tensorflow v1.x
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
# Silence deprecation warnings from TensorFlow 1.13 onwards
import logging
logging.getLogger('tensorflow').setLevel(logging.ERROR)
class ImageGenGAN:
def __init__(self):
tflib.init_tf()
self.pickle_path = "./models/pretrained/stylegan2-ffhq-config-f.pkl" # 9fps
#self.pickle_path = "./models/pretrained/stylegan2-car-config-f.pkl" # 14fps
self._G, self._D, self.Gs = None, None, None
self.import_pickle()
self.z = np.random.RandomState(0).randn(*self.Gs.input_shape[1:])
self.w = self.Gs.components.mapping.run(np.stack([self.z]), None)
self.Gs_syn_kwargs = dnnlib.EasyDict()
self.Gs_syn_kwargs.output_transform = dict(func=tflib.convert_images_to_uint8, nchw_to_nhwc=True)
self.Gs_syn_kwargs.randomize_noise = False
self.Gs_syn_kwargs.minibatch_size = 4
self.layers = len(self.w[0])
print(f"No of layers = {self.layers}")
self.generate_image() # to initialise and test
def import_pickle(self):
print("Pickle Loading...")
with open(self.pickle_path, 'rb') as f:
self._G, self._D, self.Gs = pickle.load(f)
print("Pickle Loaded.")
#print(self.Gs.components.summary())
def set_Z(self,z):
self.z = z
def set_W(self):
self.w = self.Gs.components.mapping.run(np.stack([self.z]), None)
def get_W(self,seed):
return self.Gs.components.mapping.run(np.stack([np.random.RandomState(seed).randn(*self.Gs.input_shape[1:])]), None)
def generate_image(self):
return self.Gs.components.synthesis.run(self.w, **self.Gs_syn_kwargs)[0]
if __name__ == "__main__":
Igen = ImageGenGAN()
Igen.pickle_path = "./models/pretrained/stylegan2-car-config-f.pkl"
import cv2, time
#Igen.generate_image()
print("Generating 60 images...")
start = time.time()
seed = 1
while True:
#for seed in range(1340,1400):
#seed = 1
Igen.set_Z(np.random.RandomState(seed).randn(*Igen.Gs.input_shape[1:]))
Igen.set_W()
cv2.imshow("images",cv2.cvtColor(Igen.generate_image(), cv2.COLOR_BGR2RGB))
cv2.waitKey(1)
if time.time() - start >= 0.5:
seed +=1
start = time.time()
time_taken = time.time() - start
print(f"Completed in {time_taken} seconds.")
print(f"resulted in approx. {60//time_taken} fps.") | [
"logging.getLogger",
"sys.path.insert",
"tensorflow.compat.v1.disable_v2_behavior",
"dnnlib.EasyDict",
"pickle.load",
"dnnlib.tflib.init_tf",
"numpy.stack",
"cv2.waitKey",
"time.time",
"numpy.random.RandomState"
] | [((11, 41), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""./models"""'], {}), "(0, './models')\n", (26, 41), False, 'import sys\n'), ((237, 261), 'tensorflow.compat.v1.disable_v2_behavior', 'tf.disable_v2_behavior', ([], {}), '()\n', (259, 261), True, 'import tensorflow.compat.v1 as tf\n'), ((1963, 1974), 'time.time', 'time.time', ([], {}), '()\n', (1972, 1974), False, 'import cv2, time\n'), ((338, 369), 'logging.getLogger', 'logging.getLogger', (['"""tensorflow"""'], {}), "('tensorflow')\n", (355, 369), False, 'import logging\n'), ((439, 454), 'dnnlib.tflib.init_tf', 'tflib.init_tf', ([], {}), '()\n', (452, 454), True, 'import dnnlib.tflib as tflib\n'), ((845, 862), 'dnnlib.EasyDict', 'dnnlib.EasyDict', ([], {}), '()\n', (860, 862), False, 'import dnnlib\n'), ((2215, 2229), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (2226, 2229), False, 'import cv2, time\n'), ((2315, 2326), 'time.time', 'time.time', ([], {}), '()\n', (2324, 2326), False, 'import cv2, time\n'), ((795, 813), 'numpy.stack', 'np.stack', (['[self.z]'], {}), '([self.z])\n', (803, 813), True, 'import numpy as np\n'), ((1303, 1317), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1314, 1317), False, 'import pickle\n'), ((1480, 1498), 'numpy.stack', 'np.stack', (['[self.z]'], {}), '([self.z])\n', (1488, 1498), True, 'import numpy as np\n'), ((2287, 2298), 'time.time', 'time.time', ([], {}), '()\n', (2296, 2298), False, 'import cv2, time\n'), ((695, 719), 'numpy.random.RandomState', 'np.random.RandomState', (['(0)'], {}), '(0)\n', (716, 719), True, 'import numpy as np\n'), ((2236, 2247), 'time.time', 'time.time', ([], {}), '()\n', (2245, 2247), False, 'import cv2, time\n'), ((2057, 2084), 'numpy.random.RandomState', 'np.random.RandomState', (['seed'], {}), '(seed)\n', (2078, 2084), True, 'import numpy as np\n'), ((1580, 1607), 'numpy.random.RandomState', 'np.random.RandomState', (['seed'], {}), '(seed)\n', (1601, 1607), True, 'import numpy as np\n')] |
import collections
import itertools
from tensorflow.keras.preprocessing.sequence import pad_sequences
from nn_project.utils import get_project_file
def get_data_generator(
kind='train',
batch_size=1,
limit=None,
offset=None,
vocab_size=None,
vocab_en=None,
vocab_cs=None,
length_en=None,
length_cs=None,
padding='post',
):
if None in (vocab_en, vocab_cs, length_en, length_cs):
vocab_en, vocab_cs, length_en, length_cs = get_vocabs(
kind=kind,
limit=limit,
offset=offset,
vocab_size=vocab_size,
)
data_generator = get_epochs(
kind=kind,
batch_size=batch_size,
limit=limit,
offset=offset,
vocab_en=vocab_en,
vocab_cs=vocab_cs,
length_en=length_en,
length_cs=length_cs,
padding=padding,
)
return data_generator
def get_epochs(
kind,
batch_size,
limit,
offset,
vocab_en,
vocab_cs,
length_en,
length_cs,
padding,
):
while True:
data_en, data_cs = get_files(kind=kind)
samples_en = get_samples(
data=data_en,
limit=limit,
offset=offset,
vocab=vocab_en,
)
samples_cs = get_samples(
data=data_cs,
limit=limit,
offset=offset,
vocab=vocab_cs,
)
yield from get_batches(
samples_en=samples_en,
samples_cs=samples_cs,
length_en=length_en,
length_cs=length_cs,
batch_size=batch_size,
padding=padding,
)
data_en.close()
data_cs.close()
def get_batches(
samples_en,
samples_cs,
length_en,
length_cs,
batch_size,
padding,
):
while True:
batch_en = list(itertools.islice(samples_en, batch_size))
batch_cs = list(itertools.islice(samples_cs, batch_size))
if not batch_en or not batch_cs:
break
batch_en = pad_sequences(
sequences=batch_en,
maxlen=length_en,
padding=padding,
)
batch_cs = pad_sequences(
sequences=batch_cs,
maxlen=length_cs,
padding=padding,
)
yield batch_en, batch_cs
def get_data(
kind='train',
limit=None,
offset=None,
vocab_en=None,
vocab_cs=None,
vocab_size=None,
encoded=False,
):
if vocab_en is None or vocab_cs is None:
vocab_en, vocab_cs, length_en, length_cs = get_vocabs(
kind=kind,
limit=limit,
offset=offset,
vocab_size=vocab_size,
)
data_en, data_cs = get_files(kind=kind)
with data_en, data_cs:
if encoded:
samples_en = get_samples(
data=data_en,
limit=limit,
offset=offset,
vocab=vocab_en,
)
samples_cs = get_samples(
data=data_cs,
limit=limit,
offset=offset,
vocab=vocab_cs,
)
else:
samples_en = get_samples(
data=data_en,
limit=limit,
offset=offset,
)
samples_cs = get_samples(
data=data_cs,
limit=limit,
offset=offset,
)
return list(samples_en), list(samples_cs), vocab_en, vocab_cs
def get_vocabs(kind, limit=None, offset=None, vocab_size=None):
data_en, data_cs = get_files(kind=kind)
with data_en, data_cs:
lines_en = get_lines(data=data_en, limit=limit, offset=offset)
lines_cs = get_lines(data=data_cs, limit=limit, offset=offset)
vocab_en, length_en = make_vocab(lines=lines_en, size=vocab_size)
vocab_cs, length_cs = make_vocab(lines=lines_cs, size=vocab_size)
return vocab_en, vocab_cs, length_en, length_cs
def make_vocab(lines, size=None):
max_length = 0
counter = collections.Counter()
for line in lines:
words = line.split()
counter.update(words)
max_length = max(len(words), max_length)
vocab = {'<?>': 0}
for index, (word, _) in enumerate(counter.most_common(size), start=1):
vocab[word] = index
return vocab, max_length
def get_samples(data, limit=None, offset=None, vocab=None):
lines = get_lines(data=data, limit=limit, offset=offset)
for line in lines:
yield list(encode_sample(line=line, vocab=vocab))
def encode_sample(line, vocab=None):
tokens = line.split()
for token in tokens:
if vocab is None:
yield token
else:
yield vocab.get(token, 0)
def get_files(kind):
data_en = open(get_project_file('data', 'raw', f'{kind}.en.txt'), 'r')
data_cs = open(get_project_file('data', 'raw', f'{kind}.cs.txt'), 'r')
return data_en, data_cs
def get_lines(data, limit=None, offset=None):
start = 0 if offset is None else offset
stop = None if limit is None else start + limit
return itertools.islice(data, start, stop)
MAX_ROWS_TRAIN = 15794564
MAX_ROWS_TEST = 2656
| [
"collections.Counter",
"itertools.islice",
"nn_project.utils.get_project_file",
"tensorflow.keras.preprocessing.sequence.pad_sequences"
] | [((4182, 4203), 'collections.Counter', 'collections.Counter', ([], {}), '()\n', (4201, 4203), False, 'import collections\n'), ((5242, 5277), 'itertools.islice', 'itertools.islice', (['data', 'start', 'stop'], {}), '(data, start, stop)\n', (5258, 5277), False, 'import itertools\n'), ((2139, 2207), 'tensorflow.keras.preprocessing.sequence.pad_sequences', 'pad_sequences', ([], {'sequences': 'batch_en', 'maxlen': 'length_en', 'padding': 'padding'}), '(sequences=batch_en, maxlen=length_en, padding=padding)\n', (2152, 2207), False, 'from tensorflow.keras.preprocessing.sequence import pad_sequences\n'), ((2274, 2342), 'tensorflow.keras.preprocessing.sequence.pad_sequences', 'pad_sequences', ([], {'sequences': 'batch_cs', 'maxlen': 'length_cs', 'padding': 'padding'}), '(sequences=batch_cs, maxlen=length_cs, padding=padding)\n', (2287, 2342), False, 'from tensorflow.keras.preprocessing.sequence import pad_sequences\n'), ((4928, 4977), 'nn_project.utils.get_project_file', 'get_project_file', (['"""data"""', '"""raw"""', 'f"""{kind}.en.txt"""'], {}), "('data', 'raw', f'{kind}.en.txt')\n", (4944, 4977), False, 'from nn_project.utils import get_project_file\n'), ((5003, 5052), 'nn_project.utils.get_project_file', 'get_project_file', (['"""data"""', '"""raw"""', 'f"""{kind}.cs.txt"""'], {}), "('data', 'raw', f'{kind}.cs.txt')\n", (5019, 5052), False, 'from nn_project.utils import get_project_file\n'), ((1953, 1993), 'itertools.islice', 'itertools.islice', (['samples_en', 'batch_size'], {}), '(samples_en, batch_size)\n', (1969, 1993), False, 'import itertools\n'), ((2019, 2059), 'itertools.islice', 'itertools.islice', (['samples_cs', 'batch_size'], {}), '(samples_cs, batch_size)\n', (2035, 2059), False, 'import itertools\n')] |
import numpy as np
def maxpooling2(img):
fr = len(img)//2
cr = len(img[0]) // 2
resr = 0
resultado = np.zeros((fr,cr),np.uint8)
for i in range(0, len(img),2):
print(i)
resc = 0
for j in range(0, len(img[0]), 2):
print(j)
matrizlocal = img[i:(i+2),j:(j+2)]
resultado[resr][resc] = np.amax(matrizlocal)
resc += 1
resr += 1
return resultado
print(maxpooling2(img)) | [
"numpy.zeros",
"numpy.amax"
] | [((113, 141), 'numpy.zeros', 'np.zeros', (['(fr, cr)', 'np.uint8'], {}), '((fr, cr), np.uint8)\n', (121, 141), True, 'import numpy as np\n'), ((325, 345), 'numpy.amax', 'np.amax', (['matrizlocal'], {}), '(matrizlocal)\n', (332, 345), True, 'import numpy as np\n')] |
# fix gym.env.classic_control.render is missing
# code is borrow from
# https://github.com/openai/gym/blob/06e16dd586b010d1987eb37e499d1a291a183341/gym/envs/classic_control/rendering.py
import os
import sys
if "Apple" in sys.version:
if "DYLD_FALLBACK_LIBRARY_PATH" in os.environ:
os.environ["DYLD_FALLBACK_LIBRARY_PATH"] += ":/usr/lib"
# (JDS 2016/04/15): avoid bug on Anaconda 2.3.0 / Yosemite
from gym import error
try:
import pyglet
except ImportError as e:
raise ImportError(
"""
Cannot import pyglet.
HINT: you can install pyglet directly via 'pip install pyglet'.
But if you really just want to install all Gym dependencies and not have to think about it,
'pip install -e .[all]' or 'pip install gym[all]' will do it.
"""
)
try:
from pyglet.gl import *
except ImportError as e:
raise ImportError(
"""
Error occurred while running `from pyglet.gl import *`
HINT: make sure you have OpenGL installed. On Ubuntu, you can run 'apt-get install python-opengl'.
If you're running on a server, you may need a virtual frame buffer; something like this should work:
'xvfb-run -s \"-screen 0 1400x900x24\" python <your_script.py>'
"""
)
def get_display(spec):
"""Convert a display specification (such as :0) into an actual Display
object.
Pyglet only supports multiple Displays on Linux.
"""
if spec is None:
return pyglet.canvas.get_display()
# returns already available pyglet_display,
# if there is no pyglet display available then it creates one
elif isinstance(spec, str):
return pyglet.canvas.Display(spec)
else:
raise error.Error(
"Invalid display specification: {}. (Must be a string like :0 or None.)".format(
spec
)
)
def get_window(width, height, display, **kwargs):
"""
Will create a pyglet window from the display specification provided.
"""
screen = display.get_screens() # available screens
config = screen[0].get_best_config() # selecting the first screen
context = config.create_context(None) # create GL context
return pyglet.window.Window(
width=width,
height=height,
display=display,
config=config,
context=context,
**kwargs
)
class SimpleImageViewer(object):
def __init__(self, display=None, maxwidth=500):
self.window = None
self.isopen = False
self.display = get_display(display)
self.maxwidth = maxwidth
def imshow(self, arr):
if self.window is None:
height, width, _channels = arr.shape
if width > self.maxwidth:
scale = self.maxwidth / width
width = int(scale * width)
height = int(scale * height)
self.window = get_window(
width=width,
height=height,
display=self.display,
vsync=False,
resizable=True,
)
self.width = width
self.height = height
self.isopen = True
@self.window.event
def on_resize(width, height):
self.width = width
self.height = height
@self.window.event
def on_close():
self.isopen = False
assert len(arr.shape) == 3, "You passed in an image with the wrong number shape"
image = pyglet.image.ImageData(
arr.shape[1], arr.shape[0], "RGB", arr.tobytes(), pitch=arr.shape[1] * -3
)
texture = image.get_texture()
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_NEAREST)
texture.width = self.width
texture.height = self.height
self.window.clear()
self.window.switch_to()
self.window.dispatch_events()
texture.blit(0, 0) # draw
self.window.flip()
def close(self):
if self.isopen and sys.meta_path:
# ^^^ check sys.meta_path to avoid 'ImportError: sys.meta_path is None, Python is likely shutting down'
self.window.close()
self.isopen = False
def __del__(self):
self.close()
| [
"pyglet.window.Window",
"pyglet.canvas.Display",
"pyglet.canvas.get_display"
] | [((2189, 2301), 'pyglet.window.Window', 'pyglet.window.Window', ([], {'width': 'width', 'height': 'height', 'display': 'display', 'config': 'config', 'context': 'context'}), '(width=width, height=height, display=display, config=\n config, context=context, **kwargs)\n', (2209, 2301), False, 'import pyglet\n'), ((1446, 1473), 'pyglet.canvas.get_display', 'pyglet.canvas.get_display', ([], {}), '()\n', (1471, 1473), False, 'import pyglet\n'), ((1643, 1670), 'pyglet.canvas.Display', 'pyglet.canvas.Display', (['spec'], {}), '(spec)\n', (1664, 1670), False, 'import pyglet\n')] |
import torch
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter
import argparse
import numpy as np
import os
from data import build_train_dataset
from gmflow.gmflow import GMFlow
from loss import flow_loss_func
from evaluate import (validate_chairs, validate_things, validate_sintel, validate_kitti,
create_sintel_submission, create_kitti_submission, inference_on_dir)
from utils.logger import Logger
from utils import misc
from utils.dist_utils import get_dist_info, init_dist, setup_for_distributed
def get_args_parser():
parser = argparse.ArgumentParser()
# dataset
parser.add_argument('--checkpoint_dir', default='tmp', type=str,
help='where to save the training log and models')
parser.add_argument('--stage', default='chairs', type=str,
help='training stage')
parser.add_argument('--image_size', default=[384, 512], type=int, nargs='+',
help='image size for training')
parser.add_argument('--padding_factor', default=16, type=int,
help='the input should be divisible by padding_factor, otherwise do padding')
parser.add_argument('--max_flow', default=400, type=int,
help='exclude very large motions during training')
parser.add_argument('--val_dataset', default=['chairs'], type=str, nargs='+',
help='validation dataset')
parser.add_argument('--with_speed_metric', action='store_true',
help='with speed metric when evaluation')
# training
parser.add_argument('--lr', default=4e-4, type=float)
parser.add_argument('--batch_size', default=12, type=int)
parser.add_argument('--num_workers', default=4, type=int)
parser.add_argument('--weight_decay', default=1e-4, type=float)
parser.add_argument('--grad_clip', default=1.0, type=float)
parser.add_argument('--num_steps', default=100000, type=int)
parser.add_argument('--seed', default=326, type=int)
parser.add_argument('--summary_freq', default=100, type=int)
parser.add_argument('--val_freq', default=10000, type=int)
parser.add_argument('--save_ckpt_freq', default=10000, type=int)
parser.add_argument('--save_latest_ckpt_freq', default=1000, type=int)
# resume pretrained model or resume training
parser.add_argument('--resume', default=None, type=str,
help='resume from pretrain model for finetuing or resume from terminated training')
parser.add_argument('--strict_resume', action='store_true')
parser.add_argument('--no_resume_optimizer', action='store_true')
# GMFlow model
parser.add_argument('--num_scales', default=1, type=int,
help='basic gmflow model uses a single 1/8 feature, the refinement uses 1/4 feature')
parser.add_argument('--feature_channels', default=128, type=int)
parser.add_argument('--upsample_factor', default=8, type=int)
parser.add_argument('--num_transformer_layers', default=6, type=int)
parser.add_argument('--num_head', default=1, type=int)
parser.add_argument('--attention_type', default='swin', type=str)
parser.add_argument('--ffn_dim_expansion', default=4, type=int)
parser.add_argument('--attn_splits_list', default=[2], type=int, nargs='+',
help='number of splits in attention')
parser.add_argument('--corr_radius_list', default=[-1], type=int, nargs='+',
help='correlation radius for matching, -1 indicates global matching')
parser.add_argument('--prop_radius_list', default=[-1], type=int, nargs='+',
help='self-attention radius for flow propagation, -1 indicates global attention')
# loss
parser.add_argument('--gamma', default=0.9, type=float,
help='loss weight')
# evaluation
parser.add_argument('--eval', action='store_true')
parser.add_argument('--save_eval_to_file', action='store_true')
parser.add_argument('--evaluate_matched_unmatched', action='store_true')
# inference on a directory
parser.add_argument('--inference_dir', default=None, type=str)
parser.add_argument('--inference_size', default=None, type=int, nargs='+',
help='can specify the inference size')
parser.add_argument('--dir_paired_data', action='store_true',
help='Paired data in a dir instead of a sequence')
parser.add_argument('--save_flo_flow', action='store_true')
parser.add_argument('--pred_bidir_flow', action='store_true',
help='predict bidirectional flow')
parser.add_argument('--fwd_bwd_consistency_check', action='store_true',
help='forward backward consistency check with bidirection flow')
# predict on sintel and kitti test set for submission
parser.add_argument('--submission', action='store_true',
help='submission to sintel or kitti test sets')
parser.add_argument('--output_path', default='output', type=str,
help='where to save the prediction results')
parser.add_argument('--save_vis_flow', action='store_true',
help='visualize flow prediction as .png image')
parser.add_argument('--no_save_flo', action='store_true',
help='not save flow as .flo')
# distributed training
parser.add_argument('--local_rank', default=0, type=int)
parser.add_argument('--distributed', action='store_true')
parser.add_argument('--launcher', default='none', type=str, choices=['none', 'pytorch'])
parser.add_argument('--gpu_ids', default=0, type=int, nargs='+')
parser.add_argument('--count_time', action='store_true',
help='measure the inference time on sintel')
return parser
def main(args):
if not args.eval and not args.submission and args.inference_dir is None:
if args.local_rank == 0:
print('pytorch version:', torch.__version__)
print(args)
misc.save_args(args)
misc.check_path(args.checkpoint_dir)
misc.save_command(args.checkpoint_dir)
seed = args.seed
torch.manual_seed(seed)
np.random.seed(seed)
torch.backends.cudnn.benchmark = True
if args.launcher == 'none':
args.distributed = False
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
else:
args.distributed = True
# adjust batch size for each gpu
assert args.batch_size % torch.cuda.device_count() == 0
args.batch_size = args.batch_size // torch.cuda.device_count()
dist_params = dict(backend='nccl')
init_dist(args.launcher, **dist_params)
# re-set gpu_ids with distributed training mode
_, world_size = get_dist_info()
args.gpu_ids = range(world_size)
device = torch.device('cuda:{}'.format(args.local_rank))
setup_for_distributed(args.local_rank == 0)
# model
model = GMFlow(feature_channels=args.feature_channels,
num_scales=args.num_scales,
upsample_factor=args.upsample_factor,
num_head=args.num_head,
attention_type=args.attention_type,
ffn_dim_expansion=args.ffn_dim_expansion,
num_transformer_layers=args.num_transformer_layers,
).to(device)
if not args.eval and not args.submission and not args.inference_dir:
print('Model definition:')
print(model)
if args.distributed:
model = torch.nn.parallel.DistributedDataParallel(
model.to(device),
device_ids=[args.local_rank],
output_device=args.local_rank)
model_without_ddp = model.module
else:
if torch.cuda.device_count() > 1:
print('Use %d GPUs' % torch.cuda.device_count())
model = torch.nn.DataParallel(model)
model_without_ddp = model.module
else:
model_without_ddp = model
num_params = sum(p.numel() for p in model.parameters())
print('Number of params:', num_params)
if not args.eval and not args.submission and args.inference_dir is None:
save_name = '%d_parameters' % num_params
open(os.path.join(args.checkpoint_dir, save_name), 'a').close()
optimizer = torch.optim.AdamW(model_without_ddp.parameters(), lr=args.lr,
weight_decay=args.weight_decay)
start_epoch = 0
start_step = 0
# resume checkpoints
if args.resume:
print('Load checkpoint: %s' % args.resume)
loc = 'cuda:{}'.format(args.local_rank)
checkpoint = torch.load(args.resume, map_location=loc)
weights = checkpoint['model'] if 'model' in checkpoint else checkpoint
model_without_ddp.load_state_dict(weights, strict=args.strict_resume)
if 'optimizer' in checkpoint and 'step' in checkpoint and 'epoch' in checkpoint and not \
args.no_resume_optimizer:
print('Load optimizer')
optimizer.load_state_dict(checkpoint['optimizer'])
start_epoch = checkpoint['epoch']
start_step = checkpoint['step']
print('start_epoch: %d, start_step: %d' % (start_epoch, start_step))
# evaluate
if args.eval:
val_results = {}
if 'chairs' in args.val_dataset:
results_dict = validate_chairs(model_without_ddp,
with_speed_metric=args.with_speed_metric,
attn_splits_list=args.attn_splits_list,
corr_radius_list=args.corr_radius_list,
prop_radius_list=args.prop_radius_list,
)
val_results.update(results_dict)
if 'things' in args.val_dataset:
results_dict = validate_things(model_without_ddp,
padding_factor=args.padding_factor,
with_speed_metric=args.with_speed_metric,
attn_splits_list=args.attn_splits_list,
corr_radius_list=args.corr_radius_list,
prop_radius_list=args.prop_radius_list,
)
val_results.update(results_dict)
if 'sintel' in args.val_dataset:
results_dict = validate_sintel(model_without_ddp,
count_time=args.count_time,
padding_factor=args.padding_factor,
with_speed_metric=args.with_speed_metric,
evaluate_matched_unmatched=args.evaluate_matched_unmatched,
attn_splits_list=args.attn_splits_list,
corr_radius_list=args.corr_radius_list,
prop_radius_list=args.prop_radius_list,
)
val_results.update(results_dict)
if 'kitti' in args.val_dataset:
results_dict = validate_kitti(model_without_ddp,
padding_factor=args.padding_factor,
with_speed_metric=args.with_speed_metric,
attn_splits_list=args.attn_splits_list,
corr_radius_list=args.corr_radius_list,
prop_radius_list=args.prop_radius_list,
)
val_results.update(results_dict)
if args.save_eval_to_file:
misc.check_path(args.checkpoint_dir)
val_file = os.path.join(args.checkpoint_dir, 'val_results.txt')
with open(val_file, 'a') as f:
f.write('\neval results after training done\n\n')
metrics = ['chairs_epe', 'chairs_s0_10', 'chairs_s10_40', 'chairs_s40+',
'things_clean_epe', 'things_clean_s0_10', 'things_clean_s10_40', 'things_clean_s40+',
'things_final_epe', 'things_final_s0_10', 'things_final_s10_40', 'things_final_s40+',
'sintel_clean_epe', 'sintel_clean_s0_10', 'sintel_clean_s10_40', 'sintel_clean_s40+',
'sintel_final_epe', 'sintel_final_s0_10', 'sintel_final_s10_40', 'sintel_final_s40+',
'kitti_epe', 'kitti_f1', 'kitti_s0_10', 'kitti_s10_40', 'kitti_s40+',
]
eval_metrics = []
for metric in metrics:
if metric in val_results.keys():
eval_metrics.append(metric)
metrics_values = [val_results[metric] for metric in eval_metrics]
num_metrics = len(eval_metrics)
# save as markdown format
f.write(("| {:>20} " * num_metrics + '\n').format(*eval_metrics))
f.write(("| {:20.3f} " * num_metrics).format(*metrics_values))
f.write('\n\n')
return
# Sintel and KITTI submission
if args.submission:
# NOTE: args.val_dataset is a list
if args.val_dataset[0] == 'sintel':
create_sintel_submission(model_without_ddp,
output_path=args.output_path,
padding_factor=args.padding_factor,
save_vis_flow=args.save_vis_flow,
no_save_flo=args.no_save_flo,
attn_splits_list=args.attn_splits_list,
corr_radius_list=args.corr_radius_list,
prop_radius_list=args.prop_radius_list,
)
elif args.val_dataset[0] == 'kitti':
create_kitti_submission(model_without_ddp,
output_path=args.output_path,
padding_factor=args.padding_factor,
save_vis_flow=args.save_vis_flow,
attn_splits_list=args.attn_splits_list,
corr_radius_list=args.corr_radius_list,
prop_radius_list=args.prop_radius_list,
)
else:
raise ValueError(f'Not supported dataset for submission')
return
# inferece on a dir
if args.inference_dir is not None:
inference_on_dir(model_without_ddp,
inference_dir=args.inference_dir,
output_path=args.output_path,
padding_factor=args.padding_factor,
inference_size=args.inference_size,
paired_data=args.dir_paired_data,
save_flo_flow=args.save_flo_flow,
attn_splits_list=args.attn_splits_list,
corr_radius_list=args.corr_radius_list,
prop_radius_list=args.prop_radius_list,
pred_bidir_flow=args.pred_bidir_flow,
fwd_bwd_consistency_check=args.fwd_bwd_consistency_check,
)
return
# training datset
train_dataset = build_train_dataset(args)
print('Number of training images:', len(train_dataset))
# Multi-processing
if args.distributed:
train_sampler = torch.utils.data.distributed.DistributedSampler(
train_dataset,
num_replicas=torch.cuda.device_count(),
rank=args.local_rank)
else:
train_sampler = None
shuffle = False if args.distributed else True
train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=args.batch_size,
shuffle=shuffle, num_workers=args.num_workers,
pin_memory=True, drop_last=True,
sampler=train_sampler)
last_epoch = start_step if args.resume and start_step > 0 else -1
lr_scheduler = torch.optim.lr_scheduler.OneCycleLR(
optimizer, args.lr,
args.num_steps + 10,
pct_start=0.05,
cycle_momentum=False,
anneal_strategy='cos',
last_epoch=last_epoch,
)
if args.local_rank == 0:
summary_writer = SummaryWriter(args.checkpoint_dir)
logger = Logger(lr_scheduler, summary_writer, args.summary_freq,
start_step=start_step)
total_steps = start_step
epoch = start_epoch
print('Start training')
while total_steps < args.num_steps:
model.train()
# mannual change random seed for shuffling every epoch
if args.distributed:
train_sampler.set_epoch(epoch)
for i, sample in enumerate(train_loader):
img1, img2, flow_gt, valid = [x.to(device) for x in sample]
results_dict = model(img1, img2,
attn_splits_list=args.attn_splits_list,
corr_radius_list=args.corr_radius_list,
prop_radius_list=args.prop_radius_list,
)
flow_preds = results_dict['flow_preds']
loss, metrics = flow_loss_func(flow_preds, flow_gt, valid,
gamma=args.gamma,
max_flow=args.max_flow,
)
if isinstance(loss, float):
continue
if torch.isnan(loss):
continue
metrics.update({'total_loss': loss.item()})
# more efficient zero_grad
for param in model_without_ddp.parameters():
param.grad = None
loss.backward()
# Gradient clipping
torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip)
optimizer.step()
lr_scheduler.step()
if args.local_rank == 0:
logger.push(metrics)
logger.add_image_summary(img1, img2, flow_preds, flow_gt)
total_steps += 1
if total_steps % args.save_ckpt_freq == 0 or total_steps == args.num_steps:
if args.local_rank == 0:
checkpoint_path = os.path.join(args.checkpoint_dir, 'step_%06d.pth' % total_steps)
torch.save({
'model': model_without_ddp.state_dict()
}, checkpoint_path)
if total_steps % args.save_latest_ckpt_freq == 0:
checkpoint_path = os.path.join(args.checkpoint_dir, 'checkpoint_latest.pth')
if args.local_rank == 0:
torch.save({
'model': model_without_ddp.state_dict(),
'optimizer': optimizer.state_dict(),
'step': total_steps,
'epoch': epoch,
}, checkpoint_path)
if total_steps % args.val_freq == 0:
print('Start validation')
val_results = {}
# support validation on multiple datasets
if 'chairs' in args.val_dataset:
results_dict = validate_chairs(model_without_ddp,
with_speed_metric=args.with_speed_metric,
attn_splits_list=args.attn_splits_list,
corr_radius_list=args.corr_radius_list,
prop_radius_list=args.prop_radius_list,
)
if args.local_rank == 0:
val_results.update(results_dict)
if 'things' in args.val_dataset:
results_dict = validate_things(model_without_ddp,
padding_factor=args.padding_factor,
with_speed_metric=args.with_speed_metric,
attn_splits_list=args.attn_splits_list,
corr_radius_list=args.corr_radius_list,
prop_radius_list=args.prop_radius_list,
)
if args.local_rank == 0:
val_results.update(results_dict)
if 'sintel' in args.val_dataset:
results_dict = validate_sintel(model_without_ddp,
count_time=args.count_time,
padding_factor=args.padding_factor,
with_speed_metric=args.with_speed_metric,
evaluate_matched_unmatched=args.evaluate_matched_unmatched,
attn_splits_list=args.attn_splits_list,
corr_radius_list=args.corr_radius_list,
prop_radius_list=args.prop_radius_list,
)
if args.local_rank == 0:
val_results.update(results_dict)
if 'kitti' in args.val_dataset:
results_dict = validate_kitti(model_without_ddp,
padding_factor=args.padding_factor,
with_speed_metric=args.with_speed_metric,
attn_splits_list=args.attn_splits_list,
corr_radius_list=args.corr_radius_list,
prop_radius_list=args.prop_radius_list,
)
if args.local_rank == 0:
val_results.update(results_dict)
if args.local_rank == 0:
logger.write_dict(val_results)
# Save validation results
val_file = os.path.join(args.checkpoint_dir, 'val_results.txt')
with open(val_file, 'a') as f:
f.write('step: %06d\n' % total_steps)
if args.evaluate_matched_unmatched:
metrics = ['chairs_epe',
'chairs_s0_10', 'chairs_s10_40', 'chairs_s40+',
'things_clean_epe', 'things_clean_s0_10', 'things_clean_s10_40',
'things_clean_s40+',
'sintel_clean_epe', 'sintel_clean_matched', 'sintel_clean_unmatched',
'sintel_clean_s0_10', 'sintel_clean_s10_40',
'sintel_clean_s40+',
'sintel_final_epe', 'sintel_final_matched', 'sintel_final_unmatched',
'sintel_final_s0_10', 'sintel_final_s10_40',
'sintel_final_s40+',
'kitti_epe', 'kitti_f1', 'kitti_s0_10', 'kitti_s10_40', 'kitti_s40+',
]
else:
metrics = ['chairs_epe', 'chairs_s0_10', 'chairs_s10_40', 'chairs_s40+',
'things_clean_epe', 'things_clean_s0_10', 'things_clean_s10_40',
'things_clean_s40+',
'sintel_clean_epe', 'sintel_clean_s0_10', 'sintel_clean_s10_40',
'sintel_clean_s40+',
'sintel_final_epe', 'sintel_final_s0_10', 'sintel_final_s10_40',
'sintel_final_s40+',
'kitti_epe', 'kitti_f1', 'kitti_s0_10', 'kitti_s10_40', 'kitti_s40+',
]
eval_metrics = []
for metric in metrics:
if metric in val_results.keys():
eval_metrics.append(metric)
metrics_values = [val_results[metric] for metric in eval_metrics]
num_metrics = len(eval_metrics)
# save as markdown format
if args.evaluate_matched_unmatched:
f.write(("| {:>25} " * num_metrics + '\n').format(*eval_metrics))
f.write(("| {:25.3f} " * num_metrics).format(*metrics_values))
else:
f.write(("| {:>20} " * num_metrics + '\n').format(*eval_metrics))
f.write(("| {:20.3f} " * num_metrics).format(*metrics_values))
f.write('\n\n')
model.train()
if total_steps >= args.num_steps:
print('Training done')
return
epoch += 1
if __name__ == '__main__':
parser = get_args_parser()
args = parser.parse_args()
if 'LOCAL_RANK' not in os.environ:
os.environ['LOCAL_RANK'] = str(args.local_rank)
main(args)
| [
"utils.dist_utils.get_dist_info",
"torch.cuda.device_count",
"data.build_train_dataset",
"torch.cuda.is_available",
"gmflow.gmflow.GMFlow",
"torch.utils.tensorboard.SummaryWriter",
"evaluate.validate_chairs",
"argparse.ArgumentParser",
"evaluate.create_sintel_submission",
"evaluate.validate_kitti",
"numpy.random.seed",
"utils.misc.check_path",
"evaluate.create_kitti_submission",
"utils.misc.save_args",
"evaluate.inference_on_dir",
"loss.flow_loss_func",
"evaluate.validate_sintel",
"utils.dist_utils.init_dist",
"utils.logger.Logger",
"utils.misc.save_command",
"torch.manual_seed",
"utils.dist_utils.setup_for_distributed",
"torch.optim.lr_scheduler.OneCycleLR",
"torch.load",
"os.path.join",
"torch.nn.DataParallel",
"torch.utils.data.DataLoader",
"evaluate.validate_things",
"torch.isnan"
] | [((623, 648), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (646, 648), False, 'import argparse\n'), ((6394, 6417), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (6411, 6417), False, 'import torch\n'), ((6423, 6443), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (6437, 6443), True, 'import numpy as np\n'), ((16120, 16145), 'data.build_train_dataset', 'build_train_dataset', (['args'], {}), '(args)\n', (16139, 16145), False, 'from data import build_train_dataset\n'), ((16563, 16744), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['train_dataset'], {'batch_size': 'args.batch_size', 'shuffle': 'shuffle', 'num_workers': 'args.num_workers', 'pin_memory': '(True)', 'drop_last': '(True)', 'sampler': 'train_sampler'}), '(train_dataset, batch_size=args.batch_size,\n shuffle=shuffle, num_workers=args.num_workers, pin_memory=True,\n drop_last=True, sampler=train_sampler)\n', (16590, 16744), False, 'import torch\n'), ((16974, 17143), 'torch.optim.lr_scheduler.OneCycleLR', 'torch.optim.lr_scheduler.OneCycleLR', (['optimizer', 'args.lr', '(args.num_steps + 10)'], {'pct_start': '(0.05)', 'cycle_momentum': '(False)', 'anneal_strategy': '"""cos"""', 'last_epoch': 'last_epoch'}), "(optimizer, args.lr, args.num_steps + 10,\n pct_start=0.05, cycle_momentum=False, anneal_strategy='cos', last_epoch\n =last_epoch)\n", (17009, 17143), False, 'import torch\n'), ((6917, 6956), 'utils.dist_utils.init_dist', 'init_dist', (['args.launcher'], {}), '(args.launcher, **dist_params)\n', (6926, 6956), False, 'from utils.dist_utils import get_dist_info, init_dist, setup_for_distributed\n'), ((7039, 7054), 'utils.dist_utils.get_dist_info', 'get_dist_info', ([], {}), '()\n', (7052, 7054), False, 'from utils.dist_utils import get_dist_info, init_dist, setup_for_distributed\n'), ((7174, 7217), 'utils.dist_utils.setup_for_distributed', 'setup_for_distributed', (['(args.local_rank == 0)'], {}), '(args.local_rank == 0)\n', (7195, 7217), False, 'from utils.dist_utils import get_dist_info, init_dist, setup_for_distributed\n'), ((8987, 9028), 'torch.load', 'torch.load', (['args.resume'], {'map_location': 'loc'}), '(args.resume, map_location=loc)\n', (8997, 9028), False, 'import torch\n'), ((15286, 15767), 'evaluate.inference_on_dir', 'inference_on_dir', (['model_without_ddp'], {'inference_dir': 'args.inference_dir', 'output_path': 'args.output_path', 'padding_factor': 'args.padding_factor', 'inference_size': 'args.inference_size', 'paired_data': 'args.dir_paired_data', 'save_flo_flow': 'args.save_flo_flow', 'attn_splits_list': 'args.attn_splits_list', 'corr_radius_list': 'args.corr_radius_list', 'prop_radius_list': 'args.prop_radius_list', 'pred_bidir_flow': 'args.pred_bidir_flow', 'fwd_bwd_consistency_check': 'args.fwd_bwd_consistency_check'}), '(model_without_ddp, inference_dir=args.inference_dir,\n output_path=args.output_path, padding_factor=args.padding_factor,\n inference_size=args.inference_size, paired_data=args.dir_paired_data,\n save_flo_flow=args.save_flo_flow, attn_splits_list=args.\n attn_splits_list, corr_radius_list=args.corr_radius_list,\n prop_radius_list=args.prop_radius_list, pred_bidir_flow=args.\n pred_bidir_flow, fwd_bwd_consistency_check=args.fwd_bwd_consistency_check)\n', (15302, 15767), False, 'from evaluate import validate_chairs, validate_things, validate_sintel, validate_kitti, create_sintel_submission, create_kitti_submission, inference_on_dir\n'), ((17255, 17289), 'torch.utils.tensorboard.SummaryWriter', 'SummaryWriter', (['args.checkpoint_dir'], {}), '(args.checkpoint_dir)\n', (17268, 17289), False, 'from torch.utils.tensorboard import SummaryWriter\n'), ((17308, 17386), 'utils.logger.Logger', 'Logger', (['lr_scheduler', 'summary_writer', 'args.summary_freq'], {'start_step': 'start_step'}), '(lr_scheduler, summary_writer, args.summary_freq, start_step=start_step)\n', (17314, 17386), False, 'from utils.logger import Logger\n'), ((6242, 6262), 'utils.misc.save_args', 'misc.save_args', (['args'], {}), '(args)\n', (6256, 6262), False, 'from utils import misc\n'), ((6276, 6312), 'utils.misc.check_path', 'misc.check_path', (['args.checkpoint_dir'], {}), '(args.checkpoint_dir)\n', (6291, 6312), False, 'from utils import misc\n'), ((6326, 6364), 'utils.misc.save_command', 'misc.save_command', (['args.checkpoint_dir'], {}), '(args.checkpoint_dir)\n', (6343, 6364), False, 'from utils import misc\n'), ((6836, 6861), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (6859, 6861), False, 'import torch\n'), ((7246, 7525), 'gmflow.gmflow.GMFlow', 'GMFlow', ([], {'feature_channels': 'args.feature_channels', 'num_scales': 'args.num_scales', 'upsample_factor': 'args.upsample_factor', 'num_head': 'args.num_head', 'attention_type': 'args.attention_type', 'ffn_dim_expansion': 'args.ffn_dim_expansion', 'num_transformer_layers': 'args.num_transformer_layers'}), '(feature_channels=args.feature_channels, num_scales=args.num_scales,\n upsample_factor=args.upsample_factor, num_head=args.num_head,\n attention_type=args.attention_type, ffn_dim_expansion=args.\n ffn_dim_expansion, num_transformer_layers=args.num_transformer_layers)\n', (7252, 7525), False, 'from gmflow.gmflow import GMFlow\n'), ((8071, 8096), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (8094, 8096), False, 'import torch\n'), ((8185, 8213), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['model'], {}), '(model)\n', (8206, 8213), False, 'import torch\n'), ((9744, 9949), 'evaluate.validate_chairs', 'validate_chairs', (['model_without_ddp'], {'with_speed_metric': 'args.with_speed_metric', 'attn_splits_list': 'args.attn_splits_list', 'corr_radius_list': 'args.corr_radius_list', 'prop_radius_list': 'args.prop_radius_list'}), '(model_without_ddp, with_speed_metric=args.with_speed_metric,\n attn_splits_list=args.attn_splits_list, corr_radius_list=args.\n corr_radius_list, prop_radius_list=args.prop_radius_list)\n', (9759, 9949), False, 'from evaluate import validate_chairs, validate_things, validate_sintel, validate_kitti, create_sintel_submission, create_kitti_submission, inference_on_dir\n'), ((10283, 10528), 'evaluate.validate_things', 'validate_things', (['model_without_ddp'], {'padding_factor': 'args.padding_factor', 'with_speed_metric': 'args.with_speed_metric', 'attn_splits_list': 'args.attn_splits_list', 'corr_radius_list': 'args.corr_radius_list', 'prop_radius_list': 'args.prop_radius_list'}), '(model_without_ddp, padding_factor=args.padding_factor,\n with_speed_metric=args.with_speed_metric, attn_splits_list=args.\n attn_splits_list, corr_radius_list=args.corr_radius_list,\n prop_radius_list=args.prop_radius_list)\n', (10298, 10528), False, 'from evaluate import validate_chairs, validate_things, validate_sintel, validate_kitti, create_sintel_submission, create_kitti_submission, inference_on_dir\n'), ((10900, 11243), 'evaluate.validate_sintel', 'validate_sintel', (['model_without_ddp'], {'count_time': 'args.count_time', 'padding_factor': 'args.padding_factor', 'with_speed_metric': 'args.with_speed_metric', 'evaluate_matched_unmatched': 'args.evaluate_matched_unmatched', 'attn_splits_list': 'args.attn_splits_list', 'corr_radius_list': 'args.corr_radius_list', 'prop_radius_list': 'args.prop_radius_list'}), '(model_without_ddp, count_time=args.count_time,\n padding_factor=args.padding_factor, with_speed_metric=args.\n with_speed_metric, evaluate_matched_unmatched=args.\n evaluate_matched_unmatched, attn_splits_list=args.attn_splits_list,\n corr_radius_list=args.corr_radius_list, prop_radius_list=args.\n prop_radius_list)\n', (10915, 11243), False, 'from evaluate import validate_chairs, validate_things, validate_sintel, validate_kitti, create_sintel_submission, create_kitti_submission, inference_on_dir\n'), ((11692, 11936), 'evaluate.validate_kitti', 'validate_kitti', (['model_without_ddp'], {'padding_factor': 'args.padding_factor', 'with_speed_metric': 'args.with_speed_metric', 'attn_splits_list': 'args.attn_splits_list', 'corr_radius_list': 'args.corr_radius_list', 'prop_radius_list': 'args.prop_radius_list'}), '(model_without_ddp, padding_factor=args.padding_factor,\n with_speed_metric=args.with_speed_metric, attn_splits_list=args.\n attn_splits_list, corr_radius_list=args.corr_radius_list,\n prop_radius_list=args.prop_radius_list)\n', (11706, 11936), False, 'from evaluate import validate_chairs, validate_things, validate_sintel, validate_kitti, create_sintel_submission, create_kitti_submission, inference_on_dir\n'), ((12281, 12317), 'utils.misc.check_path', 'misc.check_path', (['args.checkpoint_dir'], {}), '(args.checkpoint_dir)\n', (12296, 12317), False, 'from utils import misc\n'), ((12342, 12394), 'os.path.join', 'os.path.join', (['args.checkpoint_dir', '"""val_results.txt"""'], {}), "(args.checkpoint_dir, 'val_results.txt')\n", (12354, 12394), False, 'import os\n'), ((13923, 14233), 'evaluate.create_sintel_submission', 'create_sintel_submission', (['model_without_ddp'], {'output_path': 'args.output_path', 'padding_factor': 'args.padding_factor', 'save_vis_flow': 'args.save_vis_flow', 'no_save_flo': 'args.no_save_flo', 'attn_splits_list': 'args.attn_splits_list', 'corr_radius_list': 'args.corr_radius_list', 'prop_radius_list': 'args.prop_radius_list'}), '(model_without_ddp, output_path=args.output_path,\n padding_factor=args.padding_factor, save_vis_flow=args.save_vis_flow,\n no_save_flo=args.no_save_flo, attn_splits_list=args.attn_splits_list,\n corr_radius_list=args.corr_radius_list, prop_radius_list=args.\n prop_radius_list)\n', (13947, 14233), False, 'from evaluate import validate_chairs, validate_things, validate_sintel, validate_kitti, create_sintel_submission, create_kitti_submission, inference_on_dir\n'), ((18222, 18311), 'loss.flow_loss_func', 'flow_loss_func', (['flow_preds', 'flow_gt', 'valid'], {'gamma': 'args.gamma', 'max_flow': 'args.max_flow'}), '(flow_preds, flow_gt, valid, gamma=args.gamma, max_flow=args.\n max_flow)\n', (18236, 18311), False, 'from loss import flow_loss_func\n'), ((18528, 18545), 'torch.isnan', 'torch.isnan', (['loss'], {}), '(loss)\n', (18539, 18545), False, 'import torch\n'), ((6599, 6624), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (6622, 6624), False, 'import torch\n'), ((6759, 6784), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (6782, 6784), False, 'import torch\n'), ((14582, 14857), 'evaluate.create_kitti_submission', 'create_kitti_submission', (['model_without_ddp'], {'output_path': 'args.output_path', 'padding_factor': 'args.padding_factor', 'save_vis_flow': 'args.save_vis_flow', 'attn_splits_list': 'args.attn_splits_list', 'corr_radius_list': 'args.corr_radius_list', 'prop_radius_list': 'args.prop_radius_list'}), '(model_without_ddp, output_path=args.output_path,\n padding_factor=args.padding_factor, save_vis_flow=args.save_vis_flow,\n attn_splits_list=args.attn_splits_list, corr_radius_list=args.\n corr_radius_list, prop_radius_list=args.prop_radius_list)\n', (14605, 14857), False, 'from evaluate import validate_chairs, validate_things, validate_sintel, validate_kitti, create_sintel_submission, create_kitti_submission, inference_on_dir\n'), ((16387, 16412), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (16410, 16412), False, 'import torch\n'), ((19644, 19702), 'os.path.join', 'os.path.join', (['args.checkpoint_dir', '"""checkpoint_latest.pth"""'], {}), "(args.checkpoint_dir, 'checkpoint_latest.pth')\n", (19656, 19702), False, 'import os\n'), ((8137, 8162), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (8160, 8162), False, 'import torch\n'), ((8565, 8609), 'os.path.join', 'os.path.join', (['args.checkpoint_dir', 'save_name'], {}), '(args.checkpoint_dir, save_name)\n', (8577, 8609), False, 'import os\n'), ((19339, 19403), 'os.path.join', 'os.path.join', (['args.checkpoint_dir', "('step_%06d.pth' % total_steps)"], {}), "(args.checkpoint_dir, 'step_%06d.pth' % total_steps)\n", (19351, 19403), False, 'import os\n'), ((20313, 20518), 'evaluate.validate_chairs', 'validate_chairs', (['model_without_ddp'], {'with_speed_metric': 'args.with_speed_metric', 'attn_splits_list': 'args.attn_splits_list', 'corr_radius_list': 'args.corr_radius_list', 'prop_radius_list': 'args.prop_radius_list'}), '(model_without_ddp, with_speed_metric=args.with_speed_metric,\n attn_splits_list=args.attn_splits_list, corr_radius_list=args.\n corr_radius_list, prop_radius_list=args.prop_radius_list)\n', (20328, 20518), False, 'from evaluate import validate_chairs, validate_things, validate_sintel, validate_kitti, create_sintel_submission, create_kitti_submission, inference_on_dir\n'), ((20964, 21209), 'evaluate.validate_things', 'validate_things', (['model_without_ddp'], {'padding_factor': 'args.padding_factor', 'with_speed_metric': 'args.with_speed_metric', 'attn_splits_list': 'args.attn_splits_list', 'corr_radius_list': 'args.corr_radius_list', 'prop_radius_list': 'args.prop_radius_list'}), '(model_without_ddp, padding_factor=args.padding_factor,\n with_speed_metric=args.with_speed_metric, attn_splits_list=args.\n attn_splits_list, corr_radius_list=args.corr_radius_list,\n prop_radius_list=args.prop_radius_list)\n', (20979, 21209), False, 'from evaluate import validate_chairs, validate_things, validate_sintel, validate_kitti, create_sintel_submission, create_kitti_submission, inference_on_dir\n'), ((21703, 22046), 'evaluate.validate_sintel', 'validate_sintel', (['model_without_ddp'], {'count_time': 'args.count_time', 'padding_factor': 'args.padding_factor', 'with_speed_metric': 'args.with_speed_metric', 'evaluate_matched_unmatched': 'args.evaluate_matched_unmatched', 'attn_splits_list': 'args.attn_splits_list', 'corr_radius_list': 'args.corr_radius_list', 'prop_radius_list': 'args.prop_radius_list'}), '(model_without_ddp, count_time=args.count_time,\n padding_factor=args.padding_factor, with_speed_metric=args.\n with_speed_metric, evaluate_matched_unmatched=args.\n evaluate_matched_unmatched, attn_splits_list=args.attn_splits_list,\n corr_radius_list=args.corr_radius_list, prop_radius_list=args.\n prop_radius_list)\n', (21718, 22046), False, 'from evaluate import validate_chairs, validate_things, validate_sintel, validate_kitti, create_sintel_submission, create_kitti_submission, inference_on_dir\n'), ((22633, 22877), 'evaluate.validate_kitti', 'validate_kitti', (['model_without_ddp'], {'padding_factor': 'args.padding_factor', 'with_speed_metric': 'args.with_speed_metric', 'attn_splits_list': 'args.attn_splits_list', 'corr_radius_list': 'args.corr_radius_list', 'prop_radius_list': 'args.prop_radius_list'}), '(model_without_ddp, padding_factor=args.padding_factor,\n with_speed_metric=args.with_speed_metric, attn_splits_list=args.\n attn_splits_list, corr_radius_list=args.corr_radius_list,\n prop_radius_list=args.prop_radius_list)\n', (22647, 22877), False, 'from evaluate import validate_chairs, validate_things, validate_sintel, validate_kitti, create_sintel_submission, create_kitti_submission, inference_on_dir\n'), ((23454, 23506), 'os.path.join', 'os.path.join', (['args.checkpoint_dir', '"""val_results.txt"""'], {}), "(args.checkpoint_dir, 'val_results.txt')\n", (23466, 23506), False, 'import os\n')] |
import numpy as np
import torch
def pad_tensor_with_nans(tensor: torch.Tensor, desired_length) -> torch.Tensor:
assert len(tensor.shape) == 2
l, w = tensor.shape
nan_tensor = torch.tensor([[np.NAN] * w] * (desired_length - l))
padded_tensor = torch.cat([tensor, nan_tensor])
return padded_tensor
def unpad_nans_from_tensor(tensor: torch.Tensor) -> torch.Tensor:
assert len(tensor.shape) == 2
filtered_tensor = tensor[~torch.all(tensor.isnan(), dim=1)]
return filtered_tensor
| [
"torch.tensor",
"torch.cat"
] | [((189, 240), 'torch.tensor', 'torch.tensor', (['([[np.NAN] * w] * (desired_length - l))'], {}), '([[np.NAN] * w] * (desired_length - l))\n', (201, 240), False, 'import torch\n'), ((261, 292), 'torch.cat', 'torch.cat', (['[tensor, nan_tensor]'], {}), '([tensor, nan_tensor])\n', (270, 292), False, 'import torch\n')] |
import tweepy
import time
class Tweet:
def __init__(self, config):
self.delay = config['Twitter']['delay']
consumer_key = config['Twitter']['TwitterApi']['consumer_key']
consumer_secret = config['Twitter']['TwitterApi']['consumer_secret']
access_token = config['Twitter']['TwitterApi']['access_token']
access_token_secret = config['Twitter']['TwitterApi']['token_secret']
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
self._api = tweepy.API(auth)
def postStatus(self, status):
self._api.update_status(status)
def postMedia(self, imagePaths, status):
mediaIds = []
for imagePath in imagePaths:
res = self._api.media_upload(imagePath)
mediaIds.append(res.media_id)
# Tweet with multiple images
self._api.update_status(status=status, media_ids=mediaIds)
def postBatch(self, listDicCharts):
for chart in listDicCharts:
self.postMedia(chart['Files'], chart['Message'])
time.sleep(self.delay)
| [
"tweepy.API",
"time.sleep",
"tweepy.OAuthHandler"
] | [((434, 484), 'tweepy.OAuthHandler', 'tweepy.OAuthHandler', (['consumer_key', 'consumer_secret'], {}), '(consumer_key, consumer_secret)\n', (453, 484), False, 'import tweepy\n'), ((571, 587), 'tweepy.API', 'tweepy.API', (['auth'], {}), '(auth)\n', (581, 587), False, 'import tweepy\n'), ((1117, 1139), 'time.sleep', 'time.sleep', (['self.delay'], {}), '(self.delay)\n', (1127, 1139), False, 'import time\n')] |
"""
This module contains functions to parse MI obo files into a dictionary of
:class:`Term` instances representing `PSI-MI` annotations.
"""
import gzip
from ..base.file_paths import psimi_obo_file
__PSIMI_GRAPH__ = None
def get_active_instance(**kwargs):
global __PSIMI_GRAPH__
if __PSIMI_GRAPH__ is None:
filename = kwargs.get("filename", psimi_obo_file)
__PSIMI_GRAPH__ = parse_miobo_file(filename)
return __PSIMI_GRAPH__
# ------------------------------------------------------ #
#
# OBO PARSER
#
# ------------------------------------------------------ #
MiOntology = dict
class Term(object):
"""A class representing subset of the Psi-Mi term properties.
Parameters
----------
id : str
Accession of the term.
name : str
Text desctription of the term.
is_obsolete : bool
Boolean indicating if the term is obsolete or not.
"""
def __init__(self, id, name, is_obsolete):
self.id = id
self.name = name
self.is_obsolete = is_obsolete
def process_term(fp):
"""Parse obo entry into a :class:`Term` instance."""
id_ = None
term = None
line = "[Term]"
is_obsolete = False
alt_ids = []
while line.strip() != "":
line = fp.readline().strip()
if line.startswith("id:"):
_, id_ = [x.strip() for x in line.split('id: ')]
elif line.startswith("alt_id:"):
_, alt_id = [x.strip() for x in line.split('alt_id: ')]
alt_ids += [alt_id]
elif line.startswith("name:"):
_, name = [x.strip() for x in line.split('name: ')]
elif line.startswith("is_obsolete"):
_, is_obsolete = [x.strip() for x in line.split('is_obsolete: ')]
is_obsolete = bool(is_obsolete)
else:
continue
term = Term(id_, name, is_obsolete)
return id_, alt_ids, term
def parse_miobo_file(filename):
"""
Parses all Term objects into a dictionary of :class:`Term`s. Each term
contains a small subset of the possible keys: id, name, namespace, is_a,
part_of and is_obsolete.
Parameters
----------
filename : str
Path for obo file. Must be gzipped.
Returns
-------
`dict`
Mapping from accession to :class:`Term`
"""
graph = MiOntology()
alt_id_map = {}
with gzip.open(filename, 'rt') as fp:
for line in fp:
line = line.strip()
if "format-version" in line:
_, version = [x.strip() for x in line.split(":")]
version = float(version)
if version != 1.2:
raise ValueError("Parser only supports version 1.2.")
elif "[Term]" in line:
tid, alt, term = process_term(fp)
alt_id_map[tid] = alt
graph[tid] = term
else:
continue
# Turn the string ids into object references.
for tid, alts in alt_id_map.items():
term = graph[tid]
for alt_tid in alts:
graph[alt_tid] = term
return graph
| [
"gzip.open"
] | [((2402, 2427), 'gzip.open', 'gzip.open', (['filename', '"""rt"""'], {}), "(filename, 'rt')\n", (2411, 2427), False, 'import gzip\n')] |
import pytest
from helper_functions.functions import ListFunction
@pytest.fixture()
def test_list():
test_list = [a, b, c, d]
return test_list
@pytest.fixture()
def test_df():
data = {
'Name': ['Tom', 'nick', 'krish', 'jack'],
'Age': [20, 21, 19, 18]
}
df = pd.DataFrame(data)
| [
"pytest.fixture"
] | [((68, 84), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (82, 84), False, 'import pytest\n'), ((154, 170), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (168, 170), False, 'import pytest\n')] |
"""
Convective boundary layer
ingredients:
- linear reference buoyancy profile (stable)
- uniform heating from the bottom
- no cooling in the column
- viscosity and diffusion (taken equal)
"""
from param import Param
from grid import Grid
from fluid2d import Fluid2d
import numpy as np
param = Param('default.xml')
param.modelname = 'boussinesq'
param.expname = 'RB_128'
# domain and resolution
param.nx = 128*2
param.ny = 128
param.npx = 1
param.Lx = 2.
param.Ly = 1.
param.geometry = 'xchannel'
# time
param.tend = 25.
param.cfl = 0.9 # <- 1.0 is too large
assert param.cfl <= 0.9
param.adaptable_dt = True
param.dt = 1e2
param.dtmax = 0.2
param.exacthistime = True
# discretization
param.order = 5
# output
param.plot_var = 'buoyancy'
param.var_to_save = ['vorticity', 'buoyancy', 'v', 'psi']
param.list_diag = 'all'
param.freq_his = 0.2
param.freq_diag = 0.2
# plot
param.plot_interactive = True
param.freq_plot = 10
param.colorscheme = 'imposed'
param.cax = [0, 2]
param.generate_mp4 = False
# physics
param.gravity = 1.
param.forcing = True
param.forcing_module = 'embedded'
param.diffusion = True
param.noslip = True
grid = Grid(param)
# Prandtl number is Kvorticity / Kbuoyancy
prandtl = 1.
visco = .05*grid.dy
diffus = visco / prandtl
param.Kdiff = {}
param.Kdiff['vorticity'] = visco
param.Kdiff['buoyancy'] = diffus
param.heatflux = 1e-2
class Forcing:
""" define the forcing """
def __init__(self, param, grid):
self.list_param = ['deltab', 'dt']
param.copy(self, self.list_param)
self.list_param = ['j0', 'npy', 'nh']
grid.copy(self, self.list_param)
Q = param.heatflux
nh = param.nh
self.nh = nh
self.forc = np.zeros_like(grid.yr)
if grid.j0 == 0:
self.forc[nh, :] = +Q
self.forc *= grid.msk
# transform the surface flux into a volume flux
self.forc *= (1./grid.dx)
self.dz = grid.dy
self.K = param.Kdiff['buoyancy']
def add_forcing(self, x, t, dxdt,coef=1):
""" add the forcing term on x[0]=the vorticity """
dxdt[4] += self.forc*coef
nh=self.nh
dxdt[4][-nh-1] += self.K*(x[4][-nh-1]-x[4][-nh-2])/self.dz**2*coef
f2d = Fluid2d(param, grid)
model = f2d.model
model.forc = Forcing(param, grid)
xr, zr = grid.xr, grid.yr
buoy = model.var.get('buoyancy')
# linear reference stratification
buoy[:] = zr*2
# add noise to trigger the instability
np.random.seed(42)
noise = np.random.normal(size=np.shape(xr))*grid.msk
noise -= grid.domain_integration(noise)*grid.msk/grid.area
grid.fill_halo(noise)
buoy += 1e-3*noise
model.set_psi_from_vorticity()
f2d.loop()
#print(buoy.shape, nh)
| [
"grid.Grid",
"numpy.shape",
"fluid2d.Fluid2d",
"numpy.random.seed",
"param.Param",
"numpy.zeros_like"
] | [((310, 330), 'param.Param', 'Param', (['"""default.xml"""'], {}), "('default.xml')\n", (315, 330), False, 'from param import Param\n'), ((1156, 1167), 'grid.Grid', 'Grid', (['param'], {}), '(param)\n', (1160, 1167), False, 'from grid import Grid\n'), ((2252, 2272), 'fluid2d.Fluid2d', 'Fluid2d', (['param', 'grid'], {}), '(param, grid)\n', (2259, 2272), False, 'from fluid2d import Fluid2d\n'), ((2476, 2494), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (2490, 2494), True, 'import numpy as np\n'), ((1730, 1752), 'numpy.zeros_like', 'np.zeros_like', (['grid.yr'], {}), '(grid.yr)\n', (1743, 1752), True, 'import numpy as np\n'), ((2525, 2537), 'numpy.shape', 'np.shape', (['xr'], {}), '(xr)\n', (2533, 2537), True, 'import numpy as np\n')] |
from subprocess import check_call
def runserver() -> None:
check_call(["python", "manage.py", "runserver"])
def runprod():
check_call(["python", "manage.py", "runprod"])
def runworker():
check_call(["python", "manage.py", "runworker"])
def format():
check_call(["black", "api/"])
check_call(["black", "tests/"])
check_call(["black", "manage.py"])
check_call(["black", "index.py"])
check_call(["black", "scripts.py"])
| [
"subprocess.check_call"
] | [((65, 113), 'subprocess.check_call', 'check_call', (["['python', 'manage.py', 'runserver']"], {}), "(['python', 'manage.py', 'runserver'])\n", (75, 113), False, 'from subprocess import check_call\n'), ((135, 181), 'subprocess.check_call', 'check_call', (["['python', 'manage.py', 'runprod']"], {}), "(['python', 'manage.py', 'runprod'])\n", (145, 181), False, 'from subprocess import check_call\n'), ((205, 253), 'subprocess.check_call', 'check_call', (["['python', 'manage.py', 'runworker']"], {}), "(['python', 'manage.py', 'runworker'])\n", (215, 253), False, 'from subprocess import check_call\n'), ((274, 303), 'subprocess.check_call', 'check_call', (["['black', 'api/']"], {}), "(['black', 'api/'])\n", (284, 303), False, 'from subprocess import check_call\n'), ((308, 339), 'subprocess.check_call', 'check_call', (["['black', 'tests/']"], {}), "(['black', 'tests/'])\n", (318, 339), False, 'from subprocess import check_call\n'), ((344, 378), 'subprocess.check_call', 'check_call', (["['black', 'manage.py']"], {}), "(['black', 'manage.py'])\n", (354, 378), False, 'from subprocess import check_call\n'), ((383, 416), 'subprocess.check_call', 'check_call', (["['black', 'index.py']"], {}), "(['black', 'index.py'])\n", (393, 416), False, 'from subprocess import check_call\n'), ((421, 456), 'subprocess.check_call', 'check_call', (["['black', 'scripts.py']"], {}), "(['black', 'scripts.py'])\n", (431, 456), False, 'from subprocess import check_call\n')] |
import x16r_hash
from binascii import hexlify, unhexlify
teststart = '700000005d385ba114d079970b29a9418fd0549e7d68a95c7f168621a314201000000000578586d149fd07b22f3a8a347c516de7052f034d2b76ff68e0d6ecff9b77a45489e3fd511732011df0731000'
testbin = unhexlify(teststart)
hash_bin = x16r_hash.getPoWHash(testbin)
testoutput = b'77a19463753c27887c5697b47118719f4af6fba0647eddde71a938e7b3dd0d48'
assert hexlify(hash_bin) == testoutput
print('Test succeeded')
| [
"x16r_hash.getPoWHash",
"binascii.hexlify",
"binascii.unhexlify"
] | [((243, 263), 'binascii.unhexlify', 'unhexlify', (['teststart'], {}), '(teststart)\n', (252, 263), False, 'from binascii import hexlify, unhexlify\n'), ((275, 304), 'x16r_hash.getPoWHash', 'x16r_hash.getPoWHash', (['testbin'], {}), '(testbin)\n', (295, 304), False, 'import x16r_hash\n'), ((395, 412), 'binascii.hexlify', 'hexlify', (['hash_bin'], {}), '(hash_bin)\n', (402, 412), False, 'from binascii import hexlify, unhexlify\n')] |
import winsound
import time
playSound = True
frequency = 2500 # Hz
shortDuration = 100 # ms
longDuration = 400 # ms
breakTime = .05 # ms
alphabet = {'a': ".-", 'b': "-...", 'c': "-.-.", 'd': "-..", 'e': ".", 'f': "..-.", 'g': "--.", 'h': "....", 'i': "..",
'j': ".---", 'k': "-.-", 'l': ".-..", 'm': "--", 'n': "-.", 'o': "---", 'p': ".--.", 'q': "--.-",
'r': ".-.", 's': "...", 't': "-", 'u': "..-", 'v': "...-", 'w': ".--", 'x': "-..-", 'y': "-.--",
'z': "--..", '0': "-----", '1': ".----", '2': "..---", '3': "...--", '4': "....-", '5': ".....",
'6': "-....", '7': "--...", '8': "---..", '9': "----."}
message = input("Input message: ").lower()
morse = ""
unknown = ""
for x in message:
if x == " ":
morse += " "
elif alphabet.keys().__contains__(x):
morse += alphabet[x] + " "
else:
unknown += x
print("\n Input in Morse code: " + morse)
print("\n Nonconvertible characters: " + unknown)
if playSound:
for x in morse:
if x == ".":
winsound.Beep(frequency, shortDuration)
time.sleep(breakTime)
elif x == "-":
winsound.Beep(frequency, shortDuration)
time.sleep(breakTime * (longDuration / shortDuration))
elif x == " ":
time.sleep(breakTime)
| [
"winsound.Beep",
"time.sleep"
] | [((1057, 1096), 'winsound.Beep', 'winsound.Beep', (['frequency', 'shortDuration'], {}), '(frequency, shortDuration)\n', (1070, 1096), False, 'import winsound\n'), ((1109, 1130), 'time.sleep', 'time.sleep', (['breakTime'], {}), '(breakTime)\n', (1119, 1130), False, 'import time\n'), ((1166, 1205), 'winsound.Beep', 'winsound.Beep', (['frequency', 'shortDuration'], {}), '(frequency, shortDuration)\n', (1179, 1205), False, 'import winsound\n'), ((1218, 1272), 'time.sleep', 'time.sleep', (['(breakTime * (longDuration / shortDuration))'], {}), '(breakTime * (longDuration / shortDuration))\n', (1228, 1272), False, 'import time\n'), ((1308, 1329), 'time.sleep', 'time.sleep', (['breakTime'], {}), '(breakTime)\n', (1318, 1329), False, 'import time\n')] |
from flask import Blueprint
from flask import flash
from flask import g
from flask import redirect
from flask import render_template
from flask import request
from flask import url_for
from werkzeug.exceptions import abort
import movlist.auth
import movlist.db
bp = Blueprint("blog", __name__)
@bp.route("/")
def index():
"""Show all the posts, most recent first."""
db = movlist.db.get()
user_id = g.user['id'] if g.user else -1
entries = db.execute(
"SELECT l.id as list_id, m.id as movie_id, m.title, u.username, COALESCE(r.rating, 0) as user_rating, l.avg_rating, l.date_added"
" FROM movie_list l"
" LEFT JOIN user u ON l.user_id = u.id"
" LEFT JOIN movie m ON l.movie_id = m.id"
f" LEFT JOIN bridge_movie_user_rating r on l.movie_id = r.movie_id and r.user_id = {user_id}"
" ORDER BY date_added DESC"
).fetchall()
return render_template("blog/index.html", entries=entries)
def get_post(id, check_author=True):
"""Get a post and its author by id.
Checks that the id exists and optionally that the current user is
the author.
:param id: id of post to get
:param check_author: require the current user to be the author
:return: the post with author information
:raise 404: if a post with the given id doesn't exist
:raise 403: if the current user isn't the author
"""
post = (
movlist.db.get()
.execute(
"SELECT l.id, title, body, created, author_id, username"
" FROM post p JOIN user u ON p.author_id = u.id"
" WHERE p.id = ?",
(id,),
).fetchone()
)
if post is None:
abort(404, f"Post id {id} doesn't exist.")
if check_author and post["author_id"] != g.user["id"]:
abort(403)
return post
@bp.route("/create", methods=("GET", "POST"))
@movlist.auth.login_required
def create():
"""Create a new film for the current user."""
if request.method == "POST":
title = request.form["title"]
error = None
# TODO: Check title before passing it to the database
if not title:
error = "Title is required."
if error is not None:
flash(error)
else:
db = movlist.db.get()
# check if movie exists in the database
movie_exist_entry = db.execute(
f"SELECT id FROM movie m WHERE m.title = \"{title}\""
).fetchone()
if movie_exist_entry is None:
db.execute(
f"INSERT INTO movie (title) VALUES (\"{title}\")",
)
movie_entry = db.execute(
f"SELECT id FROM movie m WHERE m.title = \"{title}\""
).fetchone()
db.execute(
"INSERT INTO movie_list (movie_id, user_id) VALUES (?,?)",
(movie_entry["id"], g.user["id"]),
)
db.commit()
return redirect(url_for("blog.index"))
return render_template("blog/create.html")
@bp.route("/<int:id>/update", methods=("GET", "POST"))
@movlist.auth.login_required
def update(id):
"""Update a post if the current user is the author."""
post = get_post(id)
if request.method == "POST":
title = request.form["title"]
body = request.form["body"]
error = None
if not title:
error = "Title is required."
if error is not None:
flash(error)
else:
db = movlist.db.get()
db.execute(
"UPDATE post SET title = ?, body = ? WHERE id = ?", (title, body, id)
)
db.commit()
return redirect(url_for("blog.index"))
return render_template("blog/update.html", post=post)
@bp.route("/<int:id>/delete", methods=("POST",))
@movlist.auth.login_required
def delete(id):
"""Delete a post.
Ensures that the post exists and that the logged in user is the
author of the post.
"""
get_post(id)
db = movlist.db.get()
db.execute("DELETE FROM post WHERE id = ?", (id,))
db.commit()
return redirect(url_for("blog.index"))
| [
"flask.render_template",
"flask.flash",
"flask.url_for",
"werkzeug.exceptions.abort",
"flask.Blueprint"
] | [((268, 295), 'flask.Blueprint', 'Blueprint', (['"""blog"""', '__name__'], {}), "('blog', __name__)\n", (277, 295), False, 'from flask import Blueprint\n'), ((903, 954), 'flask.render_template', 'render_template', (['"""blog/index.html"""'], {'entries': 'entries'}), "('blog/index.html', entries=entries)\n", (918, 954), False, 'from flask import render_template\n'), ((3048, 3083), 'flask.render_template', 'render_template', (['"""blog/create.html"""'], {}), "('blog/create.html')\n", (3063, 3083), False, 'from flask import render_template\n'), ((3777, 3823), 'flask.render_template', 'render_template', (['"""blog/update.html"""'], {'post': 'post'}), "('blog/update.html', post=post)\n", (3792, 3823), False, 'from flask import render_template\n'), ((1680, 1722), 'werkzeug.exceptions.abort', 'abort', (['(404)', 'f"""Post id {id} doesn\'t exist."""'], {}), '(404, f"Post id {id} doesn\'t exist.")\n', (1685, 1722), False, 'from werkzeug.exceptions import abort\n'), ((1791, 1801), 'werkzeug.exceptions.abort', 'abort', (['(403)'], {}), '(403)\n', (1796, 1801), False, 'from werkzeug.exceptions import abort\n'), ((4177, 4198), 'flask.url_for', 'url_for', (['"""blog.index"""'], {}), "('blog.index')\n", (4184, 4198), False, 'from flask import url_for\n'), ((2221, 2233), 'flask.flash', 'flash', (['error'], {}), '(error)\n', (2226, 2233), False, 'from flask import flash\n'), ((3505, 3517), 'flask.flash', 'flash', (['error'], {}), '(error)\n', (3510, 3517), False, 'from flask import flash\n'), ((3013, 3034), 'flask.url_for', 'url_for', (['"""blog.index"""'], {}), "('blog.index')\n", (3020, 3034), False, 'from flask import url_for\n'), ((3742, 3763), 'flask.url_for', 'url_for', (['"""blog.index"""'], {}), "('blog.index')\n", (3749, 3763), False, 'from flask import url_for\n')] |
#!/usr/bin/python
#
# This program demonstrates how to convert the raw values from an accelerometer to Gs
#
# The BerryIMUv1, BerryIMUv2 and BerryIMUv3 are supported
#
# This script is python 2.7 and 3 compatible
#
# Feel free to do whatever you like with this code.
# Distributed as-is; no warranty is given.
#
# https://ozzmaker.com/accelerometer-to-g/
import time
import IMU
import sys
IMU.detectIMU() #Detect if BerryIMU is connected.
if(IMU.BerryIMUversion == 99):
print(" No BerryIMU found... exiting ")
sys.exit()
IMU.initIMU() #Initialise the accelerometer, gyroscope and compass
while True:
#Read the accelerometer,gyroscope and magnetometer values
ACCx = IMU.readACCx()
ACCy = IMU.readACCy()
ACCz = IMU.readACCz()
yG = (ACCx * 0.244)/1000
xG = (ACCy * 0.244)/1000
zG = (ACCz * 0.244)/1000
print("##### X = %fG ##### Y = %fG ##### Z = %fG #####" % ( yG, xG, zG))
#slow program down a bit, makes the output more readable
time.sleep(0.03)
| [
"IMU.readACCy",
"IMU.readACCz",
"time.sleep",
"IMU.detectIMU",
"sys.exit",
"IMU.readACCx",
"IMU.initIMU"
] | [((430, 445), 'IMU.detectIMU', 'IMU.detectIMU', ([], {}), '()\n', (443, 445), False, 'import IMU\n'), ((574, 587), 'IMU.initIMU', 'IMU.initIMU', ([], {}), '()\n', (585, 587), False, 'import IMU\n'), ((563, 573), 'sys.exit', 'sys.exit', ([], {}), '()\n', (571, 573), False, 'import sys\n'), ((737, 751), 'IMU.readACCx', 'IMU.readACCx', ([], {}), '()\n', (749, 751), False, 'import IMU\n'), ((763, 777), 'IMU.readACCy', 'IMU.readACCy', ([], {}), '()\n', (775, 777), False, 'import IMU\n'), ((789, 803), 'IMU.readACCz', 'IMU.readACCz', ([], {}), '()\n', (801, 803), False, 'import IMU\n'), ((1042, 1058), 'time.sleep', 'time.sleep', (['(0.03)'], {}), '(0.03)\n', (1052, 1058), False, 'import time\n')] |
#!/usr/bin/env python
import os
import pygame
import random
import sys
import time
from collections import namedtuple
from datetime import timedelta, datetime
pygame.init()
pygame.font.init()
sys_font = pygame.font.SysFont("fixed", 14)
iphone_dims = 320, 480
target_box_width = 0.075
target_box_padding = 0.025
draw_color = pygame.Color(127, 127, 127, 255)
line_color = pygame.Color(237, 237, 237, 255)
backdrop_color = pygame.Color(255, 255, 255, 255)
text_color = pygame.Color(67, 67, 67, 255)
rect_color = pygame.Color(217, 67, 67, 255)
# Query display modes and choose the second largest
modes = pygame.display.list_modes()
window_height = modes[1][1] if len(modes) > 1 else modes[0][1]
def DimensionToPixels(dimension):
return int((1.0 * dimension * window_height) / iphone_dims[1])
def DimensionsToPixels(dimensions):
return [DimensionToPixels(x) for x in dimensions]
# Create the simulation window
window_dimensions = [d for d in DimensionsToPixels(iphone_dims)]
window = pygame.display.set_mode(window_dimensions, pygame.DOUBLEBUF | \
pygame.HWSURFACE | \
pygame.RESIZABLE)
pygame.display.set_caption('Timeline')
def GetSurface():
return pygame.display.get_surface()
def ClearSurface():
GetSurface().fill(backdrop_color)
def DrawTargetBox():
box = [iphone_dims[0] * (1.0 - target_box_width - target_box_padding),
iphone_dims[1] * target_box_padding,
iphone_dims[0] * target_box_width,
iphone_dims[1] * (1.0 - target_box_padding * 2)]
pygame.draw.rect(GetSurface(), draw_color, DimensionsToPixels(box), 1)
def DrawLabel(x_off, y_off, label, color, border=False):
max_y = 1.0 - target_box_padding * 2
y_off = y_off * max_y + target_box_padding * iphone_dims[1]
ts = sys_font.render(label, True, color)
offset = DimensionsToPixels([x_off, y_off])
GetSurface().blit(ts, offset)
pygame.draw.line(GetSurface(), line_color,
(offset[0] + ts.get_width(), offset[1] + ts.get_height()),
(window_dimensions[0], offset[1] + ts.get_height()), 1)
if border:
pygame.draw.rect(GetSurface(), color,
[offset[0], offset[1], ts.get_width(), ts.get_height()], 1)
def DrawLabels(labels, x_off, transparency):
color = pygame.Color(*[int(x + transparency * (y - x)) for x, y in zip(text_color, backdrop_color)])
for label, pos in zip(labels, xrange(len(labels))):
DrawLabel(x_off, (float(pos) * iphone_dims[1]) / (len(labels) - 1), label, color)
def EnforceBounds(val, min_val, max_val):
val = max(val, min_val)
return min(val, max_val)
def ProcessMotion(active, last_pos):
new_pos = last_pos
for event in pygame.event.get():
if event.type == pygame.QUIT or event.type == pygame.KEYDOWN:
return sys.exit(0)
if active:
if event.type == pygame.MOUSEMOTION:
if event.buttons[0]:
new_pos = event.pos
elif event.type == pygame.MOUSEBUTTONUP:
if event.button == 1:
active = False
else:
if event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 1:
x_pos, y_pos = [float(pos) / dim for pos, dim in zip(event.pos, window_dimensions)]
if x_pos > (1.0 - target_box_width - target_box_padding) and \
x_pos < (1.0 - target_box_padding) and \
y_pos > target_box_padding and \
y_pos < 1.0 - target_box_padding:
active = True
new_pos = event.pos
x_ratio = EnforceBounds(float(new_pos[0]) / window_dimensions[0], 0.0, 1.0)
old_y_ratio = EnforceBounds(float(last_pos[1]) / window_dimensions[1], 0.0, 1.0)
y_ratio = EnforceBounds(float(new_pos[1]) / window_dimensions[1], 0.0, 1.0)
y_delta = y_ratio - old_y_ratio
return active, new_pos, x_ratio, y_ratio, y_delta
def GetNextMonth(date):
if date.month == 12:
return date.replace(year=date.year + 1, month=1, day=1, hour=0, minute=0, second=0, microsecond=0)
else:
return date.replace(month=date.month + 1, day=1, hour=0, minute=0, second=0, microsecond=0)
def GetRange(level, max_date, min_date, cur_date):
num_ticks = max_date.year - min_date.year + 1
end_fmt = '%Y'
cur_fmt = '%B %Y'
tick_fmt = '%Y'
if level == 'years':
max_date = max_date.replace(year=max_date.year + 1, month=1, day=1, hour=0, minute=0, second=0, microsecond=0)
min_date = min_date.replace(month=1, day=1, hour=0, minute=0, second=0, microsecond=0)
num_ticks = max_date.year - min_date.year + 1
if level == 'months':
max_date = cur_date.replace(year=cur_date.year + 1, month=1, day=1, hour=0, minute=0, second=0, microsecond=0)
min_date = cur_date.replace(month=1, day=1, hour=0, minute=0, second=0, microsecond=0)
num_ticks = 13
end_fmt = '%B %Y'
cur_fmt = '%B %d'
tick_fmt = '%B %Y'
if level == 'days':
max_date = GetNextMonth(cur_date)
min_date = cur_date.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
num_ticks = (max_date - min_date).days + 1
end_fmt = '%B %d %Y'
cur_fmt = '%H:00'
tick_fmt = '%B %d'
if level == 'hours':
min_date = cur_date.replace(hour=0, minute=0, second=0, microsecond=0)
max_date = min_date + timedelta(days=1)
num_ticks = 13
end_fmt = '%B %d %Y'
cur_fmt = '%H:%M'
tick_fmt = '%H:00'
return (max_date, min_date, num_ticks, tick_fmt, cur_fmt, end_fmt)
def main():
max_date = datetime.utcnow()
min_date = datetime(year=2008, month=3, day=13)
cur_date = datetime.utcnow()
active = False
last_pos = 0, 0
x_ratio, y_ratio = 0.0, 0.0
# Rates encompass: X years, 12 months, 28-31 days, 24 hours.
levels = ['hours', 'days', 'months', 'years']
level = levels[-1]
while True:
ClearSurface()
active, last_pos, x_ratio, y_ratio, y_delta = ProcessMotion(active, last_pos)
if active:
active_range = x_ratio / (1.0 / len(levels))
level = levels[int(active_range)]
end_date, start_date, num_ticks, tick_fmt, cur_fmt, end_fmt = GetRange(level, max_date, min_date, cur_date)
delta = end_date - start_date
secs_delta = delta.days * (3600*24) + delta.seconds
if not active:
DrawTargetBox()
else:
cur_date = end_date - timedelta(seconds=int(y_ratio * secs_delta))
# This is a small fix to keep the dates from going haywire at the top end.
if cur_date == end_date:
cur_date -= timedelta(seconds=1)
labels = [(end_date - timedelta(seconds=(secs_delta*i)/num_ticks)).strftime(end_fmt if i==0 or i==num_ticks-1 else tick_fmt) for i in xrange(num_ticks)]
DrawLabels(labels, int(0.85 * iphone_dims[0]), 0)
cur_delta = end_date - cur_date
cur_secs_delta = cur_delta.days * (3600*24) + cur_delta.seconds
DrawLabel(int(0.85 * iphone_dims[0]), (cur_secs_delta * iphone_dims[1]) / secs_delta,
cur_date.strftime(cur_fmt), rect_color, border=True)
pygame.display.flip()
if __name__ == '__main__':
main()
| [
"datetime.datetime",
"sys.exit",
"pygame.init",
"pygame.event.get",
"datetime.datetime.utcnow",
"pygame.display.set_mode",
"pygame.display.flip",
"pygame.display.list_modes",
"pygame.display.get_surface",
"pygame.font.init",
"pygame.display.set_caption",
"pygame.Color",
"datetime.timedelta",
"pygame.font.SysFont"
] | [((161, 174), 'pygame.init', 'pygame.init', ([], {}), '()\n', (172, 174), False, 'import pygame\n'), ((175, 193), 'pygame.font.init', 'pygame.font.init', ([], {}), '()\n', (191, 193), False, 'import pygame\n'), ((205, 237), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""fixed"""', '(14)'], {}), "('fixed', 14)\n", (224, 237), False, 'import pygame\n'), ((327, 359), 'pygame.Color', 'pygame.Color', (['(127)', '(127)', '(127)', '(255)'], {}), '(127, 127, 127, 255)\n', (339, 359), False, 'import pygame\n'), ((373, 405), 'pygame.Color', 'pygame.Color', (['(237)', '(237)', '(237)', '(255)'], {}), '(237, 237, 237, 255)\n', (385, 405), False, 'import pygame\n'), ((423, 455), 'pygame.Color', 'pygame.Color', (['(255)', '(255)', '(255)', '(255)'], {}), '(255, 255, 255, 255)\n', (435, 455), False, 'import pygame\n'), ((469, 498), 'pygame.Color', 'pygame.Color', (['(67)', '(67)', '(67)', '(255)'], {}), '(67, 67, 67, 255)\n', (481, 498), False, 'import pygame\n'), ((512, 542), 'pygame.Color', 'pygame.Color', (['(217)', '(67)', '(67)', '(255)'], {}), '(217, 67, 67, 255)\n', (524, 542), False, 'import pygame\n'), ((604, 631), 'pygame.display.list_modes', 'pygame.display.list_modes', ([], {}), '()\n', (629, 631), False, 'import pygame\n'), ((990, 1093), 'pygame.display.set_mode', 'pygame.display.set_mode', (['window_dimensions', '(pygame.DOUBLEBUF | pygame.HWSURFACE | pygame.RESIZABLE)'], {}), '(window_dimensions, pygame.DOUBLEBUF | pygame.\n HWSURFACE | pygame.RESIZABLE)\n', (1013, 1093), False, 'import pygame\n'), ((1163, 1201), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Timeline"""'], {}), "('Timeline')\n", (1189, 1201), False, 'import pygame\n'), ((1230, 1258), 'pygame.display.get_surface', 'pygame.display.get_surface', ([], {}), '()\n', (1256, 1258), False, 'import pygame\n'), ((2705, 2723), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (2721, 2723), False, 'import pygame\n'), ((5420, 5437), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (5435, 5437), False, 'from datetime import timedelta, datetime\n'), ((5451, 5487), 'datetime.datetime', 'datetime', ([], {'year': '(2008)', 'month': '(3)', 'day': '(13)'}), '(year=2008, month=3, day=13)\n', (5459, 5487), False, 'from datetime import timedelta, datetime\n'), ((5501, 5518), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (5516, 5518), False, 'from datetime import timedelta, datetime\n'), ((6902, 6923), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (6921, 6923), False, 'import pygame\n'), ((2804, 2815), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2812, 2815), False, 'import sys\n'), ((5218, 5235), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (5227, 5235), False, 'from datetime import timedelta, datetime\n'), ((6399, 6419), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (6408, 6419), False, 'from datetime import timedelta, datetime\n'), ((6448, 6493), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(secs_delta * i / num_ticks)'}), '(seconds=secs_delta * i / num_ticks)\n', (6457, 6493), False, 'from datetime import timedelta, datetime\n')] |
import pytest
from sdk.data.ByteArrayFilter import ByteArrayFilter
def refernce_serialization(byte_num, byte_val):
return {"byte" + str(byte_num): byte_val}
def test_serializing_empty_ByteArrayFilter():
byte_array_filter = ByteArrayFilter()
assert byte_array_filter.to_json() == "{}"
@pytest.mark.parametrize("byte_num,byte_val", [
(0, 234),
(1, 234),
(2, 12),
(3, 99),
(5, 12),
(6, 0),
(7, 255),
])
def test_serializng_ByteArrayFilter(byte_num, byte_val):
reference_serialization = {"byte" + str(byte_num): byte_val}
byte_array_filter = ByteArrayFilter()
byte_array_filter.set_byte(byte_num, byte_val)
assert byte_array_filter.to_map() == refernce_serialization(byte_num, byte_val)
@pytest.mark.parametrize("byte_num,byte_val", [
(8, 12),
(-1, 12),
(2, 399),
(3, -12)
])
def test_wrong_serializng_ByteArrayFilter(byte_num, byte_val):
byte_array_filter = ByteArrayFilter()
byte_array_filter.set_byte(byte_num, byte_val)
assert byte_array_filter.to_map() != refernce_serialization(byte_num, byte_val)
def test_custom_equality():
byte_array_filter = ByteArrayFilter()
byte_array_filter.set_byte(2, 20)
byte_array_filter.set_byte(1, 21)
byte_array_filter.set_byte(0, 22)
byte_array_filter_another = ByteArrayFilter()
byte_array_filter_another.set_byte(2, 20)
byte_array_filter_another.set_byte(1, 21)
byte_array_filter_another.set_byte(0, 22)
assert byte_array_filter == byte_array_filter_another
def test_custom_equality_fail():
byte_array_filter = ByteArrayFilter()
byte_array_filter.set_byte(2, 20)
byte_array_filter.set_byte(1, 21)
byte_array_filter.set_byte(0, 22)
assert not byte_array_filter == {}
| [
"sdk.data.ByteArrayFilter.ByteArrayFilter",
"pytest.mark.parametrize"
] | [((304, 419), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""byte_num,byte_val"""', '[(0, 234), (1, 234), (2, 12), (3, 99), (5, 12), (6, 0), (7, 255)]'], {}), "('byte_num,byte_val', [(0, 234), (1, 234), (2, 12),\n (3, 99), (5, 12), (6, 0), (7, 255)])\n", (327, 419), False, 'import pytest\n'), ((749, 838), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""byte_num,byte_val"""', '[(8, 12), (-1, 12), (2, 399), (3, -12)]'], {}), "('byte_num,byte_val', [(8, 12), (-1, 12), (2, 399),\n (3, -12)])\n", (772, 838), False, 'import pytest\n'), ((236, 253), 'sdk.data.ByteArrayFilter.ByteArrayFilter', 'ByteArrayFilter', ([], {}), '()\n', (251, 253), False, 'from sdk.data.ByteArrayFilter import ByteArrayFilter\n'), ((593, 610), 'sdk.data.ByteArrayFilter.ByteArrayFilter', 'ByteArrayFilter', ([], {}), '()\n', (608, 610), False, 'from sdk.data.ByteArrayFilter import ByteArrayFilter\n'), ((940, 957), 'sdk.data.ByteArrayFilter.ByteArrayFilter', 'ByteArrayFilter', ([], {}), '()\n', (955, 957), False, 'from sdk.data.ByteArrayFilter import ByteArrayFilter\n'), ((1147, 1164), 'sdk.data.ByteArrayFilter.ByteArrayFilter', 'ByteArrayFilter', ([], {}), '()\n', (1162, 1164), False, 'from sdk.data.ByteArrayFilter import ByteArrayFilter\n'), ((1312, 1329), 'sdk.data.ByteArrayFilter.ByteArrayFilter', 'ByteArrayFilter', ([], {}), '()\n', (1327, 1329), False, 'from sdk.data.ByteArrayFilter import ByteArrayFilter\n'), ((1586, 1603), 'sdk.data.ByteArrayFilter.ByteArrayFilter', 'ByteArrayFilter', ([], {}), '()\n', (1601, 1603), False, 'from sdk.data.ByteArrayFilter import ByteArrayFilter\n')] |
from multiprocessing import Value
import dash_core_components as dcc
import dash_html_components as html
import matplotlib
import matplotlib.pyplot as plt
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
from easy_dash import EasyDash
from .IntegrationTests import IntegrationTests
from .utils import assert_clean_console, invincible, wait_for
matplotlib.use("Agg")
class Tests(IntegrationTests):
def setUp(self):
def wait_for_element_by_id(id):
wait_for(
lambda: None
is not invincible(lambda: self.driver.find_element_by_id(id))
)
return self.driver.find_element_by_id(id)
self.wait_for_element_by_id = wait_for_element_by_id
def test_auto_callback(self):
app = EasyDash("auto_callback")
app.layout = html.Div(
[
dcc.Input(id="input", value="initial value"),
html.Div(html.Div([1.5, None, "string", html.Div(id="output1")])),
dcc.Input(id="fixed_message", value="junk"),
html.Div(id="output2"),
]
)
call_count = Value("i", 0)
@app.auto_callback()
def update_output1(input):
call_count.value = call_count.value + 1
return input
@app.auto_callback()
def update_children_of_output2(value_of_input, state_fixed_message):
return value_of_input + "::" + state_fixed_message
self.startServer(app, 8050)
self.wait_for_text_to_equal("#output1", "initial value")
self.percy_snapshot(name="auto-callback-1")
self.wait_for_text_to_equal("#output2", "initial value::junk")
self.percy_snapshot(name="auto-callback-2")
input1 = self.wait_for_element_by_id("input")
chain = (
ActionChains(self.driver)
.click(input1)
.send_keys(Keys.HOME)
.key_down(Keys.SHIFT)
.send_keys(Keys.END)
.key_up(Keys.SHIFT)
.send_keys(Keys.DELETE)
)
chain.perform()
input1.send_keys("hello world")
self.wait_for_text_to_equal("#output1", "hello world")
self.percy_snapshot(name="auto-callback-3")
self.wait_for_text_to_equal("#output2", "hello world::junk")
self.percy_snapshot(name="auto-callback-4")
self.assertEqual(
call_count.value,
# an initial call to retrieve the first value
# and one for clearing the input
2 +
# one for each hello world character
len("hello world"),
)
assert_clean_console(self)
def test_mpl_callback(self):
app = EasyDash("mpl_callback")
app.layout = html.Div(
[
dcc.Input(id="plot_size", value="5"),
html.Div(id="output_mpl"),
html.Div(id="plot"),
]
)
@app.auto_callback()
def update_output_mpl(value_of_plot_size):
return "New Size: {}".format(value_of_plot_size)
@app.mpl_callback()
def update_children_of_plot(value_of_plot_size):
height = float(value_of_plot_size)
fig, ax1 = plt.subplots(1, 1, figsize=(height, 5))
ax1.plot([0, 1, 1, 0], [0, 0, 1, 1], "r-.")
ax1.set_title(height)
return fig
self.startServer(app, 8052)
self.wait_for_text_to_equal("#output_mpl", "New Size: 5")
self.percy_snapshot(name="mpl-callback-1")
input_ps = self.wait_for_element_by_id("plot_size")
chain = (
ActionChains(self.driver)
.click(input_ps)
.send_keys(Keys.HOME)
.key_down(Keys.SHIFT)
.send_keys(Keys.END)
.key_up(Keys.SHIFT)
.send_keys(Keys.DELETE)
)
chain.perform()
input_ps.send_keys("20")
self.wait_for_text_to_equal("#output_mpl", "New Size: 20")
self.percy_snapshot(name="mpl-callback-2")
assert_clean_console(self)
| [
"matplotlib.use",
"multiprocessing.Value",
"selenium.webdriver.common.action_chains.ActionChains",
"easy_dash.EasyDash",
"dash_core_components.Input",
"matplotlib.pyplot.subplots",
"dash_html_components.Div"
] | [((411, 432), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (425, 432), False, 'import matplotlib\n'), ((835, 860), 'easy_dash.EasyDash', 'EasyDash', (['"""auto_callback"""'], {}), "('auto_callback')\n", (843, 860), False, 'from easy_dash import EasyDash\n'), ((1198, 1211), 'multiprocessing.Value', 'Value', (['"""i"""', '(0)'], {}), "('i', 0)\n", (1203, 1211), False, 'from multiprocessing import Value\n'), ((2775, 2799), 'easy_dash.EasyDash', 'EasyDash', (['"""mpl_callback"""'], {}), "('mpl_callback')\n", (2783, 2799), False, 'from easy_dash import EasyDash\n'), ((3301, 3340), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(height, 5)'}), '(1, 1, figsize=(height, 5))\n', (3313, 3340), True, 'import matplotlib.pyplot as plt\n'), ((922, 966), 'dash_core_components.Input', 'dcc.Input', ([], {'id': '"""input"""', 'value': '"""initial value"""'}), "(id='input', value='initial value')\n", (931, 966), True, 'import dash_core_components as dcc\n'), ((1067, 1110), 'dash_core_components.Input', 'dcc.Input', ([], {'id': '"""fixed_message"""', 'value': '"""junk"""'}), "(id='fixed_message', value='junk')\n", (1076, 1110), True, 'import dash_core_components as dcc\n'), ((1128, 1150), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""output2"""'}), "(id='output2')\n", (1136, 1150), True, 'import dash_html_components as html\n'), ((2861, 2897), 'dash_core_components.Input', 'dcc.Input', ([], {'id': '"""plot_size"""', 'value': '"""5"""'}), "(id='plot_size', value='5')\n", (2870, 2897), True, 'import dash_core_components as dcc\n'), ((2915, 2940), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""output_mpl"""'}), "(id='output_mpl')\n", (2923, 2940), True, 'import dash_html_components as html\n'), ((2958, 2977), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""plot"""'}), "(id='plot')\n", (2966, 2977), True, 'import dash_html_components as html\n'), ((1024, 1046), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""output1"""'}), "(id='output1')\n", (1032, 1046), True, 'import dash_html_components as html\n'), ((1889, 1914), 'selenium.webdriver.common.action_chains.ActionChains', 'ActionChains', (['self.driver'], {}), '(self.driver)\n', (1901, 1914), False, 'from selenium.webdriver.common.action_chains import ActionChains\n'), ((3701, 3726), 'selenium.webdriver.common.action_chains.ActionChains', 'ActionChains', (['self.driver'], {}), '(self.driver)\n', (3713, 3726), False, 'from selenium.webdriver.common.action_chains import ActionChains\n')] |
"""An indicator that shows if one or more ROEntry widgets
have been set to nondefault values.
History:
2003-04-14 ROwen
2003-04-15 ROwen Modified to use opscore.RO.Wdg.CtxMenu 2003-04-15.
2003-04-24 ROwen Modified to use addCallback if available.
2003-07-07 ROwen Modified to use opscore.RO.MathUtil.asList.
2003-11-07 ROwen Modified to not create a StringVar unless it'll be used.
2003-11-18 ROwen Modified to use SeqUtil instead of MathUtil.
2004-08-11 ROwen Define __all__ to restrict import.
2004-09-14 ROwen Tweaked the imports.
2015-09-24 ROwen Replace "== None" with "is None" to modernize the code.
"""
__all__ = ['ChangedIndicator']
from six.moves import tkinter
import opscore.RO.SeqUtil
from .CtxMenu import CtxMenuMixin
class ChangedIndicator (tkinter.Label, CtxMenuMixin):
def __init__(self,
master,
wdgOrSet,
var = None,
helpText = None,
helpURL = None,
clearMenu = "Clear",
defMenu = "Default",
**kargs):
"""Creates a new ChangedIndicator.
Inputs:
- wdgOrSet one or more ROEntry widgets
- var variable to contain current value ("" or "!")
- helpText text for hot help
- helpURL URL for longer help
- all remaining keyword arguments are used to configure the Menu
"""
if var is None:
var = tkinter.StringVar()
self.__var = var
self.__inputCont = None
self.wdgSet = []
self.helpText = helpText
self.clearMenu = clearMenu
self.defMenu = defMenu
kargs.setdefault("width", 1)
tkinter.Label.__init__(self,
master = master,
textvariable = self.__var,
**kargs)
CtxMenuMixin.__init__(self,
helpURL = helpURL,
)
if wdgOrSet:
self.addWdg(wdgOrSet)
def addWdg(self, wdgOrSet):
"""Adds a single ROEntry widget or set of widgets to control.
Then sets the enabled state appropriately for all widgets.
"""
if wdgOrSet is None:
return
wdgSet = opscore.RO.SeqUtil.asList(wdgOrSet)
self.wdgSet += wdgSet
for wdg in wdgSet:
try:
wdg.addCallback(self._wdgChanged)
except AttributeError:
var = wdg.getVar()
var.trace_variable('w', self._wdgChanged)
def ctxConfigMenu(self, menu):
if self.clearMenu:
menu.add_command(label = self.clearMenu, command = self.clear)
if self.defMenu:
menu.add_command(label = self.defMenu, command = self.restoreDefault)
return True
def getVar(self):
return self.__var
def getString(self):
return str(self.__var.get())
def isChanged(self):
return bool(self.__var.get())
def setEnable(self, doEnable):
"""Changes the enable state
"""
if doEnable:
self.configure(state="normal")
else:
self.configure(state="disabled")
def restoreDefault(self):
"""Restores all controlled widgets to their default values.
"""
for wdg in self.wdgSet:
wdg.restoreDefault()
def clear(self):
"""Restores all controlled widgets to their default values.
"""
for wdg in self.wdgSet:
wdg.clear()
def _wdgChanged(self, *args, **kargs):
"""Called when any widget is changed"""
isChanged = False
for wdg in self.wdgSet:
if wdg.getDefault() != wdg.getString():
isChanged = True
if isChanged:
self.__var.set("!")
else:
self.__var.set("")
| [
"six.moves.tkinter.Label.__init__",
"six.moves.tkinter.StringVar"
] | [((1639, 1716), 'six.moves.tkinter.Label.__init__', 'tkinter.Label.__init__', (['self'], {'master': 'master', 'textvariable': 'self.__var'}), '(self, master=master, textvariable=self.__var, **kargs)\n', (1661, 1716), False, 'from six.moves import tkinter\n'), ((1391, 1410), 'six.moves.tkinter.StringVar', 'tkinter.StringVar', ([], {}), '()\n', (1408, 1410), False, 'from six.moves import tkinter\n')] |
import glob
import pandas as pd
filenames = glob.glob('../../data/aides/aides-2020-*.csv')
arr = []
for filename in filenames:
print(filename.split("../../data/aides/aides-")[1].split(".csv")[0])
df = pd.read_csv(filename)
for dt in df.date_paiement.unique():
mydict = {}
df['Montant'] = df['Montant'].astype(float)
mydict['date_paiement'] = dt
mydict['nombre'] = df[df['date_paiement'] == dt].shape[0]
mydict['montant'] = df[df['date_paiement'] == dt]['Montant'].sum()
mydict['file'] = filename.split("../../data/aides/aides-")[1]
arr.append(mydict)
df = pd.DataFrame(arr)
df = df.groupby(['date_paiement','file'],as_index=False).sum()
df.to_csv("../../data/stats/stats-files.csv",index=False)
df2 = pd.read_csv('../../data/stats/extract-stats.csv')
df3 = pd.merge(df,df2,on='date_paiement',how='outer')
df3.sort_values(by=['date_paiement'])
arr = []
for index,row in df3.iterrows():
mydict = row
if(round(row['nombre_x'],0) == round(row['nombre_y'],0)):
mydict['isNombreGood'] = True
else:
mydict['isSumGood'] = False
if(round(row['montant_x'],0) == round(row['montant_y'],0)):
mydict['isSumGood'] = True
else:
mydict['isSumGood'] = False
arr.append(mydict)
df4 = pd.DataFrame(arr)
df4 = df4.sort_values(by=['date_paiement'])
df4.to_csv("../../data/stats/stats-agg.csv",index=False)
df5 = df4[(df4['isNombreGood'] == False) | (df4['isSumGood'] == False)]
df5.to_csv("../../data/stats/stats-agg-errors.csv",index=False)
| [
"pandas.DataFrame",
"pandas.merge",
"glob.glob",
"pandas.read_csv"
] | [((44, 90), 'glob.glob', 'glob.glob', (['"""../../data/aides/aides-2020-*.csv"""'], {}), "('../../data/aides/aides-2020-*.csv')\n", (53, 90), False, 'import glob\n'), ((626, 643), 'pandas.DataFrame', 'pd.DataFrame', (['arr'], {}), '(arr)\n', (638, 643), True, 'import pandas as pd\n'), ((775, 824), 'pandas.read_csv', 'pd.read_csv', (['"""../../data/stats/extract-stats.csv"""'], {}), "('../../data/stats/extract-stats.csv')\n", (786, 824), True, 'import pandas as pd\n'), ((832, 882), 'pandas.merge', 'pd.merge', (['df', 'df2'], {'on': '"""date_paiement"""', 'how': '"""outer"""'}), "(df, df2, on='date_paiement', how='outer')\n", (840, 882), True, 'import pandas as pd\n'), ((1300, 1317), 'pandas.DataFrame', 'pd.DataFrame', (['arr'], {}), '(arr)\n', (1312, 1317), True, 'import pandas as pd\n'), ((210, 231), 'pandas.read_csv', 'pd.read_csv', (['filename'], {}), '(filename)\n', (221, 231), True, 'import pandas as pd\n')] |
"""Image processing tools."""
import numpy as np
from PIL import Image
def array_to_img(x, mode='YCbCr'):
return Image.fromarray(x.astype('uint8'), mode=mode)
def bicubic_rescale(image, scale):
if isinstance(scale, (float, int)):
size = (np.array(image.size) * scale).astype(int)
return image.resize(size, resample=Image.BICUBIC)
def modcrop(image, scale):
size = np.array(image.size)
size -= size % scale
return image.crop([0, 0, *size])
| [
"numpy.array"
] | [((394, 414), 'numpy.array', 'np.array', (['image.size'], {}), '(image.size)\n', (402, 414), True, 'import numpy as np\n'), ((258, 278), 'numpy.array', 'np.array', (['image.size'], {}), '(image.size)\n', (266, 278), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
import re
import string
import logging
import importlib
from .constants import PROVIDER_CODE
from ..constants import SENTENCE_ENDING_CHARS, SRE_EMAIL
from ..util import clean_content, strip_html_entities
from ..sentences import split_sentence as split_sentence_by_tagger
log = logging.getLogger('ntrust.sanitizer')
_PUNCTUATIONS = ''.join(map(re.escape, string.punctuation))
_PUNCTUATIONS_WITHOUT_ENDING_CHARS = ''.join(map(re.escape, set(string.punctuation) - set(SENTENCE_ENDING_CHARS)))
_PUNCTUATIONS_WITHOUT_AT_SIGN = ''.join(map(re.escape, set(string.punctuation) - set(['@'])))
_PUNCTUATIONS_END_OF_LINE_AT_ARTICLE = '|'.join(map(re.escape, ['[', '][', ']']))
RE_PUNCTUATION_LINE = re.compile(
'^[\s]*[%s][%s]*[\s]*$' % (_PUNCTUATIONS_WITHOUT_ENDING_CHARS, _PUNCTUATIONS_WITHOUT_ENDING_CHARS),
)
RE_PUNCTUATION_FIRST_IN_LINE = re.compile(
'^([\s]*[%s][%s]*)' % (_PUNCTUATIONS, _PUNCTUATIONS),
)
RE_PUNCTUATION_END_OF_LINE_FOR_BYLINE = re.compile(
'([\s]*[%s][%s]*)$' % (_PUNCTUATIONS_WITHOUT_AT_SIGN, _PUNCTUATIONS_WITHOUT_AT_SIGN),
)
RE_PUNCTUATION_END_OF_LINE_FOR_ARTICLE = re.compile(
'([\s]*(%s)+)$' % (_PUNCTUATIONS_END_OF_LINE_AT_ARTICLE),
)
RE_MULTIPLE_NEW_LINE = re.compile('[\n][\n][\n][\n]*')
RE_SPACE = re.compile('([\s]+)')
def remove_punctuations(lines):
if type(lines) in (str,):
lines = lines.split('\n')
nn = 0
for n, i in enumerate(lines[::-1]):
nn = n
if not i.strip():
continue
if not RE_PUNCTUATION_LINE.search(i):
break
if nn > 0:
lines = lines[:-1 * nn]
return list(filter(lambda x: RE_PUNCTUATION_LINE.search(x) is None, lines))
RE_SENTENCE_ENDING_CHARS = re.compile('([\w!][%s]\s)' % ''.join(map(re.escape, SENTENCE_ENDING_CHARS)))
def split_sentence(s):
return list(split_sentence_by_tagger(s))
def remove_related_articles(lines, item_chars=None):
log.debug('check related articles')
if item_chars is None:
raise RuntimeError('`item_chars` must be given.')
RE_LIST_ITEM = re.compile('^([%s][\s]*).*' % ''.join(map(re.escape, item_chars)))
for n, i in enumerate(lines[::-1]):
if not i.strip():
continue
m = RE_LIST_ITEM.search(i.strip())
if m is None:
break
lines[len(lines) - (n + 1)] = ''
if n < 1:
log.debug('related article not found.')
return lines
def remove_macros(lines, patterns=None):
log.debug('check related articles')
if patterns is None:
raise RuntimeError('`patterns` must be given.')
for n, i in enumerate(lines):
for r in patterns:
if r['s'].search(i) is None:
continue
log.debug('found pattern, %s', r)
i = r['r'](i)
lines[n] = i
return lines
def remove_redundants(lines, patterns=None, reverse=True, line_based=True):
if patterns is None:
raise RuntimeError('`patterns` must be given.')
if reverse:
l = lines[::-1]
else:
l = lines
log.debug('check redundants')
for n, i in enumerate(l):
if not i.strip():
continue
m = list(filter(lambda x: x.search(i) is not None, patterns))
if not m:
break
if line_based:
new_line = ''
else:
new_line = i
for p in m:
new_line = p.sub('', new_line)
log.debug('redundant found, `%s`' % i)
if reverse:
idx = len(lines) - (n + 1)
else:
idx = n
lines[idx] = new_line
return lines
RE_BYLINE = re.compile('[\s]+\/(.*)기자[\s]*')
def remove_gija_byline(lines, pattern=None, reverse=True):
if pattern is None:
pattern = RE_BYLINE
log.debug('remove byline')
if reverse:
l = lines[::-1]
else:
l = lines
for n, i in enumerate(l):
if not i.strip():
continue
m = pattern.search(i)
if m is None:
break
log.debug('byline found, %d: `%s`' % (n, i))
ss = split_sentence(i)
if reverse:
idx = len(lines) - (n + 1)
else:
idx = n
lines[idx] = ' '.join(filter(lambda x: pattern.search(x) is None, ss))
return lines
class Processor(object):
def __init__(self, provider_code, default_provider_code=None):
if provider_code not in PROVIDER_CODE:
if default_provider_code is not None:
provider_code = default_provider_code
else:
raise RuntimeError('unknown `provider_code` was given, `%s`', provider_code)
try:
self.m = importlib.import_module('.sanitizer.providers.p_{}'.format(provider_code), package='ntrust')
except ImportError:
log.debug('trying to import provider processor.')
self.m = importlib.import_module('.sanitizer.providers.default', package='ntrust')
def sanitize(self, content):
content = clean_content(content, strip=True)
content = strip_html_entities(content.strip())
lines = remove_punctuations(content)
lines = list(map(str.rstrip, lines))
s = self.m.Sanitizer()
lines = s.remove(lines)
lines = list(map(str.rstrip, lines))
lines = remove_punctuations(lines)
content = RE_MULTIPLE_NEW_LINE.sub('\n\n', '\n'.join(lines).strip())
return dict(
content=content,
byline=s.get_byline(),
)
class BaseSanitizer(object):
byline = None
def __init__(self):
pass
def remove(self, lines=None):
return self._remove(lines)
def _remove(self, lines=None):
raise NotImplemented
def get_byline(self):
b = self._get_byline()
if b is None:
return b
b = sorted(b, reverse=True)
return ' '.join(b).strip()
def _get_byline(self):
return None
def check_byline(self, b):
if len(RE_SPACE.findall(b)) > 4:
return False
return True
def add_byline(self, b):
if self.byline is None:
self.byline = []
if type(b) not in (str,):
b = ' '.join(list(b))
b = RE_PUNCTUATION_FIRST_IN_LINE.sub('', b.strip())
b = RE_PUNCTUATION_END_OF_LINE_FOR_BYLINE.sub('', b.strip())
self.byline.append(b)
return
class BylineInEndOfLine(object):
RE_EMAIL_END_OF_LINE = re.compile('[\s]*({})[\s]*$'.format(SRE_EMAIL))
RE_GIJA = (
re.compile('([\w\s\(\)\=\/][\w\s\(\)\=\/]*[\s]+(기자|위원|특파원|논설실장))[\s·]+'),
# `·` was added for 01600301-01600301.20160608141220647-removed.txt
re.compile('([\w][\w]*(기자|위원|특파원|논설실장))[\s·]+'),
)
RE_REPORTER = (
re.compile('([\w][\w][\s\=\/]*[\w][\w]*[\s]*{}[\s]*)'.format(SRE_EMAIL)),
)
RE_NOT_BYLINE = (
re.compile('[\s]*기사제보[\s]*'),
)
def _remove(self, lines=None):
if lines is None:
lines = self.content.strip().split('\n')
for method in (
self._remove_by_email,
self._remove_by_broken_email,
self._remove_gija,
self._remove_reporter,
self._remove_useless,
# self._generate_method_remove_by_regex(self.RE_GIJA),
):
lines = method(lines)
return lines
def _remove_by_email(self, lines):
'''
* If email address is found from the last line, it will be byline and removed.
* If email pattern found in the line, split the line by the ending
charaters(`.`, `?`, `!`) and remove the part, which contains the email pattern.
'''
def _0(x):
if self.RE_EMAIL_END_OF_LINE.search(x) is not None and self.check_byline(x):
return True
return False
def _1(x):
if self.RE_EMAIL_END_OF_LINE.search(x) is None:
return True
if not self.check_byline(x):
return True
return False
log.debug('check email')
for n, i in enumerate(lines[::-1]):
if not i.strip():
continue
m = self.RE_EMAIL_END_OF_LINE.search(i)
if m is None:
break
log.debug('found email, `%s`', m.groups())
ss = split_sentence(i)
if len(list(filter(lambda x: x.search(i) is not None, self.RE_NOT_BYLINE))) < 1:
self.add_byline(filter(_0, ss))
lines[len(lines) - (n + 1)] = ' '.join(filter(_1, ss))
return lines
def _remove_by_broken_email(self, lines):
log.debug('check broken email')
for n, i in enumerate(lines[::-1]):
if not i.strip():
continue
if '@' not in i:
break
log.debug('found broken email, `%s`', i)
ss = split_sentence(i)
lines[len(lines) - (n + 1)] = ' '.join(filter(lambda x: '@' not in x, ss))
break
return lines
def _remove_gija(self, lines):
log.debug('check gija')
nn = -1
def _0(r, x):
if r.search(x + ' ') is not None and self.check_byline(x):
return True
return False
def _1(r, x):
if r.search(x + ' ') is None:
return True
if not self.check_byline(x):
return True
return False
for n, i in enumerate(lines[::-1]):
if not i.strip():
continue
if len(i.strip()) > 40:
positions = list(map(
lambda y: y.search(i.strip() + ' ').span(),
filter(lambda x: x.search(i + ' '), self.RE_GIJA)
))
if len(list(positions)) < 1:
break
if (float(positions[-1][0]) / float(len(i.strip()))) < 0.7:
break
m = list(filter(lambda x: x.search(i + ' '), self.RE_GIJA))
if not m:
continue
nn = n
log.debug('found gija, %d: `%s`', n, map(lambda x: x.search(i + ' ').groups(), m))
ss = split_sentence(i)
self.add_byline(filter(lambda y: list(filter(lambda x: _0(x, y), m)), ss))
lines[len(lines) - (n + 1)] = ' '.join(filter(lambda y: list(filter(lambda x: _1(x, y), m)), ss))
if nn < 0:
log.debug('gija was not found.')
return lines[:len(lines) - nn]
def _remove_reporter(self, lines):
log.debug('check reporter')
nn = -1
def _0(r, x):
if r.search(x + ' ') is not None and self.check_byline(x):
return True
return False
def _1(r, x):
if r.search(x + ' ') is None:
return True
if not self.check_byline(x):
return True
return False
for n, i in enumerate(lines[::-1]):
if not i.strip():
continue
if len(i.strip()) > 40:
positions = list(map(
lambda y: y.search(i.strip() + ' ').span(),
filter(lambda x: x.search(i + ' '), self.RE_REPORTER)
))
if len(list(positions)) < 1:
break
if (float(positions[-1][0]) / float(len(i.strip()))) < 0.7:
break
m = list(filter(lambda x: x.search(i + ' '), self.RE_REPORTER))
if not m:
continue
log.debug('found reporter, %d: `%s`', n, map(lambda x: x.search(i + ' ').groups(), m))
nn = n
ss = split_sentence(i)
self.add_byline(filter(lambda y: list(filter(lambda x: _0(x, y), m)), ss))
lines[len(lines) - (n + 1)] = ' '.join(filter(lambda y: list(filter(lambda x: _1(x, y), m)), ss))
if nn < 0:
log.debug('reporter was not found.')
return lines[:len(lines) - nn]
def _remove_useless(self, lines):
for n, i in enumerate(lines[::-1]):
if not i.strip():
continue
if not RE_PUNCTUATION_END_OF_LINE_FOR_ARTICLE.search(i):
continue
lines[len(lines) - (n + 1)] = RE_PUNCTUATION_END_OF_LINE_FOR_ARTICLE.sub('', i)
break
return lines
def _generate_method_remove_by_regex(self, regex, help_string=None):
if help_string is None:
help_string = regex
def w(lines):
log.debug('check %s', help_string)
m = regex.search(lines[-1])
if m is None:
log.debug('%s was not found.', help_string)
return lines
ss = split_sentence(lines[-1])
if len(ss) < 2:
lines = lines[:-1]
else:
lines[-1] = ' '.join(filter(lambda x: regex.search(x) is None, ss))
log.debug('found `%s`', m.groups())
return lines
return w
def sanitize_title(s):
s = clean_content(s, strip=True)
s = s.replace('\n', ' ')
s = strip_html_entities(s)
return s
| [
"logging.getLogger",
"importlib.import_module",
"re.compile"
] | [((305, 342), 'logging.getLogger', 'logging.getLogger', (['"""ntrust.sanitizer"""'], {}), "('ntrust.sanitizer')\n", (322, 342), False, 'import logging\n'), ((719, 835), 're.compile', 're.compile', (["('^[\\\\s]*[%s][%s]*[\\\\s]*$' % (_PUNCTUATIONS_WITHOUT_ENDING_CHARS,\n _PUNCTUATIONS_WITHOUT_ENDING_CHARS))"], {}), "('^[\\\\s]*[%s][%s]*[\\\\s]*$' % (_PUNCTUATIONS_WITHOUT_ENDING_CHARS,\n _PUNCTUATIONS_WITHOUT_ENDING_CHARS))\n", (729, 835), False, 'import re\n'), ((868, 933), 're.compile', 're.compile', (["('^([\\\\s]*[%s][%s]*)' % (_PUNCTUATIONS, _PUNCTUATIONS))"], {}), "('^([\\\\s]*[%s][%s]*)' % (_PUNCTUATIONS, _PUNCTUATIONS))\n", (878, 933), False, 'import re\n'), ((980, 1081), 're.compile', 're.compile', (["('([\\\\s]*[%s][%s]*)$' % (_PUNCTUATIONS_WITHOUT_AT_SIGN,\n _PUNCTUATIONS_WITHOUT_AT_SIGN))"], {}), "('([\\\\s]*[%s][%s]*)$' % (_PUNCTUATIONS_WITHOUT_AT_SIGN,\n _PUNCTUATIONS_WITHOUT_AT_SIGN))\n", (990, 1081), False, 'import re\n'), ((1126, 1193), 're.compile', 're.compile', (["('([\\\\s]*(%s)+)$' % _PUNCTUATIONS_END_OF_LINE_AT_ARTICLE)"], {}), "('([\\\\s]*(%s)+)$' % _PUNCTUATIONS_END_OF_LINE_AT_ARTICLE)\n", (1136, 1193), False, 'import re\n'), ((1225, 1256), 're.compile', 're.compile', (['"""[\n][\n][\n][\n]*"""'], {}), "('[\\n][\\n][\\n][\\n]*')\n", (1235, 1256), False, 'import re\n'), ((1268, 1290), 're.compile', 're.compile', (['"""([\\\\s]+)"""'], {}), "('([\\\\s]+)')\n", (1278, 1290), False, 'import re\n'), ((3651, 3686), 're.compile', 're.compile', (['"""[\\\\s]+\\\\/(.*)기자[\\\\s]*"""'], {}), "('[\\\\s]+\\\\/(.*)기자[\\\\s]*')\n", (3661, 3686), False, 'import re\n'), ((6584, 6675), 're.compile', 're.compile', (['"""([\\\\w\\\\s\\\\(\\\\)\\\\=\\\\/][\\\\w\\\\s\\\\(\\\\)\\\\=\\\\/]*[\\\\s]+(기자|위원|특파원|논설실장))[\\\\s·]+"""'], {}), "(\n '([\\\\w\\\\s\\\\(\\\\)\\\\=\\\\/][\\\\w\\\\s\\\\(\\\\)\\\\=\\\\/]*[\\\\s]+(기자|위원|특파원|논설실장))[\\\\s·]+')\n", (6594, 6675), False, 'import re\n'), ((6742, 6792), 're.compile', 're.compile', (['"""([\\\\w][\\\\w]*(기자|위원|특파원|논설실장))[\\\\s·]+"""'], {}), "('([\\\\w][\\\\w]*(기자|위원|특파원|논설실장))[\\\\s·]+')\n", (6752, 6792), False, 'import re\n'), ((6935, 6965), 're.compile', 're.compile', (['"""[\\\\s]*기사제보[\\\\s]*"""'], {}), "('[\\\\s]*기사제보[\\\\s]*')\n", (6945, 6965), False, 'import re\n'), ((4919, 4992), 'importlib.import_module', 'importlib.import_module', (['""".sanitizer.providers.default"""'], {'package': '"""ntrust"""'}), "('.sanitizer.providers.default', package='ntrust')\n", (4942, 4992), False, 'import importlib\n')] |
import numpy as np
class Creep:
def __init__(self):
# Creep Parameters
self.DEFAULT_X = 7
self.DEFAULT_Z = -18
self.Y_MAX = 5
self.Y_MIN = -2
self.Y_MEAN = (self.Y_MIN+self.Y_MAX)/2
self.Z_STEP_UP_HEIGHT = -12
self.totalShiftSize = (self.Y_MAX-self.Y_MIN)/2
self.stanceIncrements = 1.0
# Delays
self.shiftAllInterDelay = 0.01
class Trot:
def __init__(self):
# Trot Parameters
self.DEFAULT_X = 7
self.DEFAULT_Z = -18
self.Y_MAX = 5
self.Y_MIN = -2
self.Z_STEP_UP_HEIGHT = -12
self.trotDelay = 0.2
# Dynamic Leg Placing Params
# Pins for BumpSwitch
BUMPA=18
BUMPB=17
BUMPC=22
BUMPD=27
dropDownMax = -22
dropDownIncrements=0.2
dropDownDelay=0.1
# Servo Props
servoId = [6,4,3,
9,8,7,
9,2,12,
4,3,15]
#Set-Point of Each Servo
# FixedPoints = [91 - 45,40 + 50,116,
# 94 + 45,51 + 50,0,
# 98 -45,49 + 50,90,
# 103+45,50 + 50,0 ]
FixedPoints = [20,95 ,100,
112,95 ,85,
55,95 ,95,
43,111 ,110 ]
# Direction of Motion
dirVector = [ 1,1,1,
1,1,1,
-1,-1,-1,
1,1,1,]
# Venom Props
linkLength = [5.5,0,7.6,16.3]
# linikLength = [1,0,1,1]
import board
import busio
from adafruit_servokit import ServoKit
i2c = busio.I2C(board.SCL_1,board.SDA_1)
# kit1=None
# kit2=None
# def i2c_init():
print("Connecting to the I2C Bus......")
kit1 = ServoKit(channels=16,i2c=i2c,address=0x40)
kit2= ServoKit(channels=16,i2c=i2c,address=0x41)
print("I2C Bus Connected.")
# Reference Constants
A = 0
B = 1
C = 2
D = 3
TOP = 0
MIDDLE = 1
BOTTOM = 2
CREEP = 0
TROT = 1
TROT_BACK=2
CREEP_DYN=3
| [
"busio.I2C",
"adafruit_servokit.ServoKit"
] | [((1454, 1489), 'busio.I2C', 'busio.I2C', (['board.SCL_1', 'board.SDA_1'], {}), '(board.SCL_1, board.SDA_1)\n', (1463, 1489), False, 'import busio\n'), ((1580, 1622), 'adafruit_servokit.ServoKit', 'ServoKit', ([], {'channels': '(16)', 'i2c': 'i2c', 'address': '(64)'}), '(channels=16, i2c=i2c, address=64)\n', (1588, 1622), False, 'from adafruit_servokit import ServoKit\n'), ((1629, 1671), 'adafruit_servokit.ServoKit', 'ServoKit', ([], {'channels': '(16)', 'i2c': 'i2c', 'address': '(65)'}), '(channels=16, i2c=i2c, address=65)\n', (1637, 1671), False, 'from adafruit_servokit import ServoKit\n')] |
from collections import Counter
from copy import deepcopy
from itertools import chain
import random
import string
from typing import Optional
import pytest
from odin.core import wire_inputs
CHARS = list(chain(string.ascii_letters, string.digits))
if '^' in CHARS:
CHARS.remove('^')
def rand_str(length: Optional[int] = None, min_: int = 3, max_: int = 10):
if length is None:
length = random.randint(min_, max_)
return ''.join([random.choice(CHARS) for _ in range(length)])
def dummy(a, b, c=12):
pass
def test_wire_inputs_all_inputs_present():
inputs = {'a': '^first', 'b': '^second', 'c': '^third'}
og = deepcopy(inputs)
results = {'first': rand_str(), 'second': rand_str(), 'third': rand_str(), 'last': rand_str()}
inputs = wire_inputs(inputs, results, dummy)
for k, v in inputs.items():
assert v == results[og[k].replace('^', '')]
assert inputs != results
def test_wire_inputs_missing_inputs():
inputs = {'a': '^first', 'b': '^second', 'c': '^third'}
og = deepcopy(inputs)
results = {'first': rand_str(), 'last': rand_str()}
inputs = wire_inputs(inputs, results, dummy)
for k, v in inputs.items():
if og[k].replace('^', '') in results:
assert v == results[og[k].replace('^', '')]
else:
assert v is None
assert inputs != results
def test_wire_inputs_index_lookup():
inputs = {'a': '^first.second'}
og = deepcopy(inputs)
results = {'first': {'second': rand_str()}}
inputs = wire_inputs(inputs, results, lambda a: None)
for k, v in inputs.items():
assert v == results['first']['second']
def test_wire_inputs_missing_param():
inputs = {}
results = {'first': 'b'}
inputs = wire_inputs(inputs, results, dummy)
assert 'a' in inputs
assert inputs['a'] is None
assert 'b' in inputs
assert inputs['b'] is None
assert 'c' not in inputs
def test_wire_inputs_supply_default_param():
inputs = {'c': '^first'}
results = {'first': rand_str()}
inputs = wire_inputs(inputs, results, dummy)
assert 'a' in inputs
assert inputs['a'] is None
assert 'b' in inputs
assert inputs['b'] is None
assert 'c' in inputs
assert inputs['c'] == results['first']
def test_wire_inputs_list_of_inputs():
inputs = {'a': ['^first', '^second', '^third']}
og = deepcopy(inputs)
results = {'first': rand_str(), 'second': rand_str(), 'third': rand_str(), 'last': rand_str()}
inputs = wire_inputs(inputs, results, lambda a: None)
for k, vs in inputs.items():
for i in range(len(vs)):
assert vs[i] == results[og[k][i].replace('^', '')]
assert inputs != results
def test_wire_inputs_input_pass_through():
for _ in range(100):
raw_inputs = {rand_str(): rand_str() for _ in range(random.randint(1, 5))}
gold_inputs = deepcopy(raw_inputs)
chore = lambda **kwargs: None
inputs = wire_inputs(raw_inputs, {}, chore)
assert inputs == gold_inputs
class RecordingDict(dict):
def __init__(self, *args, top=True, **kwargs):
self.requested = Counter()
super().__init__(*args, **kwargs)
if top:
self.sub = RecordingDict(top=False)
def get(self, key, default=None):
self.requested[key] += 1
default = self.sub if isinstance(default, dict) else default
return super().get(key, default)
| [
"itertools.chain",
"random.choice",
"odin.core.wire_inputs",
"collections.Counter",
"copy.deepcopy",
"random.randint"
] | [((204, 246), 'itertools.chain', 'chain', (['string.ascii_letters', 'string.digits'], {}), '(string.ascii_letters, string.digits)\n', (209, 246), False, 'from itertools import chain\n'), ((645, 661), 'copy.deepcopy', 'deepcopy', (['inputs'], {}), '(inputs)\n', (653, 661), False, 'from copy import deepcopy\n'), ((774, 809), 'odin.core.wire_inputs', 'wire_inputs', (['inputs', 'results', 'dummy'], {}), '(inputs, results, dummy)\n', (785, 809), False, 'from odin.core import wire_inputs\n'), ((1033, 1049), 'copy.deepcopy', 'deepcopy', (['inputs'], {}), '(inputs)\n', (1041, 1049), False, 'from copy import deepcopy\n'), ((1119, 1154), 'odin.core.wire_inputs', 'wire_inputs', (['inputs', 'results', 'dummy'], {}), '(inputs, results, dummy)\n', (1130, 1154), False, 'from odin.core import wire_inputs\n'), ((1445, 1461), 'copy.deepcopy', 'deepcopy', (['inputs'], {}), '(inputs)\n', (1453, 1461), False, 'from copy import deepcopy\n'), ((1523, 1567), 'odin.core.wire_inputs', 'wire_inputs', (['inputs', 'results', '(lambda a: None)'], {}), '(inputs, results, lambda a: None)\n', (1534, 1567), False, 'from odin.core import wire_inputs\n'), ((1745, 1780), 'odin.core.wire_inputs', 'wire_inputs', (['inputs', 'results', 'dummy'], {}), '(inputs, results, dummy)\n', (1756, 1780), False, 'from odin.core import wire_inputs\n'), ((2047, 2082), 'odin.core.wire_inputs', 'wire_inputs', (['inputs', 'results', 'dummy'], {}), '(inputs, results, dummy)\n', (2058, 2082), False, 'from odin.core import wire_inputs\n'), ((2365, 2381), 'copy.deepcopy', 'deepcopy', (['inputs'], {}), '(inputs)\n', (2373, 2381), False, 'from copy import deepcopy\n'), ((2494, 2538), 'odin.core.wire_inputs', 'wire_inputs', (['inputs', 'results', '(lambda a: None)'], {}), '(inputs, results, lambda a: None)\n', (2505, 2538), False, 'from odin.core import wire_inputs\n'), ((404, 430), 'random.randint', 'random.randint', (['min_', 'max_'], {}), '(min_, max_)\n', (418, 430), False, 'import random\n'), ((2872, 2892), 'copy.deepcopy', 'deepcopy', (['raw_inputs'], {}), '(raw_inputs)\n', (2880, 2892), False, 'from copy import deepcopy\n'), ((2948, 2982), 'odin.core.wire_inputs', 'wire_inputs', (['raw_inputs', '{}', 'chore'], {}), '(raw_inputs, {}, chore)\n', (2959, 2982), False, 'from odin.core import wire_inputs\n'), ((3125, 3134), 'collections.Counter', 'Counter', ([], {}), '()\n', (3132, 3134), False, 'from collections import Counter\n'), ((451, 471), 'random.choice', 'random.choice', (['CHARS'], {}), '(CHARS)\n', (464, 471), False, 'import random\n'), ((2827, 2847), 'random.randint', 'random.randint', (['(1)', '(5)'], {}), '(1, 5)\n', (2841, 2847), False, 'import random\n')] |
import os
from os.path import isfile, join
from menu import Menu
os.environ['PYGAME_HIDE_SUPPORT_PROMPT'] = "hide"
import pygame
def get_plugin():
return Radio("resources")
class Radio:
def __init__(self, path):
self.path = path
self.songs = [f for f in os.listdir(path) if isfile(join(path, f)) and self.is_audio_file(f)]
pygame.mixer.init()
def get_name(self):
return "Radio"
def get_prio(self):
return 900
def setup(self, parent_menu):
play_item = Menu("Play")
parent_menu.add_submenu(play_item)
stop_item = Menu("Stop")
stop_item.register_action(self.stop_song)
parent_menu.add_submenu(stop_item)
for song in self.songs:
item = Menu(".".join(song.split(".")[:-1]))
item.register_action(lambda state, song=song: self.play_song(song, state))
play_item.add_submenu(item)
def is_audio_file(self, file):
return file.endswith(".mp3") or file.endswith(".wav")
def play_song(self, song, state):
print("Playing %s" % song)
self.stop_song(state)
if state.get("display"):
state["display"].set_text("Playing!", center=True)
radio_state = self.get_radio_state(state)
radio_state["current_song"] = song
pygame.mixer.music.load(join(self.path, song))
pygame.mixer.music.play()
def stop_song(self, state):
radio_state = self.get_radio_state(state)
if radio_state.get("current_song"):
pygame.mixer.music.stop()
radio_state["current_song"] = None
pygame.mixer.quit()
pygame.mixer.init()
def get_radio_state(self, state):
if not state.get("radio"):
state["radio"] = {}
return state["radio"]
def cleanup(self):
pass
| [
"pygame.mixer.quit",
"os.listdir",
"pygame.mixer.init",
"os.path.join",
"pygame.mixer.music.stop",
"pygame.mixer.music.play",
"menu.Menu"
] | [((360, 379), 'pygame.mixer.init', 'pygame.mixer.init', ([], {}), '()\n', (377, 379), False, 'import pygame\n'), ((551, 563), 'menu.Menu', 'Menu', (['"""Play"""'], {}), "('Play')\n", (555, 563), False, 'from menu import Menu\n'), ((627, 639), 'menu.Menu', 'Menu', (['"""Stop"""'], {}), "('Stop')\n", (631, 639), False, 'from menu import Menu\n'), ((1433, 1458), 'pygame.mixer.music.play', 'pygame.mixer.music.play', ([], {}), '()\n', (1456, 1458), False, 'import pygame\n'), ((1687, 1706), 'pygame.mixer.quit', 'pygame.mixer.quit', ([], {}), '()\n', (1704, 1706), False, 'import pygame\n'), ((1715, 1734), 'pygame.mixer.init', 'pygame.mixer.init', ([], {}), '()\n', (1732, 1734), False, 'import pygame\n'), ((1402, 1423), 'os.path.join', 'join', (['self.path', 'song'], {}), '(self.path, song)\n', (1406, 1423), False, 'from os.path import isfile, join\n'), ((1606, 1631), 'pygame.mixer.music.stop', 'pygame.mixer.music.stop', ([], {}), '()\n', (1629, 1631), False, 'import pygame\n'), ((283, 299), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (293, 299), False, 'import os\n'), ((310, 323), 'os.path.join', 'join', (['path', 'f'], {}), '(path, f)\n', (314, 323), False, 'from os.path import isfile, join\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import loader
import os
from girder.utility.config import _mergeConfig
PACKAGE_DIR = os.path.dirname(os.path.abspath(__file__))
# Read the configuration files
_cfgs = ('minerva.dist.cfg', 'minerva.local.cfg')
for f in _cfgs:
configPath = os.path.join(PACKAGE_DIR, 'conf', f)
if os.path.exists(configPath):
_mergeConfig(configPath)
def load(info):
loader.load(info)
| [
"loader.load",
"os.path.exists",
"os.path.join",
"girder.utility.config._mergeConfig",
"os.path.abspath"
] | [((890, 915), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (905, 915), False, 'import os\n'), ((1032, 1068), 'os.path.join', 'os.path.join', (['PACKAGE_DIR', '"""conf"""', 'f'], {}), "(PACKAGE_DIR, 'conf', f)\n", (1044, 1068), False, 'import os\n'), ((1076, 1102), 'os.path.exists', 'os.path.exists', (['configPath'], {}), '(configPath)\n', (1090, 1102), False, 'import os\n'), ((1159, 1176), 'loader.load', 'loader.load', (['info'], {}), '(info)\n', (1170, 1176), False, 'import loader\n'), ((1112, 1136), 'girder.utility.config._mergeConfig', '_mergeConfig', (['configPath'], {}), '(configPath)\n', (1124, 1136), False, 'from girder.utility.config import _mergeConfig\n')] |
# encoding=UTF-8
# Copyright © 2007-2018 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the “Software”), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
'''
plain text formatter
'''
import collections
import io
from . import transliterate
del transliterate # Hi, pyflakes!
class YdpFormatter():
class UnhandledTag(Exception):
pass
def parse_body(self, node):
for subnode in node:
self(subnode)
self.write('\n\n')
def parse_p(self, node):
self._strip = True
self.write(node.text)
for subnode in node:
self(subnode)
self.write('\n')
self.write(node.tail)
def parse_a(self, node):
self.write(node.text)
for subnode in node:
self(subnode)
self.write(node.tail)
def parse_b(self, node):
tmp_color = self.set_bold_color() # pylint: disable=assignment-from-no-return
self.write(node.text)
for subnode in node:
self(subnode)
self.set_color(tmp_color)
self.write(node.tail)
def parse_i(self, node):
self.write(node.text)
for subnode in node:
self(subnode)
self.write(node.tail)
def parse_sup(self, node):
self.write('^')
self.write(node.text)
for subnode in node:
self(subnode)
self.write(node.tail)
def parse_div(self, node):
tmp_file = self._file
self._file = io.StringIO()
for subnode in node:
self(subnode)
result = str(self)
self._file = tmp_file
self.write('\n ')
self.write(result.replace('\n', '\n '))
self.write('\n\n')
self._strip = True
def parse_span(self, node):
style = node.get('style')
color = self._color_map[style]
tmp_color = self.set_color(color) # pylint: disable=assignment-from-no-return
self.write(node.text)
for subnode in node:
self(subnode)
self.set_color(tmp_color)
self.write(node.tail)
def parse_br(self, node):
self.write('\n')
self.write(node.tail)
self._strip = True
def write(self, value, strip=True):
value = value or ''
if self._strip and strip:
if value:
value = value.lstrip()
self._strip = False
self._file.write(value)
def set_color(self, value):
pass
def set_bold_color(self):
pass
def cleanup(self): # pylint: disable=no-self-use
return ''
def __init__(self, encoding):
self._file = io.StringIO()
self._strip = False
self._color_map = collections.defaultdict(str)
self._encoding = encoding
def __str__(self):
return self._file.getvalue()
def encode(self):
return str(self).encode(self._encoding, 'transliterate')
def __call__(self, node):
if node.tag.isalpha():
try:
getattr(self, 'parse_{tag}'.format(tag=node.tag))(node)
return
except AttributeError:
pass
raise YdpFormatter.UnhandledTag(node.tag)
# vim:ts=4 sts=4 sw=4 et
| [
"io.StringIO",
"collections.defaultdict"
] | [((2434, 2447), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (2445, 2447), False, 'import io\n'), ((3596, 3609), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (3607, 3609), False, 'import io\n'), ((3664, 3692), 'collections.defaultdict', 'collections.defaultdict', (['str'], {}), '(str)\n', (3687, 3692), False, 'import collections\n')] |
# coding=utf-8
# Author: <NAME>
# Date: October 08, 2018
# Edited by X<NAME> Oct 2020
#
# Description:
# Build Mention Tables (for network construction) on PubMed Abstracts
#
# Add package folder location
import os
import sys
py_include_path = os.path.abspath(os.path.join(os.path.dirname(__file__),os.pardir, os.pardir, 'include'))
sys.path.insert(0, py_include_path)
#
import db_init as db
import pandas as pd
import json
pd.set_option('display.max_rows', 100)
pd.set_option('display.max_columns', 500)
pd.set_option('display.width', 1000)
from termdictparser import Sentences
from load_dictionary import load_dictionary, build_term_parser
# import utils
if __name__ == '__main__':
#
# Init
#
#
dicttimestamp = '20180706' # raw_input("dict timestamp [yyyymmdd]:") #'20171221' # datetime.today().strftime('%Y%m%d')
with open(os.path.join(os.path.dirname(__file__), '..', 'scripts', 'var.sh')) as varfile:
defline = varfile.readline()
dicttimestamp = defline.split('=')[1].strip()
# Load Dictionary
dfD = load_dictionary(dicttimestamp=dicttimestamp, server='cns-postgres-myaura')
# Build Parser Vocabulary
tdp = build_term_parser(dfD)
dict_token = dfD['token'].to_dict()
dict_id_parent = dfD['id_parent'].to_dict()
dict_parent = dfD['parent'].to_dict()
# dict_dictionary = dfD['dictionary'].to_dict()
dict_type = dfD['type'].to_dict()
# dict_source = dfD['source'].to_dict()
engine_pubmed = db.connectToPostgreSQL(server='cns-postgres-myaura')
# engine_prediction_result = db.connectToPostgreSQL('postgres_cns_test')
# db_pubmed = 'pubmed_medline17'
# db_mention = 'ddi_pubmed_mentions'
mention_table = 'mention_pubmed_epilepsy_%s.mention' % (dicttimestamp)
psql_mention = db.connectToPostgreSQL('cns-postgres-myaura')
# mongo_mention, _ = db.connectToMongoDB(server='mongo_ddi', db=db_mention)
for i in range(10000):
offset = i*100
print('> Parsing PubMedID: %d - %d' % (i*100, (i+1)*100))
i += 1
# SQL Query
sql = """SELECT pmid, article_title, abstract_text, pub_year FROM pubmed.view_epilepsy offset %d limit 100""" % (offset)
q = engine_pubmed.execute(sql)
# No pmid found
if q.rowcount < 1:
break
else:
for row in q.fetchall():
pmid = row[0]
title = row[1] if (row[1] is not None) else ''
abstract = row[2] if (row[2] is not None) else ''
date_publication = row[3] if (row[3] is not None) else -10000
# date_publication = datetime.combine(row['date_publication'],
# datetime.min.time()) # convert date to datetime for MongoDB
#
# Find Mentions in Title & Abstract
#
text = title + "\n" + abstract
# Parser
s = Sentences(text).preprocess(lower=True).tokenize().match_tokens(parser=tdp)
if s.has_match():
mj = {
'_id': pmid,
'created_time': date_publication,
'matches': []
}
for match in s.get_unique_matches():
for mid in match.id:
mj['matches'].append({
'id': mid,
'id_parent': dict_id_parent[mid],
'token': dict_token[mid],
'parent': dict_parent[mid],
'type': dict_type[mid]
})
sql = "INSERT INTO %s VALUES (%s, '%s', '%s');" % \
(mention_table, pmid, date_publication, json.dumps(mj).replace("'", "''"))
try:
q = psql_mention.execute(sql)
except ValueError as error:
print("Error! Args: '{:s}'".format(error.args))
| [
"sys.path.insert",
"load_dictionary.build_term_parser",
"json.dumps",
"termdictparser.Sentences",
"pandas.set_option",
"os.path.dirname",
"load_dictionary.load_dictionary",
"db_init.connectToPostgreSQL"
] | [((337, 372), 'sys.path.insert', 'sys.path.insert', (['(0)', 'py_include_path'], {}), '(0, py_include_path)\n', (352, 372), False, 'import sys\n'), ((430, 468), 'pandas.set_option', 'pd.set_option', (['"""display.max_rows"""', '(100)'], {}), "('display.max_rows', 100)\n", (443, 468), True, 'import pandas as pd\n'), ((469, 510), 'pandas.set_option', 'pd.set_option', (['"""display.max_columns"""', '(500)'], {}), "('display.max_columns', 500)\n", (482, 510), True, 'import pandas as pd\n'), ((511, 547), 'pandas.set_option', 'pd.set_option', (['"""display.width"""', '(1000)'], {}), "('display.width', 1000)\n", (524, 547), True, 'import pandas as pd\n'), ((1065, 1139), 'load_dictionary.load_dictionary', 'load_dictionary', ([], {'dicttimestamp': 'dicttimestamp', 'server': '"""cns-postgres-myaura"""'}), "(dicttimestamp=dicttimestamp, server='cns-postgres-myaura')\n", (1080, 1139), False, 'from load_dictionary import load_dictionary, build_term_parser\n'), ((1180, 1202), 'load_dictionary.build_term_parser', 'build_term_parser', (['dfD'], {}), '(dfD)\n', (1197, 1202), False, 'from load_dictionary import load_dictionary, build_term_parser\n'), ((1490, 1542), 'db_init.connectToPostgreSQL', 'db.connectToPostgreSQL', ([], {'server': '"""cns-postgres-myaura"""'}), "(server='cns-postgres-myaura')\n", (1512, 1542), True, 'import db_init as db\n'), ((1794, 1839), 'db_init.connectToPostgreSQL', 'db.connectToPostgreSQL', (['"""cns-postgres-myaura"""'], {}), "('cns-postgres-myaura')\n", (1816, 1839), True, 'import db_init as db\n'), ((276, 301), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (291, 301), False, 'import os\n'), ((874, 899), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (889, 899), False, 'import os\n'), ((3872, 3886), 'json.dumps', 'json.dumps', (['mj'], {}), '(mj)\n', (3882, 3886), False, 'import json\n'), ((2975, 2990), 'termdictparser.Sentences', 'Sentences', (['text'], {}), '(text)\n', (2984, 2990), False, 'from termdictparser import Sentences\n')] |
#!/usr/bin/env python3.7
import requests
import json
# key + secret from downloaded apikey.txt
api_key="<KEY>"
api_secret="<KEY>"
# define the basics, hostname to use and description used to identify our test rule
rule_description='OPNsense_fw_api_testrule_1'
remote_uri="https://192.168.1.1"
# search for rule
r = requests.get(
"%s/api/firewall/filter/searchRule?current=1&rowCount=7&searchPhrase=%s" % (
remote_uri, rule_description
),
auth=(api_key, api_secret), verify=False
)
if r.status_code == 200:
response = json.loads(r.text)
if len(response['rows']) > 0:
rule_uuid = response['rows'][0]['uuid']
r = requests.post("%s/api/firewall/filter/savepoint" % remote_uri, auth=(api_key, api_secret), verify=False)
if r.status_code == 200:
sp_response = json.loads(r.text)
# disable rule
r = requests.post("%s/api/firewall/filter/toggleRule/%s/0" % (remote_uri, rule_uuid),
auth=(api_key, api_secret), verify=False
)
# apply changes, revert to sp_response['revision'] after 60 seconds
r = requests.post("%s/api/firewall/filter/apply/%s" % (remote_uri, sp_response['revision']),
auth=(api_key, api_secret), verify=False
)
print("revert to revision %s in 60 seconds (%s changed)" % (sp_response['revision'], rule_uuid))
else:
print("rule %s not found" % rule_description)
| [
"json.loads",
"requests.post",
"requests.get"
] | [((318, 488), 'requests.get', 'requests.get', (["('%s/api/firewall/filter/searchRule?current=1&rowCount=7&searchPhrase=%s' %\n (remote_uri, rule_description))"], {'auth': '(api_key, api_secret)', 'verify': '(False)'}), "(\n '%s/api/firewall/filter/searchRule?current=1&rowCount=7&searchPhrase=%s' %\n (remote_uri, rule_description), auth=(api_key, api_secret), verify=False)\n", (330, 488), False, 'import requests\n'), ((545, 563), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (555, 563), False, 'import json\n'), ((658, 767), 'requests.post', 'requests.post', (["('%s/api/firewall/filter/savepoint' % remote_uri)"], {'auth': '(api_key, api_secret)', 'verify': '(False)'}), "('%s/api/firewall/filter/savepoint' % remote_uri, auth=(\n api_key, api_secret), verify=False)\n", (671, 767), False, 'import requests\n'), ((822, 840), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (832, 840), False, 'import json\n'), ((884, 1011), 'requests.post', 'requests.post', (["('%s/api/firewall/filter/toggleRule/%s/0' % (remote_uri, rule_uuid))"], {'auth': '(api_key, api_secret)', 'verify': '(False)'}), "('%s/api/firewall/filter/toggleRule/%s/0' % (remote_uri,\n rule_uuid), auth=(api_key, api_secret), verify=False)\n", (897, 1011), False, 'import requests\n'), ((1147, 1282), 'requests.post', 'requests.post', (["('%s/api/firewall/filter/apply/%s' % (remote_uri, sp_response['revision']))"], {'auth': '(api_key, api_secret)', 'verify': '(False)'}), "('%s/api/firewall/filter/apply/%s' % (remote_uri, sp_response[\n 'revision']), auth=(api_key, api_secret), verify=False)\n", (1160, 1282), False, 'import requests\n')] |
import click
import pandas as pd
from os import environ
from pangea_api import (
Knex,
User,
)
from .api import upload_cap_uri_list, link_reads, upload_metadata, upload_reads
from .utils import bcify
@click.group()
def pangea():
pass
@pangea.group()
def upload():
pass
@upload.command('cap')
@click.option('--replicate', default=None)
@click.option('--endpoint', default='https://pangea.gimmebio.com')
@click.option('--s3-endpoint', default='https://s3.wasabisys.com')
@click.option('-m', '--module-prefix', default='cap1::')
@click.option('-e', '--email', default=environ.get('PANGEA_USER', None))
@click.option('-p', '--password', default=environ.get('PANGEA_PASS', None))
@click.argument('org_name')
@click.argument('lib_name')
@click.argument('uri_list', type=click.File('r'))
def main(replicate, endpoint, s3_endpoint, module_prefix, email, password, org_name, lib_name, uri_list):
"""Register a list of S3 URIs with Pangea."""
knex = Knex(endpoint)
User(knex, email, password).login()
for field in upload_cap_uri_list(knex, org_name, lib_name,
(line.strip() for line in uri_list),
endpoint_url=s3_endpoint,
threads=1,
module_prefix=module_prefix,
replicate=replicate):
click.echo(
f'{field.parent.sample.name} {field.parent.module_name} {field.name}',
err=True
)
@pangea.group()
def s3():
"""Functions involving S3."""
pass
@s3.command('make-uris')
@click.option('-s', '--sep', default='\t')
@click.option('-f', '--filename-list', type=click.File('r'), default='-')
@click.option('-o', '--outfile', type=click.File('w'), default='-')
@click.argument('prefix')
def make_uri(sep, filename_list, outfile, prefix):
"""Convert a list of filenames to a list suitable for upload to s3.
filename -> (filename, s3_uri)
meant for use with xargs as roughly:
`cat filenames.txt | <this command> | xargs -l <upload_to_s3>`
"""
assert prefix.startswith('s3://')
if not prefix.endswith('/'):
prefix = prefix + '/'
for line in filename_list:
path = line.strip()
fname = path.split('/')[-1]
print(f'{path}{sep}{prefix}{fname}', file=outfile)
@s3.command('make-cap-uris')
@click.option('-s', '--sep', default='\t')
@click.option('-f', '--filename-list', type=click.File('r'), default='-')
@click.option('-o', '--outfile', type=click.File('w'), default='-')
@click.argument('bucket_name')
def make_uri(sep, filename_list, outfile, bucket_name):
"""Convert a list of filenames to a list suitable for upload to s3.
filename -> (filename, s3_uri)
meant for use with xargs as roughly:
`cat filenames.txt | <this command> | xargs -l <upload_to_s3>`
"""
prefix = f's3://{bucket_name}/analysis/metasub_cap1/results/'
for line in filename_list:
path = line.strip()
if path.endswith('.flag.registered'):
continue
fname = path.split('/')[-1]
tkns = fname.split('.')
if len(tkns) < 4: # sample name, module name, field name, ext+
continue
sample_name = bcify(tkns[0])
fname = sample_name + '.' + '.'.join(tkns[1:])
print(f'{path}{sep}{prefix}{sample_name}/{fname}', file=outfile)
@upload.command('link-data')
@click.option('--endpoint', default='https://pangea.gimmebio.com')
@click.option('--s3-endpoint', default='https://s3.wasabisys.com')
@click.option('--index-col', default=0)
@click.option('-m', '--module-name', default='raw::raw_reads')
@click.option('-e', '--email', default=environ.get('PANGEA_USER', None))
@click.option('-p', '--password', default=environ.get('PANGEA_PASS', None))
@click.argument('org_name')
@click.argument('lib_name')
@click.argument('uri_list', type=click.File('r'))
def cli_link_data(endpoint, s3_endpoint, index_col, module_name, email, password, org_name, lib_name, uri_list):
knex = Knex(endpoint)
User(knex, email, password).login()
for sample, ar, r1, r2 in link_reads(knex, org_name, lib_name,
[line.strip().split()[index_col] for line in uri_list],
module_name,
endpoint_url=s3_endpoint,
on_error=lambda e: click.echo(e, err=True)):
r1uri, r2uri = r1.stored_data['uri'], r2.stored_data['uri']
click.echo(
f'{sample.name} {ar.module_name} {r1uri} {r2uri}',
err=True
)
@upload.command('reads')
@click.option('--endpoint', default='https://pangea.gimmebio.com')
@click.option('--s3-endpoint', default='https://s3.wasabisys.com')
@click.option('-m', '--module-name', default='raw::raw_reads')
@click.option('-e', '--email', default=environ.get('PANGEA_USER', None))
@click.option('-p', '--password', default=environ.get('PANGEA_PASS', None))
@click.option('-d', '--delim', default=None)
@click.option('-1', '--ext-1', default='.R1.fastq.gz')
@click.option('-2', '--ext-2', default='.R2.fastq.gz')
@click.argument('org_name')
@click.argument('lib_name')
@click.argument('uri_list', type=click.File('r'))
def cli_link_data(endpoint, s3_endpoint, module_name, email, password,
delim, ext_1, ext_2,
org_name, lib_name, uri_list):
knex = Knex(endpoint)
User(knex, email, password).login()
for sample, ar, r1, r2 in upload_reads(knex, org_name, lib_name,
[line.strip().split()[0] for line in uri_list],
module_name, ext_1, ext_2,
delim=delim,
endpoint_url=s3_endpoint,
on_error=lambda e: click.echo(e, err=True)):
r1uri, r2uri = r1.stored_data['uri'], r2.stored_data['uri']
click.echo(
f'{sample.name} {ar.module_name} {r1uri} {r2uri}',
err=True
)
@upload.command('metadata')
@click.option('--create/--no-create', default=False)
@click.option('--overwrite/--no-overwrite', default=False)
@click.option('--index-col', default=0)
@click.option('--endpoint', default='https://pangea.gimmebio.com')
@click.option('-e', '--email', default=environ.get('PANGEA_USER', None))
@click.option('-p', '--password', default=environ.get('PANGEA_PASS', None))
@click.argument('org_name')
@click.argument('lib_name')
@click.argument('table', type=click.File('r'))
def cli_metadata(create, overwrite, endpoint, index_col, email, password, org_name, lib_name, table):
knex = Knex(endpoint)
User(knex, email, password).login()
tbl = pd.read_csv(table, index_col=index_col)
tbl.index = tbl.index.to_series().map(str)
generator = upload_metadata(
knex, org_name, lib_name, tbl,
on_error=lambda e: click.echo(e, err=True), create=create, overwrite=overwrite
)
for sample in generator:
click.echo(sample)
if __name__ == '__main__':
pangea()
| [
"click.argument",
"pandas.read_csv",
"click.group",
"click.option",
"os.environ.get",
"pangea_api.Knex",
"click.File",
"click.echo",
"pangea_api.User"
] | [((212, 225), 'click.group', 'click.group', ([], {}), '()\n', (223, 225), False, 'import click\n'), ((316, 357), 'click.option', 'click.option', (['"""--replicate"""'], {'default': 'None'}), "('--replicate', default=None)\n", (328, 357), False, 'import click\n'), ((359, 424), 'click.option', 'click.option', (['"""--endpoint"""'], {'default': '"""https://pangea.gimmebio.com"""'}), "('--endpoint', default='https://pangea.gimmebio.com')\n", (371, 424), False, 'import click\n'), ((426, 491), 'click.option', 'click.option', (['"""--s3-endpoint"""'], {'default': '"""https://s3.wasabisys.com"""'}), "('--s3-endpoint', default='https://s3.wasabisys.com')\n", (438, 491), False, 'import click\n'), ((493, 548), 'click.option', 'click.option', (['"""-m"""', '"""--module-prefix"""'], {'default': '"""cap1::"""'}), "('-m', '--module-prefix', default='cap1::')\n", (505, 548), False, 'import click\n'), ((699, 725), 'click.argument', 'click.argument', (['"""org_name"""'], {}), "('org_name')\n", (713, 725), False, 'import click\n'), ((727, 753), 'click.argument', 'click.argument', (['"""lib_name"""'], {}), "('lib_name')\n", (741, 753), False, 'import click\n'), ((1632, 1673), 'click.option', 'click.option', (['"""-s"""', '"""--sep"""'], {'default': '"""\t"""'}), "('-s', '--sep', default='\\t')\n", (1644, 1673), False, 'import click\n'), ((1817, 1841), 'click.argument', 'click.argument', (['"""prefix"""'], {}), "('prefix')\n", (1831, 1841), False, 'import click\n'), ((2409, 2450), 'click.option', 'click.option', (['"""-s"""', '"""--sep"""'], {'default': '"""\t"""'}), "('-s', '--sep', default='\\t')\n", (2421, 2450), False, 'import click\n'), ((2594, 2623), 'click.argument', 'click.argument', (['"""bucket_name"""'], {}), "('bucket_name')\n", (2608, 2623), False, 'import click\n'), ((3459, 3524), 'click.option', 'click.option', (['"""--endpoint"""'], {'default': '"""https://pangea.gimmebio.com"""'}), "('--endpoint', default='https://pangea.gimmebio.com')\n", (3471, 3524), False, 'import click\n'), ((3526, 3591), 'click.option', 'click.option', (['"""--s3-endpoint"""'], {'default': '"""https://s3.wasabisys.com"""'}), "('--s3-endpoint', default='https://s3.wasabisys.com')\n", (3538, 3591), False, 'import click\n'), ((3593, 3631), 'click.option', 'click.option', (['"""--index-col"""'], {'default': '(0)'}), "('--index-col', default=0)\n", (3605, 3631), False, 'import click\n'), ((3633, 3694), 'click.option', 'click.option', (['"""-m"""', '"""--module-name"""'], {'default': '"""raw::raw_reads"""'}), "('-m', '--module-name', default='raw::raw_reads')\n", (3645, 3694), False, 'import click\n'), ((3845, 3871), 'click.argument', 'click.argument', (['"""org_name"""'], {}), "('org_name')\n", (3859, 3871), False, 'import click\n'), ((3873, 3899), 'click.argument', 'click.argument', (['"""lib_name"""'], {}), "('lib_name')\n", (3887, 3899), False, 'import click\n'), ((4710, 4775), 'click.option', 'click.option', (['"""--endpoint"""'], {'default': '"""https://pangea.gimmebio.com"""'}), "('--endpoint', default='https://pangea.gimmebio.com')\n", (4722, 4775), False, 'import click\n'), ((4777, 4842), 'click.option', 'click.option', (['"""--s3-endpoint"""'], {'default': '"""https://s3.wasabisys.com"""'}), "('--s3-endpoint', default='https://s3.wasabisys.com')\n", (4789, 4842), False, 'import click\n'), ((4844, 4905), 'click.option', 'click.option', (['"""-m"""', '"""--module-name"""'], {'default': '"""raw::raw_reads"""'}), "('-m', '--module-name', default='raw::raw_reads')\n", (4856, 4905), False, 'import click\n'), ((5056, 5099), 'click.option', 'click.option', (['"""-d"""', '"""--delim"""'], {'default': 'None'}), "('-d', '--delim', default=None)\n", (5068, 5099), False, 'import click\n'), ((5101, 5154), 'click.option', 'click.option', (['"""-1"""', '"""--ext-1"""'], {'default': '""".R1.fastq.gz"""'}), "('-1', '--ext-1', default='.R1.fastq.gz')\n", (5113, 5154), False, 'import click\n'), ((5156, 5209), 'click.option', 'click.option', (['"""-2"""', '"""--ext-2"""'], {'default': '""".R2.fastq.gz"""'}), "('-2', '--ext-2', default='.R2.fastq.gz')\n", (5168, 5209), False, 'import click\n'), ((5211, 5237), 'click.argument', 'click.argument', (['"""org_name"""'], {}), "('org_name')\n", (5225, 5237), False, 'import click\n'), ((5239, 5265), 'click.argument', 'click.argument', (['"""lib_name"""'], {}), "('lib_name')\n", (5253, 5265), False, 'import click\n'), ((6187, 6238), 'click.option', 'click.option', (['"""--create/--no-create"""'], {'default': '(False)'}), "('--create/--no-create', default=False)\n", (6199, 6238), False, 'import click\n'), ((6240, 6297), 'click.option', 'click.option', (['"""--overwrite/--no-overwrite"""'], {'default': '(False)'}), "('--overwrite/--no-overwrite', default=False)\n", (6252, 6297), False, 'import click\n'), ((6299, 6337), 'click.option', 'click.option', (['"""--index-col"""'], {'default': '(0)'}), "('--index-col', default=0)\n", (6311, 6337), False, 'import click\n'), ((6339, 6404), 'click.option', 'click.option', (['"""--endpoint"""'], {'default': '"""https://pangea.gimmebio.com"""'}), "('--endpoint', default='https://pangea.gimmebio.com')\n", (6351, 6404), False, 'import click\n'), ((6555, 6581), 'click.argument', 'click.argument', (['"""org_name"""'], {}), "('org_name')\n", (6569, 6581), False, 'import click\n'), ((6583, 6609), 'click.argument', 'click.argument', (['"""lib_name"""'], {}), "('lib_name')\n", (6597, 6609), False, 'import click\n'), ((971, 985), 'pangea_api.Knex', 'Knex', (['endpoint'], {}), '(endpoint)\n', (975, 985), False, 'from pangea_api import Knex, User\n'), ((4074, 4088), 'pangea_api.Knex', 'Knex', (['endpoint'], {}), '(endpoint)\n', (4078, 4088), False, 'from pangea_api import Knex, User\n'), ((5486, 5500), 'pangea_api.Knex', 'Knex', (['endpoint'], {}), '(endpoint)\n', (5490, 5500), False, 'from pangea_api import Knex, User\n'), ((6770, 6784), 'pangea_api.Knex', 'Knex', (['endpoint'], {}), '(endpoint)\n', (6774, 6784), False, 'from pangea_api import Knex, User\n'), ((6835, 6874), 'pandas.read_csv', 'pd.read_csv', (['table'], {'index_col': 'index_col'}), '(table, index_col=index_col)\n', (6846, 6874), True, 'import pandas as pd\n'), ((1407, 1507), 'click.echo', 'click.echo', (['f"""{field.parent.sample.name} {field.parent.module_name} {field.name}"""'], {'err': '(True)'}), "(\n f'{field.parent.sample.name} {field.parent.module_name} {field.name}',\n err=True)\n", (1417, 1507), False, 'import click\n'), ((588, 620), 'os.environ.get', 'environ.get', (['"""PANGEA_USER"""', 'None'], {}), "('PANGEA_USER', None)\n", (599, 620), False, 'from os import environ\n'), ((664, 696), 'os.environ.get', 'environ.get', (['"""PANGEA_PASS"""', 'None'], {}), "('PANGEA_PASS', None)\n", (675, 696), False, 'from os import environ\n'), ((787, 802), 'click.File', 'click.File', (['"""r"""'], {}), "('r')\n", (797, 802), False, 'import click\n'), ((1718, 1733), 'click.File', 'click.File', (['"""r"""'], {}), "('r')\n", (1728, 1733), False, 'import click\n'), ((1786, 1801), 'click.File', 'click.File', (['"""w"""'], {}), "('w')\n", (1796, 1801), False, 'import click\n'), ((2495, 2510), 'click.File', 'click.File', (['"""r"""'], {}), "('r')\n", (2505, 2510), False, 'import click\n'), ((2563, 2578), 'click.File', 'click.File', (['"""w"""'], {}), "('w')\n", (2573, 2578), False, 'import click\n'), ((4576, 4647), 'click.echo', 'click.echo', (['f"""{sample.name} {ar.module_name} {r1uri} {r2uri}"""'], {'err': '(True)'}), "(f'{sample.name} {ar.module_name} {r1uri} {r2uri}', err=True)\n", (4586, 4647), False, 'import click\n'), ((3734, 3766), 'os.environ.get', 'environ.get', (['"""PANGEA_USER"""', 'None'], {}), "('PANGEA_USER', None)\n", (3745, 3766), False, 'from os import environ\n'), ((3810, 3842), 'os.environ.get', 'environ.get', (['"""PANGEA_PASS"""', 'None'], {}), "('PANGEA_PASS', None)\n", (3821, 3842), False, 'from os import environ\n'), ((3933, 3948), 'click.File', 'click.File', (['"""r"""'], {}), "('r')\n", (3943, 3948), False, 'import click\n'), ((6050, 6121), 'click.echo', 'click.echo', (['f"""{sample.name} {ar.module_name} {r1uri} {r2uri}"""'], {'err': '(True)'}), "(f'{sample.name} {ar.module_name} {r1uri} {r2uri}', err=True)\n", (6060, 6121), False, 'import click\n'), ((4945, 4977), 'os.environ.get', 'environ.get', (['"""PANGEA_USER"""', 'None'], {}), "('PANGEA_USER', None)\n", (4956, 4977), False, 'from os import environ\n'), ((5021, 5053), 'os.environ.get', 'environ.get', (['"""PANGEA_PASS"""', 'None'], {}), "('PANGEA_PASS', None)\n", (5032, 5053), False, 'from os import environ\n'), ((5299, 5314), 'click.File', 'click.File', (['"""r"""'], {}), "('r')\n", (5309, 5314), False, 'import click\n'), ((7124, 7142), 'click.echo', 'click.echo', (['sample'], {}), '(sample)\n', (7134, 7142), False, 'import click\n'), ((6444, 6476), 'os.environ.get', 'environ.get', (['"""PANGEA_USER"""', 'None'], {}), "('PANGEA_USER', None)\n", (6455, 6476), False, 'from os import environ\n'), ((6520, 6552), 'os.environ.get', 'environ.get', (['"""PANGEA_PASS"""', 'None'], {}), "('PANGEA_PASS', None)\n", (6531, 6552), False, 'from os import environ\n'), ((6640, 6655), 'click.File', 'click.File', (['"""r"""'], {}), "('r')\n", (6650, 6655), False, 'import click\n'), ((990, 1017), 'pangea_api.User', 'User', (['knex', 'email', 'password'], {}), '(knex, email, password)\n', (994, 1017), False, 'from pangea_api import Knex, User\n'), ((4093, 4120), 'pangea_api.User', 'User', (['knex', 'email', 'password'], {}), '(knex, email, password)\n', (4097, 4120), False, 'from pangea_api import Knex, User\n'), ((5505, 5532), 'pangea_api.User', 'User', (['knex', 'email', 'password'], {}), '(knex, email, password)\n', (5509, 5532), False, 'from pangea_api import Knex, User\n'), ((6789, 6816), 'pangea_api.User', 'User', (['knex', 'email', 'password'], {}), '(knex, email, password)\n', (6793, 6816), False, 'from pangea_api import Knex, User\n'), ((4474, 4497), 'click.echo', 'click.echo', (['e'], {'err': '(True)'}), '(e, err=True)\n', (4484, 4497), False, 'import click\n'), ((5948, 5971), 'click.echo', 'click.echo', (['e'], {'err': '(True)'}), '(e, err=True)\n', (5958, 5971), False, 'import click\n'), ((7021, 7044), 'click.echo', 'click.echo', (['e'], {'err': '(True)'}), '(e, err=True)\n', (7031, 7044), False, 'import click\n')] |
import os
import re
def get_start_and_end_frame(output_path):
filenames = os.listdir(output_path)
filenames.sort()
start_frame_ix = int(filenames[0][:6])
end_frame_ix = int(filenames[-1][:6])
return start_frame_ix, end_frame_ix
def get_output_frame_count(output_path):
contents = os.listdir(output_path)
count = 0
canonical_mesh_filename_pattern = re.compile(r'\d{6}_canonical_mesh\.ply')
for item in contents:
if canonical_mesh_filename_pattern.match(item) is not None:
count += 1
return count
def get_gn_iteration_count(start_frame_ix, output_path):
# point cloud setup
start_frame_ix_string = f"{start_frame_ix:06d}"
first_frame_point_file_pattern = re.compile(start_frame_ix_string + r"_deformed_points_iter_(\d{3})[.]npy")
first_frame_point_files_in_output_path = [file for file in os.listdir(output_path) if
first_frame_point_file_pattern.match(file) is not None]
return len(first_frame_point_files_in_output_path)
def source_and_target_point_clouds_are_present(start_frame_ix, output_path):
start_frame_source_pc_filename = f"{start_frame_ix:06d}_source_rgbxyz.npy"
start_frame_target_pc_filename = f"{start_frame_ix:06d}_target_rgbxyz.npy"
all_filenames = os.listdir(output_path)
return start_frame_source_pc_filename in all_filenames and start_frame_target_pc_filename in all_filenames
| [
"os.listdir",
"re.compile"
] | [((80, 103), 'os.listdir', 'os.listdir', (['output_path'], {}), '(output_path)\n', (90, 103), False, 'import os\n'), ((308, 331), 'os.listdir', 'os.listdir', (['output_path'], {}), '(output_path)\n', (318, 331), False, 'import os\n'), ((384, 425), 're.compile', 're.compile', (['"""\\\\d{6}_canonical_mesh\\\\.ply"""'], {}), "('\\\\d{6}_canonical_mesh\\\\.ply')\n", (394, 425), False, 'import re\n'), ((731, 805), 're.compile', 're.compile', (["(start_frame_ix_string + '_deformed_points_iter_(\\\\d{3})[.]npy')"], {}), "(start_frame_ix_string + '_deformed_points_iter_(\\\\d{3})[.]npy')\n", (741, 805), False, 'import re\n'), ((1310, 1333), 'os.listdir', 'os.listdir', (['output_path'], {}), '(output_path)\n', (1320, 1333), False, 'import os\n'), ((869, 892), 'os.listdir', 'os.listdir', (['output_path'], {}), '(output_path)\n', (879, 892), False, 'import os\n')] |
import unittest
import git
import shutil
import tempfile
import os
from requre.cassette import Cassette
from requre.modules_decorate_all_methods import (
record_requests_module,
record_tempfile_module,
record_git_module,
)
@record_git_module
@record_tempfile_module
@record_requests_module
class ApplyCommonCase(unittest.TestCase):
def setUp(self) -> None:
self._tempdir = None
self.git_url = "<EMAIL>:packit/hello-world.git"
@property
def tempdir(self):
if not self._tempdir:
self._tempdir = tempfile.mkdtemp()
return self._tempdir
def tearDown(self) -> None:
shutil.rmtree(self.tempdir)
def cassette_teardown(self, cassette: Cassette):
self.assertIn(
"tests.test_modules_decorate_all_methods.ApplyCommonCase.test_git.yaml",
str(cassette.storage_file),
)
def test_git(self):
repo = git.Repo.clone_from(self.git_url, to_path=self.tempdir)
repo.remotes[0].pull()
repo.remotes[0].fetch()
repo.remotes[0].push()
self.assertIn("static_tmp_1", self.tempdir)
self.assertIn("hello.spec", os.listdir(self.tempdir))
| [
"git.Repo.clone_from",
"os.listdir",
"tempfile.mkdtemp",
"shutil.rmtree"
] | [((645, 672), 'shutil.rmtree', 'shutil.rmtree', (['self.tempdir'], {}), '(self.tempdir)\n', (658, 672), False, 'import shutil\n'), ((925, 980), 'git.Repo.clone_from', 'git.Repo.clone_from', (['self.git_url'], {'to_path': 'self.tempdir'}), '(self.git_url, to_path=self.tempdir)\n', (944, 980), False, 'import git\n'), ((556, 574), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (572, 574), False, 'import tempfile\n'), ((1163, 1187), 'os.listdir', 'os.listdir', (['self.tempdir'], {}), '(self.tempdir)\n', (1173, 1187), False, 'import os\n')] |
"""
Tests for CameraCalibrator and related functions
"""
import numpy as np
import pytest
from scipy.stats import norm
from traitlets.config.configurable import Config
from astropy import units as u
from ctapipe.calib.camera.calibrator import CameraCalibrator
from ctapipe.image.extractor import LocalPeakWindowSum, FullWaveformSum
from ctapipe.instrument import CameraGeometry
from ctapipe.containers import DataContainer
def test_camera_calibrator(example_event, example_subarray):
telid = list(example_event.r0.tel)[0]
calibrator = CameraCalibrator(subarray=example_subarray)
calibrator(example_event)
image = example_event.dl1.tel[telid].image
peak_time = example_event.dl1.tel[telid].peak_time
assert image is not None
assert peak_time is not None
assert image.shape == (1764,)
assert peak_time.shape == (1764,)
def test_manual_extractor(example_subarray):
calibrator = CameraCalibrator(
subarray=example_subarray,
image_extractor=LocalPeakWindowSum(subarray=example_subarray),
)
assert isinstance(calibrator.image_extractor, LocalPeakWindowSum)
def test_config(example_subarray):
window_shift = 3
window_width = 9
config = Config(
{
"LocalPeakWindowSum": {
"window_shift": window_shift,
"window_width": window_width,
}
}
)
calibrator = CameraCalibrator(
subarray=example_subarray,
image_extractor=LocalPeakWindowSum(subarray=example_subarray, config=config),
config=config,
)
assert calibrator.image_extractor.window_shift.tel[None] == window_shift
assert calibrator.image_extractor.window_width.tel[None] == window_width
def test_check_r1_empty(example_event, example_subarray):
calibrator = CameraCalibrator(subarray=example_subarray)
telid = list(example_event.r0.tel)[0]
waveform = example_event.r1.tel[telid].waveform.copy()
with pytest.warns(UserWarning):
example_event.r1.tel[telid].waveform = None
calibrator._calibrate_dl0(example_event, telid)
assert example_event.dl0.tel[telid].waveform is None
assert calibrator._check_r1_empty(None) is True
assert calibrator._check_r1_empty(waveform) is False
calibrator = CameraCalibrator(
subarray=example_subarray,
image_extractor=FullWaveformSum(subarray=example_subarray),
)
event = DataContainer()
event.dl0.tel[telid].waveform = np.full((2048, 128), 2)
with pytest.warns(UserWarning):
calibrator(event)
assert (event.dl0.tel[telid].waveform == 2).all()
assert (event.dl1.tel[telid].image == 2 * 128).all()
def test_check_dl0_empty(example_event, example_subarray):
calibrator = CameraCalibrator(subarray=example_subarray)
telid = list(example_event.r0.tel)[0]
calibrator._calibrate_dl0(example_event, telid)
waveform = example_event.dl0.tel[telid].waveform.copy()
with pytest.warns(UserWarning):
example_event.dl0.tel[telid].waveform = None
calibrator._calibrate_dl1(example_event, telid)
assert example_event.dl1.tel[telid].image is None
assert calibrator._check_dl0_empty(None) is True
assert calibrator._check_dl0_empty(waveform) is False
calibrator = CameraCalibrator(subarray=example_subarray)
event = DataContainer()
event.dl1.tel[telid].image = np.full(2048, 2)
with pytest.warns(UserWarning):
calibrator(event)
assert (event.dl1.tel[telid].image == 2).all()
def test_dl1_charge_calib(example_subarray):
camera = CameraGeometry.from_name("CHEC")
n_pixels = camera.n_pixels
n_samples = 96
mid = n_samples // 2
pulse_sigma = 6
random = np.random.RandomState(1)
x = np.arange(n_samples)
# Randomize times and create pulses
time_offset = random.uniform(mid - 10, mid + 10, n_pixels)[:, np.newaxis]
y = norm.pdf(x, time_offset, pulse_sigma).astype("float32")
# Define absolute calibration coefficients
absolute = random.uniform(100, 1000, n_pixels).astype("float32")
y *= absolute[:, np.newaxis]
# Define relative coefficients
relative = random.normal(1, 0.01, n_pixels)
y /= relative[:, np.newaxis]
# Define pedestal
pedestal = random.uniform(-4, 4, n_pixels)
y += pedestal[:, np.newaxis]
event = DataContainer()
telid = list(example_subarray.tel.keys())[0]
event.dl0.tel[telid].waveform = y
# Test default
calibrator = CameraCalibrator(
subarray=example_subarray,
image_extractor=FullWaveformSum(subarray=example_subarray),
)
calibrator(event)
np.testing.assert_allclose(event.dl1.tel[telid].image, y.sum(1), rtol=1e-4)
event.calibration.tel[telid].dl1.time_shift = time_offset
event.calibration.tel[telid].dl1.pedestal_offset = pedestal * n_samples
event.calibration.tel[telid].dl1.absolute_factor = absolute
event.calibration.tel[telid].dl1.relative_factor = relative
# Test without need for timing corrections
calibrator = CameraCalibrator(
subarray=example_subarray,
image_extractor=FullWaveformSum(subarray=example_subarray),
)
calibrator(event)
np.testing.assert_allclose(event.dl1.tel[telid].image, 1, rtol=1e-5)
# TODO: Test with timing corrections
| [
"ctapipe.image.extractor.LocalPeakWindowSum",
"ctapipe.instrument.CameraGeometry.from_name",
"numpy.full",
"ctapipe.image.extractor.FullWaveformSum",
"numpy.arange",
"numpy.testing.assert_allclose",
"pytest.warns",
"traitlets.config.configurable.Config",
"scipy.stats.norm.pdf",
"ctapipe.containers.DataContainer",
"ctapipe.calib.camera.calibrator.CameraCalibrator",
"numpy.random.RandomState"
] | [((546, 589), 'ctapipe.calib.camera.calibrator.CameraCalibrator', 'CameraCalibrator', ([], {'subarray': 'example_subarray'}), '(subarray=example_subarray)\n', (562, 589), False, 'from ctapipe.calib.camera.calibrator import CameraCalibrator\n'), ((1212, 1308), 'traitlets.config.configurable.Config', 'Config', (["{'LocalPeakWindowSum': {'window_shift': window_shift, 'window_width':\n window_width}}"], {}), "({'LocalPeakWindowSum': {'window_shift': window_shift, 'window_width':\n window_width}})\n", (1218, 1308), False, 'from traitlets.config.configurable import Config\n'), ((1804, 1847), 'ctapipe.calib.camera.calibrator.CameraCalibrator', 'CameraCalibrator', ([], {'subarray': 'example_subarray'}), '(subarray=example_subarray)\n', (1820, 1847), False, 'from ctapipe.calib.camera.calibrator import CameraCalibrator\n'), ((2421, 2436), 'ctapipe.containers.DataContainer', 'DataContainer', ([], {}), '()\n', (2434, 2436), False, 'from ctapipe.containers import DataContainer\n'), ((2473, 2496), 'numpy.full', 'np.full', (['(2048, 128)', '(2)'], {}), '((2048, 128), 2)\n', (2480, 2496), True, 'import numpy as np\n'), ((2748, 2791), 'ctapipe.calib.camera.calibrator.CameraCalibrator', 'CameraCalibrator', ([], {'subarray': 'example_subarray'}), '(subarray=example_subarray)\n', (2764, 2791), False, 'from ctapipe.calib.camera.calibrator import CameraCalibrator\n'), ((3279, 3322), 'ctapipe.calib.camera.calibrator.CameraCalibrator', 'CameraCalibrator', ([], {'subarray': 'example_subarray'}), '(subarray=example_subarray)\n', (3295, 3322), False, 'from ctapipe.calib.camera.calibrator import CameraCalibrator\n'), ((3335, 3350), 'ctapipe.containers.DataContainer', 'DataContainer', ([], {}), '()\n', (3348, 3350), False, 'from ctapipe.containers import DataContainer\n'), ((3384, 3400), 'numpy.full', 'np.full', (['(2048)', '(2)'], {}), '(2048, 2)\n', (3391, 3400), True, 'import numpy as np\n'), ((3574, 3606), 'ctapipe.instrument.CameraGeometry.from_name', 'CameraGeometry.from_name', (['"""CHEC"""'], {}), "('CHEC')\n", (3598, 3606), False, 'from ctapipe.instrument import CameraGeometry\n'), ((3715, 3739), 'numpy.random.RandomState', 'np.random.RandomState', (['(1)'], {}), '(1)\n', (3736, 3739), True, 'import numpy as np\n'), ((3748, 3768), 'numpy.arange', 'np.arange', (['n_samples'], {}), '(n_samples)\n', (3757, 3768), True, 'import numpy as np\n'), ((4335, 4350), 'ctapipe.containers.DataContainer', 'DataContainer', ([], {}), '()\n', (4348, 4350), False, 'from ctapipe.containers import DataContainer\n'), ((5189, 5258), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['event.dl1.tel[telid].image', '(1)'], {'rtol': '(1e-05)'}), '(event.dl1.tel[telid].image, 1, rtol=1e-05)\n', (5215, 5258), True, 'import numpy as np\n'), ((1958, 1983), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (1970, 1983), False, 'import pytest\n'), ((2506, 2531), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (2518, 2531), False, 'import pytest\n'), ((2955, 2980), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (2967, 2980), False, 'import pytest\n'), ((3410, 3435), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (3422, 3435), False, 'import pytest\n'), ((997, 1042), 'ctapipe.image.extractor.LocalPeakWindowSum', 'LocalPeakWindowSum', ([], {'subarray': 'example_subarray'}), '(subarray=example_subarray)\n', (1015, 1042), False, 'from ctapipe.image.extractor import LocalPeakWindowSum, FullWaveformSum\n'), ((1482, 1542), 'ctapipe.image.extractor.LocalPeakWindowSum', 'LocalPeakWindowSum', ([], {'subarray': 'example_subarray', 'config': 'config'}), '(subarray=example_subarray, config=config)\n', (1500, 1542), False, 'from ctapipe.image.extractor import LocalPeakWindowSum, FullWaveformSum\n'), ((2359, 2401), 'ctapipe.image.extractor.FullWaveformSum', 'FullWaveformSum', ([], {'subarray': 'example_subarray'}), '(subarray=example_subarray)\n', (2374, 2401), False, 'from ctapipe.image.extractor import LocalPeakWindowSum, FullWaveformSum\n'), ((3896, 3933), 'scipy.stats.norm.pdf', 'norm.pdf', (['x', 'time_offset', 'pulse_sigma'], {}), '(x, time_offset, pulse_sigma)\n', (3904, 3933), False, 'from scipy.stats import norm\n'), ((4552, 4594), 'ctapipe.image.extractor.FullWaveformSum', 'FullWaveformSum', ([], {'subarray': 'example_subarray'}), '(subarray=example_subarray)\n', (4567, 4594), False, 'from ctapipe.image.extractor import LocalPeakWindowSum, FullWaveformSum\n'), ((5113, 5155), 'ctapipe.image.extractor.FullWaveformSum', 'FullWaveformSum', ([], {'subarray': 'example_subarray'}), '(subarray=example_subarray)\n', (5128, 5155), False, 'from ctapipe.image.extractor import LocalPeakWindowSum, FullWaveformSum\n')] |
'''@package triangle helper routines related to triangles.
'''
import numpy
import numpy.linalg
def compute_angle(p1, p2, p3):
'''
compute_angle: compute angle 23 of triangle 123.
p1:
p2:
p3:
return: angle value
'''
from numpy import array, arccos
from numpy.linalg import norm
d12 = norm(array(p1) - array(p2))
d13 = norm(array(p1) - array(p3))
d23 = norm(array(p2) - array(p3))
return_value = arccos((d12**2 + d13**2 - d23**2) / (2 * d12 * d13))
return return_value
def compute_area(p1, p2, p3):
'''
compute_area: compute the area of triangle 123.
p1:
p2:
p3:
return: area
'''
from numpy import array
from numpy.linalg import norm
d12 = norm(array(p1) - array(p2))
d13 = norm(array(p1) - array(p3))
d23 = norm(array(p2) - array(p3))
s = 0.5 * (d12 + d13 + d23)
return (s * (s - d12) * (s - d13) * (s - d23)) ** 0.5
| [
"numpy.array",
"numpy.arccos"
] | [((453, 511), 'numpy.arccos', 'arccos', (['((d12 ** 2 + d13 ** 2 - d23 ** 2) / (2 * d12 * d13))'], {}), '((d12 ** 2 + d13 ** 2 - d23 ** 2) / (2 * d12 * d13))\n', (459, 511), False, 'from numpy import array, arccos\n'), ((334, 343), 'numpy.array', 'array', (['p1'], {}), '(p1)\n', (339, 343), False, 'from numpy import array\n'), ((346, 355), 'numpy.array', 'array', (['p2'], {}), '(p2)\n', (351, 355), False, 'from numpy import array\n'), ((372, 381), 'numpy.array', 'array', (['p1'], {}), '(p1)\n', (377, 381), False, 'from numpy import array\n'), ((384, 393), 'numpy.array', 'array', (['p3'], {}), '(p3)\n', (389, 393), False, 'from numpy import array\n'), ((410, 419), 'numpy.array', 'array', (['p2'], {}), '(p2)\n', (415, 419), False, 'from numpy import array\n'), ((422, 431), 'numpy.array', 'array', (['p3'], {}), '(p3)\n', (427, 431), False, 'from numpy import array\n'), ((752, 761), 'numpy.array', 'array', (['p1'], {}), '(p1)\n', (757, 761), False, 'from numpy import array\n'), ((764, 773), 'numpy.array', 'array', (['p2'], {}), '(p2)\n', (769, 773), False, 'from numpy import array\n'), ((790, 799), 'numpy.array', 'array', (['p1'], {}), '(p1)\n', (795, 799), False, 'from numpy import array\n'), ((802, 811), 'numpy.array', 'array', (['p3'], {}), '(p3)\n', (807, 811), False, 'from numpy import array\n'), ((828, 837), 'numpy.array', 'array', (['p2'], {}), '(p2)\n', (833, 837), False, 'from numpy import array\n'), ((840, 849), 'numpy.array', 'array', (['p3'], {}), '(p3)\n', (845, 849), False, 'from numpy import array\n')] |
import ctypes
from pyshader import _types as types, shadertype_as_ctype
from pytest import raises
def test_simple():
assert types.type_from_name("f32") is types.f32
assert types.f32.__name__ == "f32"
for t in [types.i16, types.i32, types.i64]:
assert isinstance(t, type) and issubclass(t, types.Numeric)
assert types.type_from_name(t.__name__) is t
for t in [types.f16, types.f32, types.f64]:
assert isinstance(t, type) and issubclass(t, types.Numeric)
assert types.type_from_name(t.__name__) is t
for t in [types.boolean, types.void]:
assert isinstance(t, type) and issubclass(t, types.ShaderType)
assert types.type_from_name(t.__name__) is t
def test_vector():
assert types.type_from_name("Vector(2,f32)") is types.vec2
assert types.vec2.__name__ == "Vector(2,f32)"
for t in [types.vec2, types.vec3, types.vec4]:
assert isinstance(t, type) and issubclass(t, types.Vector)
assert types.type_from_name(t.__name__) is t
for t in [types.ivec2, types.ivec3, types.ivec4]:
assert isinstance(t, type) and issubclass(t, types.Vector)
assert types.type_from_name(t.__name__) is t
for t in [types.bvec2, types.bvec3, types.bvec4]:
assert isinstance(t, type) and issubclass(t, types.Vector)
assert types.type_from_name(t.__name__) is t
def test_matrix():
assert types.type_from_name("Matrix(2,2,f32)") is types.mat2
assert types.mat2.__name__ == "Matrix(2,2,f32)"
for t in [types.mat2, types.mat3, types.mat4]:
assert isinstance(t, type) and issubclass(t, types.Matrix)
assert types.type_from_name(t.__name__) is t
for name in ["Matrix(2,3,f32)", "Matrix(3,4,f32)", "Matrix(4,2,f32)"]:
assert isinstance(t, type) and issubclass(t, types.Matrix)
t = types.type_from_name(name)
assert t.__name__ == name
def test_array():
for n, subt in [
(1, "f32"),
(12, "i16"),
(5, "Matrix(2,4,f32)"),
(6, "Struct(foo=f32,bar=i16)"),
]:
# Array with a length
name = f"Array({n},{subt})"
t = types.type_from_name(name)
assert isinstance(t, type) and issubclass(t, types.Array)
assert t.__name__ == name
assert t.subtype.__name__ == subt
assert t.length == n
# Array with undefined length
name = f"Array({subt})"
t = types.type_from_name(name)
assert isinstance(t, type) and issubclass(t, types.Array)
assert t.__name__ == name
assert t.subtype.__name__ == subt
assert t.length == 0
def test_struct():
for kwargs in [
dict(),
dict(foo=types.f32),
dict(foo=types.i32, bar=types.Array(12, types.vec3)),
]:
fields = ",".join(f"{key}={val.__name__}" for key, val in kwargs.items())
name = f"Struct({fields})"
t = types.type_from_name(name)
assert isinstance(t, type) and issubclass(t, types.Struct)
assert t.__name__ == name
assert t.keys == tuple(kwargs.keys())
assert set(t.keys).difference(dir(t)) == set()
assert t.length == len(kwargs)
for i, key in enumerate(t.keys):
assert getattr(t, key) == kwargs[key]
# A struct within a struct
T = types.Struct(
foo=types.Struct(
spam=types.Vector(2, types.f32), eggs=types.Struct(x=types.i16, y=types.i16)
),
bar=types.Array(types.f64),
)
name = T.__name__
print(name)
assert types.type_from_name(name) is T
def test_integrity():
for name in [
"f32",
"boolean",
"i64",
"void",
"Vector(3,f32)",
"Matrix(2,2,f32)",
"Array(f32)",
]:
assert name in types._subtypes
def test_that_gpu_types_cannot_be_instantiated():
# Abstract classes cannot be instantiated
for cls in [
types.ShaderType,
types.Scalar,
types.Numeric,
types.Float,
types.Int,
types.Composite,
types.Aggregate,
]:
with raises(RuntimeError) as info:
cls()
assert "cannot instantiate" in str(info.value).lower()
assert "abstract" in str(info.value).lower()
# Actually, concrete classes cannot be instantiated either
for cls in [
types.f32,
types.vec2,
types.mat3,
types.Array(2, types.f32),
types.Struct(foo=types.f32, bar=types.vec2),
]:
with raises(RuntimeError) as info:
cls()
assert "cannot instantiate" in str(info.value).lower()
def test_ctypes_interop():
# Some meta-testing
assert ctypes.c_float * 2 == ctypes.c_float * 2
assert ctypes.c_float * 2 != ctypes.c_float * 3
# Pre-create struct classes
s1 = types.Struct(foo=types.f32, bar=types.vec2)
s2 = type(
"xxx",
(ctypes.Structure,),
{"_fields_": [("foo", ctypes.c_float), ("bar", ctypes.c_float * 2)]},
)
for shadertype, ctype1 in [
(types.f32, ctypes.c_float),
(types.vec2, ctypes.c_float * 2),
(types.vec4, ctypes.c_float * 4),
(types.mat4, ctypes.c_float * 16),
(types.Array(12, types.ivec2), ctypes.c_int32 * 2 * 12),
(s1, s2),
]:
ctype2 = shadertype_as_ctype(shadertype)
assert ctypes.sizeof(ctype1) == ctypes.sizeof(ctype2)
if not issubclass(ctype1, ctypes.Structure):
assert ctype1 == ctype2
else:
# For structs we cannot compare types like that
assert ctype1._fields_ == ctype2._fields_
if __name__ == "__main__":
test_simple()
test_vector()
test_matrix()
test_array()
test_struct()
test_integrity()
test_that_gpu_types_cannot_be_instantiated()
test_ctypes_interop()
| [
"pyshader._types.Vector",
"pyshader._types.Struct",
"pyshader.shadertype_as_ctype",
"pyshader._types.Array",
"pytest.raises",
"pyshader._types.type_from_name",
"ctypes.sizeof"
] | [((4816, 4859), 'pyshader._types.Struct', 'types.Struct', ([], {'foo': 'types.f32', 'bar': 'types.vec2'}), '(foo=types.f32, bar=types.vec2)\n', (4828, 4859), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((133, 160), 'pyshader._types.type_from_name', 'types.type_from_name', (['"""f32"""'], {}), "('f32')\n", (153, 160), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((753, 790), 'pyshader._types.type_from_name', 'types.type_from_name', (['"""Vector(2,f32)"""'], {}), "('Vector(2,f32)')\n", (773, 790), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((1410, 1449), 'pyshader._types.type_from_name', 'types.type_from_name', (['"""Matrix(2,2,f32)"""'], {}), "('Matrix(2,2,f32)')\n", (1430, 1449), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((1843, 1869), 'pyshader._types.type_from_name', 'types.type_from_name', (['name'], {}), '(name)\n', (1863, 1869), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((2144, 2170), 'pyshader._types.type_from_name', 'types.type_from_name', (['name'], {}), '(name)\n', (2164, 2170), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((2424, 2450), 'pyshader._types.type_from_name', 'types.type_from_name', (['name'], {}), '(name)\n', (2444, 2450), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((2907, 2933), 'pyshader._types.type_from_name', 'types.type_from_name', (['name'], {}), '(name)\n', (2927, 2933), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((3537, 3563), 'pyshader._types.type_from_name', 'types.type_from_name', (['name'], {}), '(name)\n', (3557, 3563), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((4405, 4430), 'pyshader._types.Array', 'types.Array', (['(2)', 'types.f32'], {}), '(2, types.f32)\n', (4416, 4430), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((4440, 4483), 'pyshader._types.Struct', 'types.Struct', ([], {'foo': 'types.f32', 'bar': 'types.vec2'}), '(foo=types.f32, bar=types.vec2)\n', (4452, 4483), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((5307, 5338), 'pyshader.shadertype_as_ctype', 'shadertype_as_ctype', (['shadertype'], {}), '(shadertype)\n', (5326, 5338), False, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((345, 377), 'pyshader._types.type_from_name', 'types.type_from_name', (['t.__name__'], {}), '(t.__name__)\n', (365, 377), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((515, 547), 'pyshader._types.type_from_name', 'types.type_from_name', (['t.__name__'], {}), '(t.__name__)\n', (535, 547), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((682, 714), 'pyshader._types.type_from_name', 'types.type_from_name', (['t.__name__'], {}), '(t.__name__)\n', (702, 714), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((989, 1021), 'pyshader._types.type_from_name', 'types.type_from_name', (['t.__name__'], {}), '(t.__name__)\n', (1009, 1021), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((1164, 1196), 'pyshader._types.type_from_name', 'types.type_from_name', (['t.__name__'], {}), '(t.__name__)\n', (1184, 1196), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((1339, 1371), 'pyshader._types.type_from_name', 'types.type_from_name', (['t.__name__'], {}), '(t.__name__)\n', (1359, 1371), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((1650, 1682), 'pyshader._types.type_from_name', 'types.type_from_name', (['t.__name__'], {}), '(t.__name__)\n', (1670, 1682), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((3458, 3480), 'pyshader._types.Array', 'types.Array', (['types.f64'], {}), '(types.f64)\n', (3469, 3480), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((4093, 4113), 'pytest.raises', 'raises', (['RuntimeError'], {}), '(RuntimeError)\n', (4099, 4113), False, 'from pytest import raises\n'), ((4505, 4525), 'pytest.raises', 'raises', (['RuntimeError'], {}), '(RuntimeError)\n', (4511, 4525), False, 'from pytest import raises\n'), ((5209, 5237), 'pyshader._types.Array', 'types.Array', (['(12)', 'types.ivec2'], {}), '(12, types.ivec2)\n', (5220, 5237), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((5354, 5375), 'ctypes.sizeof', 'ctypes.sizeof', (['ctype1'], {}), '(ctype1)\n', (5367, 5375), False, 'import ctypes\n'), ((5379, 5400), 'ctypes.sizeof', 'ctypes.sizeof', (['ctype2'], {}), '(ctype2)\n', (5392, 5400), False, 'import ctypes\n'), ((2741, 2768), 'pyshader._types.Array', 'types.Array', (['(12)', 'types.vec3'], {}), '(12, types.vec3)\n', (2752, 2768), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((3363, 3389), 'pyshader._types.Vector', 'types.Vector', (['(2)', 'types.f32'], {}), '(2, types.f32)\n', (3375, 3389), True, 'from pyshader import _types as types, shadertype_as_ctype\n'), ((3396, 3434), 'pyshader._types.Struct', 'types.Struct', ([], {'x': 'types.i16', 'y': 'types.i16'}), '(x=types.i16, y=types.i16)\n', (3408, 3434), True, 'from pyshader import _types as types, shadertype_as_ctype\n')] |
from typing import Union, List
import pandas as pd
Num = Union[int, float]
def transform_categorical(train_x: pd.DataFrame, test_x: pd.DataFrame, categorical_columns: List[str],
lowest_freq=0.005, one_hot_encode=True):
"""
Transform categorical features in Pandas dataframes, consistently over train and test data.
:param train_x: a Pandas dataframe containing the features of the train set
:param test_x: a Pandas dataframe containing the features of the test set
:param categorical_columns: columns containing the raw categorical feature data
:param lowest_freq: values with less occurences than this frequency will be replace by 'other'
:param one_hot_encode: if True, one-hot-encode the categorical columns and drop the original ones
:return: a tuple (main_df, test_x) containing the transformed train and test sets
"""
for col in categorical_columns:
train_x[col] = train_x[col].fillna('missing').str.lower().str.strip().str.replace('[^a-z0-9 ]', '')
test_x[col] = test_x[col].fillna('missing').str.lower().str.strip().str.replace('[^a-z0-9 ]', '')
min_num_examples = int(train_x[col].count() * lowest_freq)
to_keep = train_x[col].value_counts()[train_x[col].value_counts() >= min_num_examples].keys()
to_keep = set(to_keep) & set(test_x[col])
train_x.loc[~train_x[col].isin(to_keep), col] = 'other'
test_x.loc[~test_x[col].isin(to_keep), col] = 'other'
# Attention: Do not one-hot-encode for catboost
if one_hot_encode:
train_x = pd.concat([train_x, pd.get_dummies(train_x[col], prefix=col)], sort=False, axis=1) \
.drop(col, axis=1)
test_x = pd.concat([test_x, pd.get_dummies(test_x[col], prefix=col)], sort=False, axis=1) \
.drop(col, axis=1)
return train_x, test_x
def transform_numerical(train_x: pd.DataFrame, test_x: pd.DataFrame, numerical_columns: List[Num]):
"""
Transform numerical features in Pandas dataframes, consistently over train and test data.
:param train_x: a Pandas dataframe containing the features of the train set
:param test_x: a Pandas dataframe containing the features of the test set
:param numerical_columns: columns containing the raw numerical feature data
:return: a tuple (main_df, test_x) containing the transformed train and test sets
"""
for col in numerical_columns:
med = train_x[col].median()
train_x[col].fillna(med, inplace=True)
test_x[col].fillna(med, inplace=True)
return train_x, test_x
def transform_sparse_to_boolean(train_x: pd.DataFrame, test_x: pd.DataFrame, sparse_columns: List):
"""
Transform sparse features, containing many NaN or null values, into boolean features. The resulting features
are True if the row has a value and false otherwise. Consistent over train and test data.
:param train_x: a Pandas dataframe containing the features of the train set
:param test_x: a Pandas dataframe containing the features of the test set
:param sparse_columns: columns containing the raw feature data
:return: a tuple (main_df, test_x) containing the transformed train and test sets
"""
for col in sparse_columns:
train_x[col] = train_x[col].notnull().astype('bool')
train_x.rename(index=str, columns={col: f'has_{col}'}, inplace=True)
test_x[col] = test_x[col].notnull().astype('bool')
test_x.rename(index=str, columns={col: f'has_{col}'}, inplace=True)
return train_x, test_x
| [
"pandas.get_dummies"
] | [((1612, 1652), 'pandas.get_dummies', 'pd.get_dummies', (['train_x[col]'], {'prefix': 'col'}), '(train_x[col], prefix=col)\n', (1626, 1652), True, 'import pandas as pd\n'), ((1752, 1791), 'pandas.get_dummies', 'pd.get_dummies', (['test_x[col]'], {'prefix': 'col'}), '(test_x[col], prefix=col)\n', (1766, 1791), True, 'import pandas as pd\n')] |
from configparser import ConfigParser
from pathlib import Path
import os
class ConfigManager:
__config: ConfigParser
def __init__(self):
self.__config = None
def __load(self):
filepath = os.path.join(Path.home(), '.doku.ini')
self.__config = ConfigParser()
if not os.path.isfile(filepath):
raise FileNotFoundError(f'File not found: {filepath}')
self.__config.read(filepath)
def __getitem__(self, key):
if self.__config is None:
self.__load()
try:
return self.__config[key]
except KeyError:
return None
def get(self, section, key, default):
if self.__config is None:
self.__load()
try:
return self.__config[section][key]
except KeyError:
return default
def sections(self):
if self.__config is None:
self.__load()
return self.__config.sections()
| [
"os.path.isfile",
"pathlib.Path.home",
"configparser.ConfigParser"
] | [((283, 297), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (295, 297), False, 'from configparser import ConfigParser\n'), ((232, 243), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (241, 243), False, 'from pathlib import Path\n'), ((313, 337), 'os.path.isfile', 'os.path.isfile', (['filepath'], {}), '(filepath)\n', (327, 337), False, 'import os\n')] |
"""
Get Package Information from a inputted file path
"""
__author__ = '<NAME>'
__twitter__ = '@brencass86'
__version__ = '1.0.0'
import clr
#Sets up new path to ironpython module Library
#See https://docs.python.org/2/library/sys.html
import sys
sys.path.append("C:\Program Files (x86)\IronPython 2.7\Lib")
#Imports operating system interfaces
#See https://docs.python.org/2/library/os.html
import os
#Imports Json module
import json
#Gets all subdirectories and files from a inputted directory
output=[["Name", "Version"]]
for root, dirs, files in os.walk(IN[0]):
for f in files:
if "pkg." in f:
with open(os.path.join(root, f)) as packageFile:
data = json.load(packageFile)
output.append([data['name'],data['version']])
#Output
OUT= output | [
"json.load",
"os.path.join",
"sys.path.append",
"os.walk"
] | [((263, 326), 'sys.path.append', 'sys.path.append', (['"""C:\\\\Program Files (x86)\\\\IronPython 2.7\\\\Lib"""'], {}), "('C:\\\\Program Files (x86)\\\\IronPython 2.7\\\\Lib')\n", (278, 326), False, 'import sys\n'), ((580, 594), 'os.walk', 'os.walk', (['IN[0]'], {}), '(IN[0])\n', (587, 594), False, 'import os\n'), ((698, 720), 'json.load', 'json.load', (['packageFile'], {}), '(packageFile)\n', (707, 720), False, 'import json\n'), ((647, 668), 'os.path.join', 'os.path.join', (['root', 'f'], {}), '(root, f)\n', (659, 668), False, 'import os\n')] |
import requests
import sys
import random
from time import sleep
from collections import deque
from lxml import html
# The class that handles craigslist item monitoring and parsing.
class item_scraper():
# The last_item_ids que stores existing item ids to determine whether an
# item is new.
last_item_ids = deque(maxlen=121)
def __init__(self, monitor, options, should_quit):
log("Created a scraper.")
self.options = options
# If the user specifies an item name instead of a url, requests gets
# the default craigslist and is redirected according to the user's
# location. The resulting url and the item name are used to create
# a craigslist search url.
if monitor[:7] == "https:":
self.monitor_url = monitor
else:
response = requests.get("https://www.craigslist.org")
self.monitor_url = response.url + "/search/sss?query=" + monitor + "&sort=rel"
self.should_quit = should_quit
# Gets initial results of craigslist search and stores all existing
# item ids.
self.update_page()
self.last_item_ids.extend(
int(item.xpath("./@data-pid")[0]) for item in self.get_new_items())
# For testing
# self.last_item_ids.popleft()
self.check_for_new_items()
# Waits the specified amount of seconds in intervals of one second. After
# each second, the should_quit event is checked. If the event is set, the
# wait function returns True scraper main loop is exited. Else, the wait
# function returns False after it has waited out the specified amount of
# seconds and the scraper's main loop continues.
def wait(self, seconds):
for _ in range(seconds):
sleep(1)
if self.should_quit.is_set():
return True
else:
return False
def update_page(self):
self.parsed_html = html.fromstring(requests.get(self.monitor_url).text)
def check_for_new_items(self):
while True:
self.update_page()
# All unprocessed item ids are stored in relation to the html that
# contains their data.
new_items = self.get_new_items()
new_item = next(new_items)
new_item_id = int(new_item.xpath("./@data-pid")[0])
# If the scraper finds a new item id and the item's properties
# comply to the user's options, the scraper logs the item's details
# and proceeds to check the next item.
while new_item_id not in self.last_item_ids:
self.last_item_ids.appendleft(new_item_id)
properties = self.parse_item(new_item)
if self.complies_to_options(properties):
log("Found new item-", properties)
new_item = next(new_items)
new_item_id = int(new_item.xpath("./@data-pid")[0])
# If the scraper does not receive a signal to quit, it proceeds to
# check for new items.
if self.wait(
random.randrange(self.options["refresh"][0],
self.options["refresh"][1])):
log("Stopped scraper.")
log_file.close()
break
# Returns a generator that upon each iteration returns a new item from the
# craigslist html.
def get_new_items(self):
return (
item
for item in self.parsed_html.xpath("//li[@class=\"result-row\"]"))
# Checks to see if the item's properties (name and renewal status) comply
# to the user's settings.
def complies_to_options(self, properties):
if self.options["exclude_words"]:
for word in properties["name"].split():
if word.lower() in self.options["exclude_words"]:
return False
if not self.options["renewals"] and properties["is_renewal"]:
return False
return True
# Parses craigslist html of a specific item for its properties.
def parse_item(self, item):
link = item.xpath("./a/@href")[0]
name = item.xpath("(.//a)[2]/text()")[0]
time = item.xpath(".//time/@title")[0]
is_renewal = bool(item.xpath("./@data-repost-of"))
price = item.xpath("(.//span[@class=\"result-price\"])[1]/text()")
price = price[0] if price else "Price not listed"
location = item.xpath(".//span[@class=\"result-hood\"]/text()")
location = location[0] if location else "Location not listed"
properties = {
"name": name,
"price": price,
"location": location,
"time": time,
"link": link,
"is_renewal": is_renewal
}
return properties
# Flushes the stdout of the scraper (a file) so that output is live.
def log(*args):
print(*args)
log_file.flush()
# Interface for creating the scraper.
def create_scraper(monitor, renewals, exclude_words, should_quit, output,
time_refresh):
import os
global log_file
# Uses the specified file output for the stdout and stderr of the scraper.
log_file = open(os.path.join(os.getcwd(), output), "w+")
sys.stdout = log_file
sys.stderr = log_file
options = {
"renewals": renewals,
"exclude_words": exclude_words,
"refresh": time_refresh
}
item_scraper(monitor, options, should_quit)
| [
"collections.deque",
"random.randrange",
"time.sleep",
"requests.get",
"os.getcwd"
] | [((322, 339), 'collections.deque', 'deque', ([], {'maxlen': '(121)'}), '(maxlen=121)\n', (327, 339), False, 'from collections import deque\n'), ((836, 878), 'requests.get', 'requests.get', (['"""https://www.craigslist.org"""'], {}), "('https://www.craigslist.org')\n", (848, 878), False, 'import requests\n'), ((1784, 1792), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (1789, 1792), False, 'from time import sleep\n'), ((5273, 5284), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5282, 5284), False, 'import os\n'), ((1973, 2003), 'requests.get', 'requests.get', (['self.monitor_url'], {}), '(self.monitor_url)\n', (1985, 2003), False, 'import requests\n'), ((3124, 3196), 'random.randrange', 'random.randrange', (["self.options['refresh'][0]", "self.options['refresh'][1]"], {}), "(self.options['refresh'][0], self.options['refresh'][1])\n", (3140, 3196), False, 'import random\n')] |
import unittest
import sys
sys.path.append('../src')
import core
class TestCase(unittest.TestCase):
def test(self):
self.assertEqual(core.absolute(10), 10)
if __name__ == '__main__':
unittest.main()
### test code goes here
| [
"unittest.main",
"sys.path.append",
"core.absolute"
] | [((27, 52), 'sys.path.append', 'sys.path.append', (['"""../src"""'], {}), "('../src')\n", (42, 52), False, 'import sys\n'), ((201, 216), 'unittest.main', 'unittest.main', ([], {}), '()\n', (214, 216), False, 'import unittest\n'), ((146, 163), 'core.absolute', 'core.absolute', (['(10)'], {}), '(10)\n', (159, 163), False, 'import core\n')] |
from aiohttp import web
app = web.Application()
async def index(request):
return web.Response(text="Hello World!!")
app.router.add_get('/', index)
if __name__ == '__main__':
web.run_app(app, host='0.0.0.0', port=8888) | [
"aiohttp.web.run_app",
"aiohttp.web.Response",
"aiohttp.web.Application"
] | [((30, 47), 'aiohttp.web.Application', 'web.Application', ([], {}), '()\n', (45, 47), False, 'from aiohttp import web\n'), ((86, 120), 'aiohttp.web.Response', 'web.Response', ([], {'text': '"""Hello World!!"""'}), "(text='Hello World!!')\n", (98, 120), False, 'from aiohttp import web\n'), ((185, 228), 'aiohttp.web.run_app', 'web.run_app', (['app'], {'host': '"""0.0.0.0"""', 'port': '(8888)'}), "(app, host='0.0.0.0', port=8888)\n", (196, 228), False, 'from aiohttp import web\n')] |
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
# Create Class to create data and create batches
class TimeSeriesData():
def __init__(self, num_points, xmin, xmax): # Creating data, a sinus function
self.xmin = xmin
self.xmax = xmax
self.num_points = num_points
self.resolution = (xmax-xmin)/num_points
self.x_data = np.linspace(xmin, xmax, num_points)
self.y_true = np.sin(self.x_data)
def ret_true(self, x_series): # Convinience Function
return np.sin(x_series)
def next_batch(self, batch_size, steps, return_batch_ts=False): # Generating batches from this data
# Grab random starting point for each batch
rand_start = np.random.rand(batch_size, 1)
# Convert to be on time series
ts_start = rand_start * (self.xmax - self.xmin - (steps*self.resolution))
# Create batch time series on the x-axis
batch_ts = ts_start + np.arange(0.0, steps+1) * self.resolution
# Create the Y data for the time series x-axis from prev step
y_batch = np.sin(batch_ts)
# Formatting for RNN
if return_batch_ts:
return y_batch[:, :-1].reshape(-1, steps, 1) , y_batch[:, 1:].reshape(-1, steps, 1), batch_ts
else:
return y_batch[:, :-1].reshape(-1, steps, 1) , y_batch[:, 1:].reshape(-1, steps, 1) # Everything along the rows and everything along the column -1
# Let's create some data
ts_data = TimeSeriesData(250, 0, 10) #250 points between 0 and 10
plt.plot(ts_data.x_data, ts_data.y_true)
# Creating random batches
num_time_steps = 30
y1, y2, ts = ts_data.next_batch(1, num_time_steps, True) # 1 Batch, 30 steps
plt.plot(ts.flatten()[1:], y2.flatten(), '*')
plt.plot(ts_data.x_data, ts_data.y_true, label='Sin(t)')
plt.plot(ts.flatten()[1:], y2.flatten(), '*', label='Single Training Instance')
plt.legend()
plt.tight_layout()
plt.show()
# Training data
# Training instance
train_inst = np.linspace(5, 5 + ts_data.resolution * (num_time_steps+1), num_time_steps+1 )
plt.title('A training instance')
plt.plot(train_inst[:-1], ts_data.ret_true(train_inst[:-1]), 'bo', markersize=15, alpha=0.5, label='Instance')
plt.plot(train_inst[1:], ts_data.ret_true(train_inst[1:]), 'ko', markersize=7, label='Target')
plt.show()
tf.reset_default_graph()
# Constants
# Just one feature, the time series
num_inputs = 1
# 100 neuron layer, play with this
num_neurons = 100
# Just one output, predicted time series
num_outputs = 1
# learning rate, 0.0001 default, but you can play with this
learning_rate = 0.0001
# how many iterations to go through (training steps), you can play with this
num_train_iterations = 2000
# Size of the batch of data
batch_size = 1
# Placeholders
X = tf.placeholder(tf.float32, [None, num_time_steps, num_inputs])
y = tf.placeholder(tf.float32, [None, num_time_steps, num_inputs])
# RNN Cell Layer
cell = tf.contrib.rnn.OutputProjectionWrapper(
tf.contrib.rnn.BasicRNNCell(
num_units=num_neurons,
activation=tf.nn.relu),
output_size=num_outputs
)
outputs, states = tf.nn.dynamic_rnn(cell, X, dtype=tf.float32)
# Loss Function
## MSE
loss = tf.reduce_mean(tf.square(outputs - y))
# Optimizer
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
# Train
train = optimizer.minimize(loss)
init = tf.global_variables_initializer()
# Session
saver = tf.train.Saver()
with tf.Session() as sess:
sess.run(init)
for iteration in range(num_train_iterations):
X_batch, y_batch = ts_data.next_batch(batch_size, num_time_steps)
sess.run(train, feed_dict={X:X_batch, y:y_batch})
if iteration % 100 == 0:
mse = loss.eval(feed_dict={X:X_batch, y:y_batch})
print(iteration, ' \tMSE', mse)
saver.save(sess, './rnn_time_series_model')
with tf.Session() as sess:
saver.restore(sess, './rnn_time_series_model')
X_new = np.sin(np.array(train_inst[:-1].reshape(-1, num_time_steps, num_inputs)))
y_pred = sess.run(outputs, feed_dict={X:X_new})
plt.title('Testing the model')
## Training instance
plt.plot(
train_inst[:-1],
np.sin(train_inst[:-1]),
'bo',
markersize=15,
alpha=0.5,
label='Training Instance'
)
## Target to predict (correct test values np.sin(train))
plt.plot(
train_inst[1:],
np.sin(train_inst[1:]),
'ko',
markersize=10,
label='Target'
)
## Models prediction
plt.plot(
train_inst[1:],
y_pred[0, :, 0],
'r.',
markersize=10,
label='Predictions'
)
plt.xlabel('Time')
plt.legend()
plt.tight_layout()
plt.show()
| [
"numpy.random.rand",
"numpy.sin",
"numpy.arange",
"tensorflow.placeholder",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.xlabel",
"tensorflow.Session",
"tensorflow.nn.dynamic_rnn",
"numpy.linspace",
"tensorflow.square",
"tensorflow.train.AdamOptimizer",
"matplotlib.pyplot.title",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show",
"tensorflow.reset_default_graph",
"tensorflow.train.Saver",
"tensorflow.contrib.rnn.BasicRNNCell",
"tensorflow.global_variables_initializer",
"matplotlib.pyplot.tight_layout"
] | [((1556, 1596), 'matplotlib.pyplot.plot', 'plt.plot', (['ts_data.x_data', 'ts_data.y_true'], {}), '(ts_data.x_data, ts_data.y_true)\n', (1564, 1596), True, 'import matplotlib.pyplot as plt\n'), ((1772, 1828), 'matplotlib.pyplot.plot', 'plt.plot', (['ts_data.x_data', 'ts_data.y_true'], {'label': '"""Sin(t)"""'}), "(ts_data.x_data, ts_data.y_true, label='Sin(t)')\n", (1780, 1828), True, 'import matplotlib.pyplot as plt\n'), ((1909, 1921), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (1919, 1921), True, 'import matplotlib.pyplot as plt\n'), ((1922, 1940), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1938, 1940), True, 'import matplotlib.pyplot as plt\n'), ((1941, 1951), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1949, 1951), True, 'import matplotlib.pyplot as plt\n'), ((2005, 2091), 'numpy.linspace', 'np.linspace', (['(5)', '(5 + ts_data.resolution * (num_time_steps + 1))', '(num_time_steps + 1)'], {}), '(5, 5 + ts_data.resolution * (num_time_steps + 1), \n num_time_steps + 1)\n', (2016, 2091), True, 'import numpy as np\n'), ((2085, 2117), 'matplotlib.pyplot.title', 'plt.title', (['"""A training instance"""'], {}), "('A training instance')\n", (2094, 2117), True, 'import matplotlib.pyplot as plt\n'), ((2325, 2335), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2333, 2335), True, 'import matplotlib.pyplot as plt\n'), ((2337, 2361), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (2359, 2361), True, 'import tensorflow as tf\n'), ((2791, 2853), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, num_time_steps, num_inputs]'], {}), '(tf.float32, [None, num_time_steps, num_inputs])\n', (2805, 2853), True, 'import tensorflow as tf\n'), ((2858, 2920), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, num_time_steps, num_inputs]'], {}), '(tf.float32, [None, num_time_steps, num_inputs])\n', (2872, 2920), True, 'import tensorflow as tf\n'), ((3134, 3178), 'tensorflow.nn.dynamic_rnn', 'tf.nn.dynamic_rnn', (['cell', 'X'], {'dtype': 'tf.float32'}), '(cell, X, dtype=tf.float32)\n', (3151, 3178), True, 'import tensorflow as tf\n'), ((3278, 3329), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': 'learning_rate'}), '(learning_rate=learning_rate)\n', (3300, 3329), True, 'import tensorflow as tf\n'), ((3381, 3414), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (3412, 3414), True, 'import tensorflow as tf\n'), ((3436, 3452), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (3450, 3452), True, 'import tensorflow as tf\n'), ((4117, 4147), 'matplotlib.pyplot.title', 'plt.title', (['"""Testing the model"""'], {}), "('Testing the model')\n", (4126, 4147), True, 'import matplotlib.pyplot as plt\n'), ((4502, 4590), 'matplotlib.pyplot.plot', 'plt.plot', (['train_inst[1:]', 'y_pred[0, :, 0]', '"""r."""'], {'markersize': '(10)', 'label': '"""Predictions"""'}), "(train_inst[1:], y_pred[0, :, 0], 'r.', markersize=10, label=\n 'Predictions')\n", (4510, 4590), True, 'import matplotlib.pyplot as plt\n'), ((4610, 4628), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time"""'], {}), "('Time')\n", (4620, 4628), True, 'import matplotlib.pyplot as plt\n'), ((4629, 4641), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4639, 4641), True, 'import matplotlib.pyplot as plt\n'), ((4642, 4660), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (4658, 4660), True, 'import matplotlib.pyplot as plt\n'), ((4661, 4671), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4669, 4671), True, 'import matplotlib.pyplot as plt\n'), ((2992, 3065), 'tensorflow.contrib.rnn.BasicRNNCell', 'tf.contrib.rnn.BasicRNNCell', ([], {'num_units': 'num_neurons', 'activation': 'tf.nn.relu'}), '(num_units=num_neurons, activation=tf.nn.relu)\n', (3019, 3065), True, 'import tensorflow as tf\n'), ((3227, 3249), 'tensorflow.square', 'tf.square', (['(outputs - y)'], {}), '(outputs - y)\n', (3236, 3249), True, 'import tensorflow as tf\n'), ((3459, 3471), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (3469, 3471), True, 'import tensorflow as tf\n'), ((3903, 3915), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (3913, 3915), True, 'import tensorflow as tf\n'), ((4206, 4229), 'numpy.sin', 'np.sin', (['train_inst[:-1]'], {}), '(train_inst[:-1])\n', (4212, 4229), True, 'import numpy as np\n'), ((4403, 4425), 'numpy.sin', 'np.sin', (['train_inst[1:]'], {}), '(train_inst[1:])\n', (4409, 4425), True, 'import numpy as np\n'), ((393, 428), 'numpy.linspace', 'np.linspace', (['xmin', 'xmax', 'num_points'], {}), '(xmin, xmax, num_points)\n', (404, 428), True, 'import numpy as np\n'), ((451, 470), 'numpy.sin', 'np.sin', (['self.x_data'], {}), '(self.x_data)\n', (457, 470), True, 'import numpy as np\n'), ((548, 564), 'numpy.sin', 'np.sin', (['x_series'], {}), '(x_series)\n', (554, 564), True, 'import numpy as np\n'), ((744, 773), 'numpy.random.rand', 'np.random.rand', (['batch_size', '(1)'], {}), '(batch_size, 1)\n', (758, 773), True, 'import numpy as np\n'), ((1107, 1123), 'numpy.sin', 'np.sin', (['batch_ts'], {}), '(batch_ts)\n', (1113, 1123), True, 'import numpy as np\n'), ((976, 1001), 'numpy.arange', 'np.arange', (['(0.0)', '(steps + 1)'], {}), '(0.0, steps + 1)\n', (985, 1001), True, 'import numpy as np\n')] |
#!/usr/bin/env python
import os
import rospkg
import roslib, rospy
from datetime import datetime
from snc_sensors_publisher.msg import SnCSensorsMsg
from std_msgs.msg import Bool
queue = []
status_queue = []
chair_on_spot = True
use_ble = False
def init():
global use_ble, chair_on_spot
rospy.init_node('snc_events_wrapper')
topic = rospy.get_param("~events_topic", "/snc_sensors/events")
tv_chair_topic = rospy.get_param("~tv_chair_topic", "room_status_publisher/tv_chair")
use_ble = rospy.get_param("~use_ble", False)
if use_ble:
chair_on_spot = False
rospy.Subscriber(tv_chair_topic, Bool, tv_chair_callback)
rospy.Subscriber(topic, SnCSensorsMsg, eventCallback)
while not rospy.is_shutdown():
rospy.spin()
def tv_chair_callback(msg):
global chair_on_spot
chair_on_spot = msg.data
def eventCallback(msg):
global chair_on_spot, queue, status_queue
for event in msg.sensors:
dt = datetime.now()
if event.name == 'No activity in room':
if not ('No activity in room' in queue) or not (event.status in status_queue):
rospack = rospkg.RosPack()
filename = 'official_log_presence_'+datetime.today().strftime("%d-%m-%Y")+'_'+dt.strftime("%H%M%S%f")+'.csv'
logs_path = rospack.get_path('snc_events_wrapper') + '/logs/' + filename
queue.append(event.name)
status_queue.append(event.status)
with open(logs_path,'ab+') as f:
f.write("No presence in the room timestamp\n")
f.write(event.status+'\n')
elif event.name == 'Going out the room':
if not ('Going out the room' in queue) or not (event.status in status_queue):
rospack = rospkg.RosPack()
filename = 'official_log_going_'+datetime.today().strftime("%d-%m-%Y")+'_'+dt.strftime("%H%M%S%f")+'.csv'
logs_path = rospack.get_path('snc_events_wrapper') + '/logs/' + filename
queue.append(event.name)
status_queue.append(event.status)
with open(logs_path,'ab+') as f:
f.write("Going out of the room timestamp\n")
f.write(event.status+'\n')
elif event.name == 'Going into the room':
if not ('Going into the room' in queue) or not (event.status in status_queue):
rospack = rospkg.RosPack()
filename = 'official_log_coming_'+datetime.today().strftime("%d-%m-%Y")+'_'+dt.strftime("%H%M%S%f")+'.csv'
logs_path = rospack.get_path('snc_events_wrapper') + '/logs/' + filename
queue.append(event.name)
status_queue.append(event.status)
with open(logs_path,'ab+') as f:
f.write("Coming in the room timestamp\n")
f.write(event.status+'\n')
elif ('TV watching' in event.name):
if chair_on_spot:
if not (('TV watching on chair' in queue or 'TV watching on sofa' in queue)) or not (event.status in status_queue):
rospack = rospkg.RosPack()
filename = 'official_log_tv_'+datetime.today().strftime("%d-%m-%Y")+'_'+dt.strftime("%H%M%S%f")+'.csv'
logs_path = rospack.get_path('snc_events_wrapper') + '/logs/' + filename
queue.append(event.name)
status_queue.append(event.status)
with open(logs_path,'ab+') as f:
f.write("Watching TV timestamp\n")
f.write(event.status+'\n')
elif event.name == 'Finish cooking':
if not ('Finish cooking' in queue) or not (event.status in status_queue):
rospack = rospkg.RosPack()
filename = 'official_log_cooking_'+datetime.today().strftime("%d-%m-%Y")+'_'+dt.strftime("%H%M%S%f")+'.csv'
logs_path = rospack.get_path('snc_events_wrapper') + '/logs/' + filename
queue.append(event.name)
status_queue.append(event.status)
with open(logs_path,'ab+') as f:
f.write("Finished cooking timestamp\n")
f.write(event.status+'\n')
if len(queue) > 10:
del queue[0]
del status_queue[0]
if __name__ == '__main__':
init()
| [
"rospy.is_shutdown",
"rospy.init_node",
"rospy.get_param",
"datetime.datetime.now",
"rospkg.RosPack",
"rospy.spin",
"datetime.datetime.today",
"rospy.Subscriber"
] | [((297, 334), 'rospy.init_node', 'rospy.init_node', (['"""snc_events_wrapper"""'], {}), "('snc_events_wrapper')\n", (312, 334), False, 'import roslib, rospy\n'), ((347, 402), 'rospy.get_param', 'rospy.get_param', (['"""~events_topic"""', '"""/snc_sensors/events"""'], {}), "('~events_topic', '/snc_sensors/events')\n", (362, 402), False, 'import roslib, rospy\n'), ((424, 492), 'rospy.get_param', 'rospy.get_param', (['"""~tv_chair_topic"""', '"""room_status_publisher/tv_chair"""'], {}), "('~tv_chair_topic', 'room_status_publisher/tv_chair')\n", (439, 492), False, 'import roslib, rospy\n'), ((507, 541), 'rospy.get_param', 'rospy.get_param', (['"""~use_ble"""', '(False)'], {}), "('~use_ble', False)\n", (522, 541), False, 'import roslib, rospy\n'), ((658, 711), 'rospy.Subscriber', 'rospy.Subscriber', (['topic', 'SnCSensorsMsg', 'eventCallback'], {}), '(topic, SnCSensorsMsg, eventCallback)\n', (674, 711), False, 'import roslib, rospy\n'), ((596, 653), 'rospy.Subscriber', 'rospy.Subscriber', (['tv_chair_topic', 'Bool', 'tv_chair_callback'], {}), '(tv_chair_topic, Bool, tv_chair_callback)\n', (612, 653), False, 'import roslib, rospy\n'), ((726, 745), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (743, 745), False, 'import roslib, rospy\n'), ((755, 767), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (765, 767), False, 'import roslib, rospy\n'), ((967, 981), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (979, 981), False, 'from datetime import datetime\n'), ((1148, 1164), 'rospkg.RosPack', 'rospkg.RosPack', ([], {}), '()\n', (1162, 1164), False, 'import rospkg\n'), ((1798, 1814), 'rospkg.RosPack', 'rospkg.RosPack', ([], {}), '()\n', (1812, 1814), False, 'import rospkg\n'), ((2445, 2461), 'rospkg.RosPack', 'rospkg.RosPack', ([], {}), '()\n', (2459, 2461), False, 'import rospkg\n'), ((3159, 3175), 'rospkg.RosPack', 'rospkg.RosPack', ([], {}), '()\n', (3173, 3175), False, 'import rospkg\n'), ((3811, 3827), 'rospkg.RosPack', 'rospkg.RosPack', ([], {}), '()\n', (3825, 3827), False, 'import rospkg\n'), ((1217, 1233), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1231, 1233), False, 'from datetime import datetime\n'), ((1864, 1880), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1878, 1880), False, 'from datetime import datetime\n'), ((2512, 2528), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (2526, 2528), False, 'from datetime import datetime\n'), ((3226, 3242), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (3240, 3242), False, 'from datetime import datetime\n'), ((3879, 3895), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (3893, 3895), False, 'from datetime import datetime\n')] |
#!/usr/bin/python
import sys
import logging
#logging.basicConfig(level = logging.DEBUG)
from gi.repository import Vips
a = Vips.Image.black(100, 100)
b = a.bandjoin(2)
b.write_to_file("x.v")
txt = Vips.Image.text("left corner", dpi = 300)
c = txt.ifthenelse(2, [0, 255, 0], blend = True)
c.write_to_file("x2.v")
| [
"gi.repository.Vips.Image.black",
"gi.repository.Vips.Image.text"
] | [((128, 154), 'gi.repository.Vips.Image.black', 'Vips.Image.black', (['(100)', '(100)'], {}), '(100, 100)\n', (144, 154), False, 'from gi.repository import Vips\n'), ((204, 243), 'gi.repository.Vips.Image.text', 'Vips.Image.text', (['"""left corner"""'], {'dpi': '(300)'}), "('left corner', dpi=300)\n", (219, 243), False, 'from gi.repository import Vips\n')] |
#!/usr/bin/env python
# coding: utf-8
import plotly.offline as py
import plotly.graph_objs as go
import numpy as np
import collections as cl
import itertools as it
from tqdm import tqdm
import os
import argparse as ap
def _draw_scatter(all_vocabs, all_freqs, output_prefix):
colors = [(s and t) and (s < t and s / t or t / s) or 0
for s, t in all_freqs]
colors = [c and np.log(c) or 0 for c in colors]
trace = go.Scattergl(
x=[s for s, t in all_freqs],
y=[t for s, t in all_freqs],
mode='markers',
text=all_vocabs,
marker=dict(color=colors, showscale=True, colorscale='Viridis'))
layout = go.Layout(
title='Scatter plot of shared tokens',
hovermode='closest',
xaxis=dict(title='src freq', type='log', autorange=True),
yaxis=dict(title='trg freq', type='log', autorange=True))
fig = go.Figure(data=[trace], layout=layout)
py.plot(
fig, filename='{}_scatter.html'.format(output_prefix), auto_open=False)
def _draw_rate(all_vocabs, all_freqs, output_prefix):
biases = np.array(
[(s and t) and (s / t if s > t else t / s) or 0 for s, t in all_freqs])
freqs = np.array([s + t for s, t in all_freqs])
hist, bin_edges = np.histogram(
biases[biases > 0], weights=freqs[biases > 0], bins=int(max(biases)))
bin_centers = bin_edges[:-1]
t1 = go.Scatter(
x=bin_centers,
y=hist,
name='num of tokens',
mode='lines',
fill='tozeroy')
share_token_rates = np.cumsum(hist) / sum(freqs)
t2 = go.Scatter(
x=bin_centers,
y=share_token_rates,
name='share token rates',
mode='lines',
yaxis='y2')
layout = go.Layout(
title='Shared tokens rates',
xaxis=dict(title='bias', autorange=True),
yaxis=dict(title='num of tokens', type='log', autorange=True),
yaxis2=dict(
title='accumlative share token rates',
autorange=True,
side='right',
overlaying='y'))
fig = go.Figure(data=[t1, t2], layout=layout)
py.plot(
fig, filename='{}_rate.html'.format(output_prefix), auto_open=False)
def main(args):
src_freqs = cl.Counter(
w for l in tqdm(
open(args.src_fname),
desc='gen vocab from {}'.format(os.path.basename(args.src_fname)))
for w in l.strip().split())
trg_freqs = cl.Counter(
w for l in tqdm(
open(args.trg_fname),
desc='gen vocab from {}'.format(os.path.basename(args.trg_fname)))
for w in l.strip().split())
if len(src_freqs) * len(trg_freqs) == 0:
return
all_vocabs = list(src_freqs.keys() | trg_freqs.keys())
all_freqs = [(src_freqs.get(v, 0), trg_freqs.get(v, 0))
for v in all_vocabs]
if args.type == 'scatter':
_draw_scatter(all_vocabs, all_freqs, args.output_prefix)
elif args.type == 'rate':
_draw_rate(all_vocabs, all_freqs, args.output_prefix)
elif args.type == 'both':
_draw_rate(all_vocabs, all_freqs, args.output_prefix)
_draw_scatter(all_vocabs, all_freqs, args.output_prefix)
if __name__ == '__main__':
draw_parser = ap.ArgumentParser()
draw_parser.add_argument(
'src_fname', type=str, help='the source file name.')
draw_parser.add_argument(
'trg_fname', type=str, help='the target file name')
draw_parser.add_argument(
'--type',
type=str,
choices=['scatter', 'rate', 'both'],
help='whether to only draw shared tokens')
draw_parser.add_argument(
'--output_prefix', default='pref', help='output prefix.')
args = draw_parser.parse_args()
main(args)
| [
"argparse.ArgumentParser",
"numpy.log",
"plotly.graph_objs.Scatter",
"numpy.array",
"numpy.cumsum",
"os.path.basename",
"plotly.graph_objs.Figure"
] | [((893, 931), 'plotly.graph_objs.Figure', 'go.Figure', ([], {'data': '[trace]', 'layout': 'layout'}), '(data=[trace], layout=layout)\n', (902, 931), True, 'import plotly.graph_objs as go\n'), ((1094, 1180), 'numpy.array', 'np.array', (['[((s and t) and (s / t if s > t else t / s) or 0) for s, t in all_freqs]'], {}), '([((s and t) and (s / t if s > t else t / s) or 0) for s, t in\n all_freqs])\n', (1102, 1180), True, 'import numpy as np\n'), ((1196, 1237), 'numpy.array', 'np.array', (['[(s + t) for s, t in all_freqs]'], {}), '([(s + t) for s, t in all_freqs])\n', (1204, 1237), True, 'import numpy as np\n'), ((1394, 1484), 'plotly.graph_objs.Scatter', 'go.Scatter', ([], {'x': 'bin_centers', 'y': 'hist', 'name': '"""num of tokens"""', 'mode': '"""lines"""', 'fill': '"""tozeroy"""'}), "(x=bin_centers, y=hist, name='num of tokens', mode='lines', fill=\n 'tozeroy')\n", (1404, 1484), True, 'import plotly.graph_objs as go\n'), ((1585, 1687), 'plotly.graph_objs.Scatter', 'go.Scatter', ([], {'x': 'bin_centers', 'y': 'share_token_rates', 'name': '"""share token rates"""', 'mode': '"""lines"""', 'yaxis': '"""y2"""'}), "(x=bin_centers, y=share_token_rates, name='share token rates',\n mode='lines', yaxis='y2')\n", (1595, 1687), True, 'import plotly.graph_objs as go\n'), ((2073, 2112), 'plotly.graph_objs.Figure', 'go.Figure', ([], {'data': '[t1, t2]', 'layout': 'layout'}), '(data=[t1, t2], layout=layout)\n', (2082, 2112), True, 'import plotly.graph_objs as go\n'), ((3242, 3261), 'argparse.ArgumentParser', 'ap.ArgumentParser', ([], {}), '()\n', (3259, 3261), True, 'import argparse as ap\n'), ((1546, 1561), 'numpy.cumsum', 'np.cumsum', (['hist'], {}), '(hist)\n', (1555, 1561), True, 'import numpy as np\n'), ((396, 405), 'numpy.log', 'np.log', (['c'], {}), '(c)\n', (402, 405), True, 'import numpy as np\n'), ((2352, 2384), 'os.path.basename', 'os.path.basename', (['args.src_fname'], {}), '(args.src_fname)\n', (2368, 2384), False, 'import os\n'), ((2555, 2587), 'os.path.basename', 'os.path.basename', (['args.trg_fname'], {}), '(args.trg_fname)\n', (2571, 2587), False, 'import os\n')] |
from sqlalchemy import Table, MetaData, create_engine
def inc(x):
try:
db = create_engine(x)
db.execute("Select NOW()")
return True
except:
return False
def test_answer():
assert inc('postgresql+psycopg2://postgres:integra@localhost:5432/postgres') == True | [
"sqlalchemy.create_engine"
] | [((89, 105), 'sqlalchemy.create_engine', 'create_engine', (['x'], {}), '(x)\n', (102, 105), False, 'from sqlalchemy import Table, MetaData, create_engine\n')] |
import socket
from threading import Thread,Lock
from queue import Queue
from sys import argv
from sys import stdin
from argparse import ArgumentParser
import select
def printv(s):
if args.v:
print(f'\x1B[1;34m{s}\x1B[0m')
def read(fd):
msg = b''
try:
while not msg.endswith(b'\r\n\r\n'):
buf = fd.recv(1)
if len(buf) == 0:
return None
msg+=buf
except (ConnectionResetError, socket.timeout) as e:
return None
printv(f"{msg.decode('utf-8')[:-4]}")
return msg
def listen(address):
s = None
for res in socket.getaddrinfo(address[0], address[1], socket.AF_UNSPEC, \
socket.SOCK_STREAM, 0, socket.AI_PASSIVE):
af, socktype, proto, canonname, sa = res
try:
s = socket.socket(af, socktype, proto)
except OSError as msg:
s = None
continue
try:
s.bind(sa)
s.listen(1)
except OSError as msg:
s.close()
s = None
continue
break
return s
def me2u(fd, cmd):
fd.sendall(b'U2EM\r\n\r\n')
printv("U2EM")
def iam(fd,cmd):
name = cmd.split('\r\n\r\n')[0]
with lock:
if name in fds:
fd.sendall(b'ETAKEN\r\n\r\n')
printv("ETAKEN")
fd.shutdown(socket.SHUT_WR)
return
fds[name] = fd
users[fd] = name
fd.sendall(b'MAI\r\n\r\n')
printv("MAI")
fd.sendall(f'MOTD {MOTD}\r\n\r\n'.encode())
printv(f"MOTD {MOTD}")
def send_utsil(fd, msg):
with lock:
ul = ' '.join(fds.keys())
send_msg = f'UTSIL {ul}\r\n\r\n'
fd.sendall(send_msg.encode())
printv(f"{send_msg[:-4]}")
return
def send_ot(readfd, msg):
receiver_name = msg.split('\r\n\r\n')[0]
with lock:
if(receiver_name not in fds):
print('Garbage')
return
fd = fds[receiver_name]
sender_name = users[readfd]
fd.sendall(f'OT {sender_name}\r\n\r\n'.encode())
printv(f"OT {sender_name}")
return
def send_from(readfd, msg):
receiver_name, msg = msg.split(' ', 1)
with lock:
if(receiver_name not in fds):
readfd.sendall(f'EDNE {receiver_name}\r\n\r\n'.encode())
printv(f"EDNE {receiver_name}")
return
fd = fds[receiver_name]
sender_name = users[readfd]
fd.sendall(f'FROM {sender_name} {msg}'.encode()) #msg already has /r/n/r/n
printv(f"FROM {sender_name} {msg[:-4]}")
return
def send_off(fd, msg):
with lock:
sender_name = users[fd]
fd.sendall(b'EYB\r\n\r\n')
del users[fd]
del fds[sender_name]
for user in users:
user.sendall(f'UOFF {sender_name}\r\n\r\n'.encode())
printv(f"UOFF {sender_name} {msg}")
return
def shutdown():
for fd in users:
if cmd == b'SHUTDOWN':
thread_exit();
fd.close()
for n in range(len(threads)):
job_queue.put((-1,b''))
for t in threads:
t.join()
exit(0)
def thread_exit():
exit(0)
def list_user():
print('Online Users:')
for user in fds:
print(user)
def display_help():
print('''/users: Dumps a list of currently logged in users to stdout.
/shutdown: Cleanly shuts the server down by disconnecting all connected users, closing all open file descriptors, and freeing any allocated memory.''')
def handle():
while(1):
fd, msg = job_queue.get()
if fd == -1:
thread_exit()
if(' ' in msg):
cmd, tail = msg.split(' ',1)
else:
cmd = msg.split('\r\n\r\n')[0]
tail = ''
if cmd in socket_handlers:
socket_handlers[cmd](fd, tail)
else:
fd.shutdown(socket.SHUT_WR)
if __name__ == '__main__':
global login_queue
global job_queue
global users
global fds
global socket_handlers
global epoll
job_queue = Queue()
login_queue = Queue()
lock = Lock()
users = {}
fds = {}
parser = ArgumentParser(description="ME2U Server")
parser.add_argument('-v',action='store_true',help='Logs client server communication')
parser.add_argument('port',metavar='PORT', help='Port number to listen on',default='8080')
parser.add_argument('n', metavar='NUM WORKERS',help='Number of worker threads to spawn', default=5, type=int)
parser.add_argument('motd', metavar='MOTD',help='Message of the Day to display', default='Welcome')
parser.add_argument('addr', metavar='ADDR',nargs='?',help='Address to listen on',default='localhost')
args = parser.parse_args()
MOTD = args.motd
MAX_EVENTS=10
n_workers= args.n
socket_handlers = {
'LISTU': send_utsil,
'TO': send_from,
'MORF': send_ot,
'BYE': send_off,
'ME2U': me2u,
'IAM': iam
}
stdin_handlers = {
'/shutdown': shutdown,
'/users': list_user,
'/help': display_help
}
threads = []
s = listen((args.addr,args.port))
if not s:
print('error in listen')
exit(1)
for i in range(n_workers):
t = Thread(target=handle)
t.start()
threads.append(t)
epoll = select.epoll()
epoll.register(s.fileno())
epoll.register(stdin.fileno(), select.EPOLLIN)
connections = {}
while 1:
l = epoll.poll(timeout=10, maxevents=1)
for fd, event in l:
if fd == s.fileno():
(clientsocket, address) = s.accept()
clientsocket.settimeout(5)
connections[clientsocket.fileno()] = clientsocket;
epoll.register(clientsocket.fileno(), select.EPOLLIN)
elif event & select.EPOLLIN:
if fd == stdin.fileno():
cmd = input().strip()
stdin_handlers[cmd]() if cmd in stdin_handlers \
else print('invalid command')
else:
with lock:
readfd = connections[fd]
msg = read(readfd)
if not msg:
with lock:
if readfd in users:
user = users[readfd]
del fds[user]
del users[readfd]
for u in users:
u.sendall(f'UOFF {user}\r\n\r\n'.encode())
printv(f"UOFF {user}")
readfd.close()
del connections[fd]
epoll.unregister(fd)
continue
msg = msg.decode()
job_queue.put((readfd, msg))
| [
"sys.stdin.fileno",
"argparse.ArgumentParser",
"socket.socket",
"threading.Lock",
"select.epoll",
"socket.getaddrinfo",
"threading.Thread",
"queue.Queue"
] | [((611, 718), 'socket.getaddrinfo', 'socket.getaddrinfo', (['address[0]', 'address[1]', 'socket.AF_UNSPEC', 'socket.SOCK_STREAM', '(0)', 'socket.AI_PASSIVE'], {}), '(address[0], address[1], socket.AF_UNSPEC, socket.\n SOCK_STREAM, 0, socket.AI_PASSIVE)\n', (629, 718), False, 'import socket\n'), ((4021, 4028), 'queue.Queue', 'Queue', ([], {}), '()\n', (4026, 4028), False, 'from queue import Queue\n'), ((4047, 4054), 'queue.Queue', 'Queue', ([], {}), '()\n', (4052, 4054), False, 'from queue import Queue\n'), ((4066, 4072), 'threading.Lock', 'Lock', ([], {}), '()\n', (4070, 4072), False, 'from threading import Thread, Lock\n'), ((4115, 4156), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""ME2U Server"""'}), "(description='ME2U Server')\n", (4129, 4156), False, 'from argparse import ArgumentParser\n'), ((5350, 5364), 'select.epoll', 'select.epoll', ([], {}), '()\n', (5362, 5364), False, 'import select\n'), ((5271, 5292), 'threading.Thread', 'Thread', ([], {'target': 'handle'}), '(target=handle)\n', (5277, 5292), False, 'from threading import Thread, Lock\n'), ((5415, 5429), 'sys.stdin.fileno', 'stdin.fileno', ([], {}), '()\n', (5427, 5429), False, 'from sys import stdin\n'), ((807, 841), 'socket.socket', 'socket.socket', (['af', 'socktype', 'proto'], {}), '(af, socktype, proto)\n', (820, 841), False, 'import socket\n'), ((5889, 5903), 'sys.stdin.fileno', 'stdin.fileno', ([], {}), '()\n', (5901, 5903), False, 'from sys import stdin\n')] |
def setup_raven(dsn):
from raven import Client
from raven.conf import setup_logging
from raven.handlers.logging import SentryHandler
client = Client(dsn)
handler = SentryHandler(client)
setup_logging(handler)
def foo(m, n):
h = m / n - 10
return m / h
if __name__ == '__main__':
import logging
import os
dsn = os.environ['SENTRY_DSN']
logging.basicConfig()
setup_raven(dsn)
m, n = 10, 1
try:
foo(m, n)
except Exception as e:
logging.exception(e)
| [
"logging.basicConfig",
"logging.exception",
"raven.conf.setup_logging",
"raven.handlers.logging.SentryHandler",
"raven.Client"
] | [((158, 169), 'raven.Client', 'Client', (['dsn'], {}), '(dsn)\n', (164, 169), False, 'from raven import Client\n'), ((184, 205), 'raven.handlers.logging.SentryHandler', 'SentryHandler', (['client'], {}), '(client)\n', (197, 205), False, 'from raven.handlers.logging import SentryHandler\n'), ((210, 232), 'raven.conf.setup_logging', 'setup_logging', (['handler'], {}), '(handler)\n', (223, 232), False, 'from raven.conf import setup_logging\n'), ((386, 407), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (405, 407), False, 'import logging\n'), ((508, 528), 'logging.exception', 'logging.exception', (['e'], {}), '(e)\n', (525, 528), False, 'import logging\n')] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.