index (int64, 0-10k) | blob_id (string, length 40) | step-1 (string, length 13-984k) | step-2 (string, length 6-1.23M, nullable) | step-3 (string, length 15-1.34M, nullable) | step-4 (string, length 30-1.34M, nullable) | step-5 (string, length 64-1.2M, nullable) | step-ids (list, length 1-5) |
---|---|---|---|---|---|---|---|
2,200 |
5055743c9ed8c92bcfab5379162f28315409ff91
|
<mask token>
|
<mask token>
pull_links(artist)
<mask token>
os.remove('./links.json')
shutil.rmtree('./songs')
<mask token>
for song in sentimentScores:
print(song + ': ')
print(sentimentScores[song])
|
<mask token>
artist = sys.argv[1].title()
pull_links(artist)
lyrics = scrape_lyrics('links.json')
os.remove('./links.json')
shutil.rmtree('./songs')
sentimentScores = getSentimentScores(lyrics)
for song in sentimentScores:
print(song + ': ')
print(sentimentScores[song])
|
from pull_links import pull_links
from scrape_lyrics import scrape_lyrics
from vader_sentiment import getSentimentScores
import sys
import os
import shutil
artist = sys.argv[1].title()
pull_links(artist)
lyrics = scrape_lyrics('links.json')
os.remove('./links.json')
shutil.rmtree('./songs')
sentimentScores = getSentimentScores(lyrics)
for song in sentimentScores:
print(song + ': ')
print(sentimentScores[song])
|
from pull_links import pull_links
from scrape_lyrics import scrape_lyrics
from vader_sentiment import getSentimentScores
import sys
import os
import shutil
# Get user input for artist -> capitalize it
artist = sys.argv[1].title()
pull_links(artist)
# Dictionary w/ song name as key and lyrics as value
lyrics = scrape_lyrics('links.json')
os.remove('./links.json')
shutil.rmtree('./songs')
# Dictionary w/ song name as key and sentiment data as value
sentimentScores = getSentimentScores(lyrics)
# Print out sentimentScores
for song in sentimentScores:
print(song + ': ')
print(sentimentScores[song])
|
[
0,
1,
2,
3,
4
] |
2,201 |
8cd290dc1e682222c97172a0f23e5b93c54838a7
|
<mask token>
|
<mask token>
class Migration(migrations.Migration):
<mask token>
<mask token>
|
<mask token>
class Migration(migrations.Migration):
dependencies = [('leasing', '0037_make_lease_basis_of_rent_archivable')]
operations = [migrations.AddField(model_name='invoicepayment', name=
'filing_code', field=models.CharField(blank=True, max_length=35,
null=True, verbose_name='Name'))]
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('leasing', '0037_make_lease_basis_of_rent_archivable')]
operations = [migrations.AddField(model_name='invoicepayment', name=
'filing_code', field=models.CharField(blank=True, max_length=35,
null=True, verbose_name='Name'))]
|
# Generated by Django 2.1.3 on 2019-01-02 12:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('leasing', '0037_make_lease_basis_of_rent_archivable'),
]
operations = [
migrations.AddField(
model_name='invoicepayment',
name='filing_code',
field=models.CharField(blank=True, max_length=35, null=True, verbose_name='Name'),
),
]
|
[
0,
1,
2,
3,
4
] |
2,202 |
6175ce6534d44d703df6cdef94fc2b1285e25f49
|
<mask token>
class testAbRectangularGridBuilder(unittest.TestCase):
def getMockHiResAlphaMtxAndCstCellDet(self, posCellCentroids=None):
class _mockClass:
def __init__(self, posCellCentroids):
self.posCellCentroids = posCellCentroids
self.cell = None
def getAlphaSubMatrix(self, cell):
sm = _mockClass(self.posCellCentroids)
sm.cell = cell
return sm
def _positive(self, cell):
cntrs = self.posCellCentroids
if cell is None or cntrs is None:
return False
else:
for c in cntrs:
if cell.contains(gm.Point([c[0], c[1]])):
return True
return False
def onLand(self):
cell = self.cell
return self._positive(cell)
def isCoastalCell(self, cell, boundary=None, surface=-1):
return self._positive(cell)
return _mockClass(posCellCentroids)
def testGetSeaGridSerial(self):
minx = 100.0
miny = 45.0
dx = 0.5
dy = 1.0
nx = 30
ny = 10
maxx = minx + nx * dx
maxy = miny + ny * dy
landCntrs = [[100.25, 45.25], [105.25, 47.25]]
coastCntrs = [[100.75, 45.25], [105.25, 47.25]]
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny,
nParWorker=1, minXYIsCentroid=False)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet(landCntrs)
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet(coastCntrs)
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertFalse(grd.wrapAroundDateline)
self.assertEqual(1, grd.nParWorker)
cells = grd.cells
self.assertEqual(nx * ny - 3, len(cells))
<mask token>
def testGetNeighborsSerial(self):
minx = 100.0
miny = 45.0
dx = 0.5
dy = 1.0
nx = 30
ny = 10
maxx = minx + nx * dx
maxy = miny + ny * dy
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker=1)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet()
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet()
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertEqual(1, grd.nParWorker)
self.assertFalse(grd.wrapAroundDateline)
cells = grd.cells
cell = cells[0]
ncls = grd.getNeighbors(cell)
self.assertEqual(3, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[45]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[100]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
def testGetNeighborsParallel(self):
minx = 100.0
miny = 45.0
dx = 0.5
dy = 1.0
nx = 30
ny = 10
maxx = minx + nx * dx
maxy = miny + ny * dy
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker=4)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet()
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet()
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertEqual(4, grd.nParWorker)
self.assertFalse(grd.wrapAroundDateline)
cells = grd.cells
cell = cells[0]
ncls = grd.getNeighbors(cell)
self.assertEqual(3, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[45]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[100]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
def testGetNeighborsWrapAroundGlobalParallel(self):
minx = -179.0
miny = -89.0
dx = 2
dy = 2
nx = 180
ny = 90
maxx = minx + nx * dx
maxy = miny + ny * dy
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker=4)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet()
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet()
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertTrue(grd.wrapAroundDateline)
cells = grd.cells
cell = cells[0]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[3].boundary.coords[0][0])
self.assertEqual(-182, ncls[4].boundary.coords[0][0])
cell = cells[45]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[5].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
cell = cells[65]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[5].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
cell = cells[89]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[3].boundary.coords[0][0])
self.assertEqual(-182, ncls[4].boundary.coords[0][0])
cell = cells[200]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[nx * ny - 22]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(182, ncls[5].boundary.coords[1][0])
self.assertEqual(182, ncls[6].boundary.coords[1][0])
self.assertEqual(182, ncls[6].boundary.coords[1][0])
cell = cells[nx * ny - 1]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(182, ncls[3].boundary.coords[1][0])
self.assertEqual(182, ncls[4].boundary.coords[1][0])
<mask token>
|
<mask token>
class testAbRectangularGridBuilder(unittest.TestCase):
def getMockHiResAlphaMtxAndCstCellDet(self, posCellCentroids=None):
class _mockClass:
def __init__(self, posCellCentroids):
self.posCellCentroids = posCellCentroids
self.cell = None
def getAlphaSubMatrix(self, cell):
sm = _mockClass(self.posCellCentroids)
sm.cell = cell
return sm
def _positive(self, cell):
cntrs = self.posCellCentroids
if cell is None or cntrs is None:
return False
else:
for c in cntrs:
if cell.contains(gm.Point([c[0], c[1]])):
return True
return False
def onLand(self):
cell = self.cell
return self._positive(cell)
def isCoastalCell(self, cell, boundary=None, surface=-1):
return self._positive(cell)
return _mockClass(posCellCentroids)
def testGetSeaGridSerial(self):
minx = 100.0
miny = 45.0
dx = 0.5
dy = 1.0
nx = 30
ny = 10
maxx = minx + nx * dx
maxy = miny + ny * dy
landCntrs = [[100.25, 45.25], [105.25, 47.25]]
coastCntrs = [[100.75, 45.25], [105.25, 47.25]]
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny,
nParWorker=1, minXYIsCentroid=False)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet(landCntrs)
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet(coastCntrs)
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertFalse(grd.wrapAroundDateline)
self.assertEqual(1, grd.nParWorker)
cells = grd.cells
self.assertEqual(nx * ny - 3, len(cells))
def testGetSeaGridParallel(self):
minx = 100.0
miny = 45.0
dx = 0.5
dy = 1.0
nx = 30
ny = 10
maxx = minx + nx * dx
maxy = miny + ny * dy
landCntrs = [[100.25, 45.25], [105.25, 47.25]]
coastCntrs = [[100.75, 45.25], [105.25, 47.25]]
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny,
nParWorker=4, minXYIsCentroid=False)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet(landCntrs)
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet(coastCntrs)
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertEqual(4, grd.nParWorker)
self.assertFalse(grd.wrapAroundDateline)
cells = grd.cells
self.assertEqual(nx * ny - 3, len(cells))
def testGetNeighborsSerial(self):
minx = 100.0
miny = 45.0
dx = 0.5
dy = 1.0
nx = 30
ny = 10
maxx = minx + nx * dx
maxy = miny + ny * dy
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker=1)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet()
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet()
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertEqual(1, grd.nParWorker)
self.assertFalse(grd.wrapAroundDateline)
cells = grd.cells
cell = cells[0]
ncls = grd.getNeighbors(cell)
self.assertEqual(3, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[45]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[100]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
def testGetNeighborsParallel(self):
minx = 100.0
miny = 45.0
dx = 0.5
dy = 1.0
nx = 30
ny = 10
maxx = minx + nx * dx
maxy = miny + ny * dy
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker=4)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet()
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet()
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertEqual(4, grd.nParWorker)
self.assertFalse(grd.wrapAroundDateline)
cells = grd.cells
cell = cells[0]
ncls = grd.getNeighbors(cell)
self.assertEqual(3, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[45]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[100]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
def testGetNeighborsWrapAroundGlobalParallel(self):
minx = -179.0
miny = -89.0
dx = 2
dy = 2
nx = 180
ny = 90
maxx = minx + nx * dx
maxy = miny + ny * dy
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker=4)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet()
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet()
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertTrue(grd.wrapAroundDateline)
cells = grd.cells
cell = cells[0]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[3].boundary.coords[0][0])
self.assertEqual(-182, ncls[4].boundary.coords[0][0])
cell = cells[45]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[5].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
cell = cells[65]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[5].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
cell = cells[89]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[3].boundary.coords[0][0])
self.assertEqual(-182, ncls[4].boundary.coords[0][0])
cell = cells[200]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[nx * ny - 22]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(182, ncls[5].boundary.coords[1][0])
self.assertEqual(182, ncls[6].boundary.coords[1][0])
self.assertEqual(182, ncls[6].boundary.coords[1][0])
cell = cells[nx * ny - 1]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(182, ncls[3].boundary.coords[1][0])
self.assertEqual(182, ncls[4].boundary.coords[1][0])
<mask token>
|
<mask token>
class testAbRectangularGridBuilder(unittest.TestCase):
def getMockHiResAlphaMtxAndCstCellDet(self, posCellCentroids=None):
class _mockClass:
def __init__(self, posCellCentroids):
self.posCellCentroids = posCellCentroids
self.cell = None
def getAlphaSubMatrix(self, cell):
sm = _mockClass(self.posCellCentroids)
sm.cell = cell
return sm
def _positive(self, cell):
cntrs = self.posCellCentroids
if cell is None or cntrs is None:
return False
else:
for c in cntrs:
if cell.contains(gm.Point([c[0], c[1]])):
return True
return False
def onLand(self):
cell = self.cell
return self._positive(cell)
def isCoastalCell(self, cell, boundary=None, surface=-1):
return self._positive(cell)
return _mockClass(posCellCentroids)
def testGetSeaGridSerial(self):
minx = 100.0
miny = 45.0
dx = 0.5
dy = 1.0
nx = 30
ny = 10
maxx = minx + nx * dx
maxy = miny + ny * dy
landCntrs = [[100.25, 45.25], [105.25, 47.25]]
coastCntrs = [[100.75, 45.25], [105.25, 47.25]]
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny,
nParWorker=1, minXYIsCentroid=False)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet(landCntrs)
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet(coastCntrs)
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertFalse(grd.wrapAroundDateline)
self.assertEqual(1, grd.nParWorker)
cells = grd.cells
self.assertEqual(nx * ny - 3, len(cells))
def testGetSeaGridParallel(self):
minx = 100.0
miny = 45.0
dx = 0.5
dy = 1.0
nx = 30
ny = 10
maxx = minx + nx * dx
maxy = miny + ny * dy
landCntrs = [[100.25, 45.25], [105.25, 47.25]]
coastCntrs = [[100.75, 45.25], [105.25, 47.25]]
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny,
nParWorker=4, minXYIsCentroid=False)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet(landCntrs)
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet(coastCntrs)
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertEqual(4, grd.nParWorker)
self.assertFalse(grd.wrapAroundDateline)
cells = grd.cells
self.assertEqual(nx * ny - 3, len(cells))
def testGetNeighborsSerial(self):
minx = 100.0
miny = 45.0
dx = 0.5
dy = 1.0
nx = 30
ny = 10
maxx = minx + nx * dx
maxy = miny + ny * dy
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker=1)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet()
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet()
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertEqual(1, grd.nParWorker)
self.assertFalse(grd.wrapAroundDateline)
cells = grd.cells
cell = cells[0]
ncls = grd.getNeighbors(cell)
self.assertEqual(3, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[45]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[100]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
def testGetNeighborsParallel(self):
minx = 100.0
miny = 45.0
dx = 0.5
dy = 1.0
nx = 30
ny = 10
maxx = minx + nx * dx
maxy = miny + ny * dy
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker=4)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet()
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet()
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertEqual(4, grd.nParWorker)
self.assertFalse(grd.wrapAroundDateline)
cells = grd.cells
cell = cells[0]
ncls = grd.getNeighbors(cell)
self.assertEqual(3, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[45]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[100]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
def testGetNeighborsWrapAroundGlobalParallel(self):
minx = -179.0
miny = -89.0
dx = 2
dy = 2
nx = 180
ny = 90
maxx = minx + nx * dx
maxy = miny + ny * dy
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker=4)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet()
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet()
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertTrue(grd.wrapAroundDateline)
cells = grd.cells
cell = cells[0]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[3].boundary.coords[0][0])
self.assertEqual(-182, ncls[4].boundary.coords[0][0])
cell = cells[45]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[5].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
cell = cells[65]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[5].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
cell = cells[89]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[3].boundary.coords[0][0])
self.assertEqual(-182, ncls[4].boundary.coords[0][0])
cell = cells[200]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[nx * ny - 22]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(182, ncls[5].boundary.coords[1][0])
self.assertEqual(182, ncls[6].boundary.coords[1][0])
self.assertEqual(182, ncls[6].boundary.coords[1][0])
cell = cells[nx * ny - 1]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(182, ncls[3].boundary.coords[1][0])
self.assertEqual(182, ncls[4].boundary.coords[1][0])
if __name__ == '__main__':
unittest.main()
|
import unittest
import shapely.geometry as gm
from alphaBetaLab.abRectangularGridBuilder import abRectangularGridBuilder
class testAbRectangularGridBuilder(unittest.TestCase):
def getMockHiResAlphaMtxAndCstCellDet(self, posCellCentroids=None):
class _mockClass:
def __init__(self, posCellCentroids):
self.posCellCentroids = posCellCentroids
self.cell = None
def getAlphaSubMatrix(self, cell):
sm = _mockClass(self.posCellCentroids)
sm.cell = cell
return sm
def _positive(self, cell):
cntrs = self.posCellCentroids
if cell is None or cntrs is None:
return False
else:
for c in cntrs:
if cell.contains(gm.Point([c[0], c[1]])):
return True
return False
def onLand(self):
cell = self.cell
return self._positive(cell)
def isCoastalCell(self, cell, boundary=None, surface=-1):
return self._positive(cell)
return _mockClass(posCellCentroids)
def testGetSeaGridSerial(self):
minx = 100.0
miny = 45.0
dx = 0.5
dy = 1.0
nx = 30
ny = 10
maxx = minx + nx * dx
maxy = miny + ny * dy
landCntrs = [[100.25, 45.25], [105.25, 47.25]]
coastCntrs = [[100.75, 45.25], [105.25, 47.25]]
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny,
nParWorker=1, minXYIsCentroid=False)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet(landCntrs)
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet(coastCntrs)
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertFalse(grd.wrapAroundDateline)
self.assertEqual(1, grd.nParWorker)
cells = grd.cells
self.assertEqual(nx * ny - 3, len(cells))
def testGetSeaGridParallel(self):
minx = 100.0
miny = 45.0
dx = 0.5
dy = 1.0
nx = 30
ny = 10
maxx = minx + nx * dx
maxy = miny + ny * dy
landCntrs = [[100.25, 45.25], [105.25, 47.25]]
coastCntrs = [[100.75, 45.25], [105.25, 47.25]]
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny,
nParWorker=4, minXYIsCentroid=False)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet(landCntrs)
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet(coastCntrs)
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertEqual(4, grd.nParWorker)
self.assertFalse(grd.wrapAroundDateline)
cells = grd.cells
self.assertEqual(nx * ny - 3, len(cells))
def testGetNeighborsSerial(self):
minx = 100.0
miny = 45.0
dx = 0.5
dy = 1.0
nx = 30
ny = 10
maxx = minx + nx * dx
maxy = miny + ny * dy
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker=1)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet()
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet()
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertEqual(1, grd.nParWorker)
self.assertFalse(grd.wrapAroundDateline)
cells = grd.cells
cell = cells[0]
ncls = grd.getNeighbors(cell)
self.assertEqual(3, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[45]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[100]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
def testGetNeighborsParallel(self):
minx = 100.0
miny = 45.0
dx = 0.5
dy = 1.0
nx = 30
ny = 10
maxx = minx + nx * dx
maxy = miny + ny * dy
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker=4)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet()
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet()
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertEqual(4, grd.nParWorker)
self.assertFalse(grd.wrapAroundDateline)
cells = grd.cells
cell = cells[0]
ncls = grd.getNeighbors(cell)
self.assertEqual(3, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[45]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[100]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
def testGetNeighborsWrapAroundGlobalParallel(self):
minx = -179.0
miny = -89.0
dx = 2
dy = 2
nx = 180
ny = 90
maxx = minx + nx * dx
maxy = miny + ny * dy
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker=4)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet()
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet()
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertTrue(grd.wrapAroundDateline)
cells = grd.cells
cell = cells[0]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[3].boundary.coords[0][0])
self.assertEqual(-182, ncls[4].boundary.coords[0][0])
cell = cells[45]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[5].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
cell = cells[65]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[5].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
cell = cells[89]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(-182, ncls[3].boundary.coords[0][0])
self.assertEqual(-182, ncls[4].boundary.coords[0][0])
cell = cells[200]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
cell = cells[nx * ny - 22]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(182, ncls[5].boundary.coords[1][0])
self.assertEqual(182, ncls[6].boundary.coords[1][0])
self.assertEqual(182, ncls[6].boundary.coords[1][0])
cell = cells[nx * ny - 1]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue(cell.distance(nc) < 1e-09)
self.assertEqual(182, ncls[3].boundary.coords[1][0])
self.assertEqual(182, ncls[4].boundary.coords[1][0])
if __name__ == '__main__':
unittest.main()
|
import unittest
import shapely.geometry as gm
from alphaBetaLab.abRectangularGridBuilder import abRectangularGridBuilder
class testAbRectangularGridBuilder(unittest.TestCase):
def getMockHiResAlphaMtxAndCstCellDet(self, posCellCentroids = None):
class _mockClass:
def __init__(self, posCellCentroids):
self.posCellCentroids = posCellCentroids
self.cell = None
def getAlphaSubMatrix(self, cell):
sm = _mockClass(self.posCellCentroids)
sm.cell = cell
return sm
def _positive(self, cell):
cntrs = self.posCellCentroids
if cell is None or cntrs is None:
return False
else:
for c in cntrs:
if cell.contains(gm.Point([c[0], c[1]])):
return True
return False
def onLand(self):
cell = self.cell
return self._positive(cell)
def isCoastalCell(self, cell, boundary = None, surface = -1):
return self._positive(cell)
return _mockClass(posCellCentroids)
def testGetSeaGridSerial(self):
minx = 100.
miny = 45.
dx = .5
dy = 1.
nx = 30
ny = 10
maxx = minx + nx*dx
maxy = miny + ny*dy
landCntrs = [[100.25, 45.25], [105.25, 47.25]]
coastCntrs = [[100.75, 45.25], [105.25, 47.25]]
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny,
nParWorker=1, minXYIsCentroid=False)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet(landCntrs)
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet(coastCntrs)
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertFalse(grd.wrapAroundDateline)
self.assertEqual(1, grd.nParWorker)
cells = grd.cells
self.assertEqual(nx*ny - 3, len(cells))
def testGetSeaGridParallel(self):
minx = 100.
miny = 45.
dx = .5
dy = 1.
nx = 30
ny = 10
maxx = minx + nx*dx
maxy = miny + ny*dy
landCntrs = [[100.25, 45.25], [105.25, 47.25]]
coastCntrs = [[100.75, 45.25], [105.25, 47.25]]
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker=4, minXYIsCentroid=False)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet(landCntrs)
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet(coastCntrs)
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertEqual(4, grd.nParWorker)
self.assertFalse(grd.wrapAroundDateline)
cells = grd.cells
self.assertEqual(nx*ny - 3, len(cells))
def testGetNeighborsSerial(self):
minx = 100.
miny = 45.
dx = .5
dy = 1.
nx = 30
ny = 10
maxx = minx + nx*dx
maxy = miny + ny*dy
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker = 1)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet()
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet()
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertEqual(1, grd.nParWorker)
self.assertFalse(grd.wrapAroundDateline)
cells = grd.cells
cell = cells[0]
ncls = grd.getNeighbors(cell)
self.assertEqual(3, len(ncls))
for nc in ncls:
self.assertTrue( cell.distance(nc) < .000000001 )
cell = cells[45]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue( cell.distance(nc) < .000000001 )
cell = cells[100]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue( cell.distance(nc) < .000000001 )
def testGetNeighborsParallel(self):
minx = 100.
miny = 45.
dx = .5
dy = 1.
nx = 30
ny = 10
maxx = minx + nx*dx
maxy = miny + ny*dy
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker = 4)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet()
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet()
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertEqual(4, grd.nParWorker)
self.assertFalse(grd.wrapAroundDateline)
cells = grd.cells
cell = cells[0]
ncls = grd.getNeighbors(cell)
self.assertEqual(3, len(ncls))
for nc in ncls:
self.assertTrue( cell.distance(nc) < .000000001 )
cell = cells[45]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue( cell.distance(nc) < .000000001 )
cell = cells[100]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue( cell.distance(nc) < .000000001 )
def testGetNeighborsWrapAroundGlobalParallel(self):
minx = -179.
miny = -89.
dx = 2
dy = 2
nx = 180
ny = 90
maxx = minx + nx*dx
maxy = miny + ny*dy
gb = abRectangularGridBuilder(minx, miny, dx, dy, nx, ny, nParWorker = 4)
hiResMtx = self.getMockHiResAlphaMtxAndCstCellDet()
cstClDet = self.getMockHiResAlphaMtxAndCstCellDet()
grd = gb.buildGrid(hiResMtx, cstClDet)
self.assertTrue(grd.wrapAroundDateline)
cells = grd.cells
cell = cells[0]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue( cell.distance(nc) < .000000001 )
self.assertEqual(-182, ncls[3].boundary.coords[0][0])
self.assertEqual(-182, ncls[4].boundary.coords[0][0])
cell = cells[45]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue( cell.distance(nc) < .000000001 )
self.assertEqual(-182, ncls[5].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
cell = cells[65]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue( cell.distance(nc) < .000000001 )
self.assertEqual(-182, ncls[5].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
self.assertEqual(-182, ncls[6].boundary.coords[0][0])
cell = cells[89]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue( cell.distance(nc) < .000000001 )
self.assertEqual(-182, ncls[3].boundary.coords[0][0])
self.assertEqual(-182, ncls[4].boundary.coords[0][0])
cell = cells[200]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue( cell.distance(nc) < .000000001 )
cell = cells[nx*ny-22]
ncls = grd.getNeighbors(cell)
self.assertEqual(8, len(ncls))
for nc in ncls:
self.assertTrue( cell.distance(nc) < .000000001 )
self.assertEqual(182, ncls[5].boundary.coords[1][0])
self.assertEqual(182, ncls[6].boundary.coords[1][0])
self.assertEqual(182, ncls[6].boundary.coords[1][0])
cell = cells[nx*ny-1]
ncls = grd.getNeighbors(cell)
self.assertEqual(5, len(ncls))
for nc in ncls:
self.assertTrue( cell.distance(nc) < .000000001 )
self.assertEqual(182, ncls[3].boundary.coords[1][0])
self.assertEqual(182, ncls[4].boundary.coords[1][0])
if __name__ == '__main__':
unittest.main()
|
[
6,
7,
8,
9,
10
] |
2,203 |
37b23dc520abc7cbb6798f41063696916065626f
|
<mask token>
|
<mask token>
print(lista)
<mask token>
print(listasemana[0])
<mask token>
print(listasemana[-1])
<mask token>
print([listasemana[i] for i in [0, 3]])
<mask token>
print(conjunto)
<mask token>
print(lista1palabras, lista2palabras)
<mask token>
print([lista1palabras[i] for i in [1, 2, 4, 5]], lista2palabras)
<mask token>
print([lista2palabras[i] for i in [0, 1, 4, 5]], lista1palabras)
<mask token>
print(lista1palabras, [lista2palabras[i] for i in [0, 3]])
|
lista = []
print(lista)
listasemana = ['Lunes', 'Martes', 'Miercoles', 'Jueves', 'Viernes']
print(listasemana[0])
listasemana = ['Lunes', 'Martes', 'Miercoles', 'Jueves', 'Viernes']
print(listasemana[-1])
listasemana = ['Lunes', 'Martes', 'Miercoles', 'Jueves', 'Viernes']
print([listasemana[i] for i in [0, 3]])
listaa = [1, 2, 3, 4, 'hola', 2, 2]
conjunto = set(listaa)
listaa = list(conjunto)
print(conjunto)
lista1palabras = ['Sofia', 'Karla', 'Verinica', 'Lina', 'Natalia', 'Estefania']
lista2palabras = ['Enrique', 'Erica', 'Sofia', 'Lina', 'Carlos', 'Pablo']
print(lista1palabras, lista2palabras)
lista1palabras = ['Sofia', 'Karla', 'Verinica', 'Lina', 'Natalia', 'Estefania']
lista2palabras = ['Enrique', 'Erica', 'Sofia', 'Lina', 'Carlos', 'Pablo']
print([lista1palabras[i] for i in [1, 2, 4, 5]], lista2palabras)
lista1palabras = ['Sofia', 'Karla', 'Verinica', 'Lina', 'Natalia', 'Estefania']
lista2palabras = ['Enrique', 'Erica', 'Sofia', 'Lina', 'Carlos', 'Pablo']
print([lista2palabras[i] for i in [0, 1, 4, 5]], lista1palabras)
lista1palabras = ['Sofia', 'Karla', 'Verinica', 'Lina', 'Natalia', 'Estefania']
lista2palabras = ['Enrique', 'Erica', 'Sofia', 'Lina', 'Carlos', 'Pablo']
print(lista1palabras, [lista2palabras[i] for i in [0, 3]])
|
#lists
lista=[]
print(lista)
#weekday list
listasemana=["Lunes","Martes","Miercoles","Jueves","Viernes"]
print(listasemana[0])
#weekday list
listasemana=["Lunes","Martes","Miercoles","Jueves","Viernes"]
print(listasemana[-1])
#weekday list
listasemana=["Lunes","Martes","Miercoles","Jueves","Viernes"]
print([listasemana[i] for i in [0,3]])
#remove the repeated elements from a list
listaa=[1,2,3,4,"hola",2,2]
conjunto=set(listaa)
listaa=list(conjunto)
print(conjunto)
#word lists from 2 lists
lista1palabras=["Sofia","Karla","Verinica","Lina","Natalia","Estefania"]
lista2palabras=["Enrique","Erica","Sofia","Lina","Carlos","Pablo"]
print(lista1palabras,lista2palabras)
#words that appear only in the first list
lista1palabras=["Sofia","Karla","Verinica","Lina","Natalia","Estefania"]
lista2palabras=["Enrique","Erica","Sofia","Lina","Carlos","Pablo"]
print([lista1palabras[i] for i in [1,2,4,5]],lista2palabras)
#words that appear only in the second list
lista1palabras=["Sofia","Karla","Verinica","Lina","Natalia","Estefania"]
lista2palabras=["Enrique","Erica","Sofia","Lina","Carlos","Pablo"]
print([lista2palabras[i] for i in [0,1,4,5]],lista1palabras)
#words repeated in both lists
lista1palabras=["Sofia","Karla","Verinica","Lina","Natalia","Estefania"]
lista2palabras=["Enrique","Erica","Sofia","Lina","Carlos","Pablo"]
print(lista1palabras,[lista2palabras[i] for i in [0,3]])
| null |
[
0,
1,
2,
3
] |
2,204 |
32c18bd578bbf91c76604f063421a65a4f7a8b63
|
<mask token>
|
<mask token>
class BruteForce(Mono):
<mask token>
def _count_inconsistencies(self):
if self.num_cores == 1:
for ni in self.nonmatch_indices:
self.index2count[ni] = 0
for mi in self.match_indices:
match_features = self.features[mi]
count = 0
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features,
self.features[ni], self.min_con_dim)
if inconsistent == True:
count += 1
self.index2count[ni] += 1
self.index2count[mi] = count
else:
nmatch = len(self.match_indices)
threads2incons_count = pymp.shared.dict()
with pymp.Parallel(self.num_cores) as p:
local_index2incons_count = {}
for index in p.range(nmatch):
mi = self.match_indices[index]
match_features = self.features[mi]
count = 0
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features,
self.features[ni], self.min_con_dim)
if inconsistent == True:
count += 1
if ni in local_index2incons_count:
local_index2incons_count[ni] += 1
else:
local_index2incons_count[ni] = 1
if count > 0:
local_index2incons_count[mi] = count
threads2incons_count[p.thread_num] = local_index2incons_count
for _, local_index2incons_count in threads2incons_count.items():
for index, count in local_index2incons_count.items():
if index in self.index2count:
self.index2count[index] += count
else:
self.index2count[index] = count
return self.index2count
def _get_inconsistency_indices(self):
if self.num_cores == 1:
for mi in self.match_indices:
match_features = self.features[mi]
incons_indices = []
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features,
self.features[ni], self.min_con_dim)
if inconsistent == True:
incons_indices.append(ni)
if len(incons_indices) > 0:
self.index2incons[mi] = incons_indices
for ni in incons_indices:
if ni in self.index2incons:
self.index2incons[ni].append(mi)
else:
self.index2incons[ni] = [mi]
else:
nmatch = len(self.match_indices)
threads2incons = pymp.shared.dict()
with pymp.Parallel(self.num_cores) as p:
local_index2incons = {}
for index in p.range(nmatch):
mi = self.match_indices[index]
match_features = self.features[mi]
incons_indices = []
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features,
self.features[ni], self.min_con_dim)
if inconsistent == True:
incons_indices.append(ni)
if len(incons_indices) > 0:
local_index2incons[mi] = incons_indices
threads2incons[p.thread_num] = local_index2incons
for _, local_index2incons in threads2incons.items():
for mi, ni_indices in local_index2incons.items():
self.index2incons[mi] = ni_indices
for ni in ni_indices:
if ni in self.index2incons:
self.index2incons[ni].append(mi)
else:
self.index2incons[ni] = [mi]
return self.index2incons
|
<mask token>
class BruteForce(Mono):
def __init__(self, features, labels, params):
super(BruteForce, self).__init__(features, labels, params)
def _count_inconsistencies(self):
if self.num_cores == 1:
for ni in self.nonmatch_indices:
self.index2count[ni] = 0
for mi in self.match_indices:
match_features = self.features[mi]
count = 0
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features,
self.features[ni], self.min_con_dim)
if inconsistent == True:
count += 1
self.index2count[ni] += 1
self.index2count[mi] = count
else:
nmatch = len(self.match_indices)
threads2incons_count = pymp.shared.dict()
with pymp.Parallel(self.num_cores) as p:
local_index2incons_count = {}
for index in p.range(nmatch):
mi = self.match_indices[index]
match_features = self.features[mi]
count = 0
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features,
self.features[ni], self.min_con_dim)
if inconsistent == True:
count += 1
if ni in local_index2incons_count:
local_index2incons_count[ni] += 1
else:
local_index2incons_count[ni] = 1
if count > 0:
local_index2incons_count[mi] = count
threads2incons_count[p.thread_num] = local_index2incons_count
for _, local_index2incons_count in threads2incons_count.items():
for index, count in local_index2incons_count.items():
if index in self.index2count:
self.index2count[index] += count
else:
self.index2count[index] = count
return self.index2count
def _get_inconsistency_indices(self):
if self.num_cores == 1:
for mi in self.match_indices:
match_features = self.features[mi]
incons_indices = []
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features,
self.features[ni], self.min_con_dim)
if inconsistent == True:
incons_indices.append(ni)
if len(incons_indices) > 0:
self.index2incons[mi] = incons_indices
for ni in incons_indices:
if ni in self.index2incons:
self.index2incons[ni].append(mi)
else:
self.index2incons[ni] = [mi]
else:
nmatch = len(self.match_indices)
threads2incons = pymp.shared.dict()
with pymp.Parallel(self.num_cores) as p:
local_index2incons = {}
for index in p.range(nmatch):
mi = self.match_indices[index]
match_features = self.features[mi]
incons_indices = []
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features,
self.features[ni], self.min_con_dim)
if inconsistent == True:
incons_indices.append(ni)
if len(incons_indices) > 0:
local_index2incons[mi] = incons_indices
threads2incons[p.thread_num] = local_index2incons
for _, local_index2incons in threads2incons.items():
for mi, ni_indices in local_index2incons.items():
self.index2incons[mi] = ni_indices
for ni in ni_indices:
if ni in self.index2incons:
self.index2incons[ni].append(mi)
else:
self.index2incons[ni] = [mi]
return self.index2incons
|
<mask token>
import pymp
from v6.mono import Mono
class BruteForce(Mono):
def __init__(self, features, labels, params):
super(BruteForce, self).__init__(features, labels, params)
def _count_inconsistencies(self):
if self.num_cores == 1:
for ni in self.nonmatch_indices:
self.index2count[ni] = 0
for mi in self.match_indices:
match_features = self.features[mi]
count = 0
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features,
self.features[ni], self.min_con_dim)
if inconsistent == True:
count += 1
self.index2count[ni] += 1
self.index2count[mi] = count
else:
nmatch = len(self.match_indices)
threads2incons_count = pymp.shared.dict()
with pymp.Parallel(self.num_cores) as p:
local_index2incons_count = {}
for index in p.range(nmatch):
mi = self.match_indices[index]
match_features = self.features[mi]
count = 0
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features,
self.features[ni], self.min_con_dim)
if inconsistent == True:
count += 1
if ni in local_index2incons_count:
local_index2incons_count[ni] += 1
else:
local_index2incons_count[ni] = 1
if count > 0:
local_index2incons_count[mi] = count
threads2incons_count[p.thread_num] = local_index2incons_count
for _, local_index2incons_count in threads2incons_count.items():
for index, count in local_index2incons_count.items():
if index in self.index2count:
self.index2count[index] += count
else:
self.index2count[index] = count
return self.index2count
def _get_inconsistency_indices(self):
if self.num_cores == 1:
for mi in self.match_indices:
match_features = self.features[mi]
incons_indices = []
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features,
self.features[ni], self.min_con_dim)
if inconsistent == True:
incons_indices.append(ni)
if len(incons_indices) > 0:
self.index2incons[mi] = incons_indices
for ni in incons_indices:
if ni in self.index2incons:
self.index2incons[ni].append(mi)
else:
self.index2incons[ni] = [mi]
else:
nmatch = len(self.match_indices)
threads2incons = pymp.shared.dict()
with pymp.Parallel(self.num_cores) as p:
local_index2incons = {}
for index in p.range(nmatch):
mi = self.match_indices[index]
match_features = self.features[mi]
incons_indices = []
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features,
self.features[ni], self.min_con_dim)
if inconsistent == True:
incons_indices.append(ni)
if len(incons_indices) > 0:
local_index2incons[mi] = incons_indices
threads2incons[p.thread_num] = local_index2incons
for _, local_index2incons in threads2incons.items():
for mi, ni_indices in local_index2incons.items():
self.index2incons[mi] = ni_indices
for ni in ni_indices:
if ni in self.index2incons:
self.index2incons[ni].append(mi)
else:
self.index2incons[ni] = [mi]
return self.index2incons
|
'''
Created on Mar 7, 2019
@author: hzhang0418
'''
import pymp
from v6.mono import Mono
class BruteForce(Mono):
def __init__(self, features, labels, params):
super(BruteForce, self).__init__(features, labels, params)
def _count_inconsistencies(self):
if self.num_cores==1:
for ni in self.nonmatch_indices:
self.index2count[ni] = 0
for mi in self.match_indices:
match_features = self.features[mi]
count = 0
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features, self.features[ni], self.min_con_dim)
if inconsistent == True:
count += 1
self.index2count[ni] += 1
self.index2count[mi] = count
else:
nmatch = len(self.match_indices)
threads2incons_count = pymp.shared.dict()
with pymp.Parallel(self.num_cores) as p:
local_index2incons_count = {}
for index in p.range(nmatch):
mi = self.match_indices[index]
match_features = self.features[mi]
count = 0
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features, self.features[ni], self.min_con_dim)
if inconsistent == True:
count += 1
if ni in local_index2incons_count:
local_index2incons_count[ni] += 1
else:
local_index2incons_count[ni] = 1
if count>0:
local_index2incons_count[mi] = count
threads2incons_count[p.thread_num] = local_index2incons_count
for _, local_index2incons_count in threads2incons_count.items():
for index, count in local_index2incons_count.items():
if index in self.index2count:
self.index2count[index] += count
else:
self.index2count[index] = count
return self.index2count
def _get_inconsistency_indices(self):
if self.num_cores==1:
for mi in self.match_indices:
match_features = self.features[mi]
incons_indices = []
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features, self.features[ni], self.min_con_dim)
if inconsistent == True:
incons_indices.append(ni)
if len(incons_indices)>0:
self.index2incons[mi] = incons_indices
for ni in incons_indices:
if ni in self.index2incons:
self.index2incons[ni].append(mi)
else:
self.index2incons[ni] = [mi]
else:
nmatch = len(self.match_indices)
threads2incons = pymp.shared.dict()
with pymp.Parallel(self.num_cores) as p:
local_index2incons = {}
for index in p.range(nmatch):
mi = self.match_indices[index]
match_features = self.features[mi]
incons_indices = []
for ni in self.nonmatch_indices:
inconsistent = self.compare_features(match_features, self.features[ni], self.min_con_dim)
if inconsistent == True:
incons_indices.append(ni)
if len(incons_indices)>0:
local_index2incons[mi] = incons_indices
threads2incons[p.thread_num] = local_index2incons
for _, local_index2incons in threads2incons.items():
for mi, ni_indices in local_index2incons.items():
self.index2incons[mi] = ni_indices
for ni in ni_indices:
if ni in self.index2incons:
self.index2incons[ni].append(mi)
else:
self.index2incons[ni] = [mi]
return self.index2incons
|
[
0,
3,
4,
5,
6
] |
2,205 |
736b84bbcf1d5954b491068be4060edeade2c1c5
|
<mask token>
|
print('Choose from the following options: ')
<mask token>
print(one, '\n', two, '\n', three, '\n', four, '\n', five)
<mask token>
if value == 1:
modem_on = input('\nIs your modem on? (Enter Y or N): ')
if modem_on == 'Y':
router_on = input('\nIs your router on? (Enter Y or N): ')
if router_on == 'Y':
redlight = input(
'\nDoes your router emit a red light? (Enter Y or N): ')
if redlight == 'Y':
print(
'Unplug your router and wait thirty seconds. Then plug your router into the nearest outlet to restart your router. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!'
)
else:
comp_wifi_on = input(
'\nAre both your computer and wifi on? (Enter Y or N): ')
if comp_wifi_on == 'Y':
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
else:
print(
"If your computer is not on, please turn it on by pressing the power button. Also make sure your computer's wifi is on. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!"
)
else:
print(
'Plug your router into the nearest outlet to turn on your router. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!'
)
else:
print(
'Plug your modem into the nearest outlet to turn on your modem. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!'
)
if value == 2:
cable_on = input('\nIs your cable box on? (Enter Y or N): ')
if cable_on == 'Y':
tv_on = input('\nIs your TV on? (Enter Y or N): ')
if tv_on == 'Y':
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
else:
print(
                'Plug your TV into the nearest outlet and press the power button on your remote to turn on your TV. If you still do not receive a cable signal, restart this program. Note, this program will now terminate. Goodbye!'
)
else:
print(
            'Plug your cable box into the nearest outlet to turn on your cable box. If you still do not receive a cable signal, please restart this program. Note, this program will now terminate. Goodbye!'
)
if value == 3:
phones_on = input('\nAre your phones on? (Enter Y or N): ')
    if phones_on == 'Y':
landline_plugged = input(
"""
Is there a landline wire plugged into each phone or the wireless phone terminal? (Enter Y or N): """
)
if landline_plugged == 'Y':
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
else:
print(
'Plug a landline wire into each phone or phone terminal. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!'
)
else:
print(
            'Plug your phones into the nearest outlet to turn them on. If you are using wireless phones, please make sure you charge them before attempting to use them. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!'
)
if value == 4:
late_payment = input(
'\nWere you late on your last payment? (Enter Y or N): ')
if late_payment == 'Y':
print(
'If you were late on your last payment, you will be charged an additional 5% interest fee. Therefore, your bill may be more than usual. If you would like to contest your charge, please call 555-555-5555 for additional support with this matter. Thank you for your patience. Note, this program will now terminate. Goodbye!'
)
else:
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
if value == 5:
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
if value < 1 or value > 5:
print(
'You entered an invalid menu choice. It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
|
print('Choose from the following options: ')
one = ' 1. My internet is not working.'
two = '2. My cable is not working.'
three = '3. My phones are not working.'
four = '4. My bill is wrong.'
five = '5. I want to upgrade my plan.'
print(one, '\n', two, '\n', three, '\n', four, '\n', five)
value = int(input('(Enter a value 1 - 5): '))
if value == 1:
modem_on = input('\nIs your modem on? (Enter Y or N): ')
if modem_on == 'Y':
router_on = input('\nIs your router on? (Enter Y or N): ')
if router_on == 'Y':
redlight = input(
'\nDoes your router emit a red light? (Enter Y or N): ')
if redlight == 'Y':
print(
'Unplug your router and wait thirty seconds. Then plug your router into the nearest outlet to restart your router. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!'
)
else:
comp_wifi_on = input(
'\nAre both your computer and wifi on? (Enter Y or N): ')
if comp_wifi_on == 'Y':
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
else:
print(
"If your computer is not on, please turn it on by pressing the power button. Also make sure your computer's wifi is on. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!"
)
else:
print(
'Plug your router into the nearest outlet to turn on your router. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!'
)
else:
print(
'Plug your modem into the nearest outlet to turn on your modem. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!'
)
if value == 2:
cable_on = input('\nIs your cable box on? (Enter Y or N): ')
if cable_on == 'Y':
tv_on = input('\nIs your TV on? (Enter Y or N): ')
if tv_on == 'Y':
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
else:
print(
                'Plug your TV into the nearest outlet and press the power button on your remote to turn on your TV. If you still do not receive a cable signal, restart this program. Note, this program will now terminate. Goodbye!'
)
else:
print(
            'Plug your cable box into the nearest outlet to turn on your cable box. If you still do not receive a cable signal, please restart this program. Note, this program will now terminate. Goodbye!'
)
if value == 3:
phones_on = input('\nAre your phones on? (Enter Y or N): ')
    if phones_on == 'Y':
landline_plugged = input(
"""
Is there a landline wire plugged into each phone or the wireless phone terminal? (Enter Y or N): """
)
if landline_plugged == 'Y':
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
else:
print(
'Plug a landline wire into each phone or phone terminal. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!'
)
else:
print(
            'Plug your phones into the nearest outlet to turn them on. If you are using wireless phones, please make sure you charge them before attempting to use them. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!'
)
if value == 4:
late_payment = input(
'\nWere you late on your last payment? (Enter Y or N): ')
if late_payment == 'Y':
print(
'If you were late on your last payment, you will be charged an additional 5% interest fee. Therefore, your bill may be more than usual. If you would like to contest your charge, please call 555-555-5555 for additional support with this matter. Thank you for your patience. Note, this program will now terminate. Goodbye!'
)
else:
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
if value == 5:
print(
'It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
if value < 1 or value > 5:
print(
'You entered an invalid menu choice. It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.'
)
|
# "Time Warner Python" Salma Hashem netid: sh5640
#Design a customer service application by asking users a series of questions and, based on the customers' answers, providing them with instructions.
#Ask the user to choose from the following options
print("Choose from the following options: ")
#assign each menu option to a number
one= " 1. My internet is not working."
two= "2. My cable is not working."
three= "3. My phones are not working."
four= "4. My bill is wrong."
five= "5. I want to upgrade my plan."
#Print the options each on its own line and ask the user to input a number and convert into an integer
print(one, "\n", two, "\n", three, "\n", four, "\n", five)
value= int(input("(Enter a value 1 - 5): "))
#assign variables to user inputs using if else statements for scenario one and print output based on user inputs
if value==1:
modem_on=input("\nIs your modem on? (Enter Y or N): ")
if modem_on=="Y":
router_on=input("\nIs your router on? (Enter Y or N): ")
if router_on=="Y":
redlight= input("\nDoes your router emit a red light? (Enter Y or N): ")
if redlight=="Y":
print("Unplug your router and wait thirty seconds. Then plug your router into the nearest outlet to restart your router. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!")
else:
comp_wifi_on=input("\nAre both your computer and wifi on? (Enter Y or N): ")
if comp_wifi_on=="Y":
print("It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.")
else:
print("If your computer is not on, please turn it on by pressing the power button. Also make sure your computer's wifi is on. If you still cannot connect to the internet, restart this program. Note, this program will now terminate. Goodbye!")
else:
print("Plug your router into the nearest outlet to turn on your router. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!")
else:
print("Plug your modem into the nearest outlet to turn on your modem. If you still cannot connect to the Internet, restart this program. Note, this program will now terminate. Goodbye!")
#assign variables to user inputs using if statements for scenario two and print output based on user inputs
if value==2:
cable_on=input("\nIs your cable box on? (Enter Y or N): ")
if cable_on=="Y":
tv_on=input("\nIs your TV on? (Enter Y or N): ")
if tv_on=="Y":
print("It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.")
else:
print("Plug your TV into the nearest outlet and press the power button on your remote to turn on your TV. If you still do not recieve a cable signal, restart this program. Note, this program will now terminate. Goodbye!")
else:
print("Plug your cable box into the nearest outlet to turn on your cable box. If you still do not recieve a cable signal, please restart this program. Note, this program will now terminate. Goodbye!")
#assign variables to user inputs using if statements for scenario three and print output based on user inputs
if value==3:
phones_on=input("\nAre your phones on? (Enter Y or N): ")
    if phones_on=="Y":
landline_plugged=input("\nIs there a landline wire plugged into each phone or the wireless phone terminal? (Enter Y or N): ")
if landline_plugged=="Y":
print("It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.")
else:
print("Plug a landline wire into each phone or phone terminal. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!")
else:
print("Plug your phones into the nearest outlet to turn them on. If you are using wireless phones, please make sure you change them before attempting to use them. If you still cannot use your phones, please restart this program. Note, this program will now terminate. Goodbye!")
#assign variables to user inputs using if statements for scenario four and print output based on user inputs
if value==4:
late_payment= input("\nWere you late on your last payment? (Enter Y or N): ")
if late_payment=="Y":
print("If you were late on your last payment, you will be charged an additional 5% interest fee. Therefore, your bill may be more than usual. If you would like to contest your charge, please call 555-555-5555 for additional support with this matter. Thank you for your patience. Note, this program will now terminate. Goodbye!")
else:
print("It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.")
#scenario 5--evaluate input and print output based on user input
if value==5:
print("It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.")
#create if statements to evaluate invalid user inputs
if value<1 or value>5:
print("You entered an invalid menu choice. It looks like you may need additional support. Please call 555-555-5555 for additional support with this matter. Thank you for your patience.")
| null |
[
0,
1,
2,
3
] |
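A hardening sketch for the menu program above: the original int(input(...)) raises ValueError on non-numeric input, so a retry loop around the conversion is assumed here (the loop and its messages are additions, not in the source).

# Sketch: robust menu parsing; retry behavior is an assumption.
while True:
    raw = input('(Enter a value 1 - 5): ')
    try:
        value = int(raw)
    except ValueError:
        print('Please enter a whole number between 1 and 5.')
        continue
    if 1 <= value <= 5:
        break
    print('Please choose an option between 1 and 5.')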
2,206 |
72e03e7199044f3ed1d562db622a7b884fa186b0
|
<mask token>
@application.route('/')
def hello_world():
return jsonify({'Hello': 'World'})
<mask token>
|
<mask token>
load_dotenv(dotenv_path='./.env')
<mask token>
@application.route('/')
def hello_world():
return jsonify({'Hello': 'World'})
@application.route('/jira-issue-transition', methods=['POST'])
def jira_issue_transition_update():
if not helper.check_github_ip(src_ip=request.access_route[0]):
raise NotAcceptable('Github IP whitelist check failed! IP: {}'.
format(request.access_route[0]))
response = None
    if (not request.json or 'review' not in request.json or 'action' not in
        request.json):
raise NotAcceptable('Invalid JSON')
if request.json['review']['state'] == 'changes_requested':
response = jiraservice.issue_transition_update(issue_id=request.
json['pull_request']['head']['ref'], new_transition_id=os.
getenv('JIRA_TRANSITION_REJECT_ID'))
elif request.json['review']['state'] == 'approved':
response = jiraservice.issue_transition_update(issue_id=request.
json['pull_request']['head']['ref'], new_transition_id=os.
getenv('JIRA_TRANSITION_APPROVED_ID'))
if request.json['review']['state'] == 'approved' or request.json['review'][
'state'] == 'changes_requested':
helper.save_pull_request_review(issue_id=request.json[
'pull_request']['head']['ref'], pr_id=request.json[
'pull_request']['number'], issue_owner_username=request.json[
'pull_request']['user']['login'], reviewer_username=request.
json['review']['user']['login'], action=request.json['review'][
'state'])
if response:
return jsonify({'ack': 'OK'})
else:
return jsonify({'ack': 'NOT OK'})
if __name__ == '__main__':
application.run(debug=True)
|
<mask token>
load_dotenv(dotenv_path='./.env')
<mask token>
application = FlaskAPI(__name__)
jiraservice = jira()
helper = helper()
@application.route('/')
def hello_world():
return jsonify({'Hello': 'World'})
@application.route('/jira-issue-transition', methods=['POST'])
def jira_issue_transition_update():
if not helper.check_github_ip(src_ip=request.access_route[0]):
raise NotAcceptable('Github IP whitelist check failed! IP: {}'.
format(request.access_route[0]))
response = None
    if (not request.json or 'review' not in request.json or 'action' not in
        request.json):
raise NotAcceptable('Invalid JSON')
if request.json['review']['state'] == 'changes_requested':
response = jiraservice.issue_transition_update(issue_id=request.
json['pull_request']['head']['ref'], new_transition_id=os.
getenv('JIRA_TRANSITION_REJECT_ID'))
elif request.json['review']['state'] == 'approved':
response = jiraservice.issue_transition_update(issue_id=request.
json['pull_request']['head']['ref'], new_transition_id=os.
getenv('JIRA_TRANSITION_APPROVED_ID'))
if request.json['review']['state'] == 'approved' or request.json['review'][
'state'] == 'changes_requested':
helper.save_pull_request_review(issue_id=request.json[
'pull_request']['head']['ref'], pr_id=request.json[
'pull_request']['number'], issue_owner_username=request.json[
'pull_request']['user']['login'], reviewer_username=request.
json['review']['user']['login'], action=request.json['review'][
'state'])
if response:
return jsonify({'ack': 'OK'})
else:
return jsonify({'ack': 'NOT OK'})
if __name__ == '__main__':
application.run(debug=True)
|
import os
from flask import request, jsonify
from flask_api import FlaskAPI
from flask_api.exceptions import NotAcceptable
from dotenv import load_dotenv
load_dotenv(dotenv_path='./.env')
from src.service.jira import jira
from src.service.helper import helper
application = FlaskAPI(__name__)
jiraservice = jira()
helper = helper()
@application.route('/')
def hello_world():
return jsonify({'Hello': 'World'})
@application.route('/jira-issue-transition', methods=['POST'])
def jira_issue_transition_update():
if not helper.check_github_ip(src_ip=request.access_route[0]):
raise NotAcceptable('Github IP whitelist check failed! IP: {}'.
format(request.access_route[0]))
response = None
    if (not request.json or 'review' not in request.json or 'action' not in
        request.json):
raise NotAcceptable('Invalid JSON')
if request.json['review']['state'] == 'changes_requested':
response = jiraservice.issue_transition_update(issue_id=request.
json['pull_request']['head']['ref'], new_transition_id=os.
getenv('JIRA_TRANSITION_REJECT_ID'))
elif request.json['review']['state'] == 'approved':
response = jiraservice.issue_transition_update(issue_id=request.
json['pull_request']['head']['ref'], new_transition_id=os.
getenv('JIRA_TRANSITION_APPROVED_ID'))
if request.json['review']['state'] == 'approved' or request.json['review'][
'state'] == 'changes_requested':
helper.save_pull_request_review(issue_id=request.json[
'pull_request']['head']['ref'], pr_id=request.json[
'pull_request']['number'], issue_owner_username=request.json[
'pull_request']['user']['login'], reviewer_username=request.
json['review']['user']['login'], action=request.json['review'][
'state'])
if response:
return jsonify({'ack': 'OK'})
else:
return jsonify({'ack': 'NOT OK'})
if __name__ == '__main__':
application.run(debug=True)
|
import os
from flask import request, jsonify
from flask_api import FlaskAPI
from flask_api.exceptions import NotAcceptable
from dotenv import load_dotenv
load_dotenv(dotenv_path='./.env')
from src.service.jira import jira
from src.service.helper import helper
application = FlaskAPI(__name__)
jiraservice = jira()
helper = helper()
@application.route('/')
def hello_world():
return jsonify({'Hello': 'World'})
@application.route('/jira-issue-transition', methods=['POST'])
def jira_issue_transition_update():
if not helper.check_github_ip(src_ip=request.access_route[0]):
raise NotAcceptable('Github IP whitelist check failed! IP: {}'.format(request.access_route[0]))
response = None
    if not request.json or 'review' not in request.json or 'action' not in request.json:
raise NotAcceptable('Invalid JSON')
if request.json['review']['state'] == 'changes_requested':
response = jiraservice.issue_transition_update(issue_id=request.json['pull_request']['head']['ref'],
new_transition_id=os.getenv('JIRA_TRANSITION_REJECT_ID'))
elif request.json['review']['state'] == 'approved':
response = jiraservice.issue_transition_update(issue_id=request.json['pull_request']['head']['ref'],
new_transition_id=os.getenv('JIRA_TRANSITION_APPROVED_ID'))
if request.json['review']['state'] == 'approved' or request.json['review']['state'] == 'changes_requested':
helper.save_pull_request_review(issue_id=request.json['pull_request']['head']['ref'],
pr_id=request.json['pull_request']['number'],
issue_owner_username=request.json['pull_request']['user']['login'],
reviewer_username=request.json['review']['user']['login'],
action=request.json['review']['state'])
if response:
return jsonify({'ack': 'OK'})
else:
return jsonify({'ack': 'NOT OK'})
if __name__ == '__main__':
application.run(debug=True)
|
[
1,
3,
4,
5,
6
] |
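A local test sketch for the webhook above, using Flask's built-in test client. The payload keys mirror what the handler reads; helper.check_github_ip would likely reject the test client's address, so this assumes the whitelist check passes in a test configuration.

# Sketch: posting a review event to /jira-issue-transition (payload shape inferred from the handler).
payload = {
    'action': 'submitted',
    'review': {'state': 'approved', 'user': {'login': 'reviewer1'}},
    'pull_request': {'head': {'ref': 'PROJ-123'}, 'number': 42,
                     'user': {'login': 'author1'}},
}
with application.test_client() as client:
    resp = client.post('/jira-issue-transition', json=payload)
    print(resp.get_json())  # {'ack': 'OK'} if the Jira transition succeeded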
2,207 |
2eecc852a6438db19e0ed55ba6cc6610d76c6ed0
|
<mask token>
|
<mask token>
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(Config)
app.config.from_object(config_options[config_name])
app.config['SECRET_KEY'] = 'd686414d5eeb7d38df7e8c385b2c2c47'
bootstrap.init_app(app)
csrf.init_app(app)
db.init_app(app)
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/authenticate')
return app
|
<mask token>
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.loginview = 'auth.login'
bootstrap = Bootstrap()
csrf = CSRFProtect()
db = SQLAlchemy()
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(Config)
app.config.from_object(config_options[config_name])
app.config['SECRET_KEY'] = 'd686414d5eeb7d38df7e8c385b2c2c47'
bootstrap.init_app(app)
csrf.init_app(app)
db.init_app(app)
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/authenticate')
return app
|
from flask import Flask, render_template
from config import Config
from flask_bootstrap import Bootstrap
from config import config_options
from flask_login import LoginManager
from flask_wtf.csrf import CSRFProtect
from flask_sqlalchemy import SQLAlchemy
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.loginview = 'auth.login'
bootstrap = Bootstrap()
csrf = CSRFProtect()
db = SQLAlchemy()
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(Config)
app.config.from_object(config_options[config_name])
app.config['SECRET_KEY'] = 'd686414d5eeb7d38df7e8c385b2c2c47'
bootstrap.init_app(app)
csrf.init_app(app)
db.init_app(app)
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/authenticate')
return app
|
from flask import Flask, render_template
from config import Config
from flask_bootstrap import Bootstrap
from config import config_options
from flask_login import LoginManager
from flask_wtf.csrf import CSRFProtect
from flask_sqlalchemy import SQLAlchemy
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.loginview = 'auth.login'
bootstrap = Bootstrap()
csrf=CSRFProtect()
db = SQLAlchemy()
def create_app(config_name):
app= Flask(__name__)
#create app configs
app.config.from_object(Config)
app.config.from_object(config_options[config_name])
app.config['SECRET_KEY']='d686414d5eeb7d38df7e8c385b2c2c47'
#initializing
bootstrap.init_app(app)
csrf.init_app(app)
db.init_app(app)
#registering
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix = '/authenticate')
return app
|
[
0,
1,
2,
3,
4
] |
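A usage sketch for the factory above; 'development' is a hypothetical key assumed to exist in config_options.

# Sketch: building and serving the app from the factory (config name is an assumption).
app = create_app('development')
if __name__ == '__main__':
    app.run(debug=True)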
2,208 |
1c085ea8f9b21ea7bef94ad4ecbb1771a57f697a
|
<mask token>
|
<mask token>
@dataclass_with_properties
class ExternalMap:
external_id: str
verified_using: List[IntegrityMethod] = field(default_factory=list)
location_hint: Optional[str] = None
defining_document: Optional[str] = None
<mask token>
|
<mask token>
@dataclass_with_properties
class ExternalMap:
external_id: str
verified_using: List[IntegrityMethod] = field(default_factory=list)
location_hint: Optional[str] = None
defining_document: Optional[str] = None
def __init__(self, external_id: str, verified_using: List[
IntegrityMethod]=None, location_hint: Optional[str]=None,
defining_document: Optional[str]=None):
verified_using = [] if verified_using is None else verified_using
check_types_and_set_values(self, locals())
|
from dataclasses import field
from beartype.typing import List, Optional
from spdx_tools.common.typing.dataclass_with_properties import dataclass_with_properties
from spdx_tools.common.typing.type_checks import check_types_and_set_values
from spdx_tools.spdx3.model import IntegrityMethod
@dataclass_with_properties
class ExternalMap:
external_id: str
verified_using: List[IntegrityMethod] = field(default_factory=list)
location_hint: Optional[str] = None
defining_document: Optional[str] = None
def __init__(self, external_id: str, verified_using: List[
IntegrityMethod]=None, location_hint: Optional[str]=None,
defining_document: Optional[str]=None):
verified_using = [] if verified_using is None else verified_using
check_types_and_set_values(self, locals())
|
# SPDX-FileCopyrightText: 2023 spdx contributors
#
# SPDX-License-Identifier: Apache-2.0
from dataclasses import field
from beartype.typing import List, Optional
from spdx_tools.common.typing.dataclass_with_properties import dataclass_with_properties
from spdx_tools.common.typing.type_checks import check_types_and_set_values
from spdx_tools.spdx3.model import IntegrityMethod
@dataclass_with_properties
class ExternalMap:
external_id: str # anyURI
verified_using: List[IntegrityMethod] = field(default_factory=list)
location_hint: Optional[str] = None # anyURI
defining_document: Optional[str] = None
def __init__(
self,
external_id: str,
verified_using: List[IntegrityMethod] = None,
location_hint: Optional[str] = None,
defining_document: Optional[str] = None,
):
verified_using = [] if verified_using is None else verified_using
check_types_and_set_values(self, locals())
|
[
0,
1,
2,
3,
4
] |
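An instantiation sketch for the dataclass above; the URI values are placeholders. The field(default_factory=list) default matters here: each instance gets its own verified_using list rather than sharing one mutable default.

# Sketch: constructing an ExternalMap (URIs are placeholder values).
external_map = ExternalMap(
    external_id='https://example.com/spdx/doc#element-1',
    location_hint='https://example.com/spdx/doc.json',
)
print(external_map.verified_using)  # [] -- a fresh list per instance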
2,209 |
5aebebb7f22e094a1a897b3266ff07d59400b76c
|
<mask token>
class ScheduleAdmin(admin.ModelAdmin):
"""model admin for schedules"""
list_display = ('id', 'name', 'func', 'schedule_type', 'repeats',
'cluster', 'next_run', 'last_run', 'success')
if not croniter:
readonly_fields = 'cron',
list_filter = 'next_run', 'schedule_type', 'cluster'
search_fields = 'func',
list_display_links = 'id', 'name'
class QueueAdmin(admin.ModelAdmin):
"""queue admin for ORM broker"""
list_display = 'id', 'key', 'task_id', 'name', 'func', 'lock'
def save_model(self, request, obj, form, change):
obj.save(using=Conf.ORM)
def delete_model(self, request, obj):
obj.delete(using=Conf.ORM)
def get_queryset(self, request):
return super(QueueAdmin, self).get_queryset(request).using(Conf.ORM)
def has_add_permission(self, request):
"""Don't allow adds."""
return False
list_filter = 'key',
<mask token>
|
<mask token>
class TaskAdmin(admin.ModelAdmin):
"""model admin for success tasks."""
list_display = 'name', 'func', 'started', 'stopped', 'time_taken', 'group'
def has_add_permission(self, request):
"""Don't allow adds."""
return False
def get_queryset(self, request):
"""Only show successes."""
qs = super(TaskAdmin, self).get_queryset(request)
return qs.filter(success=True)
search_fields = 'name', 'func', 'group'
readonly_fields = []
list_filter = 'group',
def get_readonly_fields(self, request, obj=None):
"""Set all fields readonly."""
return list(self.readonly_fields) + [field.name for field in obj.
_meta.fields]
<mask token>
class FailAdmin(admin.ModelAdmin):
"""model admin for failed tasks."""
list_display = 'name', 'func', 'started', 'stopped', 'short_result'
def has_add_permission(self, request):
"""Don't allow adds."""
return False
actions = [retry_failed]
search_fields = 'name', 'func'
list_filter = 'group',
readonly_fields = []
def get_readonly_fields(self, request, obj=None):
"""Set all fields readonly."""
return list(self.readonly_fields) + [field.name for field in obj.
_meta.fields]
class ScheduleAdmin(admin.ModelAdmin):
"""model admin for schedules"""
list_display = ('id', 'name', 'func', 'schedule_type', 'repeats',
'cluster', 'next_run', 'last_run', 'success')
if not croniter:
readonly_fields = 'cron',
list_filter = 'next_run', 'schedule_type', 'cluster'
search_fields = 'func',
list_display_links = 'id', 'name'
class QueueAdmin(admin.ModelAdmin):
"""queue admin for ORM broker"""
list_display = 'id', 'key', 'task_id', 'name', 'func', 'lock'
def save_model(self, request, obj, form, change):
obj.save(using=Conf.ORM)
def delete_model(self, request, obj):
obj.delete(using=Conf.ORM)
def get_queryset(self, request):
return super(QueueAdmin, self).get_queryset(request).using(Conf.ORM)
def has_add_permission(self, request):
"""Don't allow adds."""
return False
list_filter = 'key',
<mask token>
|
<mask token>
class TaskAdmin(admin.ModelAdmin):
"""model admin for success tasks."""
list_display = 'name', 'func', 'started', 'stopped', 'time_taken', 'group'
def has_add_permission(self, request):
"""Don't allow adds."""
return False
def get_queryset(self, request):
"""Only show successes."""
qs = super(TaskAdmin, self).get_queryset(request)
return qs.filter(success=True)
search_fields = 'name', 'func', 'group'
readonly_fields = []
list_filter = 'group',
def get_readonly_fields(self, request, obj=None):
"""Set all fields readonly."""
return list(self.readonly_fields) + [field.name for field in obj.
_meta.fields]
def retry_failed(FailAdmin, request, queryset):
"""Submit selected tasks back to the queue."""
for task in queryset:
async_task(task.func, *(task.args or ()), hook=task.hook, **task.
kwargs or {})
task.delete()
<mask token>
class FailAdmin(admin.ModelAdmin):
"""model admin for failed tasks."""
list_display = 'name', 'func', 'started', 'stopped', 'short_result'
def has_add_permission(self, request):
"""Don't allow adds."""
return False
actions = [retry_failed]
search_fields = 'name', 'func'
list_filter = 'group',
readonly_fields = []
def get_readonly_fields(self, request, obj=None):
"""Set all fields readonly."""
return list(self.readonly_fields) + [field.name for field in obj.
_meta.fields]
class ScheduleAdmin(admin.ModelAdmin):
"""model admin for schedules"""
list_display = ('id', 'name', 'func', 'schedule_type', 'repeats',
'cluster', 'next_run', 'last_run', 'success')
if not croniter:
readonly_fields = 'cron',
list_filter = 'next_run', 'schedule_type', 'cluster'
search_fields = 'func',
list_display_links = 'id', 'name'
class QueueAdmin(admin.ModelAdmin):
"""queue admin for ORM broker"""
list_display = 'id', 'key', 'task_id', 'name', 'func', 'lock'
def save_model(self, request, obj, form, change):
obj.save(using=Conf.ORM)
def delete_model(self, request, obj):
obj.delete(using=Conf.ORM)
def get_queryset(self, request):
return super(QueueAdmin, self).get_queryset(request).using(Conf.ORM)
def has_add_permission(self, request):
"""Don't allow adds."""
return False
list_filter = 'key',
<mask token>
|
<mask token>
class TaskAdmin(admin.ModelAdmin):
"""model admin for success tasks."""
list_display = 'name', 'func', 'started', 'stopped', 'time_taken', 'group'
def has_add_permission(self, request):
"""Don't allow adds."""
return False
def get_queryset(self, request):
"""Only show successes."""
qs = super(TaskAdmin, self).get_queryset(request)
return qs.filter(success=True)
search_fields = 'name', 'func', 'group'
readonly_fields = []
list_filter = 'group',
def get_readonly_fields(self, request, obj=None):
"""Set all fields readonly."""
return list(self.readonly_fields) + [field.name for field in obj.
_meta.fields]
def retry_failed(FailAdmin, request, queryset):
"""Submit selected tasks back to the queue."""
for task in queryset:
async_task(task.func, *(task.args or ()), hook=task.hook, **task.
kwargs or {})
task.delete()
<mask token>
class FailAdmin(admin.ModelAdmin):
"""model admin for failed tasks."""
list_display = 'name', 'func', 'started', 'stopped', 'short_result'
def has_add_permission(self, request):
"""Don't allow adds."""
return False
actions = [retry_failed]
search_fields = 'name', 'func'
list_filter = 'group',
readonly_fields = []
def get_readonly_fields(self, request, obj=None):
"""Set all fields readonly."""
return list(self.readonly_fields) + [field.name for field in obj.
_meta.fields]
class ScheduleAdmin(admin.ModelAdmin):
"""model admin for schedules"""
list_display = ('id', 'name', 'func', 'schedule_type', 'repeats',
'cluster', 'next_run', 'last_run', 'success')
if not croniter:
readonly_fields = 'cron',
list_filter = 'next_run', 'schedule_type', 'cluster'
search_fields = 'func',
list_display_links = 'id', 'name'
class QueueAdmin(admin.ModelAdmin):
"""queue admin for ORM broker"""
list_display = 'id', 'key', 'task_id', 'name', 'func', 'lock'
def save_model(self, request, obj, form, change):
obj.save(using=Conf.ORM)
def delete_model(self, request, obj):
obj.delete(using=Conf.ORM)
def get_queryset(self, request):
return super(QueueAdmin, self).get_queryset(request).using(Conf.ORM)
def has_add_permission(self, request):
"""Don't allow adds."""
return False
list_filter = 'key',
admin.site.register(Schedule, ScheduleAdmin)
admin.site.register(Success, TaskAdmin)
admin.site.register(Failure, FailAdmin)
if Conf.ORM or Conf.TESTING:
admin.site.register(OrmQ, QueueAdmin)
|
"""Admin module for Django."""
from django.contrib import admin
from django.utils.translation import gettext_lazy as _
from django_q.conf import Conf, croniter
from django_q.models import Failure, OrmQ, Schedule, Success
from django_q.tasks import async_task
class TaskAdmin(admin.ModelAdmin):
"""model admin for success tasks."""
list_display = ("name", "func", "started", "stopped", "time_taken", "group")
def has_add_permission(self, request):
"""Don't allow adds."""
return False
def get_queryset(self, request):
"""Only show successes."""
qs = super(TaskAdmin, self).get_queryset(request)
return qs.filter(success=True)
search_fields = ("name", "func", "group")
readonly_fields = []
list_filter = ("group",)
def get_readonly_fields(self, request, obj=None):
"""Set all fields readonly."""
return list(self.readonly_fields) + [field.name for field in obj._meta.fields]
def retry_failed(FailAdmin, request, queryset):
"""Submit selected tasks back to the queue."""
for task in queryset:
async_task(task.func, *task.args or (), hook=task.hook, **task.kwargs or {})
task.delete()
retry_failed.short_description = _("Resubmit selected tasks to queue")
class FailAdmin(admin.ModelAdmin):
"""model admin for failed tasks."""
list_display = ("name", "func", "started", "stopped", "short_result")
def has_add_permission(self, request):
"""Don't allow adds."""
return False
actions = [retry_failed]
search_fields = ("name", "func")
list_filter = ("group",)
readonly_fields = []
def get_readonly_fields(self, request, obj=None):
"""Set all fields readonly."""
return list(self.readonly_fields) + [field.name for field in obj._meta.fields]
class ScheduleAdmin(admin.ModelAdmin):
"""model admin for schedules"""
list_display = (
"id",
"name",
"func",
"schedule_type",
"repeats",
"cluster",
"next_run",
"last_run",
"success",
)
# optional cron strings
if not croniter:
readonly_fields = ("cron",)
list_filter = ("next_run", "schedule_type", "cluster")
search_fields = ("func",)
list_display_links = ("id", "name")
class QueueAdmin(admin.ModelAdmin):
"""queue admin for ORM broker"""
list_display = ("id", "key", "task_id", "name", "func", "lock")
def save_model(self, request, obj, form, change):
obj.save(using=Conf.ORM)
def delete_model(self, request, obj):
obj.delete(using=Conf.ORM)
def get_queryset(self, request):
return super(QueueAdmin, self).get_queryset(request).using(Conf.ORM)
def has_add_permission(self, request):
"""Don't allow adds."""
return False
list_filter = ("key",)
admin.site.register(Schedule, ScheduleAdmin)
admin.site.register(Success, TaskAdmin)
admin.site.register(Failure, FailAdmin)
if Conf.ORM or Conf.TESTING:
admin.site.register(OrmQ, QueueAdmin)
|
[
10,
21,
22,
23,
26
] |
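A sketch of the call that retry_failed replays for each failed task: async_task(func, *args, hook=..., **kwargs). The dotted task path and hook below are hypothetical.

# Sketch: enqueueing a task the same way retry_failed resubmits one (paths are hypothetical).
from django_q.tasks import async_task
async_task('myapp.tasks.send_report', 2024, hook='myapp.tasks.on_done', month=7)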
2,210 |
d91bacfd4b45832a79189c0f1ec4f4cb3ef14851
|
<mask token>
|
<mask token>
print(list(myquery))
<mask token>
print(list(myquery))
|
<mask token>
myclient = pymongo.MongoClient('mongodb://localhost:27017/')
mydb = myclient['divya_db']
mycol = mydb['vani_data']
myquery = mycol.find().sort('age', -1)
print(list(myquery))
myquery = mycol.find().sort('visits', 1)
print(list(myquery))
|
import pymongo
myclient = pymongo.MongoClient('mongodb://localhost:27017/')
mydb = myclient['divya_db']
mycol = mydb['vani_data']
myquery = mycol.find().sort('age', -1)
print(list(myquery))
myquery = mycol.find().sort('visits', 1)
print(list(myquery))
|
# 14. Sort dataframe (birds) first by the values in the 'age' column in descending order, then by the values in the 'visits' column in ascending order.
import pymongo
myclient = pymongo.MongoClient("mongodb://localhost:27017/")
mydb = myclient["divya_db"]
mycol = mydb["vani_data"]
# age column in descending order
myquery = mycol.find().sort("age",-1)
print(list(myquery))
# visits column in ascending order
myquery = mycol.find().sort("visits",1)
print(list(myquery))
|
[
0,
1,
2,
3,
4
] |
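Note that the two find().sort() calls above return independent orderings; the compound ordering the task comment describes (age descending, then visits ascending) takes a list of (key, direction) pairs in a single sort:

# Sketch: one query with the compound sort the comment asks for.
myquery = mycol.find().sort([('age', -1), ('visits', 1)])
print(list(myquery))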
2,211 |
6be2cc99d03596715d76cda41d63b8c91c829498
|
<mask token>
|
<mask token>
class BmExam(db.Model):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
|
<mask token>
class BmExam(db.Model):
__tablename__ = 'bm_exam'
id = db.Column(db.Integer, primary_key=True)
status = db.Column(db.Integer, nullable=False, server_default=db.
FetchedValue())
exam_id = db.Column(db.Integer, nullable=False)
exam_name = db.Column(db.String(200), nullable=False, server_default=db
.FetchedValue())
show_exam_name = db.Column(db.String(200), nullable=False,
server_default=db.FetchedValue())
numbers = db.Column(db.Integer, nullable=False, server_default=db.
FetchedValue())
x_rules = db.Column(db.String(1000), nullable=False, server_default=db.
FetchedValue())
m_rules = db.Column(db.String(1000), nullable=False, server_default=db.
FetchedValue())
rule_status = db.Column(db.Integer, nullable=False, server_default=db.
FetchedValue())
start_time = db.Column(db.DateTime, nullable=False, server_default=db.
FetchedValue())
end_time = db.Column(db.DateTime, nullable=False, server_default=db.
FetchedValue())
beizhu = db.Column(db.String(2000), nullable=False, server_default=db.
FetchedValue())
beizhu2 = db.Column(db.String(200), nullable=False, server_default=db.
FetchedValue())
beizhu3 = db.Column(db.String(200), nullable=False, server_default=db.
FetchedValue())
updated_time = db.Column(db.DateTime, nullable=False, server_default=db
.FetchedValue())
created_time = db.Column(db.DateTime, nullable=False, server_default=db
.FetchedValue())
|
from sqlalchemy import Column, DateTime, Integer, String
from sqlalchemy.schema import FetchedValue
from application import db
class BmExam(db.Model):
__tablename__ = 'bm_exam'
id = db.Column(db.Integer, primary_key=True)
status = db.Column(db.Integer, nullable=False, server_default=db.
FetchedValue())
exam_id = db.Column(db.Integer, nullable=False)
exam_name = db.Column(db.String(200), nullable=False, server_default=db
.FetchedValue())
show_exam_name = db.Column(db.String(200), nullable=False,
server_default=db.FetchedValue())
numbers = db.Column(db.Integer, nullable=False, server_default=db.
FetchedValue())
x_rules = db.Column(db.String(1000), nullable=False, server_default=db.
FetchedValue())
m_rules = db.Column(db.String(1000), nullable=False, server_default=db.
FetchedValue())
rule_status = db.Column(db.Integer, nullable=False, server_default=db.
FetchedValue())
start_time = db.Column(db.DateTime, nullable=False, server_default=db.
FetchedValue())
end_time = db.Column(db.DateTime, nullable=False, server_default=db.
FetchedValue())
beizhu = db.Column(db.String(2000), nullable=False, server_default=db.
FetchedValue())
beizhu2 = db.Column(db.String(200), nullable=False, server_default=db.
FetchedValue())
beizhu3 = db.Column(db.String(200), nullable=False, server_default=db.
FetchedValue())
updated_time = db.Column(db.DateTime, nullable=False, server_default=db
.FetchedValue())
created_time = db.Column(db.DateTime, nullable=False, server_default=db
.FetchedValue())
|
# coding: utf-8
from sqlalchemy import Column, DateTime, Integer, String
from sqlalchemy.schema import FetchedValue
from application import db
class BmExam(db.Model):
__tablename__ = 'bm_exam'
id = db.Column(db.Integer, primary_key=True)
status = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
exam_id = db.Column(db.Integer, nullable=False)
exam_name = db.Column(db.String(200), nullable=False, server_default=db.FetchedValue())
show_exam_name = db.Column(db.String(200), nullable=False, server_default=db.FetchedValue())
numbers = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
x_rules = db.Column(db.String(1000), nullable=False, server_default=db.FetchedValue())
m_rules = db.Column(db.String(1000), nullable=False, server_default=db.FetchedValue())
rule_status = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
start_time = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
end_time = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
beizhu = db.Column(db.String(2000), nullable=False, server_default=db.FetchedValue())
beizhu2 = db.Column(db.String(200), nullable=False, server_default=db.FetchedValue())
beizhu3 = db.Column(db.String(200), nullable=False, server_default=db.FetchedValue())
updated_time = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
created_time = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
|
[
0,
1,
2,
3,
4
] |
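A query sketch against the model above, assuming an active Flask-SQLAlchemy app context; the status value 1 meaning "active" is an assumption.

# Sketch: listing exams by status, earliest start first (status semantics assumed).
open_exams = BmExam.query.filter_by(status=1).order_by(BmExam.start_time.asc()).all()
for exam in open_exams:
    print(exam.exam_id, exam.show_exam_name, exam.start_time)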
2,212 |
1450d3b8cc4cef1c5f802e4d84e2211b7467fe12
|
<mask token>
def inventory(component, component_type):
try:
component_inventory = _inventory[component]
except KeyError:
raise ValueError('Illegal assembly component: {}'.format(component))
try:
component_class = component_inventory[component_type]
except KeyError:
raise ValueError('{} type `{}` is not listed in the inventory'.
format(component, component_type))
return component_class
def from_config(config):
try:
config.items()
except AttributeError:
with open(str(config), 'r') as f:
config = yaml.load(f, Loader=yaml.SafeLoader)
return _from_config(config)
<mask token>
|
<mask token>
def inventory(component, component_type):
try:
component_inventory = _inventory[component]
except KeyError:
raise ValueError('Illegal assembly component: {}'.format(component))
try:
component_class = component_inventory[component_type]
except KeyError:
raise ValueError('{} type `{}` is not listed in the inventory'.
format(component, component_type))
return component_class
def from_config(config):
try:
config.items()
except AttributeError:
with open(str(config), 'r') as f:
config = yaml.load(f, Loader=yaml.SafeLoader)
return _from_config(config)
def _from_config(config):
try:
config.items()
    except AttributeError:
        raise TypeError('Config must be a dictionary. Got {}'.format(type
(config)))
if not 'assembly' in config:
raise KeyError("Config is missing mandatory key 'assembly'")
if not 'type' in config['assembly']:
raise KeyError("Assembly definition is missing mandatory key 'type'")
constructor = inventory('assembly', config['assembly']['type'])
assembly_params = config['assembly'].get('parameters', dict())
assembly_template = constructor(**assembly_params)
readers_definitions = config.get('readers', list())
readers = dict()
for reader_definition in readers_definitions:
try:
reader_type = reader_definition.pop('type')
except KeyError:
raise KeyError("Reader definition is missing mandatory key 'type'")
try:
reader_label = reader_definition.pop('label')
except KeyError:
raise KeyError("Reader definition is missing mandatory key 'label'"
)
reader_parameters = reader_definition.get('parameters', dict())
reader_class = inventory('reader', reader_type)
readers[reader_label] = reader_class(**reader_parameters)
assembly_readers = config['assembly'].get('readers', dict())
for signal_name, reader_label in assembly_readers.items():
try:
reader = readers[reader_label]
except KeyError:
raise KeyError("Undefined reader label '{}' in aseembly definition"
.format(reader_label))
if signal_name == 'default':
assembly_template.attach_reader(reader)
else:
assembly_template.attach_reader(reader, signal_name)
networks_definitions = config.get('networks', list())
networks = dict()
for network_definition in networks_definitions:
try:
network_type = network_definition.pop('type')
except KeyError:
raise KeyError("network definition is missing mandatory key 'type'"
)
try:
network_label = network_definition.pop('label')
except KeyError:
raise KeyError(
"network definition is missing mandatory key 'label'")
network_parameters = network_definition.get('parameters', dict())
network_class = inventory('network', network_type)
networks[network_label] = network_class(**network_parameters)
assembly_networks = config['assembly'].get('networks', dict())
for signal_name, network_label in assembly_networks.items():
try:
network = networks[network_label]
except KeyError:
raise KeyError(
"Undefined network label '{}' in aseembly definition".
format(network_label))
if signal_name == 'default':
assembly_template.attach_network(network)
else:
assembly_template.attach_network(network, signal_name)
assembly = assembly_template()
return assembly
|
<mask token>
_inventory = dict(assembly=dict(Flatline=contructors.Flatline, Seasaw=
contructors.Seesaw, Pulser=contructors.Pulser, SimpleActuator=
contructors.SimpleActuator), reader=dict(PassThrough=readers.
PassthroughReader, EveryNth=readers.EveryNthReader, OnChange=readers.
OnChangeReader), network=dict(Ideal=networks.IdealNetwork, Normal=
networks.NormalNetwork))
def inventory(component, component_type):
try:
component_inventory = _inventory[component]
except KeyError:
raise ValueError('Illegal assembly component: {}'.format(component))
try:
component_class = component_inventory[component_type]
except KeyError:
raise ValueError('{} type `{}` is not listed in the inventory'.
format(component, component_type))
return component_class
def from_config(config):
try:
config.items()
except AttributeError:
with open(str(config), 'r') as f:
config = yaml.load(f, Loader=yaml.SafeLoader)
return _from_config(config)
def _from_config(config):
try:
config.items()
    except AttributeError:
        raise TypeError('Config must be a dictionary. Got {}'.format(type
(config)))
if not 'assembly' in config:
raise KeyError("Config is missing mandatory key 'assembly'")
if not 'type' in config['assembly']:
raise KeyError("Assembly definition is missing mandatory key 'type'")
constructor = inventory('assembly', config['assembly']['type'])
assembly_params = config['assembly'].get('parameters', dict())
assembly_template = constructor(**assembly_params)
readers_definitions = config.get('readers', list())
readers = dict()
for reader_definition in readers_definitions:
try:
reader_type = reader_definition.pop('type')
except KeyError:
raise KeyError("Reader definition is missing mandatory key 'type'")
try:
reader_label = reader_definition.pop('label')
except KeyError:
raise KeyError("Reader definition is missing mandatory key 'label'"
)
reader_parameters = reader_definition.get('parameters', dict())
reader_class = inventory('reader', reader_type)
readers[reader_label] = reader_class(**reader_parameters)
assembly_readers = config['assembly'].get('readers', dict())
for signal_name, reader_label in assembly_readers.items():
try:
reader = readers[reader_label]
except KeyError:
raise KeyError("Undefined reader label '{}' in aseembly definition"
.format(reader_label))
if signal_name == 'default':
assembly_template.attach_reader(reader)
else:
assembly_template.attach_reader(reader, signal_name)
networks_definitions = config.get('networks', list())
networks = dict()
for network_definition in networks_definitions:
try:
network_type = network_definition.pop('type')
except KeyError:
raise KeyError("network definition is missing mandatory key 'type'"
)
try:
network_label = network_definition.pop('label')
except KeyError:
raise KeyError(
"network definition is missing mandatory key 'label'")
network_parameters = network_definition.get('parameters', dict())
network_class = inventory('network', network_type)
networks[network_label] = network_class(**network_parameters)
assembly_networks = config['assembly'].get('networks', dict())
for signal_name, network_label in assembly_networks.items():
try:
network = networks[network_label]
except KeyError:
raise KeyError(
"Undefined network label '{}' in aseembly definition".
format(network_label))
if signal_name == 'default':
assembly_template.attach_network(network)
else:
assembly_template.attach_network(network, signal_name)
assembly = assembly_template()
return assembly
|
import iotsim.readers as readers
import iotsim.networks as networks
import iotsim.constructors as contructors
import yaml
_inventory = dict(assembly=dict(Flatline=contructors.Flatline, Seasaw=
contructors.Seesaw, Pulser=contructors.Pulser, SimpleActuator=
contructors.SimpleActuator), reader=dict(PassThrough=readers.
PassthroughReader, EveryNth=readers.EveryNthReader, OnChange=readers.
OnChangeReader), network=dict(Ideal=networks.IdealNetwork, Normal=
networks.NormalNetwork))
def inventory(component, component_type):
try:
component_inventory = _inventory[component]
except KeyError:
raise ValueError('Illegal assembly component: {}'.format(component))
try:
component_class = component_inventory[component_type]
except KeyError:
raise ValueError('{} type `{}` is not listed in the inventory'.
format(component, component_type))
return component_class
def from_config(config):
try:
config.items()
except AttributeError:
with open(str(config), 'r') as f:
config = yaml.load(f, Loader=yaml.SafeLoader)
return _from_config(config)
def _from_config(config):
try:
config.items()
    except AttributeError:
        raise TypeError('Config must be a dictionary. Got {}'.format(type
(config)))
if not 'assembly' in config:
raise KeyError("Config is missing mandatory key 'assembly'")
if not 'type' in config['assembly']:
raise KeyError("Assembly definition is missing mandatory key 'type'")
constructor = inventory('assembly', config['assembly']['type'])
assembly_params = config['assembly'].get('parameters', dict())
assembly_template = constructor(**assembly_params)
readers_definitions = config.get('readers', list())
readers = dict()
for reader_definition in readers_definitions:
try:
reader_type = reader_definition.pop('type')
except KeyError:
raise KeyError("Reader definition is missing mandatory key 'type'")
try:
reader_label = reader_definition.pop('label')
except KeyError:
raise KeyError("Reader definition is missing mandatory key 'label'"
)
reader_parameters = reader_definition.get('parameters', dict())
reader_class = inventory('reader', reader_type)
readers[reader_label] = reader_class(**reader_parameters)
assembly_readers = config['assembly'].get('readers', dict())
for signal_name, reader_label in assembly_readers.items():
try:
reader = readers[reader_label]
except KeyError:
raise KeyError("Undefined reader label '{}' in aseembly definition"
.format(reader_label))
if signal_name == 'default':
assembly_template.attach_reader(reader)
else:
assembly_template.attach_reader(reader, signal_name)
networks_definitions = config.get('networks', list())
networks = dict()
for network_definition in networks_definitions:
try:
network_type = network_definition.pop('type')
except KeyError:
raise KeyError("network definition is missing mandatory key 'type'"
)
try:
network_label = network_definition.pop('label')
except KeyError:
raise KeyError(
"network definition is missing mandatory key 'label'")
network_parameters = network_definition.get('parameters', dict())
network_class = inventory('network', network_type)
networks[network_label] = network_class(**network_parameters)
assembly_networks = config['assembly'].get('networks', dict())
for signal_name, network_label in assembly_networks.items():
try:
network = networks[network_label]
except KeyError:
raise KeyError(
"Undefined network label '{}' in aseembly definition".
format(network_label))
if signal_name == 'default':
assembly_template.attach_network(network)
else:
assembly_template.attach_network(network, signal_name)
assembly = assembly_template()
return assembly
|
import iotsim.readers as readers
import iotsim.networks as networks
import iotsim.constructors as contructors
import yaml
_inventory = dict(
assembly=dict(
Flatline=contructors.Flatline,
Seasaw=contructors.Seesaw,
Pulser=contructors.Pulser,
SimpleActuator=contructors.SimpleActuator,
),
reader=dict(
PassThrough=readers.PassthroughReader,
EveryNth=readers.EveryNthReader,
OnChange=readers.OnChangeReader,
),
network = dict(
Ideal = networks.IdealNetwork,
Normal = networks.NormalNetwork,
),
)
def inventory(component, component_type):
try:
component_inventory = _inventory[component]
except KeyError:
raise ValueError("Illegal assembly component: {}".format(component))
try:
component_class = component_inventory[component_type]
except KeyError:
raise ValueError("{} type `{}` is not listed in the inventory".
format(component, component_type))
return component_class
def from_config(config):
try:
config.items()
except AttributeError:
with open(str(config), 'r') as f:
config = yaml.load(f, Loader=yaml.SafeLoader)
return _from_config(config)
def _from_config(config):
try:
config.items()
    except AttributeError:
raise TypeError("Consfis must be a disctionary. Got {}".format(type(config)))
if not 'assembly' in config:
raise KeyError("Config is missing mandatory key 'assembly'")
if not 'type' in config['assembly']:
raise KeyError("Assembly definition is missing mandatory key 'type'")
constructor = inventory('assembly', config['assembly']['type'])
assembly_params = config['assembly'].get('parameters', dict())
assembly_template = constructor(**assembly_params)
readers_definitions = config.get('readers', list())
readers = dict()
for reader_definition in readers_definitions:
try:
reader_type = reader_definition.pop('type')
except KeyError:
raise KeyError("Reader definition is missing mandatory key 'type'")
try:
reader_label = reader_definition.pop('label')
except KeyError:
raise KeyError("Reader definition is missing mandatory key 'label'")
reader_parameters = reader_definition.get('parameters', dict())
reader_class = inventory('reader', reader_type)
readers[reader_label] = reader_class(**reader_parameters)
assembly_readers = config['assembly'].get('readers', dict())
for signal_name, reader_label in assembly_readers.items():
try:
reader = readers[reader_label]
except KeyError:
raise KeyError("Undefined reader label '{}' in aseembly definition".
format(reader_label))
if signal_name == 'default':
assembly_template.attach_reader(reader)
else:
assembly_template.attach_reader(reader, signal_name)
networks_definitions = config.get('networks', list())
networks = dict()
for network_definition in networks_definitions:
try:
network_type = network_definition.pop('type')
except KeyError:
raise KeyError("network definition is missing mandatory key 'type'")
try:
network_label = network_definition.pop('label')
except KeyError:
raise KeyError("network definition is missing mandatory key 'label'")
network_parameters = network_definition.get('parameters', dict())
network_class = inventory('network', network_type)
networks[network_label] = network_class(**network_parameters)
assembly_networks = config['assembly'].get('networks', dict())
for signal_name, network_label in assembly_networks.items():
try:
network = networks[network_label]
except KeyError:
raise KeyError("Undefined network label '{}' in aseembly definition".
format(network_label))
if signal_name == 'default':
assembly_template.attach_network(network)
else:
assembly_template.attach_network(network, signal_name)
assembly = assembly_template()
return assembly
|
[
2,
3,
4,
5,
6
] |
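A configuration sketch in the shape _from_config expects, wired through from_config; the labels are arbitrary and the empty parameters dicts assume each constructor accepts no required arguments.

# Sketch: a dict config matching the parsing above (labels/parameters are illustrative).
config = {
    'assembly': {
        'type': 'Pulser',
        'parameters': {},               # forwarded to the Pulser constructor
        'readers': {'default': 'r1'},   # signal name -> reader label
        'networks': {'default': 'n1'},  # signal name -> network label
    },
    'readers': [{'type': 'PassThrough', 'label': 'r1'}],
    'networks': [{'type': 'Ideal', 'label': 'n1'}],
}
assembly = from_config(config)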
2,213 |
426396c981fe56230e39b81e156e7c6877e39055
|
<mask token>
def make_Folders(names):
for n in names:
if not os.path.exists(n):
os.makedirs(n)
<mask token>
|
<mask token>
def make_Folders(names):
for n in names:
if not os.path.exists(n):
os.makedirs(n)
make_Folders(timeslices)
|
<mask token>
timeslices = [('0_' + str(x) + '_' + str(y)) for x in range(30, 100) for y in
range(0, 6)]
def make_Folders(names):
for n in names:
if not os.path.exists(n):
os.makedirs(n)
make_Folders(timeslices)
|
import os
timeslices = [('0_' + str(x) + '_' + str(y)) for x in range(30, 100) for y in
range(0, 6)]
def make_Folders(names):
for n in names:
if not os.path.exists(n):
os.makedirs(n)
make_Folders(timeslices)
|
import os
timeslices = ['0_' + str(x) + '_' + str(y) for x in range(30,100) for y in range(0,6)]
#print timeslices
def make_Folders(names):
for n in names:
if not os.path.exists(n):
os.makedirs(n)
make_Folders(timeslices)
|
[
1,
2,
3,
4,
5
] |
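An equivalent sketch of make_Folders using os.makedirs' exist_ok flag (Python 3.2+), which drops the separate existence check and its create-after-check race:

# Sketch: same effect as make_Folders, without the exists()/makedirs() race.
import os

def make_folders(names):
    for n in names:
        os.makedirs(n, exist_ok=True)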
2,214 |
82bfdb46e1da96e5db91d66c3a060d8bf7747d07
|
<mask token>
class Submissions(Cog):
<mask token>
@command()
async def current(self, ctx: Context) ->None:
await ctx.trigger_typing()
repo.init()
drafts = repo.drafts()
if not drafts:
await ctx.send('No competition active')
return
writeup = drafts[0]
msg = f'Current Competition: {writeup.title}\n'
if writeup.prompt:
msg += f'```\n{writeup.prompt}\n```\n'
if writeup.sections:
msg += f'There are currently {len(writeup.sections) - 1} entries\n'
await ctx.send(msg)
@Cog.listener()
async def on_message(self, msg: Message) ->None:
if msg.channel.type != ChannelType.private:
return
if not msg.attachments:
return
await msg.channel.trigger_typing()
repo.init()
drafts = repo.drafts()
if not drafts:
await msg.channel.send('No competition active')
return
writeup = drafts[0]
author_id = msg.author.id
url = msg.attachments[0].url
section_id = writeup.get_section_index(author_id)
if section_id == -1:
text = f"""
[{author_id}]: {url}
INTRO
![{msg.author.name}'s Card Name Goes Here][{author_id}]
TEXT
"""
writeup.sections.append(text)
writeup.save()
await msg.channel.send(
f'Your submission for {writeup.title} has been recorded')
else:
text = writeup.sections[section_id].strip()
lines = text.splitlines()
if text.startswith(f'[{author_id}]: '):
lines[0] = f'[{author_id}]: {url}'
else:
lines.insert(0, f'[{author_id}]: {url}')
writeup.sections[section_id] = '\n' + '\n'.join(lines) + '\n'
writeup.save()
await msg.channel.send(
f'Your submission for {writeup.title} has been updated')
try:
if not os.path.exists(os.path.join('site', 'images', writeup.
imgdir)):
os.mkdir(os.path.join('site', 'images', writeup.imgdir))
await fetch_tools.store_async(msg.attachments[0].url, os.path.
join('site', 'images', writeup.imgdir, msg.attachments[0].
filename))
except Exception:
print('Failed to download')
repo.commit(f"{writeup.title}: {msg.author.name}'s submission")
await fetch_tools.post_discord_webhook('685261389836845077',
'XohTy7E-3ilDYvHIhUsjB9rJf6YaUuHWzGOra1AmJ7XNbci-5C7omOypgcEjG_UHUZRy'
, f'<@{author_id}> submitted {url}')
<mask token>
|
<mask token>
class Submissions(Cog):
def __init__(self, bot: Bot) ->None:
self.bot = bot
@command()
async def current(self, ctx: Context) ->None:
await ctx.trigger_typing()
repo.init()
drafts = repo.drafts()
if not drafts:
await ctx.send('No competition active')
return
writeup = drafts[0]
msg = f'Current Competition: {writeup.title}\n'
if writeup.prompt:
msg += f'```\n{writeup.prompt}\n```\n'
if writeup.sections:
msg += f'There are currently {len(writeup.sections) - 1} entries\n'
await ctx.send(msg)
@Cog.listener()
async def on_message(self, msg: Message) ->None:
if msg.channel.type != ChannelType.private:
return
if not msg.attachments:
return
await msg.channel.trigger_typing()
repo.init()
drafts = repo.drafts()
if not drafts:
await msg.channel.send('No competition active')
return
writeup = drafts[0]
author_id = msg.author.id
url = msg.attachments[0].url
section_id = writeup.get_section_index(author_id)
if section_id == -1:
text = f"""
[{author_id}]: {url}
INTRO
![{msg.author.name}'s Card Name Goes Here][{author_id}]
TEXT
"""
writeup.sections.append(text)
writeup.save()
await msg.channel.send(
f'Your submission for {writeup.title} has been recorded')
else:
text = writeup.sections[section_id].strip()
lines = text.splitlines()
if text.startswith(f'[{author_id}]: '):
lines[0] = f'[{author_id}]: {url}'
else:
lines.insert(0, f'[{author_id}]: {url}')
writeup.sections[section_id] = '\n' + '\n'.join(lines) + '\n'
writeup.save()
await msg.channel.send(
f'Your submission for {writeup.title} has been updated')
try:
if not os.path.exists(os.path.join('site', 'images', writeup.
imgdir)):
os.mkdir(os.path.join('site', 'images', writeup.imgdir))
await fetch_tools.store_async(msg.attachments[0].url, os.path.
join('site', 'images', writeup.imgdir, msg.attachments[0].
filename))
except Exception:
print('Failed to download')
repo.commit(f"{writeup.title}: {msg.author.name}'s submission")
await fetch_tools.post_discord_webhook('685261389836845077',
'XohTy7E-3ilDYvHIhUsjB9rJf6YaUuHWzGOra1AmJ7XNbci-5C7omOypgcEjG_UHUZRy'
, f'<@{author_id}> submitted {url}')
<mask token>
|
<mask token>
class Submissions(Cog):
def __init__(self, bot: Bot) ->None:
self.bot = bot
@command()
async def current(self, ctx: Context) ->None:
await ctx.trigger_typing()
repo.init()
drafts = repo.drafts()
if not drafts:
await ctx.send('No competition active')
return
writeup = drafts[0]
msg = f'Current Competition: {writeup.title}\n'
if writeup.prompt:
msg += f'```\n{writeup.prompt}\n```\n'
if writeup.sections:
msg += f'There are currently {len(writeup.sections) - 1} entries\n'
await ctx.send(msg)
@Cog.listener()
async def on_message(self, msg: Message) ->None:
if msg.channel.type != ChannelType.private:
return
if not msg.attachments:
return
await msg.channel.trigger_typing()
repo.init()
drafts = repo.drafts()
if not drafts:
await msg.channel.send('No competition active')
return
writeup = drafts[0]
author_id = msg.author.id
url = msg.attachments[0].url
section_id = writeup.get_section_index(author_id)
if section_id == -1:
text = f"""
[{author_id}]: {url}
INTRO
![{msg.author.name}'s Card Name Goes Here][{author_id}]
TEXT
"""
writeup.sections.append(text)
writeup.save()
await msg.channel.send(
f'Your submission for {writeup.title} has been recorded')
else:
text = writeup.sections[section_id].strip()
lines = text.splitlines()
if text.startswith(f'[{author_id}]: '):
lines[0] = f'[{author_id}]: {url}'
else:
lines.insert(0, f'[{author_id}]: {url}')
writeup.sections[section_id] = '\n' + '\n'.join(lines) + '\n'
writeup.save()
await msg.channel.send(
f'Your submission for {writeup.title} has been updated')
try:
if not os.path.exists(os.path.join('site', 'images', writeup.
imgdir)):
os.mkdir(os.path.join('site', 'images', writeup.imgdir))
await fetch_tools.store_async(msg.attachments[0].url, os.path.
join('site', 'images', writeup.imgdir, msg.attachments[0].
filename))
except Exception:
print('Failed to download')
repo.commit(f"{writeup.title}: {msg.author.name}'s submission")
await fetch_tools.post_discord_webhook('685261389836845077',
'XohTy7E-3ilDYvHIhUsjB9rJf6YaUuHWzGOra1AmJ7XNbci-5C7omOypgcEjG_UHUZRy'
, f'<@{author_id}> submitted {url}')
def setup(bot: Bot) ->None:
bot.add_cog(Submissions(bot))
|
from discord import Message, ChannelType
from discord.ext.commands import Bot, Cog, command, Context
from ccbot import repo
from shared import fetch_tools
import os
class Submissions(Cog):
def __init__(self, bot: Bot) ->None:
self.bot = bot
@command()
async def current(self, ctx: Context) ->None:
await ctx.trigger_typing()
repo.init()
drafts = repo.drafts()
if not drafts:
await ctx.send('No competition active')
return
writeup = drafts[0]
msg = f'Current Competition: {writeup.title}\n'
if writeup.prompt:
msg += f'```\n{writeup.prompt}\n```\n'
if writeup.sections:
msg += f'There are currently {len(writeup.sections) - 1} entries\n'
await ctx.send(msg)
@Cog.listener()
async def on_message(self, msg: Message) ->None:
if msg.channel.type != ChannelType.private:
return
if not msg.attachments:
return
await msg.channel.trigger_typing()
repo.init()
drafts = repo.drafts()
if not drafts:
await msg.channel.send('No competition active')
return
writeup = drafts[0]
author_id = msg.author.id
url = msg.attachments[0].url
section_id = writeup.get_section_index(author_id)
if section_id == -1:
text = f"""
[{author_id}]: {url}
INTRO
![{msg.author.name}'s Card Name Goes Here][{author_id}]
TEXT
"""
writeup.sections.append(text)
writeup.save()
await msg.channel.send(
f'Your submission for {writeup.title} has been recorded')
else:
text = writeup.sections[section_id].strip()
lines = text.splitlines()
if text.startswith(f'[{author_id}]: '):
lines[0] = f'[{author_id}]: {url}'
else:
lines.insert(0, f'[{author_id}]: {url}')
writeup.sections[section_id] = '\n' + '\n'.join(lines) + '\n'
writeup.save()
await msg.channel.send(
f'Your submission for {writeup.title} has been updated')
try:
if not os.path.exists(os.path.join('site', 'images', writeup.
imgdir)):
os.mkdir(os.path.join('site', 'images', writeup.imgdir))
await fetch_tools.store_async(msg.attachments[0].url, os.path.
join('site', 'images', writeup.imgdir, msg.attachments[0].
filename))
except Exception:
print('Failed to download')
repo.commit(f"{writeup.title}: {msg.author.name}'s submission")
await fetch_tools.post_discord_webhook('685261389836845077',
'XohTy7E-3ilDYvHIhUsjB9rJf6YaUuHWzGOra1AmJ7XNbci-5C7omOypgcEjG_UHUZRy'
, f'<@{author_id}> submitted {url}')
def setup(bot: Bot) ->None:
bot.add_cog(Submissions(bot))
|
from discord import Message, ChannelType
from discord.ext.commands import Bot, Cog, command, Context
from ccbot import repo
from shared import fetch_tools
import os
class Submissions(Cog):
def __init__(self, bot: Bot) -> None:
self.bot = bot
@command()
async def current(self, ctx: Context) -> None:
await ctx.trigger_typing()
repo.init()
drafts = repo.drafts()
if not drafts:
await ctx.send('No competition active')
return
writeup = drafts[0]
msg = f'Current Competition: {writeup.title}\n'
if writeup.prompt:
msg += f'```\n{writeup.prompt}\n```\n'
if writeup.sections:
msg += f'There are currently {len(writeup.sections) - 1} entries\n'
await ctx.send(msg)
@Cog.listener()
async def on_message(self, msg: Message) -> None:
if msg.channel.type != ChannelType.private:
return
if not msg.attachments:
return
await msg.channel.trigger_typing()
repo.init()
drafts = repo.drafts()
if not drafts:
await msg.channel.send('No competition active')
return
writeup = drafts[0]
author_id = msg.author.id
url = msg.attachments[0].url
section_id = writeup.get_section_index(author_id)
if section_id == -1:
text = f'\n\n[{author_id}]: {url}\n\nINTRO\n\n![{msg.author.name}\'s Card Name Goes Here][{author_id}]\n\nTEXT\n\n'
writeup.sections.append(text)
writeup.save()
await msg.channel.send(f'Your submission for {writeup.title} has been recorded')
else:
text = writeup.sections[section_id].strip()
lines = text.splitlines()
if text.startswith(f'[{author_id}]: '):
lines[0] = f'[{author_id}]: {url}'
else:
lines.insert(0, f'[{author_id}]: {url}')
writeup.sections[section_id] = '\n' + '\n'.join(lines) + '\n'
writeup.save()
await msg.channel.send(f'Your submission for {writeup.title} has been updated')
try:
if not os.path.exists(os.path.join('site', 'images', writeup.imgdir)):
os.mkdir(os.path.join('site', 'images', writeup.imgdir))
await fetch_tools.store_async(msg.attachments[0].url, os.path.join('site', 'images', writeup.imgdir, msg.attachments[0].filename))
except Exception:
print('Failed to download')
repo.commit(f'{writeup.title}: {msg.author.name}\'s submission')
await fetch_tools.post_discord_webhook('685261389836845077', 'XohTy7E-3ilDYvHIhUsjB9rJf6YaUuHWzGOra1AmJ7XNbci-5C7omOypgcEjG_UHUZRy', f'<@{author_id}> submitted {url}')
def setup(bot: Bot) -> None:
bot.add_cog(Submissions(bot))
|
[
1,
2,
3,
4,
5
] |
2,215 |
b4bcf9903f4a34c8b256c65cada29e952a436f74
|
<mask token>
class Migration(SchemaMigration):
def forwards(self, orm):
db.add_column(u'main_videoad', 'compress', self.gf(
'django.db.models.fields.BooleanField')(default=False),
keep_default=False)
<mask token>
<mask token>
<mask token>
|
<mask token>
class Migration(SchemaMigration):
def forwards(self, orm):
db.add_column(u'main_videoad', 'compress', self.gf(
'django.db.models.fields.BooleanField')(default=False),
keep_default=False)
def backwards(self, orm):
db.delete_column(u'main_videoad', 'compress')
<mask token>
<mask token>
|
<mask token>
class Migration(SchemaMigration):
def forwards(self, orm):
db.add_column(u'main_videoad', 'compress', self.gf(
'django.db.models.fields.BooleanField')(default=False),
keep_default=False)
def backwards(self, orm):
db.delete_column(u'main_videoad', 'compress')
models = {u'contenttypes.contenttype': {'Meta': {'ordering':
"('name',)", 'unique_together': "(('app_label', 'model'),)",
'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {
'max_length': '100'}), u'id': ('django.db.models.fields.AutoField',
[], {'primary_key': 'True'}), 'model': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length':
'100'})}, u'main.days': {'Meta': {'object_name': 'Days'}, 'date': (
'django.db.models.fields.DateField', [], {}), u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_ad': ('django.db.models.fields.related.ManyToManyField', [],
{'symmetrical': 'False', 'to': u"orm['main.ImageAd']", 'null':
'True', 'blank': 'True'}), 'show_text': (
'django.db.models.fields.BooleanField', [], {}), 'show_video': (
'django.db.models.fields.BooleanField', [], {}), 'start_time': (
'django.db.models.fields.TimeField', [], {'default':
'datetime.time(8, 0)'}), 'stop_time': (
'django.db.models.fields.TimeField', [], {'default':
'datetime.time(22, 0)'}), 'terminal': (
'django.db.models.fields.related.ForeignKey', [], {'to':
u"orm['main.Terminal']"}), 'text_ad': (
'django.db.models.fields.related.ManyToManyField', [], {
'symmetrical': 'False', 'to': u"orm['main.TextAd']", 'null': 'True',
'blank': 'True'}), 'text_count': (
'django.db.models.fields.PositiveIntegerField', [], {'default': '0'
}), 'video_ad': ('django.db.models.fields.related.ManyToManyField',
[], {'symmetrical': 'False', 'to': u"orm['main.VideoAd']", 'null':
'True', 'blank': 'True'}), 'video_count': (
'django.db.models.fields.PositiveIntegerField', [], {'default': '0'
})}, u'main.imagead': {'Meta': {'object_name': 'ImageAd'},
'datelist': ('main.fields.DateArrayField', [], {}), u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length':
'255'}), 'partner': ('django.db.models.fields.related.ForeignKey',
[], {'to': u"orm['main.Partner']"}), 'prolongation': (
'django.db.models.fields.TimeField', [], {}), 'terminals': (
'django.db.models.fields.related.ManyToManyField', [], {'to':
u"orm['main.Terminal']", 'symmetrical': 'False'})},
u'main.immediatelyad': {'Meta': {'object_name': 'ImmediatelyAd'},
'content_type': ('django.db.models.fields.related.ForeignKey', [],
{'to': u"orm['contenttypes.ContentType']"}), 'day': (
'django.db.models.fields.related.ForeignKey', [], {'related_name':
"u'immediatelies'", 'to': u"orm['main.Days']"}), u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {
}), 'time': ('django.db.models.fields.TimeField', [], {})},
u'main.oscommandlog': {'Meta': {'object_name': 'OsCommandLog'},
'command': ('django.db.models.fields.TextField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {
'auto_now': 'True', 'blank': 'True'}), 'errors': (
'django.db.models.fields.TextField', [], {}), u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ouput': ('django.db.models.fields.TextField', [], {}),
'return_code': ('django.db.models.fields.CharField', [], {
'max_length': '255'})}, u'main.partner': {'Meta': {'object_name':
'Partner'}, 'account_number': ('django.db.models.fields.CharField',
[], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'bank':
('django.db.models.fields.CharField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), 'bik': (
'django.db.models.fields.CharField', [], {'max_length': '100',
'null': 'True', 'blank': 'True'}), 'director': (
'django.db.models.fields.CharField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), 'full_name': (
'django.db.models.fields.CharField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inn': ('django.db.models.fields.CharField', [], {'max_length':
'50', 'null': 'True', 'blank': 'True'}), 'kpp': (
'django.db.models.fields.CharField', [], {'max_length': '50',
'null': 'True', 'blank': 'True'}), 'ks': (
'django.db.models.fields.CharField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), 'legal_address': (
'django.db.models.fields.CharField', [], {'max_length': '400',
'null': 'True', 'blank': 'True'}), 'name': (
'django.db.models.fields.CharField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), 'ogrn': (
'django.db.models.fields.CharField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), 'partner_type': (
'django.db.models.fields.PositiveIntegerField', [], {'default': '0'
}), 'passport': ('django.db.models.fields.TextField', [], {'null':
'True', 'blank': 'True'}), 'phones': (
'django.db.models.fields.CharField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), 'short_name': (
'django.db.models.fields.CharField', [], {'max_length': '500',
'null': 'True', 'blank': 'True'})}, u'main.terminal': {'Meta': {
'object_name': 'Terminal'}, 'config': (
'django.db.models.fields.TextField', [], {'null': 'True', 'blank':
'True'}), u'id': ('django.db.models.fields.AutoField', [], {
'primary_key': 'True'}), 'text': (
'django.db.models.fields.TextField', [], {})}, u'main.textad': {
'Meta': {'object_name': 'TextAd'}, 'datelist': (
'main.fields.DateArrayField', [], {}), u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'to':
u"orm['main.Partner']"}), 'terminals': (
'django.db.models.fields.related.ManyToManyField', [], {'to':
u"orm['main.Terminal']", 'symmetrical': 'False'}), 'text': (
'django.db.models.fields.TextField', [], {})}, u'main.videoad': {
'Meta': {'object_name': 'VideoAd'}, 'compress': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'datelist': ('main.fields.DateArrayField', [], {}), 'file_video': (
'filebrowser.fields.FileBrowseField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'to':
u"orm['main.Partner']"}), 'prolongation': (
'django.db.models.fields.TimeField', [], {'null': 'True', 'blank':
'True'}), 'terminals': (
'django.db.models.fields.related.ManyToManyField', [], {'to':
u"orm['main.Terminal']", 'symmetrical': 'False'})}}
complete_apps = ['main']
|
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
db.add_column(u'main_videoad', 'compress', self.gf(
'django.db.models.fields.BooleanField')(default=False),
keep_default=False)
def backwards(self, orm):
db.delete_column(u'main_videoad', 'compress')
models = {u'contenttypes.contenttype': {'Meta': {'ordering':
"('name',)", 'unique_together': "(('app_label', 'model'),)",
'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {
'max_length': '100'}), u'id': ('django.db.models.fields.AutoField',
[], {'primary_key': 'True'}), 'model': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length':
'100'})}, u'main.days': {'Meta': {'object_name': 'Days'}, 'date': (
'django.db.models.fields.DateField', [], {}), u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_ad': ('django.db.models.fields.related.ManyToManyField', [],
{'symmetrical': 'False', 'to': u"orm['main.ImageAd']", 'null':
'True', 'blank': 'True'}), 'show_text': (
'django.db.models.fields.BooleanField', [], {}), 'show_video': (
'django.db.models.fields.BooleanField', [], {}), 'start_time': (
'django.db.models.fields.TimeField', [], {'default':
'datetime.time(8, 0)'}), 'stop_time': (
'django.db.models.fields.TimeField', [], {'default':
'datetime.time(22, 0)'}), 'terminal': (
'django.db.models.fields.related.ForeignKey', [], {'to':
u"orm['main.Terminal']"}), 'text_ad': (
'django.db.models.fields.related.ManyToManyField', [], {
'symmetrical': 'False', 'to': u"orm['main.TextAd']", 'null': 'True',
'blank': 'True'}), 'text_count': (
'django.db.models.fields.PositiveIntegerField', [], {'default': '0'
}), 'video_ad': ('django.db.models.fields.related.ManyToManyField',
[], {'symmetrical': 'False', 'to': u"orm['main.VideoAd']", 'null':
'True', 'blank': 'True'}), 'video_count': (
'django.db.models.fields.PositiveIntegerField', [], {'default': '0'
})}, u'main.imagead': {'Meta': {'object_name': 'ImageAd'},
'datelist': ('main.fields.DateArrayField', [], {}), u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length':
'255'}), 'partner': ('django.db.models.fields.related.ForeignKey',
[], {'to': u"orm['main.Partner']"}), 'prolongation': (
'django.db.models.fields.TimeField', [], {}), 'terminals': (
'django.db.models.fields.related.ManyToManyField', [], {'to':
u"orm['main.Terminal']", 'symmetrical': 'False'})},
u'main.immediatelyad': {'Meta': {'object_name': 'ImmediatelyAd'},
'content_type': ('django.db.models.fields.related.ForeignKey', [],
{'to': u"orm['contenttypes.ContentType']"}), 'day': (
'django.db.models.fields.related.ForeignKey', [], {'related_name':
"u'immediatelies'", 'to': u"orm['main.Days']"}), u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {
}), 'time': ('django.db.models.fields.TimeField', [], {})},
u'main.oscommandlog': {'Meta': {'object_name': 'OsCommandLog'},
'command': ('django.db.models.fields.TextField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {
'auto_now': 'True', 'blank': 'True'}), 'errors': (
'django.db.models.fields.TextField', [], {}), u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ouput': ('django.db.models.fields.TextField', [], {}),
'return_code': ('django.db.models.fields.CharField', [], {
'max_length': '255'})}, u'main.partner': {'Meta': {'object_name':
'Partner'}, 'account_number': ('django.db.models.fields.CharField',
[], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'bank':
('django.db.models.fields.CharField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), 'bik': (
'django.db.models.fields.CharField', [], {'max_length': '100',
'null': 'True', 'blank': 'True'}), 'director': (
'django.db.models.fields.CharField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), 'full_name': (
'django.db.models.fields.CharField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inn': ('django.db.models.fields.CharField', [], {'max_length':
'50', 'null': 'True', 'blank': 'True'}), 'kpp': (
'django.db.models.fields.CharField', [], {'max_length': '50',
'null': 'True', 'blank': 'True'}), 'ks': (
'django.db.models.fields.CharField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), 'legal_address': (
'django.db.models.fields.CharField', [], {'max_length': '400',
'null': 'True', 'blank': 'True'}), 'name': (
'django.db.models.fields.CharField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), 'ogrn': (
'django.db.models.fields.CharField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), 'partner_type': (
'django.db.models.fields.PositiveIntegerField', [], {'default': '0'
}), 'passport': ('django.db.models.fields.TextField', [], {'null':
'True', 'blank': 'True'}), 'phones': (
'django.db.models.fields.CharField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), 'short_name': (
'django.db.models.fields.CharField', [], {'max_length': '500',
'null': 'True', 'blank': 'True'})}, u'main.terminal': {'Meta': {
'object_name': 'Terminal'}, 'config': (
'django.db.models.fields.TextField', [], {'null': 'True', 'blank':
'True'}), u'id': ('django.db.models.fields.AutoField', [], {
'primary_key': 'True'}), 'text': (
'django.db.models.fields.TextField', [], {})}, u'main.textad': {
'Meta': {'object_name': 'TextAd'}, 'datelist': (
'main.fields.DateArrayField', [], {}), u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'to':
u"orm['main.Partner']"}), 'terminals': (
'django.db.models.fields.related.ManyToManyField', [], {'to':
u"orm['main.Terminal']", 'symmetrical': 'False'}), 'text': (
'django.db.models.fields.TextField', [], {})}, u'main.videoad': {
'Meta': {'object_name': 'VideoAd'}, 'compress': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'datelist': ('main.fields.DateArrayField', [], {}), 'file_video': (
'filebrowser.fields.FileBrowseField', [], {'max_length': '255',
'null': 'True', 'blank': 'True'}), u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'to':
u"orm['main.Partner']"}), 'prolongation': (
'django.db.models.fields.TimeField', [], {'null': 'True', 'blank':
'True'}), 'terminals': (
'django.db.models.fields.related.ManyToManyField', [], {'to':
u"orm['main.Terminal']", 'symmetrical': 'False'})}}
complete_apps = ['main']
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'VideoAd.compress'
db.add_column(u'main_videoad', 'compress',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
def backwards(self, orm):
# Deleting field 'VideoAd.compress'
db.delete_column(u'main_videoad', 'compress')
models = {
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'main.days': {
'Meta': {'object_name': 'Days'},
'date': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_ad': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['main.ImageAd']", 'null': 'True', 'blank': 'True'}),
'show_text': ('django.db.models.fields.BooleanField', [], {}),
'show_video': ('django.db.models.fields.BooleanField', [], {}),
'start_time': ('django.db.models.fields.TimeField', [], {'default': 'datetime.time(8, 0)'}),
'stop_time': ('django.db.models.fields.TimeField', [], {'default': 'datetime.time(22, 0)'}),
'terminal': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Terminal']"}),
'text_ad': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['main.TextAd']", 'null': 'True', 'blank': 'True'}),
'text_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'video_ad': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['main.VideoAd']", 'null': 'True', 'blank': 'True'}),
'video_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'main.imagead': {
'Meta': {'object_name': 'ImageAd'},
'datelist': ('main.fields.DateArrayField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length': '255'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Partner']"}),
'prolongation': ('django.db.models.fields.TimeField', [], {}),
'terminals': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Terminal']", 'symmetrical': 'False'})
},
u'main.immediatelyad': {
'Meta': {'object_name': 'ImmediatelyAd'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'day': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'immediatelies'", 'to': u"orm['main.Days']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'time': ('django.db.models.fields.TimeField', [], {})
},
u'main.oscommandlog': {
'Meta': {'object_name': 'OsCommandLog'},
'command': ('django.db.models.fields.TextField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'errors': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ouput': ('django.db.models.fields.TextField', [], {}),
'return_code': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'main.partner': {
'Meta': {'object_name': 'Partner'},
'account_number': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'bank': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'bik': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'director': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inn': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'kpp': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'ks': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'legal_address': ('django.db.models.fields.CharField', [], {'max_length': '400', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ogrn': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'partner_type': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'passport': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'phones': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'})
},
u'main.terminal': {
'Meta': {'object_name': 'Terminal'},
'config': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
u'main.textad': {
'Meta': {'object_name': 'TextAd'},
'datelist': ('main.fields.DateArrayField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Partner']"}),
'terminals': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Terminal']", 'symmetrical': 'False'}),
'text': ('django.db.models.fields.TextField', [], {})
},
u'main.videoad': {
'Meta': {'object_name': 'VideoAd'},
'compress': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'datelist': ('main.fields.DateArrayField', [], {}),
'file_video': ('filebrowser.fields.FileBrowseField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Partner']"}),
'prolongation': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'terminals': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Terminal']", 'symmetrical': 'False'})
}
}
complete_apps = ['main']
|
[
2,
3,
4,
5,
6
] |
2,216 |
200deda300e39b07e0e558277a340b7ad01c7dee
|
<mask token>
|
class State:
<mask token>
<mask token>
|
class State:
def __init__(self, id):
self.id = id
<mask token>
|
class State:
def __init__(self, id):
self.id = id
def NotinClosed(problem, node):
NotVisited = 1
for tuple in problem.closed:
if node.state.id == tuple[0].id and node.depth >= tuple[1]:
NotVisited = 0
return NotVisited
|
class State:
def __init__(self, id):
self.id = id
def NotinClosed(problem, node): #restituisce 1 se lo stato non è stato già visitato (al netto di controlli sulla depth) è quindi bisogna aggiungerlo
NotVisited = 1
for tuple in problem.closed:
if node.state.id == tuple[0].id and node.depth >= tuple[1]:
NotVisited = 0 #presente nei visited ma selected_node ha maggiore/uguale depth
return NotVisited
|
[
0,
1,
2,
3,
4
] |
2,217 |
237a647e7bf0b1c12abd78b1ef6e293e73232a6c
|
from kivy.app import App
from kivy.lang import Builder
from kivy.uix.screenmanager import ScreenManager, Screen
import subprocess
import socket
from kivy.uix.button import Button
from kivy.uix.label import Label
from kivy.uix.boxlayout import BoxLayout
Builder.load_string("""
<MenuScreen>:
BoxLayout:
orientation: "vertical"
<SettingsScreen>:
BoxLayout:
orientation: "vertical"
Button:
text: 'Scan For Networks'
on_release:
root.manager.current = 'networks'
root.scan()
Button:
text: 'Back to menu'
on_release:
root.manager.transition.direction = 'right'
root.manager.current = 'menu'
<NetworksScreen>:
BoxLayout:
orientation: "vertical"
""")
ssids = []
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
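# module-level client socket shared by TestScreen and NetworksScreen; closed when the app quits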
# Declare both screens
class MenuScreen(Screen):
def __init__(self, **kwargs):
super(Screen, self).__init__(**kwargs)
vLayout2 = BoxLayout(orientation='vertical')
self.add_widget(vLayout2)
settings_button = Button(text='Settings')
vLayout2.add_widget(settings_button)
settings_button.bind(on_press=self.forwardFunction)
test_button = Button(text='Test')
vLayout2.add_widget(test_button)
test_button.bind(on_press=self.forwardFunction2)
quit_button = Button(text='Quit')
vLayout2.add_widget(quit_button)
quit_button.bind(on_press=self.closeButton)
def closeButton(self, placeholder):
s.close()
App.get_running_app().stop()
def forwardFunction(self, next_screen):
sm.transition.direction = 'left'
sm.current = 'settings'
def forwardFunction2(self, next_screen):
sm.transition.direction = 'left'
sm.current = 'testing'
class TestScreen(Screen):
def __init__(self, **kwargs):
super(Screen, self).__init__(**kwargs)
vLayout3 = BoxLayout(orientation='vertical')
self.add_widget(vLayout3)
test_button = Button(text='Send Message',pos = (100,25), size=(100, 25), size_hint=(.15, None))
self.add_widget(test_button)
test_button.bind(on_press=self.sendData)
back_button = Button(text='Back to Menu', size=(100, 25), size_hint=(.15, None))
vLayout3.add_widget(back_button)
back_button.bind(on_press=self.backFunction)
def sendData(self, placeholder):
data = 'Test Worked'
try:
s.send(data.encode('utf-8'))
except socket.error:
print("An error has occurred... closing connection to server")
finally:
s.shutdown(socket.SHUT_RDWR)
s.close()
def backFunction(self, next_screen):
sm.transition.direction = 'right'
sm.current = 'menu'
class NetworksScreen(Screen):
#def settings_release(self):
def __init__(self, **kwargs):
super(Screen, self).__init__(**kwargs)
def backFunction(self, next_screen):
sm.transition.direction = 'right'
sm.current = 'settings'
def connectWifi(self, placeholder):
#s = socket.socket() # Create a socket object
host = socket.gethostname() # Get local machine name
port = 12345 # Reserve a port for your service.
try:
s.connect((host, port))
print(s.recv(1024))
except socket.error:
print("An error has occurred... closing connection to server")
        finally:
            #s.shutdown(socket.SHUT_RDWR)
            #s.close()
            pass  # a finally block containing only comments is a SyntaxError, so keep the socket open here
def printButtons(self):
y = 0
s2 = self.manager.get_screen('settings')
vLayout = BoxLayout(orientation='vertical')
self.add_widget(vLayout)
while y < len(ssids) - 1:
button = Button(text=ssids[y])
button.bind(on_press=self.connectWifi)
vLayout.add_widget(button)
y += 1
back_button = Button(text='Back to Settings')
vLayout.add_widget(back_button)
back_button.bind(on_press=self.backFunction)
class SettingsScreen(Screen):
def scan(self):
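        # netsh prints 4 header lines, then one 5-line block per network with the SSID line first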
results = subprocess.check_output(["netsh", "wlan", "show", "network"])
results = results.decode("ascii") # needed in python 3
results = results.replace("\r", "")
ls = results.split("\n")
ls = ls[4:]
x = 0
y = 0
while x < len(ls):
if x % 5 == 0:
ssids.append(ls[x])
x += 1
while y < len(ssids)-1:
y += 1
s2 = self.manager.get_screen('networks')
s2.printButtons()
# Create the screen manager
sm = ScreenManager()
sm.add_widget(MenuScreen(name='menu'))
sm.add_widget(SettingsScreen(name='settings'))
sm.add_widget(TestScreen(name='testing'))
sm.add_widget(NetworksScreen(name='networks'))
class TestApp(App):
def build(self):
return sm
if __name__ == '__main__':
TestApp().run()
| null | null | null | null |
[
0
] |
2,218 |
78178ec8474a3deb876ab7d3950cd427d7a795d5
|
<mask token>
|
<mask token>
if year % 4 == 0 and year % 100 != 0 or year % 400 == 0:
    print('{0} is a leap year'.format(year))
else:
    print('{0} is not a leap year'.format(year))
|
year = int(input('Enter a year: '))
<mask token>
if year % 4 == 0 and year % 100 != 0 or year % 400 == 0:
    print('{0} is a leap year'.format(year))
else:
    print('{0} is not a leap year'.format(year))
|
#-*- coding:UTF-8 -*-
year = int(input('Enter a year: '))
"""
if(year % 4) == 0:
    if(year % 100) == 0:
        if(year % 400) == 0:
            print('{0} is a leap year'.format(year))
        else:
            print('{0} is not a leap year'.format(year))
    else:
        print('{0} is a leap year'.format(year))
else:
    print('{0} is not a leap year'.format(year))
"""
if(year%4)==0 and (year%100)!=0 or (year%400)==0:
    print('{0} is a leap year'.format(year))
else:
    print('{0} is not a leap year'.format(year))
| null |
[
0,
1,
2,
3
] |
2,219 |
d8482da6b9983d990da980c3a5edab0c49a28229
|
<mask token>
|
<mask token>
def jumlah(x, y):
hasil = x + y
return hasil
<mask token>
|
<mask token>
def jumlah(x, y):
hasil = x + y
return hasil
print('result of', x, '+', y, '=', jumlah(x, y))
<mask token>
print(k)
|
x = int(input('enter'))
y = int(input('enter'))
def jumlah(x, y):
hasil = x + y
return hasil
print('result of', x, '+', y, '=', jumlah(x, y))
k = jumlah(2, 4) + 1
print(k)
| null |
[
0,
1,
2,
3
] |
2,220 |
23150f359db97e1e0ce3f12a173cd7015ad22cd4
|
version https://git-lfs.github.com/spec/v1
oid sha256:839b1a9cc0c676f388ebfe8d8f2e89ad7c39a6f0aa50fa76b2236703bf1a8264
size 62
| null | null | null | null |
[
0
] |
2,221 |
a2eabf4dae931d82e4e9eda87d79031711faf1aa
|
<mask token>
def mouseMoved():
redraw()
|
<mask token>
def setup():
global table
table = loadTable('flights.csv', 'header')
size(width, height)
noLoop()
noStroke()
<mask token>
def mouseMoved():
redraw()
|
<mask token>
def setup():
global table
table = loadTable('flights.csv', 'header')
size(width, height)
noLoop()
noStroke()
def draw():
global table
background(255, 255, 255)
for row in table.rows():
from_x = map(row.getFloat('from_long'), -180, 180, 0, width)
from_y = map(row.getFloat('from_lat'), -90, 90, height / 2, 0)
to_x = map(row.getFloat('to_long'), -180, 180, 0, width)
to_y = map(row.getFloat('to_lat'), -90, 90, height, height / 2)
r = 3
if dist(from_x, from_y, mouseX, mouseY) < 15:
fill(255, 0, 0, 20)
else:
fill(0, 0, 255, 5)
ellipse(from_x, from_y, r, r)
ellipse(to_x, to_y, r, r)
def mouseMoved():
redraw()
|
table = None
width = 1000
height = 1000
def setup():
global table
table = loadTable('flights.csv', 'header')
size(width, height)
noLoop()
noStroke()
def draw():
global table
background(255, 255, 255)
for row in table.rows():
from_x = map(row.getFloat('from_long'), -180, 180, 0, width)
from_y = map(row.getFloat('from_lat'), -90, 90, height / 2, 0)
to_x = map(row.getFloat('to_long'), -180, 180, 0, width)
to_y = map(row.getFloat('to_lat'), -90, 90, height, height / 2)
r = 3
if dist(from_x, from_y, mouseX, mouseY) < 15:
fill(255, 0, 0, 20)
else:
fill(0, 0, 255, 5)
ellipse(from_x, from_y, r, r)
ellipse(to_x, to_y, r, r)
def mouseMoved():
redraw()
|
table = None
width = 1000
height = 1000
def setup():
global table
table = loadTable("flights.csv", "header")
size(width, height)
noLoop()
noStroke()
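# draw() projects each flight's lon/lat endpoints into screen space
# (origins in the top half, destinations in the bottom) and tints
# origin points within 15 px of the mouse red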
def draw():
global table
background(255, 255, 255)
for row in table.rows():
from_x = map(row.getFloat('from_long'), -180, 180, 0, width)
from_y = map(row.getFloat('from_lat'), -90, 90, height/2, 0)
to_x = map(row.getFloat('to_long'), -180, 180, 0, width)
to_y = map(row.getFloat('to_lat'), -90, 90, height, height/2)
r = 3
if dist(from_x, from_y, mouseX, mouseY) < 15:
fill(255, 0, 0, 20)
else:
fill(0, 0, 255, 5)
ellipse(from_x, from_y, r, r)
ellipse(to_x, to_y, r, r)
def mouseMoved():
redraw()
|
[
1,
2,
3,
4,
5
] |
2,222 |
65ef3b2ed5eef3d9d9e682ca18cf84457e929df2
|
<mask token>
|
<mask token>
def hmac_sha1_token():
timestamp = str(time.time())
hmac_pass = hmac.new(b'some very secret string', timestamp.encode(
'utf-8'), hashlib.sha1).hexdigest()
token = '%s:%s' % (timestamp, hmac_pass)
return token
|
import hashlib
import hmac
import time
def hmac_sha1_token():
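    # token format: "<unix timestamp>:<hex HMAC-SHA1 of that timestamp>", keyed with a hard-coded demo secret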
timestamp = str(time.time())
hmac_pass = hmac.new(b'some very secret string', timestamp.encode(
'utf-8'), hashlib.sha1).hexdigest()
token = '%s:%s' % (timestamp, hmac_pass)
return token
| null | null |
[
0,
1,
2
] |
2,223 |
e48a6a84268a0fe64e90714bd32712665934fc39
|
<mask token>
|
<mask token>
with open('ratings.csv') as in_file:
csvreader = csv.reader(in_file)
with open('ratings_train.csv', 'w') as train_out:
with open('ratings_test.csv', 'w') as test_out:
for row in csvreader:
if not skipped_header:
skipped_header = True
continue
elif int(row[0]) <= 146541:
train_out.write(','.join(row[:-1]))
train_out.write('\n')
else:
test_out.write(','.join(row[:-1]))
test_out.write('\n')
|
<mask token>
skipped_header = False
with open('ratings.csv') as in_file:
csvreader = csv.reader(in_file)
with open('ratings_train.csv', 'w') as train_out:
with open('ratings_test.csv', 'w') as test_out:
for row in csvreader:
if not skipped_header:
skipped_header = True
continue
elif int(row[0]) <= 146541:
train_out.write(','.join(row[:-1]))
train_out.write('\n')
else:
test_out.write(','.join(row[:-1]))
test_out.write('\n')
|
import csv
skipped_header = False
with open('ratings.csv') as in_file:
csvreader = csv.reader(in_file)
with open('ratings_train.csv', 'w') as train_out:
with open('ratings_test.csv', 'w') as test_out:
for row in csvreader:
if not skipped_header:
skipped_header = True
continue
elif int(row[0]) <= 146541:
train_out.write(','.join(row[:-1]))
train_out.write('\n')
else:
test_out.write(','.join(row[:-1]))
test_out.write('\n')
|
import csv
#ratings.csv must be in the same directory
skipped_header = False
with open("ratings.csv") as in_file:
csvreader = csv.reader(in_file)
#read each row of ratings.csv (userId,movieId,rating,timestamp)
with open("ratings_train.csv", 'w') as train_out:
with open("ratings_test.csv", 'w') as test_out:
for row in csvreader:
if not skipped_header:
skipped_header = True
continue
elif int(row[0]) <= 146541:
train_out.write(",".join(row[:-1]))
train_out.write("\n")
else: #rest of the data (16000 of them)
test_out.write(",".join(row[:-1]))
test_out.write("\n")
|
[
0,
1,
2,
3,
4
] |
2,224 |
454d210c1b1a41e4a645ef7ccb24f80ee20a451c
|
<mask token>
class Cart(models.Model):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
def get_total(self):
total = self.item.price * self.quantity
f_total = format(total, '0.2f')
return f_total
class Order(models.Model):
orderitems = models.ManyToManyField(Cart)
user = models.ForeignKey(MainUser, on_delete=models.CASCADE)
ordered = models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True)
payment_id = models.CharField(max_length=300, blank=True, null=True)
orderid = models.CharField(max_length=300, blank=True, null=True)
|
<mask token>
class Cart(models.Model):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
def __str__(self):
return f'{self.item}x{self.quantity}'
def get_total(self):
total = self.item.price * self.quantity
f_total = format(total, '0.2f')
return f_total
class Order(models.Model):
orderitems = models.ManyToManyField(Cart)
user = models.ForeignKey(MainUser, on_delete=models.CASCADE)
ordered = models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True)
payment_id = models.CharField(max_length=300, blank=True, null=True)
orderid = models.CharField(max_length=300, blank=True, null=True)
|
<mask token>
class Cart(models.Model):
user = models.ForeignKey(MainUser, on_delete=models.CASCADE)
item = models.ForeignKey(Product, on_delete=models.CASCADE)
quantity = models.PositiveIntegerField(default=1)
parchased = models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
def __str__(self):
return f'{self.item}x{self.quantity}'
def get_total(self):
total = self.item.price * self.quantity
f_total = format(total, '0.2f')
return f_total
class Order(models.Model):
orderitems = models.ManyToManyField(Cart)
user = models.ForeignKey(MainUser, on_delete=models.CASCADE)
ordered = models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True)
payment_id = models.CharField(max_length=300, blank=True, null=True)
orderid = models.CharField(max_length=300, blank=True, null=True)
|
from django.db import models
from home.models import MainUser
from product.models import Product
class Cart(models.Model):
user = models.ForeignKey(MainUser, on_delete=models.CASCADE)
item = models.ForeignKey(Product, on_delete=models.CASCADE)
quantity = models.PositiveIntegerField(default=1)
parchased = models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
def __str__(self):
return f'{self.item}x{self.quantity}'
def get_total(self):
total = self.item.price * self.quantity
f_total = format(total, '0.2f')
return f_total
class Order(models.Model):
orderitems = models.ManyToManyField(Cart)
user = models.ForeignKey(MainUser, on_delete=models.CASCADE)
ordered = models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True)
payment_id = models.CharField(max_length=300, blank=True, null=True)
orderid = models.CharField(max_length=300, blank=True, null=True)
|
from django.db import models
from home.models import MainUser
from product.models import Product
# Create your models here.
class Cart(models.Model):
user = models.ForeignKey(MainUser,on_delete=models.CASCADE)
item = models.ForeignKey(Product, on_delete=models.CASCADE)
quantity = models.PositiveIntegerField(default=1)
parchased=models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
def __str__(self):
return f'{self.item}x{self.quantity}'
def get_total(self):
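        # line total = unit price * quantity, formatted to two decimal places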
total=self.item.price *self.quantity
f_total=format(total,'0.2f')
return f_total
class Order(models.Model):
orderitems = models.ManyToManyField(Cart)
user=models.ForeignKey(MainUser,on_delete=models.CASCADE)
ordered=models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True)
payment_id=models.CharField(max_length=300,blank=True,null=True)
orderid=models.CharField(max_length=300,blank=True,null=True)
|
[
4,
5,
6,
7,
8
] |
2,225 |
7ef0bb3e8cbba4a29249a09cf7bc91e053411361
|
<mask token>
class MaoyanSpider(object):
def __init__(self):
self.url = 'https://maoyan.com/board/4?offset={}'
def get_html(self, url):
headers = {'User-Agent': random.choice(ua_list)}
req = request.Request(url=url, headers=headers)
res = request.urlopen(req)
html = res.read().decode()
return html
<mask token>
<mask token>
<mask token>
def get_comment(self, two_link):
two_html = self.get_html(two_link)
with open('test.html', 'w') as f:
f.write(two_html)
re_bds = '<div class="comment-content">(.*?)</div>'
comment_list = self.re_func(re_bds, two_html)
return comment_list
def save_image(self, two_link, name):
two_html = self.get_html(two_link)
re_bds = (
'<div class="img.*?"><img class="default-img" data-src="(.*?)" alt=""></div>'
)
link_list = self.re_func(re_bds, two_html)
print(link_list)
directory = '/home/tarena/images/' + name + '/'
if not os.path.exists(directory):
os.makedirs(directory)
for link in link_list:
headers = {'User-Agent': random.choice(ua_list)}
req = request.Request(url=link, headers=headers)
res = request.urlopen(req)
html = res.read()
filename = directory + link.split('@')[0][-10:]
with open(filename, 'wb') as f:
f.write(html)
time.sleep(random.randint(1, 3))
def run(self):
for offset in range(0, 21, 10):
url = self.url.format(offset)
self.parse_html(url)
time.sleep(random.randint(1, 2))
<mask token>
|
<mask token>
class MaoyanSpider(object):
def __init__(self):
self.url = 'https://maoyan.com/board/4?offset={}'
def get_html(self, url):
headers = {'User-Agent': random.choice(ua_list)}
req = request.Request(url=url, headers=headers)
res = request.urlopen(req)
html = res.read().decode()
return html
def re_func(self, re_bds, html):
pattern = re.compile(re_bds, re.S)
r_list = pattern.findall(html)
return r_list
def parse_html(self, one_url):
one_html = self.get_html(one_url)
re_bds = (
'<div class="movie-item-info">.*?href="(.*?)".*?title="(.*?)".*?<p class="star">(.*?)</p>.*?class="releasetime">(.*?)</p>'
)
r_list = self.re_func(re_bds, one_html)
self.save_html(r_list)
def save_html(self, r_list):
item = {}
for r in r_list:
item['name'] = r[1].strip()
item['star'] = r[2].strip()[3:]
item['time'] = r[3].strip()[5:15]
two_link = 'https://maoyan.com' + r[0]
item['comment'] = self.get_comment(two_link)
print(item)
self.save_image(two_link, item['name'])
def get_comment(self, two_link):
two_html = self.get_html(two_link)
with open('test.html', 'w') as f:
f.write(two_html)
re_bds = '<div class="comment-content">(.*?)</div>'
comment_list = self.re_func(re_bds, two_html)
return comment_list
def save_image(self, two_link, name):
two_html = self.get_html(two_link)
re_bds = (
'<div class="img.*?"><img class="default-img" data-src="(.*?)" alt=""></div>'
)
link_list = self.re_func(re_bds, two_html)
print(link_list)
directory = '/home/tarena/images/' + name + '/'
if not os.path.exists(directory):
os.makedirs(directory)
for link in link_list:
headers = {'User-Agent': random.choice(ua_list)}
req = request.Request(url=link, headers=headers)
res = request.urlopen(req)
html = res.read()
filename = directory + link.split('@')[0][-10:]
with open(filename, 'wb') as f:
f.write(html)
time.sleep(random.randint(1, 3))
def run(self):
for offset in range(0, 21, 10):
url = self.url.format(offset)
self.parse_html(url)
time.sleep(random.randint(1, 2))
<mask token>
|
<mask token>
class MaoyanSpider(object):
def __init__(self):
self.url = 'https://maoyan.com/board/4?offset={}'
def get_html(self, url):
headers = {'User-Agent': random.choice(ua_list)}
req = request.Request(url=url, headers=headers)
res = request.urlopen(req)
html = res.read().decode()
return html
def re_func(self, re_bds, html):
pattern = re.compile(re_bds, re.S)
r_list = pattern.findall(html)
return r_list
def parse_html(self, one_url):
one_html = self.get_html(one_url)
re_bds = (
'<div class="movie-item-info">.*?href="(.*?)".*?title="(.*?)".*?<p class="star">(.*?)</p>.*?class="releasetime">(.*?)</p>'
)
r_list = self.re_func(re_bds, one_html)
self.save_html(r_list)
def save_html(self, r_list):
item = {}
for r in r_list:
item['name'] = r[1].strip()
item['star'] = r[2].strip()[3:]
item['time'] = r[3].strip()[5:15]
two_link = 'https://maoyan.com' + r[0]
item['comment'] = self.get_comment(two_link)
print(item)
self.save_image(two_link, item['name'])
def get_comment(self, two_link):
two_html = self.get_html(two_link)
with open('test.html', 'w') as f:
f.write(two_html)
re_bds = '<div class="comment-content">(.*?)</div>'
comment_list = self.re_func(re_bds, two_html)
return comment_list
def save_image(self, two_link, name):
two_html = self.get_html(two_link)
re_bds = (
'<div class="img.*?"><img class="default-img" data-src="(.*?)" alt=""></div>'
)
link_list = self.re_func(re_bds, two_html)
print(link_list)
directory = '/home/tarena/images/' + name + '/'
if not os.path.exists(directory):
os.makedirs(directory)
for link in link_list:
headers = {'User-Agent': random.choice(ua_list)}
req = request.Request(url=link, headers=headers)
res = request.urlopen(req)
html = res.read()
filename = directory + link.split('@')[0][-10:]
with open(filename, 'wb') as f:
f.write(html)
time.sleep(random.randint(1, 3))
def run(self):
for offset in range(0, 21, 10):
url = self.url.format(offset)
self.parse_html(url)
time.sleep(random.randint(1, 2))
if __name__ == '__main__':
spider = MaoyanSpider()
spider.run()
|
from urllib import request
import time
import random
from useragents import ua_list
import re
import os
class MaoyanSpider(object):
def __init__(self):
self.url = 'https://maoyan.com/board/4?offset={}'
def get_html(self, url):
headers = {'User-Agent': random.choice(ua_list)}
req = request.Request(url=url, headers=headers)
res = request.urlopen(req)
html = res.read().decode()
return html
def re_func(self, re_bds, html):
pattern = re.compile(re_bds, re.S)
r_list = pattern.findall(html)
return r_list
def parse_html(self, one_url):
one_html = self.get_html(one_url)
re_bds = (
'<div class="movie-item-info">.*?href="(.*?)".*?title="(.*?)".*?<p class="star">(.*?)</p>.*?class="releasetime">(.*?)</p>'
)
r_list = self.re_func(re_bds, one_html)
self.save_html(r_list)
def save_html(self, r_list):
item = {}
for r in r_list:
item['name'] = r[1].strip()
item['star'] = r[2].strip()[3:]
item['time'] = r[3].strip()[5:15]
two_link = 'https://maoyan.com' + r[0]
item['comment'] = self.get_comment(two_link)
print(item)
self.save_image(two_link, item['name'])
def get_comment(self, two_link):
two_html = self.get_html(two_link)
with open('test.html', 'w') as f:
f.write(two_html)
re_bds = '<div class="comment-content">(.*?)</div>'
comment_list = self.re_func(re_bds, two_html)
return comment_list
def save_image(self, two_link, name):
two_html = self.get_html(two_link)
re_bds = (
'<div class="img.*?"><img class="default-img" data-src="(.*?)" alt=""></div>'
)
link_list = self.re_func(re_bds, two_html)
print(link_list)
directory = '/home/tarena/images/' + name + '/'
if not os.path.exists(directory):
os.makedirs(directory)
for link in link_list:
headers = {'User-Agent': random.choice(ua_list)}
req = request.Request(url=link, headers=headers)
res = request.urlopen(req)
html = res.read()
filename = directory + link.split('@')[0][-10:]
with open(filename, 'wb') as f:
f.write(html)
time.sleep(random.randint(1, 3))
def run(self):
for offset in range(0, 21, 10):
url = self.url.format(offset)
self.parse_html(url)
time.sleep(random.randint(1, 2))
if __name__ == '__main__':
spider = MaoyanSpider()
spider.run()
|
from urllib import request
import time
import random
from useragents import ua_list
import re
import os
class MaoyanSpider(object):
def __init__(self):
self.url = 'https://maoyan.com/board/4?offset={}'
    # request helper: fetch the page html
def get_html(self,url):
headers = {
'User-Agent':random.choice(ua_list)
}
req = request.Request(url=url,headers=headers)
res = request.urlopen(req)
html = res.read().decode()
return html
    # parsing helper
def re_func(self,re_bds,html):
pattern = re.compile(re_bds,re.S)
r_list = pattern.findall(html)
return r_list
    # parse the first-level (listing) page
def parse_html(self,one_url):
one_html = self.get_html(one_url)
re_bds = '<div class="movie-item-info">.*?href="(.*?)".*?title="(.*?)".*?<p class="star">(.*?)</p>.*?class="releasetime">(.*?)</p>'
# r_list: [('/films/1203','name','star','time'),()]
r_list = self.re_func(re_bds,one_html)
self.save_html(r_list)
def save_html(self,r_list):
item = {}
# r: ('/films/1203','name','star','time')
for r in r_list:
item['name'] = r[1].strip()
item['star'] = r[2].strip()[3:]
item['time'] = r[3].strip()[5:15]
two_link = 'https://maoyan.com' + r[0]
item['comment'] = self.get_comment(two_link)
print(item)
self.save_image(two_link,item['name'])
    # fetch the movie comments
def get_comment(self,two_link):
two_html = self.get_html(two_link)
with open('test.html','w') as f:
f.write(two_html)
re_bds = '<div class="comment-content">(.*?)</div>'
comment_list = self.re_func(re_bds,two_html)
return comment_list
    # save the still images
def save_image(self,two_link,name):
two_html = self.get_html(two_link)
re_bds = '<div class="img.*?"><img class="default-img" data-src="(.*?)" alt=""></div>'
# link_list: ['src1','src2','src3']
link_list = self.re_func(re_bds,two_html)
print(link_list)
        # create the matching folder
directory = '/home/tarena/images/' + name + '/'
if not os.path.exists(directory):
os.makedirs(directory)
for link in link_list:
headers = {'User-Agent':random.choice(ua_list)}
req = request.Request(url=link,headers=headers)
res = request.urlopen(req)
html = res.read()
filename = directory + \
link.split('@')[0][-10:]
with open(filename,'wb') as f:
f.write(html)
time.sleep(random.randint(1,3))
def run(self):
for offset in range(0,21,10):
url = self.url.format(offset)
self.parse_html(url)
time.sleep(random.randint(1,2))
if __name__ == '__main__':
spider = MaoyanSpider()
spider.run()
|
[
6,
9,
10,
11,
12
] |
2,226 |
96cfee85194c9c30b3d74bbddc2a31b6933eb032
|
<mask token>
|
<mask token>
def isCousin(root, a, b):
if check(root, a, b) == False:
return False
q = []
q.insert(0, root)
tmp = set()
while len(q):
l = len(q)
for i in range(l):
n = q.pop()
tmp.add(n.data)
if n.left:
q.insert(0, n.left)
if n.right:
q.insert(0, n.right)
if a in tmp and b in tmp:
return True
tmp.clear()
return False
|
def check(root, a, b):
if root:
if (root.left == a and root.right == b or root.left == b and root.
right == a):
return False
return check(root.left, a, b) and check(root.right, a, b)
return True
def isCousin(root, a, b):
if check(root, a, b) == False:
return False
q = []
q.insert(0, root)
tmp = set()
while len(q):
l = len(q)
for i in range(l):
n = q.pop()
tmp.add(n.data)
if n.left:
q.insert(0, n.left)
if n.right:
q.insert(0, n.right)
if a in tmp and b in tmp:
return True
tmp.clear()
return False
|
def check(root, a, b):
if root:
if (root.left == a and root.right == b) or (root.left ==b and root.right==a):
return False
return check(root.left, a, b) and check(root.right, a, b)
return True
def isCousin(root, a, b):
# Your code here
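    # BFS one level at a time: a and b are cousins iff both ids appear in the
    # same level's set (check() already ruled out a shared parent)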
if check(root, a, b)==False:
return False
q=[]
q.insert(0, root)
tmp=set()
while(len(q)):
l = len(q)
for i in range(l):
n = q.pop()
tmp.add(n.data)
if n.left:
q.insert(0, n.left)
if n.right:
q.insert(0, n.right)
if a in tmp and b in tmp:
return True
tmp.clear()
return False
| null |
[
0,
1,
2,
3
] |
2,227 |
f2a508ae99697d6ba320b158a1000379b975d568
|
<mask token>
|
<mask token>
for i in word:
if 'a' <= i and i <= 'z' or 'A' <= i and i <= 'Z':
letter += 1
if '0' <= i and i <= '9':
digit += 1
print("""LETTERS {0}
DIGITS {1}""".format(letter, digit))
|
word = input()
letter, digit = 0, 0
for i in word:
if 'a' <= i and i <= 'z' or 'A' <= i and i <= 'Z':
letter += 1
if '0' <= i and i <= '9':
digit += 1
print("""LETTERS {0}
DIGITS {1}""".format(letter, digit))
|
word=input()
letter,digit=0,0
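# count ASCII letters and decimal digits via character-range comparisons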
for i in word:
if('a'<=i and i<='z') or ('A'<=i and i<='Z'):
letter+=1
if '0'<=i and i<='9':
digit+=1
print("LETTERS {0} \n DIGITS {1}".format(letter,digit))
| null |
[
0,
1,
2,
3
] |
2,228 |
2a65287588fe1337ba1a6f7c2e15e0505611d739
|
<mask token>
|
<mask token>
def a():
lines = 0
words = 0
letters = 0
for line in open(f'{text}.txt', 'r'):
lines += 1
letters += len(line.strip('.,:-()!?;)"\'\n}'))
words += len(line.split())
return f'Lines = {lines}, words = {words}, letters = {letters}'
<mask token>
|
<mask token>
def a():
lines = 0
words = 0
letters = 0
for line in open(f'{text}.txt', 'r'):
lines += 1
letters += len(line.strip('.,:-()!?;)"\'\n}'))
words += len(line.split())
return f'Lines = {lines}, words = {words}, letters = {letters}'
print(a())
|
text = input('Enter the file name: ')
def a():
lines = 0
words = 0
letters = 0
for line in open(f'{text}.txt', 'r'):
lines += 1
letters += len(line.strip('.,:-()!?;)"\'\n}'))
words += len(line.split())
return f'Lines = {lines}, words = {words}, letters = {letters}'
print(a())
| null |
[
0,
1,
2,
3
] |
2,229 |
c93bd042340a6e1d0124d8f6176bdf17ab56e405
|
<mask token>
|
<mask token>
def euler_29(max_a, max_b):
gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(
2, max_b + 1)))
return len(set(gen))
<mask token>
|
<mask token>
def euler_29(max_a, max_b):
gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(
2, max_b + 1)))
return len(set(gen))
if __name__ == '__main__':
print(euler_29(100, 100))
|
<mask token>
import itertools
def euler_29(max_a, max_b):
gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(
2, max_b + 1)))
return len(set(gen))
if __name__ == '__main__':
print(euler_29(100, 100))
|
#!/usr/bin/env python
"""
Consider all integer combinations of a^b for 2 ≤ a ≤ 5 and 2 ≤ b ≤ 5:
2^2=4, 2^3=8, 2^4=16, 2^5=32
3^2=9, 3^3=27, 3^4=81, 3^5=243
4^2=16, 4^3=64, 4^4=256, 4^5=1024
5^2=25, 5^3=125, 5^4=625, 5^5=3125
If they are then placed in numerical order, with any repeats removed, we get the following sequence of 15 distinct terms:
4, 8, 9, 16, 25, 27, 32, 64, 81, 125, 243, 256, 625, 1024, 3125
How many distinct terms are in the sequence generated by a^b for 2 ≤ a ≤ 100 and 2 ≤ b ≤ 100?
"""
import itertools
def euler_29(max_a, max_b):
gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(2, max_b + 1)))
return len(set(gen))
if __name__ == "__main__":
print(euler_29(100, 100))
|
[
0,
1,
2,
3,
4
] |
2,230 |
bfd31d0b80511721ee5117daced04eaf63679fd8
|
<mask token>
def get_clusters(link, dn, inds, th=0.7):
clst = fcluster(link, criterion='distance', t=th)
return pandas.Series(index=inds, data=clst).iloc[dn['leaves']]
def draw_significant_groups(groups, dn_ax, color='white'):
for group in groups:
rect = patches.Rectangle((group[0][0], group[1][0]), group[0][1] -
group[0][0], group[1][1] - group[1][0], linewidth=1, edgecolor=
color, facecolor='none')
dn_ax.add_patch(rect)
def draw_legume_group(group, ax):
y_values = ax.get_ylim()
x_values = ax.get_xlim()
rect = patches.Rectangle((0, 0), x_values[1], group[0], linewidth=1,
edgecolor='white', facecolor='white', alpha=0.6)
ax.add_patch(rect)
rect = patches.Rectangle((0, group[1]), x_values[1], y_values[0] -
group[1], linewidth=1, edgecolor='white', facecolor='white', alpha=0.6)
ax.add_patch(rect)
def get_groups(clst, clust_above=MIN_CLUST):
groups = []
v = -1
for i in range(len(clst)):
if clst[i] == v:
continue
if v == -1:
groups.append([i])
v = clst[i]
continue
if i - groups[-1][0] >= clust_above:
groups[-1].append(i)
groups.append([i])
else:
groups[-1][0] = i
v = clst[i]
groups = groups[:-1]
return groups
<mask token>
|
<mask token>
def get_clusters(link, dn, inds, th=0.7):
clst = fcluster(link, criterion='distance', t=th)
return pandas.Series(index=inds, data=clst).iloc[dn['leaves']]
def draw_significant_groups(groups, dn_ax, color='white'):
for group in groups:
rect = patches.Rectangle((group[0][0], group[1][0]), group[0][1] -
group[0][0], group[1][1] - group[1][0], linewidth=1, edgecolor=
color, facecolor='none')
dn_ax.add_patch(rect)
def draw_legume_group(group, ax):
y_values = ax.get_ylim()
x_values = ax.get_xlim()
rect = patches.Rectangle((0, 0), x_values[1], group[0], linewidth=1,
edgecolor='white', facecolor='white', alpha=0.6)
ax.add_patch(rect)
rect = patches.Rectangle((0, group[1]), x_values[1], y_values[0] -
group[1], linewidth=1, edgecolor='white', facecolor='white', alpha=0.6)
ax.add_patch(rect)
def get_groups(clst, clust_above=MIN_CLUST):
groups = []
v = -1
for i in range(len(clst)):
if clst[i] == v:
continue
if v == -1:
groups.append([i])
v = clst[i]
continue
if i - groups[-1][0] >= clust_above:
groups[-1].append(i)
groups.append([i])
else:
groups[-1][0] = i
v = clst[i]
groups = groups[:-1]
return groups
if __name__ == '__main__':
os.makedirs(out_path, exist_ok=True)
df_info = pandas.read_csv(os.path.join(base_path,
'library_contents.csv'), index_col=0, low_memory=False)
df_info = df_info[df_info.is_allergens & (df_info['num_copy'] == 1)]
inds = df_info.index
l_base = len(inds)
meta_df = pandas.read_csv(os.path.join(base_path, 'cohort.csv'),
index_col=0, low_memory=False)
meta_df = meta_df[(meta_df.timepoint == 1) & (meta_df.num_passed >=
MIN_OLIS)]
fold_df = pandas.read_csv(os.path.join(base_path, 'fold_data.csv'),
index_col=[0, 1], low_memory=False).loc[meta_df.index].unstack()
fold_df.columns = fold_df.columns.get_level_values(1)
fold_df = fold_df[fold_df.columns.intersection(inds)]
if THROW_BAD_OLIS:
drop = fold_df.columns[(fold_df == -1).sum() > 0]
fold_df = fold_df[fold_df.columns.difference(drop)].fillna(1)
inds = df_info.index.difference(drop)
df_info = df_info.loc[inds]
fold_df = fold_df[fold_df.columns[(fold_df > 1).sum() > MIN_APPEAR *
len(fold_df)]]
fold_df = numpy.log(fold_df.fillna(1))
df_info = df_info.loc[fold_df.columns]
th = CLUST_TH
corr = fold_df.corr('spearman')
link = linkage(squareform(1 - corr), method='average')
dn = dendrogram(link, no_plot=True)
clst = get_clusters(link, dn, corr.columns, th)
groups = get_groups(clst)
corr1 = fold_df.T.corr('spearman')
link1 = linkage(squareform(1 - corr1), method='average')
dn1 = dendrogram(link1, no_plot=True)
clst1 = get_clusters(link1, dn1, corr1.columns, th)
groups1 = get_groups(clst1)
fig = plt.figure(figsize=[9.2, 12])
gs = GridSpec(1, 3, width_ratios=[0.2, 3, 1])
bar_ax = fig.add_subplot(gs[0])
dendogram_ax = fig.add_subplot(gs[1])
sns.heatmap(fold_df.iloc[dn1['leaves'], dn['leaves']], cmap=sns.
color_palette('flare', as_cmap=True), ax=dendogram_ax, yticklabels=
False, xticklabels=False, cbar_ax=bar_ax)
dendogram_ax.set_xlabel('oligos')
dendogram_ax.set_ylabel('samples')
mt = 'normalized mt_1342'
bar_axis1 = fig.add_subplot(gs[2], sharey=dendogram_ax)
meta_df['yob'] = (meta_df['yob'] - 1944) / 60
use_columns = ['gender', 'yob']
sample_extra_info = pandas.merge(meta_df[use_columns], meta_df[mt],
left_index=True, right_index=True, how='left')
sample_extra_info[mt] = ((sample_extra_info[mt] - sample_extra_info[mt]
.min()) / (sample_extra_info[mt].max() - sample_extra_info[mt].min())
).astype(float)
sample_extra_info.rename(columns={mt: 'norm mt_1342'}, inplace=True)
mt = 'norm mt_1342'
sample_extra_info = sample_extra_info.iloc[dn1['leaves']]
sns.heatmap(data=sample_extra_info, xticklabels=sample_extra_info.
columns, yticklabels=False, ax=bar_axis1, cmap=sns.color_palette(
'viridis', as_cmap=True))
fold_df = fold_df.iloc[dn1['leaves'], dn['leaves']].copy()
significant_groups = []
for oligo_subgroup in groups:
sample_group_means = sorted(enumerate([fold_df.iloc[range(*
sample_group), range(*oligo_subgroup)].mean().mean() for
sample_group in groups1]), key=lambda x: -x[1])
if sample_group_means[0][1] > 2 * sample_group_means[1][1]:
significant_groups.append([oligo_subgroup, groups1[
sample_group_means[0][0]]])
draw_significant_groups(significant_groups, dendogram_ax)
mt_scores = pandas.Series([mannwhitneyu(sample_extra_info.iloc[range(*
sample_group)][mt].dropna(), sample_extra_info.iloc[list(range(0,
sample_group[0])) + list(range(sample_group[1], len(
sample_extra_info)))][mt].dropna())[1] for oligos_group,
sample_group in significant_groups])
mt_group = significant_groups[mt_scores.idxmin()]
mt_pval = mt_scores.min()
draw_significant_groups([mt_group], dendogram_ax, color='blue')
draw_legume_group(mt_group[1], bar_axis1)
plt.suptitle(
"""For group marked in blue the %s level
of samples in group vs those not in group
"""
% mt + 'got MW p-value of %g' % mt_pval)
plt.savefig(os.path.join(out_path, 'legumes.png'))
res = {}
inds = sample_extra_info[mt].dropna().index
for i in range(*mt_group[0]):
col = fold_df.columns[i]
res[col] = spearmanr(sample_extra_info.loc[inds][mt], fold_df.loc[
inds, col].values)
res = pandas.DataFrame(res, index=['stat', 'pval']).T.sort_values('pval')
res['Bonf'] = res['pval'] * len(res)
FDR = multipletests(res.pval.values.tolist(), method='fdr_by')
res['FDR_BY'] = FDR[0]
res['FDR_BY_qval'] = FDR[1]
FDR = multipletests(res.pval.values.tolist(), method='fdr_bh')
res['FDR_BH'] = FDR[0]
res['FDR_BH_qval'] = FDR[1]
res['allergens_common_name'] = df_info.loc[res.index].allergens_common_name
print('Of %d oligos in the blue group %d pass FDR (BY) vs %s' % (len(
res), len(res[res.FDR_BY]), mt))
res.to_csv(os.path.join(out_path, 'mt_1342.csv'))
|
<mask token>
MIN_OLIS = 200
THROW_BAD_OLIS = True
MIN_APPEAR = 0.02
CLUST_TH = 0.7
MIN_CLUST = 10
def get_clusters(link, dn, inds, th=0.7):
clst = fcluster(link, criterion='distance', t=th)
return pandas.Series(index=inds, data=clst).iloc[dn['leaves']]
def draw_significant_groups(groups, dn_ax, color='white'):
for group in groups:
rect = patches.Rectangle((group[0][0], group[1][0]), group[0][1] -
group[0][0], group[1][1] - group[1][0], linewidth=1, edgecolor=
color, facecolor='none')
dn_ax.add_patch(rect)
def draw_legume_group(group, ax):
y_values = ax.get_ylim()
x_values = ax.get_xlim()
rect = patches.Rectangle((0, 0), x_values[1], group[0], linewidth=1,
edgecolor='white', facecolor='white', alpha=0.6)
ax.add_patch(rect)
rect = patches.Rectangle((0, group[1]), x_values[1], y_values[0] -
group[1], linewidth=1, edgecolor='white', facecolor='white', alpha=0.6)
ax.add_patch(rect)
def get_groups(clst, clust_above=MIN_CLUST):
groups = []
v = -1
for i in range(len(clst)):
if clst[i] == v:
continue
if v == -1:
groups.append([i])
v = clst[i]
continue
if i - groups[-1][0] >= clust_above:
groups[-1].append(i)
groups.append([i])
else:
groups[-1][0] = i
v = clst[i]
groups = groups[:-1]
return groups
if __name__ == '__main__':
os.makedirs(out_path, exist_ok=True)
df_info = pandas.read_csv(os.path.join(base_path,
'library_contents.csv'), index_col=0, low_memory=False)
df_info = df_info[df_info.is_allergens & (df_info['num_copy'] == 1)]
inds = df_info.index
l_base = len(inds)
meta_df = pandas.read_csv(os.path.join(base_path, 'cohort.csv'),
index_col=0, low_memory=False)
meta_df = meta_df[(meta_df.timepoint == 1) & (meta_df.num_passed >=
MIN_OLIS)]
fold_df = pandas.read_csv(os.path.join(base_path, 'fold_data.csv'),
index_col=[0, 1], low_memory=False).loc[meta_df.index].unstack()
fold_df.columns = fold_df.columns.get_level_values(1)
fold_df = fold_df[fold_df.columns.intersection(inds)]
if THROW_BAD_OLIS:
drop = fold_df.columns[(fold_df == -1).sum() > 0]
fold_df = fold_df[fold_df.columns.difference(drop)].fillna(1)
inds = df_info.index.difference(drop)
df_info = df_info.loc[inds]
fold_df = fold_df[fold_df.columns[(fold_df > 1).sum() > MIN_APPEAR *
len(fold_df)]]
fold_df = numpy.log(fold_df.fillna(1))
df_info = df_info.loc[fold_df.columns]
th = CLUST_TH
corr = fold_df.corr('spearman')
link = linkage(squareform(1 - corr), method='average')
dn = dendrogram(link, no_plot=True)
clst = get_clusters(link, dn, corr.columns, th)
groups = get_groups(clst)
corr1 = fold_df.T.corr('spearman')
link1 = linkage(squareform(1 - corr1), method='average')
dn1 = dendrogram(link1, no_plot=True)
clst1 = get_clusters(link1, dn1, corr1.columns, th)
groups1 = get_groups(clst1)
fig = plt.figure(figsize=[9.2, 12])
gs = GridSpec(1, 3, width_ratios=[0.2, 3, 1])
bar_ax = fig.add_subplot(gs[0])
dendogram_ax = fig.add_subplot(gs[1])
sns.heatmap(fold_df.iloc[dn1['leaves'], dn['leaves']], cmap=sns.
color_palette('flare', as_cmap=True), ax=dendogram_ax, yticklabels=
False, xticklabels=False, cbar_ax=bar_ax)
dendogram_ax.set_xlabel('oligos')
dendogram_ax.set_ylabel('samples')
mt = 'normalized mt_1342'
bar_axis1 = fig.add_subplot(gs[2], sharey=dendogram_ax)
meta_df['yob'] = (meta_df['yob'] - 1944) / 60
use_columns = ['gender', 'yob']
sample_extra_info = pandas.merge(meta_df[use_columns], meta_df[mt],
left_index=True, right_index=True, how='left')
sample_extra_info[mt] = ((sample_extra_info[mt] - sample_extra_info[mt]
.min()) / (sample_extra_info[mt].max() - sample_extra_info[mt].min())
).astype(float)
sample_extra_info.rename(columns={mt: 'norm mt_1342'}, inplace=True)
mt = 'norm mt_1342'
sample_extra_info = sample_extra_info.iloc[dn1['leaves']]
sns.heatmap(data=sample_extra_info, xticklabels=sample_extra_info.
columns, yticklabels=False, ax=bar_axis1, cmap=sns.color_palette(
'viridis', as_cmap=True))
fold_df = fold_df.iloc[dn1['leaves'], dn['leaves']].copy()
significant_groups = []
for oligo_subgroup in groups:
sample_group_means = sorted(enumerate([fold_df.iloc[range(*
sample_group), range(*oligo_subgroup)].mean().mean() for
sample_group in groups1]), key=lambda x: -x[1])
if sample_group_means[0][1] > 2 * sample_group_means[1][1]:
significant_groups.append([oligo_subgroup, groups1[
sample_group_means[0][0]]])
draw_significant_groups(significant_groups, dendogram_ax)
mt_scores = pandas.Series([mannwhitneyu(sample_extra_info.iloc[range(*
sample_group)][mt].dropna(), sample_extra_info.iloc[list(range(0,
sample_group[0])) + list(range(sample_group[1], len(
sample_extra_info)))][mt].dropna())[1] for oligos_group,
sample_group in significant_groups])
mt_group = significant_groups[mt_scores.idxmin()]
mt_pval = mt_scores.min()
draw_significant_groups([mt_group], dendogram_ax, color='blue')
draw_legume_group(mt_group[1], bar_axis1)
plt.suptitle(
"""For group marked in blue the %s level
of samples in group vs those not in group
"""
% mt + 'got MW p-value of %g' % mt_pval)
plt.savefig(os.path.join(out_path, 'legumes.png'))
res = {}
inds = sample_extra_info[mt].dropna().index
for i in range(*mt_group[0]):
col = fold_df.columns[i]
res[col] = spearmanr(sample_extra_info.loc[inds][mt], fold_df.loc[
inds, col].values)
res = pandas.DataFrame(res, index=['stat', 'pval']).T.sort_values('pval')
res['Bonf'] = res['pval'] * len(res)
FDR = multipletests(res.pval.values.tolist(), method='fdr_by')
res['FDR_BY'] = FDR[0]
res['FDR_BY_qval'] = FDR[1]
FDR = multipletests(res.pval.values.tolist(), method='fdr_bh')
res['FDR_BH'] = FDR[0]
res['FDR_BH_qval'] = FDR[1]
res['allergens_common_name'] = df_info.loc[res.index].allergens_common_name
print('Of %d oligos in the blue group %d pass FDR (BY) vs %s' % (len(
res), len(res[res.FDR_BY]), mt))
res.to_csv(os.path.join(out_path, 'mt_1342.csv'))
|
from scipy.stats import mannwhitneyu
import matplotlib.patches as patches
import os
import numpy
import pandas
from matplotlib.gridspec import GridSpec
from scipy.cluster.hierarchy import fcluster, linkage, dendrogram
from scipy.spatial.distance import squareform
import seaborn as sns
from scipy.stats import spearmanr
from statsmodels.stats.multitest import multipletests
import matplotlib.pyplot as plt
from config import base_path, out_path
MIN_OLIS = 200
THROW_BAD_OLIS = True
MIN_APPEAR = 0.02
CLUST_TH = 0.7
MIN_CLUST = 10
def get_clusters(link, dn, inds, th=0.7):
clst = fcluster(link, criterion='distance', t=th)
return pandas.Series(index=inds, data=clst).iloc[dn['leaves']]
def draw_significant_groups(groups, dn_ax, color='white'):
for group in groups:
rect = patches.Rectangle((group[0][0], group[1][0]), group[0][1] -
group[0][0], group[1][1] - group[1][0], linewidth=1, edgecolor=
color, facecolor='none')
dn_ax.add_patch(rect)
def draw_legume_group(group, ax):
y_values = ax.get_ylim()
x_values = ax.get_xlim()
rect = patches.Rectangle((0, 0), x_values[1], group[0], linewidth=1,
edgecolor='white', facecolor='white', alpha=0.6)
ax.add_patch(rect)
rect = patches.Rectangle((0, group[1]), x_values[1], y_values[0] -
group[1], linewidth=1, edgecolor='white', facecolor='white', alpha=0.6)
ax.add_patch(rect)
def get_groups(clst, clust_above=MIN_CLUST):
groups = []
v = -1
for i in range(len(clst)):
if clst[i] == v:
continue
if v == -1:
groups.append([i])
v = clst[i]
continue
if i - groups[-1][0] >= clust_above:
groups[-1].append(i)
groups.append([i])
else:
groups[-1][0] = i
v = clst[i]
groups = groups[:-1]
return groups
if __name__ == '__main__':
os.makedirs(out_path, exist_ok=True)
df_info = pandas.read_csv(os.path.join(base_path,
'library_contents.csv'), index_col=0, low_memory=False)
df_info = df_info[df_info.is_allergens & (df_info['num_copy'] == 1)]
inds = df_info.index
l_base = len(inds)
meta_df = pandas.read_csv(os.path.join(base_path, 'cohort.csv'),
index_col=0, low_memory=False)
meta_df = meta_df[(meta_df.timepoint == 1) & (meta_df.num_passed >=
MIN_OLIS)]
fold_df = pandas.read_csv(os.path.join(base_path, 'fold_data.csv'),
index_col=[0, 1], low_memory=False).loc[meta_df.index].unstack()
fold_df.columns = fold_df.columns.get_level_values(1)
fold_df = fold_df[fold_df.columns.intersection(inds)]
if THROW_BAD_OLIS:
drop = fold_df.columns[(fold_df == -1).sum() > 0]
fold_df = fold_df[fold_df.columns.difference(drop)].fillna(1)
inds = df_info.index.difference(drop)
df_info = df_info.loc[inds]
fold_df = fold_df[fold_df.columns[(fold_df > 1).sum() > MIN_APPEAR *
len(fold_df)]]
fold_df = numpy.log(fold_df.fillna(1))
df_info = df_info.loc[fold_df.columns]
th = CLUST_TH
corr = fold_df.corr('spearman')
link = linkage(squareform(1 - corr), method='average')
dn = dendrogram(link, no_plot=True)
clst = get_clusters(link, dn, corr.columns, th)
groups = get_groups(clst)
corr1 = fold_df.T.corr('spearman')
link1 = linkage(squareform(1 - corr1), method='average')
dn1 = dendrogram(link1, no_plot=True)
clst1 = get_clusters(link1, dn1, corr1.columns, th)
groups1 = get_groups(clst1)
fig = plt.figure(figsize=[9.2, 12])
gs = GridSpec(1, 3, width_ratios=[0.2, 3, 1])
bar_ax = fig.add_subplot(gs[0])
dendogram_ax = fig.add_subplot(gs[1])
sns.heatmap(fold_df.iloc[dn1['leaves'], dn['leaves']], cmap=sns.
color_palette('flare', as_cmap=True), ax=dendogram_ax, yticklabels=
False, xticklabels=False, cbar_ax=bar_ax)
dendogram_ax.set_xlabel('oligos')
dendogram_ax.set_ylabel('samples')
mt = 'normalized mt_1342'
bar_axis1 = fig.add_subplot(gs[2], sharey=dendogram_ax)
meta_df['yob'] = (meta_df['yob'] - 1944) / 60
use_columns = ['gender', 'yob']
sample_extra_info = pandas.merge(meta_df[use_columns], meta_df[mt],
left_index=True, right_index=True, how='left')
sample_extra_info[mt] = ((sample_extra_info[mt] - sample_extra_info[mt]
.min()) / (sample_extra_info[mt].max() - sample_extra_info[mt].min())
).astype(float)
sample_extra_info.rename(columns={mt: 'norm mt_1342'}, inplace=True)
mt = 'norm mt_1342'
sample_extra_info = sample_extra_info.iloc[dn1['leaves']]
sns.heatmap(data=sample_extra_info, xticklabels=sample_extra_info.
columns, yticklabels=False, ax=bar_axis1, cmap=sns.color_palette(
'viridis', as_cmap=True))
fold_df = fold_df.iloc[dn1['leaves'], dn['leaves']].copy()
significant_groups = []
for oligo_subgroup in groups:
sample_group_means = sorted(enumerate([fold_df.iloc[range(*
sample_group), range(*oligo_subgroup)].mean().mean() for
sample_group in groups1]), key=lambda x: -x[1])
if sample_group_means[0][1] > 2 * sample_group_means[1][1]:
significant_groups.append([oligo_subgroup, groups1[
sample_group_means[0][0]]])
draw_significant_groups(significant_groups, dendogram_ax)
mt_scores = pandas.Series([mannwhitneyu(sample_extra_info.iloc[range(*
sample_group)][mt].dropna(), sample_extra_info.iloc[list(range(0,
sample_group[0])) + list(range(sample_group[1], len(
sample_extra_info)))][mt].dropna())[1] for oligos_group,
sample_group in significant_groups])
mt_group = significant_groups[mt_scores.idxmin()]
mt_pval = mt_scores.min()
draw_significant_groups([mt_group], dendogram_ax, color='blue')
draw_legume_group(mt_group[1], bar_axis1)
plt.suptitle(
"""For group marked in blue the %s level
of samples in group vs those not in group
"""
% mt + 'got MW p-value of %g' % mt_pval)
plt.savefig(os.path.join(out_path, 'legumes.png'))
res = {}
inds = sample_extra_info[mt].dropna().index
for i in range(*mt_group[0]):
col = fold_df.columns[i]
res[col] = spearmanr(sample_extra_info.loc[inds][mt], fold_df.loc[
inds, col].values)
res = pandas.DataFrame(res, index=['stat', 'pval']).T.sort_values('pval')
res['Bonf'] = res['pval'] * len(res)
FDR = multipletests(res.pval.values.tolist(), method='fdr_by')
res['FDR_BY'] = FDR[0]
res['FDR_BY_qval'] = FDR[1]
FDR = multipletests(res.pval.values.tolist(), method='fdr_bh')
res['FDR_BH'] = FDR[0]
res['FDR_BH_qval'] = FDR[1]
res['allergens_common_name'] = df_info.loc[res.index].allergens_common_name
print('Of %d oligos in the blue group %d pass FDR (BY) vs %s' % (len(
res), len(res[res.FDR_BY]), mt))
res.to_csv(os.path.join(out_path, 'mt_1342.csv'))
|
from scipy.stats import mannwhitneyu
import matplotlib.patches as patches
import os
import numpy
import pandas
from matplotlib.gridspec import GridSpec
from scipy.cluster.hierarchy import fcluster, linkage, dendrogram
from scipy.spatial.distance import squareform
import seaborn as sns
from scipy.stats import spearmanr
from statsmodels.stats.multitest import multipletests
import matplotlib.pyplot as plt
from config import base_path, out_path
MIN_OLIS = 200
THROW_BAD_OLIS = True
MIN_APPEAR = 0.02
CLUST_TH = 0.7
MIN_CLUST = 10
def get_clusters(link, dn, inds, th=0.7):
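    # Cut the linkage tree at distance th and order the cluster labels by dendrogram leaf order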
clst = fcluster(link, criterion='distance', t=th)
return pandas.Series(index=inds, data=clst).iloc[dn['leaves']]
def draw_significant_groups(groups, dn_ax, color='white'):
# Draw boxes around clusters
for group in groups:
rect = patches.Rectangle((group[0][0], group[1][0]), group[0][1] - group[0][0], group[1][1] - group[1][0],
linewidth=1, edgecolor=color, facecolor='none')
dn_ax.add_patch(rect)
def draw_legume_group(group, ax):
y_values = ax.get_ylim()
x_values = ax.get_xlim()
rect = patches.Rectangle((0, 0), x_values[1], group[0], linewidth=1, edgecolor='white',
facecolor='white', alpha=0.6)
ax.add_patch(rect)
rect = patches.Rectangle((0, group[1]), x_values[1], y_values[0] - group[1], linewidth=1, edgecolor='white',
facecolor='white', alpha=0.6)
ax.add_patch(rect)
def get_groups(clst, clust_above=MIN_CLUST):
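    # Walk the ordered cluster labels and emit [start, end] index ranges,
    # keeping only runs of at least clust_above consecutive leaves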
groups = []
v = -1
for i in range(len(clst)):
if clst[i] == v:
continue
if v == -1:
groups.append([i])
v = clst[i]
continue
if (i - groups[-1][0]) >= clust_above:
groups[-1].append(i)
groups.append([i])
else:
groups[-1][0] = i
v = clst[i]
groups = groups[:-1]
return groups
if __name__ == "__main__":
os.makedirs(out_path, exist_ok=True)
df_info = pandas.read_csv(os.path.join(base_path, "library_contents.csv"), index_col=0, low_memory=False)
df_info = df_info[df_info.is_allergens & (df_info['num_copy'] == 1)]
inds = df_info.index
l_base = len(inds)
meta_df = pandas.read_csv(os.path.join(base_path, "cohort.csv"), index_col=0, low_memory=False)
meta_df = meta_df[(meta_df.timepoint == 1) & (meta_df.num_passed >= MIN_OLIS)]
fold_df = pandas.read_csv(os.path.join(base_path, "fold_data.csv"), index_col=[0, 1],
low_memory=False).loc[meta_df.index].unstack()
fold_df.columns = fold_df.columns.get_level_values(1)
fold_df = fold_df[fold_df.columns.intersection(inds)]
if THROW_BAD_OLIS:
drop = fold_df.columns[(fold_df == -1).sum() > 0]
fold_df = fold_df[fold_df.columns.difference(drop)].fillna(1)
inds = df_info.index.difference(drop)
df_info = df_info.loc[inds]
fold_df = fold_df[fold_df.columns[(fold_df > 1).sum() > (MIN_APPEAR * len(fold_df))]]
fold_df = numpy.log(fold_df.fillna(1))
df_info = df_info.loc[fold_df.columns]
th = CLUST_TH
# Oligos level correlations
corr = fold_df.corr('spearman')
link = linkage(squareform(1 - corr), method='average')
dn = dendrogram(link, no_plot=True)
clst = get_clusters(link, dn, corr.columns, th)
groups = get_groups(clst)
# Samples level correlations
corr1 = fold_df.T.corr('spearman')
link1 = linkage(squareform(1 - corr1), method='average')
dn1 = dendrogram(link1, no_plot=True)
clst1 = get_clusters(link1, dn1, corr1.columns, th)
groups1 = get_groups(clst1)
# Define figure
fig = plt.figure(figsize=[9.2, 12])
gs = GridSpec(1, 3, width_ratios=[0.2, 3, 1])
# Plot heatmap
bar_ax = fig.add_subplot(gs[0])
dendogram_ax = fig.add_subplot(gs[1])
sns.heatmap(fold_df.iloc[dn1['leaves'], dn['leaves']], cmap=sns.color_palette('flare', as_cmap=True),
ax=dendogram_ax, yticklabels=False, xticklabels=False, cbar_ax=bar_ax)
dendogram_ax.set_xlabel("oligos")
dendogram_ax.set_ylabel("samples")
# Plot sample level bars
mt = 'normalized mt_1342'
bar_axis1 = fig.add_subplot(gs[2], sharey=dendogram_ax)
meta_df['yob'] = (meta_df['yob'] - 1944) / 60
use_columns = ['gender', 'yob']
sample_extra_info = pandas.merge(meta_df[use_columns], meta_df[mt], left_index=True,
right_index=True, how='left')
sample_extra_info[mt] = ((sample_extra_info[mt] - sample_extra_info[mt].min()) /
(sample_extra_info[mt].max() - sample_extra_info[mt].min())).astype(float)
sample_extra_info.rename(columns={mt: 'norm mt_1342'}, inplace=True)
mt = 'norm mt_1342'
sample_extra_info = sample_extra_info.iloc[dn1['leaves']]
sns.heatmap(data=sample_extra_info, xticklabels=sample_extra_info.columns, yticklabels=False,
ax=bar_axis1, cmap=sns.color_palette("viridis", as_cmap=True))
# Compute significant shared groups
fold_df = fold_df.iloc[dn1['leaves'], dn['leaves']].copy()
significant_groups = []
for oligo_subgroup in groups:
sample_group_means = sorted(enumerate(
[fold_df.iloc[range(*sample_group), range(*oligo_subgroup)].mean().mean() for sample_group in groups1]),
key=lambda x: -x[1])
if sample_group_means[0][1] > 2 * sample_group_means[1][1]:
significant_groups.append([oligo_subgroup, groups1[sample_group_means[0][0]]])
draw_significant_groups(significant_groups, dendogram_ax)
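    # Mann-Whitney U: metabolite level of samples inside each candidate group vs all remaining samples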
mt_scores = pandas.Series([mannwhitneyu(sample_extra_info.iloc[range(*sample_group)][mt].dropna(),
sample_extra_info.iloc[list(range(0, sample_group[0])) +
list(range(sample_group[1], len(sample_extra_info)))]
[mt].dropna())[1]
for oligos_group, sample_group in significant_groups])
mt_group = significant_groups[mt_scores.idxmin()]
mt_pval = mt_scores.min()
draw_significant_groups([mt_group], dendogram_ax, color='blue')
draw_legume_group(mt_group[1], bar_axis1)
plt.suptitle('For group marked in blue the %s level\nof samples in group vs those not in group\n' % mt +
'got MW p-value of %g' % mt_pval)
plt.savefig(os.path.join(out_path, "legumes.png"))
res = {}
inds = sample_extra_info[mt].dropna().index
for i in range(*mt_group[0]):
col = fold_df.columns[i]
res[col] = spearmanr(sample_extra_info.loc[inds][mt], fold_df.loc[inds, col].values)
res = pandas.DataFrame(res, index=['stat', 'pval']).T.sort_values('pval')
res["Bonf"] = res['pval'] * len(res)
FDR = multipletests(res.pval.values.tolist(), method='fdr_by')
res["FDR_BY"] = FDR[0]
res['FDR_BY_qval'] = FDR[1]
FDR = multipletests(res.pval.values.tolist(), method='fdr_bh')
res["FDR_BH"] = FDR[0]
res['FDR_BH_qval'] = FDR[1]
res['allergens_common_name'] = df_info.loc[res.index].allergens_common_name
print("Of %d oligos in the blue group %d pass FDR (BY) vs %s" % (len(res), len(res[res.FDR_BY]), mt))
res.to_csv(os.path.join(out_path, "mt_1342.csv"))
|
[
4,
5,
6,
7,
8
] |
2,231 |
e2b439974b66e45a899605bc7234850783c3dfb0
|
<mask token>
class AffiliatedStoreManager(models.Manager):
<mask token>
<mask token>
<mask token>
class AffiliatedStore(models.Model):
class Meta:
db_table = 'affiliated_store'
objects = AffiliatedStoreManager()
title = models.CharField(max_length=255)
server_url = models.CharField(max_length=2083, validators=[
RegexValidator(regex='^(https|http)://.*$', code='invalid url',
message='server_url must be a url')])
icon = models.ForeignKey(Image, related_name='+', null=True, blank=True)
is_enabled = models.BooleanField(default=True)
def __repr__(self):
return 'AffiliatedStore(id={0!s}, title="{1!s}")'.format(self.id,
self.title)
def __str__(self):
return repr(self)
|
<mask token>
class AffiliatedStoreManager(models.Manager):
def get_queryset(self):
return super().get_queryset().select_related('icon').select_related(
'icon__image_type')
<mask token>
<mask token>
class AffiliatedStore(models.Model):
class Meta:
db_table = 'affiliated_store'
objects = AffiliatedStoreManager()
title = models.CharField(max_length=255)
server_url = models.CharField(max_length=2083, validators=[
RegexValidator(regex='^(https|http)://.*$', code='invalid url',
message='server_url must be a url')])
icon = models.ForeignKey(Image, related_name='+', null=True, blank=True)
is_enabled = models.BooleanField(default=True)
def __repr__(self):
return 'AffiliatedStore(id={0!s}, title="{1!s}")'.format(self.id,
self.title)
def __str__(self):
return repr(self)
|
<mask token>
class AffiliatedStoreManager(models.Manager):
def get_queryset(self):
return super().get_queryset().select_related('icon').select_related(
'icon__image_type')
<mask token>
def find_by_id(self, id):
return self.get(id=id)
class AffiliatedStore(models.Model):
class Meta:
db_table = 'affiliated_store'
objects = AffiliatedStoreManager()
title = models.CharField(max_length=255)
server_url = models.CharField(max_length=2083, validators=[
RegexValidator(regex='^(https|http)://.*$', code='invalid url',
message='server_url must be a url')])
icon = models.ForeignKey(Image, related_name='+', null=True, blank=True)
is_enabled = models.BooleanField(default=True)
def __repr__(self):
return 'AffiliatedStore(id={0!s}, title="{1!s}")'.format(self.id,
self.title)
def __str__(self):
return repr(self)
|
from django.core.validators import RegexValidator
from django.db import models
from .image import Image
class AffiliatedStoreManager(models.Manager):
def get_queryset(self):
return super().get_queryset().select_related('icon').select_related(
'icon__image_type')
def find_all(self):
return self.all()
def find_by_id(self, id):
return self.get(id=id)
class AffiliatedStore(models.Model):
class Meta:
db_table = 'affiliated_store'
objects = AffiliatedStoreManager()
title = models.CharField(max_length=255)
server_url = models.CharField(max_length=2083, validators=[
RegexValidator(regex='^(https|http)://.*$', code='invalid url',
message='server_url must be a url')])
icon = models.ForeignKey(Image, related_name='+', null=True, blank=True)
is_enabled = models.BooleanField(default=True)
def __repr__(self):
return 'AffiliatedStore(id={0!s}, title="{1!s}")'.format(self.id,
self.title)
def __str__(self):
return repr(self)
|
from django.core.validators import RegexValidator
from django.db import models
from .image import Image
class AffiliatedStoreManager(models.Manager):
def get_queryset(self):
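        # select_related joins the icon and its image type in the initial query, avoiding per-store lookups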
return super().get_queryset() \
.select_related('icon') \
.select_related('icon__image_type')
def find_all(self):
return self.all()
def find_by_id(self, id):
return self.get(id=id)
class AffiliatedStore(models.Model):
class Meta:
db_table = 'affiliated_store'
objects = AffiliatedStoreManager()
title = models.CharField(max_length=255)
server_url = models.CharField(max_length=2083,
validators=[RegexValidator(regex='^(https|http)://.*$',
code='invalid url',
message='server_url must be a url')])
icon = models.ForeignKey(Image, related_name='+', null=True, blank=True)
is_enabled = models.BooleanField(default=True)
def __repr__(self):
return 'AffiliatedStore(id={0!s}, title="{1!s}")'.format(self.id, self.title)
def __str__(self):
return repr(self)
|
[
5,
6,
7,
9,
10
] |
2,232 |
f35569e2d8d26f43d4b2395b5088902c6cd3b826
|
<mask token>
class PW(QWidget):
<mask token>
<mask token>
def ButtonNoAction(self):
table = 'patient_' + str(self.pid)
send_answer(self.question[self.index]['qid'], 'Нет', table)
if self.index < self.maxim - 1:
self.Pat = PW(self.index + 1, self.question, self.pid)
self.Pat.show()
self.close()
|
<mask token>
class PW(QWidget):
def __init__(self, index, question, pid):
super().__init__()
self.question = question
self.pid = pid
self.maxim = len(self.question)
self.index = index
self.Pat = None
print(self.maxim)
self.setWindowTitle('Вопрос №' + str(question[self.index]['qid']))
self.setFixedSize(QSize(300, 400))
questionLayout = QHBoxLayout()
answerLayout = QHBoxLayout()
pageLayout = QVBoxLayout()
self.questionLabel = QLabel(question[self.index]['question'])
self.questionLabel.setAlignment(Qt.AlignCenter)
buttonYes = QPushButton('Да')
buttonNo = QPushButton('Нет')
questionLayout.addWidget(self.questionLabel)
answerLayout.addWidget(buttonYes)
answerLayout.addWidget(buttonNo)
pageLayout.addLayout(questionLayout)
pageLayout.addLayout(answerLayout)
self.setLayout(pageLayout)
buttonYes.clicked.connect(self.ButtonYesAction)
buttonNo.clicked.connect(self.ButtonNoAction)
<mask token>
def ButtonNoAction(self):
table = 'patient_' + str(self.pid)
send_answer(self.question[self.index]['qid'], 'Нет', table)
if self.index < self.maxim - 1:
self.Pat = PW(self.index + 1, self.question, self.pid)
self.Pat.show()
self.close()
|
<mask token>
class PW(QWidget):
def __init__(self, index, question, pid):
super().__init__()
self.question = question
self.pid = pid
self.maxim = len(self.question)
self.index = index
self.Pat = None
print(self.maxim)
self.setWindowTitle('Вопрос №' + str(question[self.index]['qid']))
self.setFixedSize(QSize(300, 400))
questionLayout = QHBoxLayout()
answerLayout = QHBoxLayout()
pageLayout = QVBoxLayout()
self.questionLabel = QLabel(question[self.index]['question'])
self.questionLabel.setAlignment(Qt.AlignCenter)
buttonYes = QPushButton('Да')
buttonNo = QPushButton('Нет')
questionLayout.addWidget(self.questionLabel)
answerLayout.addWidget(buttonYes)
answerLayout.addWidget(buttonNo)
pageLayout.addLayout(questionLayout)
pageLayout.addLayout(answerLayout)
self.setLayout(pageLayout)
buttonYes.clicked.connect(self.ButtonYesAction)
buttonNo.clicked.connect(self.ButtonNoAction)
def ButtonYesAction(self):
table = 'patient_' + str(self.pid)
send_answer(self.question[self.index]['qid'], 'Да', table)
if self.index < self.maxim - 1:
self.Pat = PW(self.index + 1, self.question, self.pid)
self.Pat.show()
self.close()
def ButtonNoAction(self):
table = 'patient_' + str(self.pid)
send_answer(self.question[self.index]['qid'], 'Нет', table)
if self.index < self.maxim - 1:
self.Pat = PW(self.index + 1, self.question, self.pid)
self.Pat.show()
self.close()
|
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtSql import *
from DatabaseHandler import send_answer
class PW(QWidget):
def __init__(self, index, question, pid):
super().__init__()
self.question = question
self.pid = pid
self.maxim = len(self.question)
self.index = index
self.Pat = None
print(self.maxim)
self.setWindowTitle('Вопрос №' + str(question[self.index]['qid']))
self.setFixedSize(QSize(300, 400))
questionLayout = QHBoxLayout()
answerLayout = QHBoxLayout()
pageLayout = QVBoxLayout()
self.questionLabel = QLabel(question[self.index]['question'])
self.questionLabel.setAlignment(Qt.AlignCenter)
buttonYes = QPushButton('Да')
buttonNo = QPushButton('Нет')
questionLayout.addWidget(self.questionLabel)
answerLayout.addWidget(buttonYes)
answerLayout.addWidget(buttonNo)
pageLayout.addLayout(questionLayout)
pageLayout.addLayout(answerLayout)
self.setLayout(pageLayout)
buttonYes.clicked.connect(self.ButtonYesAction)
buttonNo.clicked.connect(self.ButtonNoAction)
def ButtonYesAction(self):
table = 'patient_' + str(self.pid)
send_answer(self.question[self.index]['qid'], 'Да', table)
if self.index < self.maxim - 1:
self.Pat = PW(self.index + 1, self.question, self.pid)
self.Pat.show()
self.close()
def ButtonNoAction(self):
table = 'patient_' + str(self.pid)
send_answer(self.question[self.index]['qid'], 'Нет', table)
if self.index < self.maxim - 1:
self.Pat = PW(self.index + 1, self.question, self.pid)
self.Pat.show()
self.close()
|
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtSql import *
from DatabaseHandler import send_answer
class PW(QWidget):
def __init__(self, index, question, pid):
super().__init__()
self.question = question
self.pid = pid
self.maxim = len(self.question)
self.index = index
self.Pat = None
print(self.maxim)
self.setWindowTitle("Вопрос №" + str(question[self.index]['qid']))
self.setFixedSize(QSize(300, 400))
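        # Question label on top, "Да"/"Нет" (Yes/No) buttons below it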
questionLayout = QHBoxLayout()
answerLayout = QHBoxLayout()
pageLayout = QVBoxLayout()
self.questionLabel = QLabel(question[self.index]['question'])
self.questionLabel.setAlignment(Qt.AlignCenter)
buttonYes = QPushButton("Да")
buttonNo = QPushButton("Нет")
questionLayout.addWidget(self.questionLabel)
answerLayout.addWidget(buttonYes)
answerLayout.addWidget(buttonNo)
pageLayout.addLayout(questionLayout)
pageLayout.addLayout(answerLayout)
self.setLayout(pageLayout)
buttonYes.clicked.connect(self.ButtonYesAction)
buttonNo.clicked.connect(self.ButtonNoAction)
def ButtonYesAction(self):
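        # Write the answer into table "patient_<pid>", then open the next question window if one remains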
table = "patient_" + str(self.pid)
send_answer(self.question[self.index]['qid'], 'Да', table)
if (self.index<self.maxim-1):
self.Pat = PW(self.index + 1, self.question, self.pid)
self.Pat.show()
self.close()
def ButtonNoAction(self):
table = "patient_" + str(self.pid)
send_answer(self.question[self.index]['qid'], 'Нет', table)
if (self.index<self.maxim-1):
self.Pat = PW(self.index + 1, self.question, self.pid)
self.Pat.show()
self.close()
|
[
2,
3,
4,
5,
6
] |
2,233 |
2185d332f7cd4cbf17d6b72a19297d156c2182a1
|
<mask token>
class RwidSpider(scrapy.Spider):
<mask token>
<mask token>
<mask token>
def parse(self, response):
data = {'username': 'user', 'password': 'user12345'}
return scrapy.FormRequest(url='http://0.0.0.0:9999/login', formdata
=data, callback=self.after_login)
<mask token>
def parse_detail(self, response):
yield {'title': response.css('title::text').get()}
|
<mask token>
class RwidSpider(scrapy.Spider):
<mask token>
<mask token>
<mask token>
def parse(self, response):
data = {'username': 'user', 'password': 'user12345'}
return scrapy.FormRequest(url='http://0.0.0.0:9999/login', formdata
=data, callback=self.after_login)
def after_login(self, response):
"""
        There are 2 tasks here:
        1. Grab all the product data on the results page -> goes to detail (parsing detail)
        2. Grab all the next links -> comes back to self.after_login
:param response:
:return:
"""
detail_products: List[Selector] = response.css('.card .card-title a')
for detail in detail_products:
href = detail.attrib.get('href')
yield response.follow(href, callback=self.parse_detail)
yield {'title': response.css('title::text').get()}
def parse_detail(self, response):
yield {'title': response.css('title::text').get()}
|
<mask token>
class RwidSpider(scrapy.Spider):
name = 'rwid'
allowed_domains = ['0.0.0.0']
start_urls = ['http://0.0.0.0:9999/']
def parse(self, response):
data = {'username': 'user', 'password': 'user12345'}
return scrapy.FormRequest(url='http://0.0.0.0:9999/login', formdata
=data, callback=self.after_login)
def after_login(self, response):
"""
        There are 2 tasks here:
        1. Grab all the product data on the results page -> goes to detail (parsing detail)
        2. Grab all the next links -> comes back to self.after_login
:param response:
:return:
"""
detail_products: List[Selector] = response.css('.card .card-title a')
for detail in detail_products:
href = detail.attrib.get('href')
yield response.follow(href, callback=self.parse_detail)
yield {'title': response.css('title::text').get()}
def parse_detail(self, response):
yield {'title': response.css('title::text').get()}
|
from typing import List
import scrapy
from cssselect import Selector
class RwidSpider(scrapy.Spider):
name = 'rwid'
allowed_domains = ['0.0.0.0']
start_urls = ['http://0.0.0.0:9999/']
def parse(self, response):
data = {'username': 'user', 'password': 'user12345'}
return scrapy.FormRequest(url='http://0.0.0.0:9999/login', formdata
=data, callback=self.after_login)
def after_login(self, response):
"""
        There are 2 tasks here:
        1. Grab all the product data on the results page -> goes to detail (parsing detail)
        2. Grab all the next links -> comes back to self.after_login
:param response:
:return:
"""
detail_products: List[Selector] = response.css('.card .card-title a')
for detail in detail_products:
href = detail.attrib.get('href')
yield response.follow(href, callback=self.parse_detail)
yield {'title': response.css('title::text').get()}
def parse_detail(self, response):
yield {'title': response.css('title::text').get()}
|
from typing import List
import scrapy
from cssselect import Selector
class RwidSpider(scrapy.Spider):
name = 'rwid'
allowed_domains = ['0.0.0.0']
    # LOGIN REQUEST STARTS FROM THE URLS
start_urls = ['http://0.0.0.0:9999/']
    # LOG IN HERE
def parse(self, response):
        # what's the difference between yield & return?
        # yield {"title": response.css("title::text").get()}
        # check via inspect element whether login is required
data = {
"username": "user",
"password": "user12345"
}
        # check what FormRequest needs
return scrapy.FormRequest(
url="http://0.0.0.0:9999/login",
formdata=data,
            callback=self.after_login # to extract the data
)
def after_login(self, response):
"""
        There are 2 tasks here:
        1. Grab all the product data on the results page -> goes to detail (parsing detail)
        2. Grab all the next links -> comes back to self.after_login
:param response:
:return:
"""
# get detail product
detail_products: List[Selector] = response.css(".card .card-title a")
for detail in detail_products:
            href = detail.attrib.get("href") # to get the url
            yield response.follow(href, callback=self.parse_detail) # add this url to scrapy's queue
yield {"title": response.css("title::text").get()}
def parse_detail(self, response):
yield {"title": response.css("title::text").get()}
|
[
3,
4,
5,
6,
7
] |
2,234 |
87e5a615157db59d1eac4967c321829c878d00a5
|
<mask token>
|
<mask token>
def product_sum_helper(array, depth):
sum = 0
for ele in array:
if type(ele) is int:
sum += ele
else:
sum += product_sum_helper(ele, depth + 1)
return depth * sum
|
<mask token>
def product_sum(array):
sum = 0
depth = 1
sum += product_sum_helper(array, depth)
return sum
def product_sum_helper(array, depth):
sum = 0
for ele in array:
if type(ele) is int:
sum += ele
else:
sum += product_sum_helper(ele, depth + 1)
return depth * sum
|
"""
- input: is a 'special' array (heavily nested array)
- output: return the product sum
- notes:
- special array is a non-empty array that contains either integers or other 'special' arrays
- product sum of a special array is the sum of its elements, where 'special' arrays inside are summed themselves and then multipled by their level of depth
- logic:
- need two variables the sum and the depth; the depth will be passed on from function call to function call
- we iterate through the 'special' array
- check if it is a type int
- add to the sum
- else the element we are currently on is a 'special' array
- add to the sum the return value of recursively calling the function passing in the element and the current depth + 1
- return sum
"""
def product_sum(array):
sum = 0
depth = 1
sum += product_sum_helper(array, depth)
return sum
def product_sum_helper(array, depth):
sum = 0
for ele in array:
if type(ele) is int:
sum += ele
else:
sum += product_sum_helper(ele, depth + 1)
return depth * sum
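# Example: product_sum([5, 2, [7, -1], 3, [6, [-13, 8], 4]]) returns 12
# = (5 + 2 + 3) + 2*(7 - 1) + 2*((6 + 4) + 3*(-13 + 8))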
# Time Complexity: O(n), where n is the number of nodes in the element
# Space Complexity: O(d), where d is the greatest depth of the 'special' arrays in the array
| null |
[
0,
1,
2,
3
] |
2,235 |
c6170678b523a105312d8ce316853859657d3c94
|
<mask token>
|
<mask token>
class Migration(migrations.Migration):
<mask token>
<mask token>
|
<mask token>
class Migration(migrations.Migration):
dependencies = [('user_details', '0003_auto_20180226_1816')]
operations = [migrations.AlterField(model_name='token', name=
'expiry_date', field=models.DateTimeField(default=datetime.datetime
(2018, 2, 28, 13, 14, 15, 831612, tzinfo=utc))), migrations.
AlterField(model_name='token', name='user_id', field=models.
ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=
'user_details.User'))]
|
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
import django.db.models.deletion
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [('user_details', '0003_auto_20180226_1816')]
operations = [migrations.AlterField(model_name='token', name=
'expiry_date', field=models.DateTimeField(default=datetime.datetime
(2018, 2, 28, 13, 14, 15, 831612, tzinfo=utc))), migrations.
AlterField(model_name='token', name='user_id', field=models.
ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=
'user_details.User'))]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-02-26 13:14
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
import django.db.models.deletion
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('user_details', '0003_auto_20180226_1816'),
]
operations = [
migrations.AlterField(
model_name='token',
name='expiry_date',
field=models.DateTimeField(default=datetime.datetime(2018, 2, 28, 13, 14, 15, 831612, tzinfo=utc)),
),
migrations.AlterField(
model_name='token',
name='user_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='user_details.User'),
),
]
|
[
0,
1,
2,
3,
4
] |
2,236 |
ab38371ee3941e214344497b7e56786908a9b3d1
|
<mask token>
|
<mask token>
admin.site.register(Sport)
admin.site.register(Action)
|
from django.contrib import admin
from .models import Sport
from .models import Action
admin.site.register(Sport)
admin.site.register(Action)
| null | null |
[
0,
1,
2
] |
2,237 |
e67f27eec53901f27ba5a7ee7e2a20bbb1e8f7f9
|
<mask token>
class IBehaviourBase(Client):
<mask token>
<mask token>
<mask token>
|
<mask token>
class IBehaviourBase(Client):
<mask token>
def __init__(self, email, password, kwargs):
""""abstract class being parent of every user implemented behaviour;
it handles logging in and tasks on behaviour loader side"""
self.kwargs = kwargs
Client.__init__(self, email=email, password=password)
self.Run()
def Run(self):
print('behaviour base abstract method invoked error')
|
<mask token>
class IBehaviourBase(Client):
BreakFlag = False
def __init__(self, email, password, kwargs):
""""abstract class being parent of every user implemented behaviour;
it handles logging in and tasks on behaviour loader side"""
self.kwargs = kwargs
Client.__init__(self, email=email, password=password)
self.Run()
def Run(self):
print('behaviour base abstract method invoked error')
|
from fbchat import Client
class IBehaviourBase(Client):
BreakFlag = False
def __init__(self, email, password, kwargs):
""""abstract class being parent of every user implemented behaviour;
it handles logging in and tasks on behaviour loader side"""
self.kwargs = kwargs
Client.__init__(self, email=email, password=password)
self.Run()
def Run(self):
print('behaviour base abstract method invoked error')
|
from fbchat import Client
class IBehaviourBase(Client):
BreakFlag = False
def __init__(self,email,password, kwargs):
""""abstract class being parent of every user implemented behaviour;
it handles logging in and tasks on behaviour loader side"""
self.kwargs=kwargs
Client.__init__(self, email=email, password=password)
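        # logging in is handled by Client.__init__ (per the docstring above); then start the behaviour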
self.Run()
def Run(self):
print("behaviour base abstract method invoked error")
## todo add exception here
|
[
1,
3,
4,
5,
6
] |
2,238 |
7026f4549019c25cb736af556fe46fd360fba46f
|
<mask token>
|
<mask token>
def DIXMAAN(type):
def DIXMAAN_(n):
name = 'DIXMAAN%c function (CUTE)' % type
alpha, beta, gamma, sigma, k1, k2, k3, k4 = table_DIXMAAN[type]
m = n // 3
sm = lambda i: alpha * xi(i) ** 2 * (i / n) ** k1
sm2 = lambda i: beta * xi(i) ** 2 * (xi(i + 1) + xi(i + 1) ** 2) * (i /
n) ** k2
sm3 = lambda i: gamma * xi(i) ** 2 * xi(i + m) ** 4 * (i / n) ** k3
sm4 = lambda i: sigma * xi(i) * xi(i + 2 * m) * (i / n) ** k4
f_1 = lambda : sum([sm2(i) for i in range(1, n)])
f_2 = lambda : sum([sm3(i) for i in range(1, 2 * m + 1)])
f_3 = lambda : sum([sm4(i) for i in range(1, m + 1)])
f = lambda : 1 + f_1() + f_2() + f_3()
x0 = np.ones((n, 1)) * 2.0
return create_test_function(name, n, sm, x0, first=f, range_func=
default_range_1)
DIXMAAN_.__name__ += type
return DIXMAAN_
|
<mask token>
table_DIXMAAN = dict()
table_DIXMAAN['A'] = 1, 0, 0.125, 0.125, 0, 0, 0, 0
table_DIXMAAN['B'] = 1, 0.0625, 0.0625, 0.0625, 0, 0, 0, 1
table_DIXMAAN['C'] = 1, 0.125, 0.125, 0.125, 0, 0, 0, 0
table_DIXMAAN['D'] = 1, 0.26, 0.26, 0.26, 0, 0, 0, 0
table_DIXMAAN['E'] = 1, 0, 0.125, 0.125, 1, 0, 0, 1
table_DIXMAAN['F'] = 1, 0.0625, 0.0625, 0.0625, 1, 0, 0, 1
table_DIXMAAN['G'] = 1, 0.125, 0.125, 0.125, 1, 0, 0, 1
table_DIXMAAN['H'] = 1, 0.26, 0.26, 0.26, 1, 0, 0, 1
table_DIXMAAN['I'] = 1, 0, 0.125, 0.125, 2, 0, 0, 2
table_DIXMAAN['J'] = 1, 0.0625, 0.0625, 0.0625, 2, 0, 0, 2
table_DIXMAAN['K'] = 1, 0.125, 0.125, 0.125, 2, 0, 0, 2
table_DIXMAAN['L'] = 1, 0.26, 0.26, 0.26, 2, 0, 0, 2
def DIXMAAN(type):
def DIXMAAN_(n):
name = 'DIXMAAN%c function (CUTE)' % type
alpha, beta, gamma, sigma, k1, k2, k3, k4 = table_DIXMAAN[type]
m = n // 3
sm = lambda i: alpha * xi(i) ** 2 * (i / n) ** k1
sm2 = lambda i: beta * xi(i) ** 2 * (xi(i + 1) + xi(i + 1) ** 2) * (i /
n) ** k2
sm3 = lambda i: gamma * xi(i) ** 2 * xi(i + m) ** 4 * (i / n) ** k3
sm4 = lambda i: sigma * xi(i) * xi(i + 2 * m) * (i / n) ** k4
f_1 = lambda : sum([sm2(i) for i in range(1, n)])
f_2 = lambda : sum([sm3(i) for i in range(1, 2 * m + 1)])
f_3 = lambda : sum([sm4(i) for i in range(1, m + 1)])
f = lambda : 1 + f_1() + f_2() + f_3()
x0 = np.ones((n, 1)) * 2.0
return create_test_function(name, n, sm, x0, first=f, range_func=
default_range_1)
DIXMAAN_.__name__ += type
return DIXMAAN_
|
from .test_function import *
from .support_funcs import *
table_DIXMAAN = dict()
table_DIXMAAN['A'] = 1, 0, 0.125, 0.125, 0, 0, 0, 0
table_DIXMAAN['B'] = 1, 0.0625, 0.0625, 0.0625, 0, 0, 0, 1
table_DIXMAAN['C'] = 1, 0.125, 0.125, 0.125, 0, 0, 0, 0
table_DIXMAAN['D'] = 1, 0.26, 0.26, 0.26, 0, 0, 0, 0
table_DIXMAAN['E'] = 1, 0, 0.125, 0.125, 1, 0, 0, 1
table_DIXMAAN['F'] = 1, 0.0625, 0.0625, 0.0625, 1, 0, 0, 1
table_DIXMAAN['G'] = 1, 0.125, 0.125, 0.125, 1, 0, 0, 1
table_DIXMAAN['H'] = 1, 0.26, 0.26, 0.26, 1, 0, 0, 1
table_DIXMAAN['I'] = 1, 0, 0.125, 0.125, 2, 0, 0, 2
table_DIXMAAN['J'] = 1, 0.0625, 0.0625, 0.0625, 2, 0, 0, 2
table_DIXMAAN['K'] = 1, 0.125, 0.125, 0.125, 2, 0, 0, 2
table_DIXMAAN['L'] = 1, 0.26, 0.26, 0.26, 2, 0, 0, 2
def DIXMAAN(type):
def DIXMAAN_(n):
name = 'DIXMAAN%c function (CUTE)' % type
alpha, beta, gamma, sigma, k1, k2, k3, k4 = table_DIXMAAN[type]
m = n // 3
sm = lambda i: alpha * xi(i) ** 2 * (i / n) ** k1
sm2 = lambda i: beta * xi(i) ** 2 * (xi(i + 1) + xi(i + 1) ** 2) * (i /
n) ** k2
sm3 = lambda i: gamma * xi(i) ** 2 * xi(i + m) ** 4 * (i / n) ** k3
sm4 = lambda i: sigma * xi(i) * xi(i + 2 * m) * (i / n) ** k4
f_1 = lambda : sum([sm2(i) for i in range(1, n)])
f_2 = lambda : sum([sm3(i) for i in range(1, 2 * m + 1)])
f_3 = lambda : sum([sm4(i) for i in range(1, m + 1)])
f = lambda : 1 + f_1() + f_2() + f_3()
x0 = np.ones((n, 1)) * 2.0
return create_test_function(name, n, sm, x0, first=f, range_func=
default_range_1)
DIXMAAN_.__name__ += type
return DIXMAAN_
|
from .test_function import *
from .support_funcs import *
table_DIXMAAN = dict()
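# Each entry: (alpha, beta, gamma, sigma, k1, k2, k3, k4) for DIXMAAN variants A-L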
table_DIXMAAN['A'] = (1, 0, 0.125, 0.125, 0, 0, 0, 0)
table_DIXMAAN['B'] = (1, 0.0625, 0.0625, 0.0625, 0, 0, 0, 1)
table_DIXMAAN['C'] = (1, 0.125, 0.125, 0.125, 0, 0, 0, 0)
table_DIXMAAN['D'] = (1, 0.26, 0.26, 0.26, 0, 0, 0, 0)
table_DIXMAAN['E'] = (1, 0, 0.125, 0.125, 1, 0, 0, 1)
table_DIXMAAN['F'] = (1, 0.0625, 0.0625, 0.0625, 1, 0, 0, 1)
table_DIXMAAN['G'] = (1, 0.125, 0.125, 0.125, 1, 0, 0, 1)
table_DIXMAAN['H'] = (1, 0.26, 0.26, 0.26, 1, 0, 0, 1)
table_DIXMAAN['I'] = (1, 0, 0.125, 0.125, 2, 0, 0, 2)
table_DIXMAAN['J'] = (1, 0.0625, 0.0625, 0.0625, 2, 0, 0, 2)
table_DIXMAAN['K'] = (1, 0.125, 0.125, 0.125, 2, 0, 0, 2)
table_DIXMAAN['L'] = (1, 0.26, 0.26, 0.26, 2, 0, 0, 2)
def DIXMAAN(type):
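    # Factory: returns the constructor for the n-dimensional DIXMAAN<type> test function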
def DIXMAAN_(n):
name = "DIXMAAN%c function (CUTE)" % type
alpha, beta, gamma, sigma, k1, k2, k3, k4 = table_DIXMAAN[type]
m = n // 3
sm = lambda i: alpha * xi(i) ** 2 *(i / n) ** k1
sm2 = lambda i: beta * xi(i) ** 2 * (xi(i+1) + xi(i+1)**2) * (i / n) ** k2
sm3 = lambda i: gamma * xi(i)**2 * xi(i+m) ** 4 * (i / n) ** k3
sm4 = lambda i: sigma * xi(i) * xi(i+2*m) * (i / n) ** k4
f_1 = lambda: sum([sm2(i) for i in range(1, n)])
f_2 = lambda: sum([sm3(i) for i in range(1, 2 * m + 1)])
f_3 = lambda: sum([sm4(i) for i in range(1, m + 1)])
f = lambda: 1 + f_1() + f_2() + f_3()
x0 = np.ones((n, 1)) * 2.0
return create_test_function(name, n, sm, x0, first=f, range_func=default_range_1)
DIXMAAN_.__name__ += type
return DIXMAAN_
|
[
0,
1,
2,
3,
4
] |
2,239 |
e3c9487f3221ca89b9014b2e6470ca9d4dbc925a
|
<mask token>
class section:
def __init__(self, i0, j0, subImg, Params):
self.Params = Params
self.subParams = {}
self.subParams['wLen'] = [6.3e-07, 5.3e-07, 4.3e-07]
self.subParams['subSize'] = subImg.shape
self.subParams['bigSize'] = [np.int(Params['size'] / Params[
'numFiles'])] * 2
self.S = np.empty([self.subParams['bigSize'][0], self.subParams[
'bigSize'][1], 3], dtype=np.complex64)
self.P = np.empty([self.subParams['subSize'][0], self.subParams[
'subSize'][1], 3], dtype=np.complex64)
self.meanFFT = np.zeros([self.subParams['subSize'][0], self.
subParams['subSize'][1], 3], dtype=np.complex64)
self.meanNum = 0
self.subParams['fRApprox'] = np.empty([3], dtype=int)
self.subParams['coords'] = np.empty([3, 16, 16, 2])
self.subParams['isBF'] = np.empty([3, 16, 16])
for i in range(0, 3):
self.S[:, :, i] = self.initS0(subImg[:, :, i], self.subParams[
'bigSize'])
self.subParams['fRApprox'][i] = self.fRad(Params['fResolution'],
Params['NA'], self.subParams['wLen'][i])
print(Params['NA'], self.subParams['wLen'][i], Params['mag'],
Params['ps'], Params['smallSize'])
self.P[:, :, i] = self.initP0(self.subParams['subSize'], self.
subParams['fRApprox'][i])
self.subParams['coords'][i, :, :, :], self.subParams['isBF'][i,
:, :] = self.initCoords(i0, j0, self.subParams['wLen'][i],
self.subParams['fRApprox'][i])
self.bayer = np.empty([Params['divisor'], Params['divisor'], 3])
self.invBayer = np.empty([Params['divisor'], Params['divisor'], 3])
for i in range(3):
self.bayer[:, :, i], self.invBayer[:, :, i] = h.genBayer([
Params['divisor'], Params['divisor']], i)
def initS0(self, img, size):
""" Initialises the FT of the high res image by linear interpolation of a low res image """
I0 = cv.resize(img, (size[1], size[0]), interpolation=cv.INTER_LINEAR)
amplitude = np.sqrt(I0)
FI0 = fft2(ifftshift(amplitude))
FI0 = fftshift(FI0)
S = np.array(FI0, dtype=np.complex64)
return S
<mask token>
<mask token>
def initCoords(self, i, j, wLen, Rad):
""" Returns 2D array where LED coords relate to fourier centre positions """
segmentPos = [i, j]
n = self.Params['numFiles']
w = self.subParams['subSize'][0]
c = w / (2 * n)
centre = (segmentPos[0] * 2 * c + c - w) * self.Params['ps'][0
] / self.Params['mag']
self.Params['centre'] = centre
coords = np.empty((self.Params['LEDNum'][0], self.Params['LEDNum'][
1], 2), dtype=np.int32)
isBF = np.zeros((self.Params['LEDNum'][0], self.Params['LEDNum'][1]
), dtype=np.int32)
numImgs = int(len(self.Params['images']) ** 0.5)
for i, img in enumerate(self.Params['images']):
LED = meth.getLED(img)
LEDPixelPos = self.getLEDPos(LED[0], LED[1], centre, wLen)
coords[LED[0] + int(numImgs / 2) - 1, LED[1] + int(numImgs / 2) - 1
] = LEDPixelPos
if (LEDPixelPos[0] - w / 2) ** 2 + (LEDPixelPos[1] - w / 2
) ** 2 < Rad:
isBF[LED[0] + int(numImgs / 2) - 1, LED[1] + int(numImgs /
2) - 1] = 1
return coords, isBF
def getLEDPos(self, nx, ny, centre, wLen):
""" Determines the location of the centre of the fourier pattern in pixels """
ax = np.arctan((centre - nx * self.Params['LEDSpace']) / self.
Params['distance'])
ay = np.arctan((centre - ny * self.Params['LEDSpace']) / self.
Params['distance'])
dx = ax / (wLen * self.Params['fResolution'][0])
dy = ay / (wLen * self.Params['fResolution'][1])
pos = [int(dx + self.subParams['subSize'][0] / 2), int(dy + self.
subParams['subSize'][0] / 2)]
return pos
class splitImage:
def __init__(self, dir, imgName, numSplits, splitSize):
self.LEDPos = meth.getLED(imgName)
self.subImg = np.empty([numSplits, numSplits], dtype=subImage)
for i in range(numSplits):
for j in range(numSplits):
self.subImg[i, j] = subImage(dir, splitSize, imgName, self.
LEDPos, i, j)
class subImage:
def __init__(self, dir, splitSize, imgName, LEDPos, i, j):
img = meth.readImage(dir, imgName)
self.image = img[i * splitSize:(i + 1) * splitSize, j * splitSize:(
j + 1) * splitSize]
self.imgPos = [i, j]
self.LEDPos = LEDPos
<mask token>
|
<mask token>
class section:
def __init__(self, i0, j0, subImg, Params):
self.Params = Params
self.subParams = {}
self.subParams['wLen'] = [6.3e-07, 5.3e-07, 4.3e-07]
self.subParams['subSize'] = subImg.shape
self.subParams['bigSize'] = [np.int(Params['size'] / Params[
'numFiles'])] * 2
self.S = np.empty([self.subParams['bigSize'][0], self.subParams[
'bigSize'][1], 3], dtype=np.complex64)
self.P = np.empty([self.subParams['subSize'][0], self.subParams[
'subSize'][1], 3], dtype=np.complex64)
self.meanFFT = np.zeros([self.subParams['subSize'][0], self.
subParams['subSize'][1], 3], dtype=np.complex64)
self.meanNum = 0
self.subParams['fRApprox'] = np.empty([3], dtype=int)
self.subParams['coords'] = np.empty([3, 16, 16, 2])
self.subParams['isBF'] = np.empty([3, 16, 16])
for i in range(0, 3):
self.S[:, :, i] = self.initS0(subImg[:, :, i], self.subParams[
'bigSize'])
self.subParams['fRApprox'][i] = self.fRad(Params['fResolution'],
Params['NA'], self.subParams['wLen'][i])
print(Params['NA'], self.subParams['wLen'][i], Params['mag'],
Params['ps'], Params['smallSize'])
self.P[:, :, i] = self.initP0(self.subParams['subSize'], self.
subParams['fRApprox'][i])
self.subParams['coords'][i, :, :, :], self.subParams['isBF'][i,
:, :] = self.initCoords(i0, j0, self.subParams['wLen'][i],
self.subParams['fRApprox'][i])
self.bayer = np.empty([Params['divisor'], Params['divisor'], 3])
self.invBayer = np.empty([Params['divisor'], Params['divisor'], 3])
for i in range(3):
self.bayer[:, :, i], self.invBayer[:, :, i] = h.genBayer([
Params['divisor'], Params['divisor']], i)
def initS0(self, img, size):
""" Initialises the FT of the high res image by linear interpolation of a low res image """
I0 = cv.resize(img, (size[1], size[0]), interpolation=cv.INTER_LINEAR)
amplitude = np.sqrt(I0)
FI0 = fft2(ifftshift(amplitude))
FI0 = fftshift(FI0)
S = np.array(FI0, dtype=np.complex64)
return S
def initP0(self, size, radius):
""" Initialises the pupil function as a real circular step function of value 1 """
return h.circle(size, radius)[:, :, 0]
def fRad(self, fDu, NA, wLen):
""" Determines the approximate radius in F-space in pixels of the pupil function """
x = 2 * np.pi * NA / (wLen * fDu[0])
y = 2 * np.pi * NA / (wLen * fDu[1])
avr = np.int32(np.average([x, y]))
return avr
def initCoords(self, i, j, wLen, Rad):
""" Returns 2D array where LED coords relate to fourier centre positions """
segmentPos = [i, j]
n = self.Params['numFiles']
w = self.subParams['subSize'][0]
c = w / (2 * n)
centre = (segmentPos[0] * 2 * c + c - w) * self.Params['ps'][0
] / self.Params['mag']
self.Params['centre'] = centre
coords = np.empty((self.Params['LEDNum'][0], self.Params['LEDNum'][
1], 2), dtype=np.int32)
isBF = np.zeros((self.Params['LEDNum'][0], self.Params['LEDNum'][1]
), dtype=np.int32)
numImgs = int(len(self.Params['images']) ** 0.5)
for i, img in enumerate(self.Params['images']):
LED = meth.getLED(img)
LEDPixelPos = self.getLEDPos(LED[0], LED[1], centre, wLen)
coords[LED[0] + int(numImgs / 2) - 1, LED[1] + int(numImgs / 2) - 1
] = LEDPixelPos
if (LEDPixelPos[0] - w / 2) ** 2 + (LEDPixelPos[1] - w / 2
) ** 2 < Rad:
isBF[LED[0] + int(numImgs / 2) - 1, LED[1] + int(numImgs /
2) - 1] = 1
return coords, isBF
def getLEDPos(self, nx, ny, centre, wLen):
""" Determines the location of the centre of the fourier pattern in pixels """
ax = np.arctan((centre - nx * self.Params['LEDSpace']) / self.
Params['distance'])
ay = np.arctan((centre - ny * self.Params['LEDSpace']) / self.
Params['distance'])
dx = ax / (wLen * self.Params['fResolution'][0])
dy = ay / (wLen * self.Params['fResolution'][1])
pos = [int(dx + self.subParams['subSize'][0] / 2), int(dy + self.
subParams['subSize'][0] / 2)]
return pos
class splitImage:
def __init__(self, dir, imgName, numSplits, splitSize):
self.LEDPos = meth.getLED(imgName)
self.subImg = np.empty([numSplits, numSplits], dtype=subImage)
for i in range(numSplits):
for j in range(numSplits):
self.subImg[i, j] = subImage(dir, splitSize, imgName, self.
LEDPos, i, j)
class subImage:
def __init__(self, dir, splitSize, imgName, LEDPos, i, j):
img = meth.readImage(dir, imgName)
self.image = img[i * splitSize:(i + 1) * splitSize, j * splitSize:(
j + 1) * splitSize]
self.imgPos = [i, j]
self.LEDPos = LEDPos
<mask token>
|
<mask token>
class fullSys:
<mask token>
<mask token>
def getDivisor(self, img, splitSize):
imgSize = img.shape[0]
while True:
if imgSize % splitSize == 0:
divisor = splitSize
break
splitSize += 1
numFiles = int(imgSize / divisor)
return numFiles, divisor
<mask token>
<mask token>
class section:
def __init__(self, i0, j0, subImg, Params):
self.Params = Params
self.subParams = {}
self.subParams['wLen'] = [6.3e-07, 5.3e-07, 4.3e-07]
self.subParams['subSize'] = subImg.shape
self.subParams['bigSize'] = [np.int(Params['size'] / Params[
'numFiles'])] * 2
self.S = np.empty([self.subParams['bigSize'][0], self.subParams[
'bigSize'][1], 3], dtype=np.complex64)
self.P = np.empty([self.subParams['subSize'][0], self.subParams[
'subSize'][1], 3], dtype=np.complex64)
self.meanFFT = np.zeros([self.subParams['subSize'][0], self.
subParams['subSize'][1], 3], dtype=np.complex64)
self.meanNum = 0
self.subParams['fRApprox'] = np.empty([3], dtype=int)
self.subParams['coords'] = np.empty([3, 16, 16, 2])
self.subParams['isBF'] = np.empty([3, 16, 16])
for i in range(0, 3):
self.S[:, :, i] = self.initS0(subImg[:, :, i], self.subParams[
'bigSize'])
self.subParams['fRApprox'][i] = self.fRad(Params['fResolution'],
Params['NA'], self.subParams['wLen'][i])
print(Params['NA'], self.subParams['wLen'][i], Params['mag'],
Params['ps'], Params['smallSize'])
self.P[:, :, i] = self.initP0(self.subParams['subSize'], self.
subParams['fRApprox'][i])
self.subParams['coords'][i, :, :, :], self.subParams['isBF'][i,
:, :] = self.initCoords(i0, j0, self.subParams['wLen'][i],
self.subParams['fRApprox'][i])
self.bayer = np.empty([Params['divisor'], Params['divisor'], 3])
self.invBayer = np.empty([Params['divisor'], Params['divisor'], 3])
for i in range(3):
self.bayer[:, :, i], self.invBayer[:, :, i] = h.genBayer([
Params['divisor'], Params['divisor']], i)
def initS0(self, img, size):
""" Initialises the FT of the high res image by linear interpolation of a low res image """
I0 = cv.resize(img, (size[1], size[0]), interpolation=cv.INTER_LINEAR)
amplitude = np.sqrt(I0)
FI0 = fft2(ifftshift(amplitude))
FI0 = fftshift(FI0)
S = np.array(FI0, dtype=np.complex64)
return S
def initP0(self, size, radius):
""" Initialises the pupil function as a real circular step function of value 1 """
return h.circle(size, radius)[:, :, 0]
def fRad(self, fDu, NA, wLen):
""" Determines the approximate radius in F-space in pixels of the pupil function """
x = 2 * np.pi * NA / (wLen * fDu[0])
y = 2 * np.pi * NA / (wLen * fDu[1])
avr = np.int32(np.average([x, y]))
return avr
def initCoords(self, i, j, wLen, Rad):
""" Returns 2D array where LED coords relate to fourier centre positions """
segmentPos = [i, j]
n = self.Params['numFiles']
w = self.subParams['subSize'][0]
c = w / (2 * n)
centre = (segmentPos[0] * 2 * c + c - w) * self.Params['ps'][0
] / self.Params['mag']
self.Params['centre'] = centre
coords = np.empty((self.Params['LEDNum'][0], self.Params['LEDNum'][
1], 2), dtype=np.int32)
isBF = np.zeros((self.Params['LEDNum'][0], self.Params['LEDNum'][1]
), dtype=np.int32)
numImgs = int(len(self.Params['images']) ** 0.5)
for i, img in enumerate(self.Params['images']):
LED = meth.getLED(img)
LEDPixelPos = self.getLEDPos(LED[0], LED[1], centre, wLen)
coords[LED[0] + int(numImgs / 2) - 1, LED[1] + int(numImgs / 2) - 1
] = LEDPixelPos
if (LEDPixelPos[0] - w / 2) ** 2 + (LEDPixelPos[1] - w / 2
) ** 2 < Rad:
isBF[LED[0] + int(numImgs / 2) - 1, LED[1] + int(numImgs /
2) - 1] = 1
return coords, isBF
def getLEDPos(self, nx, ny, centre, wLen):
""" Determines the location of the centre of the fourier pattern in pixels """
ax = np.arctan((centre - nx * self.Params['LEDSpace']) / self.
Params['distance'])
ay = np.arctan((centre - ny * self.Params['LEDSpace']) / self.
Params['distance'])
dx = ax / (wLen * self.Params['fResolution'][0])
dy = ay / (wLen * self.Params['fResolution'][1])
pos = [int(dx + self.subParams['subSize'][0] / 2), int(dy + self.
subParams['subSize'][0] / 2)]
return pos
class splitImage:
def __init__(self, dir, imgName, numSplits, splitSize):
self.LEDPos = meth.getLED(imgName)
self.subImg = np.empty([numSplits, numSplits], dtype=subImage)
for i in range(numSplits):
for j in range(numSplits):
self.subImg[i, j] = subImage(dir, splitSize, imgName, self.
LEDPos, i, j)
class subImage:
def __init__(self, dir, splitSize, imgName, LEDPos, i, j):
img = meth.readImage(dir, imgName)
self.image = img[i * splitSize:(i + 1) * splitSize, j * splitSize:(
j + 1) * splitSize]
self.imgPos = [i, j]
self.LEDPos = LEDPos
<mask token>
|
<mask token>
class fullSys:
def __init__(self, dir, file, size, line):
csv_reader = pandas.read_csv(file, index_col='Objective')
self.Params = {}
self.Params['mag'] = csv_reader['Magnification'][line]
self.Params['NA'] = csv_reader['NA'][line]
self.Params['ps'] = [csv_reader['Pixel Size x'][line], csv_reader[
'Pixel Size y'][line]]
self.Params['distance'] = csv_reader['Screen Distance'][line]
self.Params['LEDSpace'] = csv_reader['LED Spacing'][line]
self.Params['LEDNum'] = [csv_reader['Num LED x'][line], csv_reader[
'Num LED x'][line]]
self.Params['dir'] = dir
self.Params['images'] = os.listdir(dir)
self.Params['numImgs'] = len(self.Params['images'])
self.Params['smallSize'] = meth.readImage(dir, self.Params['images'
][0], colour=1, getsize=True)
self.Params['fResolution'] = self.fRes(self.Params['mag'], self.
Params['smallSize'], self.Params['ps'])
print('fullSys')
splitSize, self.Params['lc'] = self.getSS()
img = meth.readImage(self.Params['dir'], self.Params['images'][0])
print('fullSys2')
numFiles, divisor = self.getDivisor(img, splitSize)
print('fullSys2')
self.Params['numFiles'] = numFiles
self.Params['divisor'] = divisor
self.Params['size'] = self.getSize(size, numFiles)
self.subObjs = np.empty([numFiles, numFiles], dtype=section)
print('fullSys1')
for i in range(numFiles):
for j in range(numFiles):
subImg = img[i * divisor:(i + 1) * divisor, j * divisor:(j +
1) * divisor]
self.subObjs[i, j] = section(i, j, subImg, self.Params)
h.progbar(i, numFiles, 'Initializing')
def getSS(self):
""" Determines the required subsection size based on Cittert Zernike theorem """
rho = 0.0003
lc = 0.61 * R * 530 / rho
size = lc * slef.Params['mag'] / self.Params['ps']
return size, lc
def getDivisor(self, img, splitSize):
imgSize = img.shape[0]
while True:
if imgSize % splitSize == 0:
divisor = splitSize
break
splitSize += 1
numFiles = int(imgSize / divisor)
return numFiles, divisor
<mask token>
<mask token>
class section:
def __init__(self, i0, j0, subImg, Params):
self.Params = Params
self.subParams = {}
self.subParams['wLen'] = [6.3e-07, 5.3e-07, 4.3e-07]
self.subParams['subSize'] = subImg.shape
self.subParams['bigSize'] = [np.int(Params['size'] / Params[
'numFiles'])] * 2
self.S = np.empty([self.subParams['bigSize'][0], self.subParams[
'bigSize'][1], 3], dtype=np.complex64)
self.P = np.empty([self.subParams['subSize'][0], self.subParams[
'subSize'][1], 3], dtype=np.complex64)
self.meanFFT = np.zeros([self.subParams['subSize'][0], self.
subParams['subSize'][1], 3], dtype=np.complex64)
self.meanNum = 0
self.subParams['fRApprox'] = np.empty([3], dtype=int)
self.subParams['coords'] = np.empty([3, 16, 16, 2])
self.subParams['isBF'] = np.empty([3, 16, 16])
for i in range(0, 3):
self.S[:, :, i] = self.initS0(subImg[:, :, i], self.subParams[
'bigSize'])
self.subParams['fRApprox'][i] = self.fRad(Params['fResolution'],
Params['NA'], self.subParams['wLen'][i])
print(Params['NA'], self.subParams['wLen'][i], Params['mag'],
Params['ps'], Params['smallSize'])
self.P[:, :, i] = self.initP0(self.subParams['subSize'], self.
subParams['fRApprox'][i])
self.subParams['coords'][i, :, :, :], self.subParams['isBF'][i,
:, :] = self.initCoords(i0, j0, self.subParams['wLen'][i],
self.subParams['fRApprox'][i])
self.bayer = np.empty([Params['divisor'], Params['divisor'], 3])
self.invBayer = np.empty([Params['divisor'], Params['divisor'], 3])
for i in range(3):
self.bayer[:, :, i], self.invBayer[:, :, i] = h.genBayer([
Params['divisor'], Params['divisor']], i)
def initS0(self, img, size):
""" Initialises the FT of the high res image by linear interpolation of a low res image """
I0 = cv.resize(img, (size[1], size[0]), interpolation=cv.INTER_LINEAR)
amplitude = np.sqrt(I0)
FI0 = fft2(ifftshift(amplitude))
FI0 = fftshift(FI0)
S = np.array(FI0, dtype=np.complex64)
return S
def initP0(self, size, radius):
""" Initialises the pupil function as a real circular step function of value 1 """
return h.circle(size, radius)[:, :, 0]
def fRad(self, fDu, NA, wLen):
""" Determines the approximate radius in F-space in pixels of the pupil function """
x = 2 * np.pi * NA / (wLen * fDu[0])
y = 2 * np.pi * NA / (wLen * fDu[1])
avr = np.int32(np.average([x, y]))
return avr
def initCoords(self, i, j, wLen, Rad):
""" Returns 2D array where LED coords relate to fourier centre positions """
segmentPos = [i, j]
n = self.Params['numFiles']
w = self.subParams['subSize'][0]
c = w / (2 * n)
centre = (segmentPos[0] * 2 * c + c - w) * self.Params['ps'][0
] / self.Params['mag']
self.Params['centre'] = centre
coords = np.empty((self.Params['LEDNum'][0], self.Params['LEDNum'][
1], 2), dtype=np.int32)
isBF = np.zeros((self.Params['LEDNum'][0], self.Params['LEDNum'][1]
), dtype=np.int32)
numImgs = int(len(self.Params['images']) ** 0.5)
for i, img in enumerate(self.Params['images']):
LED = meth.getLED(img)
LEDPixelPos = self.getLEDPos(LED[0], LED[1], centre, wLen)
coords[LED[0] + int(numImgs / 2) - 1, LED[1] + int(numImgs / 2) - 1
] = LEDPixelPos
if (LEDPixelPos[0] - w / 2) ** 2 + (LEDPixelPos[1] - w / 2
) ** 2 < Rad:
isBF[LED[0] + int(numImgs / 2) - 1, LED[1] + int(numImgs /
2) - 1] = 1
return coords, isBF
def getLEDPos(self, nx, ny, centre, wLen):
""" Determines the location of the centre of the fourier pattern in pixels """
ax = np.arctan((centre - nx * self.Params['LEDSpace']) / self.
Params['distance'])
ay = np.arctan((centre - ny * self.Params['LEDSpace']) / self.
Params['distance'])
dx = ax / (wLen * self.Params['fResolution'][0])
dy = ay / (wLen * self.Params['fResolution'][1])
pos = [int(dx + self.subParams['subSize'][0] / 2), int(dy + self.
subParams['subSize'][0] / 2)]
return pos
class splitImage:
def __init__(self, dir, imgName, numSplits, splitSize):
self.LEDPos = meth.getLED(imgName)
self.subImg = np.empty([numSplits, numSplits], dtype=subImage)
for i in range(numSplits):
for j in range(numSplits):
self.subImg[i, j] = subImage(dir, splitSize, imgName, self.
LEDPos, i, j)
class subImage:
def __init__(self, dir, splitSize, imgName, LEDPos, i, j):
img = meth.readImage(dir, imgName)
self.image = img[i * splitSize:(i + 1) * splitSize, j * splitSize:(
j + 1) * splitSize]
self.imgPos = [i, j]
self.LEDPos = LEDPos
<mask token>
|
import numpy as np
import cv2 as cv
import methods as meth
from numpy.fft import fft2, fftshift, ifft2, ifftshift
import pandas
import os
import noGPU as h
import matplotlib.pyplot as plt
class fullSys():
def __init__(self, dir, file, size, line):
csv_reader = pandas.read_csv(file, index_col='Objective')
self.Params = {}
self.Params['mag'] = csv_reader['Magnification'][line]
self.Params['NA'] = csv_reader['NA'][line]
self.Params['ps'] = [csv_reader['Pixel Size x'][line], csv_reader['Pixel Size y'][line]]
self.Params['distance'] = csv_reader['Screen Distance'][line]
self.Params['LEDSpace'] = csv_reader['LED Spacing'][line]
self.Params['LEDNum'] = [csv_reader['Num LED x'][line], csv_reader['Num LED x'][line]]
self.Params['dir'] = dir
self.Params['images'] = os.listdir(dir)
self.Params['numImgs'] = len(self.Params['images'])
self.Params['smallSize'] = meth.readImage(dir, self.Params['images'][0], colour=1, getsize=True)
self.Params['fResolution'] = self.fRes(self.Params['mag'], self.Params['smallSize'], self.Params['ps'])
print("fullSys")
## Instantiate sub Objects ##
splitSize, self.Params['lc'] = self.getSS()
img = meth.readImage(self.Params['dir'], self.Params['images'][0])
print("fullSys2")
numFiles, divisor = self.getDivisor(img, splitSize)
print("fullSys2")
self.Params['numFiles'] = numFiles
self.Params['divisor'] = divisor
self.Params['size'] = self.getSize(size, numFiles)
self.subObjs = np.empty([numFiles, numFiles], dtype=section)
print("fullSys1")
for i in range(numFiles):
for j in range(numFiles):
subImg = img[i * divisor:(i + 1) * divisor, j * divisor:(j + 1) * divisor]
self.subObjs[i, j] = section(i, j, subImg, self.Params)
h.progbar(i, numFiles, 'Initializing')
def getSS(self):
""" Determines the required subsection size based on Cittert Zernike theorem """
rho = 300e-6 # LED size
lc = 0.61*R*530/rho
size = lc*slef.Params['mag'] / self.Params['ps']
return size, lc
def getDivisor(self, img, splitSize):
imgSize = img.shape[0]
while True:
if imgSize % splitSize == 0:
divisor = splitSize
break
splitSize += 1
numFiles = int(imgSize / divisor)
return numFiles, divisor
def getSize(self, size, numSplits):
while True:
if size[0] % numSplits == 0:
break
size[0] += 1
return size[0]
def fRes(self, mag, size, ps):
""" Determines the change in spatial frequency across one pixel in F-space """
x = 2 * np.pi * mag / (size[0] * ps[0])
y = 2 * np.pi * mag / (size[1] * ps[1])
return [x, y]
class section():
def __init__(self, i0, j0, subImg, Params):
self.Params = Params
self.subParams = {}
self.subParams['wLen'] = [630e-9, 530e-9, 430e-9]
self.subParams['subSize'] = subImg.shape
self.subParams['bigSize'] = [np.int(Params['size'] / Params['numFiles'])] * 2
self.S = np.empty([self.subParams['bigSize'][0], self.subParams['bigSize'][1], 3], dtype=np.complex64)
self.P = np.empty([self.subParams['subSize'][0], self.subParams['subSize'][1], 3], dtype=np.complex64)
self.meanFFT = np.zeros([self.subParams['subSize'][0], self.subParams['subSize'][1], 3], dtype=np.complex64)
self.meanNum = 0
self.subParams['fRApprox'] = np.empty([3], dtype=int)
self.subParams['coords'] = np.empty([3, 16, 16, 2])
self.subParams['isBF'] = np.empty([3, 16, 16])
for i in range(0, 3):
self.S[:, :, i] = self.initS0(subImg[:, :, i], self.subParams['bigSize'])
self.subParams['fRApprox'][i] = self.fRad(Params['fResolution'],
Params['NA'], self.subParams['wLen'][i])
print(Params['NA'], self.subParams['wLen'][i], Params['mag'], Params['ps'], Params['smallSize'])
self.P[:, :, i] = self.initP0(self.subParams['subSize'], self.subParams['fRApprox'][i])
self.subParams['coords'][i, :, :, :], self.subParams['isBF'][i, :, :] =\
self.initCoords(i0, j0, self.subParams['wLen'][i], self.subParams['fRApprox'][i])
self.bayer = np.empty([Params['divisor'], Params['divisor'], 3])
self.invBayer = np.empty([Params['divisor'], Params['divisor'], 3])
for i in range(3):
self.bayer[:, :, i], self.invBayer[:, :, i] = h.genBayer([Params['divisor'], Params['divisor']], i)
def initS0(self, img, size):
""" Initialises the FT of the high res image by linear interpolation of a low res image """
I0 = cv.resize(img, (size[1], size[0]),
interpolation=cv.INTER_LINEAR) # Bilinear interpolated upsampled image
amplitude = np.sqrt(I0)
FI0 = fft2(ifftshift(amplitude))
FI0 = fftshift(FI0) # FI0.shape[0]
S = np.array(FI0, dtype=np.complex64)
return S
def initP0(self, size, radius):
""" Initialises the pupil function as a real circular step function of value 1 """
return h.circle(size, radius)[:, :, 0]
def fRad(self, fDu, NA, wLen):
""" Determines the approximate radius in F-space in pixels of the pupil function """
x = 2 * np.pi * NA / (wLen * fDu[0])
y = 2 * np.pi * NA / (wLen * fDu[1])
avr = np.int32(np.average([x, y]))
return avr
def initCoords(self, i, j, wLen, Rad):
""" Returns 2D array where LED coords relate to fourier centre positions """
segmentPos = [i, j]
n = self.Params['numFiles']
w = self.subParams['subSize'][0]
c = w / (2 * n)
centre = (segmentPos[0] * 2 * c + c - w) * self.Params['ps'][0]/self.Params['mag']
self.Params['centre'] = centre
coords = np.empty((self.Params['LEDNum'][0], self.Params['LEDNum'][1], 2), dtype=np.int32)
isBF = np.zeros((self.Params['LEDNum'][0], self.Params['LEDNum'][1]), dtype=np.int32)
numImgs = int(len(self.Params['images']) ** 0.5)
for i, img in enumerate(self.Params['images']):
LED = meth.getLED(img)
LEDPixelPos = self.getLEDPos(LED[0], LED[1], centre, wLen)
#print("LED:", LED, "LEDPixelPos:", LEDPixelPos)
#print("LEDPos:", [LED[0] + int(numImgs / 2) - 1, LED[1] + int(numImgs / 2) - 1])
coords[LED[0] + int(numImgs / 2) - 1, LED[1] + int(numImgs / 2) - 1] = LEDPixelPos
if ((LEDPixelPos[0]-w/2)**2 + (LEDPixelPos[1]-w/2)**2 < Rad):
isBF[LED[0] + int(numImgs / 2) - 1, LED[1] + int(numImgs / 2) - 1] = 1
return coords, isBF
def getLEDPos(self, nx, ny, centre, wLen):
""" Determines the location of the centre of the fourier pattern in pixels """
ax = np.arctan((centre - nx * self.Params['LEDSpace']) / self.Params['distance']) # Angle to x axis
ay = np.arctan((centre - ny * self.Params['LEDSpace']) / self.Params['distance']) # Angle to y axis
dx = ax / (wLen * self.Params['fResolution'][0])
dy = ay / (wLen * self.Params['fResolution'][1])
pos = [int(dx + self.subParams['subSize'][0] / 2), int(dy + self.subParams['subSize'][0] / 2)]
return pos
class splitImage():
def __init__(self, dir, imgName, numSplits, splitSize):
self.LEDPos = meth.getLED(imgName)
self.subImg = np.empty([numSplits, numSplits], dtype=subImage)
for i in range(numSplits):
for j in range(numSplits):
self.subImg[i, j] = subImage(dir, splitSize, imgName, self.LEDPos, i, j)
class subImage():
def __init__(self, dir, splitSize, imgName, LEDPos, i, j):
img = meth.readImage(dir, imgName)
self.image = img[i * splitSize:(i + 1) * splitSize, j * splitSize:(j + 1) * splitSize]
self.imgPos = [i, j]
self.LEDPos = LEDPos
########################################################################################################################
'''
class preProcess(objective):
def __init__(self, dir, file, size, line, colour=1):
""" Slices images into sections """
super().__init__(dir, file, size, line, colour=1)
numFiles, devisor = self.getDevisor(150)
self.genFiles(numFiles)
self.split(devisor, numFiles)
def genFiles(self, numFiles):
path = os.path.join(os.getcwd(), 'temp')
if os.path.isdir(path):
shutil.rmtree(path)
time.sleep(0.01)
os.mkdir(path)
for i in range(numFiles):
for j in range(numFiles):
folder = '%s_%s' % (str(i), str(j))
path1 = os.path.join(path, folder)
os.mkdir(path1)
def getDevisor(self, splitSize):
imgName = self.images[0]
img = self.readImage(self.dir, imgName)
imgSize = img.shape[0]
while True:
if imgSize % splitSize == 0:
devisor = splitSize
break
splitSize += 1
numFiles = int(imgSize / devisor)
return numFiles, devisor
def split(self, devisor, numFiles):
path0 = os.path.join(os.getcwd(), 'temp')
for i0, file in enumerate(self.images):
LED = self.getLED(file)
img = self.readImage(self.dir, file)
for i in range(numFiles):
for j in range(numFiles):
folder = '%s_%s' % (str(i), str(j))
path1 = os.path.join(path0, folder)
file = 'img_%s_%s_.jpg' % (str(LED[0]), str(LED[1]))
path = os.path.join(path1, file)
subImg = img[i * devisor:(i + 1) * devisor, j * devisor:(j + 1) * devisor]
cv.imwrite(path, subImg)
h.progbar(i0 * numFiles ** 2 + i * numFiles + j,
len(self.images) * numFiles ** 2, 'Slicing Images')
def initCoords(self, dir):
""" Returns 2D array where LED coords relate to fourier centre positions """
dirName = os.path.basename(dir)
segmentPos = self.getSegment(dirName)
N = len(os.listdir(dir))
n = np.sqrt(N)
w = self.smallSize[0]
c = w / (2 * n)
centre = (segmentPos[0] * 2 * c + c - w) * self.ps[0]/self.mag
coords = np.empty((self.LEDNum[0], self.LEDNum[1], 2), dtype=np.int32)
for i, img in enumerate(self.images):
LED = self.getLED(img)
LEDPixelPos = self.getLEDPos(LED[0], LED[1], centre)
coords[LED[0], LED[1]] = LEDPixelPos
return coords
'''
|
[
9,
11,
13,
15,
19
] |
2,240 |
5c7c90717f2e98c26675fec6390b4ea9797d6a4e
|
class TrieNode:
def __init__(self):
self.children = [None for i in range(26)]
self.isEndOfWord = 0
class Trie:
def __init__(self):
self.root = self.getNode()
def getNode(self):
return TrieNode()
def insert(self, key):
root = self.root
length = len(key)
for level in range(length):
index = ord(key[level])-ord('a')
if root.children[index]==None:
root.children[index] = self.getNode()
root = root.children[index]
root.isEndOfWord = 1
def search(self, key):
root = self.root
for level,c in enumerate(key):
if root.children[ord(c)-ord('a')]==None:
return False
root = root.children[ord(c)-ord('a')]
return root!=None and root.isEndOfWord==1
keys = ["the","a","there","anaswe","any", "by","their"]
output = ["Not present in trie", "Present in tire"]
# Trie object
t = Trie()
# Construct trie
for key in keys:
print 'inserting key, ', key
t.insert(key)
print("{} ---- {}".format("the",output[t.search("the")]))
print("{} ---- {}".format("these",output[t.search("these")]))
| null | null | null | null |
[
0
] |
2,241 |
e08fddefabf1b92aa97b939e05bb31d888df4e6a
|
<mask token>
def get_comports_list():
ports = list(lp.comports(include_links=False))
for p in ports:
print(p.device)
return ports
def read_while_LF(com, timeout_ms=500):
read_data = ''
delay_ms = 10
attempts = int(timeout_ms / delay_ms)
for i in range(attempts):
byte = com.read(size=1).decode('utf-8')
time.sleep(0.01)
read_data += byte
if byte == '\n':
break
return read_data
def read_write_gst(com, instruction):
write_data = instruction.encode('utf-8')
com.write(write_data)
recieved = []
while 1:
read_data = read_while_LF(com)
if read_data == '':
break
recieved.append(read_data)
return recieved
<mask token>
|
<mask token>
def get_comports_list():
ports = list(lp.comports(include_links=False))
for p in ports:
print(p.device)
return ports
def read_while_LF(com, timeout_ms=500):
read_data = ''
delay_ms = 10
attempts = int(timeout_ms / delay_ms)
for i in range(attempts):
byte = com.read(size=1).decode('utf-8')
time.sleep(0.01)
read_data += byte
if byte == '\n':
break
return read_data
def read_write_gst(com, instruction):
write_data = instruction.encode('utf-8')
com.write(write_data)
recieved = []
while 1:
read_data = read_while_LF(com)
if read_data == '':
break
recieved.append(read_data)
return recieved
<mask token>
print(s)
com.close()
|
<mask token>
def get_comports_list():
ports = list(lp.comports(include_links=False))
for p in ports:
print(p.device)
return ports
def read_while_LF(com, timeout_ms=500):
read_data = ''
delay_ms = 10
attempts = int(timeout_ms / delay_ms)
for i in range(attempts):
byte = com.read(size=1).decode('utf-8')
time.sleep(0.01)
read_data += byte
if byte == '\n':
break
return read_data
def read_write_gst(com, instruction):
write_data = instruction.encode('utf-8')
com.write(write_data)
recieved = []
while 1:
read_data = read_while_LF(com)
if read_data == '':
break
recieved.append(read_data)
return recieved
com = serial.Serial('COM3', baudrate=115200, timeout=0.02)
s = read_write_gst(com, 'fil_test:start\r')
print(s)
com.close()
|
<mask token>
import serial
import time
import serial.tools.list_ports as lp
def get_comports_list():
ports = list(lp.comports(include_links=False))
for p in ports:
print(p.device)
return ports
def read_while_LF(com, timeout_ms=500):
read_data = ''
delay_ms = 10
attempts = int(timeout_ms / delay_ms)
for i in range(attempts):
byte = com.read(size=1).decode('utf-8')
time.sleep(0.01)
read_data += byte
if byte == '\n':
break
return read_data
def read_write_gst(com, instruction):
write_data = instruction.encode('utf-8')
com.write(write_data)
recieved = []
while 1:
read_data = read_while_LF(com)
if read_data == '':
break
recieved.append(read_data)
return recieved
com = serial.Serial('COM3', baudrate=115200, timeout=0.02)
s = read_write_gst(com, 'fil_test:start\r')
print(s)
com.close()
|
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 19 12:28:39 2020
@author: Ксения
"""
import serial
import time
import serial.tools.list_ports as lp
def get_comports_list():
ports=list(lp.comports(include_links=False))
for p in ports:
print(p.device)
return ports
def read_while_LF(com, timeout_ms=500):
read_data =""
delay_ms=10
attempts=int(timeout_ms/delay_ms)
for i in range(attempts):
byte=com.read(size = 1).decode('utf-8')
time.sleep(0.01)
read_data+=byte
if byte == '\n':
break
return read_data
def read_write_gst(com, instruction):
write_data=instruction.encode('utf-8')
com.write(write_data)
recieved = []
while(1):
read_data=read_while_LF(com)
if(read_data == ""):
break
recieved.append(read_data)
return recieved
com = serial.Serial('COM3', baudrate=115200, timeout=0.02)
s=read_write_gst(com, "fil_test:start\r")
print(s)
com.close()
|
[
3,
4,
5,
6,
7
] |
2,242 |
e7239b4bc3db9bd427b9be888621f66e81b5edeb
|
<mask token>
|
<mask token>
VERSION = 0, 2, 14
__version__ = '.'.join(map(str, VERSION))
__all__ = ['AzFileClient', 'AzFileSystem', 'BlobPathDecoder',
'TableStorage', 'TableStorageWrapper', 'export_decorator']
|
from azfs.az_file_client import AzFileClient, export_decorator
from azfs.az_file_system import AzFileSystem
from azfs.utils import BlobPathDecoder
from .table_storage import TableStorage, TableStorageWrapper
VERSION = 0, 2, 14
__version__ = '.'.join(map(str, VERSION))
__all__ = ['AzFileClient', 'AzFileSystem', 'BlobPathDecoder',
'TableStorage', 'TableStorageWrapper', 'export_decorator']
|
from azfs.az_file_client import (
AzFileClient,
export_decorator
)
from azfs.az_file_system import AzFileSystem
from azfs.utils import BlobPathDecoder
from .table_storage import (
TableStorage,
TableStorageWrapper
)
# comparable tuple
VERSION = (0, 2, 14)
# generate __version__ via VERSION tuple
__version__ = ".".join(map(str, VERSION))
__all__ = [
"AzFileClient",
"AzFileSystem",
"BlobPathDecoder",
"TableStorage",
"TableStorageWrapper",
"export_decorator"
]
| null |
[
0,
1,
2,
3
] |
2,243 |
1ab69874a89311b22220dda541dfe03462a98a55
|
<mask token>
def unescape(text):
return text.replace(''', "'").replace('<', '<').replace('>', '>')
<mask token>
|
<mask token>
def unescape(text):
return text.replace(''', "'").replace('<', '<').replace('>', '>')
@client.event
async def on_ready():
print(f'{client.user} has connected to Discord!')
@client.event
async def on_message(message):
if message.content.startswith(translate_command):
lang = message.content[len(translate_command):message.content.find(' ')
]
ttt = message.content[len(translate_command) + len(lang) + 1:]
s = ttt.find(id_start)
while s != -1:
e = ttt.find('>', s)
ttt = ttt[:s] + client.get_user(int(ttt[s + len(id_start):e])
).name + ttt[e:]
s = ttt.find(id_start)
body = {'q': ttt, 'langpair': lang + '|en' if len(lang) == 2 else
lang[:2] + '|' + lang[2:], 'de': CONTACT_EMAIL}
r = requests.get('https://api.mymemory.translated.net/get', params=body
)
message_sent = await message.channel.send(unescape(r.json()[
'responseData']['translatedText']))
def check(reaction, user):
return user == message.author and str(reaction.emoji) == '❌'
try:
reaction, user = await client.wait_for('reaction_add', timeout=
600.0, check=check)
except asyncio.TimeoutError:
pass
else:
await message_sent.delete()
client.run(TOKEN)
|
<mask token>
TOKEN = 'TOKEN'
CONTACT_EMAIL = None
translate_command = '$t'
id_start = '<@!'
client = discord.Client()
def unescape(text):
return text.replace(''', "'").replace('<', '<').replace('>', '>')
@client.event
async def on_ready():
print(f'{client.user} has connected to Discord!')
@client.event
async def on_message(message):
if message.content.startswith(translate_command):
lang = message.content[len(translate_command):message.content.find(' ')
]
ttt = message.content[len(translate_command) + len(lang) + 1:]
s = ttt.find(id_start)
while s != -1:
e = ttt.find('>', s)
ttt = ttt[:s] + client.get_user(int(ttt[s + len(id_start):e])
).name + ttt[e:]
s = ttt.find(id_start)
body = {'q': ttt, 'langpair': lang + '|en' if len(lang) == 2 else
lang[:2] + '|' + lang[2:], 'de': CONTACT_EMAIL}
r = requests.get('https://api.mymemory.translated.net/get', params=body
)
message_sent = await message.channel.send(unescape(r.json()[
'responseData']['translatedText']))
def check(reaction, user):
return user == message.author and str(reaction.emoji) == '❌'
try:
reaction, user = await client.wait_for('reaction_add', timeout=
600.0, check=check)
except asyncio.TimeoutError:
pass
else:
await message_sent.delete()
client.run(TOKEN)
|
import discord, requests
from random import choice
TOKEN = 'TOKEN'
CONTACT_EMAIL = None
translate_command = '$t'
id_start = '<@!'
client = discord.Client()
def unescape(text):
return text.replace(''', "'").replace('<', '<').replace('>', '>')
@client.event
async def on_ready():
print(f'{client.user} has connected to Discord!')
@client.event
async def on_message(message):
if message.content.startswith(translate_command):
lang = message.content[len(translate_command):message.content.find(' ')
]
ttt = message.content[len(translate_command) + len(lang) + 1:]
s = ttt.find(id_start)
while s != -1:
e = ttt.find('>', s)
ttt = ttt[:s] + client.get_user(int(ttt[s + len(id_start):e])
).name + ttt[e:]
s = ttt.find(id_start)
body = {'q': ttt, 'langpair': lang + '|en' if len(lang) == 2 else
lang[:2] + '|' + lang[2:], 'de': CONTACT_EMAIL}
r = requests.get('https://api.mymemory.translated.net/get', params=body
)
message_sent = await message.channel.send(unescape(r.json()[
'responseData']['translatedText']))
def check(reaction, user):
return user == message.author and str(reaction.emoji) == '❌'
try:
reaction, user = await client.wait_for('reaction_add', timeout=
600.0, check=check)
except asyncio.TimeoutError:
pass
else:
await message_sent.delete()
client.run(TOKEN)
|
import discord, requests
from random import choice
TOKEN = 'TOKEN'
CONTACT_EMAIL = None #'Contact email for getting 10000 words/day instead of 1000'
translate_command = '$t'
id_start = '<@!'
client = discord.Client()
def unescape(text):
return text.replace(''', '\'').replace('<','<').replace('>', '>') # to improve
@client.event
async def on_ready():
print(f'{client.user} has connected to Discord!')
@client.event
async def on_message(message):
if message.content.startswith(translate_command):
lang = message.content[len(translate_command):message.content.find(' ')]
ttt = message.content[len(translate_command)+len(lang)+1:]
s = ttt.find(id_start)
while s != -1:
e = ttt.find('>',s)
ttt = ttt[:s]+client.get_user(int(ttt[s+len(id_start):e])).name+ttt[e:]
s = ttt.find(id_start)
body = {
'q': ttt,
'langpair': lang+'|en' if len(lang) == 2 else lang[:2]+'|'+lang[2:],
'de': CONTACT_EMAIL
}
r = requests.get('https://api.mymemory.translated.net/get', params=body)
message_sent = await message.channel.send(unescape(r.json()['responseData']['translatedText']))
def check(reaction, user):
return user == message.author and str(reaction.emoji) == '❌'
try:
reaction, user = await client.wait_for('reaction_add', timeout=600.0, check=check)
except asyncio.TimeoutError:
pass
else:
await message_sent.delete()
client.run(TOKEN)
|
[
1,
2,
3,
4,
5
] |
2,244 |
0188355f84054143bd4ff9da63f1128e9eb5b23b
|
<mask token>
class LoginView(Resource):
def __init__(self):
self.db = UsersModel()
self.user_db = IncidentsModel()
def post(self):
data = request.get_json()
username = data['username']
password = data['password']
auth = self.db.authenticate(username, password)
return auth
class UserView(Resource):
def __init__(self):
self.db = UsersModel()
def get(self, id):
access_token = Validations().get_access_token()
if not access_token:
return jsonify({'Message': 'Token needed. Please login'})
else:
res = self.db.get_single_user(id)
return make_response(jsonify({'Response': res}), 201)
def delete(self, id):
access_token = Validations().get_access_token()
user = self.db.check_user_id(id)
if not access_token:
return jsonify({'Message': 'Token needed. Please login'})
elif not user:
return jsonify({'Message': 'User ID does not exist'})
else:
self.db.delete_user(id)
return {'Message': 'User Deleted'}
def put(self, id):
access_token = Validations().get_access_token()
user = self.db.check_user_id(id)
if not access_token:
return jsonify({'Message': 'Token needed. Please login'})
elif not user:
return jsonify({'Message': 'User ID does not exist'})
if access_token:
data = request.get_json()
resp = Validations().validate_user_inputs(data)
if resp == str(resp):
return make_response(jsonify({'Message': resp}), 201)
else:
self.db.update_user(id, resp)
return make_response(jsonify({'Message':
'User Details Updated'}), 201)
|
<mask token>
class UsersView(Resource):
<mask token>
def post(self):
data = request.get_json()
resp = Validations().validate_user_inputs(data)
username = data['username']
user = self.db.register_users(username)
if len(user) != 0:
return make_response(jsonify({'Message':
'Username already exists'}), 202)
elif resp == str(resp):
return make_response(jsonify({'Message': resp}), 201)
else:
self.db.save(resp)
return make_response(jsonify({'Message':
'User Registered. Please login'}), 201)
<mask token>
class LoginView(Resource):
def __init__(self):
self.db = UsersModel()
self.user_db = IncidentsModel()
def post(self):
data = request.get_json()
username = data['username']
password = data['password']
auth = self.db.authenticate(username, password)
return auth
class UserView(Resource):
def __init__(self):
self.db = UsersModel()
def get(self, id):
access_token = Validations().get_access_token()
if not access_token:
return jsonify({'Message': 'Token needed. Please login'})
else:
res = self.db.get_single_user(id)
return make_response(jsonify({'Response': res}), 201)
def delete(self, id):
access_token = Validations().get_access_token()
user = self.db.check_user_id(id)
if not access_token:
return jsonify({'Message': 'Token needed. Please login'})
elif not user:
return jsonify({'Message': 'User ID does not exist'})
else:
self.db.delete_user(id)
return {'Message': 'User Deleted'}
def put(self, id):
access_token = Validations().get_access_token()
user = self.db.check_user_id(id)
if not access_token:
return jsonify({'Message': 'Token needed. Please login'})
elif not user:
return jsonify({'Message': 'User ID does not exist'})
if access_token:
data = request.get_json()
resp = Validations().validate_user_inputs(data)
if resp == str(resp):
return make_response(jsonify({'Message': resp}), 201)
else:
self.db.update_user(id, resp)
return make_response(jsonify({'Message':
'User Details Updated'}), 201)
|
<mask token>
class UsersView(Resource):
def __init__(self):
self.db = UsersModel()
def post(self):
data = request.get_json()
resp = Validations().validate_user_inputs(data)
username = data['username']
user = self.db.register_users(username)
if len(user) != 0:
return make_response(jsonify({'Message':
'Username already exists'}), 202)
elif resp == str(resp):
return make_response(jsonify({'Message': resp}), 201)
else:
self.db.save(resp)
return make_response(jsonify({'Message':
'User Registered. Please login'}), 201)
<mask token>
class LoginView(Resource):
def __init__(self):
self.db = UsersModel()
self.user_db = IncidentsModel()
def post(self):
data = request.get_json()
username = data['username']
password = data['password']
auth = self.db.authenticate(username, password)
return auth
class UserView(Resource):
def __init__(self):
self.db = UsersModel()
def get(self, id):
access_token = Validations().get_access_token()
if not access_token:
return jsonify({'Message': 'Token needed. Please login'})
else:
res = self.db.get_single_user(id)
return make_response(jsonify({'Response': res}), 201)
def delete(self, id):
access_token = Validations().get_access_token()
user = self.db.check_user_id(id)
if not access_token:
return jsonify({'Message': 'Token needed. Please login'})
elif not user:
return jsonify({'Message': 'User ID does not exist'})
else:
self.db.delete_user(id)
return {'Message': 'User Deleted'}
def put(self, id):
access_token = Validations().get_access_token()
user = self.db.check_user_id(id)
if not access_token:
return jsonify({'Message': 'Token needed. Please login'})
elif not user:
return jsonify({'Message': 'User ID does not exist'})
if access_token:
data = request.get_json()
resp = Validations().validate_user_inputs(data)
if resp == str(resp):
return make_response(jsonify({'Message': resp}), 201)
else:
self.db.update_user(id, resp)
return make_response(jsonify({'Message':
'User Details Updated'}), 201)
|
<mask token>
class UsersView(Resource):
def __init__(self):
self.db = UsersModel()
def post(self):
data = request.get_json()
resp = Validations().validate_user_inputs(data)
username = data['username']
user = self.db.register_users(username)
if len(user) != 0:
return make_response(jsonify({'Message':
'Username already exists'}), 202)
elif resp == str(resp):
return make_response(jsonify({'Message': resp}), 201)
else:
self.db.save(resp)
return make_response(jsonify({'Message':
'User Registered. Please login'}), 201)
def get(self):
access_token = Validations().get_access_token()
if not access_token:
return jsonify({'Message': 'Token needed. Please login'})
else:
users = self.db.get_users()
return make_response(jsonify({'Users': users, 'Message':
'All Users'}), 200)
class LoginView(Resource):
def __init__(self):
self.db = UsersModel()
self.user_db = IncidentsModel()
def post(self):
data = request.get_json()
username = data['username']
password = data['password']
auth = self.db.authenticate(username, password)
return auth
class UserView(Resource):
def __init__(self):
self.db = UsersModel()
def get(self, id):
access_token = Validations().get_access_token()
if not access_token:
return jsonify({'Message': 'Token needed. Please login'})
else:
res = self.db.get_single_user(id)
return make_response(jsonify({'Response': res}), 201)
def delete(self, id):
access_token = Validations().get_access_token()
user = self.db.check_user_id(id)
if not access_token:
return jsonify({'Message': 'Token needed. Please login'})
elif not user:
return jsonify({'Message': 'User ID does not exist'})
else:
self.db.delete_user(id)
return {'Message': 'User Deleted'}
def put(self, id):
access_token = Validations().get_access_token()
user = self.db.check_user_id(id)
if not access_token:
return jsonify({'Message': 'Token needed. Please login'})
elif not user:
return jsonify({'Message': 'User ID does not exist'})
if access_token:
data = request.get_json()
resp = Validations().validate_user_inputs(data)
if resp == str(resp):
return make_response(jsonify({'Message': resp}), 201)
else:
self.db.update_user(id, resp)
return make_response(jsonify({'Message':
'User Details Updated'}), 201)
|
from flask_restful import Resource
from flask import jsonify, make_response, request
from ..models.Users import UsersModel
from ..models.Incidents import IncidentsModel
from app.api.validations.validations import Validations
class UsersView(Resource):
def __init__(self):
self.db = UsersModel()
def post(self):
data = request.get_json()
resp = Validations().validate_user_inputs(data)
username = data['username']
user = self.db.register_users(username)
if len(user) != 0:
return make_response(jsonify({
'Message': 'Username already exists'
}), 202)
elif resp == str(resp):
return make_response(jsonify({
"Message": resp
}), 201)
else:
self.db.save(resp)
return make_response(jsonify({
"Message": "User Registered. Please login"
}), 201)
def get(self):
access_token = Validations().get_access_token()
if not access_token:
return jsonify({"Message": "Token needed. Please login"})
else:
users = self.db.get_users()
return make_response(jsonify({
"Users": users,
"Message": "All Users"
}), 200)
class LoginView(Resource):
def __init__(self):
self.db = UsersModel()
self.user_db = IncidentsModel()
def post(self):
data = request.get_json()
username = data['username']
password = data['password']
auth = self.db.authenticate(username, password)
return auth
class UserView(Resource):
def __init__(self):
self.db = UsersModel()
def get(self, id):
access_token = Validations().get_access_token()
if not access_token:
return jsonify({"Message": "Token needed. Please login"})
else:
res = self.db.get_single_user(id)
return make_response(jsonify({
'Response': res
}), 201)
def delete(self, id):
access_token = Validations().get_access_token()
user = self.db.check_user_id(id)
if not access_token:
return jsonify({"Message": "Token needed. Please login"})
elif not user:
return jsonify({"Message": "User ID does not exist"})
else:
self.db.delete_user(id)
return {
"Message": "User Deleted"
}
def put(self, id):
access_token = Validations().get_access_token()
user = self.db.check_user_id(id)
if not access_token:
return jsonify({"Message": "Token needed. Please login"})
elif not user:
return jsonify({"Message": "User ID does not exist"})
if access_token:
data = request.get_json()
resp = Validations().validate_user_inputs(data)
if resp == str(resp):
return make_response(jsonify({
"Message": resp
}), 201)
else:
self.db.update_user(id, resp)
return make_response(jsonify({
'Message': 'User Details Updated'
}), 201)
|
[
8,
10,
11,
12,
14
] |
2,245 |
d499b4e189a0c3c6efa6a07871dbc6c2996a2dcb
|
<mask token>
|
<mask token>
class TGAbstractRegistry(ABC):
<mask token>
|
<mask token>
class TGAbstractRegistry(ABC):
def __init__(self):
self.rule_engine = TGLoggingRuleEngineFactory().create()
self.logger = logging.getLogger()
self.event_distributor = TGEventDistributor(logging.getLogger())
self.handler_map_factory = TGHandlerMapFactory().create()
|
import logging
from abc import ABC
from thraxisgamespatterns.application.handler_map_factory import TGHandlerMapFactory
from thraxisgamespatterns.eventhandling.event_distributor import TGEventDistributor
from thraxisgamespatterns.factories.logging_rule_engine_factory import TGLoggingRuleEngineFactory
class TGAbstractRegistry(ABC):
def __init__(self):
self.rule_engine = TGLoggingRuleEngineFactory().create()
self.logger = logging.getLogger()
self.event_distributor = TGEventDistributor(logging.getLogger())
self.handler_map_factory = TGHandlerMapFactory().create()
| null |
[
0,
1,
2,
3
] |
2,246 |
36ab827b889adcd4d54296e7da432d3b39d5a2e6
|
from cobra.model.fabric import HIfPol
from createMo import *
DEFAULT_AUTO_NEGOTIATION = 'on'
DEFAULT_SPEED = '10G'
DEFAULT_LINK_DEBOUNCE_INTERVAL = 100
AUTO_NEGOTIATION_CHOICES = ['on', 'off']
SPEED_CHOICES = ['100M', '1G', '10G', '40G']
def input_key_args(msg='\nPlease Specify Link Level Policy:'):
print msg
return input_raw_input("Link Level Policy Name", required=True)
def input_optional_args():
args = {}
args['atuo_negotiation'] = input_options('Auto Negotiation', DEFAULT_AUTO_NEGOTIATION, AUTO_NEGOTIATION_CHOICES)
args['speed'] = input_options('Speed', DEFAULT_SPEED, SPEED_CHOICES)
args['link_debounce_interval'] = input_options('Link Debounce Interval (msec)', str(DEFAULT_LINK_DEBOUNCE_INTERVAL), '', num_accept=True)
return args
def create_link_level_policy(parent_mo, link_level_policy, **args):
"""Create Link Level Policy"""
args = args['optional_args'] if 'optional_args' in args.keys() else args
# Create mo
if is_valid_key(args, 'atuo_negotiation'):
if args['atuo_negotiation'] or args['atuo_negotiation'] == 'on':
args['atuo_negotiation'] = 'on'
elif not args['atuo_negotiation'] or args['atuo_negotiation'] == 'off':
args['atuo_negotiation'] = 'off'
fabric_hifpol = HIfPol(parent_mo, link_level_policy,
autoNeg=get_value(args, 'atuo_negotiation', DEFAULT_AUTO_NEGOTIATION),
speed=get_value(args, 'speed', DEFAULT_SPEED),
linkDebounce=get_value(args, 'link_debounce_interval', DEFAULT_LINK_DEBOUNCE_INTERVAL),
)
return fabric_hifpol
class CreateLinkLevelPolicy(CreateMo):
def __init__(self):
self.description = 'Create Link Level Policy. The host interface policy. This specifies the layer 1 parameters of host facing ports. '
self.link_level_policy = None
super(CreateLinkLevelPolicy, self).__init__()
def set_cli_mode(self):
super(CreateLinkLevelPolicy, self).set_cli_mode()
self.parser_cli.add_argument('link_level_policy', help='The name of the interface policy. ')
self.parser_cli.add_argument('-a', '--atuo_negotiation', default= DEFAULT_AUTO_NEGOTIATION, choices=AUTO_NEGOTIATION_CHOICES, help='The policy auto-negotiation. Auto-negotiation is an optional function of the IEEE 802.3u Fast Ethernet standard that enables devices to automatically exchange information over a link about speed and duplex abilities.')
self.parser_cli.add_argument('-s', '--speed', default= DEFAULT_SPEED, choices=SPEED_CHOICES, help='The interface policy administrative port speed. The data transfer rate for the port should match the destination to which the port is linked. The administrative speed can be changed only for certain ports, and not all speeds are available on all systems. For more information, see the Hardware Installation Guide for your fabric interconnect.')
self.parser_cli.add_argument('-l', '--link_debounce_interval', default= DEFAULT_LINK_DEBOUNCE_INTERVAL, help='The interface policy administrative port link debounce interval. Enables the debounce timer for physical interface ports and sets it for a specified amount of time in milliseconds. The debounce timer is disabled if you specify the time to 0 ms.')
def read_key_args(self):
self.link_level_policy = self.args.pop('link_level_policy')
def wizard_mode_input_args(self):
self.args['link_level_policy'] = input_key_args()
if not self.delete:
self.args['optional_args'] = input_optional_args()
def delete_mo(self):
self.check_if_mo_exist('uni/infra/hintfpol-', self.link_level_policy, HIfPol, description='Link Level Policy')
super(CreateLinkLevelPolicy, self).delete_mo()
def main_function(self):
# Query to parent
self.look_up_mo('uni/infra/', '')
create_link_level_policy(self.mo, self.link_level_policy, optional_args=self.optional_args)
if __name__ == '__main__':
mo = CreateLinkLevelPolicy()
| null | null | null | null |
[
0
] |
2,247 |
fac60a8967354e4f306b95fdb5c75d02dc2c1455
|
<mask token>
|
<mask token>
class Migration(migrations.Migration):
<mask token>
<mask token>
|
<mask token>
class Migration(migrations.Migration):
dependencies = [('chat', '0005_user_image')]
operations = [migrations.AlterField(model_name='user', name=
'first_name', field=models.CharField(max_length=255, verbose_name=
'Имя')), migrations.AlterField(model_name='user', name='last_name',
field=models.CharField(max_length=255, verbose_name='Фамилия'))]
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('chat', '0005_user_image')]
operations = [migrations.AlterField(model_name='user', name=
'first_name', field=models.CharField(max_length=255, verbose_name=
'Имя')), migrations.AlterField(model_name='user', name='last_name',
field=models.CharField(max_length=255, verbose_name='Фамилия'))]
|
# Generated by Django 3.2.6 on 2021-08-19 22:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('chat', '0005_user_image'),
]
operations = [
migrations.AlterField(
model_name='user',
name='first_name',
field=models.CharField(max_length=255, verbose_name='Имя'),
),
migrations.AlterField(
model_name='user',
name='last_name',
field=models.CharField(max_length=255, verbose_name='Фамилия'),
),
]
|
[
0,
1,
2,
3,
4
] |
2,248 |
39d82267f966ca106ee384e540c31a3e5e433318
|
<mask token>
|
<mask token>
def EuclidGCD(a, b):
if b == 0:
return a
else:
a = a % b
return EuclidGCD(b, a)
<mask token>
|
<mask token>
def EuclidGCD(a, b):
if b == 0:
return a
else:
a = a % b
return EuclidGCD(b, a)
<mask token>
print(EuclidGCD(in_[0], in_[1]))
|
<mask token>
def EuclidGCD(a, b):
if b == 0:
return a
else:
a = a % b
return EuclidGCD(b, a)
in_ = [int(n) for n in input().split()]
print(EuclidGCD(in_[0], in_[1]))
|
"""
Task. Given two integers a and b, find their greatest common divisor.
Input Format. The two integers a, b are given in the same line separated by space.
Constraints. 1<=a,b<=2·109.
Output Format. Output GCD(a, b).
"""
def EuclidGCD(a, b):
if b == 0:
return a
else:
a = a%b
return EuclidGCD(b, a)
in_ = [int(n) for n in input().split()]
print(EuclidGCD(in_[0], in_[1]))
|
[
0,
1,
2,
3,
4
] |
2,249 |
7d0b0cb19e22ff338104e0c2061da94ba04d4f16
|
from __future__ import division
import numpy as np
import scipy.stats
from tms import read_and_transform
__author__ = 'Diego'
def estimate_vrpn_clock_drift(points):
# clocks = [map(np.datetime64,(p.date,p.ref_date,p.point_date)) for p in points]
clocks = [(p.date, p.ref_date, p.point_date) for p in points]
real_date, uni_date, obj_date = map(np.array, zip(*clocks))
get_seconds = np.vectorize(lambda x: x.total_seconds())
uni_obj = get_seconds(uni_date - obj_date)
good_sample = uni_obj == 0
good_real = real_date[good_sample]
good_uni = uni_date[good_sample]
assert good_uni.shape[0] > 0
good_drift = good_real - good_uni
mode_drift = scipy.stats.mode(good_drift)
return mode_drift[0][0]
def add_time_drift_to_vrpn(point, drift):
point.point_date = point.point_date + drift
point.ref_date = point.ref_date + drift
return point
def fix_vrpn_time_drift(points):
drift = estimate_vrpn_clock_drift(points)
for p in points:
add_time_drift_to_vrpn(p, drift)
return drift
def estimate_timing_errors(point, corrected=True):
internal_error = abs((point.ref_date - point.point_date).total_seconds())
if corrected:
uni_error = abs((point.date - point.ref_date).total_seconds())
obj_error = abs((point.date - point.point_date).total_seconds())
return uni_error + obj_error + internal_error
else:
return internal_error
def __test():
import os
test_dir = os.path.join(os.path.dirname(__file__), "data")
test_file = os.path.join(test_dir, "TMS-758.csv")
test_file = os.path.join(test_dir, "TMS-441.csv")
# test_file = os.path.join(test_dir, "TMS-310.csv")
points = read_and_transform.read_csv_file(test_file)
fix_vrpn_time_drift(points)
print points[0]
errors = [estimate_timing_errors(p, True) for p in points]
mean_error = np.mean(errors)
print "mean error:\t", mean_error
print "std error:\t", np.std(errors)
max_error = np.max(errors)
print "max error:\t", max_error
print "above 10% max:\t", len(filter(lambda x: x > max_error / 10, errors))
if __name__ == "__main__":
__test()
| null | null | null | null |
[
0
] |
2,250 |
732478fd826e09cf304760dfcc30cd077f74d83e
|
import pandas as pd
import numpy as np
#import data
df = pd.read_csv('../.gitignore/PPP_data_to_150k.csv')
counties = pd.read_csv('../data/zip_code_database.csv')
demographics = pd.read_csv('../data/counties.csv')
#filter out all unanswered ethnicities
df2 = df[~df.RaceEthnicity.str.contains("Unanswered")]
#drop nonprofit column
df2.drop('NonProfit', axis=1,inplace=True)
#drop row with Nebraska Zip code
df2.drop([71479],axis=0, inplace=True)
#filter zip code database for Colorado, drop unnecessary columns
co_counties = counties[counties['state']=='CO']
co_counties_1 = co_counties.drop(['decommissioned', 'acceptable_cities', 'unacceptable_cities','timezone','area_codes','world_region','country','irs_estimated_population_2015','primary_city','state'],axis=1)
#merge counties onto dataframe
df_with_counties = pd.merge(df2,co_counties_1, left_on='Zip', right_on='zip')
#only include 2018 demographic data
demographics_18 = demographics[demographics['YEAR']==2018]
demographics_18 = demographics_18.iloc[:,:11]
#drop NAN Jobs Retained values for scatter comparison of Jobs Retained to Loan Amount by ethnicity
ethnicity_dfs_job_comparison = [x.dropna(subset=['JobsRetained']) for x in ethnicity_dfs]
if __name__ == '__main__':
| null | null | null | null |
[
0
] |
2,251 |
673d6bb02ec666dbdbecb5fd7fd5041da1941cf8
|
import mosquitto
import json
import time
device_id = "868850013067326"
# The callback for when the client receives a CONNACK response from the server.
def on_connect(mosq, userdata, rc):
print("Connected with result code "+str(rc))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe('vneigbor/%s' % device_id)
# The callback for when a PUBLISH message is received from the server.
def on_message(mosq, userdata, message):
print message.topic, message.payload
client = mosquitto.Mosquitto("mosq-rec")
client.on_connect = on_connect
client.on_message = on_message
client.connect("localhost", "18833")
# Blocking call that processes network traffic, dispatches callbacks and
# handles reconnecting.
# Other loop*() functions are available that give a threaded interface and a
# manual interface.
client.loop_forever()
| null | null | null | null |
[
0
] |
2,252 |
2a69aa0cd9d0e39ad82d6a354e956bdad0648797
|
<mask token>
|
<mask token>
class ActivityConfig(AppConfig):
<mask token>
|
<mask token>
class ActivityConfig(AppConfig):
name = 'apps.activity'
|
from django.apps import AppConfig
class ActivityConfig(AppConfig):
name = 'apps.activity'
| null |
[
0,
1,
2,
3
] |
2,253 |
2ab3adb4d0ed7e6e48afb2a8dab8f9250d335723
|
class A(object):
_a ='d'
@staticmethod
def func_1():
A._a = 'b'
print A._a
@classmethod
def func_3(cls):
print cls._a
def func_2(self):
# self._a = 'c'
print self._a
# print A._a
#
# class B(object):
# @staticmethod
# def func_1():
# A.___a = 'c'
# print A.___a
# print A.___a
# B.func_1()
print A._a
# A.func_3()
A.func_1()
# A().func_2()
A.func_1()
A.func_3()
# print A().a
print A._a
| null | null | null | null |
[
0
] |
2,254 |
6eb59f62a1623f308e0eda4e616be4177a421179
|
import sys
import pysolr
import requests
import logging
import json
import datetime
from urlparse import urlparse
from django.conf import settings
from django.utils.html import strip_tags
from aggregator.utils import mercator_to_llbbox
def get_date(layer):
"""
Returns a date for Solr. A date can be detected or from metadata.
It can be a range or a simple date in isoformat.
"""
date = None
type = 1
layer_dates = layer.get_layer_dates()
if layer_dates:
date = layer_dates[0][0]
type = layer_dates[0][1]
if date is None:
date = layer.created.date()
# layer date > 2300 is invalid for sure
# TODO put this logic in date miner
if date.year > 2300:
date = None
if type == 0:
type = "Detected"
if type == 1:
type = "From Metadata"
return get_solr_date(date), type
def get_solr_date(pydate):
"""
Returns a date in a valid Solr format from a string.
"""
# check if date is valid and then set it to solr format YYYY-MM-DDThh:mm:ssZ
try:
if isinstance(pydate, datetime.datetime):
solr_date = '%sZ' % pydate.isoformat()[0:19]
return solr_date
else:
return None
except Exception:
return None
class SolrHypermap(object):
solr_url = settings.SOLR_URL
solr = pysolr.Solr(solr_url, timeout=60)
logger = logging.getLogger("hypermap")
@staticmethod
def get_domain(url):
urlParts = urlparse(url)
hostname = urlParts.hostname
if hostname == "localhost":
return "Harvard" # assumption
return hostname
@staticmethod
def is_solr_up():
solr_url = settings.SOLR_URL
solr_url_parts = solr_url.split('/')
admin_url = '/'.join(solr_url_parts[:-1]) + '/admin/cores'
params = {'action': 'STATUS', 'wt': 'json'}
try:
req = requests.get(admin_url, params=params)
response = json.loads(req.text)
status = response['status']
if status:
response = True
except requests.exceptions.RequestException:
response = False
return response
@staticmethod
def layer_to_solr(layer):
category = None
username = None
try:
# as a first thing we need to remove the existing index in case there is already one
SolrHypermap.solr.delete(q='LayerId:%s' % layer.id)
bbox = None
if not layer.has_valid_bbox():
message = 'There are not valid coordinates for layer id: %s' % layer.id
SolrHypermap.logger.error(message)
else:
bbox = [float(layer.bbox_x0), float(layer.bbox_y0), float(layer.bbox_x1), float(layer.bbox_y1)]
for proj in layer.srs.values():
if proj['code'] in ('102113', '102100'):
bbox = mercator_to_llbbox(bbox)
minX = bbox[0]
minY = bbox[1]
maxX = bbox[2]
maxY = bbox[3]
# coords hack needed by solr
if (minX < -180):
minX = -180
if (maxX > 180):
maxX = 180
if (minY < -90):
minY = -90
if (maxY > 90):
maxY = 90
wkt = "ENVELOPE({:f},{:f},{:f},{:f})".format(minX, maxX, maxY, minY)
halfWidth = (maxX - minX) / 2.0
halfHeight = (maxY - minY) / 2.0
area = (halfWidth * 2) * (halfHeight * 2)
domain = SolrHypermap.get_domain(layer.service.url)
if hasattr(layer, 'layerwm'):
category = layer.layerwm.category
username = layer.layerwm.username
abstract = layer.abstract
if abstract:
abstract = strip_tags(layer.abstract)
else:
abstract = ''
if layer.service.type == "WM":
originator = username
else:
originator = domain
# now we add the index
solr_record = {
"LayerId": str(layer.id),
"LayerName": layer.name,
"LayerTitle": layer.title,
"Originator": originator,
"ServiceId": str(layer.service.id),
"ServiceType": layer.service.type,
"LayerCategory": category,
"LayerUsername": username,
"LayerUrl": layer.url,
"LayerReliability": layer.reliability,
"LayerRecentReliability": layer.recent_reliability,
"LayerLastStatus": layer.last_status,
"Is_Public": layer.is_public,
"Availability": "Online",
"Location": '{"layerInfoPage": "' + layer.get_absolute_url() + '"}',
"Abstract": abstract,
"SrsProjectionCode": layer.srs.values_list('code', flat=True),
"DomainName": layer.service.get_domain
}
solr_date, type = get_date(layer)
if solr_date is not None:
solr_record['LayerDate'] = solr_date
solr_record['LayerDateType'] = type
if bbox is not None:
solr_record['MinX'] = minX
solr_record['MinY'] = minY
solr_record['MaxX'] = maxX
solr_record['MaxY'] = maxY
solr_record['Area'] = area
solr_record['bbox'] = wkt
SolrHypermap.solr.add([solr_record])
SolrHypermap.logger.info("Solr record saved for layer with id: %s" % layer.id)
return True, None
except Exception:
SolrHypermap.logger.error("Error saving solr record for layer with id: %s - %s"
% (layer.id, sys.exc_info()[1]))
return False, sys.exc_info()[1]
@staticmethod
def clear_solr():
"""Clear all indexes in the solr core"""
SolrHypermap.solr.delete(q='*:*')
print 'Solr core cleared'
| null | null | null | null |
[
0
] |
2,255 |
8180dac5d33334d7f16ab6bef41f1fe800879ca7
|
<mask token>
class TheguardianSpider(scrapy.Spider):
<mask token>
<mask token>
<mask token>
<mask token>
def article(self, response):
brexit_news = BrexitNewsItem()
title = response.xpath('string(//h1[@data-trackable="header"])'
).extract_first().strip()
brexit_news['title'] = title
text = ''
for sel in response.xpath(
'//div[contains(@class,"article__content-body")]//p'):
line = sel.xpath('string(.)').extract_first()
if line is not None:
text += line + '\n\n'
brexit_news['text'] = text
brexit_news['url'] = response.url
brexit_news['media'] = 'ft'
brexit_news['date'] = response.xpath(
'//time[contains(@class,"article-info__timestamp")]/@datetime'
).extract_first()[:10]
yield brexit_news
def parse(self, response):
for sel in response.xpath(
'//li[@class="search-results__list-item"]//a[@data-trackable="heading-link"]'
):
article_url = parse.urljoin(response.url, sel.xpath('@href').
extract_first())
if check_url(article_url) and 'video' not in article_url:
yield Request(article_url, self.article, cookies=self.cookies)
next_page_url = parse.urljoin(response.url, response.xpath(
'//a[@data-trackable="next-page"]/@href').extract_first())
if check_url(next_page_url):
yield Request(next_page_url, self.parse, cookies=self.cookies)
|
<mask token>
class TheguardianSpider(scrapy.Spider):
name = 'ft'
allowed_domains = ['www.ft.com']
start_urls = [
'https://www.ft.com/search?q=brexit&dateTo=2016-06-24&dateFrom=2016-06-16&sort=date'
]
cookies = {}
def article(self, response):
brexit_news = BrexitNewsItem()
title = response.xpath('string(//h1[@data-trackable="header"])'
).extract_first().strip()
brexit_news['title'] = title
text = ''
for sel in response.xpath(
'//div[contains(@class,"article__content-body")]//p'):
line = sel.xpath('string(.)').extract_first()
if line is not None:
text += line + '\n\n'
brexit_news['text'] = text
brexit_news['url'] = response.url
brexit_news['media'] = 'ft'
brexit_news['date'] = response.xpath(
'//time[contains(@class,"article-info__timestamp")]/@datetime'
).extract_first()[:10]
yield brexit_news
def parse(self, response):
for sel in response.xpath(
'//li[@class="search-results__list-item"]//a[@data-trackable="heading-link"]'
):
article_url = parse.urljoin(response.url, sel.xpath('@href').
extract_first())
if check_url(article_url) and 'video' not in article_url:
yield Request(article_url, self.article, cookies=self.cookies)
next_page_url = parse.urljoin(response.url, response.xpath(
'//a[@data-trackable="next-page"]/@href').extract_first())
if check_url(next_page_url):
yield Request(next_page_url, self.parse, cookies=self.cookies)
|
<mask token>
def check_url(url):
if url is not None:
url = url.strip()
if url != '' and url != 'None':
return True
return False
class TheguardianSpider(scrapy.Spider):
name = 'ft'
allowed_domains = ['www.ft.com']
start_urls = [
'https://www.ft.com/search?q=brexit&dateTo=2016-06-24&dateFrom=2016-06-16&sort=date'
]
cookies = {}
def article(self, response):
brexit_news = BrexitNewsItem()
title = response.xpath('string(//h1[@data-trackable="header"])'
).extract_first().strip()
brexit_news['title'] = title
text = ''
for sel in response.xpath(
'//div[contains(@class,"article__content-body")]//p'):
line = sel.xpath('string(.)').extract_first()
if line is not None:
text += line + '\n\n'
brexit_news['text'] = text
brexit_news['url'] = response.url
brexit_news['media'] = 'ft'
brexit_news['date'] = response.xpath(
'//time[contains(@class,"article-info__timestamp")]/@datetime'
).extract_first()[:10]
yield brexit_news
def parse(self, response):
for sel in response.xpath(
'//li[@class="search-results__list-item"]//a[@data-trackable="heading-link"]'
):
article_url = parse.urljoin(response.url, sel.xpath('@href').
extract_first())
if check_url(article_url) and 'video' not in article_url:
yield Request(article_url, self.article, cookies=self.cookies)
next_page_url = parse.urljoin(response.url, response.xpath(
'//a[@data-trackable="next-page"]/@href').extract_first())
if check_url(next_page_url):
yield Request(next_page_url, self.parse, cookies=self.cookies)
|
import datetime
from urllib import parse
import scrapy
from scrapy import Request
from BrexitNews.items import BrexitNewsItem
def check_url(url):
if url is not None:
url = url.strip()
if url != '' and url != 'None':
return True
return False
class TheguardianSpider(scrapy.Spider):
name = 'ft'
allowed_domains = ['www.ft.com']
start_urls = [
'https://www.ft.com/search?q=brexit&dateTo=2016-06-24&dateFrom=2016-06-16&sort=date'
]
cookies = {}
def article(self, response):
brexit_news = BrexitNewsItem()
title = response.xpath('string(//h1[@data-trackable="header"])'
).extract_first().strip()
brexit_news['title'] = title
text = ''
for sel in response.xpath(
'//div[contains(@class,"article__content-body")]//p'):
line = sel.xpath('string(.)').extract_first()
if line is not None:
text += line + '\n\n'
brexit_news['text'] = text
brexit_news['url'] = response.url
brexit_news['media'] = 'ft'
brexit_news['date'] = response.xpath(
'//time[contains(@class,"article-info__timestamp")]/@datetime'
).extract_first()[:10]
yield brexit_news
def parse(self, response):
for sel in response.xpath(
'//li[@class="search-results__list-item"]//a[@data-trackable="heading-link"]'
):
article_url = parse.urljoin(response.url, sel.xpath('@href').
extract_first())
if check_url(article_url) and 'video' not in article_url:
yield Request(article_url, self.article, cookies=self.cookies)
next_page_url = parse.urljoin(response.url, response.xpath(
'//a[@data-trackable="next-page"]/@href').extract_first())
if check_url(next_page_url):
yield Request(next_page_url, self.parse, cookies=self.cookies)
|
# -*- coding: utf-8 -*-
import datetime
from urllib import parse
import scrapy
from scrapy import Request
from BrexitNews.items import BrexitNewsItem
def check_url(url):
if url is not None:
url = url.strip()
if url != '' and url != 'None':
return True
return False
class TheguardianSpider(scrapy.Spider):
name = 'ft'
allowed_domains = ['www.ft.com']
start_urls = ['https://www.ft.com/search?q=brexit&dateTo=2016-06-24&dateFrom=2016-06-16&sort=date']
cookies = {
}
def article(self, response):
brexit_news = BrexitNewsItem()
title = response.xpath('string(//h1[@data-trackable="header"])').extract_first().strip()
brexit_news['title'] = title
text = ''
for sel in response.xpath('//div[contains(@class,"article__content-body")]//p'):
line = sel.xpath('string(.)').extract_first()
if line is not None:
text += line + '\n\n'
brexit_news['text'] = text
brexit_news['url'] = response.url
brexit_news['media'] = 'ft'
brexit_news['date'] = response.xpath('//time[contains(@class,"article-info__timestamp")]/@datetime').extract_first()[:10]
# print(brexit_news)
yield brexit_news
def parse(self, response):
for sel in response.xpath('//li[@class="search-results__list-item"]//a[@data-trackable="heading-link"]'):
article_url = parse.urljoin(response.url, sel.xpath('@href').extract_first())
if check_url(article_url) and 'video' not in article_url:
yield Request(article_url, self.article, cookies=self.cookies)
# handle every page
next_page_url = parse.urljoin(response.url, response.xpath('//a[@data-trackable="next-page"]/@href').extract_first())
if check_url(next_page_url):
yield Request(next_page_url, self.parse, cookies=self.cookies)
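# Editor's usage sketch (assumptions: the standard scrapy project layout the
# imports imply, and that FT's paywall is satisfied by filling the empty
# `cookies` dict with a valid session). From the project root:
#
#   scrapy crawl ft -o brexit_ft.jl
#
# The -o flag appends every yielded BrexitNewsItem to a JSON-lines file.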
|
[
3,
4,
5,
6,
7
] |
2,256 |
6297256bce1954f041915a1ce0aa0546689850f3
|
<mask token>
|
<mask token>
if len(numero) < 8:
while len(numero) < 8:
numero = '3' + numero
numero = numero[:4] + '-' + numero[4:]
print('Numero: ', numero)
elif len(numero) > 8:
print('Numero invalido')
|
numero = input('Digite um numero de telefone: ')
numero = numero.replace('-', '')
if len(numero) < 8:
while len(numero) < 8:
numero = '3' + numero
numero = numero[:4] + '-' + numero[4:]
print('Numero: ', numero)
elif len(numero) > 8:
print('Numero invalido')
|
# Made by Kelvin Schneider
#12
numero = input("Digite um numero de telefone: ")
numero = numero.replace("-","")
if (len(numero) < 8):
while len(numero) < 8:
numero = "3" + numero
numero = numero[:4] + "-" + numero[4:]
print("Numero: ", numero)
elif (len(numero) > 8):
print("Numero invalido")
| null |
[
0,
1,
2,
3
] |
2,257 |
4e38ad17ad66ac71b0df3cbcaa33cb546e96ce9d
|
import pymel.core as PM
import maya.cmds as cmds
import socket
def getShadingGroupMembership():
'''
Get a dictionary of shading group set information
{'shadingGroup': [assignmnet1, assignment2...]}
'''
result = {}
#sgs = PM.ls(sl= 1, et='shadingEngine')
sgs = PM.listConnections(s= 1, t='shadingEngine')
for sg in sgs:
result[sg.name()] = sg.members(flatten=True)
return result
def remoteMaye(msg):
global maya
maya.send(msg)
def vmtl_nameMap(name):
whiteList = ['woman_Rig:surfaceShader1',
'lady_Rig:surfaceShader1',
'richman_rigging_master:richman_spot',
'oldman_Rig:surfaceShader1']
if name == 'oldman_Rig:VRayMtl2':
name = 'richPeopleSuck:oldman_cloth_vmtl'
if name == 'oldman_Rig:VRayMtl3':
name = 'richPeopleSuck:oldman_skin_vmtl'
if name == 'oldman_Rig:VRayMtl4':
name = 'richPeopleSuck:oldman_glass_vmtl'
if name == 'lady_Rig:VRayMtl2':
name = 'richPeopleSuck:lady_cloth_vmtl'
if name == 'lady_Rig:VRayMtl1':
name = 'richPeopleSuck:lady_skin_vmtl'
if name == 'woman_Rig:VRayMtl1':
name = 'richPeopleSuck:woman_cloth_vmtl'
if name == 'woman_Rig:VRayMtl2':
name = 'richPeopleSuck:woman_skin_vmtl'
if name == 'richman_rigging_master:VRayMtl2':
name = 'richPeopleSuck:richman_cloth_vmtl'
if name == 'richman_rigging_master:VRayMtl1':
name = 'richPeopleSuck:richman_skin_vmtl'
if name == 'richman_rigging_master:surfaceShader3':
name = 'richPeopleSuck:maneye_black_surface'
if name in whiteList:
name = 'richPeopleSuck:maneye_white_surface'
return name
def doJob(port):
host = "127.0.0.1"
global maya
maya = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
maya.connect( (host, port) )
mtlDict = getShadingGroupMembership()
for meshList in mtlDict.keys():
vmtl = cmds.listConnections(meshList + '.surfaceShader', s= 1)[0]
if mtlDict[meshList]:
for mesh in mtlDict[meshList]:
msg = ''
target = ''
if '.' in str(mesh):
faceList = []
faceStr = str(mesh).split('.f')[1].replace('[', '').replace(']', '')
if ',' in faceStr:
faceList = faceStr.split(',')
else:
faceList = [faceStr]
for face in faceList:
target = str(mesh).split('.')[0] + '.f[' + face + ']'
try:
msg += 'cmds.select("' + target + '", r= 1)\n'
msg += 'cmds.hyperShade(a= "' + vmtl_nameMap(vmtl) + '")\n'
except:
if len(target.split(':')) > 1:
target_1 = ':'.join(target.split(':')[0:2]) + ']'
target_2 = ':'.join([target.split(':')[0], target.split(':')[2]])
try:
msg += 'cmds.select("' + target_1 + '", r= 1)\n'
msg += 'cmds.hyperShade(a= "' + vmtl_nameMap(vmtl) + '")\n'
except:
                                    print('+++++++++++++++++++++++++++++++++++++\n+++++++++++++++++++++++++++++++++++++')
else:
target = str(mesh)
msg += 'cmds.select("' + target + '", r= 1)\n'
msg += 'cmds.hyperShade(a= "' + vmtl_nameMap(vmtl) + '")\n'
remoteMaye(msg)
maya.close()
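# Editor's note (assumption: the target is a second Maya session listening on
# `port`). That session must open a command port before doJob() can stream
# the cmds.select/cmds.hyperShade calls to it, e.g. run there first:
#
#   import maya.cmds as cmds
#   cmds.commandPort(name=':7005', sourceType='python')
#
# after which doJob(7005) replays every shading assignment remotely.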
| null | null | null | null |
[
0
] |
2,258 |
b186ae7a48afbb70edf3be0d9697deed4f31e542
|
<mask token>
|
<mask token>
print(response.geturl())
<mask token>
with open('bgdGW.html', 'w', encoding='utf-8') as fp:
fp.write(html)
print(response.geturl())
|
<mask token>
response = urllib.request.urlopen('http://www.gengdan.cn/')
print(response.geturl())
html = response.read().decode('UTF-8')
with open('bgdGW.html', 'w', encoding='utf-8') as fp:
fp.write(html)
print(response.geturl())
|
<mask token>
import urllib.request
response = urllib.request.urlopen('http://www.gengdan.cn/')
print(response.geturl())
html = response.read().decode('UTF-8')
with open('bgdGW.html', 'w', encoding='utf-8') as fp:
fp.write(html)
print(response.geturl())
|
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
# @Project :experiment9
# @File :text1
# @Date :2020/10/28 09:13
# @Author :施嘉伟
# @Email :[email protected]
# @Software :PyCharm
-------------------------------------------------
"""
import urllib.request
# Send the request and get the response
response=urllib.request.urlopen("http://www.gengdan.cn/")
print(response.geturl())
html = response.read().decode("UTF-8")
with open("bgdGW.html",'w',encoding="utf-8")as fp:
fp.write(html)
print(response.geturl())
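# A hedged variant with basic error handling (sketch, not in the original):
#
#   import urllib.error
#   try:
#       response = urllib.request.urlopen('http://www.gengdan.cn/', timeout=10)
#   except urllib.error.URLError as err:
#       print('request failed:', err.reason)
#
# urlopen() raises URLError on network failures and HTTPError on 4xx/5xx.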
|
[
0,
1,
2,
3,
4
] |
2,259 |
63be96c0d1231f836bbec9ce93f06bda32775511
|
<mask token>
def convert(lst: list) ->list():
"""String Unicode to int"""
l = list()
for item in lst:
l.append(ord(item))
return l
<mask token>
def write(filename: str, data: list, header: list):
name = filename.split('.')
filename = name[0] + '_out.' + name[1]
f = open(filename, 'w')
f.write('')
f.close()
s = ''
with open(filename, 'w', encoding='ISO-8859-1') as file:
for h in header:
s += h
for d in data:
s += str(d)
file.write(s)
file.close()
|
<mask token>
def readfile(filename: str) ->tuple:
"""read given pgm file"""
col = 0
row = 0
lst = list()
with open(filename, 'rb') as file:
header = list()
ls = list()
header.append(file.readline().decode('utf-8'))
while True:
line = file.readline().decode('utf-8')
if not line:
break
elif line[0] == '#':
continue
else:
header.append(line)
ss = str(line)
l = re.findall('\\d+', ss)
col = int(l[0])
row = int(l[1])
break
header.append(file.readline().decode('utf-8'))
n = col * row
lst = list()
for i in range(n):
try:
lst.append(ord(file.read(1)))
except:
pass
file.close()
return header, lst, [col, row]
def convert(lst: list) ->list():
"""String Unicode to int"""
l = list()
for item in lst:
l.append(ord(item))
return l
<mask token>
def write(filename: str, data: list, header: list):
name = filename.split('.')
filename = name[0] + '_out.' + name[1]
f = open(filename, 'w')
f.write('')
f.close()
s = ''
with open(filename, 'w', encoding='ISO-8859-1') as file:
for h in header:
s += h
for d in data:
s += str(d)
file.write(s)
file.close()
|
<mask token>
def readfile(filename: str) ->tuple:
"""read given pgm file"""
col = 0
row = 0
lst = list()
with open(filename, 'rb') as file:
header = list()
ls = list()
header.append(file.readline().decode('utf-8'))
while True:
line = file.readline().decode('utf-8')
if not line:
break
elif line[0] == '#':
continue
else:
header.append(line)
ss = str(line)
l = re.findall('\\d+', ss)
col = int(l[0])
row = int(l[1])
break
header.append(file.readline().decode('utf-8'))
n = col * row
lst = list()
for i in range(n):
try:
lst.append(ord(file.read(1)))
except:
pass
file.close()
return header, lst, [col, row]
def convert(lst: list) ->list():
"""String Unicode to int"""
l = list()
for item in lst:
l.append(ord(item))
return l
def writeNumeric(filename: str, data: list, header: list, dimension: list):
"""write pgm file in numeric format (P2 as a header)"""
name = filename.split('.')
filename = name[0] + '_out.' + name[1]
f = open(filename, 'w')
f.write('')
f.close()
col = dimension[0]
row = dimension[1]
s = ''
with open(filename, 'w', encoding='ISO-8859-1') as file:
header[0] = 'P2\n'
for h in header:
s += h
for i in range(row):
for j in range(col):
try:
index = i * col + j
s += str(data[index])
if j < col - 1:
s += ' '
except:
pass
s += '\n'
file.write(s)
file.close()
def write(filename: str, data: list, header: list):
name = filename.split('.')
filename = name[0] + '_out.' + name[1]
f = open(filename, 'w')
f.write('')
f.close()
s = ''
with open(filename, 'w', encoding='ISO-8859-1') as file:
for h in header:
s += h
for d in data:
s += str(d)
file.write(s)
file.close()
|
import re
import numpy as np
def readfile(filename: str) ->tuple:
"""read given pgm file"""
col = 0
row = 0
lst = list()
with open(filename, 'rb') as file:
header = list()
ls = list()
header.append(file.readline().decode('utf-8'))
while True:
line = file.readline().decode('utf-8')
if not line:
break
elif line[0] == '#':
continue
else:
header.append(line)
ss = str(line)
l = re.findall('\\d+', ss)
col = int(l[0])
row = int(l[1])
break
header.append(file.readline().decode('utf-8'))
n = col * row
lst = list()
for i in range(n):
try:
lst.append(ord(file.read(1)))
except:
pass
file.close()
return header, lst, [col, row]
def convert(lst: list) ->list():
"""String Unicode to int"""
l = list()
for item in lst:
l.append(ord(item))
return l
def writeNumeric(filename: str, data: list, header: list, dimension: list):
"""write pgm file in numeric format (P2 as a header)"""
name = filename.split('.')
filename = name[0] + '_out.' + name[1]
f = open(filename, 'w')
f.write('')
f.close()
col = dimension[0]
row = dimension[1]
s = ''
with open(filename, 'w', encoding='ISO-8859-1') as file:
header[0] = 'P2\n'
for h in header:
s += h
for i in range(row):
for j in range(col):
try:
index = i * col + j
s += str(data[index])
if j < col - 1:
s += ' '
except:
pass
s += '\n'
file.write(s)
file.close()
def write(filename: str, data: list, header: list):
name = filename.split('.')
filename = name[0] + '_out.' + name[1]
f = open(filename, 'w')
f.write('')
f.close()
s = ''
with open(filename, 'w', encoding='ISO-8859-1') as file:
for h in header:
s += h
for d in data:
s += str(d)
file.write(s)
file.close()
|
import re
import numpy as np
# only read pgm file
def readfile(filename:str)->tuple:
'''read given pgm file'''
col = 0
row = 0
lst = list()
with open(filename, 'rb') as file:
header = list()
ls = list()
# remove first line
header.append((file.readline()).decode("utf-8"))
while True:
line = (file.readline()).decode("utf-8")
if not line:
break
elif(line[0] == '#'):
continue
else:
header.append(line)
ss = str(line)
l = re.findall(r'\d+', ss)
col = int(l[0])
row = int(l[1])
break
header.append((file.readline()).decode("utf-8"))
n = col*row
lst = list()
for i in range(n):
try:
lst.append(ord(file.read(1)))
except:
pass
file.close()
return header, lst, [col, row]
#convert list
def convert(lst:list)->list():
'''String Unicode to int'''
l = list()
for item in lst:
l.append(ord(item))
return l
def writeNumeric(filename:str, data:list, header:list, dimension:list):
'''write pgm file in numeric format (P2 as a header)'''
# clear file if exists
name = filename.split('.')
filename = name[0]+'_out.'+name[1]
f = open(filename, 'w')
f.write('')
f.close()
col = dimension[0]
row = dimension[1]
s = ''
# write new file
with open(filename, 'w', encoding='ISO-8859-1') as file:
header[0] = 'P2\n'
for h in header:
# decoding
s += h
for i in range(row):
for j in range(col):
try:
index = i*col + j
s += str(data[index])
if j < col -1:
s += ' '
except:
# print(i)
# print(j)
pass
s += '\n'
file.write(s)
file.close()
def write(filename:str, data:list, header:list):
# clear file if exists
name = filename.split('.')
filename = name[0]+'_out.'+name[1]
f = open(filename, 'w')
f.write('')
f.close()
s = ''
# write new file
with open(filename, 'w', encoding='ISO-8859-1') as file:
for h in header:
# decoding
s += h
for d in data:
s += str(d)
file.write(s)
file.close()
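# Round-trip sketch for the helpers above (the filename is illustrative and
# assumes a binary "P5" input):
#
#   header, data, dim = readfile('lena.pgm')
#   writeNumeric('lena.pgm', data, header, dim)  # lena_out.pgm, ASCII "P2"
#   write('lena.pgm', data, header)              # lena_out.pgm, raw header
#
# Note that write() emits each pixel as decimal text via str(d), so its
# output is not a valid binary P5 body.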
|
[
2,
3,
4,
5,
6
] |
2,260 |
f960c95afe1f7a161e0144bb523bfaca117ae61e
|
<mask token>
|
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
def find_packages():
return ['sqlpython']
<mask token>
setup(name='sqlpython', version='1.7.3', description=
'Command-line interface to Oracle', long_description=
"Customizable alternative to Oracle's SQL*PLUS command-line interface",
author='Luca Canali', author_email='[email protected]', url=
'http://packages.python.org/sqlpython', packages=find_packages(),
include_package_data=True, install_requires=['pyparsing', 'cmd2==0.6.3',
'gerald>=0.4.1.1', 'genshi==0.6'], extras_require={'oracle': [
'cx_Oracle==6.1'], 'postgres': ['psycopg2']}, keywords=
'client oracle database', license='MIT', platforms=['any'],
entry_points=
"""
[console_scripts]
sqlpython = sqlpython.mysqlpy:run
editplot_sqlpython = sqlpython.editplot.bash"""
)
|
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
def find_packages():
return ['sqlpython']
classifiers = (
"""Development Status :: 4 - Beta
Intended Audience :: Information Technology
License :: OSI Approved :: MIT License
Programming Language :: Python
Programming Language :: SQL
Topic :: Database :: Front-Ends
Operating System :: OS Independent"""
.splitlines())
setup(name='sqlpython', version='1.7.3', description=
'Command-line interface to Oracle', long_description=
"Customizable alternative to Oracle's SQL*PLUS command-line interface",
author='Luca Canali', author_email='[email protected]', url=
'http://packages.python.org/sqlpython', packages=find_packages(),
include_package_data=True, install_requires=['pyparsing', 'cmd2==0.6.3',
'gerald>=0.4.1.1', 'genshi==0.6'], extras_require={'oracle': [
'cx_Oracle==6.1'], 'postgres': ['psycopg2']}, keywords=
'client oracle database', license='MIT', platforms=['any'],
entry_points=
"""
[console_scripts]
sqlpython = sqlpython.mysqlpy:run
editplot_sqlpython = sqlpython.editplot.bash"""
)
|
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
def find_packages():
return ['sqlpython']
classifiers = """Development Status :: 4 - Beta
Intended Audience :: Information Technology
License :: OSI Approved :: MIT License
Programming Language :: Python
Programming Language :: SQL
Topic :: Database :: Front-Ends
Operating System :: OS Independent""".splitlines()
setup(name="sqlpython",
version="1.7.3",
description="Command-line interface to Oracle",
long_description="Customizable alternative to Oracle's SQL*PLUS command-line interface",
author="Luca Canali",
author_email="[email protected]",
url="http://packages.python.org/sqlpython",
packages=find_packages(),
include_package_data=True,
install_requires=['pyparsing','cmd2==0.6.3','gerald>=0.4.1.1',
'genshi==0.6'],
extras_require = {
'oracle': ['cx_Oracle==6.1'],
'postgres': ['psycopg2'],
},
keywords = 'client oracle database',
license = 'MIT',
platforms = ['any'],
entry_points = """
[console_scripts]
sqlpython = sqlpython.mysqlpy:run
editplot_sqlpython = sqlpython.editplot.bash"""
)
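# Install sketch: the optional drivers in extras_require are pulled in with
# pip's extras syntax, e.g.
#
#   pip install ".[oracle]"     # adds cx_Oracle==6.1
#   pip install ".[postgres]"   # adds psycopg2
#
# The `classifiers` list built above is never handed to setup(); passing
# classifiers=classifiers would be needed for it to reach package metadata.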
| null |
[
0,
1,
2,
3
] |
2,261 |
53e397068fcf88bbbce4dcc1bf1b441a2fbbee48
|
<mask token>
|
<mask token>
print(max(d1.values()))
print(min(d1.values()))
|
d1 = {(6): 10, (2): 20, (5): 30, (4): 40, (1): 50, (3): 60}
print(max(d1.values()))
print(min(d1.values()))
|
#Write a Python program to get the maximum and minimum value in a dictionary.
d1={6: 10, 2: 20, 5: 30, 4: 40, 1: 50, 3: 60}
print(max(d1.values()))
print(min(d1.values()))
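# Related one-liners (a sketch going one step beyond the source): to get the
# *keys* that hold the extreme values rather than the values themselves:
#
#   max(d1, key=d1.get)   # -> 3, whose value 60 is the largest
#   min(d1, key=d1.get)   # -> 6, whose value 10 is the smallest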
| null |
[
0,
1,
2,
3
] |
2,262 |
8560c0068eff894e5aa1d0788bd9e5ad05c14997
|
<mask token>
class ThermalSpectrum(Spectrum):
<mask token>
<mask token>
<mask token>
<mask token>
@staticmethod
def units_string():
return '1/erg/cm^3'
def integrate(self, units=True, e_weight=0):
""" Integrate the thermal spectrum from emin to emax.
Returns the integral in units of [erg^e_weight/cm^-3] """
int = logsimps(lambda e: e ** e_weight * self(e, units=False), self
.emin, self.emax, sed_config.PER_DECADE)
return int * (u.erg ** (e_weight + 1) * self.units() if units else 1)
class BlackBody(ThermalSpectrum):
@staticmethod
def compute_energy_density(kT):
""" Comparing the formula for a blackbody spectrum
with prefactor
pref = 8pi/(hc)^3
            to the formula for a general thermal spectrum:
pref = 15*U/(pi*kT)^4,
we find that for a blackbody spectrum,
we have a thermal spectrum with
U = (8*pi/(hc)^3)*(pi*kT)^4/15. """
h = u.planck
c = u.speed_of_light
pi = np.pi
return 8 * pi / (h * c) ** 3 * ((pi * kT) ** 4 / 15)
def __init__(self, kT=None, T=None):
""" Implement a blackbody spectrum.
The formula for the blackbody spectrum is
n(E)=((8pi)/(hc)^3)*E^2/(exp(E/kT)-1)
where
* n(E) is the number of photons per unit energy per unit volume,
* kT is the temperature of the photons
This formula is on the top of page 208 in R&L
"""
if kT is not None:
kT = kT
elif T is not None:
kT = u.boltzmann * T
else:
raise Exception('kT or k must be passed to BlackBody')
energy_density = BlackBody.compute_energy_density(kT)
super(BlackBody, self).__init__(energy_density=energy_density, kT=kT)
class CMB(BlackBody):
""" The CMB is a blackbody spectrum with temperature 2.725K.
Note, the energy density for a CMB spectrum is 0.26 eV/cm^3:
>>> cmb = CMB()
>>> print u.repr(cmb.integrate(units=True,e_weight=1),'eV/cm^3','%.2f')
0.26 eV/cm^3
"""
def __init__(self):
super(CMB, self).__init__(T=2.725 * u.kelvin)
<mask token>
|
<mask token>
class ThermalSpectrum(Spectrum):
<mask token>
def __init__(self, energy_density, kT=None, T=None):
""" A thermal spectrum has the sameself):
spectral shape as the blackbody
spectrum but has an arbitrarily
normalizable energy density.
The thermal spectrum is
n(E) = 15*U/(pi*kT)^4*E^2/(exp(E/kT)-1)
where
* n(E) is the number of photons per unit energy per unit volume,
* U is the total energy per unit volume.
* kT is the temperature of the photons
This formula is equation 33 from Sturner et al 1997
http://iopscience.iop.org/0004-637X/490/2/619/pdf/35841.pdf
Input can be either 'kT' in energy units or
'T' in temperature units.
For example, in XXX et al, the infrared photon
field has temperature kT=3e-3 eV and energy
density U=0.9 eV/cm^3
>>> infrared=ThermalSpectrum(kT=3e-3*u.eV, energy_density=0.9*u.eV/u.cm**3)
To convince yourself that this code correctly normalized
        the spectrum, you can explicitly integrate E*dN/dE = total energy per unit volume:
>>> print u.repr(infrared.integrate(units=True,e_weight=1),'eV/cm^3','%.2f')
0.90 eV/cm^3
"""
if kT is not None:
kT = kT
elif T is not None:
            kT = u.boltzmann * T
else:
raise Exception('kT or k must be passed to ThermalSpectrum')
self.kT = float(kT / u.erg)
self.emin = 0.0001 * self.kT
self.emax = 100.0 * self.kT
self.pref = 15 * energy_density / (np.pi * kT) ** 4
self.pref = float(self.pref / (u.erg ** -3 * u.cm ** -3))
@staticmethod
def occupation_number(x):
""" This is equation 1.49 in R&L. """
return 1 / (np.exp(x) - 1)
def _spectrum(self, energy):
""" Return the energy density in units of [1/erg/cm^-3]."""
return self.pref * energy ** 2 * self.occupation_number(energy /
self.kT)
@staticmethod
def units_string():
return '1/erg/cm^3'
def integrate(self, units=True, e_weight=0):
""" Integrate the thermal spectrum from emin to emax.
Returns the integral in units of [erg^e_weight/cm^-3] """
int = logsimps(lambda e: e ** e_weight * self(e, units=False), self
.emin, self.emax, sed_config.PER_DECADE)
return int * (u.erg ** (e_weight + 1) * self.units() if units else 1)
class BlackBody(ThermalSpectrum):
@staticmethod
def compute_energy_density(kT):
""" Comparing the formula for a blackbody spectrum
with prefactor
pref = 8pi/(hc)^3
            to the formula for a general thermal spectrum:
pref = 15*U/(pi*kT)^4,
we find that for a blackbody spectrum,
we have a thermal spectrum with
U = (8*pi/(hc)^3)*(pi*kT)^4/15. """
h = u.planck
c = u.speed_of_light
pi = np.pi
return 8 * pi / (h * c) ** 3 * ((pi * kT) ** 4 / 15)
def __init__(self, kT=None, T=None):
""" Implement a blackbody spectrum.
The formula for the blackbody spectrum is
n(E)=((8pi)/(hc)^3)*E^2/(exp(E/kT)-1)
where
* n(E) is the number of photons per unit energy per unit volume,
* kT is the temperature of the photons
This formula is on the top of page 208 in R&L
"""
if kT is not None:
kT = kT
elif T is not None:
kT = u.boltzmann * T
else:
raise Exception('kT or k must be passed to BlackBody')
energy_density = BlackBody.compute_energy_density(kT)
super(BlackBody, self).__init__(energy_density=energy_density, kT=kT)
class CMB(BlackBody):
""" The CMB is a blackbody spectrum with temperature 2.725K.
Note, the energy density for a CMB spectrum is 0.26 eV/cm^3:
>>> cmb = CMB()
>>> print u.repr(cmb.integrate(units=True,e_weight=1),'eV/cm^3','%.2f')
0.26 eV/cm^3
"""
def __init__(self):
super(CMB, self).__init__(T=2.725 * u.kelvin)
<mask token>
|
<mask token>
class ThermalSpectrum(Spectrum):
vectorized = True
def __init__(self, energy_density, kT=None, T=None):
""" A thermal spectrum has the sameself):
spectral shape as the blackbody
spectrum but has an arbitrarily
normalizable energy density.
The thermal spectrum is
n(E) = 15*U/(pi*kT)^4*E^2/(exp(E/kT)-1)
where
* n(E) is the number of photons per unit energy per unit volume,
* U is the total energy per unit volume.
* kT is the temperature of the photons
This formula is equation 33 from Sturner et al 1997
http://iopscience.iop.org/0004-637X/490/2/619/pdf/35841.pdf
Input can be either 'kT' in energy units or
'T' in temperature units.
For example, in XXX et al, the infrared photon
field has temperature kT=3e-3 eV and energy
density U=0.9 eV/cm^3
>>> infrared=ThermalSpectrum(kT=3e-3*u.eV, energy_density=0.9*u.eV/u.cm**3)
To convince yourself that this code correctly normalized
        the spectrum, you can explicitly integrate E*dN/dE = total energy per unit volume:
>>> print u.repr(infrared.integrate(units=True,e_weight=1),'eV/cm^3','%.2f')
0.90 eV/cm^3
"""
if kT is not None:
kT = kT
elif T is not None:
            kT = u.boltzmann * T
else:
raise Exception('kT or k must be passed to ThermalSpectrum')
self.kT = float(kT / u.erg)
self.emin = 0.0001 * self.kT
self.emax = 100.0 * self.kT
self.pref = 15 * energy_density / (np.pi * kT) ** 4
self.pref = float(self.pref / (u.erg ** -3 * u.cm ** -3))
@staticmethod
def occupation_number(x):
""" This is equation 1.49 in R&L. """
return 1 / (np.exp(x) - 1)
def _spectrum(self, energy):
""" Return the energy density in units of [1/erg/cm^-3]."""
return self.pref * energy ** 2 * self.occupation_number(energy /
self.kT)
@staticmethod
def units_string():
return '1/erg/cm^3'
def integrate(self, units=True, e_weight=0):
""" Integrate the thermal spectrum from emin to emax.
Returns the integral in units of [erg^e_weight/cm^-3] """
int = logsimps(lambda e: e ** e_weight * self(e, units=False), self
.emin, self.emax, sed_config.PER_DECADE)
return int * (u.erg ** (e_weight + 1) * self.units() if units else 1)
class BlackBody(ThermalSpectrum):
@staticmethod
def compute_energy_density(kT):
""" Comparing the formula for a blackbody spectrum
with prefactor
pref = 8pi/(hc)^3
            to the formula for a general thermal spectrum:
pref = 15*U/(pi*kT)^4,
we find that for a blackbody spectrum,
we have a thermal spectrum with
U = (8*pi/(hc)^3)*(pi*kT)^4/15. """
h = u.planck
c = u.speed_of_light
pi = np.pi
return 8 * pi / (h * c) ** 3 * ((pi * kT) ** 4 / 15)
def __init__(self, kT=None, T=None):
""" Implement a blackbody spectrum.
The formula for the blackbody spectrum is
n(E)=((8pi)/(hc)^3)*E^2/(exp(E/kT)-1)
where
* n(E) is the number of photons per unit energy per unit volume,
* kT is the temperature of the photons
This formula is on the top of page 208 in R&L
"""
if kT is not None:
kT = kT
elif T is not None:
kT = u.boltzmann * T
else:
raise Exception('kT or k must be passed to BlackBody')
energy_density = BlackBody.compute_energy_density(kT)
super(BlackBody, self).__init__(energy_density=energy_density, kT=kT)
class CMB(BlackBody):
""" The CMB is a blackbody spectrum with temperature 2.725K.
Note, the energy density for a CMB spectrum is 0.26 eV/cm^3:
>>> cmb = CMB()
>>> print u.repr(cmb.integrate(units=True,e_weight=1),'eV/cm^3','%.2f')
0.26 eV/cm^3
"""
def __init__(self):
super(CMB, self).__init__(T=2.725 * u.kelvin)
<mask token>
|
<mask token>
class ThermalSpectrum(Spectrum):
vectorized = True
def __init__(self, energy_density, kT=None, T=None):
""" A thermal spectrum has the sameself):
spectral shape as the blackbody
spectrum but has an arbitrarily
normalizable energy density.
The thermal spectrum is
n(E) = 15*U/(pi*kT)^4*E^2/(exp(E/kT)-1)
where
* n(E) is the number of photons per unit energy per unit volume,
* U is the total energy per unit volume.
* kT is the temperature of the photons
This formula is equation 33 from Sturner et al 1997
http://iopscience.iop.org/0004-637X/490/2/619/pdf/35841.pdf
Input can be either 'kT' in energy units or
'T' in temperature units.
For example, in XXX et al, the infrared photon
field has temperature kT=3e-3 eV and energy
density U=0.9 eV/cm^3
>>> infrared=ThermalSpectrum(kT=3e-3*u.eV, energy_density=0.9*u.eV/u.cm**3)
To convince yourself that this code correctly normalized
        the spectrum, you can explicitly integrate E*dN/dE = total energy per unit volume:
>>> print u.repr(infrared.integrate(units=True,e_weight=1),'eV/cm^3','%.2f')
0.90 eV/cm^3
"""
if kT is not None:
kT = kT
elif T is not None:
            kT = u.boltzmann * T
else:
raise Exception('kT or k must be passed to ThermalSpectrum')
self.kT = float(kT / u.erg)
self.emin = 0.0001 * self.kT
self.emax = 100.0 * self.kT
self.pref = 15 * energy_density / (np.pi * kT) ** 4
self.pref = float(self.pref / (u.erg ** -3 * u.cm ** -3))
@staticmethod
def occupation_number(x):
""" This is equation 1.49 in R&L. """
return 1 / (np.exp(x) - 1)
def _spectrum(self, energy):
""" Return the energy density in units of [1/erg/cm^-3]."""
return self.pref * energy ** 2 * self.occupation_number(energy /
self.kT)
@staticmethod
def units_string():
return '1/erg/cm^3'
def integrate(self, units=True, e_weight=0):
""" Integrate the thermal spectrum from emin to emax.
Returns the integral in units of [erg^e_weight/cm^-3] """
int = logsimps(lambda e: e ** e_weight * self(e, units=False), self
.emin, self.emax, sed_config.PER_DECADE)
return int * (u.erg ** (e_weight + 1) * self.units() if units else 1)
class BlackBody(ThermalSpectrum):
@staticmethod
def compute_energy_density(kT):
""" Comparing the formula for a blackbody spectrum
with prefactor
pref = 8pi/(hc)^3
            to the formula for a general thermal spectrum:
pref = 15*U/(pi*kT)^4,
we find that for a blackbody spectrum,
we have a thermal spectrum with
U = (8*pi/(hc)^3)*(pi*kT)^4/15. """
h = u.planck
c = u.speed_of_light
pi = np.pi
return 8 * pi / (h * c) ** 3 * ((pi * kT) ** 4 / 15)
def __init__(self, kT=None, T=None):
""" Implement a blackbody spectrum.
The formula for the blackbody spectrum is
n(E)=((8pi)/(hc)^3)*E^2/(exp(E/kT)-1)
where
* n(E) is the number of photons per unit energy per unit volume,
* kT is the temperature of the photons
This formula is on the top of page 208 in R&L
"""
if kT is not None:
kT = kT
elif T is not None:
kT = u.boltzmann * T
else:
raise Exception('kT or k must be passed to BlackBody')
energy_density = BlackBody.compute_energy_density(kT)
super(BlackBody, self).__init__(energy_density=energy_density, kT=kT)
class CMB(BlackBody):
""" The CMB is a blackbody spectrum with temperature 2.725K.
Note, the energy density for a CMB spectrum is 0.26 eV/cm^3:
>>> cmb = CMB()
>>> print u.repr(cmb.integrate(units=True,e_weight=1),'eV/cm^3','%.2f')
0.26 eV/cm^3
"""
def __init__(self):
super(CMB, self).__init__(T=2.725 * u.kelvin)
if __name__ == '__main__':
import doctest
doctest.testmod()
|
""" sed_thermal.py
Author: Joshua Lande <[email protected]>
"""
import numpy as np
from scipy import integrate
from . sed_integrate import logsimps
from . sed_spectrum import Spectrum
from . import sed_config
from . import units as u
class ThermalSpectrum(Spectrum):
vectorized = True
def __init__(self, energy_density, kT=None, T=None):
""" A thermal spectrum has the sameself):
spectral shape as the blackbody
spectrum but has an arbitrarily
normalizable energy density.
The thermal spectrum is
n(E) = 15*U/(pi*kT)^4*E^2/(exp(E/kT)-1)
where
* n(E) is the number of photons per unit energy per unit volume,
* U is the total energy per unit volume.
* kT is the temperature of the photons
This formula is equation 33 from Sturner et al 1997
http://iopscience.iop.org/0004-637X/490/2/619/pdf/35841.pdf
Input can be either 'kT' in energy units or
'T' in temperature units.
For example, in XXX et al, the infrared photon
field has temperature kT=3e-3 eV and energy
density U=0.9 eV/cm^3
>>> infrared=ThermalSpectrum(kT=3e-3*u.eV, energy_density=0.9*u.eV/u.cm**3)
To convince yourself that this code correctly normalized
        the spectrum, you can explicitly integrate E*dN/dE = total energy per unit volume:
>>> print u.repr(infrared.integrate(units=True,e_weight=1),'eV/cm^3','%.2f')
0.90 eV/cm^3
"""
if kT is not None: kT = kT
        elif T is not None: kT = u.boltzmann*T
else: raise Exception("kT or k must be passed to ThermalSpectrum")
self.kT = float(kT/u.erg)
# function is essentially 0 outside of this energy range.
self.emin=1e-4*self.kT
self.emax=1e2*self.kT
# equation 33 in Sturner et al 1997
# Note, prefactor*E^2/(exp(E/kT)-1) has units
# of photons/energy/volume, so prefactor has units
# of photons/energy^3/volume.
self.pref = 15*energy_density/(np.pi*kT)**4
self.pref = float(self.pref/(u.erg**-3*u.cm**-3))
@staticmethod
def occupation_number(x):
""" This is equation 1.49 in R&L. """
return 1/(np.exp(x)-1)
def _spectrum(self, energy):
""" Return the energy density in units of [1/erg/cm^-3]."""
return self.pref*energy**2*self.occupation_number(energy/self.kT)
@staticmethod
def units_string(): return '1/erg/cm^3'
def integrate(self, units=True, e_weight=0):
""" Integrate the thermal spectrum from emin to emax.
Returns the integral in units of [erg^e_weight/cm^-3] """
int = logsimps(lambda e: e**e_weight*self(e, units=False), self.emin, self.emax, sed_config.PER_DECADE)
return int*(u.erg**(e_weight+1)*self.units() if units else 1)
class BlackBody(ThermalSpectrum):
@staticmethod
def compute_energy_density(kT):
""" Comparing the formula for a blackbody spectrum
with prefactor
pref = 8pi/(hc)^3
            to the formula for a general thermal spectrum:
pref = 15*U/(pi*kT)^4,
we find that for a blackbody spectrum,
we have a thermal spectrum with
U = (8*pi/(hc)^3)*(pi*kT)^4/15. """
h=u.planck
c=u.speed_of_light
pi=np.pi
return (8*pi/(h*c)**3)*((pi*kT)**4/15)
def __init__(self,kT=None,T=None):
""" Implement a blackbody spectrum.
The formula for the blackbody spectrum is
n(E)=((8pi)/(hc)^3)*E^2/(exp(E/kT)-1)
where
* n(E) is the number of photons per unit energy per unit volume,
* kT is the temperature of the photons
This formula is on the top of page 208 in R&L
"""
if kT is not None: kT = kT
elif T is not None: kT = u.boltzmann*T
else: raise Exception("kT or k must be passed to BlackBody")
energy_density=BlackBody.compute_energy_density(kT)
super(BlackBody,self).__init__(energy_density=energy_density, kT=kT)
class CMB(BlackBody):
""" The CMB is a blackbody spectrum with temperature 2.725K.
Note, the energy density for a CMB spectrum is 0.26 eV/cm^3:
>>> cmb = CMB()
>>> print u.repr(cmb.integrate(units=True,e_weight=1),'eV/cm^3','%.2f')
0.26 eV/cm^3
"""
def __init__(self): super(CMB,self).__init__(T=2.725*u.kelvin)
if __name__ == "__main__":
import doctest
doctest.testmod()
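# Editor's arithmetic check of the 0.26 eV/cm^3 doctest above, using the
# standard radiation constant a = 4*sigma/c ~ 7.566e-15 erg cm^-3 K^-4:
#
#   U = a * T**4 = 7.566e-15 * 2.725**4 ~ 4.17e-13 erg/cm^3
#   4.17e-13 erg / (1.602e-12 erg/eV) ~ 0.26 eV/cm^3
#
# which matches what CMB().integrate(units=True, e_weight=1) should print.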
|
[
9,
12,
13,
14,
16
] |
2,263 |
a49c00dab8d445ce0b08fd31a4a41d6c8976d662
|
<mask token>
def colorPrint(color, str):
print(color + str + '\x1b[0m')
def main():
if sys.argv.__len__() < 2:
print('Wrong usage, exit')
return
colorPrint(YELLOW, sys.argv[1])
<mask token>
|
<mask token>
def colorPrint(color, str):
print(color + str + '\x1b[0m')
def main():
if sys.argv.__len__() < 2:
print('Wrong usage, exit')
return
colorPrint(YELLOW, sys.argv[1])
if __name__ == '__main__':
main()
|
<mask token>
BLACK = '\x1b[0;30m'
RED = '\x1b[0;31m'
GREEN = '\x1b[0;32m'
YELLOW = '\x1b[0;33m'
BLUE = '\x1b[0;34m'
PINK = '\x1b[0;35m'
CBLUE = '\x1b[0;36m'
WHITE = '\x1b[0;37m'
def colorPrint(color, str):
print(color + str + '\x1b[0m')
def main():
if sys.argv.__len__() < 2:
print('Wrong usage, exit')
return
colorPrint(YELLOW, sys.argv[1])
if __name__ == '__main__':
main()
|
import sys
BLACK = '\x1b[0;30m'
RED = '\x1b[0;31m'
GREEN = '\x1b[0;32m'
YELLOW = '\x1b[0;33m'
BLUE = '\x1b[0;34m'
PINK = '\x1b[0;35m'
CBLUE = '\x1b[0;36m'
WHITE = '\x1b[0;37m'
def colorPrint(color, str):
print(color + str + '\x1b[0m')
def main():
if sys.argv.__len__() < 2:
print('Wrong usage, exit')
return
colorPrint(YELLOW, sys.argv[1])
if __name__ == '__main__':
main()
|
#!/usr/bin/python
import sys
BLACK = '\033[0;30m'
RED = '\033[0;31m'
GREEN = '\033[0;32m'
YELLOW = '\033[0;33m'
BLUE = '\033[0;34m'
PINK = '\033[0;35m'
CBLUE = '\033[0;36m'
WHITE = '\033[0;37m'
def colorPrint(color, str):
print(color + str + '\033[0m');
def main():
if sys.argv.__len__() < 2:
print('Wrong usage, exit')
return
colorPrint(YELLOW, sys.argv[1])
if __name__ == '__main__':
main()
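# SGR note (not from the source): ANSI parameters apply left to right, which
# is why the constants put the reset first ('\033[0;33m'); the reverse order
# '\033[33;0m' would set yellow and then immediately reset it. A bold variant
# would be '\033[1;33m'. Example call: colorPrint(YELLOW, 'warning')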
|
[
2,
3,
4,
5,
6
] |
2,264 |
2bce18354a53c49274f7dd017e1f65c9ff1327b9
|
<<<<<<< HEAD
"""Module docstring"""
import os
import numpy as np
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
from sklearn.metrics import accuracy_score
=======
#!/usr/bin/python
"""Module docstring"""
import os
import numpy as np
from pickle_data_2 import Data
from sklearn.metrics import accuracy_score
from sklearn.metrics import confusion_matrix
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
### classifier methods ###
def linear_discriminant_analysis(data):
"""Linear Discriminant Analysis"""
clf = LinearDiscriminantAnalysis()
clf.name = "LDA"
train_predict_and_results(data, clf)
def nearest_neighbors_classifier(data):
"""K Nearest neighbors classification"""
clf = KNeighborsClassifier(3, 'distance')
clf.name = "KNN"
train_predict_and_results(data, clf)
def support_vector_machine(data):
"""Support Vector Machines"""
clf = SVC()
clf.name = "SVC"
train_predict_and_results(data, clf)
def gaussian_naive_bayes(data):
"""Naive Bayes"""
clf = GaussianNB()
clf.name = "GaussNB"
train_predict_and_results(data, clf)
def logistic_regression(data):
"""Logistic Regression """
clf = LogisticRegression()
clf.name = "LoReg"
train_predict_and_results(data, clf)
def random_forest(data):
"""Random Forest"""
clf = RandomForestClassifier()
clf.name = "RNDForest"
train_predict_and_results(data, clf)
### End of classifier methods ###
>>>>>>> 05e11c3b88b3fb5313f29e74125ab6fdd8fffd84
def normalize(data):
"""Returns data with columns normalized
input: numpy array
output: numpy array
"""
# normalize data and return
# https://stackoverflow.com/questions/29661574/normalize-numpy-array-columns-in-python
return (data - data.min(axis=0)) / data.ptp(axis=0)
<<<<<<< HEAD
def load_data():
"""Reads datafile and returns data as numpy array"""
# https://docs.scipy.org/doc/numpy-1.13.0/reference/generated/numpy.ndarray.astype.html
data = np.load("phase3-data/data_selected_1980_2010.npy").astype(float)
return normalize(data)
def load_target(column="label"):
"""Reads target labels and returns two columns: sum15 and label"""
columns = {"sum15": 0, "label": 1}
if column not in columns.keys():
raise ValueError("%s is not in target data" % column)
filepath = os.path.join("phase3-data", "target_1980_2010.npy")
target = np.load(filepath)
# lets normalize, sum15 might need it
target = normalize(target)
# return correct column
return target[:, columns[column]]
def concat_data(data, target):
'''Merge dataframe data with dataframe target and returns the final one '''
final_data = np.concatenate((data,target[:,None]), axis=1)
return final_data
=======
def load_ta_data():
"""Reads datafile and returns data as numpy array"""
# https://docs.scipy.org/doc/numpy-1.13.0/reference/generated/numpy.ndarray.astype.html
data = np.load("data/data_selected_1980_2010.npy").astype(float)
return normalize(data)
def load_ta_target():
"""Reads target labels and returns two columns: sum15 and label"""
filepath = os.path.join("data", "target_1980_2010.npy")
target = np.load(filepath)
return target[:, 1]
def load_own_data():
"""Loads data corresponding to selected features by custom saola algorithm"""
data = Data()
features = data.read_selected_features()
dataframe = data.get_dataframe_with(features)
return normalize(dataframe.values)
def load_own_target():
"""Loads target column as stored in our data files"""
data = Data()
target = data.get_label_col()
return target.values
>>>>>>> 05e11c3b88b3fb5313f29e74125ab6fdd8fffd84
def split_samples(data):
"""Splits data into training samples and test samples
input: numpy array
returns tuple (training_samples, test_samples)
both are numpy arrays
"""
training_samples = data[0:9497]
test_samples = data[9497:11300]
return training_samples, test_samples
<<<<<<< HEAD
def main():
"""The main method"""
feat_data = load_data()
label_data = load_target()
#final = concat_data(feat_data, label_data)
#print final
X_training, X_test = split_samples(feat_data)
Y_training, Y_test = split_samples(label_data)
#10- fold cross-validation
#knn = KNeighborsClassifier(n_neighbors=3)
lda = LinearDiscriminantAnalysis(n_components=3, priors=None, shrinkage=None,
solver='svd', store_covariance=False, tol=0.0001)
#folds = cross_val_score(lda, X_training, Y_training, cv=10)
#print folds
#kf = KFold(n_splits=10)
#print (kf.get_n_splits(X))
#for training_index, test_index in kf.split(X):
# print("TRAIN:", training_index, "TEST:", test_index)
# X_training, X_test = X[training_index], X[test_index]
# Y_training, Y_test = Y[training_index], Y[test_index]
#clf = LinearDiscriminantAnalysis(n_components=None, priors=None, shrinkage=None,
# solver='svd', store_covariance=False, tol=0.0001)
lda.fit(X_training, Y_training)
predictions = lda.predict(X_test)
    print(predictions)
    print(accuracy_score(Y_test, predictions))
=======
def prepare_data():
"""Prepare data for classifier to use"""
#data, label = load_ta_data(), load_ta_target()
data, label = load_own_data(), load_own_target()
tra_x, tst_x = split_samples(data)
tra_y, tst_y = split_samples(label)
return (tra_x, tst_x, tra_y, tst_y)
def train_predict_and_results(data, clf):
"""Perform training, calculate predictions and show results"""
tra_x, tst_x, tra_y, tst_y = data
clf.fit(tra_x, tra_y)
prd_y = clf.predict(tst_x)
cnf = confusion_matrix(tst_y, prd_y)
print ("Classifier: %s \tAccuracy score:%7.2f %%"
"\tTN:%5d FP:%5d FN:%5d TP:%5d"
% (clf.name, accuracy_score(tst_y, prd_y) * 100,
cnf[0][0], cnf[0][1], cnf[1][0], cnf[1][1]))
def main():
"""The main method"""
data = prepare_data()
linear_discriminant_analysis(data)
nearest_neighbors_classifier(data)
support_vector_machine(data)
gaussian_naive_bayes(data)
logistic_regression(data)
random_forest(data)
>>>>>>> 05e11c3b88b3fb5313f29e74125ab6fdd8fffd84
if __name__ == "__main__":
main()
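# Editor's note: this file still contains unresolved git conflict markers
# (<<<<<<< / ======= / >>>>>>>), so Python cannot import it as-is. A standard
# resolution recipe (the filename is illustrative):
#
#   git checkout --theirs classify.py   # keep the >>>>>>> 05e11c3 side
#   git checkout --ours   classify.py   # or keep the HEAD side
#   git add classify.py
#
# The >>>>>>> side is the one that wires up all six classifiers.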
| null | null | null | null |
[
0
] |
2,265 |
da30cea4cfb1ffccabe708fe15e5a633b06d299f
|
<mask token>
class Instruction(QWidget):
<mask token>
def set_background_instruction(self):
img = QPixmap('../images/background_instruction.jpg')
self.background_instruction.setPixmap(img)
<mask token>
|
<mask token>
class Instruction(QWidget):
def __init__(self):
super().__init__()
uic.loadUi('../ui/instruction.ui', self)
self.OK_btn.clicked.connect(self.show_game)
self.set_background_instruction()
def set_background_instruction(self):
img = QPixmap('../images/background_instruction.jpg')
self.background_instruction.setPixmap(img)
<mask token>
|
<mask token>
class Instruction(QWidget):
def __init__(self):
super().__init__()
uic.loadUi('../ui/instruction.ui', self)
self.OK_btn.clicked.connect(self.show_game)
self.set_background_instruction()
def set_background_instruction(self):
img = QPixmap('../images/background_instruction.jpg')
self.background_instruction.setPixmap(img)
def show_game(self):
self.parent().show_game()
|
import sys
from PyQt5 import uic
from PyQt5.QtWidgets import QWidget
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPixmap
class Instruction(QWidget):
def __init__(self):
super().__init__()
uic.loadUi('../ui/instruction.ui', self)
self.OK_btn.clicked.connect(self.show_game)
self.set_background_instruction()
def set_background_instruction(self):
img = QPixmap('../images/background_instruction.jpg')
self.background_instruction.setPixmap(img)
def show_game(self):
self.parent().show_game()
|
import sys
from PyQt5 import uic
from PyQt5.QtWidgets import QWidget
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPixmap
class Instruction(QWidget):
def __init__(self):
super().__init__()
# Set UI file
uic.loadUi('../ui/instruction.ui', self)
# Connect handlers of buttons
self.OK_btn.clicked.connect(self.show_game)
self.set_background_instruction()
# Set background of the windows
def set_background_instruction(self):
img = QPixmap('../images/background_instruction.jpg')
self.background_instruction.setPixmap(img)
# Show window of the game
def show_game(self):
self.parent().show_game()
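# Minimal host sketch (assumption, not from the source: the parent container
# is expected to expose its own show_game(), which is why the slot above just
# delegates upward):
#
#   class MainWindow(QWidget):
#       def show_game(self):
#           ...  # swap the Instruction page for the game screen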
|
[
2,
3,
4,
5,
6
] |
2,266 |
8980ac4db2657d3dbd2b70b33a4d13a077d4590e
|
<mask token>
class InvalidUsage(Exception):
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
@app.errorhandler(InvalidUsage)
def handle_invalid_usage(error):
response = jsonify(error.to_dict())
response.status_code = error.status_code
return response
<mask token>
@app.route('/get_suggestion', methods=['GET', 'POST'])
def get_suggestion():
if 'words' not in request.values.keys():
raise InvalidUsage('No words were specified for prediction.',
status_code=400)
text = request.values['words']
prediction = []
if len(text.split(' ')) > 1:
prediction = autocomplete.split_predict(text, 10)
else:
prediction = autocomplete.predict_currword(text, 10)
return jsonify(prediction)
@app.route('/send_text', methods=['GET', 'POST'])
def send_text():
if 'text' not in request.values.keys():
raise InvalidUsage('The text message was not found in the request.',
status_code=400)
if 'to' not in request.values.keys():
raise InvalidUsage('The to-number was not found in the request',
status_code=400)
text = request.values['text']
to_number = request.values['to']
account_sid = 'ACbbd2cff98bcbbad08f76b03701a0f2d9'
auth_token = '7d786ff14c6b4572a6e8e78f8ad6aee5'
client = Client(account_sid, auth_token)
message = client.messages.create(from_='+12267992139', to=to_number,
body=text)
return jsonify({'to': to_number, 'message': message.body, 'error code':
message.error_code})
|
<mask token>
@app.route('/')
def index():
return render_template('index.html')
class InvalidUsage(Exception):
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
@app.errorhandler(InvalidUsage)
def handle_invalid_usage(error):
response = jsonify(error.to_dict())
response.status_code = error.status_code
return response
@app.route('/convert_text_to_speech', methods=['POST'])
def convert_text_to_speech():
if 'text_to_convert' not in request.values.keys():
raise InvalidUsage('No text included for conversion', status_code=400)
tts = gTTS(text=request.values['text_to_convert'], lang='en')
tts.save('converted_text.mp3')
os.system('start converted_text.mp3')
return send_file('converted_text.mp3', mimetype='audio/mpeg')
@app.route('/get_suggestion', methods=['GET', 'POST'])
def get_suggestion():
if 'words' not in request.values.keys():
raise InvalidUsage('No words were specified for prediction.',
status_code=400)
text = request.values['words']
prediction = []
if len(text.split(' ')) > 1:
prediction = autocomplete.split_predict(text, 10)
else:
prediction = autocomplete.predict_currword(text, 10)
return jsonify(prediction)
@app.route('/send_text', methods=['GET', 'POST'])
def send_text():
if 'text' not in request.values.keys():
raise InvalidUsage('The text message was not found in the request.',
status_code=400)
if 'to' not in request.values.keys():
raise InvalidUsage('The to-number was not found in the request',
status_code=400)
text = request.values['text']
to_number = request.values['to']
account_sid = 'ACbbd2cff98bcbbad08f76b03701a0f2d9'
auth_token = '7d786ff14c6b4572a6e8e78f8ad6aee5'
client = Client(account_sid, auth_token)
message = client.messages.create(from_='+12267992139', to=to_number,
body=text)
return jsonify({'to': to_number, 'message': message.body, 'error code':
message.error_code})
|
<mask token>
autocomplete.load()
<mask token>
CORS(app)
@app.route('/')
def index():
return render_template('index.html')
class InvalidUsage(Exception):
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
@app.errorhandler(InvalidUsage)
def handle_invalid_usage(error):
response = jsonify(error.to_dict())
response.status_code = error.status_code
return response
@app.route('/convert_text_to_speech', methods=['POST'])
def convert_text_to_speech():
if 'text_to_convert' not in request.values.keys():
raise InvalidUsage('No text included for conversion', status_code=400)
tts = gTTS(text=request.values['text_to_convert'], lang='en')
tts.save('converted_text.mp3')
os.system('start converted_text.mp3')
return send_file('converted_text.mp3', mimetype='audio/mpeg')
@app.route('/get_suggestion', methods=['GET', 'POST'])
def get_suggestion():
if 'words' not in request.values.keys():
raise InvalidUsage('No words were specified for prediction.',
status_code=400)
text = request.values['words']
prediction = []
if len(text.split(' ')) > 1:
prediction = autocomplete.split_predict(text, 10)
else:
prediction = autocomplete.predict_currword(text, 10)
return jsonify(prediction)
@app.route('/send_text', methods=['GET', 'POST'])
def send_text():
if 'text' not in request.values.keys():
raise InvalidUsage('The text message was not found in the request.',
status_code=400)
if 'to' not in request.values.keys():
raise InvalidUsage('The to-number was not found in the request',
status_code=400)
text = request.values['text']
to_number = request.values['to']
account_sid = 'ACbbd2cff98bcbbad08f76b03701a0f2d9'
auth_token = '7d786ff14c6b4572a6e8e78f8ad6aee5'
client = Client(account_sid, auth_token)
message = client.messages.create(from_='+12267992139', to=to_number,
body=text)
return jsonify({'to': to_number, 'message': message.body, 'error code':
message.error_code})
|
<mask token>
autocomplete.load()
app = Flask(__name__)
CORS(app)
@app.route('/')
def index():
return render_template('index.html')
class InvalidUsage(Exception):
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
@app.errorhandler(InvalidUsage)
def handle_invalid_usage(error):
response = jsonify(error.to_dict())
response.status_code = error.status_code
return response
@app.route('/convert_text_to_speech', methods=['POST'])
def convert_text_to_speech():
if 'text_to_convert' not in request.values.keys():
raise InvalidUsage('No text included for conversion', status_code=400)
tts = gTTS(text=request.values['text_to_convert'], lang='en')
tts.save('converted_text.mp3')
os.system('start converted_text.mp3')
return send_file('converted_text.mp3', mimetype='audio/mpeg')
@app.route('/get_suggestion', methods=['GET', 'POST'])
def get_suggestion():
if 'words' not in request.values.keys():
raise InvalidUsage('No words were specified for prediction.',
status_code=400)
text = request.values['words']
prediction = []
if len(text.split(' ')) > 1:
prediction = autocomplete.split_predict(text, 10)
else:
prediction = autocomplete.predict_currword(text, 10)
return jsonify(prediction)
@app.route('/send_text', methods=['GET', 'POST'])
def send_text():
if 'text' not in request.values.keys():
raise InvalidUsage('The text message was not found in the request.',
status_code=400)
if 'to' not in request.values.keys():
raise InvalidUsage('The to-number was not found in the request',
status_code=400)
text = request.values['text']
to_number = request.values['to']
account_sid = 'ACbbd2cff98bcbbad08f76b03701a0f2d9'
auth_token = '7d786ff14c6b4572a6e8e78f8ad6aee5'
client = Client(account_sid, auth_token)
message = client.messages.create(from_='+12267992139', to=to_number,
body=text)
return jsonify({'to': to_number, 'message': message.body, 'error code':
message.error_code})
|
from flask import Flask, jsonify, request, send_file, render_template
from flask_cors import CORS
from twilio.rest import Client
import autocomplete
from gtts import gTTS
import os
# Set up the model.
autocomplete.load()
app = Flask(__name__)
CORS(app)
# The application
@app.route("/")
def index():
return render_template("index.html")
# Create a class for custom error messages (reference: http://flask.pocoo.org/docs/0.12/patterns/apierrors/).
class InvalidUsage(Exception):
status_code = 400
# Initialize the InvalidUsage exception.
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
# Convert the exception information into a dictionary.
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
# Register the custom exception with the error handler (reference: http://flask.pocoo.org/docs/0.12/patterns/apierrors/).
@app.errorhandler(InvalidUsage)
def handle_invalid_usage(error):
response = jsonify(error.to_dict())
response.status_code = error.status_code
return response
# Converts English text to speech.
@app.route('/convert_text_to_speech', methods=['POST'])
def convert_text_to_speech():
# Check to see if the required parameters are present.
if 'text_to_convert' not in request.values.keys():
raise InvalidUsage("No text included for conversion", status_code = 400)
# Send the post request.
tts = gTTS(text=request.values['text_to_convert'], lang='en')
tts.save('converted_text.mp3')
os.system('start converted_text.mp3')
# Return the sound file.
return send_file('converted_text.mp3', mimetype='audio/mpeg')
# Get suggestions for words that the user typed in.
@app.route('/get_suggestion', methods=['GET','POST'])
def get_suggestion():
# Raise an exception if the required parameters are not specified.
if "words" not in request.values.keys():
raise InvalidUsage("No words were specified for prediction.", status_code = 400)
# Predict the next word.
text = request.values['words']
prediction = [];
if len(text.split(" ")) > 1:
prediction = autocomplete.split_predict(text, 10)
else:
prediction = autocomplete.predict_currword(text, 10)
return jsonify(prediction)
# Adds text message support to allow Don to send text messages.
@app.route('/send_text', methods=['GET', 'POST'])
def send_text():
# Raise an exception if the required parameters are not specified.
if "text" not in request.values.keys():
raise InvalidUsage("The text message was not found in the request.", status_code = 400)
if "to" not in request.values.keys():
raise InvalidUsage("The to-number was not found in the request", status_code = 400)
# Extract the required information from the request body.
text = request.values['text']
to_number = request.values['to']
# Set up the account credentials - in a production project, this would be placed in a "secrets" file.
account_sid = "ACbbd2cff98bcbbad08f76b03701a0f2d9"
auth_token = "7d786ff14c6b4572a6e8e78f8ad6aee5"
# Send the text message.
client = Client(account_sid, auth_token)
message = client.messages.create(
from_="+12267992139",
to=to_number,
body=text)
return jsonify({"to":to_number, "message":message.body, "error code":message.error_code})
|
[
7,
9,
10,
11,
13
] |
2,267 |
eb5256543d6095668d6eeaf6cfdc9f744d7c73c5
|
<mask token>
class Load(Command):
<mask token>
<mask token>
<mask token>
def find_groupname(self, g, s):
tries = 0
while True:
groups = s.groups()
if g not in groups:
return g
if tries > 3:
                raise RuntimeError('%r is conflicting, try to pass a new one' % g)
            i = 0
            base = g
            while True:
                g = '%s.%s' % (base, i)
                if g not in groups:
                    break
                i += 1
tries += 1
|
<mask token>
class Load(Command):
<mask token>
<mask token>
def run(self, procfile, pargs):
args = pargs.args
uri = None
if len(args) == 2:
group = args[0]
uri = args[1]
elif len(args) == 1:
group = args[0]
else:
group = '.'
if pargs.endpoint:
uri = pargs.endpoint
if not uri:
uri = 'http://127.0.0.1:5000'
if group == '.':
group = procfile.get_groupname()
s = Server(uri)
group = self.find_groupname(group, s)
concurrency = self.parse_concurrency(pargs)
for name, cmd_str in procfile.processes():
cmd, args = procfile.parse_cmd(cmd_str)
pname = '%s:%s' % (group, name)
params = dict(args=args, env=procfile.env, numprocesses=
concurrency.get(name, 1), redirect_output=['out', 'err'],
cwd=os.path.abspath(procfile.root))
s.add_process(pname, cmd, **params)
print('%r has been loaded in %s' % (group, uri))
def find_groupname(self, g, s):
tries = 0
while True:
groups = s.groups()
if g not in groups:
return g
if tries > 3:
                raise RuntimeError('%r is conflicting, try to pass a new one' % g)
i = 0
while True:
g = '%s.%s' % (g, i)
if g not in groups:
break
tries += 1
|
<mask token>
class Load(Command):
""" Load a Procfile application to gafferd
======================================
This command allows you to load your Procfile application
in gafferd.
Command line
------------
$ gaffer load [name] [url]
Arguments
+++++++++
    *name* is the name of the group of processes recorded in gafferd.
    By default it will be the name of your project folder. You can use
``.`` to specify the current folder.
*uri* is the url to connect to a gaffer node. By default
'http://127.0.0.1:5000'
Options
+++++++
**--endpoint**
Gaffer node URL to connect.
"""
name = 'load'
def run(self, procfile, pargs):
args = pargs.args
uri = None
if len(args) == 2:
group = args[0]
uri = args[1]
elif len(args) == 1:
group = args[0]
else:
group = '.'
if pargs.endpoint:
uri = pargs.endpoint
if not uri:
uri = 'http://127.0.0.1:5000'
if group == '.':
group = procfile.get_groupname()
s = Server(uri)
group = self.find_groupname(group, s)
concurrency = self.parse_concurrency(pargs)
for name, cmd_str in procfile.processes():
cmd, args = procfile.parse_cmd(cmd_str)
pname = '%s:%s' % (group, name)
params = dict(args=args, env=procfile.env, numprocesses=
concurrency.get(name, 1), redirect_output=['out', 'err'],
cwd=os.path.abspath(procfile.root))
s.add_process(pname, cmd, **params)
print('%r has been loaded in %s' % (group, uri))
def find_groupname(self, g, s):
tries = 0
while True:
groups = s.groups()
if g not in groups:
return g
if tries > 3:
                raise RuntimeError('%r is conflicting, try to pass a new one' % g)
i = 0
while True:
g = '%s.%s' % (g, i)
if g not in groups:
break
tries += 1
|
import os
from .base import Command
from ...httpclient import Server
class Load(Command):
""" Load a Procfile application to gafferd
======================================
This command allows you to load your Procfile application
in gafferd.
Command line
------------
$ gaffer load [name] [url]
Arguments
+++++++++
    *name* is the name of the group of processes recorded in gafferd.
    By default it will be the name of your project folder. You can use
``.`` to specify the current folder.
*uri* is the url to connect to a gaffer node. By default
'http://127.0.0.1:5000'
Options
+++++++
**--endpoint**
Gaffer node URL to connect.
"""
name = 'load'
def run(self, procfile, pargs):
args = pargs.args
uri = None
if len(args) == 2:
group = args[0]
uri = args[1]
elif len(args) == 1:
group = args[0]
else:
group = '.'
if pargs.endpoint:
uri = pargs.endpoint
if not uri:
uri = 'http://127.0.0.1:5000'
if group == '.':
group = procfile.get_groupname()
s = Server(uri)
group = self.find_groupname(group, s)
concurrency = self.parse_concurrency(pargs)
for name, cmd_str in procfile.processes():
cmd, args = procfile.parse_cmd(cmd_str)
pname = '%s:%s' % (group, name)
params = dict(args=args, env=procfile.env, numprocesses=
concurrency.get(name, 1), redirect_output=['out', 'err'],
cwd=os.path.abspath(procfile.root))
s.add_process(pname, cmd, **params)
print('%r has been loaded in %s' % (group, uri))
def find_groupname(self, g, s):
tries = 0
while True:
groups = s.groups()
if g not in groups:
return g
if tries > 3:
                raise RuntimeError('%r is conflicting, try to pass a new one' % g)
i = 0
while True:
g = '%s.%s' % (g, i)
if g not in groups:
break
tries += 1
|
# -*- coding: utf-8 -
#
# This file is part of gaffer. See the NOTICE for more information.
import os
from .base import Command
from ...httpclient import Server
class Load(Command):
"""\
Load a Procfile application to gafferd
======================================
This command allows you to load your Procfile application
in gafferd.
Command line
------------
$ gaffer load [name] [url]
Arguments
+++++++++
    *name* is the name of the group of processes recorded in gafferd.
    By default it will be the name of your project folder. You can use
``.`` to specify the current folder.
*uri* is the url to connect to a gaffer node. By default
'http://127.0.0.1:5000'
Options
+++++++
**--endpoint**
Gaffer node URL to connect.
"""
name = "load"
def run(self, procfile, pargs):
args = pargs.args
# get args
uri = None
if len(args) == 2:
group = args[0]
uri = args[1]
elif len(args) == 1:
group = args[0]
else:
group = "."
if pargs.endpoint:
uri = pargs.endpoint
if not uri:
uri = "http://127.0.0.1:5000"
# get the default groupname
if group == ".":
group = procfile.get_groupname()
# create a server instance
s = Server(uri)
# finally manage group conflicts
group = self.find_groupname(group, s)
# parse the concurrency settings
concurrency = self.parse_concurrency(pargs)
# finally send the processes
for name, cmd_str in procfile.processes():
cmd, args = procfile.parse_cmd(cmd_str)
pname = "%s:%s" % (group, name)
params = dict(args=args, env=procfile.env,
numprocesses=concurrency.get(name, 1),
redirect_output=['out', 'err'],
cwd=os.path.abspath(procfile.root))
s.add_process(pname, cmd, **params)
print("%r has been loaded in %s" % (group, uri))
def find_groupname(self, g, s):
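        # Probe the server for a free group name, appending a '.<n>' suffix on conflict.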
tries = 0
while True:
groups = s.groups()
if g not in groups:
return g
if tries > 3:
raise RuntimeError("%r is conflicting, try to pass a new one")
i = 0
while True:
g = "%s.%s" % (g, i)
if g not in groups:
break
tries += 1
|
[
2,
3,
5,
6,
7
] |
2,268 |
dacd4334433eb323ce732c96f680fb7b9333721a
|
# -*- coding: utf-8 -*-
import sys
import xlrd
import numpy as np
import matplotlib.pyplot as plt
if __name__ == "__main__":
param = sys.argv
print "Hello:" + param[0]
    # Open the workbook file
book = xlrd.open_workbook('sample.xls')
    # Select the worksheet
sheet = book.sheet_by_name(u"Sheet1")
# sheet = book.sheet_by_index(0)
plot_x = np.zeros(sheet.nrows-1, dtype=np.float64)
plot_y = np.zeros(sheet.nrows-1, dtype=np.float64)
for row in range(sheet.nrows):
if row==0:
plt.xlabel(sheet.cell(0,1).value)
plt.ylabel(sheet.cell(0,2).value)
pass
elif row>=1:
plot_x[row-1] = float(sheet.cell(row,1).value)
plot_y[row-1] = float(sheet.cell(row,2).value)
plt.xlim([0,100])
plt.ylim([0,50])
plt.plot(plot_x, plot_y,'o',color='r', label='test1')
    plt.title(u'排出量')  # title: 'Emissions'
    plt.legend(loc='lower right')  # show the legend
plt.show()
| null | null | null | null |
[
0
] |
2,269 |
7599f13d1cabe73d876ff97722962f2fcf9a9940
|
<mask token>
def test_sample_input():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
logger.info(traverse_map(map_template))
def test_sample_input_custom_slope():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
assert traverse_map(map_template, slope=[1, 1]) == 2
assert traverse_map(map_template, slope=[1, 3]) == 7
assert traverse_map(map_template, slope=[2, 1]) == 2
def test_big_input():
with open(os.path.join(local_path, 'input'), 'r') as f:
found_trees = traverse_map(f.read())
assert found_trees == 237
def test_sample_input_with_multiple_slopes():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
tree_product = traverse_map_multiple_slopes(map_template, [[1, 1], [1,
3], [1, 5], [1, 7], [2, 1]])
assert tree_product == 336
def test_big_input_with_multiple_slopes():
with open(os.path.join(local_path, 'input'), 'r') as f:
tree_product = traverse_map_multiple_slopes(f.read(), [[1, 1], [1,
3], [1, 5], [1, 7], [2, 1]])
assert tree_product == 2106818610
|
<mask token>
def test_get_map_cell():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
assert get_map_cell(map_template.split('\n'), 1, 10) == '.'
assert get_map_cell(map_template.split('\n'), 1, 10 + 11) == '.'
def test_sample_input():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
logger.info(traverse_map(map_template))
def test_sample_input_custom_slope():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
assert traverse_map(map_template, slope=[1, 1]) == 2
assert traverse_map(map_template, slope=[1, 3]) == 7
assert traverse_map(map_template, slope=[2, 1]) == 2
def test_big_input():
with open(os.path.join(local_path, 'input'), 'r') as f:
found_trees = traverse_map(f.read())
assert found_trees == 237
def test_sample_input_with_multiple_slopes():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
tree_product = traverse_map_multiple_slopes(map_template, [[1, 1], [1,
3], [1, 5], [1, 7], [2, 1]])
assert tree_product == 336
def test_big_input_with_multiple_slopes():
with open(os.path.join(local_path, 'input'), 'r') as f:
tree_product = traverse_map_multiple_slopes(f.read(), [[1, 1], [1,
3], [1, 5], [1, 7], [2, 1]])
assert tree_product == 2106818610
|
<mask token>
logger = logging.getLogger(__name__)
local_path = os.path.abspath(os.path.dirname(__file__))
def test_get_map_cell():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
assert get_map_cell(map_template.split('\n'), 1, 10) == '.'
assert get_map_cell(map_template.split('\n'), 1, 10 + 11) == '.'
def test_sample_input():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
logger.info(traverse_map(map_template))
def test_sample_input_custom_slope():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
assert traverse_map(map_template, slope=[1, 1]) == 2
assert traverse_map(map_template, slope=[1, 3]) == 7
assert traverse_map(map_template, slope=[2, 1]) == 2
def test_big_input():
with open(os.path.join(local_path, 'input'), 'r') as f:
found_trees = traverse_map(f.read())
assert found_trees == 237
def test_sample_input_with_multiple_slopes():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
tree_product = traverse_map_multiple_slopes(map_template, [[1, 1], [1,
3], [1, 5], [1, 7], [2, 1]])
assert tree_product == 336
def test_big_input_with_multiple_slopes():
with open(os.path.join(local_path, 'input'), 'r') as f:
tree_product = traverse_map_multiple_slopes(f.read(), [[1, 1], [1,
3], [1, 5], [1, 7], [2, 1]])
assert tree_product == 2106818610
|
import logging
import os.path
from day03.code.main import traverse_map, get_map_cell, traverse_map_multiple_slopes
logger = logging.getLogger(__name__)
local_path = os.path.abspath(os.path.dirname(__file__))
def test_get_map_cell():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
assert get_map_cell(map_template.split('\n'), 1, 10) == '.'
assert get_map_cell(map_template.split('\n'), 1, 10 + 11) == '.'
def test_sample_input():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
logger.info(traverse_map(map_template))
def test_sample_input_custom_slope():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
assert traverse_map(map_template, slope=[1, 1]) == 2
assert traverse_map(map_template, slope=[1, 3]) == 7
assert traverse_map(map_template, slope=[2, 1]) == 2
def test_big_input():
with open(os.path.join(local_path, 'input'), 'r') as f:
found_trees = traverse_map(f.read())
assert found_trees == 237
def test_sample_input_with_multiple_slopes():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
tree_product = traverse_map_multiple_slopes(map_template, [[1, 1], [1,
3], [1, 5], [1, 7], [2, 1]])
assert tree_product == 336
def test_big_input_with_multiple_slopes():
with open(os.path.join(local_path, 'input'), 'r') as f:
tree_product = traverse_map_multiple_slopes(f.read(), [[1, 1], [1,
3], [1, 5], [1, 7], [2, 1]])
assert tree_product == 2106818610
|
import logging
import os.path
from day03.code.main import traverse_map, get_map_cell, traverse_map_multiple_slopes
logger = logging.getLogger(__name__)
local_path = os.path.abspath(os.path.dirname(__file__))
def test_get_map_cell():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
assert get_map_cell(map_template.split("\n"), 1, 10) == "."
assert get_map_cell(map_template.split("\n"), 1, 10 + 11) == "."
def test_sample_input():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
logger.info(traverse_map(map_template))
def test_sample_input_custom_slope():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
assert traverse_map(map_template, slope=[1, 1]) == 2
assert traverse_map(map_template, slope=[1, 3]) == 7
assert traverse_map(map_template, slope=[2, 1]) == 2
def test_big_input():
with open(os.path.join(local_path, "input"), "r") as f:
found_trees = traverse_map(f.read())
assert found_trees == 237
def test_sample_input_with_multiple_slopes():
map_template = """..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#"""
tree_product = traverse_map_multiple_slopes(
map_template, [[1, 1], [1, 3], [1, 5], [1, 7], [2, 1]]
)
assert tree_product == 336
def test_big_input_with_multiple_slopes():
with open(os.path.join(local_path, "input"), "r") as f:
tree_product = traverse_map_multiple_slopes(
f.read(), [[1, 1], [1, 3], [1, 5], [1, 7], [2, 1]]
)
assert tree_product == 2106818610
|
[
5,
6,
7,
8,
9
] |
2,270 |
ec9f27b4313f72ae6eb7e8280d47de226aeb6bb1
|
<mask token>
|
<mask token>
def similarity_metric(embedding1: numpy.ndarray, embedding2: numpy.ndarray
) ->float:
return numpy.nan_to_num(1 - cosine(embedding1, embedding2), 0)
|
import numpy
from scipy.spatial.distance import cosine
def similarity_metric(embedding1: numpy.ndarray, embedding2: numpy.ndarray
) ->float:
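    # Cosine similarity = 1 - cosine distance; nan_to_num guards against zero-norm inputs.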
return numpy.nan_to_num(1 - cosine(embedding1, embedding2), 0)
| null | null |
[
0,
1,
2
] |
2,271 |
967c8348352c805b926643617b88b03a62df2d16
|
<mask token>
|
from access.ssh.session import Client
from access.ssh.datachannel import DataChannel
| null | null | null |
[
0,
1
] |
2,272 |
7c39b3927bc0702818c54875785b4657c20c441e
|
<mask token>
|
class Solution(object):
<mask token>
|
class Solution(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
if len(s) == 0:
return ''
if len(s) == 1:
return s
start = -1
end = -2
for i in range(len(s)):
side = 1
while i - side >= 0 and i + side < len(s) and s[i - side] == s[
i + side]:
side += 1
if (side - 1) * 2 + 1 > end - start + 1:
start = i - (side - 1)
end = i + side
out_string = s[start:end]
start = -1
end = -2
for i in range(len(s) - 1):
side = 0
while i - side >= 0 and i + 1 + side < len(s) and s[i - side] == s[
i + 1 + side]:
side += 1
if side * 2 > end - start + 1:
start = i - side + 1
end = i + 1 + side
return out_string if len(out_string) > end - start else s[start:end]
|
# Given a string S, find the longest palindromic substring in S. You may assume that the maximum length of S is 1000, and there exists one unique longest palindromic substring.
class Solution(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
if len(s) == 0:
return ""
if len(s) == 1:
return s
start = -1
end = -2
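        # Expand around each character to find the longest odd-length palindrome.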
for i in range(len(s)):
side = 1
while i - side >= 0 and i + side < len(s) and s[i - side] == s[i + side]:
side += 1
if (side - 1) * 2 + 1 > end - start + 1:
start = i - (side - 1)
end = i + side
out_string = s[start:end]
start = -1
end = -2
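        # Expand around each adjacent pair to find the longest even-length palindrome.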
for i in range(len(s) - 1):
side = 0
while i - side >= 0 and i + 1 + side < len(s) and s[i - side] == s[i + 1 + side]:
side += 1
if side * 2 > end - start + 1:
start = i - side + 1
end = i + 1 + side
return out_string if len(out_string) > end - start else s[start:end]
| null |
[
0,
1,
2,
3
] |
2,273 |
4605a3f88c73b43fa7611a10a400ad2d4d7c6dfc
|
<mask token>
|
__author__ = 'zhouhenglc'
TIME_FORMAT = '%Y-%m-%d %H:%M:%S'
ENCODING = 'utf-8'
G_MULTI_MODE = [6]
STATUS_ONLINE = 64
STATUS_OFFLINE = 128
TOKEN_BAD_FORMAT = 'token_bad_format'
TOKEN_EXPIRED = 'token_expired'
TOKEN_NOT_STORAGE = 'token_not_storage'
TOKEN_REQUIRE_REFRESH = 'token_require_refresh'
T_STATE_RIGHT = 'right'
T_STATE_WRONG = 'wrong'
T_STATE_SKIP = 'skip'
T_STATES = [T_STATE_RIGHT, T_STATE_WRONG, T_STATE_SKIP]
R_EXAM = 'exam'
R_QUESTION = 'question'
R_VC = 'virtual_currency'
R_SE = 'security'
E_AFTER_UPDATE = 'after_update'
E_GEN_TOKEN = 'gen_token'
E_PARSING_TOKEN = 'parsing_token'
E_NEW_BILLING = 'new_billing'
E_SE_FIREWALL = 'security_firewall'
VB_FB = 'feedback_exam'
VB_FB_NAME = '题库问题反馈得积分'
VC_EC_EM = 'vc_exchange_exam_mem'
VC_EC_EM_NAME = '积分换题库会员'
SE_ACTION_NORMAL = 'normal'
SE_ACTION_WARN = 'warn'
SE_ACTION_EXIT = 'exit'
DR_KEY_VC_GOODS = 'vc_goods'
DR_KEY_ROUTES = 'routes'
GOOD_TYPE_EXAM = 'exam'
|
# !/usr/bin/env python
# coding: utf-8
__author__ = 'zhouhenglc'
TIME_FORMAT = '%Y-%m-%d %H:%M:%S'
ENCODING = 'utf-8'
# exam mode
# G_SELECT_MODE
# To be deprecated; gradually migrate to classes.objects.question_type
# G_SELECT_MODE = ["无", "选择题", "名词解释", "简答题", "计算题", "论述题", "多选题", "判断题"]
G_MULTI_MODE = [6, ]  # multiple-choice question type (multiple-choice = 6)
# G_DEF_OPTIONS = [1, 6]  # custom options: single-choice = 1, multiple-choice = 6
# exam status
STATUS_ONLINE = 64
STATUS_OFFLINE = 128
# token error
TOKEN_BAD_FORMAT = 'token_bad_format' # login again
TOKEN_EXPIRED = 'token_expired' # try refresh
TOKEN_NOT_STORAGE = 'token_not_storage' # login again
TOKEN_REQUIRE_REFRESH = 'token_require_refresh' # try refresh
# training question state
T_STATE_RIGHT = 'right'
T_STATE_WRONG = 'wrong'
T_STATE_SKIP = 'skip'
T_STATES = [T_STATE_RIGHT, T_STATE_WRONG, T_STATE_SKIP]
# resource constants
R_EXAM = 'exam'
R_QUESTION = 'question'
R_VC = 'virtual_currency'
R_SE = 'security'
# resource event
E_AFTER_UPDATE = 'after_update'
E_GEN_TOKEN = 'gen_token'
E_PARSING_TOKEN = 'parsing_token'
E_NEW_BILLING = 'new_billing'
E_SE_FIREWALL = 'security_firewall'
# vc billing
VB_FB = 'feedback_exam'
VB_FB_NAME = '题库问题反馈得积分'  # "earn points for question-bank feedback"
VC_EC_EM = 'vc_exchange_exam_mem'
VC_EC_EM_NAME = '积分换题库会员'  # "exchange points for question-bank membership"
# security handle action
SE_ACTION_NORMAL = 'normal'
SE_ACTION_WARN = 'warn'
SE_ACTION_EXIT = 'exit'
# DATA_REGISTRY keys
DR_KEY_VC_GOODS = 'vc_goods'
DR_KEY_ROUTES = 'routes'
# goods type
GOOD_TYPE_EXAM = 'exam'
| null | null |
[
0,
1,
2
] |
2,274 |
3e48de2e3b12965de1b3b5cb6c3cf68c90ec6212
|
<mask token>
|
<mask token>
for i in range(10):
line = sys.stdin.readline()
height = int(line)
heights.append(height)
heights.sort()
heights.reverse()
for i in range(3):
print(heights[i])
|
<mask token>
heights = []
for i in range(10):
line = sys.stdin.readline()
height = int(line)
heights.append(height)
heights.sort()
heights.reverse()
for i in range(3):
print(heights[i])
|
import sys
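# Read ten heights from stdin and print the three tallest.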
heights = []
for i in range(10):
line = sys.stdin.readline()
height = int(line)
heights.append(height)
heights.sort()
heights.reverse()
for i in range(3):
print(heights[i])
| null |
[
0,
1,
2,
3
] |
2,275 |
fbd5400823a8148adf358a2acc58fde146a25313
|
<mask token>
def register_int_signal_handler():
def stop_thread_handler(signum, frame):
log.info('Received signal {0}. Will stop all task threads'.format(
signum))
for _ in range(len(THREAD_STOP_FLAGS)):
THREAD_STOP_FLAGS[_] = True
if platform.platform().startswith('Windows'):
signal.signal(signal.CTRL_C_EVENT, stop_thread_handler)
else:
signal.signal(signal.SIGINT, stop_thread_handler)
<mask token>
def create_task_list(path_list):
"""
Walks path recursively, and create a task list
:param path_list: a list of (path, rating)
:return: a list of ImportTask objects
"""
current_video_id = Video.objects.all().aggregate(Max('video_id'))[
'video_id__max']
if not current_video_id:
current_video_id = 0
task_list = []
for path, rating in path_list:
base_path = os.path.split(path)[0]
if os.path.isfile(path):
file_name = os.path.basename(path)
if is_valid_video_file(path, file_name):
video_id, current_video_id = next_video_id(current_video_id,
path)
task_list.append(ImportTask(video_id, base_path, path, rating))
continue
for root, dirs, files in os.walk(path):
for file_name in files:
try:
file_path = os.path.join(root, file_name)
if os.path.isdir(file_path):
continue
if is_valid_video_file(file_path, file_name):
video_id, current_video_id = next_video_id(
current_video_id, file_path)
task_list.append(ImportTask(video_id, base_path,
file_path, rating))
except:
log.error('#Error while proceeding: {0}'.format(file_name))
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value,
exc_traceback, limit=2, file=sys.stdout)
return task_list
def start_tasks(task_list):
global task_queue
for task in task_list:
task_queue.put(task)
if not THREAD_STOP_FLAGS:
for _ in range(MAX_THREAD_NUM):
THREAD_STOP_FLAGS.append(True)
if not os.path.isdir(COVER_DIR):
os.mkdir(COVER_DIR)
if not os.path.isdir(THUMB_DIR):
os.mkdir(THUMB_DIR)
if not os.path.isdir(FLIP_DIR):
os.mkdir(FLIP_DIR)
for _ in range(MAX_THREAD_NUM):
if THREAD_STOP_FLAGS[_]:
t = Thread(target=import_worker, kwargs={'thread_index': _})
t.name = str(_)
t.daemon = False
t.start()
task_queue.join()
<mask token>
class ImportTask(object):
def __init__(self, video_id, base_path, path, rating=Video.P):
"""
Create an import task object.
:param video_id: a pre-allocated video_id in number, so we don't need to lock db in multiple thread.
:param base_path: path prefix that will be ignored when creating keywords from path.
:param path: path of the file
:param rating: rating of the video, highest by default.
"""
self.video_id = video_id
self.base_path = base_path
self.file_path = path
self.rating = rating
<mask token>
def is_valid_video_file(file_path, file_name):
if file_name.startswith('.') or not file_name.endswith('.mp4'):
return False
if os.path.getsize(file_path) == 0:
log.info('Remove invalid video file: {0}'.format(file_path))
os.remove(file_path)
return False
return True
def load_keyword_blacklist_from_file():
blacklist = set()
keyword_file = 'keywords.blacklist'
try:
with open(keyword_file, 'r') as kfp:
for line in kfp:
line = line.strip('\n')
if line:
blacklist.add(line)
log.info('Keywords blacklist: {0}'.format(blacklist))
except Exception as e:
log.error('Error while processing {0}:{1}'.format(keyword_file, e))
return blacklist
def get_keywords(prefix, file_path, blacklist):
"""
Get keywords from file path
:param prefix: Prefix of the dir path, so we can ignore them
:param file_path: full path of the video file
:param blacklist: A set of words/symbols that should be ignored
:return: a list of keywords
"""
file_path = str(file_path).replace(prefix, '')
file_path = os.path.splitext(file_path)[0]
file_path = str(file_path).lower()
for bad_keyword in blacklist:
file_path = file_path.replace(bad_keyword, ' ')
file_path = re.sub('\\s+', ' ', file_path)
keywords = file_path.split(' ')
keywords = [k for k in keywords if k]
return keywords
class KeywordDictDataObj(object):
def __init__(self):
self.count = 0
self.files = set()
<mask token>
def gen_thumb(video_path, thumb_path):
"""
Generate thumb image for the given video, and grabs duration from output
:return: (success, duration)
"""
if os.path.isfile(thumb_path):
os.remove(thumb_path)
global THUMB_SIZE
cmd = ['ffmpeg', '-itsoffset', '-5', '-i', video_path, '-vframes', '1',
'-f', 'apng', '-s', THUMB_SIZE, thumb_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
output = p.communicate()[1]
duration = search_duration_from_text(output)
if not duration:
tlog = get_logger(current_thread().name)
tlog.error('Failed to find duration for {0}'.format(video_path))
duration = 0
return p.returncode == 0, duration
def gen_flips(video_path, video_id, duration, flip_path, flip_num):
"""
Generate flips for the given video
:param video_path: path of the video
:param video_id: id of the file
:param duration: duration of video in seconds
:param flip_path: path dir to put the flips
:param flip_num: number of flips to generate
:return: True on success, False otherwise
"""
if not G_GEN_IMAGE:
return True
duration = float(duration)
flip_num = float(flip_num)
interval = duration / flip_num
if interval <= 0.0:
tlog = get_logger(current_thread().name)
tlog.error('Cannot generate flips. Duration: {0} FlipNum:{1}'.
format(duration, flip_num))
return False
fps = 'fps=1/' + str(interval)
global THUMB_SIZE
flip_path = os.path.join(flip_path, str(video_id))
for _ in range(FLIP_NUM + 3):
flip_file = '{0}-{1}.png'.format(flip_path, _)
if os.path.isfile(flip_file):
os.remove(flip_file)
flip_path_template = flip_path + '-%d.png'
cmd = ['ffmpeg', '-i', video_path, '-vf', fps, '-s', THUMB_SIZE,
flip_path_template]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
<mask token>
def search_duration_from_text(text):
regExp = re.compile('Duration: (\\d{2}):(\\d{2}):(\\d{2})')
result = regExp.search(text, re.M | re.U)
if result is not None:
hour, min, sec = result.groups()
duration = int(hour) * 3600 + int(min) * 60 + int(sec)
return duration
return None
|
<mask token>
def register_int_signal_handler():
def stop_thread_handler(signum, frame):
log.info('Received signal {0}. Will stop all task threads'.format(
signum))
for _ in range(len(THREAD_STOP_FLAGS)):
THREAD_STOP_FLAGS[_] = True
if platform.platform().startswith('Windows'):
signal.signal(signal.CTRL_C_EVENT, stop_thread_handler)
else:
signal.signal(signal.SIGINT, stop_thread_handler)
def next_video_id(current, path):
existing = Video.objects.filter(path=path)
if existing:
return existing[0].video_id, current
current += 1
return current, current
def create_task_list(path_list):
"""
Walks path recursively, and create a task list
:param path_list: a list of (path, rating)
:return: a list of ImportTask objects
"""
current_video_id = Video.objects.all().aggregate(Max('video_id'))[
'video_id__max']
if not current_video_id:
current_video_id = 0
task_list = []
for path, rating in path_list:
base_path = os.path.split(path)[0]
if os.path.isfile(path):
file_name = os.path.basename(path)
if is_valid_video_file(path, file_name):
video_id, current_video_id = next_video_id(current_video_id,
path)
task_list.append(ImportTask(video_id, base_path, path, rating))
continue
for root, dirs, files in os.walk(path):
for file_name in files:
try:
file_path = os.path.join(root, file_name)
if os.path.isdir(file_path):
continue
if is_valid_video_file(file_path, file_name):
video_id, current_video_id = next_video_id(
current_video_id, file_path)
task_list.append(ImportTask(video_id, base_path,
file_path, rating))
except:
log.error('#Error while proceeding: {0}'.format(file_name))
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value,
exc_traceback, limit=2, file=sys.stdout)
return task_list
def start_tasks(task_list):
global task_queue
for task in task_list:
task_queue.put(task)
if not THREAD_STOP_FLAGS:
for _ in range(MAX_THREAD_NUM):
THREAD_STOP_FLAGS.append(True)
if not os.path.isdir(COVER_DIR):
os.mkdir(COVER_DIR)
if not os.path.isdir(THUMB_DIR):
os.mkdir(THUMB_DIR)
if not os.path.isdir(FLIP_DIR):
os.mkdir(FLIP_DIR)
for _ in range(MAX_THREAD_NUM):
if THREAD_STOP_FLAGS[_]:
t = Thread(target=import_worker, kwargs={'thread_index': _})
t.name = str(_)
t.daemon = False
t.start()
task_queue.join()
<mask token>
class ImportTask(object):
def __init__(self, video_id, base_path, path, rating=Video.P):
"""
Create an import task object.
:param video_id: a pre-allocated video_id in number, so we don't need to lock db in multiple thread.
:param base_path: path prefix that will be ignored when creating keywords from path.
:param path: path of the file
:param rating: rating of the video, highest by default.
"""
self.video_id = video_id
self.base_path = base_path
self.file_path = path
self.rating = rating
<mask token>
def is_valid_video_file(file_path, file_name):
if file_name.startswith('.') or not file_name.endswith('.mp4'):
return False
if os.path.getsize(file_path) == 0:
log.info('Remove invalid video file: {0}'.format(file_path))
os.remove(file_path)
return False
return True
def load_keyword_blacklist_from_file():
blacklist = set()
keyword_file = 'keywords.blacklist'
try:
with open(keyword_file, 'r') as kfp:
for line in kfp:
line = line.strip('\n')
if line:
blacklist.add(line)
log.info('Keywords blacklist: {0}'.format(blacklist))
except Exception as e:
log.error('Error while processing {0}:{1}'.format(keyword_file, e))
return blacklist
def get_keywords(prefix, file_path, blacklist):
"""
Get keywords from file path
:param prefix: Prefix of the dir path, so we can ignore them
:param file_path: full path of the video file
:param blacklist: A set of words/symbols that should be ignored
:return: a list of keywords
"""
file_path = str(file_path).replace(prefix, '')
file_path = os.path.splitext(file_path)[0]
file_path = str(file_path).lower()
for bad_keyword in blacklist:
file_path = file_path.replace(bad_keyword, ' ')
file_path = re.sub('\\s+', ' ', file_path)
keywords = file_path.split(' ')
keywords = [k for k in keywords if k]
return keywords
class KeywordDictDataObj(object):
def __init__(self):
self.count = 0
self.files = set()
def get_thumb_path(fn):
return './static/thumb/' + str(fn) + '.png'
<mask token>
def gen_thumb(video_path, thumb_path):
"""
Generate thumb image for the given video, and grabs duration from output
:return: (success, duration)
"""
if os.path.isfile(thumb_path):
os.remove(thumb_path)
global THUMB_SIZE
cmd = ['ffmpeg', '-itsoffset', '-5', '-i', video_path, '-vframes', '1',
'-f', 'apng', '-s', THUMB_SIZE, thumb_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
output = p.communicate()[1]
duration = search_duration_from_text(output)
if not duration:
tlog = get_logger(current_thread().name)
tlog.error('Failed to find duration for {0}'.format(video_path))
duration = 0
return p.returncode == 0, duration
def gen_flips(video_path, video_id, duration, flip_path, flip_num):
"""
Generate flips for the given video
:param video_path: path of the video
:param video_id: id of the file
:param duration: duration of video in seconds
:param flip_path: path dir to put the flips
:param flip_num: number of flips to generate
:return: True on success, False otherwise
"""
if not G_GEN_IMAGE:
return True
duration = float(duration)
flip_num = float(flip_num)
interval = duration / flip_num
if interval <= 0.0:
tlog = get_logger(current_thread().name)
tlog.error('Cannot generate flips. Duration: {0} FlipNum:{1}'.
format(duration, flip_num))
return False
fps = 'fps=1/' + str(interval)
global THUMB_SIZE
flip_path = os.path.join(flip_path, str(video_id))
for _ in range(FLIP_NUM + 3):
flip_file = '{0}-{1}.png'.format(flip_path, _)
if os.path.isfile(flip_file):
os.remove(flip_file)
flip_path_template = flip_path + '-%d.png'
cmd = ['ffmpeg', '-i', video_path, '-vf', fps, '-s', THUMB_SIZE,
flip_path_template]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
def gen_cover(video_path, cover_path):
if not G_GEN_IMAGE:
return True
if os.path.isfile(cover_path):
os.remove(cover_path)
cmd = ['ffmpeg', '-itsoffset', '-1', '-i', video_path, '-vframes', '1',
'-f', 'apng', cover_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
<mask token>
def search_duration_from_text(text):
regExp = re.compile('Duration: (\\d{2}):(\\d{2}):(\\d{2})')
result = regExp.search(text, re.M | re.U)
if result is not None:
hour, min, sec = result.groups()
duration = int(hour) * 3600 + int(min) * 60 + int(sec)
return duration
return None
|
<mask token>
def register_int_signal_handler():
def stop_thread_handler(signum, frame):
log.info('Received signal {0}. Will stop all task threads'.format(
signum))
for _ in range(len(THREAD_STOP_FLAGS)):
THREAD_STOP_FLAGS[_] = True
if platform.platform().startswith('Windows'):
signal.signal(signal.CTRL_C_EVENT, stop_thread_handler)
else:
signal.signal(signal.SIGINT, stop_thread_handler)
def next_video_id(current, path):
existing = Video.objects.filter(path=path)
if existing:
return existing[0].video_id, current
current += 1
return current, current
def create_task_list(path_list):
"""
Walks path recursively, and create a task list
:param path_list: a list of (path, rating)
:return: a list of ImportTask objects
"""
current_video_id = Video.objects.all().aggregate(Max('video_id'))[
'video_id__max']
if not current_video_id:
current_video_id = 0
task_list = []
for path, rating in path_list:
base_path = os.path.split(path)[0]
if os.path.isfile(path):
file_name = os.path.basename(path)
if is_valid_video_file(path, file_name):
video_id, current_video_id = next_video_id(current_video_id,
path)
task_list.append(ImportTask(video_id, base_path, path, rating))
continue
for root, dirs, files in os.walk(path):
for file_name in files:
try:
file_path = os.path.join(root, file_name)
if os.path.isdir(file_path):
continue
if is_valid_video_file(file_path, file_name):
video_id, current_video_id = next_video_id(
current_video_id, file_path)
task_list.append(ImportTask(video_id, base_path,
file_path, rating))
except:
log.error('#Error while proceeding: {0}'.format(file_name))
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value,
exc_traceback, limit=2, file=sys.stdout)
return task_list
def start_tasks(task_list):
global task_queue
for task in task_list:
task_queue.put(task)
if not THREAD_STOP_FLAGS:
for _ in range(MAX_THREAD_NUM):
THREAD_STOP_FLAGS.append(True)
if not os.path.isdir(COVER_DIR):
os.mkdir(COVER_DIR)
if not os.path.isdir(THUMB_DIR):
os.mkdir(THUMB_DIR)
if not os.path.isdir(FLIP_DIR):
os.mkdir(FLIP_DIR)
for _ in range(MAX_THREAD_NUM):
if THREAD_STOP_FLAGS[_]:
t = Thread(target=import_worker, kwargs={'thread_index': _})
t.name = str(_)
t.daemon = False
t.start()
task_queue.join()
<mask token>
class ImportTask(object):
def __init__(self, video_id, base_path, path, rating=Video.P):
"""
Create an import task object.
:param video_id: a pre-allocated video_id in number, so we don't need to lock db in multiple thread.
:param base_path: path prefix that will be ignored when creating keywords from path.
:param path: path of the file
:param rating: rating of the video, highest by default.
"""
self.video_id = video_id
self.base_path = base_path
self.file_path = path
self.rating = rating
def import_worker(thread_index):
"""
Thread worker that deals with tasks.
:return:
"""
THREAD_STOP_FLAGS[thread_index] = False
while not (THREAD_STOP_FLAGS[thread_index] or task_queue.empty()):
task = task_queue.get()
do_import_video_task(task)
task_queue.task_done()
THREAD_STOP_FLAGS[thread_index] = True
def do_import_video_task(task):
video_id = task.video_id
file_path = task.file_path
rating = task.rating
file_name = os.path.basename(file_path)[:-4]
tlog = get_logger(current_thread().name)
videos = Video.objects.filter(path=file_path)
if videos:
tlog.info('Existing video: {0}'.format(task.file_path))
return
video = Video()
video.video_id = video_id
video.rating = rating
thumb_path = get_thumb_path(video.video_id)
cover_path = get_cover_path(video.video_id)
if not gen_cover(task.file_path, cover_path):
tlog.error('Failed to gen cover for {0}'.format(file_path))
return
success, duration = gen_thumb(file_path, thumb_path)
if success:
if not gen_flips(file_path, video.video_id, duration, FLIP_DIR,
FLIP_NUM):
tlog.error('Failed to gen flips for {0}'.format(file_path))
else:
tlog.error('Failed to gen thumb for {0}'.format(file_path))
video.title = file_name
video.path = file_path
video.duration = duration
video.save()
tlog.info('#Video: {0} [{1}] {2}'.format(video.title, video.duration,
video.path))
def is_valid_video_file(file_path, file_name):
if file_name.startswith('.') or not file_name.endswith('.mp4'):
return False
if os.path.getsize(file_path) == 0:
log.info('Remove invalid video file: {0}'.format(file_path))
os.remove(file_path)
return False
return True
def load_keyword_blacklist_from_file():
blacklist = set()
keyword_file = 'keywords.blacklist'
try:
with open(keyword_file, 'r') as kfp:
for line in kfp:
line = line.strip('\n')
if line:
blacklist.add(line)
log.info('Keywords blacklist: {0}'.format(blacklist))
except Exception as e:
log.error('Error while processing {0}:{1}'.format(keyword_file, e))
return blacklist
def get_keywords(prefix, file_path, blacklist):
"""
Get keywords from file path
:param prefix: Prefix of the dir path, so we can ignore them
:param file_path: full path of the video file
:param blacklist: A set of words/symbols that should be ignored
:return: a list of keywords
"""
file_path = str(file_path).replace(prefix, '')
file_path = os.path.splitext(file_path)[0]
file_path = str(file_path).lower()
for bad_keyword in blacklist:
file_path = file_path.replace(bad_keyword, ' ')
file_path = re.sub('\\s+', ' ', file_path)
keywords = file_path.split(' ')
keywords = [k for k in keywords if k]
return keywords
class KeywordDictDataObj(object):
def __init__(self):
self.count = 0
self.files = set()
def get_thumb_path(fn):
return './static/thumb/' + str(fn) + '.png'
<mask token>
def gen_thumb(video_path, thumb_path):
"""
Generate thumb image for the given video, and grabs duration from output
:return: (success, duration)
"""
if os.path.isfile(thumb_path):
os.remove(thumb_path)
global THUMB_SIZE
cmd = ['ffmpeg', '-itsoffset', '-5', '-i', video_path, '-vframes', '1',
'-f', 'apng', '-s', THUMB_SIZE, thumb_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
output = p.communicate()[1]
duration = search_duration_from_text(output)
if not duration:
tlog = get_logger(current_thread().name)
tlog.error('Failed to find duration for {0}'.format(video_path))
duration = 0
return p.returncode == 0, duration
def gen_flips(video_path, video_id, duration, flip_path, flip_num):
"""
Generate flips for the given video
:param video_path: path of the video
:param video_id: id of the file
:param duration: duration of video in seconds
:param flip_path: path dir to put the flips
:param flip_num: number of flips to generate
:return: True on success, False otherwise
"""
if not G_GEN_IMAGE:
return True
duration = float(duration)
flip_num = float(flip_num)
interval = duration / flip_num
if interval <= 0.0:
tlog = get_logger(current_thread().name)
tlog.error('Cannot generate flips. Duration: {0} FlipNum:{1}'.
format(duration, flip_num))
return False
fps = 'fps=1/' + str(interval)
global THUMB_SIZE
flip_path = os.path.join(flip_path, str(video_id))
for _ in range(FLIP_NUM + 3):
flip_file = '{0}-{1}.png'.format(flip_path, _)
if os.path.isfile(flip_file):
os.remove(flip_file)
flip_path_template = flip_path + '-%d.png'
cmd = ['ffmpeg', '-i', video_path, '-vf', fps, '-s', THUMB_SIZE,
flip_path_template]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
def gen_cover(video_path, cover_path):
if not G_GEN_IMAGE:
return True
if os.path.isfile(cover_path):
os.remove(cover_path)
cmd = ['ffmpeg', '-itsoffset', '-1', '-i', video_path, '-vframes', '1',
'-f', 'apng', cover_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
def convert_video_to_mp4(video_path, dest_path):
tlog = get_logger(current_thread().name)
if os.path.isfile(dest_path):
tlog.info('#Already converted, skip: {0}'.format(dest_path))
return True
    tlog.info('#Converting: {0} => {1}'.format(video_path, dest_path))
cmd = ['ffmpeg', '-i', video_path, '-vcodec', 'h264', '-acodec', 'aac',
dest_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
def search_duration_from_text(text):
regExp = re.compile('Duration: (\\d{2}):(\\d{2}):(\\d{2})')
result = regExp.search(text, re.M | re.U)
if result is not None:
hour, min, sec = result.groups()
duration = int(hour) * 3600 + int(min) * 60 + int(sec)
return duration
return None
|
<mask token>
def register_int_signal_handler():
def stop_thread_handler(signum, frame):
log.info('Received signal {0}. Will stop all task threads'.format(
signum))
for _ in range(len(THREAD_STOP_FLAGS)):
THREAD_STOP_FLAGS[_] = True
if platform.platform().startswith('Windows'):
signal.signal(signal.CTRL_C_EVENT, stop_thread_handler)
else:
signal.signal(signal.SIGINT, stop_thread_handler)
def next_video_id(current, path):
existing = Video.objects.filter(path=path)
if existing:
return existing[0].video_id, current
current += 1
return current, current
def create_task_list(path_list):
"""
Walks path recursively, and create a task list
:param path_list: a list of (path, rating)
:return: a list of ImportTask objects
"""
current_video_id = Video.objects.all().aggregate(Max('video_id'))[
'video_id__max']
if not current_video_id:
current_video_id = 0
task_list = []
for path, rating in path_list:
base_path = os.path.split(path)[0]
if os.path.isfile(path):
file_name = os.path.basename(path)
if is_valid_video_file(path, file_name):
video_id, current_video_id = next_video_id(current_video_id,
path)
task_list.append(ImportTask(video_id, base_path, path, rating))
continue
for root, dirs, files in os.walk(path):
for file_name in files:
try:
file_path = os.path.join(root, file_name)
if os.path.isdir(file_path):
continue
if is_valid_video_file(file_path, file_name):
video_id, current_video_id = next_video_id(
current_video_id, file_path)
task_list.append(ImportTask(video_id, base_path,
file_path, rating))
except:
log.error('#Error while proceeding: {0}'.format(file_name))
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value,
exc_traceback, limit=2, file=sys.stdout)
return task_list
def start_tasks(task_list):
global task_queue
for task in task_list:
task_queue.put(task)
if not THREAD_STOP_FLAGS:
for _ in range(MAX_THREAD_NUM):
THREAD_STOP_FLAGS.append(True)
if not os.path.isdir(COVER_DIR):
os.mkdir(COVER_DIR)
if not os.path.isdir(THUMB_DIR):
os.mkdir(THUMB_DIR)
if not os.path.isdir(FLIP_DIR):
os.mkdir(FLIP_DIR)
for _ in range(MAX_THREAD_NUM):
if THREAD_STOP_FLAGS[_]:
t = Thread(target=import_worker, kwargs={'thread_index': _})
t.name = str(_)
t.daemon = False
t.start()
task_queue.join()
<mask token>
class ImportTask(object):
def __init__(self, video_id, base_path, path, rating=Video.P):
"""
Create an import task object.
:param video_id: a pre-allocated video_id in number, so we don't need to lock db in multiple thread.
:param base_path: path prefix that will be ignored when creating keywords from path.
:param path: path of the file
:param rating: rating of the video, highest by default.
"""
self.video_id = video_id
self.base_path = base_path
self.file_path = path
self.rating = rating
def import_worker(thread_index):
"""
Thread worker that deals with tasks.
:return:
"""
THREAD_STOP_FLAGS[thread_index] = False
while not (THREAD_STOP_FLAGS[thread_index] or task_queue.empty()):
task = task_queue.get()
do_import_video_task(task)
task_queue.task_done()
THREAD_STOP_FLAGS[thread_index] = True
def do_import_video_task(task):
video_id = task.video_id
file_path = task.file_path
rating = task.rating
file_name = os.path.basename(file_path)[:-4]
tlog = get_logger(current_thread().name)
videos = Video.objects.filter(path=file_path)
if videos:
tlog.info('Existing video: {0}'.format(task.file_path))
return
video = Video()
video.video_id = video_id
video.rating = rating
thumb_path = get_thumb_path(video.video_id)
cover_path = get_cover_path(video.video_id)
if not gen_cover(task.file_path, cover_path):
tlog.error('Failed to gen cover for {0}'.format(file_path))
return
success, duration = gen_thumb(file_path, thumb_path)
if success:
if not gen_flips(file_path, video.video_id, duration, FLIP_DIR,
FLIP_NUM):
tlog.error('Failed to gen flips for {0}'.format(file_path))
else:
tlog.error('Failed to gen thumb for {0}'.format(file_path))
video.title = file_name
video.path = file_path
video.duration = duration
video.save()
tlog.info('#Video: {0} [{1}] {2}'.format(video.title, video.duration,
video.path))
def is_valid_video_file(file_path, file_name):
if file_name.startswith('.') or not file_name.endswith('.mp4'):
return False
if os.path.getsize(file_path) == 0:
log.info('Remove invalid video file: {0}'.format(file_path))
os.remove(file_path)
return False
return True
def load_keyword_blacklist_from_file():
blacklist = set()
keyword_file = 'keywords.blacklist'
try:
with open(keyword_file, 'r') as kfp:
for line in kfp:
line = line.strip('\n')
if line:
blacklist.add(line)
log.info('Keywords blacklist: {0}'.format(blacklist))
except Exception as e:
log.error('Error while processing {0}:{1}'.format(keyword_file, e))
return blacklist
def get_keywords(prefix, file_path, blacklist):
"""
Get keywords from file path
:param prefix: Prefix of the dir path, so we can ignore them
:param file_path: full path of the video file
:param blacklist: A set of words/symbols that should be ignored
:return: a list of keywords
"""
file_path = str(file_path).replace(prefix, '')
file_path = os.path.splitext(file_path)[0]
file_path = str(file_path).lower()
for bad_keyword in blacklist:
file_path = file_path.replace(bad_keyword, ' ')
file_path = re.sub('\\s+', ' ', file_path)
keywords = file_path.split(' ')
keywords = [k for k in keywords if k]
return keywords
class KeywordDictDataObj(object):
def __init__(self):
self.count = 0
self.files = set()
def get_thumb_path(fn):
return './static/thumb/' + str(fn) + '.png'
def get_cover_path(fn):
return './static/cover/' + str(fn) + '.png'
def gen_thumb(video_path, thumb_path):
"""
Generate thumb image for the given video, and grabs duration from output
:return: (success, duration)
"""
if os.path.isfile(thumb_path):
os.remove(thumb_path)
global THUMB_SIZE
cmd = ['ffmpeg', '-itsoffset', '-5', '-i', video_path, '-vframes', '1',
'-f', 'apng', '-s', THUMB_SIZE, thumb_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
output = p.communicate()[1]
duration = search_duration_from_text(output)
if not duration:
tlog = get_logger(current_thread().name)
tlog.error('Failed to find duration for {0}'.format(video_path))
duration = 0
return p.returncode == 0, duration
def gen_flips(video_path, video_id, duration, flip_path, flip_num):
"""
Generate flips for the given video
:param video_path: path of the video
:param video_id: id of the file
:param duration: duration of video in seconds
:param flip_path: path dir to put the flips
:param flip_num: number of flips to generate
:return: True on success, False otherwise
"""
if not G_GEN_IMAGE:
return True
duration = float(duration)
flip_num = float(flip_num)
interval = duration / flip_num
if interval <= 0.0:
tlog = get_logger(current_thread().name)
tlog.error('Cannot generate flips. Duration: {0} FlipNum:{1}'.
format(duration, flip_num))
return False
fps = 'fps=1/' + str(interval)
global THUMB_SIZE
flip_path = os.path.join(flip_path, str(video_id))
for _ in range(FLIP_NUM + 3):
flip_file = '{0}-{1}.png'.format(flip_path, _)
if os.path.isfile(flip_file):
os.remove(flip_file)
flip_path_template = flip_path + '-%d.png'
cmd = ['ffmpeg', '-i', video_path, '-vf', fps, '-s', THUMB_SIZE,
flip_path_template]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
def gen_cover(video_path, cover_path):
if not G_GEN_IMAGE:
return True
if os.path.isfile(cover_path):
os.remove(cover_path)
cmd = ['ffmpeg', '-itsoffset', '-1', '-i', video_path, '-vframes', '1',
'-f', 'apng', cover_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
def convert_video_to_mp4(video_path, dest_path):
tlog = get_logger(current_thread().name)
if os.path.isfile(dest_path):
tlog.info('#Already converted, skip: {0}'.format(dest_path))
return True
    tlog.info('#Converting: {0} => {1}'.format(video_path, dest_path))
cmd = ['ffmpeg', '-i', video_path, '-vcodec', 'h264', '-acodec', 'aac',
dest_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
def search_duration_from_text(text):
regExp = re.compile('Duration: (\\d{2}):(\\d{2}):(\\d{2})')
result = regExp.search(text, re.M | re.U)
if result is not None:
hour, min, sec = result.groups()
duration = int(hour) * 3600 + int(min) * 60 + int(sec)
return duration
return None
|
# coding=utf8
# encoding: utf-8
import os
import platform
import re
import signal
import sys
import traceback
from subprocess import Popen, PIPE
from threading import Thread, current_thread
from Queue import Queue
from util.log import get_logger, log
from video.models import Video, KeywordVideoId
from django.db.models import Max
from collect_video import G_GEN_IMAGE
MAX_THREAD_NUM = 4
THREAD_STOP_FLAGS = []
THUMB_DIR = './static/thumb'
THUMB_SIZE = '180x135'
COVER_DIR = './static/cover'
FLIP_DIR = './static/flip'
FLIP_NUM = 10
task_queue = Queue(maxsize=2000)
def register_int_signal_handler():
def stop_thread_handler(signum, frame):
log.info("Received signal {0}. Will stop all task threads".format(signum))
for _ in range(len(THREAD_STOP_FLAGS)):
THREAD_STOP_FLAGS[_] = True
if platform.platform().startswith('Windows'):
signal.signal(signal.CTRL_C_EVENT, stop_thread_handler)
else:
signal.signal(signal.SIGINT, stop_thread_handler)
def next_video_id(current, path):
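    # Reuse the id of a video already imported at this path; otherwise allocate the next id.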
existing = Video.objects.filter(path=path)
if existing:
return existing[0].video_id, current
current += 1
return current, current
def create_task_list(path_list):
"""
Walks path recursively, and create a task list
:param path_list: a list of (path, rating)
:return: a list of ImportTask objects
"""
current_video_id = Video.objects.all().aggregate(Max('video_id'))['video_id__max']
if not current_video_id:
current_video_id = 0
task_list = []
for (path, rating) in path_list:
base_path = os.path.split(path)[0]
if os.path.isfile(path):
file_name = os.path.basename(path)
if is_valid_video_file(path, file_name):
video_id, current_video_id = next_video_id(current_video_id, path)
task_list.append(ImportTask(video_id, base_path, path, rating))
continue
for (root, dirs, files) in os.walk(path):
for file_name in files:
try:
file_path = os.path.join(root, file_name)
if os.path.isdir(file_path):
continue
if is_valid_video_file(file_path, file_name):
video_id, current_video_id = next_video_id(current_video_id, file_path)
task_list.append(ImportTask(video_id, base_path, file_path, rating))
except:
log.error('#Error while proceeding: {0}'.format(file_name))
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value, exc_traceback, limit=2, file=sys.stdout)
return task_list
def start_tasks(task_list):
global task_queue
for task in task_list:
task_queue.put(task)
if not THREAD_STOP_FLAGS:
for _ in range(MAX_THREAD_NUM):
THREAD_STOP_FLAGS.append(True)
if not os.path.isdir(COVER_DIR):
os.mkdir(COVER_DIR)
if not os.path.isdir(THUMB_DIR):
os.mkdir(THUMB_DIR)
if not os.path.isdir(FLIP_DIR):
os.mkdir(FLIP_DIR)
for _ in range(MAX_THREAD_NUM):
if THREAD_STOP_FLAGS[_]:
t = Thread(target=import_worker, kwargs={'thread_index': _})
t.name = str(_)
t.daemon = False
t.start()
task_queue.join()
def add_keywords_to_db(task_list):
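    # Derive keywords from each task's file path and persist keyword -> video mappings.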
blacklist = load_keyword_blacklist_from_file()
for task in task_list:
base_path = task.base_path
file_path = task.file_path
video_id = task.video_id
keywords = get_keywords(base_path, file_path, blacklist)
        log.info('#Keywords: {0}'.format(keywords))
for key in keywords:
try:
if KeywordVideoId.objects.filter(keyword=key, video_id=video_id):
log.info("Existing keyword {0} for {1}".format(key, video_id))
continue
keyword_record = KeywordVideoId()
keyword_record.keyword = key
keyword_record.video = Video.objects.get(video_id=video_id)
keyword_record.save()
log.info('#Added keyword:{0} for video_id: {1}'.format(key, video_id))
except Exception as e:
log.error("Error while adding keyword {0} to video {1}: {2}".format(key, video_id, e))
class ImportTask(object):
def __init__(self, video_id, base_path, path, rating=Video.P):
"""
Create an import task object.
:param video_id: a pre-allocated video_id in number, so we don't need to lock db in multiple thread.
:param base_path: path prefix that will be ignored when creating keywords from path.
:param path: path of the file
:param rating: rating of the video, highest by default.
"""
self.video_id = video_id
self.base_path = base_path
self.file_path = path
self.rating = rating
def import_worker(thread_index):
"""
Thread worker that deals with tasks.
:return:
"""
THREAD_STOP_FLAGS[thread_index] = False
while not (THREAD_STOP_FLAGS[thread_index] or task_queue.empty()):
task = task_queue.get()
do_import_video_task(task)
task_queue.task_done()
THREAD_STOP_FLAGS[thread_index] = True
def do_import_video_task(task):
video_id = task.video_id
file_path = task.file_path
rating = task.rating
file_name = os.path.basename(file_path)[:-4]
tlog = get_logger(current_thread().name)
videos = Video.objects.filter(path=file_path)
if videos:
tlog.info("Existing video: {0}".format(task.file_path))
return
video = Video()
video.video_id = video_id
video.rating = rating
thumb_path = get_thumb_path(video.video_id)
cover_path = get_cover_path(video.video_id)
if not gen_cover(task.file_path, cover_path):
tlog.error("Failed to gen cover for {0}".format(file_path))
return
success, duration = gen_thumb(file_path, thumb_path)
if success:
if not gen_flips(file_path, video.video_id, duration, FLIP_DIR, FLIP_NUM):
tlog.error("Failed to gen flips for {0}".format(file_path))
else:
tlog.error("Failed to gen thumb for {0}".format(file_path))
video.title = file_name
video.path = file_path
video.duration = duration
video.save()
tlog.info('#Video: {0} [{1}] {2}'.format(video.title, video.duration, video.path))
def is_valid_video_file(file_path, file_name):
# skip hidden files (possibly not valid video files)
if file_name.startswith('.') or (not file_name.endswith('.mp4')):
return False
if os.path.getsize(file_path) == 0:
log.info('Remove invalid video file: {0}'.format(file_path))
os.remove(file_path)
return False
return True
def load_keyword_blacklist_from_file():
blacklist = set()
keyword_file = 'keywords.blacklist'
try:
with open(keyword_file, 'r') as kfp:
for line in kfp:
line = line.strip('\n')
if line:
blacklist.add(line)
log.info("Keywords blacklist: {0}".format(blacklist))
except Exception as e:
log.error("Error while processing {0}:{1}".format(keyword_file, e))
return blacklist
def get_keywords(prefix, file_path, blacklist):
"""
Get keywords from file path
:param prefix: Prefix of the dir path, so we can ignore them
:param file_path: full path of the video file
:param blacklist: A set of words/symbols that should be ignored
:return: a list of keywords
"""
file_path = str(file_path).replace(prefix, '') # remove base_dir from file_path
file_path = os.path.splitext(file_path)[0] # Only keep the part without extension
file_path = str(file_path).lower()
for bad_keyword in blacklist:
file_path = file_path.replace(bad_keyword, ' ')
file_path = re.sub(r'\s+', ' ', file_path) # Replace multiple spaces to single one
keywords = file_path.split(' ')
keywords = [k for k in keywords if k]
return keywords
class KeywordDictDataObj(object):
def __init__(self):
self.count = 0
self.files = set()
def get_thumb_path(fn):
return './static/thumb/' + str(fn) + '.png'
def get_cover_path(fn):
return './static/cover/' + str(fn) + '.png'
def gen_thumb(video_path, thumb_path):
"""
Generate thumb image for the given video, and grabs duration from output
:return: (success, duration)
"""
if os.path.isfile(thumb_path):
os.remove(thumb_path)
global THUMB_SIZE
cmd = ['ffmpeg', '-itsoffset', '-5', '-i', video_path, '-vframes', '1', '-f', 'apng', '-s', THUMB_SIZE, thumb_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
output = p.communicate()[1]
duration = search_duration_from_text(output)
if not duration:
tlog = get_logger(current_thread().name)
tlog.error("Failed to find duration for {0}".format(video_path))
duration = 0
return p.returncode == 0, duration
def gen_flips(video_path, video_id, duration, flip_path, flip_num):
"""
Generate flips for the given video
:param video_path: path of the video
:param video_id: id of the file
:param duration: duration of video in seconds
:param flip_path: path dir to put the flips
:param flip_num: number of flips to generate
:return: True on success, False otherwise
"""
if not G_GEN_IMAGE:
return True
duration = float(duration)
flip_num = float(flip_num)
interval = duration / flip_num
if interval <= 0.0:
tlog = get_logger(current_thread().name)
tlog.error("Cannot generate flips. Duration: {0} FlipNum:{1}".format(duration, flip_num))
return False
fps = 'fps=1/' + str(interval)
global THUMB_SIZE
flip_path = os.path.join(flip_path, str(video_id))
for _ in range(FLIP_NUM+3):
flip_file = "{0}-{1}.png".format(flip_path, _)
if os.path.isfile(flip_file):
os.remove(flip_file)
flip_path_template = flip_path + '-%d.png'
cmd = ['ffmpeg', '-i', video_path, '-vf', fps, '-s', THUMB_SIZE, flip_path_template]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
def gen_cover(video_path, cover_path):
if not G_GEN_IMAGE:
return True
if os.path.isfile(cover_path):
os.remove(cover_path)
cmd = ['ffmpeg', '-itsoffset', '-1', '-i', video_path, '-vframes', '1', '-f', 'apng', cover_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
# Convert video to mp4
def convert_video_to_mp4(video_path, dest_path):
tlog = get_logger(current_thread().name)
if os.path.isfile(dest_path):
tlog.info('#Already converted, skip: {0}'.format(dest_path))
return True
    tlog.info('#Converting: {0} => {1}'.format(video_path, dest_path))
cmd = ['ffmpeg', '-i', video_path, '-vcodec', 'h264', '-acodec', 'aac', dest_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
# Search the duration from given text
def search_duration_from_text(text):
    # Match a pattern like "Duration: 00:24:14.91" (fractional seconds ignored)
    # Flags belong in re.compile: Pattern.search's second positional argument
    # is the start position, not a flags value.
    regExp = re.compile(r'Duration: (\d{2}):(\d{2}):(\d{2})', re.M | re.U)
    result = regExp.search(text)
if result is not None:
(hour, min, sec) = result.groups()
duration = int(hour) * 3600 + int(min) * 60 + int(sec)
return duration
return None
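# Hypothetical smoke test for the duration parser above; the sample ffmpeg
# stderr line is made up for illustration. Guarded so it only runs when the
# module is executed directly.
if __name__ == '__main__':
    _sample = '  Duration: 00:24:14.91, start: 0.000000, bitrate: 1205 kb/s'
    assert search_duration_from_text(_sample) == 24 * 60 + 14  # 1454 seconds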
|
[
13,
16,
19,
20,
24
] |
2,276 |
e735529eddd3a46ea335e593e5937558b50b142d
|
<mask token>
def get_second_long(time_str=None):
if time_str is None:
return long(time.time())
time_array = time.strptime(time_str, '%Y-%m-%d %H:%M:%S')
return long(time.mktime(time_array))
<mask token>
def get_curtimestamp():
return int(time.time() * 1000)
<mask token>
def get_curdate_format():
return get_curtime_str().strftime('%Y-%m-%d')
def get_curmonth_format():
return get_curtime_str().strftime('%Y-%m')
<mask token>
def get_curday_str():
return get_curtime_str().day
def get_curdate_str():
return get_curtime_str().strftime('%Y%m%d')
def get_curdatetime_str():
return get_curtime_str().strftime('%Y%m%d%H%M%S')
<mask token>
|
<mask token>
def get_second_long(time_str=None):
if time_str is None:
return long(time.time())
time_array = time.strptime(time_str, '%Y-%m-%d %H:%M:%S')
return long(time.mktime(time_array))
<mask token>
def get_curtimestamp():
return int(time.time() * 1000)
def get_curdatetime_format():
return get_curtime_str().strftime('%Y-%m-%d %H:%M:%S')
def get_curdate_format():
return get_curtime_str().strftime('%Y-%m-%d')
def get_curmonth_format():
return get_curtime_str().strftime('%Y-%m')
<mask token>
def get_curday_str():
return get_curtime_str().day
def get_curdate_str():
return get_curtime_str().strftime('%Y%m%d')
def get_curdatetime_str():
return get_curtime_str().strftime('%Y%m%d%H%M%S')
def get_curminuter_str():
return get_curtime_str().strftime('%Y%m%d%H%M')
|
<mask token>
def get_second_long(time_str=None):
if time_str is None:
return long(time.time())
time_array = time.strptime(time_str, '%Y-%m-%d %H:%M:%S')
return long(time.mktime(time_array))
<mask token>
def get_curtimestamp():
return int(time.time() * 1000)
def get_curdatetime_format():
return get_curtime_str().strftime('%Y-%m-%d %H:%M:%S')
def get_curdate_format():
return get_curtime_str().strftime('%Y-%m-%d')
def get_curmonth_format():
return get_curtime_str().strftime('%Y-%m')
<mask token>
def get_curminuter_str():
return get_curtime_str().minute
def get_curday_str():
return get_curtime_str().day
def get_curdate_str():
return get_curtime_str().strftime('%Y%m%d')
def get_curdatetime_str():
return get_curtime_str().strftime('%Y%m%d%H%M%S')
def get_curminuter_str():
return get_curtime_str().strftime('%Y%m%d%H%M')
|
<mask token>
def get_second_long(time_str=None):
if time_str is None:
return long(time.time())
time_array = time.strptime(time_str, '%Y-%m-%d %H:%M:%S')
return long(time.mktime(time_array))
<mask token>
def get_curtimestamp():
return int(time.time() * 1000)
def get_curdatetime_format():
return get_curtime_str().strftime('%Y-%m-%d %H:%M:%S')
def get_curdate_format():
return get_curtime_str().strftime('%Y-%m-%d')
def get_curmonth_format():
return get_curtime_str().strftime('%Y-%m')
def get_curhour_str():
return get_curtime_str().hour
def get_curminuter_str():
return get_curtime_str().minute
def get_curday_str():
return get_curtime_str().day
def get_curdate_str():
return get_curtime_str().strftime('%Y%m%d')
def get_curdatetime_str():
return get_curtime_str().strftime('%Y%m%d%H%M%S')
def get_curminuter_str():
return get_curtime_str().strftime('%Y%m%d%H%M')
|
# -*- coding: utf-8 -*-
import time
import datetime
def get_second_long(time_str=None):
if time_str is None:
return long(time.time())
time_array = time.strptime(time_str, "%Y-%m-%d %H:%M:%S")
return long(time.mktime(time_array))
def get_curtime_str():
return datetime.datetime.now()
def get_curtimestamp():
return int(time.time() * 1000)
def get_curdatetime_format():
return get_curtime_str().strftime("%Y-%m-%d %H:%M:%S")
def get_curdate_format():
return get_curtime_str().strftime("%Y-%m-%d")
def get_curmonth_format():
return get_curtime_str().strftime("%Y-%m")
def get_curhour_str():
return get_curtime_str().hour
def get_curminuter_str():
return get_curtime_str().minute
def get_curday_str():
return get_curtime_str().day
def get_curdate_str():
return get_curtime_str().strftime("%Y%m%d")
def get_curdatetime_str():
return get_curtime_str().strftime("%Y%m%d%H%M%S")
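# NOTE: the second get_curminuter_str below redefines (shadows) the
# ".minute" version declared earlier in this module.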
def get_curminuter_str():
return get_curtime_str().strftime("%Y%m%d%H%M")
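# Minimal usage sketch of the helpers above (Python 2 prints, matching the
# long builtin this module relies on); the timestamp below is illustrative.
if __name__ == "__main__":
    print get_second_long("2016-01-01 00:00:00")  # epoch seconds as a long
    print get_curdate_format()                    # e.g. "2016-01-01"
    print get_curdatetime_str()                   # e.g. "20160101093015"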
|
[
7,
9,
10,
11,
14
] |
2,277 |
7dc7c7598c9069e5fbb336bb97161ebb7c74366e
|
#!/usr/bin/env python
from imgproc import *
from time import sleep
# open the webcam
#camera = Camera(640, 480)
camera = Camera(320, 240)
#camera = Camera(160, 120)
#while True:
# grab an image from the camera
frame = camera.grabImage()
x, y = 160, 120  # centre pixel of the 320x240 frame (x and y were undefined here)
print frame[x, y]
# open a view, setting the view to the size of the captured image
#view = Viewer(frame.width, frame.height, "Basic image processing")
# display the image on the screen
#view.displayImage(frame)
"""
width, height = 320, 240
#HUD = Image.new()
HUD = Image.open("HUDs/preview_320x240.png")
for x in range(width):
for y in range(height):
red1, green1, blue1 = frame[x,y]
red2, green2, blue2, alpha = HUD.getpixel((x,y))
if red2 != 0 and green2 != 0 and blue2 != 0:
frame[x,y] = red2, green2, blue2
"""
| null | null | null | null |
[
0
] |
2,278 |
02b20c3f5941873dfd22a7fbedb825e66c613ace
|
Xeval[[1,2],:]
# *** Spyder Python Console History Log ***
Xeval[:,:]
optfunc.P(Xeval[:,:])
optfunc.P(Xeval)
optfunc.P(Xeval[[0,1,2,3,4],:])
optfunc.P(Xeval[[0,1,],:])
optfunc.P(Xeval[[0,1],:])
optfunc.P(Xeval[[0,1,2,3],:])
optfunc.P(Xeval[[0,1,2,3,4],:])
optfunc.P(Xeval[[0,1,2],:])
Xeval[[0,1,2,3,4],:]
Xeval[[0,1,2,3],:]
Xeval[[0,1,2],:]
optfunc.gp_list[0]
optfunc.gp_list[0](Xeval)
optfunc.gp_list[0].preduct(Xeval)
optfunc.gp_list[0].predict(Xeval)
optfunc.gp_list[0].predict(Xeval[[0,1,2,3,4],:])
optfunc.gp_list[0].predict(Xeval[[0,1,2,3],:])
optfunc.gp_list[0].predict(Xeval[[0,1,2],:])
optfunc.P(Xeval[[0,1,2,3,4],:])
optfunc.ypred
optfunc.P(Xeval[[0,1,2],:])
optfunc.ypred
optfunc.P(Xeval[[0,1,2,3,4],:])
optfunc.MSE
optfunc.sigma
optfunc.P(Xeval[[0,1,2],:])
optfunc.sigma
optfunc.gp_list[0].predict(Xeval[[0,1,2],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0,1,2,3],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0,1,2],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0,1,2,0],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0,0,0,0],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0,0,0],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0,0],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0,1],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0,1,1],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0,1,1,1],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[zeros(1,5)],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[np.zeros(1,5)],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[np.zeros(1,5),:],eval_MSE=True)
np.zeros(1,5)
np.zeros(5)
optfunc.gp_list[0].predict(Xeval[np.zeros(15),:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[np.zeros(5),:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[np.zeros(5)],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0,0,0,0,0,0,0,0,0,0,0],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],:],eval_MSE=True)
Xeval[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],:]
optfunc.gp_list[0].predict(Xeval[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0,1,2,3],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0,1,2],:],eval_MSE=True)
Xeval[[0,1,2,3]]
Xeval[[0,1,2]]
Xeval[[0,1,2],:]
optfunc.gp_list[0].predict(Xeval[[0,0],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0,0,0],:],eval_MSE=True)
optfunc.gp_list[0].predict(Xeval[[0,0,0,0],:],eval_MSE=True)
optfunc.gp_list[0].predict([0.5,0.5],:],eval_MSE=True)
optfunc.gp_list[0].predict([0.5,0.5],eval_MSE=True)
optfunc.gp_list[0].predict([[0.5,0.5]],eval_MSE=True)
optfunc.gp_list[0].predict([[0.5,0.49]],eval_MSE=True)
optfunc.gp_list[0].predict([[0.5,0.48]],eval_MSE=True)
optfunc.gp_list[0].predict([[0.5,0.495]],eval_MSE=True)
optfunc.gp_list[0].predict([[0.5,0.499]],eval_MSE=True)
optfunc.gp_list[0].predict([[0.5,0.4999]],eval_MSE=True)
optfunc.gp_list[0].predict([[0.5,0.49999]],eval_MSE=True)
optfunc.gp_list[0].predict([[0.5,0.5]],eval_MSE=True)
optfunc.gp_list[0].predict([[0.5,0.5001]],eval_MSE=True)
for i in range(0,100)
for i in range(0,100): y[i],s[i] = optfunc.gp_list[0].predict([[0.5,i*0.01]],eval_MSE=True)
y = []
s = []
for i in range(0,100): y[i],s[i] = optfunc.gp_list[0].predict([[0.5,i*0.01]],eval_MSE=True)
for i in range(0,100): optfunc.gp_list[0].predict([[0.5,i*0.01]],eval_MSE=True)
for i in range(0,100): a, b = optfunc.gp_list[0].predict([[0.5,i*0.01]],eval_MSE=True); y = np.r_[y,a]; s = np.r_[s,b]
y
optfunc.gp_list[0]
runfile('C:/Users/b4/.spyder2-py3/PEIOPT.py', wdir='C:/Users/b4/.spyder2-py3')
y = []
s = []
for i in range(0,100): a, b = optfunc.gp_list[0].predict([[0.5,i*0.01]],eval_MSE=True); y = np.r_[y,a]; s = np.r_[s,b]
y = []
s = []
for i in range(0,200): a, b = optfunc.gp_list[0].predict([[0.5,i*0.01]],eval_MSE=True); y = np.r_[y,a]; s = np.r_[s,b]
y = []
s = []
for i in range(0,200): a, b = optfunc.gp_list[0].predict([[1.,i*0.01]],eval_MSE=True); y = np.r_[y,a]; s = np.r_[s,b]
runfile('C:/Users/b4/.spyder2-py3/PEIOPT.py', wdir='C:/Users/b4/.spyder2-py3')
y = []
s = []
for i in range(0,200): a, b = optfunc.gp_list[0].predict([[1.,i*0.01]],eval_MSE=True); y = np.r_[y,a]; s = np.r_[s,b]
runfile('C:/Users/b4/.spyder2-py3/PEIOPT.py', wdir='C:/Users/b4/.spyder2-py3')
y = []
s = []
for i in range(0,200): a, b = optfunc.gp_list[0].predict([[1.,i*0.01]],eval_MSE=True); y = np.r_[y,a]; s = np.r_[s,b]
runfile('C:/Users/b4/.spyder2-py3/PEIOPT.py', wdir='C:/Users/b4/.spyder2-py3')
##---(Wed Mar 23 11:14:55 2016)---
runfile('C:/Users/b4/.spyder2-py3/PEIOPT.py', wdir='C:/Users/b4/.spyder2-py3')
| null | null | null | null |
[
0
] |
2,279 |
2ff398e38b49d95fdc8a36a08eeb5950aaea1bc9
|
<mask token>
def on_connection_resumed(connection, return_code, session_present, **kwargs):
print('Connection resumed. return_code: {} session_present: {}'.format(
return_code, session_present))
<mask token>
|
<mask token>
def on_connection_interrupted(connection, error, **kwargs):
print('Connection interrupted. error: {}'.format(error))
def on_connection_resumed(connection, return_code, session_present, **kwargs):
print('Connection resumed. return_code: {} session_present: {}'.format(
return_code, session_present))
<mask token>
|
<mask token>
def on_connection_interrupted(connection, error, **kwargs):
print('Connection interrupted. error: {}'.format(error))
def on_connection_resumed(connection, return_code, session_present, **kwargs):
print('Connection resumed. return_code: {} session_present: {}'.format(
return_code, session_present))
if __name__ == '__main__':
io.init_logging(log_level=io.LogLevel.Trace, file_name='stderr')
cmdData = CommandLineUtils.parse_sample_input_pkcs12_connect()
proxy_options = None
if cmdData.input_proxy_host is not None and cmdData.input_proxy_port != 0:
proxy_options = http.HttpProxyOptions(host_name=cmdData.
input_proxy_host, port=cmdData.input_proxy_port)
mqtt_connection = mqtt_connection_builder.mtls_with_pkcs12(endpoint=
cmdData.input_endpoint, port=cmdData.input_port, pkcs12_filepath=
cmdData.input_pkcs12_file, pkcs12_password=cmdData.
input_pkcs12_password, on_connection_interrupted=
on_connection_interrupted, on_connection_resumed=
on_connection_resumed, client_id=cmdData.input_clientId,
clean_session=False, keep_alive_secs=30, http_proxy_options=
proxy_options)
if not cmdData.input_is_ci:
print(
f"Connecting to {cmdData.input_endpoint} with client ID '{cmdData.input_clientId}'..."
)
else:
print('Connecting to endpoint with client ID')
connect_future = mqtt_connection.connect()
connect_future.result()
print('Connected!')
print('Disconnecting...')
disconnect_future = mqtt_connection.disconnect()
disconnect_future.result()
print('Disconnected!')
|
from awscrt import http, io
from awsiot import mqtt_connection_builder
from utils.command_line_utils import CommandLineUtils
def on_connection_interrupted(connection, error, **kwargs):
print('Connection interrupted. error: {}'.format(error))
def on_connection_resumed(connection, return_code, session_present, **kwargs):
print('Connection resumed. return_code: {} session_present: {}'.format(
return_code, session_present))
if __name__ == '__main__':
io.init_logging(log_level=io.LogLevel.Trace, file_name='stderr')
cmdData = CommandLineUtils.parse_sample_input_pkcs12_connect()
proxy_options = None
if cmdData.input_proxy_host is not None and cmdData.input_proxy_port != 0:
proxy_options = http.HttpProxyOptions(host_name=cmdData.
input_proxy_host, port=cmdData.input_proxy_port)
mqtt_connection = mqtt_connection_builder.mtls_with_pkcs12(endpoint=
cmdData.input_endpoint, port=cmdData.input_port, pkcs12_filepath=
cmdData.input_pkcs12_file, pkcs12_password=cmdData.
input_pkcs12_password, on_connection_interrupted=
on_connection_interrupted, on_connection_resumed=
on_connection_resumed, client_id=cmdData.input_clientId,
clean_session=False, keep_alive_secs=30, http_proxy_options=
proxy_options)
if not cmdData.input_is_ci:
print(
f"Connecting to {cmdData.input_endpoint} with client ID '{cmdData.input_clientId}'..."
)
else:
print('Connecting to endpoint with client ID')
connect_future = mqtt_connection.connect()
connect_future.result()
print('Connected!')
print('Disconnecting...')
disconnect_future = mqtt_connection.disconnect()
disconnect_future.result()
print('Disconnected!')
|
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0.
from awscrt import http, io
from awsiot import mqtt_connection_builder
from utils.command_line_utils import CommandLineUtils
# This sample shows how to create a MQTT connection using a certificate file and key file.
# This sample is intended to be used as a reference for making MQTT connections.
# Callback when connection is accidentally lost.
def on_connection_interrupted(connection, error, **kwargs):
print("Connection interrupted. error: {}".format(error))
# Callback when an interrupted connection is re-established.
def on_connection_resumed(connection, return_code, session_present, **kwargs):
print("Connection resumed. return_code: {} session_present: {}".format(return_code, session_present))
if __name__ == '__main__':
io.init_logging(log_level=io.LogLevel.Trace, file_name="stderr")
# cmdData is the arguments/input from the command line placed into a single struct for
# use in this sample. This handles all of the command line parsing, validating, etc.
# See the Utils/CommandLineUtils for more information.
cmdData = CommandLineUtils.parse_sample_input_pkcs12_connect()
# Create the proxy options if the data is present in cmdData
proxy_options = None
if cmdData.input_proxy_host is not None and cmdData.input_proxy_port != 0:
proxy_options = http.HttpProxyOptions(
host_name=cmdData.input_proxy_host,
port=cmdData.input_proxy_port)
# Create a MQTT connection from the command line data
mqtt_connection = mqtt_connection_builder.mtls_with_pkcs12(
endpoint=cmdData.input_endpoint,
port=cmdData.input_port,
pkcs12_filepath=cmdData.input_pkcs12_file,
pkcs12_password=cmdData.input_pkcs12_password,
on_connection_interrupted=on_connection_interrupted,
on_connection_resumed=on_connection_resumed,
client_id=cmdData.input_clientId,
clean_session=False,
keep_alive_secs=30,
http_proxy_options=proxy_options)
if not cmdData.input_is_ci:
print(f"Connecting to {cmdData.input_endpoint} with client ID '{cmdData.input_clientId}'...")
else:
print("Connecting to endpoint with client ID")
connect_future = mqtt_connection.connect()
# Future.result() waits until a result is available
connect_future.result()
print("Connected!")
# Disconnect
print("Disconnecting...")
disconnect_future = mqtt_connection.disconnect()
disconnect_future.result()
print("Disconnected!")
|
[
1,
2,
3,
4,
5
] |
2,280 |
4b44f4343da1677b5436ec2b153e573fda3c0cee
|
<mask token>
def read_input_RS():
low = np.loadtxt('LowerArray.csv', delimiter=',', skiprows=1)
lower_bound = np.ravel(low)
upper_bound = np.ravel(np.transpose(np.loadtxt('UpperArray.csv',
delimiter=',', skiprows=1)))
return lower_bound, upper_bound, low[0, :].size
<mask token>
def independent_probability():
probability_assignment = np.loadtxt('ProbabilityAssignment.csv',
delimiter=',', skiprows=1)
return probability_assignment
<mask token>
|
<mask token>
def read_input_RS():
low = np.loadtxt('LowerArray.csv', delimiter=',', skiprows=1)
lower_bound = np.ravel(low)
upper_bound = np.ravel(np.transpose(np.loadtxt('UpperArray.csv',
delimiter=',', skiprows=1)))
return lower_bound, upper_bound, low[0, :].size
def generate_combinations(lower, upper, n):
lower_input = itt.combinations(lower, n)
upper_input = np.array(list(itt.product(upper, repeat=n)))
return lower_input, upper_input
def independent_probability():
probability_assignment = np.loadtxt('ProbabilityAssignment.csv',
delimiter=',', skiprows=1)
return probability_assignment
<mask token>
|
<mask token>
def read_input_RS():
low = np.loadtxt('LowerArray.csv', delimiter=',', skiprows=1)
lower_bound = np.ravel(low)
upper_bound = np.ravel(np.transpose(np.loadtxt('UpperArray.csv',
delimiter=',', skiprows=1)))
return lower_bound, upper_bound, low[0, :].size
def generate_combinations(lower, upper, n):
lower_input = itt.combinations(lower, n)
upper_input = np.array(list(itt.product(upper, repeat=n)))
return lower_input, upper_input
def independent_probability():
probability_assignment = np.loadtxt('ProbabilityAssignment.csv',
delimiter=',', skiprows=1)
return probability_assignment
if __name__ == '__main__':
a, b, r = read_input_RS()
d, e = generate_combinations(a, b, r)
print(b)
print(e)
np.savetxt('test.out', e, delimiter=',')
|
<mask token>
import itertools as itt
import numpy as np
import matplotlib.pyplot as plt
def read_input_RS():
low = np.loadtxt('LowerArray.csv', delimiter=',', skiprows=1)
lower_bound = np.ravel(low)
upper_bound = np.ravel(np.transpose(np.loadtxt('UpperArray.csv',
delimiter=',', skiprows=1)))
return lower_bound, upper_bound, low[0, :].size
def generate_combinations(lower, upper, n):
lower_input = itt.combinations(lower, n)
upper_input = np.array(list(itt.product(upper, repeat=n)))
return lower_input, upper_input
def independent_probability():
probability_assignment = np.loadtxt('ProbabilityAssignment.csv',
delimiter=',', skiprows=1)
return probability_assignment
if __name__ == '__main__':
a, b, r = read_input_RS()
d, e = generate_combinations(a, b, r)
print(b)
print(e)
np.savetxt('test.out', e, delimiter=',')
|
# Code Rodrigo
'''
This script, basically generates all he possible combinations
to be analyzed according to the Dempster Shafer Theory.
It requires to define beforehand, the combination of variables
that lead to the higher and lower bound for a given combination
of random sets, via the sensitivity analysis
'''
import itertools as itt
import numpy as np
import matplotlib.pyplot as plt
def read_input_RS ():
low=np.loadtxt('LowerArray.csv', delimiter=',', skiprows=1)
lower_bound = np.ravel(low)
upper_bound = (np.ravel(np.transpose(np.loadtxt('UpperArray.csv',
delimiter=',', skiprows=1))))
return lower_bound, upper_bound, low[0,:].size
def generate_combinations (lower, upper, n):
lower_input = itt.combinations(lower, n)
upper_input = np.array(list(itt.product(upper, repeat=n)))
return lower_input, upper_input,
def independent_probability ():
probability_assignment = (np.loadtxt('ProbabilityAssignment.csv',
delimiter=',', skiprows=1))
return probability_assignment
if __name__ == "__main__":
a,b,r=read_input_RS ()
#c=a[0,:].size
d,e=generate_combinations (a,b,r)
print(b)
print(e)
np.savetxt('test.out', e, delimiter=',')
#b=read_input_RS ()
#c=generate_combinations (a,b)
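# To make the combination step concrete, a toy run of itertools.product on
# two illustrative upper bounds (not read from the CSV files):
#
# >>> list(itt.product([1.0, 2.0], repeat=2))
# [(1.0, 1.0), (1.0, 2.0), (2.0, 1.0), (2.0, 2.0)]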
|
[
2,
3,
4,
5,
6
] |
2,281 |
6ece524c82521b175cc7791e22c8249dd24dc714
|
import datetime
import matplotlib.pyplot as plt
import numpy as np
import statsmodels.api as sm
import xlrd
from pandas import *
from xlrd import xldate
#since I messed up when first scraping the data, I have the dates and viewcounts in separate files
#need to create a dictionary of 'author-title':[viewcount, date]
viewcount_dict = {}
#to get the viewcount
workbook = xlrd.open_workbook('ted_info.xlsx')
worksheet = workbook.sheet_by_name('Sheet1')
num_rows = worksheet.nrows - 1
num_cells = worksheet.ncols - 1
curr_row = 0
while curr_row < num_rows:
curr_row += 1
row = worksheet.row(curr_row)
print 'Row:', curr_row
author_name = worksheet.cell_value(curr_row, 0)
talk_title = worksheet.cell_value(curr_row, 3)
viewcount = worksheet.cell_value(curr_row, 5)
if author_name + ":" + talk_title in viewcount_dict:
print author_name + ":" + talk_title
raise "error in datafile, there is a duplicate"
viewcount_dict[author_name + ":" + talk_title] = [viewcount]
#the following prints each cell value and cell type
#curr_cell = -1
#while curr_cell < num_cells:
#curr_cell += 1
# Cell Types: 0=Empty, 1=Text, 2=Number, 3=Date, 4=Boolean, 5=Error, 6=Blank
#cell_type = worksheet.cell_type(curr_row, curr_cell)
#cell_value = worksheet.cell_value(curr_row, curr_cell)
#print ' ', cell_type, ':', cell_value
#to get the year
workbook = xlrd.open_workbook('ted_info_name_title_date.xlsx')
worksheet = workbook.sheet_by_name('Sheet1')
num_rows = worksheet.nrows - 1
num_cells = worksheet.ncols - 1
curr_row = 0
while curr_row < num_rows:
curr_row += 1
row = worksheet.row(curr_row)
author_name = worksheet.cell_value(curr_row, 0)
talk_title = worksheet.cell_value(curr_row, 1)
date = worksheet.cell_value(curr_row, 2)
date_as_datetime = xldate.xldate_as_tuple(date, workbook.datemode)
year, month, day, hour, minute, second = date_as_datetime
print year
try:
viewcount_dict[author_name + ":" + talk_title].append(year)
except:
#author/title not in dictionary (because it was one of the weirdly formatted pages)
print row
continue
print len(viewcount_dict)
year_viewcount_dict = {}
for year in range(2006,2016):
#create a dictionary for each year due to the input of the violin plot
year_viewcount_dict[year] = {}
year_viewcount_dict["All"] = {} #also have one that includes all years
for key, value in viewcount_dict.iteritems():
#print value
try:
year = value[1]
except:
continue
#this means that it did not have a year, likely because that author/talk was not in the date file
viewcount = value[0]
year_viewcount_dict[year][len(year_viewcount_dict[value[1]])] = viewcount
year_viewcount_dict["All"][len(year_viewcount_dict[value[1]])] = viewcount
list_of_counts = [Series(year_viewcount_dict[year]) for year in ["All"] + range(2006,2016)] #turn into data type required for violinplot
labels = ["All"] + [str(year) for year in range(2006, 2016)] #note that they started in June of 2006 and that this data only invludes up to april 2015
plt.rcParams['figure.subplot.bottom'] = 0.23 # keep labels visible
fig = plt.figure()
ax = fig.add_subplot(111)
sm.graphics.violinplot(list_of_counts, ax=ax, labels=labels,
plot_opts={'cutoff_val':5, 'cutoff_type':'abs',
'label_fontsize':'small'})
ax.set_xlabel("Year")
ax.set_yscale("log") #set to log scale because the range of viewcounts
ax.set_ylabel("Viewcount of talks (log scale)")
#plt.show()
plt.savefig('violinplot_viewcounts.png', bbox_inches='tight')
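# For reference, sm.graphics.violinplot accepts any sequence of pandas Series;
# a toy call with made-up values (not the TED viewcounts) would look like:
#
# toy = [Series([10, 200, 3000]), Series([5, 50, 500, 5000])]
# fig2, ax2 = plt.subplots()
# sm.graphics.violinplot(toy, ax=ax2, labels=["A", "B"])
# ax2.set_yscale("log")
# fig2.savefig("toy_violin.png")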
| null | null | null | null |
[
0
] |
2,282 |
bf49893fee79b0c3e34340cf1633c1797ce1bf41
|
#MenuTitle: Check for open paths in selected glyphs
"""
Checks for open paths in selected glyphs (or all glyphs if no selection).
Output appears in Macro Window (Option-Command-M).
"""
# FIXME: test with masters and instances -- may not work
Font = Glyphs.font
Doc = Glyphs.currentDocument
selectedGlyphs = [ x.parent for x in Doc.selectedLayers() ]
selectedNames = [ x.name for x in selectedGlyphs ]
nopenpaths = 0
checkedGlyphs = []
print "Font: ", Font.familyName
if not selectedGlyphs:
selectedGlyphs = Font.glyphs
selectedNames = "all glyphs."
for glyph in selectedGlyphs:
# assumption: glyph layer 0 without paths means glyph doesn't have any drawing in it, yet
if glyph.layers[0].paths:
checkedGlyphs.append(glyph.name)
layers = glyph.layers
for layer in layers:
paths = layer.paths
for path in paths:
if not path.closed:
print "OPEN PATH: %s (%s)" % (layer.parent.name, layer.parent.unicode), "[layer: %s]" % layer.name, path
nopenpaths += 1
if not nopenpaths:
print "No open paths in %d glyphs:" % len(checkedGlyphs), checkedGlyphs
else:
print "Total open paths: %d out of %d checked glyphs." % (nopenpaths, len(checkedGlyphs))
| null | null | null | null |
[
0
] |
2,283 |
1fad591fde707c73bd52aa8518828c8b8be9cd32
|
<mask token>
class Article:
<mask token>
title: str
target: str
g: float
f: float
parent: typing.Union[Article, Type(None)]
heuristic: Callable[[str, str], float]
def __init__(self, title: str, target: str, parent: typing.Union[
Article, Type(None)], heuristic: Callable[[str, str], float]):
"""
Initializes based on [urls/titles/nodes]
"""
self.title = title
self.target = target
self.heuristic = heuristic
if parent:
self.parent = parent
self.g = parent.g + 1
else:
self.parent = None
self.g = 0
h = self.heuristic(title, target)
self.f = self.g + h
def get_children(self, cont: typing.Union[str, Type(None)]) ->List[str]:
"""
Return list of connected (children) article object using the wikipedia API functions.
"""
s = requests.Session()
url = 'https://en.wikipedia.org/w/api.php'
if cont is None:
params = {'action': 'query', 'format': 'json', 'titles': self.
title, 'prop': 'links', 'pllimit': 'max'}
else:
params = {'action': 'query', 'format': 'json', 'titles': self.
title, 'prop': 'links', 'pllimit': 'max', 'plcontinue': cont}
titles_so_far = []
r = s.get(url=url, params=params)
data = r.json()
pages = data['query']['pages']
for k, v in pages.items():
if 'links' not in v:
return []
for l in v['links']:
titles_so_far.append(l['title'])
if 'batchcomplete' in data:
return titles_so_far
else:
contHolder = data['continue']['plcontinue']
titles_so_far.extend(self.get_children(contHolder))
return titles_so_far
def get_first_x(self, lst: List, x: int) ->List:
lst_so_far = []
for i in range(x):
lst_so_far.append(lst[i])
return lst_so_far
<mask token>
<mask token>
def __eq__(self, other):
return compare_titles(self.title, other.title)
def __ne__(self, other):
return not compare_titles(self.title, other.title)
<mask token>
<mask token>
class PQ:
"""
MinHeap implementation of a priority queue for A* search.
"""
heap = []
def __init__(self):
self.heap = []
def insert(self, to_insert: Article) ->None:
"""
Insert new element in Priority queue
"""
heapq.heappush(self.heap, to_insert)
def pop(self) ->Article:
"""
pops minimum element from priority queue
"""
return heapq.heappop(self.heap)
<mask token>
|
<mask token>
class Article:
<mask token>
title: str
target: str
g: float
f: float
parent: typing.Union[Article, Type(None)]
heuristic: Callable[[str, str], float]
def __init__(self, title: str, target: str, parent: typing.Union[
Article, Type(None)], heuristic: Callable[[str, str], float]):
"""
Initializes based on [urls/titles/nodes]
"""
self.title = title
self.target = target
self.heuristic = heuristic
if parent:
self.parent = parent
self.g = parent.g + 1
else:
self.parent = None
self.g = 0
h = self.heuristic(title, target)
self.f = self.g + h
def get_children(self, cont: typing.Union[str, Type(None)]) ->List[str]:
"""
Return list of connected (children) article object using the wikipedia API functions.
"""
s = requests.Session()
url = 'https://en.wikipedia.org/w/api.php'
if cont is None:
params = {'action': 'query', 'format': 'json', 'titles': self.
title, 'prop': 'links', 'pllimit': 'max'}
else:
params = {'action': 'query', 'format': 'json', 'titles': self.
title, 'prop': 'links', 'pllimit': 'max', 'plcontinue': cont}
titles_so_far = []
r = s.get(url=url, params=params)
data = r.json()
pages = data['query']['pages']
for k, v in pages.items():
if 'links' not in v:
return []
for l in v['links']:
titles_so_far.append(l['title'])
if 'batchcomplete' in data:
return titles_so_far
else:
contHolder = data['continue']['plcontinue']
titles_so_far.extend(self.get_children(contHolder))
return titles_so_far
def get_first_x(self, lst: List, x: int) ->List:
lst_so_far = []
for i in range(x):
lst_so_far.append(lst[i])
return lst_so_far
def __lt__(self, other):
return self.f < other.f
def __le__(self, other):
return self.f <= other.f
def __eq__(self, other):
return compare_titles(self.title, other.title)
def __ne__(self, other):
return not compare_titles(self.title, other.title)
<mask token>
def __ge__(self, other):
return self.f >= other.f
class PQ:
"""
MinHeap implementation of a priority queue for A* search.
"""
heap = []
def __init__(self):
self.heap = []
def insert(self, to_insert: Article) ->None:
"""
Insert new element in Priority queue
"""
heapq.heappush(self.heap, to_insert)
def pop(self) ->Article:
"""
pops minimum element from priority queue
"""
return heapq.heappop(self.heap)
<mask token>
|
<mask token>
class Article:
"""
This is the article class that represents each Wikipedia article.
Instance Variables:
- title: str that represents the title of the article
- target: the final target given by the user
    - g: cost so far (number of link hops from the source article)
    - f: estimated total cost: g plus the heuristic estimate h(title, target)
"""
title: str
target: str
g: float
f: float
parent: typing.Union[Article, Type(None)]
heuristic: Callable[[str, str], float]
def __init__(self, title: str, target: str, parent: typing.Union[
Article, Type(None)], heuristic: Callable[[str, str], float]):
"""
Initializes based on [urls/titles/nodes]
"""
self.title = title
self.target = target
self.heuristic = heuristic
if parent:
self.parent = parent
self.g = parent.g + 1
else:
self.parent = None
self.g = 0
h = self.heuristic(title, target)
self.f = self.g + h
def get_children(self, cont: typing.Union[str, Type(None)]) ->List[str]:
"""
Return list of connected (children) article object using the wikipedia API functions.
"""
s = requests.Session()
url = 'https://en.wikipedia.org/w/api.php'
if cont is None:
params = {'action': 'query', 'format': 'json', 'titles': self.
title, 'prop': 'links', 'pllimit': 'max'}
else:
params = {'action': 'query', 'format': 'json', 'titles': self.
title, 'prop': 'links', 'pllimit': 'max', 'plcontinue': cont}
titles_so_far = []
r = s.get(url=url, params=params)
data = r.json()
pages = data['query']['pages']
for k, v in pages.items():
if 'links' not in v:
return []
for l in v['links']:
titles_so_far.append(l['title'])
if 'batchcomplete' in data:
return titles_so_far
else:
contHolder = data['continue']['plcontinue']
titles_so_far.extend(self.get_children(contHolder))
return titles_so_far
def get_first_x(self, lst: List, x: int) ->List:
lst_so_far = []
for i in range(x):
lst_so_far.append(lst[i])
return lst_so_far
def __lt__(self, other):
return self.f < other.f
def __le__(self, other):
return self.f <= other.f
def __eq__(self, other):
return compare_titles(self.title, other.title)
def __ne__(self, other):
return not compare_titles(self.title, other.title)
def __gt__(self, other):
return self.f > other.f
def __ge__(self, other):
return self.f >= other.f
class PQ:
"""
MinHeap implementation of a priority queue for A* search.
"""
heap = []
def __init__(self):
self.heap = []
def insert(self, to_insert: Article) ->None:
"""
Insert new element in Priority queue
"""
heapq.heappush(self.heap, to_insert)
def pop(self) ->Article:
"""
pops minimum element from priority queue
"""
return heapq.heappop(self.heap)
<mask token>
|
<mask token>
def heuristic_2(a: str, b: str) ->float:
"""
Returns predicted cost (distance) from two titles a to b, through the cosine similarity of two generated
term-document matrices of the article. The heuristic in this case is purely semantic.
The HTML enriched query for the JSON is:
https://en.wikipedia.org/w/api.php?action=query&titles=TITLE&prop=extracts&format=json&exintro=1
"""
if get_intro(a) == '' or get_intro(b) == '':
return 2
else:
corpus = [get_intro(a), get_intro(b)]
vect = TfidfVectorizer()
mat = vect.fit_transform(corpus)
return abs(1 - cosine_similarity(mat[0:1], mat)[0][1]) * 2
class Article:
"""
This is the article class that represents each Wikipedia article.
Instance Variables:
- title: str that represents the title of the article
- target: the final target given by the user
    - g: cost so far (number of link hops from the source article)
    - f: estimated total cost: g plus the heuristic estimate h(title, target)
"""
title: str
target: str
g: float
f: float
parent: typing.Union[Article, Type(None)]
heuristic: Callable[[str, str], float]
def __init__(self, title: str, target: str, parent: typing.Union[
Article, Type(None)], heuristic: Callable[[str, str], float]):
"""
Initializes based on [urls/titles/nodes]
"""
self.title = title
self.target = target
self.heuristic = heuristic
if parent:
self.parent = parent
self.g = parent.g + 1
else:
self.parent = None
self.g = 0
h = self.heuristic(title, target)
self.f = self.g + h
def get_children(self, cont: typing.Union[str, Type(None)]) ->List[str]:
"""
Return list of connected (children) article object using the wikipedia API functions.
"""
s = requests.Session()
url = 'https://en.wikipedia.org/w/api.php'
if cont is None:
params = {'action': 'query', 'format': 'json', 'titles': self.
title, 'prop': 'links', 'pllimit': 'max'}
else:
params = {'action': 'query', 'format': 'json', 'titles': self.
title, 'prop': 'links', 'pllimit': 'max', 'plcontinue': cont}
titles_so_far = []
r = s.get(url=url, params=params)
data = r.json()
pages = data['query']['pages']
for k, v in pages.items():
if 'links' not in v:
return []
for l in v['links']:
titles_so_far.append(l['title'])
if 'batchcomplete' in data:
return titles_so_far
else:
contHolder = data['continue']['plcontinue']
titles_so_far.extend(self.get_children(contHolder))
return titles_so_far
def get_first_x(self, lst: List, x: int) ->List:
lst_so_far = []
for i in range(x):
lst_so_far.append(lst[i])
return lst_so_far
def __lt__(self, other):
return self.f < other.f
def __le__(self, other):
return self.f <= other.f
def __eq__(self, other):
return compare_titles(self.title, other.title)
def __ne__(self, other):
return not compare_titles(self.title, other.title)
def __gt__(self, other):
return self.f > other.f
def __ge__(self, other):
return self.f >= other.f
class PQ:
"""
MinHeap implementation of a priority queue for A* search.
"""
heap = []
def __init__(self):
self.heap = []
def insert(self, to_insert: Article) ->None:
"""
Insert new element in Priority queue
"""
heapq.heappush(self.heap, to_insert)
def pop(self) ->Article:
"""
pops minimum element from priority queue
"""
return heapq.heappop(self.heap)
<mask token>
|
from __future__ import annotations
import typing
import requests
import heapq
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.feature_extraction.text import TfidfVectorizer
from bs4 import BeautifulSoup
from wikiAPI import get_JSON, get_intro, compare_titles
from typing import List, Type, Callable
def heuristic_0(a: str, b: str) -> float:
return 2
def heuristic_1(a: str, b: str) -> float:
"""
Returns predicted cost (distance) from two titles a to b, through the cosine similarity of two generated
term-document matrices of the article. The heuristic in this case is purely semantic.
The HTML enriched query for the JSON is:
https://en.wikipedia.org/w/api.php?action=parse&page=TITLE&prop=text&formatversion=2&format=json
"""
query = "https://en.wikipedia.org/w/api.php?action=parse&page=TEMP&prop=text&formatversion=2&format=json"
startTitle = (a.replace(" ", "%20")).replace("&", "%26")
endTitle = (b.replace(" ", "%20")).replace("&", "%26")
startURL = (query.replace("TEMP", startTitle))
endURL = (query.replace("TEMP", endTitle))
# text processing using SOUP
initialSoup = BeautifulSoup(get_JSON(startURL)['parse']['text'], 'html.parser')
finalSoup = BeautifulSoup(get_JSON(endURL)['parse']['text'], 'html.parser')
# generate term-document matrices
corpus = [initialSoup.get_text().replace('\n', ' '), finalSoup.get_text().replace('\n', ' ')]
vect = TfidfVectorizer()
mat = vect.fit_transform(corpus)
# return cosine similarity
return abs(1 - cosine_similarity(mat[0:1], mat)[0][1]) * 2
def heuristic_2(a: str, b: str) -> float:
"""
Returns predicted cost (distance) from two titles a to b, through the cosine similarity of two generated
term-document matrices of the article. The heuristic in this case is purely semantic.
The HTML enriched query for the JSON is:
https://en.wikipedia.org/w/api.php?action=query&titles=TITLE&prop=extracts&format=json&exintro=1
"""
# generate term-document matrices
if get_intro(a) == "" or get_intro(b) == "":
return 2
else:
corpus = [get_intro(a), get_intro(b)]
vect = TfidfVectorizer()
mat = vect.fit_transform(corpus)
# return cosine similarity
return abs(1 - cosine_similarity(mat[0:1], mat)[0][1]) * 2
# def semantic_similarity(a: str, b: str) -> float:
# web_model = WebBertSimilarity(device='cpu', batch_size=10)
# return web_model.predict([(a, b)])
class Article:
"""
This is the article class that represents each Wikipedia article.
Instance Variables:
- title: str that represents the title of the article
- target: the final target given by the user
    - g: cost so far (number of link hops from the source article)
    - f: estimated total cost: g plus the heuristic estimate h(title, target)
"""
title: str
target: str
g: float
f: float
parent: typing.Union[Article, Type(None)]
heuristic: Callable[[str, str], float]
def __init__(self, title: str, target: str, parent: typing.Union[Article, Type(None)], heuristic: Callable[[str, str], float] ):
"""
Initializes based on [urls/titles/nodes]
"""
self.title = title
self.target = target
self.heuristic = heuristic
if parent:
self.parent = parent
self.g = parent.g + 1
else:
self.parent = None
self.g = 0
h = self.heuristic(title, target)
self.f = self.g + h
def get_children(self, cont: typing.Union[str, Type(None)]) -> List[str]:
"""
Return list of connected (children) article object using the wikipedia API functions.
"""
s = requests.Session()
url = "https://en.wikipedia.org/w/api.php"
if cont is None:
params = {
"action": "query",
"format": "json",
"titles": self.title,
"prop": "links",
"pllimit": "max"
}
else:
params = {
"action": "query",
"format": "json",
"titles": self.title,
"prop": "links",
"pllimit": "max",
"plcontinue": cont
}
titles_so_far = []
r = s.get(url=url, params=params)
data = r.json()
pages = data["query"]["pages"]
for k, v in pages.items():
if "links" not in v:
return []
for l in v["links"]:
titles_so_far.append(l["title"])
if "batchcomplete" in data:
return titles_so_far
else:
contHolder = data["continue"]["plcontinue"]
titles_so_far.extend(self.get_children(contHolder))
return titles_so_far
# return [Article(child, self.target, self.title) for child in titles_so_far]
def get_first_x(self, lst: List, x: int) -> List:
lst_so_far = []
for i in range(x):
lst_so_far.append(lst[i])
return lst_so_far
def __lt__(self, other):
return self.f < other.f
def __le__(self, other):
return self.f <= other.f
def __eq__(self, other):
return compare_titles(self.title, other.title)
def __ne__(self, other):
return not compare_titles(self.title, other.title)
def __gt__(self, other):
return self.f > other.f
def __ge__(self, other):
return self.f >= other.f
class PQ:
"""
MinHeap implementation of a priority queue for A* search.
"""
heap = []
def __init__(self):
self.heap = []
def insert(self, to_insert: Article) -> None:
"""
Insert new element in Priority queue
"""
heapq.heappush(self.heap, to_insert)
def pop(self) -> Article:
"""
pops minimum element from priority queue
"""
return heapq.heappop(self.heap)
def a_star(source: str, target: str, heuristic: Callable[[str, str], float]) -> list:
"""
Returns path from source to target using A* search algorithm.
"""
    visited: set = {source}  # set((source)) would build a set of the title's characters
cur: Article = Article(source, target, None, heuristic)
queue = PQ()
while not compare_titles(cur.title, target):
nexts = cur.get_children(None)
for next in nexts:
if next not in visited:
article = Article(next, target, cur, heuristic)
queue.insert(article)
visited.add(next)
print(article.f, article.title)
cur = queue.pop()
print("CUR:", cur.f, cur.title)
path = [cur.title]
while path[0] != source:
cur = cur.parent
path.insert(0, cur.title)
return path
# print(a_star("Dog", "Aardwolf", heuristic_2))
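# Self-contained sketch of the cosine-similarity heuristic idea, with toy
# strings in place of fetched article extracts:
#
# corpus = ["dogs are domesticated mammals",
#           "the aardwolf is an insectivorous mammal"]
# mat = TfidfVectorizer().fit_transform(corpus)
# sim = cosine_similarity(mat[0:1], mat)[0][1]
# print(abs(1 - sim) * 2)  # heuristic cost estimate in [0, 2]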
|
[
12,
15,
17,
18,
23
] |
2,284 |
a7d11f130e0d5d6c9b4ac7c5d3a804fb9f79b943
|
<mask token>
def get(i):
with open('晚安.txt', 'r', encoding='utf-8') as f:
line = f.readlines()[i]
return line
<mask token>
def main():
for i in range(3650):
send(i)
time.sleep(5)
<mask token>
|
<mask token>
def get(i):
with open('晚安.txt', 'r', encoding='utf-8') as f:
line = f.readlines()[i]
return line
def send(i):
myfriend = bot.friends().search('微信好友昵称')[0]
myfriend.send(get(i))
i += 1
def main():
for i in range(3650):
send(i)
time.sleep(5)
if __name__ == '__main__':
main()
|
<mask token>
bot = Bot(cache_path='wxpy.pkl')
def get(i):
with open('晚安.txt', 'r', encoding='utf-8') as f:
line = f.readlines()[i]
return line
def send(i):
myfriend = bot.friends().search('微信好友昵称')[0]
myfriend.send(get(i))
i += 1
def main():
for i in range(3650):
send(i)
time.sleep(5)
if __name__ == '__main__':
main()
|
import time
from wxpy import *
bot = Bot(cache_path='wxpy.pkl')
def get(i):
    with open('晚安.txt', 'r', encoding='utf-8') as f:  # 晚安.txt ("good night" messages, one per line)
line = f.readlines()[i]
return line
def send(i):
    myfriend = bot.friends().search('微信好友昵称')[0]  # placeholder: the target friend's WeChat nickname
myfriend.send(get(i))
i += 1
def main():
for i in range(3650):
send(i)
time.sleep(5)
if __name__ == '__main__':
main()
| null |
[
2,
4,
5,
6
] |
2,285 |
e49c5c6475a1210a9657d7bbd0490c8d20863718
|
<mask token>
class GlibConan(ConanFile):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
def build(self):
args = ['--disable-static']
autotools = AutoToolsBuildEnvironment(self)
autotools.configure(args=args, configure_dir=
f'{self.name}-{self.version}')
autotools.make()
autotools.install()
|
<mask token>
class GlibConan(ConanFile):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
def source(self):
tools.get(
f'ftp://ftp.gnome.org/pub/gnome/sources/glib/1.2/glib-{self.version}.tar.gz'
)
def build(self):
args = ['--disable-static']
autotools = AutoToolsBuildEnvironment(self)
autotools.configure(args=args, configure_dir=
f'{self.name}-{self.version}')
autotools.make()
autotools.install()
|
<mask token>
class GlibConan(ConanFile):
name = 'glib'
description = 'Common C routines used by Gtk+ and other libs'
license = 'LGPL'
settings = {'os': ['Linux'], 'arch': ['x86_64', 'armv8']}
build_requires = 'generators/1.0.0', 'autotools/1.0.0'
requires = 'glibc/[>=2.31]', 'sh/[>=]'
def source(self):
tools.get(
f'ftp://ftp.gnome.org/pub/gnome/sources/glib/1.2/glib-{self.version}.tar.gz'
)
def build(self):
args = ['--disable-static']
autotools = AutoToolsBuildEnvironment(self)
autotools.configure(args=args, configure_dir=
f'{self.name}-{self.version}')
autotools.make()
autotools.install()
|
from conans import *
class GlibConan(ConanFile):
name = 'glib'
description = 'Common C routines used by Gtk+ and other libs'
license = 'LGPL'
settings = {'os': ['Linux'], 'arch': ['x86_64', 'armv8']}
build_requires = 'generators/1.0.0', 'autotools/1.0.0'
requires = 'glibc/[>=2.31]', 'sh/[>=]'
def source(self):
tools.get(
f'ftp://ftp.gnome.org/pub/gnome/sources/glib/1.2/glib-{self.version}.tar.gz'
)
def build(self):
args = ['--disable-static']
autotools = AutoToolsBuildEnvironment(self)
autotools.configure(args=args, configure_dir=
f'{self.name}-{self.version}')
autotools.make()
autotools.install()
|
from conans import *
class GlibConan(ConanFile):
name = "glib"
description = "Common C routines used by Gtk+ and other libs"
license = "LGPL"
settings = {"os": ["Linux"], "arch": ["x86_64", "armv8"]}
build_requires = (
"generators/1.0.0",
"autotools/1.0.0",
)
requires = (
"glibc/[>=2.31]",
"sh/[>=]",
)
def source(self):
tools.get(f"ftp://ftp.gnome.org/pub/gnome/sources/glib/1.2/glib-{self.version}.tar.gz")
def build(self):
args = [
"--disable-static",
]
autotools = AutoToolsBuildEnvironment(self)
autotools.configure(args=args, configure_dir=f"{self.name}-{self.version}")
autotools.make()
autotools.install()
|
[
2,
3,
4,
5,
6
] |
2,286 |
b041e9577af72d2bcee3dda0cc78fa12800d53bd
|
<mask token>
class TestPanel(wx.Panel):
def __init__(self, parent, log):
self.log = log
wx.Panel.__init__(self, parent, -1)
b1 = wx.Button(self, -1, 'Create and Show a MiniFrame', (50, 50))
self.Bind(wx.EVT_BUTTON, self.OnButton1, b1)
b2 = wx.Button(self, -1, 'Create and Show a MiniFrame With Effect',
(50, 100))
self.Bind(wx.EVT_BUTTON, self.OnButton2, b2)
self.list = wx.ListBox(self, choices=['wx.SHOW_EFFECT_NONE',
'wx.SHOW_EFFECT_ROLL_TO_LEFT', 'wx.SHOW_EFFECT_ROLL_TO_RIGHT',
'wx.SHOW_EFFECT_ROLL_TO_TOP', 'wx.SHOW_EFFECT_ROLL_TO_BOTTOM',
'wx.SHOW_EFFECT_SLIDE_TO_LEFT', 'wx.SHOW_EFFECT_SLIDE_TO_RIGHT',
'wx.SHOW_EFFECT_SLIDE_TO_TOP', 'wx.SHOW_EFFECT_SLIDE_TO_BOTTOM',
'wx.SHOW_EFFECT_BLEND', 'wx.SHOW_EFFECT_EXPAND'], pos=(50, 155),
size=(220, 160), style=wx.LB_SINGLE)
self.list.Select(0)
tt = 'Timeout in milliseconds\n0 is system default'
self.spin = wx.SpinCtrl(self, -1, tt, pos=(50, 130), style=wx.
ALIGN_LEFT)
self.spin.SetToolTip(wx.ToolTip(tt))
self.spin.SetRange(0, 5000)
self.spin.SetValue(0)
def OnButton1(self, evt):
win = MyMiniFrame(self, -1, 'This is a wx.MiniFrame', size=(350,
200), style=wx.DEFAULT_FRAME_STYLE)
win.Centre()
win.Show(True)
<mask token>
class printLog:
def __init__(self):
pass
def write(self, txt):
print('%s' % txt)
def WriteText(self, txt):
print('%s' % txt)
class TestFrame(wx.Frame):
def __init__(self, parent, id=wx.ID_ANY, title=wx.EmptyString, pos=wx.
DefaultPosition, size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE,
name='frame'):
wx.Frame.__init__(self, parent, id, title, pos, size, style, name)
log = printLog()
panel = TestPanel(self, log)
self.Bind(wx.EVT_CLOSE, self.OnDestroy)
try:
self.SetIcon(wx.IconFromLocation(wx.IconLocation(sys.executable)))
except Exception as exc:
raise exc
def OnDestroy(self, event):
self.Destroy()
class TestApp(wx.App):
def OnInit(self):
gMainWin = TestFrame(None)
gMainWin.SetTitle('Extended Frame Demo')
gMainWin.Show()
return True
<mask token>
|
<mask token>
class MyMiniFrame(wx.MiniFrame):
<mask token>
<mask token>
<mask token>
<mask token>
class TestPanel(wx.Panel):
def __init__(self, parent, log):
self.log = log
wx.Panel.__init__(self, parent, -1)
b1 = wx.Button(self, -1, 'Create and Show a MiniFrame', (50, 50))
self.Bind(wx.EVT_BUTTON, self.OnButton1, b1)
b2 = wx.Button(self, -1, 'Create and Show a MiniFrame With Effect',
(50, 100))
self.Bind(wx.EVT_BUTTON, self.OnButton2, b2)
self.list = wx.ListBox(self, choices=['wx.SHOW_EFFECT_NONE',
'wx.SHOW_EFFECT_ROLL_TO_LEFT', 'wx.SHOW_EFFECT_ROLL_TO_RIGHT',
'wx.SHOW_EFFECT_ROLL_TO_TOP', 'wx.SHOW_EFFECT_ROLL_TO_BOTTOM',
'wx.SHOW_EFFECT_SLIDE_TO_LEFT', 'wx.SHOW_EFFECT_SLIDE_TO_RIGHT',
'wx.SHOW_EFFECT_SLIDE_TO_TOP', 'wx.SHOW_EFFECT_SLIDE_TO_BOTTOM',
'wx.SHOW_EFFECT_BLEND', 'wx.SHOW_EFFECT_EXPAND'], pos=(50, 155),
size=(220, 160), style=wx.LB_SINGLE)
self.list.Select(0)
tt = 'Timeout in milliseconds\n0 is system default'
self.spin = wx.SpinCtrl(self, -1, tt, pos=(50, 130), style=wx.
ALIGN_LEFT)
self.spin.SetToolTip(wx.ToolTip(tt))
self.spin.SetRange(0, 5000)
self.spin.SetValue(0)
def OnButton1(self, evt):
win = MyMiniFrame(self, -1, 'This is a wx.MiniFrame', size=(350,
200), style=wx.DEFAULT_FRAME_STYLE)
win.Centre()
win.Show(True)
def OnButton2(self, evt):
win = MyMiniFrame(self, -1, 'This is a wx.MiniFrame', size=(350,
200), style=wx.DEFAULT_FRAME_STYLE)
win.Centre()
win.ShowWithEffect(effect=eval(self.list.GetString(self.list.
GetSelection())), timeout=self.spin.GetValue())
class printLog:
def __init__(self):
pass
def write(self, txt):
print('%s' % txt)
def WriteText(self, txt):
print('%s' % txt)
class TestFrame(wx.Frame):
def __init__(self, parent, id=wx.ID_ANY, title=wx.EmptyString, pos=wx.
DefaultPosition, size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE,
name='frame'):
wx.Frame.__init__(self, parent, id, title, pos, size, style, name)
log = printLog()
panel = TestPanel(self, log)
self.Bind(wx.EVT_CLOSE, self.OnDestroy)
try:
self.SetIcon(wx.IconFromLocation(wx.IconLocation(sys.executable)))
except Exception as exc:
raise exc
def OnDestroy(self, event):
self.Destroy()
class TestApp(wx.App):
def OnInit(self):
gMainWin = TestFrame(None)
gMainWin.SetTitle('Extended Frame Demo')
gMainWin.Show()
return True
<mask token>
|
<mask token>
class MyMiniFrame(wx.MiniFrame):
def __init__(self, parent, id, title, pos=wx.DefaultPosition, size=wx.
DefaultSize, style=wx.DEFAULT_FRAME_STYLE, name='frame'):
wx.MiniFrame.__init__(self, parent, id, title, pos, size, style, name)
panel = wx.Panel(self, -1)
button = wx.Button(panel, 1003, 'Close Me')
button.SetPosition((15, 15))
button2 = wx.Button(panel, -1, 'ToggleWindowStyle(wx.STAY_ON_TOP)')
button2.SetPosition((30, 50))
self.Bind(wx.EVT_BUTTON, self.OnCloseMe, button)
self.Bind(wx.EVT_BUTTON, self.OnToggleWindowStyle, button2)
self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
def OnToggleWindowStyle(self, event):
self.ToggleWindowStyle(wx.STAY_ON_TOP)
<mask token>
<mask token>
class TestPanel(wx.Panel):
def __init__(self, parent, log):
self.log = log
wx.Panel.__init__(self, parent, -1)
b1 = wx.Button(self, -1, 'Create and Show a MiniFrame', (50, 50))
self.Bind(wx.EVT_BUTTON, self.OnButton1, b1)
b2 = wx.Button(self, -1, 'Create and Show a MiniFrame With Effect',
(50, 100))
self.Bind(wx.EVT_BUTTON, self.OnButton2, b2)
self.list = wx.ListBox(self, choices=['wx.SHOW_EFFECT_NONE',
'wx.SHOW_EFFECT_ROLL_TO_LEFT', 'wx.SHOW_EFFECT_ROLL_TO_RIGHT',
'wx.SHOW_EFFECT_ROLL_TO_TOP', 'wx.SHOW_EFFECT_ROLL_TO_BOTTOM',
'wx.SHOW_EFFECT_SLIDE_TO_LEFT', 'wx.SHOW_EFFECT_SLIDE_TO_RIGHT',
'wx.SHOW_EFFECT_SLIDE_TO_TOP', 'wx.SHOW_EFFECT_SLIDE_TO_BOTTOM',
'wx.SHOW_EFFECT_BLEND', 'wx.SHOW_EFFECT_EXPAND'], pos=(50, 155),
size=(220, 160), style=wx.LB_SINGLE)
self.list.Select(0)
tt = 'Timeout in milliseconds\n0 is system default'
self.spin = wx.SpinCtrl(self, -1, tt, pos=(50, 130), style=wx.
ALIGN_LEFT)
self.spin.SetToolTip(wx.ToolTip(tt))
self.spin.SetRange(0, 5000)
self.spin.SetValue(0)
def OnButton1(self, evt):
win = MyMiniFrame(self, -1, 'This is a wx.MiniFrame', size=(350,
200), style=wx.DEFAULT_FRAME_STYLE)
win.Centre()
win.Show(True)
def OnButton2(self, evt):
win = MyMiniFrame(self, -1, 'This is a wx.MiniFrame', size=(350,
200), style=wx.DEFAULT_FRAME_STYLE)
win.Centre()
win.ShowWithEffect(effect=eval(self.list.GetString(self.list.
GetSelection())), timeout=self.spin.GetValue())
class printLog:
def __init__(self):
pass
def write(self, txt):
print('%s' % txt)
def WriteText(self, txt):
print('%s' % txt)
class TestFrame(wx.Frame):
def __init__(self, parent, id=wx.ID_ANY, title=wx.EmptyString, pos=wx.
DefaultPosition, size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE,
name='frame'):
wx.Frame.__init__(self, parent, id, title, pos, size, style, name)
log = printLog()
panel = TestPanel(self, log)
self.Bind(wx.EVT_CLOSE, self.OnDestroy)
try:
self.SetIcon(wx.IconFromLocation(wx.IconLocation(sys.executable)))
except Exception as exc:
raise exc
def OnDestroy(self, event):
self.Destroy()
class TestApp(wx.App):
def OnInit(self):
gMainWin = TestFrame(None)
gMainWin.SetTitle('Extended Frame Demo')
gMainWin.Show()
return True
<mask token>
|
<mask token>
try:
gFileDir = os.path.dirname(os.path.abspath(__file__))
except:
gFileDir = os.path.dirname(os.path.abspath(sys.argv[0]))
<mask token>
class MyMiniFrame(wx.MiniFrame):
def __init__(self, parent, id, title, pos=wx.DefaultPosition, size=wx.
DefaultSize, style=wx.DEFAULT_FRAME_STYLE, name='frame'):
wx.MiniFrame.__init__(self, parent, id, title, pos, size, style, name)
panel = wx.Panel(self, -1)
button = wx.Button(panel, 1003, 'Close Me')
button.SetPosition((15, 15))
button2 = wx.Button(panel, -1, 'ToggleWindowStyle(wx.STAY_ON_TOP)')
button2.SetPosition((30, 50))
self.Bind(wx.EVT_BUTTON, self.OnCloseMe, button)
self.Bind(wx.EVT_BUTTON, self.OnToggleWindowStyle, button2)
self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
def OnToggleWindowStyle(self, event):
self.ToggleWindowStyle(wx.STAY_ON_TOP)
def OnCloseMe(self, event):
self.Close(True)
def OnCloseWindow(self, event):
self.Destroy()
class TestPanel(wx.Panel):
def __init__(self, parent, log):
self.log = log
wx.Panel.__init__(self, parent, -1)
b1 = wx.Button(self, -1, 'Create and Show a MiniFrame', (50, 50))
self.Bind(wx.EVT_BUTTON, self.OnButton1, b1)
b2 = wx.Button(self, -1, 'Create and Show a MiniFrame With Effect',
(50, 100))
self.Bind(wx.EVT_BUTTON, self.OnButton2, b2)
self.list = wx.ListBox(self, choices=['wx.SHOW_EFFECT_NONE',
'wx.SHOW_EFFECT_ROLL_TO_LEFT', 'wx.SHOW_EFFECT_ROLL_TO_RIGHT',
'wx.SHOW_EFFECT_ROLL_TO_TOP', 'wx.SHOW_EFFECT_ROLL_TO_BOTTOM',
'wx.SHOW_EFFECT_SLIDE_TO_LEFT', 'wx.SHOW_EFFECT_SLIDE_TO_RIGHT',
'wx.SHOW_EFFECT_SLIDE_TO_TOP', 'wx.SHOW_EFFECT_SLIDE_TO_BOTTOM',
'wx.SHOW_EFFECT_BLEND', 'wx.SHOW_EFFECT_EXPAND'], pos=(50, 155),
size=(220, 160), style=wx.LB_SINGLE)
self.list.Select(0)
tt = 'Timeout in milliseconds\n0 is system default'
self.spin = wx.SpinCtrl(self, -1, tt, pos=(50, 130), style=wx.
ALIGN_LEFT)
self.spin.SetToolTip(wx.ToolTip(tt))
self.spin.SetRange(0, 5000)
self.spin.SetValue(0)
def OnButton1(self, evt):
win = MyMiniFrame(self, -1, 'This is a wx.MiniFrame', size=(350,
200), style=wx.DEFAULT_FRAME_STYLE)
win.Centre()
win.Show(True)
def OnButton2(self, evt):
win = MyMiniFrame(self, -1, 'This is a wx.MiniFrame', size=(350,
200), style=wx.DEFAULT_FRAME_STYLE)
win.Centre()
win.ShowWithEffect(effect=eval(self.list.GetString(self.list.
GetSelection())), timeout=self.spin.GetValue())
class printLog:
def __init__(self):
pass
def write(self, txt):
print('%s' % txt)
def WriteText(self, txt):
print('%s' % txt)
class TestFrame(wx.Frame):
def __init__(self, parent, id=wx.ID_ANY, title=wx.EmptyString, pos=wx.
DefaultPosition, size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE,
name='frame'):
wx.Frame.__init__(self, parent, id, title, pos, size, style, name)
log = printLog()
panel = TestPanel(self, log)
self.Bind(wx.EVT_CLOSE, self.OnDestroy)
try:
self.SetIcon(wx.IconFromLocation(wx.IconLocation(sys.executable)))
except Exception as exc:
raise exc
def OnDestroy(self, event):
self.Destroy()
class TestApp(wx.App):
def OnInit(self):
gMainWin = TestFrame(None)
gMainWin.SetTitle('Extended Frame Demo')
gMainWin.Show()
return True
if __name__ == '__main__':
import sys
print('Python %s.%s.%s %s' % sys.version_info[0:4])
print('wxPython %s' % wx.version())
gApp = TestApp(redirect=False, filename=None, useBestVisual=False,
clearSigInt=True)
gApp.MainLoop()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__doc__ = """\
A MiniFrame is a Frame with a small title bar. It is suitable for floating
toolbars that must not take up too much screen area. In other respects, it's the
same as a wx.Frame.
"""
__wxPyOnlineDocs__ = 'https://wxpython.org/Phoenix/docs/html/wx.MiniFrame.html'
__wxPyDemoPanel__ = 'TestPanel'
#-Imports-----------------------------------------------------------------------
#--Python Imports.
import os
import sys
#--wxPython Imports.
import wx
#-Globals-----------------------------------------------------------------------
try:
gFileDir = os.path.dirname(os.path.abspath(__file__))
except:
gFileDir = os.path.dirname(os.path.abspath(sys.argv[0]))
gBmpDir = gFileDir + os.sep + 'bitmaps'
class MyMiniFrame(wx.MiniFrame):
def __init__(self, parent, id, title, pos=wx.DefaultPosition,
size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE,
name='frame'):
wx.MiniFrame.__init__(self, parent, id, title, pos, size, style, name)
panel = wx.Panel(self, -1)
button = wx.Button(panel, 1003, "Close Me")
button.SetPosition((15, 15))
button2 = wx.Button(panel, -1, "ToggleWindowStyle(wx.STAY_ON_TOP)")
button2.SetPosition((30, 50))
self.Bind(wx.EVT_BUTTON, self.OnCloseMe, button)
self.Bind(wx.EVT_BUTTON, self.OnToggleWindowStyle, button2)
self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
def OnToggleWindowStyle(self, event):
self.ToggleWindowStyle(wx.STAY_ON_TOP)
def OnCloseMe(self, event):
self.Close(True)
def OnCloseWindow(self, event):
self.Destroy()
#---------------------------------------------------------------------------
class TestPanel(wx.Panel):
def __init__(self, parent, log):
self.log = log
wx.Panel.__init__(self, parent, -1)
b1 = wx.Button(self, -1, "Create and Show a MiniFrame", (50, 50))
self.Bind(wx.EVT_BUTTON, self.OnButton1, b1)
b2 = wx.Button(self, -1, "Create and Show a MiniFrame With Effect", (50, 100))
self.Bind(wx.EVT_BUTTON, self.OnButton2, b2)
self.list = wx.ListBox(self, choices=['wx.SHOW_EFFECT_NONE',
'wx.SHOW_EFFECT_ROLL_TO_LEFT',
'wx.SHOW_EFFECT_ROLL_TO_RIGHT',
'wx.SHOW_EFFECT_ROLL_TO_TOP',
'wx.SHOW_EFFECT_ROLL_TO_BOTTOM',
'wx.SHOW_EFFECT_SLIDE_TO_LEFT',
'wx.SHOW_EFFECT_SLIDE_TO_RIGHT',
'wx.SHOW_EFFECT_SLIDE_TO_TOP',
'wx.SHOW_EFFECT_SLIDE_TO_BOTTOM',
'wx.SHOW_EFFECT_BLEND',
'wx.SHOW_EFFECT_EXPAND'
# 'wx.SHOW_EFFECT_MAX'
],
pos=(50, 155), size=(220, 160),
style=wx.LB_SINGLE)
self.list.Select(0)
tt = "Timeout in milliseconds\n0 is system default"
self.spin = wx.SpinCtrl(self, -1, tt,
pos=(50, 130), style=wx.ALIGN_LEFT)
self.spin.SetToolTip(wx.ToolTip(tt))
self.spin.SetRange(0, 5000)
self.spin.SetValue(0)
def OnButton1(self, evt):
win = MyMiniFrame(self, -1, "This is a wx.MiniFrame", size=(350, 200),
style=wx.DEFAULT_FRAME_STYLE)
win.Centre()
win.Show(True)
def OnButton2(self, evt):
win = MyMiniFrame(self, -1, "This is a wx.MiniFrame", size=(350, 200),
style=wx.DEFAULT_FRAME_STYLE)
win.Centre()
win.ShowWithEffect(effect=eval(self.list.GetString(self.list.GetSelection())),
timeout=self.spin.GetValue())
#- __main__ Demo ---------------------------------------------------------------
class printLog:
def __init__(self):
pass
def write(self, txt):
print('%s' % txt)
def WriteText(self, txt):
print('%s' % txt)
class TestFrame(wx.Frame):
def __init__(self, parent, id=wx.ID_ANY, title=wx.EmptyString,
pos=wx.DefaultPosition, size=wx.DefaultSize,
style=wx.DEFAULT_FRAME_STYLE, name='frame'):
wx.Frame.__init__(self, parent, id, title, pos, size, style, name)
log = printLog()
panel = TestPanel(self, log)
self.Bind(wx.EVT_CLOSE, self.OnDestroy)
try:
self.SetIcon(wx.IconFromLocation(wx.IconLocation(sys.executable)))
except Exception as exc:
raise exc
def OnDestroy(self, event):
self.Destroy()
class TestApp(wx.App):
def OnInit(self):
gMainWin = TestFrame(None)
gMainWin.SetTitle('Extended Frame Demo')
gMainWin.Show()
return True
#---------------------------------------------------------------------------
if __name__ == '__main__':
import sys
print('Python %s.%s.%s %s' % sys.version_info[0:4])
print('wxPython %s' % wx.version())
gApp = TestApp(redirect=False,
filename=None,
useBestVisual=False,
clearSigInt=True)
gApp.MainLoop()
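# --- A hedged alternative (added illustration, not part of the demo) to the
# eval() lookup in OnButton2: resolving the selected effect name via getattr
# avoids evaluating arbitrary strings. 'name' stands in for the listbox value.
name = 'wx.SHOW_EFFECT_BLEND'              # e.g. self.list.GetString(...)
effect = getattr(wx, name.split('.', 1)[1])
print(effect == wx.SHOW_EFFECT_BLEND)      # True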
|
[
12,
14,
16,
19,
22
] |
2,287 |
c336bb6cdadfb836ab68ebd5bbb210f63af3d084
|
<mask token>
def ping_calculate_pong(expression, operator_index):
"""The function takes two arguments.
Argument 1: an expression from which we will extract one subexpression.
Argument 2: the index of the mathematical operator around which function takes the subexpression to extract.
The function:
    1. takes the expression and extracts one subexpression around a math operator (like 2 + 2) - ping;
    2. calculates the subexpression result using math_operation();
    3. replaces the subexpression in the expression with its result - pong.
"""
if len(expression) < 3 or operator_index == len(expression
) - 1 or operator_index == 0:
raise ValueError(
f'{expression} - check this fragment, something wrong.')
sub_expression = expression[operator_index - 1:operator_index + 2]
sub_result = math_operation(sub_expression)
expression[operator_index + 1] = sub_result
del expression[operator_index - 1:operator_index + 1]
def calculator_without_parentheses(expression):
"""The function:
    1. prioritizes mathematical operations in an expression without any parentheses;
    2. passes the expression and the indices of math operators to ping_calculate_pong();
    3. returns the result of the calculations.
"""
j = 1
while len(expression) > j:
if '**' in expression:
ping_calculate_pong(expression, expression.index('**'))
elif '*' in expression or '/' in expression:
if '*' in expression and '/' in expression:
if expression.index('*') < expression.index('/'):
ping_calculate_pong(expression, expression.index('*'))
else:
ping_calculate_pong(expression, expression.index('/'))
elif '/' not in expression:
ping_calculate_pong(expression, expression.index('*'))
elif '*' not in expression:
ping_calculate_pong(expression, expression.index('/'))
elif '+' in expression or '-' in expression:
if '+' in expression and '-' in expression:
if expression.index('+') < expression.index('-'):
ping_calculate_pong(expression, expression.index('+'))
else:
ping_calculate_pong(expression, expression.index('-'))
elif '-' not in expression:
ping_calculate_pong(expression, expression.index('+'))
elif '+' not in expression:
ping_calculate_pong(expression, expression.index('-'))
else:
j += 1
return expression
<mask token>
|
<mask token>
def math_operation(expression):
"""Simple calculator for two numbers in expression like 3 + 3."""
if not str(expression[0]).isdigit() or not str(expression[2]).isdigit():
if not str(expression[0]).replace('.', '1').replace('-', '1').isdigit(
) or not str(expression[2]).replace('.', '1').replace('-', '1'
).isdigit():
raise ValueError(
f'{expression} - check this fragment, something wrong.')
if expression[2] == 0 and expression[1] == '/':
raise ValueError(f'{expression} - division by zero.')
operator = expression[1]
if operator == '**':
return expression[0] ** expression[2]
elif operator == '*':
return expression[0] * expression[2]
elif operator == '/':
return expression[0] / expression[2]
elif operator == '+':
return expression[0] + expression[2]
elif operator == '-':
return expression[0] - expression[2]
def ping_calculate_pong(expression, operator_index):
"""The function takes two arguments.
Argument 1: an expression from which we will extract one subexpression.
Argument 2: the index of the mathematical operator around which function takes the subexpression to extract.
The function:
    1. takes the expression and extracts one subexpression around a math operator (like 2 + 2) - ping;
    2. calculates the subexpression result using math_operation();
    3. replaces the subexpression in the expression with its result - pong.
"""
if len(expression) < 3 or operator_index == len(expression
) - 1 or operator_index == 0:
raise ValueError(
f'{expression} - check this fragment, something wrong.')
sub_expression = expression[operator_index - 1:operator_index + 2]
sub_result = math_operation(sub_expression)
expression[operator_index + 1] = sub_result
del expression[operator_index - 1:operator_index + 1]
def calculator_without_parentheses(expression):
"""The function:
    1. prioritizes mathematical operations in an expression without any parentheses;
    2. passes the expression and the indices of math operators to ping_calculate_pong();
    3. returns the result of the calculations.
"""
j = 1
while len(expression) > j:
if '**' in expression:
ping_calculate_pong(expression, expression.index('**'))
elif '*' in expression or '/' in expression:
if '*' in expression and '/' in expression:
if expression.index('*') < expression.index('/'):
ping_calculate_pong(expression, expression.index('*'))
else:
ping_calculate_pong(expression, expression.index('/'))
elif '/' not in expression:
ping_calculate_pong(expression, expression.index('*'))
elif '*' not in expression:
ping_calculate_pong(expression, expression.index('/'))
elif '+' in expression or '-' in expression:
if '+' in expression and '-' in expression:
if expression.index('+') < expression.index('-'):
ping_calculate_pong(expression, expression.index('+'))
else:
ping_calculate_pong(expression, expression.index('-'))
elif '-' not in expression:
ping_calculate_pong(expression, expression.index('+'))
elif '+' not in expression:
ping_calculate_pong(expression, expression.index('-'))
else:
j += 1
return expression
<mask token>
|
<mask token>
def math_operation(expression):
"""Simple calculator for two numbers in expression like 3 + 3."""
if not str(expression[0]).isdigit() or not str(expression[2]).isdigit():
if not str(expression[0]).replace('.', '1').replace('-', '1').isdigit(
) or not str(expression[2]).replace('.', '1').replace('-', '1'
).isdigit():
raise ValueError(
f'{expression} - check this fragment, something wrong.')
if expression[2] == 0 and expression[1] == '/':
raise ValueError(f'{expression} - division by zero.')
operator = expression[1]
if operator == '**':
return expression[0] ** expression[2]
elif operator == '*':
return expression[0] * expression[2]
elif operator == '/':
return expression[0] / expression[2]
elif operator == '+':
return expression[0] + expression[2]
elif operator == '-':
return expression[0] - expression[2]
def ping_calculate_pong(expression, operator_index):
"""The function takes two arguments.
Argument 1: an expression from which we will extract one subexpression.
Argument 2: the index of the mathematical operator around which function takes the subexpression to extract.
The function:
    1. takes the expression and extracts one subexpression around a math operator (like 2 + 2) - ping;
    2. calculates the subexpression result using math_operation();
    3. replaces the subexpression in the expression with its result - pong.
"""
if len(expression) < 3 or operator_index == len(expression
) - 1 or operator_index == 0:
raise ValueError(
f'{expression} - check this fragment, something wrong.')
sub_expression = expression[operator_index - 1:operator_index + 2]
sub_result = math_operation(sub_expression)
expression[operator_index + 1] = sub_result
del expression[operator_index - 1:operator_index + 1]
def calculator_without_parentheses(expression):
"""The function:
    1. prioritizes mathematical operations in an expression without any parentheses;
    2. passes the expression and the indices of math operators to ping_calculate_pong();
    3. returns the result of the calculations.
"""
j = 1
while len(expression) > j:
if '**' in expression:
ping_calculate_pong(expression, expression.index('**'))
elif '*' in expression or '/' in expression:
if '*' in expression and '/' in expression:
if expression.index('*') < expression.index('/'):
ping_calculate_pong(expression, expression.index('*'))
else:
ping_calculate_pong(expression, expression.index('/'))
elif '/' not in expression:
ping_calculate_pong(expression, expression.index('*'))
elif '*' not in expression:
ping_calculate_pong(expression, expression.index('/'))
elif '+' in expression or '-' in expression:
if '+' in expression and '-' in expression:
if expression.index('+') < expression.index('-'):
ping_calculate_pong(expression, expression.index('+'))
else:
ping_calculate_pong(expression, expression.index('-'))
elif '-' not in expression:
ping_calculate_pong(expression, expression.index('+'))
elif '+' not in expression:
ping_calculate_pong(expression, expression.index('-'))
else:
j += 1
return expression
def clear_and_convert(string_math_expression):
"""This function takes string expression and converts it to list with int, float, and 'math signs'."""
cleared_expression = list(filter(lambda x: x != ' ',
string_math_expression))
check_list = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+',
'-', '*', '/', '(', ')', '.']
for element in cleared_expression:
if element not in check_list:
raise ValueError(
f'Houston, we have a problem. Element "{element}" in expression is not correct.'
)
num_exp = []
number = ''
len_cleared_expression = len(cleared_expression)
for i, element in enumerate(cleared_expression):
if element.isdigit():
number += element
if i == len_cleared_expression - 1 or not cleared_expression[i + 1
].isdigit():
num_exp.append(int(number))
number = ''
else:
num_exp.append(element)
while '.' in num_exp:
i = num_exp.index('.')
if i != 0 and i != len(num_exp) - 1 and isinstance(num_exp[i - 1], int
) and isinstance(num_exp[i + 1], int):
float_number = float(str(num_exp[i - 1]) + num_exp[i] + str(
num_exp[i + 1]))
num_exp[i + 1] = float_number
del num_exp[i - 1:i + 1]
else:
raise ValueError('Something wrong with ".".')
neg_exp = []
excluded_index = None
neg_check_list = ['+', '-', '*', '/', '(']
len_num_exp = len(num_exp)
for i, element in enumerate(num_exp):
if element == '-':
if i == len_num_exp - 1:
raise ValueError('Something wrong with "-".')
elif isinstance(num_exp[i + 1], int) and (i == 0 or num_exp[i -
1] in neg_check_list):
n_number = int('-' + str(num_exp[i + 1]))
neg_exp.append(n_number)
excluded_index = i + 1
elif isinstance(num_exp[i + 1], float) and (i == 0 or num_exp[i -
1] in neg_check_list):
n_number = float('-' + str(num_exp[i + 1]))
neg_exp.append(n_number)
excluded_index = i + 1
else:
neg_exp.append(element)
elif i != excluded_index:
neg_exp.append(element)
converted_expression = []
i = 0
len_neg_exp = len(neg_exp)
while i < len_neg_exp:
if (i == 0 or i == len_neg_exp - 1) and neg_exp[i] == '*':
raise ValueError('Something wrong with "*".')
elif neg_exp[i] == '*' and neg_exp[i + 1] == '*':
converted_expression.append('**')
i += 2
else:
converted_expression.append(neg_exp[i])
i += 1
return converted_expression
<mask token>
|
<mask token>
def math_operation(expression):
"""Simple calculator for two numbers in expression like 3 + 3."""
if not str(expression[0]).isdigit() or not str(expression[2]).isdigit():
if not str(expression[0]).replace('.', '1').replace('-', '1').isdigit(
) or not str(expression[2]).replace('.', '1').replace('-', '1'
).isdigit():
raise ValueError(
f'{expression} - check this fragment, something wrong.')
if expression[2] == 0 and expression[1] == '/':
raise ValueError(f'{expression} - division by zero.')
operator = expression[1]
if operator == '**':
return expression[0] ** expression[2]
elif operator == '*':
return expression[0] * expression[2]
elif operator == '/':
return expression[0] / expression[2]
elif operator == '+':
return expression[0] + expression[2]
elif operator == '-':
return expression[0] - expression[2]
def ping_calculate_pong(expression, operator_index):
"""The function takes two arguments.
Argument 1: an expression from which we will extract one subexpression.
Argument 2: the index of the mathematical operator around which function takes the subexpression to extract.
The function:
    1. takes the expression and extracts one subexpression around a math operator (like 2 + 2) - ping;
    2. calculates the subexpression result using math_operation();
    3. replaces the subexpression in the expression with its result - pong.
"""
if len(expression) < 3 or operator_index == len(expression
) - 1 or operator_index == 0:
raise ValueError(
f'{expression} - check this fragment, something wrong.')
sub_expression = expression[operator_index - 1:operator_index + 2]
sub_result = math_operation(sub_expression)
expression[operator_index + 1] = sub_result
del expression[operator_index - 1:operator_index + 1]
def calculator_without_parentheses(expression):
"""The function:
    1. prioritizes mathematical operations in an expression without any parentheses;
    2. passes the expression and the indices of math operators to ping_calculate_pong();
    3. returns the result of the calculations.
"""
j = 1
while len(expression) > j:
if '**' in expression:
ping_calculate_pong(expression, expression.index('**'))
elif '*' in expression or '/' in expression:
if '*' in expression and '/' in expression:
if expression.index('*') < expression.index('/'):
ping_calculate_pong(expression, expression.index('*'))
else:
ping_calculate_pong(expression, expression.index('/'))
elif '/' not in expression:
ping_calculate_pong(expression, expression.index('*'))
elif '*' not in expression:
ping_calculate_pong(expression, expression.index('/'))
elif '+' in expression or '-' in expression:
if '+' in expression and '-' in expression:
if expression.index('+') < expression.index('-'):
ping_calculate_pong(expression, expression.index('+'))
else:
ping_calculate_pong(expression, expression.index('-'))
elif '-' not in expression:
ping_calculate_pong(expression, expression.index('+'))
elif '+' not in expression:
ping_calculate_pong(expression, expression.index('-'))
else:
j += 1
return expression
def clear_and_convert(string_math_expression):
"""This function takes string expression and converts it to list with int, float, and 'math signs'."""
cleared_expression = list(filter(lambda x: x != ' ',
string_math_expression))
check_list = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+',
'-', '*', '/', '(', ')', '.']
for element in cleared_expression:
if element not in check_list:
raise ValueError(
f'Houston, we have a problem. Element "{element}" in expression is not correct.'
)
num_exp = []
number = ''
len_cleared_expression = len(cleared_expression)
for i, element in enumerate(cleared_expression):
if element.isdigit():
number += element
if i == len_cleared_expression - 1 or not cleared_expression[i + 1
].isdigit():
num_exp.append(int(number))
number = ''
else:
num_exp.append(element)
while '.' in num_exp:
i = num_exp.index('.')
if i != 0 and i != len(num_exp) - 1 and isinstance(num_exp[i - 1], int
) and isinstance(num_exp[i + 1], int):
float_number = float(str(num_exp[i - 1]) + num_exp[i] + str(
num_exp[i + 1]))
num_exp[i + 1] = float_number
del num_exp[i - 1:i + 1]
else:
raise ValueError('Something wrong with ".".')
neg_exp = []
excluded_index = None
neg_check_list = ['+', '-', '*', '/', '(']
len_num_exp = len(num_exp)
for i, element in enumerate(num_exp):
if element == '-':
if i == len_num_exp - 1:
raise ValueError('Something wrong with "-".')
elif isinstance(num_exp[i + 1], int) and (i == 0 or num_exp[i -
1] in neg_check_list):
n_number = int('-' + str(num_exp[i + 1]))
neg_exp.append(n_number)
excluded_index = i + 1
elif isinstance(num_exp[i + 1], float) and (i == 0 or num_exp[i -
1] in neg_check_list):
n_number = float('-' + str(num_exp[i + 1]))
neg_exp.append(n_number)
excluded_index = i + 1
else:
neg_exp.append(element)
elif i != excluded_index:
neg_exp.append(element)
converted_expression = []
i = 0
len_neg_exp = len(neg_exp)
while i < len_neg_exp:
if (i == 0 or i == len_neg_exp - 1) and neg_exp[i] == '*':
raise ValueError('Something wrong with "*".')
elif neg_exp[i] == '*' and neg_exp[i + 1] == '*':
converted_expression.append('**')
i += 2
else:
converted_expression.append(neg_exp[i])
i += 1
return converted_expression
def calculate_expression(str_math_expression):
"""This function:
1. uses clear_and_convert() to prepare the string math expression for further calculations;
    2. finds all subexpressions inside parentheses (if any);
    3. passes each subexpression to calculator_without_parentheses() for calculation;
    4. replaces the subexpression with its result;
    5. returns the final result of all calculations.
"""
expression = clear_and_convert(str_math_expression)
for element in expression.copy():
if ')' in expression:
if '(' in expression:
if expression.index(')') > expression.index('('):
z = expression.index(')')
a = z
while expression[a] != '(':
a -= 1
fragment = expression[a + 1:z]
fr_result = calculator_without_parentheses(fragment)
if len(fr_result) != 1:
raise ValueError(
f'{fr_result} - check this fragment, something wrong.'
)
expression[z] = fr_result[0]
del expression[a:z]
else:
raise ValueError('Something wrong with parentheses.')
else:
raise ValueError('Something wrong with parentheses.')
else:
expression = calculator_without_parentheses(expression)
if len(expression) != 1:
raise ValueError('Something wrong in your expression.')
if len(expression) == 1:
return str(round(expression[0], 5))
|
"""Calculator is built using "ping pong" algorithm, without eval() etc.
Main final function: calculate_expression().
calculate_expression() uses two functions in utils.py: clear_and_convert() and calculator_without_parentheses().
calculator_without_parentheses() uses two remaining functions:
math_operation() -> ping_calculate_pong() -> calculator_without_parentheses().
Allowed operations: +, -, *, /, **, use of parentheses. Spaces don't matter.
Negative numbers should be written as: (-34), float numbers: 3.4
Expression example: ((-2.3) + 3 ** (2 - 2)) * 2.2 + (6/(3 + 3)* (-2)) ** 2
"""
def math_operation(expression):
"""Simple calculator for two numbers in expression like 3 + 3."""
if not str(expression[0]).isdigit() or not str(expression[2]).isdigit():
        # allow float and negative numbers to pass the digit check
if not str(expression[0]).replace('.', '1').replace('-', '1').isdigit() or \
not str(expression[2]).replace('.', '1').replace('-', '1').isdigit():
raise ValueError(f'{expression} - check this fragment, something wrong.')
if expression[2] == 0 and expression[1] == '/':
raise ValueError(f'{expression} - division by zero.')
operator = expression[1]
if operator == '**':
return expression[0]**expression[2]
elif operator == '*':
return expression[0]*expression[2]
elif operator == '/':
return expression[0]/expression[2]
elif operator == '+':
return expression[0]+expression[2]
elif operator == '-':
return expression[0]-expression[2]
def ping_calculate_pong(expression, operator_index):
"""The function takes two arguments.
Argument 1: an expression from which we will extract one subexpression.
Argument 2: the index of the mathematical operator around which function takes the subexpression to extract.
The function:
    1. takes the expression and extracts one subexpression around a math operator (like 2 + 2) - ping;
    2. calculates the subexpression result using math_operation();
    3. replaces the subexpression in the expression with its result - pong.
"""
if len(expression) < 3 or operator_index == len(expression)-1 or operator_index == 0:
raise ValueError(f'{expression} - check this fragment, something wrong.')
sub_expression = expression[operator_index - 1:operator_index + 2]
sub_result = math_operation(sub_expression)
expression[operator_index+1] = sub_result
del expression[operator_index-1:operator_index+1]
def calculator_without_parentheses(expression):
"""The function:
    1. prioritizes mathematical operations in an expression without any parentheses;
    2. passes the expression and the indices of math operators to ping_calculate_pong();
    3. returns the result of the calculations.
"""
j = 1
while len(expression) > j:
if "**" in expression:
ping_calculate_pong(expression, expression.index('**'))
elif '*' in expression or '/' in expression:
if '*' in expression and '/' in expression:
if expression.index('*') < expression.index('/'):
ping_calculate_pong(expression, expression.index('*'))
else:
ping_calculate_pong(expression, expression.index('/'))
elif '/' not in expression:
ping_calculate_pong(expression, expression.index('*'))
elif '*' not in expression:
ping_calculate_pong(expression, expression.index('/'))
elif '+' in expression or '-' in expression:
if '+' in expression and '-' in expression:
if expression.index('+') < expression.index('-'):
ping_calculate_pong(expression, expression.index('+'))
else:
ping_calculate_pong(expression, expression.index('-'))
elif '-' not in expression:
ping_calculate_pong(expression, expression.index('+'))
elif '+' not in expression:
ping_calculate_pong(expression, expression.index('-'))
else:
            j += 1  # protection against a possible infinite loop when an incorrect expression is entered
return expression
def clear_and_convert(string_math_expression):
"""This function takes string expression and converts it to list with int, float, and 'math signs'."""
    # strip spaces from the expression and convert it to a list
cleared_expression = list(filter(lambda x: x != ' ', string_math_expression))
# check characters in the expression for correctness
check_list = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '-', '*', '/', '(', ')', '.']
for element in cleared_expression:
if element not in check_list:
raise ValueError(f'Houston, we have a problem. Element "{element}" in expression is not correct.')
    # find multi-digit numbers and build a new list num_exp of ints
num_exp = []
number = ''
len_cleared_expression = len(cleared_expression)
for i, element in enumerate(cleared_expression):
if element.isdigit():
number += element
if i == len_cleared_expression - 1 or not cleared_expression[i+1].isdigit():
num_exp.append(int(number))
number = ''
else:
num_exp.append(element)
# find float numbers and update list num_exp
while '.' in num_exp:
i = num_exp.index('.')
if (i != 0 and i != len(num_exp) - 1
and isinstance(num_exp[i-1], int)
and isinstance(num_exp[i+1], int)):
float_number = float(str(num_exp[i-1]) + num_exp[i] + str(num_exp[i+1]))
num_exp[i+1] = float_number
del num_exp[i-1:i+1]
else:
raise ValueError('Something wrong with ".".')
    # fold unary minus signs into negative numbers and build a new list
neg_exp = []
excluded_index = None
neg_check_list = ['+', '-', '*', '/', '(']
len_num_exp = len(num_exp)
for i, element in enumerate(num_exp):
if element == '-':
if i == len_num_exp - 1:
raise ValueError('Something wrong with "-".')
elif isinstance(num_exp[i+1], int) and (i == 0 or num_exp[i-1] in neg_check_list):
n_number = int('-' + str(num_exp[i+1]))
neg_exp.append(n_number)
excluded_index = i + 1
elif isinstance(num_exp[i+1], float) and (i == 0 or num_exp[i-1] in neg_check_list):
n_number = float('-' + str(num_exp[i+1]))
neg_exp.append(n_number)
excluded_index = i + 1
else:
neg_exp.append(element)
elif i != excluded_index:
neg_exp.append(element)
    # merge adjacent '*' tokens into the '**' exponent operator and build the final converted expression
converted_expression = []
i = 0
len_neg_exp = len(neg_exp)
while i < len_neg_exp:
if (i == 0 or i == len_neg_exp - 1) and neg_exp[i] == '*':
raise ValueError('Something wrong with "*".')
elif neg_exp[i] == '*' and neg_exp[i+1] == '*':
converted_expression.append('**')
i += 2
else:
converted_expression.append(neg_exp[i])
i += 1
return converted_expression
def calculate_expression(str_math_expression):
"""This function:
1. uses clear_and_convert() to prepare the string math expression for further calculations;
    2. finds all subexpressions inside parentheses (if any);
    3. passes each subexpression to calculator_without_parentheses() for calculation;
    4. replaces the subexpression with its result;
    5. returns the final result of all calculations.
"""
expression = clear_and_convert(str_math_expression)
for element in expression.copy():
if ')' in expression:
if '(' in expression:
if expression.index(')') > expression.index('('):
z = expression.index(')')
a = z
while expression[a] != '(':
a -= 1
fragment = expression[a+1:z]
fr_result = calculator_without_parentheses(fragment)
if len(fr_result) != 1: # checking for an input error in a fragment of the expression like ((()))
raise ValueError(f'{fr_result} - check this fragment, something wrong.')
expression[z] = fr_result[0]
del expression[a:z]
else:
raise ValueError('Something wrong with parentheses.')
else:
raise ValueError('Something wrong with parentheses.')
else:
expression = calculator_without_parentheses(expression)
if len(expression) != 1:
raise ValueError('Something wrong in your expression.')
if len(expression) == 1:
return str(round(expression[0], 5))
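# --- A minimal usage sketch (added illustration, not part of the original
# module); run after the definitions above. The expected outputs follow from
# the round(..., 5) at the end of calculate_expression().
print(calculate_expression('2 + 2 * 2'))            # -> '6'
print(calculate_expression('(2 + 2) * 2'))          # -> '8'
print(calculate_expression('((-2)) ** 2 + 6 / 4'))  # -> '5.5'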
|
[
2,
3,
4,
5,
6
] |
2,288 |
c08e6cee61e9f32a9f067a9554c74bb2ddbd7cf3
|
<mask token>
def process_data(num11, den11, num21, den21):
w11 = ctrl.tf(num11, den11)
w21 = ctrl.tf(num21, den21)
    print('result w11={} w21={}'.format(w11, w21))
TimeLine = []
for i in range(1, 3000):
TimeLine.append(i / 1000)
plt.figure(0, figsize=[7, 6])
[y11, x11] = ctrl.step(w11, TimeLine)
[y21, x21] = ctrl.step(w21, TimeLine)
    plt.plot(x11, y11, 'r', label='Original')
    plt.plot(x21, y21, 'b', label='Increased k and decreased T')
    plt.title('Step response of the element')
    plt.ylabel('Amplitude')
    plt.xlabel('Time (s)')
plt.grid(True)
plt.show()
[y11, x11] = ctrl.impulse(w11, TimeLine)
[y21, x21] = ctrl.impulse(w21, TimeLine)
    plt.plot(x11, y11, 'r', label='Original')
    plt.plot(x21, y21, 'b', label='Increased k and decreased T')
    plt.title('Impulse response of the element')
    plt.ylabel('Amplitude')
    plt.xlabel('Time (s)')
plt.grid(True)
plt.show()
ctrl.mag, ctrl.phase, ctrl.omega = ctrl.bode(w11, w21, dB=False)
plt.plot()
plt.show()
return w11, w21
<mask token>
|
<mask token>
def process_data(num11, den11, num21, den21):
w11 = ctrl.tf(num11, den11)
w21 = ctrl.tf(num21, den21)
    print('result w11={} w21={}'.format(w11, w21))
TimeLine = []
for i in range(1, 3000):
TimeLine.append(i / 1000)
plt.figure(0, figsize=[7, 6])
[y11, x11] = ctrl.step(w11, TimeLine)
[y21, x21] = ctrl.step(w21, TimeLine)
    plt.plot(x11, y11, 'r', label='Original')
    plt.plot(x21, y21, 'b', label='Increased k and decreased T')
    plt.title('Step response of the element')
    plt.ylabel('Amplitude')
    plt.xlabel('Time (s)')
plt.grid(True)
plt.show()
[y11, x11] = ctrl.impulse(w11, TimeLine)
[y21, x21] = ctrl.impulse(w21, TimeLine)
    plt.plot(x11, y11, 'r', label='Original')
    plt.plot(x21, y21, 'b', label='Increased k and decreased T')
    plt.title('Impulse response of the element')
    plt.ylabel('Amplitude')
    plt.xlabel('Time (s)')
plt.grid(True)
plt.show()
ctrl.mag, ctrl.phase, ctrl.omega = ctrl.bode(w11, w21, dB=False)
plt.plot()
plt.show()
return w11, w21
print('1 - proportional (inertia-free) element')
print('2 - aperiodic (first-order lag) element')
print('3 - integrating element')
print('4 - ideal differentiating element')
print('5 - real differentiating element')
print('Enter the number of the function to display:')
<mask token>
if func_number == 1:
process_data([4.0], [1.0], [2.0], [1.0])
elif func_number == 2:
process_data([3.0], [2, 1.0], [1.5, 0.0], [4, 1.0])
elif func_number == 3:
process_data([1.0], [1, 0.0], [1.0], [0.5, 0.0])
elif func_number == 4:
process_data([5, 0.0], [1e-12, 1.0], [10, 0.0], [1e-12, 1.0])
elif func_number == 5:
process_data([3.0], [1, 1.0], [1.5, 0.0], [2, 1.0])
|
<mask token>
def process_data(num11, den11, num21, den21):
w11 = ctrl.tf(num11, den11)
w21 = ctrl.tf(num21, den21)
    print('result w11={} w21={}'.format(w11, w21))
TimeLine = []
for i in range(1, 3000):
TimeLine.append(i / 1000)
plt.figure(0, figsize=[7, 6])
[y11, x11] = ctrl.step(w11, TimeLine)
[y21, x21] = ctrl.step(w21, TimeLine)
    plt.plot(x11, y11, 'r', label='Original')
    plt.plot(x21, y21, 'b', label='Increased k and decreased T')
    plt.title('Step response of the element')
    plt.ylabel('Amplitude')
    plt.xlabel('Time (s)')
plt.grid(True)
plt.show()
[y11, x11] = ctrl.impulse(w11, TimeLine)
[y21, x21] = ctrl.impulse(w21, TimeLine)
    plt.plot(x11, y11, 'r', label='Original')
    plt.plot(x21, y21, 'b', label='Increased k and decreased T')
    plt.title('Impulse response of the element')
    plt.ylabel('Amplitude')
    plt.xlabel('Time (s)')
plt.grid(True)
plt.show()
ctrl.mag, ctrl.phase, ctrl.omega = ctrl.bode(w11, w21, dB=False)
plt.plot()
plt.show()
return w11, w21
print('1 - proportional (inertia-free) element')
print('2 - aperiodic (first-order lag) element')
print('3 - integrating element')
print('4 - ideal differentiating element')
print('5 - real differentiating element')
print('Enter the number of the function to display:')
func_number = int(input())
if func_number == 1:
process_data([4.0], [1.0], [2.0], [1.0])
elif func_number == 2:
process_data([3.0], [2, 1.0], [1.5, 0.0], [4, 1.0])
elif func_number == 3:
process_data([1.0], [1, 0.0], [1.0], [0.5, 0.0])
elif func_number == 4:
process_data([5, 0.0], [1e-12, 1.0], [10, 0.0], [1e-12, 1.0])
elif func_number == 5:
process_data([3.0], [1, 1.0], [1.5, 0.0], [2, 1.0])
|
import control.matlab as ctrl
import matplotlib.pylab as plt
def process_data(num11, den11, num21, den21):
w11 = ctrl.tf(num11, den11)
w21 = ctrl.tf(num21, den21)
    print('result w11={} w21={}'.format(w11, w21))
TimeLine = []
for i in range(1, 3000):
TimeLine.append(i / 1000)
plt.figure(0, figsize=[7, 6])
[y11, x11] = ctrl.step(w11, TimeLine)
[y21, x21] = ctrl.step(w21, TimeLine)
    plt.plot(x11, y11, 'r', label='Original')
    plt.plot(x21, y21, 'b', label='Increased k and decreased T')
    plt.title('Step response of the element')
    plt.ylabel('Amplitude')
    plt.xlabel('Time (s)')
plt.grid(True)
plt.show()
[y11, x11] = ctrl.impulse(w11, TimeLine)
[y21, x21] = ctrl.impulse(w21, TimeLine)
    plt.plot(x11, y11, 'r', label='Original')
    plt.plot(x21, y21, 'b', label='Increased k and decreased T')
    plt.title('Impulse response of the element')
    plt.ylabel('Amplitude')
    plt.xlabel('Time (s)')
plt.grid(True)
plt.show()
ctrl.mag, ctrl.phase, ctrl.omega = ctrl.bode(w11, w21, dB=False)
plt.plot()
plt.show()
return w11, w21
print('1 - proportional (inertia-free) element')
print('2 - aperiodic (first-order lag) element')
print('3 - integrating element')
print('4 - ideal differentiating element')
print('5 - real differentiating element')
print('Enter the number of the function to display:')
func_number = int(input())
if func_number == 1:
process_data([4.0], [1.0], [2.0], [1.0])
elif func_number == 2:
process_data([3.0], [2, 1.0], [1.5, 0.0], [4, 1.0])
elif func_number == 3:
process_data([1.0], [1, 0.0], [1.0], [0.5, 0.0])
elif func_number == 4:
process_data([5, 0.0], [1e-12, 1.0], [10, 0.0], [1e-12, 1.0])
elif func_number == 5:
process_data([3.0], [1, 1.0], [1.5, 0.0], [2, 1.0])
|
import control.matlab as ctrl
import matplotlib.pylab as plt
def process_data(num11, den11, num21, den21):
w11 = ctrl.tf(num11, den11)
w21 = ctrl.tf(num21, den21)
    print('result w11={} w21={}'.format(w11, w21))
TimeLine = []
for i in range (1, 3000):
TimeLine.append(i/1000)
plt.figure(0, figsize = [7, 6])
[y11, x11] = ctrl.step(w11, TimeLine)
[y21, x21] = ctrl.step(w21, TimeLine)
plt.plot(x11, y11, "r", label='Исходная')
plt.plot(x21, y21, "b", label='Увеличенная k и уменшенная Т')
plt.title('Переходная функция звена')
plt.ylabel('Амплитуда')
plt.xlabel('Время(с)')
plt.grid(True)
plt.show()
[y11, x11] = ctrl.impulse(w11, TimeLine)
[y21, x21] = ctrl.impulse(w21, TimeLine)
plt.plot(x11, y11, "r", label='Исходная')
plt.plot(x21, y21, "b", label='Увеличенная k и уменшенная Т')
plt.title('Импульсная функция звена')
plt.ylabel('Амплитуда')
plt.xlabel('Время(с)')
plt.grid(True)
plt.show()
ctrl.mag, ctrl.phase, ctrl.omega = ctrl.bode(w11, w21, dB=False)
plt.plot()
plt.show()
return w11, w21
print('1 - proportional (inertia-free) element')
print('2 - aperiodic (first-order lag) element')
print('3 - integrating element')
print('4 - ideal differentiating element')
print('5 - real differentiating element')
print('Enter the number of the function to display:')
func_number = int(input())
if func_number == 1:
process_data([4.], [ 1.], [2.], [ 1.])
elif func_number == 2:
process_data([3.], [2, 1.], [1.5, 0.], [4, 1.])
elif func_number == 3:
process_data([1.], [1, 0.], [1.], [0.5, 0.])
elif func_number == 4:
process_data([5, 0.], [1e-12, 1.], [10, 0.], [1e-12, 1.])
elif func_number == 5:
process_data([3.], [1, 1.], [1.5, 0.], [2, 1.])
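# --- A hedged reference sketch (added, not in the original script): ctrl.tf
# takes polynomial coefficients in descending powers of s, so the aperiodic
# element W(s) = k/(T*s + 1) with k = 3 and T = 2 used above is built as:
k, T = 3., 2.
w_ref = ctrl.tf([k], [T, 1.])  # numerator [k], denominator [T, 1]
print(w_ref)                   # -> 3 / (2 s + 1)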
|
[
1,
2,
3,
4,
5
] |
2,289 |
d32f009f373249b7b602ac36f29982273a2ed192
|
<mask token>
class AmplitudeLogger:
<mask token>
async def log_event(self, event):
event = {'api_key': self.api_key, 'events': [event]}
try:
validate(instance=event, schema=self.api_schema)
except ValidationError:
log.error('Invalid payload', exc_info=True)
return None
async with aiohttp_client.post(API_URL, data=json.dumps(event)
) as resp:
if resp.status != 200:
                log.warning('Failed to log event', exc_info=True)
return resp
|
<mask token>
class AmplitudeLogger:
def __init__(self, api_key: str):
self.api_key = api_key
self.api_schema = json.loads(pkg_resources.read_text(resources,
'schema.json'))
async def log_event(self, event):
event = {'api_key': self.api_key, 'events': [event]}
try:
validate(instance=event, schema=self.api_schema)
except ValidationError:
log.error('Invalid payload', exc_info=True)
return None
async with aiohttp_client.post(API_URL, data=json.dumps(event)
) as resp:
if resp.status != 200:
                log.warning('Failed to log event', exc_info=True)
return resp
|
<mask token>
log = logging.getLogger('amplitude-client')
API_URL = 'https://api.amplitude.com/2/httpapi'
class AmplitudeLogger:
def __init__(self, api_key: str):
self.api_key = api_key
self.api_schema = json.loads(pkg_resources.read_text(resources,
'schema.json'))
async def log_event(self, event):
event = {'api_key': self.api_key, 'events': [event]}
try:
validate(instance=event, schema=self.api_schema)
except ValidationError:
log.error('Invalid payload', exc_info=True)
return None
async with aiohttp_client.post(API_URL, data=json.dumps(event)
) as resp:
if resp.status != 200:
                log.warning('Failed to log event', exc_info=True)
return resp
|
from . import resources
from jsonschema import validate
from jsonschema.exceptions import ValidationError
import aiohttp_client
import importlib.resources as pkg_resources
import json
import logging
log = logging.getLogger('amplitude-client')
API_URL = 'https://api.amplitude.com/2/httpapi'
class AmplitudeLogger:
def __init__(self, api_key: str):
self.api_key = api_key
self.api_schema = json.loads(pkg_resources.read_text(resources,
'schema.json'))
async def log_event(self, event):
event = {'api_key': self.api_key, 'events': [event]}
try:
validate(instance=event, schema=self.api_schema)
except ValidationError:
log.error('Invalid payload', exc_info=True)
return None
async with aiohttp_client.post(API_URL, data=json.dumps(event)
) as resp:
if resp.status != 200:
                log.warning('Failed to log event', exc_info=True)
return resp
|
from . import resources
from jsonschema import validate
from jsonschema.exceptions import ValidationError
import aiohttp_client
import importlib.resources as pkg_resources
import json
import logging
log = logging.getLogger("amplitude-client")
API_URL = "https://api.amplitude.com/2/httpapi"
class AmplitudeLogger:
def __init__(self, api_key: str):
self.api_key = api_key
self.api_schema = json.loads(pkg_resources.read_text(resources, "schema.json"))
async def log_event(self, event):
# Amplitude API requires (user_id OR device_id) AND event_type
event = {"api_key": self.api_key, "events": [event]}
try:
validate(instance=event, schema=self.api_schema)
except ValidationError:
log.error("Invalid payload", exc_info=True)
return None
async with aiohttp_client.post(API_URL, data=json.dumps(event)) as resp:
if resp.status != 200:
log.warn("Failed to log event", exc_info=True)
return resp
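# --- A minimal usage sketch (added illustration, not from the original file).
# As the comment above notes, an event needs event_type plus a user_id or
# device_id; the api key and ids below are placeholders.
import asyncio

async def _demo():
    logger = AmplitudeLogger(api_key="YOUR_API_KEY")
    resp = await logger.log_event({
        "user_id": "user-123",           # or "device_id"
        "event_type": "button_clicked",
    })
    if resp is not None:
        print(resp.status)

asyncio.run(_demo())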
|
[
1,
2,
3,
4,
5
] |
2,290 |
caa28bd64141c8d2f3212b5e4e77129d81d24c71
|
<mask token>
@app.route('/', methods=['GET', 'POST'])
def inicio():
nombre = 'jose'
return render_template('inicio.html', nombre=nombre)
<mask token>
|
<mask token>
@app.route('/', methods=['GET', 'POST'])
def inicio():
nombre = 'jose'
return render_template('inicio.html', nombre=nombre)
app.run(debug=True)
|
<mask token>
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def inicio():
nombre = 'jose'
return render_template('inicio.html', nombre=nombre)
app.run(debug=True)
|
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def inicio():
nombre = 'jose'
return render_template('inicio.html', nombre=nombre)
app.run(debug=True)
|
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/',methods=["GET","POST"])
def inicio():
nombre = "jose"
return render_template("inicio.html",nombre=nombre)
app.run(debug=True)
|
[
1,
2,
3,
4,
5
] |
2,291 |
a048396019aa7603a20535a3ce4bc9770509097d
|
<mask token>
|
<mask token>
class Migration(migrations.Migration):
<mask token>
<mask token>
|
<mask token>
class Migration(migrations.Migration):
dependencies = [('excursions', '0003_auto_20210420_1608')]
operations = [migrations.AlterField(model_name='exscursion', name=
        'type', field=models.CharField(choices=[('group', 'Group'), (
        'individual', 'Individual')], default='group', max_length=10,
        verbose_name='Type'))]
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('excursions', '0003_auto_20210420_1608')]
operations = [migrations.AlterField(model_name='exscursion', name=
        'type', field=models.CharField(choices=[('group', 'Group'), (
        'individual', 'Individual')], default='group', max_length=10,
        verbose_name='Type'))]
|
# Generated by Django 3.2 on 2021-04-20 13:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('excursions', '0003_auto_20210420_1608'),
]
operations = [
migrations.AlterField(
model_name='exscursion',
name='type',
            field=models.CharField(choices=[('group', 'Group'), ('individual', 'Individual')], default='group', max_length=10, verbose_name='Type'),
),
]
|
[
0,
1,
2,
3,
4
] |
2,292 |
a745f72081e06ff3399f9d7f65a30d7eef594689
|
<mask token>
|
<mask token>
if __name__ == '__main__':
hostname = 'forecast.derivacloud.org'
catalog_id = '5'
model = Model.from_catalog(DerivaServer('https', hostname, credentials=
get_credential(hostname)).connect_ermrest(catalog_id))
tabname = model.schemas['ETAS'].tables['Forecast']
print('Before Adding Column')
for column in tabname.column_definitions:
print(column.name, column.type.typename, column.nullok)
"""
Define a series of column names that reflect metadata we expect to extract from
the ETAS directory names. These are initial names, defined by developers.
ETAS modelers may want to rename these columns to be more meaningful to domain experts.
For this first version, all fields are defined as free text.
Redefinition of these values as controlled vocabularies are a future refinement.
1) Sim_Start_Time: Enumeration List
e.g: "2019_07_16"
not null
2) Catalog_Mag: Enumeration List
e.g.: "ComCatM7p1"
not null
3) Event_ID: Enumeration List
e.g.: "ci39457511"
not null
4) Post_Event_Date: Enumeration List
e.g.: "7DaysAfter"
maybe null
5) Rupture_Def: Enumeration List
e.g. "ShakeMapSurfaces"
"ShakeMapSurfaces-noSpont-full_td-scale1.14"
not null
"""
tabname.create_column(Column.define('Sim_Start_Time', builtin_types.
text, comment='Simulation Start Time'))
tabname.create_column(Column.define('Catalog_Mag', builtin_types.text,
comment='Catalog Name and Event Magnitude'))
tabname.create_column(Column.define('Event_ID', builtin_types.text,
comment='Earthquake Event ID'))
tabname.create_column(Column.define('Post_Event_Date', builtin_types.
text, comment='Days Forecast made after Mainshock'))
tabname.create_column(Column.define('Rupture_Definition', builtin_types
.text, comment='Type of Rupture used in ETAS forecast'))
print('After Adding Column')
etas_model = model.schemas['ETAS']
tabname = etas_model.tables['Forecast']
for column in tabname.column_definitions:
print(column.name, column.type.typename, column.nullok)
sys.exit(0)
|
<mask token>
import os
import sys
from deriva.core import DerivaServer, ErmrestCatalog, get_credential
from deriva.chisel import Model, Schema, Table, Column, Key, ForeignKey, builtin_types, tag
if __name__ == '__main__':
hostname = 'forecast.derivacloud.org'
catalog_id = '5'
model = Model.from_catalog(DerivaServer('https', hostname, credentials=
get_credential(hostname)).connect_ermrest(catalog_id))
tabname = model.schemas['ETAS'].tables['Forecast']
print('Before Adding Column')
for column in tabname.column_definitions:
print(column.name, column.type.typename, column.nullok)
"""
Define a series of column names that reflect metadata we expect to extract from
the ETAS directory names. These are initial names, defined by developers.
ETAS modelers may want to rename these columns to be more meaningful to domain experts.
For this first version, all fields are defined as free text.
Redefinition of these values as controlled vocabularies are a future refinement.
1) Sim_Start_Time: Enumeration List
e.g: "2019_07_16"
not null
2) Catalog_Mag: Enumeration List
e.g.: "ComCatM7p1"
not null
3) Event_ID: Enumeration List
e.g.: "ci39457511"
not null
4) Post_Event_Date: Enumeration List
e.g.: "7DaysAfter"
maybe null
5) Rupture_Def: Enumeration List
e.g. "ShakeMapSurfaces"
"ShakeMapSurfaces-noSpont-full_td-scale1.14"
not null
"""
tabname.create_column(Column.define('Sim_Start_Time', builtin_types.
text, comment='Simulation Start Time'))
tabname.create_column(Column.define('Catalog_Mag', builtin_types.text,
comment='Catalog Name and Event Magnitude'))
tabname.create_column(Column.define('Event_ID', builtin_types.text,
comment='Earthquake Event ID'))
tabname.create_column(Column.define('Post_Event_Date', builtin_types.
text, comment='Days Forecast made after Mainshock'))
tabname.create_column(Column.define('Rupture_Definition', builtin_types
.text, comment='Type of Rupture used in ETAS forecast'))
print('After Adding Column')
etas_model = model.schemas['ETAS']
tabname = etas_model.tables['Forecast']
for column in tabname.column_definitions:
print(column.name, column.type.typename, column.nullok)
sys.exit(0)
|
#!/usr/bin/env python
"""add_columns.py: This script reads an SCEC ETAS forecast directory name
and extracts key fields that are then added as attributes in the SCEC Deriva
schema.
This script is an example of how the ERD used by Deriva is extended as additional
information or metadata is added to the asset descriptions in Deriva.
This must be run after the create_model.py script has been run, because this modifies
the ERD created by that script.
The expectation is this is run once. If it is run a second time, we expect errors
indicating the columns already exist.
Philip Maechling
3 April 2021
"""
import os
import sys
from deriva.core import DerivaServer, ErmrestCatalog, get_credential
from deriva.chisel import Model, Schema, Table, Column, Key, ForeignKey, builtin_types, tag
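# --- A hedged sketch (added, not part of the original script) of how the five
# metadata fields created below might be pulled from an ETAS directory name;
# the hyphen-delimited layout is a guess assembled from the docstring examples.
def parse_forecast_dirname(dirname):
    """e.g. '2019_07_16-ComCatM7p1-ci39457511-7DaysAfter-ShakeMapSurfaces'"""
    fields = ['Sim_Start_Time', 'Catalog_Mag', 'Event_ID',
              'Post_Event_Date', 'Rupture_Definition']
    return dict(zip(fields, dirname.split('-', 4)))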
if __name__ == "__main__":
# Connect to server and catalog ------------------------------------------------------------------#
    hostname = 'forecast.derivacloud.org'   # this is a dev server for throw-away work (change to 'forecast.derivacloud.org')
catalog_id = '5' # this was a throw-away catalog used to test this script (change to TBD)
model = Model.from_catalog(
DerivaServer('https', hostname, credentials=get_credential(hostname)).connect_ermrest(catalog_id)
)
#
# During testing, exit before any table modifications are done
#
tabname = model.schemas['ETAS'].tables["Forecast"]
print("Before Adding Column")
for column in tabname.column_definitions:
print(column.name,column.type.typename,column.nullok)
"""
Define a series of column names that reflect metadata we expect to extract from
the ETAS directory names. These are initial names, defined by developers.
ETAS modelers may want to rename these columns to be more meaningful to domain experts.
For this first version, all fields are defined as free text.
Redefinition of these values as controlled vocabularies are a future refinement.
1) Sim_Start_Time: Enumeration List
e.g: "2019_07_16"
not null
2) Catalog_Mag: Enumeration List
e.g.: "ComCatM7p1"
not null
3) Event_ID: Enumeration List
e.g.: "ci39457511"
not null
4) Post_Event_Date: Enumeration List
e.g.: "7DaysAfter"
maybe null
5) Rupture_Def: Enumeration List
e.g. "ShakeMapSurfaces"
"ShakeMapSurfaces-noSpont-full_td-scale1.14"
not null
"""
tabname.create_column(Column.define('Sim_Start_Time',
builtin_types.text,
comment="Simulation Start Time"))
tabname.create_column(Column.define('Catalog_Mag',
builtin_types.text,
comment="Catalog Name and Event Magnitude"))
tabname.create_column(Column.define('Event_ID',
builtin_types.text,
comment="Earthquake Event ID"))
tabname.create_column(Column.define('Post_Event_Date',
builtin_types.text,
comment="Days Forecast made after Mainshock"))
tabname.create_column(Column.define('Rupture_Definition',
builtin_types.text,
comment="Type of Rupture used in ETAS forecast"))
# retrieve catalog model again to ensure we reflect latest structural changes
# example shows this, but I'm not sure what it returns
print("After Adding Column")
etas_model = model.schemas['ETAS']
tabname = etas_model.tables["Forecast"]
for column in tabname.column_definitions:
print(column.name,column.type.typename,column.nullok)
sys.exit(0)
| null |
[
0,
1,
2,
3
] |
2,293 |
50ed1512b0e6ff8e01f5d4aa034406fa78850176
|
<mask token>
class CifarResNeXt(nn.Module):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
|
<mask token>
class ResNeXtBottleneck(nn.Module):
<mask token>
def __init__(self, in_channels, out_channels, stride, cardinality,
widen_factor):
""" Constructor
Args:
in_channels: input channel dimensionality
out_channels: output channel dimensionality
stride: conv stride. Replaces pooling layer.
cardinality: num of convolution groups.
widen_factor: factor to reduce the input dimensionality before convolution.
"""
super(ResNeXtBottleneck, self).__init__()
D = cardinality * out_channels // widen_factor
self.conv_reduce = nn.Conv2d(in_channels, D, kernel_size=1, stride=
1, padding=0, bias=False)
self.bn_reduce = nn.BatchNorm2d(D)
self.conv_conv = nn.Conv2d(D, D, kernel_size=3, stride=stride,
padding=1, groups=cardinality, bias=False)
self.bn = nn.BatchNorm2d(D)
self.conv_expand = nn.Conv2d(D, out_channels, kernel_size=1, stride
=1, padding=0, bias=False)
self.bn_expand = nn.BatchNorm2d(out_channels)
self.shortcut = nn.Sequential()
if in_channels != out_channels:
self.shortcut.add_module('shortcut_conv', nn.Conv2d(in_channels,
out_channels, kernel_size=1, stride=stride, padding=0, bias
=False))
self.shortcut.add_module('shortcut_bn', nn.BatchNorm2d(
out_channels))
def forward(self, x):
bottleneck = self.conv_reduce.forward(x)
bottleneck = F.relu(self.bn_reduce.forward(bottleneck), inplace=True)
bottleneck = self.conv_conv.forward(bottleneck)
bottleneck = F.relu(self.bn.forward(bottleneck), inplace=True)
bottleneck = self.conv_expand.forward(bottleneck)
bottleneck = self.bn_expand.forward(bottleneck)
residual = self.shortcut.forward(x)
return F.relu(residual + bottleneck, inplace=True)
class CifarResNeXt(nn.Module):
"""
ResNext optimized for the Cifar dataset, as specified in
https://arxiv.org/pdf/1611.05431.pdf
"""
def __init__(self, cardinality, depth, num_classes, widen_factor=4,
dropRate=0):
""" Constructor
Args:
cardinality: number of convolution groups.
depth: number of layers.
num_classes: number of classes
widen_factor: factor to adjust the channel dimensionality
"""
super(CifarResNeXt, self).__init__()
self.cardinality = cardinality
self.depth = depth
self.block_depth = (self.depth - 2) // 9
self.widen_factor = widen_factor
self.num_classes = num_classes
self.output_size = 64
self.stages = [64, 64 * self.widen_factor, 128 * self.widen_factor,
256 * self.widen_factor]
self.conv_1_3x3 = nn.Conv2d(3, 64, 3, 1, 1, bias=False)
self.bn_1 = nn.BatchNorm2d(64)
self.stage_1 = self.block('stage_1', self.stages[0], self.stages[1], 1)
self.stage_2 = self.block('stage_2', self.stages[1], self.stages[2], 2)
self.stage_3 = self.block('stage_3', self.stages[2], self.stages[3], 2)
self.classifier = nn.Linear(1024, num_classes)
self.stage_att = self.block('stage_att', self.stages[2], self.
stages[3], 1)
self.bn_att = nn.BatchNorm2d(self.stages[3])
self.att_conv = nn.Conv2d(self.stages[3], num_classes, kernel_size=
1, padding=0, bias=False)
self.bn_att2 = nn.BatchNorm2d(num_classes)
self.att_conv2 = nn.Conv2d(num_classes, num_classes, kernel_size=1,
padding=0, bias=False)
self.att_conv3 = nn.Conv2d(num_classes, 1, kernel_size=3, padding=1,
bias=False)
self.bn_att3 = nn.BatchNorm2d(1)
self.att_gap = nn.AvgPool2d(16)
self.sigmoid = nn.Sigmoid()
self.relu = nn.ReLU(inplace=True)
init.kaiming_normal(self.classifier.weight)
for key in self.state_dict():
if key.split('.')[-1] == 'weight':
if 'conv' in key:
init.kaiming_normal(self.state_dict()[key], mode='fan_out')
if 'bn' in key:
self.state_dict()[key][...] = 1
elif key.split('.')[-1] == 'bias':
self.state_dict()[key][...] = 0
def block(self, name, in_channels, out_channels, pool_stride=2):
""" Stack n bottleneck modules where n is inferred from the depth of the network.
Args:
name: string name of the current block.
in_channels: number of input channels
out_channels: number of output channels
pool_stride: factor to reduce the spatial dimensionality in the first bottleneck of the block.
Returns: a Module consisting of n sequential bottlenecks.
"""
block = nn.Sequential()
for bottleneck in range(self.block_depth):
name_ = '%s_bottleneck_%d' % (name, bottleneck)
if bottleneck == 0:
block.add_module(name_, ResNeXtBottleneck(in_channels,
out_channels, pool_stride, self.cardinality, self.
widen_factor))
else:
block.add_module(name_, ResNeXtBottleneck(out_channels,
out_channels, 1, self.cardinality, self.widen_factor))
return block
def forward(self, x):
x = self.conv_1_3x3.forward(x)
x = F.relu(self.bn_1.forward(x), inplace=True)
x = self.stage_1.forward(x)
x = self.stage_2.forward(x)
ax = self.stage_att(x)
ax = self.relu(self.bn_att2(self.att_conv(ax)))
bs, cs, ys, xs = ax.shape
self.att = self.sigmoid(self.bn_att3(self.att_conv3(ax)))
ax = self.att_conv2(ax)
ax = self.att_gap(ax)
ax = ax.view(ax.size(0), -1)
rx = x * self.att
rx = rx + x
rx = self.stage_3.forward(rx)
rx = F.avg_pool2d(rx, 8, 1)
rx = rx.view(-1, 1024)
rx = self.classifier(rx)
return ax, rx, self.att
<mask token>
|
<mask token>
class ResNeXtBottleneck(nn.Module):
"""
    ResNeXt bottleneck type C (https://github.com/facebookresearch/ResNeXt/blob/master/models/resnext.lua)
"""
def __init__(self, in_channels, out_channels, stride, cardinality,
widen_factor):
""" Constructor
Args:
in_channels: input channel dimensionality
out_channels: output channel dimensionality
stride: conv stride. Replaces pooling layer.
cardinality: num of convolution groups.
widen_factor: factor to reduce the input dimensionality before convolution.
"""
super(ResNeXtBottleneck, self).__init__()
D = cardinality * out_channels // widen_factor
self.conv_reduce = nn.Conv2d(in_channels, D, kernel_size=1, stride=
1, padding=0, bias=False)
self.bn_reduce = nn.BatchNorm2d(D)
self.conv_conv = nn.Conv2d(D, D, kernel_size=3, stride=stride,
padding=1, groups=cardinality, bias=False)
self.bn = nn.BatchNorm2d(D)
self.conv_expand = nn.Conv2d(D, out_channels, kernel_size=1, stride
=1, padding=0, bias=False)
self.bn_expand = nn.BatchNorm2d(out_channels)
self.shortcut = nn.Sequential()
if in_channels != out_channels:
self.shortcut.add_module('shortcut_conv', nn.Conv2d(in_channels,
out_channels, kernel_size=1, stride=stride, padding=0, bias
=False))
self.shortcut.add_module('shortcut_bn', nn.BatchNorm2d(
out_channels))
def forward(self, x):
bottleneck = self.conv_reduce.forward(x)
bottleneck = F.relu(self.bn_reduce.forward(bottleneck), inplace=True)
bottleneck = self.conv_conv.forward(bottleneck)
bottleneck = F.relu(self.bn.forward(bottleneck), inplace=True)
bottleneck = self.conv_expand.forward(bottleneck)
bottleneck = self.bn_expand.forward(bottleneck)
residual = self.shortcut.forward(x)
return F.relu(residual + bottleneck, inplace=True)
class CifarResNeXt(nn.Module):
"""
ResNext optimized for the Cifar dataset, as specified in
https://arxiv.org/pdf/1611.05431.pdf
"""
def __init__(self, cardinality, depth, num_classes, widen_factor=4,
dropRate=0):
""" Constructor
Args:
cardinality: number of convolution groups.
depth: number of layers.
num_classes: number of classes
widen_factor: factor to adjust the channel dimensionality
"""
super(CifarResNeXt, self).__init__()
self.cardinality = cardinality
self.depth = depth
self.block_depth = (self.depth - 2) // 9
self.widen_factor = widen_factor
self.num_classes = num_classes
self.output_size = 64
self.stages = [64, 64 * self.widen_factor, 128 * self.widen_factor,
256 * self.widen_factor]
self.conv_1_3x3 = nn.Conv2d(3, 64, 3, 1, 1, bias=False)
self.bn_1 = nn.BatchNorm2d(64)
self.stage_1 = self.block('stage_1', self.stages[0], self.stages[1], 1)
self.stage_2 = self.block('stage_2', self.stages[1], self.stages[2], 2)
self.stage_3 = self.block('stage_3', self.stages[2], self.stages[3], 2)
self.classifier = nn.Linear(1024, num_classes)
self.stage_att = self.block('stage_att', self.stages[2], self.
stages[3], 1)
self.bn_att = nn.BatchNorm2d(self.stages[3])
self.att_conv = nn.Conv2d(self.stages[3], num_classes, kernel_size=
1, padding=0, bias=False)
self.bn_att2 = nn.BatchNorm2d(num_classes)
self.att_conv2 = nn.Conv2d(num_classes, num_classes, kernel_size=1,
padding=0, bias=False)
self.att_conv3 = nn.Conv2d(num_classes, 1, kernel_size=3, padding=1,
bias=False)
self.bn_att3 = nn.BatchNorm2d(1)
self.att_gap = nn.AvgPool2d(16)
self.sigmoid = nn.Sigmoid()
self.relu = nn.ReLU(inplace=True)
init.kaiming_normal(self.classifier.weight)
for key in self.state_dict():
if key.split('.')[-1] == 'weight':
if 'conv' in key:
init.kaiming_normal(self.state_dict()[key], mode='fan_out')
if 'bn' in key:
self.state_dict()[key][...] = 1
elif key.split('.')[-1] == 'bias':
self.state_dict()[key][...] = 0
def block(self, name, in_channels, out_channels, pool_stride=2):
""" Stack n bottleneck modules where n is inferred from the depth of the network.
Args:
name: string name of the current block.
in_channels: number of input channels
out_channels: number of output channels
pool_stride: factor to reduce the spatial dimensionality in the first bottleneck of the block.
Returns: a Module consisting of n sequential bottlenecks.
"""
block = nn.Sequential()
for bottleneck in range(self.block_depth):
name_ = '%s_bottleneck_%d' % (name, bottleneck)
if bottleneck == 0:
block.add_module(name_, ResNeXtBottleneck(in_channels,
out_channels, pool_stride, self.cardinality, self.
widen_factor))
else:
block.add_module(name_, ResNeXtBottleneck(out_channels,
out_channels, 1, self.cardinality, self.widen_factor))
return block
def forward(self, x):
x = self.conv_1_3x3.forward(x)
x = F.relu(self.bn_1.forward(x), inplace=True)
x = self.stage_1.forward(x)
x = self.stage_2.forward(x)
ax = self.stage_att(x)
ax = self.relu(self.bn_att2(self.att_conv(ax)))
bs, cs, ys, xs = ax.shape
self.att = self.sigmoid(self.bn_att3(self.att_conv3(ax)))
ax = self.att_conv2(ax)
ax = self.att_gap(ax)
ax = ax.view(ax.size(0), -1)
rx = x * self.att
rx = rx + x
rx = self.stage_3.forward(rx)
rx = F.avg_pool2d(rx, 8, 1)
rx = rx.view(-1, 1024)
rx = self.classifier(rx)
return ax, rx, self.att
def resnext(**kwargs):
"""Constructs a ResNeXt.
"""
model = CifarResNeXt(**kwargs)
return model
|
<mask token>
__all__ = ['resnext']
class ResNeXtBottleneck(nn.Module):
"""
    RexNeXt bottleneck type C (https://github.com/facebookresearch/ResNeXt/blob/master/models/resnext.lua)
"""
def __init__(self, in_channels, out_channels, stride, cardinality,
widen_factor):
""" Constructor
Args:
in_channels: input channel dimensionality
out_channels: output channel dimensionality
stride: conv stride. Replaces pooling layer.
cardinality: num of convolution groups.
widen_factor: factor to reduce the input dimensionality before convolution.
"""
super(ResNeXtBottleneck, self).__init__()
D = cardinality * out_channels // widen_factor
self.conv_reduce = nn.Conv2d(in_channels, D, kernel_size=1, stride=
1, padding=0, bias=False)
self.bn_reduce = nn.BatchNorm2d(D)
self.conv_conv = nn.Conv2d(D, D, kernel_size=3, stride=stride,
padding=1, groups=cardinality, bias=False)
self.bn = nn.BatchNorm2d(D)
self.conv_expand = nn.Conv2d(D, out_channels, kernel_size=1, stride
=1, padding=0, bias=False)
self.bn_expand = nn.BatchNorm2d(out_channels)
self.shortcut = nn.Sequential()
if in_channels != out_channels:
self.shortcut.add_module('shortcut_conv', nn.Conv2d(in_channels,
out_channels, kernel_size=1, stride=stride, padding=0, bias
=False))
self.shortcut.add_module('shortcut_bn', nn.BatchNorm2d(
out_channels))
def forward(self, x):
bottleneck = self.conv_reduce.forward(x)
bottleneck = F.relu(self.bn_reduce.forward(bottleneck), inplace=True)
bottleneck = self.conv_conv.forward(bottleneck)
bottleneck = F.relu(self.bn.forward(bottleneck), inplace=True)
bottleneck = self.conv_expand.forward(bottleneck)
bottleneck = self.bn_expand.forward(bottleneck)
residual = self.shortcut.forward(x)
return F.relu(residual + bottleneck, inplace=True)
class CifarResNeXt(nn.Module):
"""
ResNext optimized for the Cifar dataset, as specified in
https://arxiv.org/pdf/1611.05431.pdf
"""
def __init__(self, cardinality, depth, num_classes, widen_factor=4,
dropRate=0):
""" Constructor
Args:
cardinality: number of convolution groups.
depth: number of layers.
num_classes: number of classes
widen_factor: factor to adjust the channel dimensionality
"""
super(CifarResNeXt, self).__init__()
self.cardinality = cardinality
self.depth = depth
self.block_depth = (self.depth - 2) // 9
self.widen_factor = widen_factor
self.num_classes = num_classes
self.output_size = 64
self.stages = [64, 64 * self.widen_factor, 128 * self.widen_factor,
256 * self.widen_factor]
self.conv_1_3x3 = nn.Conv2d(3, 64, 3, 1, 1, bias=False)
self.bn_1 = nn.BatchNorm2d(64)
self.stage_1 = self.block('stage_1', self.stages[0], self.stages[1], 1)
self.stage_2 = self.block('stage_2', self.stages[1], self.stages[2], 2)
self.stage_3 = self.block('stage_3', self.stages[2], self.stages[3], 2)
self.classifier = nn.Linear(1024, num_classes)
self.stage_att = self.block('stage_att', self.stages[2], self.
stages[3], 1)
self.bn_att = nn.BatchNorm2d(self.stages[3])
self.att_conv = nn.Conv2d(self.stages[3], num_classes, kernel_size=
1, padding=0, bias=False)
self.bn_att2 = nn.BatchNorm2d(num_classes)
self.att_conv2 = nn.Conv2d(num_classes, num_classes, kernel_size=1,
padding=0, bias=False)
self.att_conv3 = nn.Conv2d(num_classes, 1, kernel_size=3, padding=1,
bias=False)
self.bn_att3 = nn.BatchNorm2d(1)
self.att_gap = nn.AvgPool2d(16)
self.sigmoid = nn.Sigmoid()
self.relu = nn.ReLU(inplace=True)
init.kaiming_normal(self.classifier.weight)
for key in self.state_dict():
if key.split('.')[-1] == 'weight':
if 'conv' in key:
init.kaiming_normal(self.state_dict()[key], mode='fan_out')
if 'bn' in key:
self.state_dict()[key][...] = 1
elif key.split('.')[-1] == 'bias':
self.state_dict()[key][...] = 0
def block(self, name, in_channels, out_channels, pool_stride=2):
""" Stack n bottleneck modules where n is inferred from the depth of the network.
Args:
name: string name of the current block.
in_channels: number of input channels
out_channels: number of output channels
pool_stride: factor to reduce the spatial dimensionality in the first bottleneck of the block.
Returns: a Module consisting of n sequential bottlenecks.
"""
block = nn.Sequential()
for bottleneck in range(self.block_depth):
name_ = '%s_bottleneck_%d' % (name, bottleneck)
if bottleneck == 0:
block.add_module(name_, ResNeXtBottleneck(in_channels,
out_channels, pool_stride, self.cardinality, self.
widen_factor))
else:
block.add_module(name_, ResNeXtBottleneck(out_channels,
out_channels, 1, self.cardinality, self.widen_factor))
return block
def forward(self, x):
x = self.conv_1_3x3.forward(x)
x = F.relu(self.bn_1.forward(x), inplace=True)
x = self.stage_1.forward(x)
x = self.stage_2.forward(x)
ax = self.stage_att(x)
ax = self.relu(self.bn_att2(self.att_conv(ax)))
bs, cs, ys, xs = ax.shape
self.att = self.sigmoid(self.bn_att3(self.att_conv3(ax)))
ax = self.att_conv2(ax)
ax = self.att_gap(ax)
ax = ax.view(ax.size(0), -1)
rx = x * self.att
rx = rx + x
rx = self.stage_3.forward(rx)
rx = F.avg_pool2d(rx, 8, 1)
rx = rx.view(-1, 1024)
rx = self.classifier(rx)
return ax, rx, self.att
def resnext(**kwargs):
"""Constructs a ResNeXt.
"""
model = CifarResNeXt(**kwargs)
return model
|
"""
Creates a ResNeXt Model as defined in:
Xie, S., Girshick, R., Dollar, P., Tu, Z., & He, K. (2016).
Aggregated residual transformations for deep neural networks.
arXiv preprint arXiv:1611.05431.
import from https://github.com/prlz77/ResNeXt.pytorch/blob/master/models/model.py
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn import init
__all__ = ['resnext']
class ResNeXtBottleneck(nn.Module):
"""
    ResNeXt bottleneck type C (https://github.com/facebookresearch/ResNeXt/blob/master/models/resnext.lua)
"""
def __init__(self, in_channels, out_channels, stride, cardinality, widen_factor):
""" Constructor
Args:
in_channels: input channel dimensionality
out_channels: output channel dimensionality
stride: conv stride. Replaces pooling layer.
cardinality: num of convolution groups.
widen_factor: factor to reduce the input dimensionality before convolution.
"""
super(ResNeXtBottleneck, self).__init__()
D = cardinality * out_channels // widen_factor
self.conv_reduce = nn.Conv2d(in_channels, D, kernel_size=1, stride=1, padding=0, bias=False)
self.bn_reduce = nn.BatchNorm2d(D)
self.conv_conv = nn.Conv2d(D, D, kernel_size=3, stride=stride, padding=1, groups=cardinality, bias=False)
self.bn = nn.BatchNorm2d(D)
self.conv_expand = nn.Conv2d(D, out_channels, kernel_size=1, stride=1, padding=0, bias=False)
self.bn_expand = nn.BatchNorm2d(out_channels)
self.shortcut = nn.Sequential()
if in_channels != out_channels:
self.shortcut.add_module('shortcut_conv', nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=stride, padding=0, bias=False))
self.shortcut.add_module('shortcut_bn', nn.BatchNorm2d(out_channels))
def forward(self, x):
bottleneck = self.conv_reduce.forward(x)
bottleneck = F.relu(self.bn_reduce.forward(bottleneck), inplace=True)
bottleneck = self.conv_conv.forward(bottleneck)
bottleneck = F.relu(self.bn.forward(bottleneck), inplace=True)
bottleneck = self.conv_expand.forward(bottleneck)
bottleneck = self.bn_expand.forward(bottleneck)
residual = self.shortcut.forward(x)
return F.relu(residual + bottleneck, inplace=True)
class CifarResNeXt(nn.Module):
"""
ResNext optimized for the Cifar dataset, as specified in
https://arxiv.org/pdf/1611.05431.pdf
"""
def __init__(self, cardinality, depth, num_classes, widen_factor=4, dropRate=0):
""" Constructor
Args:
cardinality: number of convolution groups.
depth: number of layers.
num_classes: number of classes
widen_factor: factor to adjust the channel dimensionality
"""
super(CifarResNeXt, self).__init__()
self.cardinality = cardinality
self.depth = depth
self.block_depth = (self.depth - 2) // 9
self.widen_factor = widen_factor
self.num_classes = num_classes
self.output_size = 64
self.stages = [64, 64 * self.widen_factor, 128 * self.widen_factor, 256 * self.widen_factor]
self.conv_1_3x3 = nn.Conv2d(3, 64, 3, 1, 1, bias=False)
self.bn_1 = nn.BatchNorm2d(64)
self.stage_1 = self.block('stage_1', self.stages[0], self.stages[1], 1)
self.stage_2 = self.block('stage_2', self.stages[1], self.stages[2], 2)
self.stage_3 = self.block('stage_3', self.stages[2], self.stages[3], 2)
self.classifier = nn.Linear(1024, num_classes)
self.stage_att = self.block('stage_att', self.stages[2], self.stages[3], 1)
self.bn_att = nn.BatchNorm2d(self.stages[3])
self.att_conv = nn.Conv2d(self.stages[3], num_classes, kernel_size=1, padding=0,
bias=False)
self.bn_att2 = nn.BatchNorm2d(num_classes)
self.att_conv2 = nn.Conv2d(num_classes, num_classes, kernel_size=1, padding=0,
bias=False)
self.att_conv3 = nn.Conv2d(num_classes, 1, kernel_size=3, padding=1,
bias=False)
self.bn_att3 = nn.BatchNorm2d(1)
self.att_gap = nn.AvgPool2d(16)
self.sigmoid = nn.Sigmoid()
self.relu = nn.ReLU(inplace=True)
init.kaiming_normal(self.classifier.weight)
for key in self.state_dict():
if key.split('.')[-1] == 'weight':
if 'conv' in key:
init.kaiming_normal(self.state_dict()[key], mode='fan_out')
if 'bn' in key:
self.state_dict()[key][...] = 1
elif key.split('.')[-1] == 'bias':
self.state_dict()[key][...] = 0
def block(self, name, in_channels, out_channels, pool_stride=2):
""" Stack n bottleneck modules where n is inferred from the depth of the network.
Args:
name: string name of the current block.
in_channels: number of input channels
out_channels: number of output channels
pool_stride: factor to reduce the spatial dimensionality in the first bottleneck of the block.
Returns: a Module consisting of n sequential bottlenecks.
"""
block = nn.Sequential()
for bottleneck in range(self.block_depth):
name_ = '%s_bottleneck_%d' % (name, bottleneck)
if bottleneck == 0:
block.add_module(name_, ResNeXtBottleneck(in_channels, out_channels, pool_stride, self.cardinality,
self.widen_factor))
else:
block.add_module(name_,
ResNeXtBottleneck(out_channels, out_channels, 1, self.cardinality, self.widen_factor))
return block
def forward(self, x):
x = self.conv_1_3x3.forward(x)
x = F.relu(self.bn_1.forward(x), inplace=True)
x = self.stage_1.forward(x)
x = self.stage_2.forward(x)
ax = self.stage_att(x)
ax = self.relu(self.bn_att2(self.att_conv(ax)))
bs, cs, ys, xs = ax.shape
self.att = self.sigmoid(self.bn_att3(self.att_conv3(ax)))
# self.att = self.att.view(bs, 1, ys, xs)
ax = self.att_conv2(ax)
ax = self.att_gap(ax)
ax = ax.view(ax.size(0), -1)
rx = x * self.att
rx = rx + x
rx = self.stage_3.forward(rx)
rx = F.avg_pool2d(rx, 8, 1)
rx = rx.view(-1, 1024)
rx = self.classifier(rx)
return ax, rx, self.att
def resnext(**kwargs):
"""Constructs a ResNeXt.
"""
model = CifarResNeXt(**kwargs)
return model
# """
# resneXt for cifar with pytorch
# Reference:
# [1] S. Xie, R. Girshick, P. Dollar, Z. Tu and K. He. Aggregated residual transformations for deep neural networks. In CVPR, 2017
# """
#
# import torch
# import torch.nn as nn
# import math
#
#
# class Bottleneck(nn.Module):
# expansion = 4
#
# def __init__(self, inplanes, planes, cardinality, baseWidth, stride=1, downsample=None):
# super(Bottleneck, self).__init__()
# D = int(planes * (baseWidth / 64.))
# C = cardinality
# self.conv1 = nn.Conv2d(inplanes, D * C, kernel_size=1, bias=False)
# self.bn1 = nn.BatchNorm2d(D * C)
# self.conv2 = nn.Conv2d(D * C, D * C, kernel_size=3, stride=stride, padding=1, groups=C, bias=False)
# self.bn2 = nn.BatchNorm2d(D * C)
# self.conv3 = nn.Conv2d(D * C, planes * 4, kernel_size=1, bias=False)
# self.bn3 = nn.BatchNorm2d(planes * 4)
# self.relu = nn.ReLU(inplace=True)
# self.downsample = downsample
# self.stride = stride
#
# def forward(self, x):
# residual = x
#
# out = self.conv1(x)
# out = self.bn1(out)
# out = self.relu(out)
#
# out = self.conv2(out)
# out = self.bn2(out)
# out = self.relu(out)
#
# out = self.conv3(out)
# out = self.bn3(out)
#
# if self.downsample is not None:
# residual = self.downsample(x)
#
# if residual.size() != out.size():
# print(out.size(), residual.size())
# out += residual
# out = self.relu(out)
#
# return out
#
#
# class ResNeXt_Cifar(nn.Module):
#
# def __init__(self, block, layers, cardinality, baseWidth, num_classes=10):
# super(ResNeXt_Cifar, self).__init__()
# self.inplanes = 64
# self.cardinality = cardinality
# self.baseWidth = baseWidth
# self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
# self.bn1 = nn.BatchNorm2d(64)
# self.relu = nn.ReLU(inplace=True)
# self.layer1 = self._make_layer(block, 64, layers[0])
# self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
# self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
# self.avgpool = nn.AvgPool2d(8, stride=1)
# self.fc = nn.Linear(256 * block.expansion, num_classes)
#
# for m in self.modules():
# if isinstance(m, nn.Conv2d):
# n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
# m.weight.data.normal_(0, math.sqrt(2. / n))
# elif isinstance(m, nn.BatchNorm2d):
# m.weight.data.fill_(1)
# m.bias.data.zero_()
#
# def _make_layer(self, block, planes, blocks, stride=1):
# downsample = None
# if stride != 1 or self.inplanes != planes * block.expansion:
# downsample = nn.Sequential(
# nn.Conv2d(self.inplanes, planes * block.expansion, kernel_size=1, stride=stride, bias=False),
# nn.BatchNorm2d(planes * block.expansion)
# )
#
# layers = []
# layers.append(block(self.inplanes, planes, self.cardinality, self.baseWidth, stride, downsample))
# self.inplanes = planes * block.expansion
# for _ in range(1, blocks):
# layers.append(block(self.inplanes, planes, self.cardinality, self.baseWidth))
#
# return nn.Sequential(*layers)
#
# def forward(self, x):
# x = self.conv1(x)
# x = self.bn1(x)
# x = self.relu(x)
#
# x = self.layer1(x)
# x = self.layer2(x)
# x = self.layer3(x)
#
# x = self.avgpool(x)
# x = x.view(x.size(0), -1)
# x = self.fc(x)
#
# return x
#
#
# def resneXt_cifar(depth, cardinality, baseWidth, **kwargs):
# assert (depth - 2) % 9 == 0
# n = int((depth - 2) / 9)
# model = ResNeXt_Cifar(Bottleneck, [n, n, n], cardinality, baseWidth, **kwargs)
# return model
# if __name__ == '__main__':
# net = resneXt_cifar(29, 16, 64)
# y = net(torch.randn(1, 3, 32, 32))
# print(net)
# print(y.size())
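# A minimal smoke-test sketch for the resnext() constructor above, with
# assumed hyperparameters: depth must satisfy (depth - 2) % 9 == 0, and
# inputs are assumed CIFAR-sized 32x32 since the attention branch hard-codes
# AvgPool2d(16) and the classifier expects 1024 features. Assumes a PyTorch
# version where the deprecated init.kaiming_normal alias used above resolves.
if __name__ == '__main__':
    model = resnext(cardinality=8, depth=29, num_classes=10)  # assumed values
    ax, rx, att = model(torch.randn(2, 3, 32, 32))
    # attention-branch logits, refined logits, and the 16x16 attention map
    print(ax.size(), rx.size(), att.size())
    # -> torch.Size([2, 10]) torch.Size([2, 10]) torch.Size([2, 1, 16, 16])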
|
[
1,
8,
10,
11,
13
] |
2,294 |
32b961f3971819fdbbe1a30fd7cf1883353c1854
|
<mask token>
|
<mask token>
for b in range(1, w + 1):
print('*', end='')
print('')
for i in range(1, h - 1):
print('*', end='')
for j in range(1, w - 1):
print(' ', end='')
print('*', end='')
print('')
for b in range(1, w + 1):
print('*', end='')
print('')
|
w = int(input('Width ?'))
h = int(input('Height ?'))
for b in range(1, w + 1):
print('*', end='')
print('')
for i in range(1, h - 1):
print('*', end='')
for j in range(1, w - 1):
print(' ', end='')
print('*', end='')
print('')
for b in range(1, w + 1):
print('*', end='')
print('')
|
w = int(input("Width ?"))
h= int(input("Height ?"))
for b in range(1,w+1):
print ("*", end='')
print("")
for i in range(1,h-1):
print ("*", end='')
for j in range(1,w-1):
print (" ", end='')
print ("*", end='')
print("")
for b in range(1,w+1):
print ("*", end='')
print("")
| null |
[
0,
1,
2,
3
] |
2,295 |
bac3f78b8eb9c4595bc9e8b85587819f92329729
|
#!/usr/bin/env python
"""
Calculate trigger efficiency error
"""
__author__ = "XIAO Suyu<[email protected]>"
__copyright__ = "Copyright (c) XIAO Suyu"
__created__ = "[2018-02-06 Tue 15:25]"
import math
n1 = 4212.0
n2 = 4237.0
N = 5000.0
eff = n1 / n2
err = math.sqrt(eff*(1-eff)/N)
print 'trig_eff = %.4f +- %f' % (eff, err)
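# The error is the standard binomial uncertainty on an efficiency,
# sqrt(eff * (1 - eff) / N), with N presumably the generated sample size.
# The last line uses a Python 2 print statement; the Python 3 form would be:
#     print('trig_eff = %.4f +- %f' % (eff, err))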
| null | null | null | null |
[
0
] |
2,296 |
18391df9a3e52400fe4fc54d6381b9ce21e25f0b
|
<mask token>
class Templating(templating.Templating):
<mask token>
<mask token>
<mask token>
|
<mask token>
class Templating(templating.Templating):
"""
Application-specific templating implementation.
Overriding "args" methods makes it trivial to push extra, application-wide
data to the templates without any assistance from the resource.
"""
def __init__(self, app_conf):
renderer = make_renderer(app_conf)
templating.Templating.__init__(self, renderer)
<mask token>
|
<mask token>
class Templating(templating.Templating):
"""
Application-specific templating implementation.
Overriding "args" methods makes it trivial to push extra, application-wide
data to the templates without any assistance from the resource.
"""
def __init__(self, app_conf):
renderer = make_renderer(app_conf)
templating.Templating.__init__(self, renderer)
def make_renderer(app_conf):
"""
Create and return a restish.templating "renderer".
"""
import pkg_resources
import os.path
from restish.contrib.makorenderer import MakoRenderer
return MakoRenderer(directories=[pkg_resources.resource_filename(
'example', 'templates'), pkg_resources.resource_filename('formish',
'templates/mako'), pkg_resources.resource_filename('adminish',
'templates')], module_directory=os.path.join(app_conf['cache_dir'],
'templates'), input_encoding='utf-8', output_encoding='utf-8',
default_filters=['unicode', 'h'])
|
<mask token>
from restish import templating
class Templating(templating.Templating):
"""
Application-specific templating implementation.
Overriding "args" methods makes it trivial to push extra, application-wide
data to the templates without any assistance from the resource.
"""
def __init__(self, app_conf):
renderer = make_renderer(app_conf)
templating.Templating.__init__(self, renderer)
def make_renderer(app_conf):
"""
Create and return a restish.templating "renderer".
"""
import pkg_resources
import os.path
from restish.contrib.makorenderer import MakoRenderer
return MakoRenderer(directories=[pkg_resources.resource_filename(
'example', 'templates'), pkg_resources.resource_filename('formish',
'templates/mako'), pkg_resources.resource_filename('adminish',
'templates')], module_directory=os.path.join(app_conf['cache_dir'],
'templates'), input_encoding='utf-8', output_encoding='utf-8',
default_filters=['unicode', 'h'])
|
"""
Templating support library and renderer configuration.
"""
from restish import templating
class Templating(templating.Templating):
"""
Application-specific templating implementation.
Overriding "args" methods makes it trivial to push extra, application-wide
data to the templates without any assistance from the resource.
"""
def __init__(self, app_conf):
renderer = make_renderer(app_conf)
templating.Templating.__init__(self, renderer)
def make_renderer(app_conf):
"""
Create and return a restish.templating "renderer".
"""
# Uncomment for an example of Mako templating support.
import pkg_resources
import os.path
from restish.contrib.makorenderer import MakoRenderer
return MakoRenderer(
directories=[
pkg_resources.resource_filename('example', 'templates'),
pkg_resources.resource_filename('formish', 'templates/mako'),
pkg_resources.resource_filename('adminish', 'templates'),
],
module_directory=os.path.join(app_conf['cache_dir'], 'templates'),
input_encoding='utf-8', output_encoding='utf-8',
default_filters=['unicode', 'h']
)
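# A minimal usage sketch ('cache_dir' is the only key make_renderer() reads
# from app_conf; the path is illustrative):
#
#     templating = Templating({'cache_dir': '/tmp/example-cache'})
#
# restish applications typically expose such an object through the WSGI
# environ so that resources can render templates through it.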
|
[
1,
3,
4,
5,
6
] |
2,297 |
f15bc62fad2c47fed2e9e5d269284ebe7487b789
|
<mask token>
def _get_change_making_matrix(set_of_coins, r):
matrix = [[(0) for _ in range(r + 1)] for _ in range(len(set_of_coins) + 1)
]
for i in range(1, len(set_of_coins) + 1):
matrix[i][0] = i
return matrix
def change_making(coins, target):
"""This function assumes that all coins are available infinitely.
n is the number that we need to obtain with the fewest number of coins.
coins is a list or tuple with the available denominations."""
matrix = _get_change_making_matrix(coins, target)
for coin in range(1, len(coins) + 1):
for sub_target in range(1, target + 1):
if coins[coin - 1] == sub_target:
matrix[coin][sub_target] = 1 + matrix[coin - 1][sub_target]
elif coins[coin - 1] > sub_target:
matrix[coin][sub_target] = matrix[coin - 1][sub_target]
else:
matrix[coin][sub_target] = matrix[coin - 1][sub_target
] + matrix[coin][sub_target - coins[coin - 1]]
return matrix[-1][-1]
<mask token>
|
<mask token>
def _get_change_making_matrix(set_of_coins, r):
matrix = [[(0) for _ in range(r + 1)] for _ in range(len(set_of_coins) + 1)
]
for i in range(1, len(set_of_coins) + 1):
matrix[i][0] = i
return matrix
def change_making(coins, target):
"""This function assumes that all coins are available infinitely.
n is the number that we need to obtain with the fewest number of coins.
coins is a list or tuple with the available denominations."""
matrix = _get_change_making_matrix(coins, target)
for coin in range(1, len(coins) + 1):
for sub_target in range(1, target + 1):
if coins[coin - 1] == sub_target:
matrix[coin][sub_target] = 1 + matrix[coin - 1][sub_target]
elif coins[coin - 1] > sub_target:
matrix[coin][sub_target] = matrix[coin - 1][sub_target]
else:
matrix[coin][sub_target] = matrix[coin - 1][sub_target
] + matrix[coin][sub_target - coins[coin - 1]]
return matrix[-1][-1]
<mask token>
print(ways)
|
<mask token>
def _get_change_making_matrix(set_of_coins, r):
matrix = [[(0) for _ in range(r + 1)] for _ in range(len(set_of_coins) + 1)
]
for i in range(1, len(set_of_coins) + 1):
matrix[i][0] = i
return matrix
def change_making(coins, target):
"""This function assumes that all coins are available infinitely.
n is the number that we need to obtain with the fewest number of coins.
coins is a list or tuple with the available denominations."""
matrix = _get_change_making_matrix(coins, target)
for coin in range(1, len(coins) + 1):
for sub_target in range(1, target + 1):
if coins[coin - 1] == sub_target:
matrix[coin][sub_target] = 1 + matrix[coin - 1][sub_target]
elif coins[coin - 1] > sub_target:
matrix[coin][sub_target] = matrix[coin - 1][sub_target]
else:
matrix[coin][sub_target] = matrix[coin - 1][sub_target
] + matrix[coin][sub_target - coins[coin - 1]]
return matrix[-1][-1]
input1 = input()
input2 = input()
n, m = input1.strip().split(' ')
n, m = [int(n), int(m)]
c = list(map(int, input2.strip().split(' ')))
ways = change_making(c, n)
print(ways)
|
import sys
def _get_change_making_matrix(set_of_coins, r):
matrix = [[(0) for _ in range(r + 1)] for _ in range(len(set_of_coins) + 1)
]
for i in range(1, len(set_of_coins) + 1):
matrix[i][0] = i
return matrix
def change_making(coins, target):
"""This function assumes that all coins are available infinitely.
n is the number that we need to obtain with the fewest number of coins.
coins is a list or tuple with the available denominations."""
matrix = _get_change_making_matrix(coins, target)
for coin in range(1, len(coins) + 1):
for sub_target in range(1, target + 1):
if coins[coin - 1] == sub_target:
matrix[coin][sub_target] = 1 + matrix[coin - 1][sub_target]
elif coins[coin - 1] > sub_target:
matrix[coin][sub_target] = matrix[coin - 1][sub_target]
else:
matrix[coin][sub_target] = matrix[coin - 1][sub_target
] + matrix[coin][sub_target - coins[coin - 1]]
return matrix[-1][-1]
input1 = input()
input2 = input()
n, m = input1.strip().split(' ')
n, m = [int(n), int(m)]
c = list(map(int, input2.strip().split(' ')))
ways = change_making(c, n)
print(ways)
|
#!/bin/python3
import sys
# import numpy as np
def _get_change_making_matrix(set_of_coins, r):
matrix = [[0 for _ in range(r + 1)] for _ in range(len(set_of_coins) + 1)]
# matrix = np.array(matrix)
for i in range(1,len(set_of_coins) + 1):
matrix[i][0] = i
return matrix
def change_making(coins, target):
"""This function assumes that all coins are available infinitely.
n is the number that we need to obtain with the fewest number of coins.
coins is a list or tuple with the available denominations."""
matrix = _get_change_making_matrix(coins, target)
for coin in range(1, len(coins) + 1):
for sub_target in range(1, target + 1):
# Just use the coin coins[c - 1].
if coins[coin - 1] == sub_target:
matrix[coin][sub_target] = 1+matrix[coin-1][sub_target]
# coins[c - 1] cannot be included.
# We use the previous solution for making r,
# excluding coins[c - 1].
elif coins[coin - 1] > sub_target:
matrix[coin][sub_target] = matrix[coin - 1][sub_target]
# We can use coins[c - 1].
# We need to decide which one of the following solutions is the best:
# 1. Using the previous solution for making r (without using coins[c - 1]).
# 2. Using the previous solution for making r - coins[c - 1] (without using coins[c - 1]) plus this 1 extra coin.
else:
matrix[coin][sub_target] = (matrix[coin - 1][sub_target]) + (
matrix[coin][sub_target - coins[coin - 1]])
return matrix[-1][-1]
input1 = input()
input2 = input()
# input1 = "10 4"
# input2 = "2 5 3 6"
n, m = input1.strip().split(' ')
n, m = [int(n), int(m)]
c = list(map(int, input2.strip().split(' ')))
# Print the number of ways of making change for 'n' units using coins having the values given by 'c'
ways = change_making(c, n)
print(ways)
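# A worked check against the sample input in the comments above: for n = 10
# and c = [2, 5, 3, 6] the program prints 5, the multisets being
# 2+2+2+2+2, 2+2+3+3, 2+2+6, 2+3+5 and 5+5. The unusual base case
# matrix[i][0] = i is never actually read: the coins[coin - 1] == sub_target
# branch adds the 1 explicitly instead of consulting column 0.
assert change_making([2, 5, 3, 6], 10) == 5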
|
[
2,
3,
4,
5,
6
] |
2,298 |
2dcf0466c84c952c60dcfce86498f063f43726f3
|
#!/usr/bin/python
import socket
import threading
import signal
import sys
class Proxy:
#initialise server socket
def __init__(self):
self.serverSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.serverSocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.serverSocket.bind(('', 1700))
self.serverSocket.listen(1)
self.__clients = {}
#proxy thread to handle requests
def proxy_thread(self, conn, client_addr):
request = conn.recv(1024) # get the request from browser
line = request.split('\n')[0] # parse the first line
url = line.split(' ')[1]
mName = 'localhost'
mPort = 12000
proxySocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
proxySocket.connect((mName,mPort))
proxySocket.send(url)
#blacklist from proxy side
response = proxySocket.recv(1024)
if "blacklist" in response:
conn.send('403: Forbidden')
conn.close()
return
else:
#get the host and port out the url path
self.path = url[7:]
i = self.path.find('/')
host = self.path[:i]
i = host.find(':')
if i!=-1:
port = int(host[i+1:])
host = host[:i]
else:
port = 80
try:
# create a socket to connect to the web server
webSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
webSocket.settimeout(5)
webSocket.connect((host, port))
webSocket.sendall(request) # send request to webserver
while 1:
data = webSocket.recv(1024) # receive data from web server
if (len(data) > 0):
conn.send(data) # send to browser
else:
break
webSocket.close()
conn.close()
except socket.error as error_msg:
if webSocket:
webSocket.close()
if conn:
conn.close()
#listen for web client to send request
def client(self):
while True:
(clientSocket, client_address) = self.serverSocket.accept() # Establish the connection
p = threading.Thread(name=self._getClientName(client_address), target=self.proxy_thread, args=(clientSocket, client_address))
p.setDaemon(True)
p.start()
def _getClientName(self, cli_addr):
return "Client"
if __name__ == '__main__':
proxy = Proxy()
proxy.client()
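# Assumed deployment, inferred from the code above: a separate blacklist
# service listens on localhost:12000, receives the requested URL, and replies
# with a response containing 'blacklist' for forbidden hosts; the browser's
# HTTP proxy is then pointed at localhost:1700. Note the script is Python 2:
# in Python 3, conn.recv() returns bytes, so `request` would need decoding
# before the string operations above.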
| null | null | null | null |
[
0
] |
2,299 |
1db866ca73bc264d474d5e5086c4a047d7e46546
|
<mask token>
|
<mask token>
with Session() as ps:
ps.app.runMenuItem(ps.app.stringIDToTypeID('toggleProofColors'))
|
<mask token>
from photoshop import Session
with Session() as ps:
ps.app.runMenuItem(ps.app.stringIDToTypeID('toggleProofColors'))
|
"""Toggle the proof color.
Like operating in the menu:
**View** > **Proof Colors** (Ctrl + Y)
"""
# Import local modules
from photoshop import Session
with Session() as ps:
ps.app.runMenuItem(ps.app.stringIDToTypeID("toggleProofColors"))
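# Since the menu item is a toggle, invoking it twice restores the previous
# proofing state -- a sketch using the same API as above:
#
#     with Session() as ps:
#         item = ps.app.stringIDToTypeID("toggleProofColors")
#         ps.app.runMenuItem(item)  # proof colors on (or off, if already on)
#         ps.app.runMenuItem(item)  # back to the previous state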
| null |
[
0,
1,
2,
3
] |